Compare commits

..

6 Commits

Author           SHA1        Message                                            Date
Marko Budiselic  9aa18dcb77  Add Unix specific comment                          2023-04-08 11:58:14 +00:00
Marko Budiselic  795845ccf7  Add more stuff to the env mac                      2022-10-09 09:28:31 +02:00
Marko Budiselic  fbc2b4c99b  Make mg-utils compile (not working properly yet)   2022-10-08 22:36:30 +02:00
Marko Budiselic  32facac135  Add place for the async timer concept              2022-10-08 10:10:24 +02:00
Marko Budiselic  7181e546f5  Add more details                                   2022-10-02 13:41:50 +02:00
Marko Budiselic  4d9bb27db7  The first few different things                     2022-09-17 13:14:16 +02:00

1434 changed files with 32519 additions and 162343 deletions

View File

@@ -1,7 +1,6 @@
 ---
-BasedOnStyle: Google
----
 Language: Cpp
+BasedOnStyle: Google
 Standard: "c++20"
 UseTab: Never
 DerivePointerAlignment: false
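Both sides of this hunk keep BasedOnStyle: Google for C++; the change only moves the key and drops the extra YAML document marker. For a quick local check, the usual way to apply a repository's .clang-format is (a sketch; src/foo.cpp is a placeholder path, not a file from this diff):

    clang-format --style=file -i src/foo.cpp   # --style=file picks up the nearest .clang-format; -i rewrites in place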

View File

@@ -6,7 +6,6 @@ Checks: '*,
 -altera-unroll-loops,
 -android-*,
 -cert-err58-cpp,
--cppcoreguidelines-avoid-do-while,
 -cppcoreguidelines-avoid-c-arrays,
 -cppcoreguidelines-avoid-goto,
 -cppcoreguidelines-avoid-magic-numbers,
@@ -61,11 +60,10 @@ Checks: '*,
 -readability-implicit-bool-conversion,
 -readability-magic-numbers,
 -readability-named-parameter,
--readability-identifier-length,
 -misc-no-recursion,
 -concurrency-mt-unsafe,
--bugprone-easily-swappable-parameters,
--bugprone-unchecked-optional-access'
+-bugprone-easily-swappable-parameters'
 WarningsAsErrors: ''
 HeaderFilterRegex: 'src/.*'
 AnalyzeTemporaryDtors: false
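clang-tidy discovers the nearest .clang-tidy on its own, so the Checks list above applies to any local run as long as a compilation database is available (a sketch; the build directory and source file are placeholders, not paths from this diff):

    clang-tidy -p build src/foo.cpp   # -p names the directory holding compile_commands.json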

View File

@@ -33,4 +33,4 @@ for file in $modified_files; do
   fi
 done;
-exit ${FAIL}
+return ${FAIL}
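The exit-versus-return swap on the last line changes how the script may be invoked: return is only legal inside a function or a sourced script, while exit always terminates with the given status. A minimal sketch (hypothetical check.sh, not a file from this diff):

    FAIL=1
    check() { return $FAIL; }      # return: fine inside a function
    check || echo "check failed with $?"
    # ./check.sh   -> a top-level `return` here is an error in bash
    # . ./check.sh -> a top-level `return` hands $FAIL back to the caller
    exit $FAIL                     # works in both cases; ends with status 1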

View File

@@ -1,17 +1,19 @@
 ---
 name: Bug report
 about: Create a report to help us improve
-title: ""
+title: "[BUG] "
 labels: bug
+assignees: gitbuda, antonio2368
 ---
 
 **Memgraph version**
 Which version did you use?
 
 **Environment**
 Some information about the environment you are using Memgraph on: operating
-system, architecture (ARM, x86), how do you connect, with or without docker,
-which driver etc.
+system, how do you connect, with or without docker, which driver etc.
 
 **Describe the bug**
 A clear and concise description of what the bug is.
@@ -20,7 +22,6 @@ A clear and concise description of what the bug is.
 Steps to reproduce the behavior:
 1. Run the following query '...'
 2. Click on '....'
-3. ... IDEALLY: link to the workload info (DATASET & QUERIES) ...
 
 **Expected behavior**
 A clear and concise description of what you expected to happen.
@@ -31,11 +32,3 @@ your problem.
 **Additional context**
 Add any other context about the problem here.
-
-**Verification Environment**
-Once we fix it, what do you need to verify the fix?
-Do you need:
-  * Plain memgraph package -> for which Linux?
-  * Plain memgraph Docker image?
-  * Which architecture do you use ARM | x86?
-  * Full Memgraph platform?

View File

@ -1,28 +1,11 @@
### Description
Please briefly explain the changes you made here.
Please delete either the [master < EPIC] or [master < Task] part, depending on what are your needs.
[master < Epic] PR [master < Epic] PR
- [ ] Check, and update documentation if necessary
- [ ] Update [changelog](https://docs.memgraph.com/memgraph/changelog)
- [ ] Write E2E tests - [ ] Write E2E tests
- [ ] Compare the [benchmarking results](https://bench-graph.memgraph.com/) between the master branch and the Epic branch - [ ] Compare the [benchmarking results](https://bench-graph.memgraph.com/) between the master branch and the Epic branch
- [ ] Provide the full content or a guide for the final git message - [ ] Provide the full content or a guide for the final git message
- [FINAL GIT MESSAGE]
[master < Task] PR [master < Task] PR
- [ ] Check, and update documentation if necessary
- [ ] Update [changelog](https://docs.memgraph.com/memgraph/changelog)
- [ ] Provide the full content or a guide for the final git message - [ ] Provide the full content or a guide for the final git message
- **[FINAL GIT MESSAGE]**
### Documentation checklist
- [ ] Add the documentation label tag
- [ ] Add the bug / feature label tag
- [ ] Add the milestone for which this feature is intended
- If not known, set for a later milestone
- [ ] Write a release note, including added/changed clauses
- **[Release note text]**
- [ ] Link the documentation PR here
- **[Documentation PR link]**
- [ ] Tag someone from docs team in the comments

View File

@@ -3,7 +3,7 @@ name: Daily Benchmark
 on:
   workflow_dispatch:
   schedule:
-    - cron: "0 22 * * *"
+    - cron: "0 1 * * *"
 
 jobs:
   release_benchmarks:
@@ -16,7 +16,7 @@ jobs:
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
@@ -59,7 +59,7 @@ jobs:
           source ve3/bin/activate
           pip install -r requirements.txt
           ./main.py --benchmark-name "macro_benchmark" \
-            --benchmark-results "../../tests/macro_benchmark/.harness_summary" \
+            --benchmark-results-path "../../tests/macro_benchmark/.harness_summary" \
             --github-run-id "${{ github.run_id }}" \
             --github-run-number "${{ github.run_number }}" \
             --head-branch-name "${{ env.BRANCH_NAME }}"
@@ -67,13 +67,7 @@ jobs:
       - name: Run mgbench
         run: |
           cd tests/mgbench
-          ./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results benchmark_pokec.json pokec/medium/*/*
-          ./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_supernode.json supernode
-          ./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_high_write_set_property.json high_write_set_property
-          ./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results cartesian.json cartesian
+          ./benchmark.py --num-workers-for-benchmark 12 --export-results benchmark_result.json pokec/medium/*/*
 
       - name: Upload mgbench results
         run: |
@@ -82,25 +76,7 @@ jobs:
           source ve3/bin/activate
           pip install -r requirements.txt
           ./main.py --benchmark-name "mgbench" \
-            --benchmark-results "../../tests/mgbench/benchmark_pokec.json" \
-            --github-run-id "${{ github.run_id }}" \
-            --github-run-number "${{ github.run_number }}" \
-            --head-branch-name "${{ env.BRANCH_NAME }}"
-          ./main.py --benchmark-name "supernode" \
-            --benchmark-results "../../tests/mgbench/benchmark_supernode.json" \
-            --github-run-id "${{ github.run_id }}" \
-            --github-run-number "${{ github.run_number }}" \
-            --head-branch-name "${{ env.BRANCH_NAME }}"
-          ./main.py --benchmark-name "high_write_set_property" \
-            --benchmark-results "../../tests/mgbench/benchmark_high_write_set_property.json" \
-            --github-run-id "${{ github.run_id }}" \
-            --github-run-number "${{ github.run_number }}" \
-            --head-branch-name "${{ env.BRANCH_NAME }}"
-          ./main.py --benchmark-name "cartesian" \
-            --benchmark-results "../../tests/mgbench/cartesian.json" \
+            --benchmark-results-path "../../tests/mgbench/benchmark_result.json" \
             --github-run-id "${{ github.run_id }}" \
             --github-run-number "${{ github.run_number }}" \
             --head-branch-name "${{ env.BRANCH_NAME }}"
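For reference, both schedules in the first hunk are standard five-field cron expressions, which GitHub Actions evaluates in UTC:

    # minute  hour  day-of-month  month  day-of-week
    0 22 * * *    # left side:  every day at 22:00 UTC
    0 1 * * *     # right side: every day at 01:00 UTC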

View File

@@ -14,111 +14,106 @@ on:
       - "**/*.md"
       - ".clang-format"
       - "CODEOWNERS"
-      - "licenses/*"
 
 jobs:
   community_build:
     name: "Community build"
-    runs-on: [self-hosted, Linux, X64, DockerMgBuild]
-    timeout-minutes: 60
+    runs-on: [self-hosted, Linux, X64, Diff]
     env:
       THREADS: 24
       MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
       MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
-      OS: debian-11
-      TOOLCHAIN: v5
-      ARCH: amd
-      BUILD_TYPE: RelWithDebInfo
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
           fetch-depth: 0
-      - name: Spin up mgbuild container
+      - name: Build community binaries
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            run
-      - name: Build release binaries
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --build-type $BUILD_TYPE \
-            --threads $THREADS \
-            build-memgraph --community
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Initialize dependencies.
+          ./init
+
+          # Build community binaries.
+          cd build
+          cmake -DCMAKE_BUILD_TYPE=release -DMG_ENTERPRISE=OFF ..
+          make -j$THREADS
       - name: Run unit tests
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --threads $THREADS \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph unit
-      - name: Stop mgbuild container
-        if: always()
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            stop --remove
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Run unit tests.
+          cd build
+          ctest -R memgraph__unit --output-on-failure -j$THREADS
 
   code_analysis:
     name: "Code analysis"
-    runs-on: [self-hosted, Linux, X64, DockerMgBuild]
-    timeout-minutes: 60
+    runs-on: [self-hosted, Linux, X64, Diff]
     env:
       THREADS: 24
       MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
       MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
-      OS: debian-11
-      TOOLCHAIN: v5
-      ARCH: amd
-      BUILD_TYPE: Debug
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
           fetch-depth: 0
-      - name: Spin up mgbuild container
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            run
 
       # This is also needed if we want do to comparison against other branches
       # See https://github.community/t/checkout-code-fails-when-it-runs-lerna-run-test-since-master/17920
       - name: Fetch all history for all tags and branches
         run: git fetch
-      - name: Initialize deps
+      - name: Build combined ASAN, UBSAN and coverage binaries
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --build-type $BUILD_TYPE \
-            --threads $THREADS \
-            build-memgraph --init-only
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Initialize dependencies.
+          ./init
+
+          cd build
+          cmake -DTEST_COVERAGE=ON -DASAN=ON -DUBSAN=ON ..
+          make -j$THREADS memgraph__unit
+      - name: Run unit tests
+        run: |
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Run unit tests. It is restricted to 2 threads intentionally, because higher concurrency makes the timing related tests unstable.
+          cd build
+          LSAN_OPTIONS=suppressions=$PWD/../tools/lsan.supp UBSAN_OPTIONS=halt_on_error=1 ctest -R memgraph__unit --output-on-failure -j2
+      - name: Compute code coverage
+        run: |
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Compute code coverage.
+          cd tools/github
+          ./coverage_convert
+
+          # Package code coverage.
+          cd generated
+          tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu
+      - name: Save code coverage
+        uses: actions/upload-artifact@v2
+        with:
+          name: "Code coverage"
+          path: tools/github/generated/code_coverage.tar.gz
 
       - name: Set base branch
         if: ${{ github.event_name == 'pull_request' }}
@@ -130,232 +125,128 @@ jobs:
         run: |
           echo "BASE_BRANCH=origin/master" >> $GITHUB_ENV
-      - name: Python code analysis
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph code-analysis --base-branch "${{ env.BASE_BRANCH }}"
-      - name: Build combined ASAN, UBSAN and coverage binaries
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --build-type $BUILD_TYPE \
-            --threads $THREADS \
-            build-memgraph --coverage --asan --ubsan
-      - name: Run unit tests
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph unit-coverage
-      - name: Compute code coverage
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph code-coverage
-      - name: Save code coverage
-        uses: actions/upload-artifact@v4
-        with:
-          name: "Code coverage(Code analysis)"
-          path: tools/github/generated/code_coverage.tar.gz
 
       - name: Run clang-tidy
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --threads $THREADS \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph clang-tidy --base-branch "${{ env.BASE_BRANCH }}"
-      - name: Stop mgbuild container
-        if: always()
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            stop --remove
+          source /opt/toolchain-v4/activate
+
+          # Restrict clang-tidy results only to the modified parts
+          git diff -U0 ${{ env.BASE_BRANCH }}... -- src | ./tools/github/clang-tidy/clang-tidy-diff.py -p 1 -j $THREADS -path build -regex ".+\.cpp" | tee ./build/clang_tidy_output.txt
+
+          # Fail if any warning is reported
+          ! cat ./build/clang_tidy_output.txt | ./tools/github/clang-tidy/grep_error_lines.sh > /dev/null
 
   debug_build:
     name: "Debug build"
-    runs-on: [self-hosted, Linux, X64, DockerMgBuild]
-    timeout-minutes: 100
+    runs-on: [self-hosted, Linux, X64, Diff]
     env:
       THREADS: 24
       MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
       MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
-      OS: debian-11
-      TOOLCHAIN: v5
-      ARCH: amd
-      BUILD_TYPE: Debug
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
           fetch-depth: 0
-      - name: Spin up mgbuild container
+      - name: Build debug binaries
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            run
-      - name: Build release binaries
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --build-type $BUILD_TYPE \
-            --threads $THREADS \
-            build-memgraph
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Initialize dependencies.
+          ./init
+
+          # Build debug binaries.
+          cd build
+          cmake ..
+          make -j$THREADS
       - name: Run leftover CTest tests
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --threads $THREADS \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph leftover-CTest
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Run leftover CTest tests (all except unit and benchmark tests).
+          cd build
+          ctest -E "(memgraph__unit|memgraph__benchmark)" --output-on-failure
       - name: Run drivers tests
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --threads $THREADS \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph drivers
-      - name: Run HA driver tests
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --threads $THREADS \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph drivers-high-availability
+          ./tests/drivers/run.sh
       - name: Run integration tests
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --threads $THREADS \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph integration
+          cd tests/integration
+          for name in *; do
+            if [ ! -d $name ]; then continue; fi
+            pushd $name >/dev/null
+            echo "Running: $name"
+            if [ -x prepare.sh ]; then
+              ./prepare.sh
+            fi
+            if [ -x runner.py ]; then
+              ./runner.py
+            elif [ -x runner.sh ]; then
+              ./runner.sh
+            fi
+            echo
+            popd >/dev/null
+          done
       - name: Run cppcheck and clang-format
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --threads $THREADS \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph cppcheck-and-clang-format
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Run cppcheck and clang-format.
+          cd tools/github
+          ./cppcheck_and_clang_format diff
       - name: Save cppcheck and clang-format errors
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         with:
-          name: "Code coverage(Debug build)"
+          name: "Code coverage"
           path: tools/github/cppcheck_and_clang_format.txt
-      - name: Stop mgbuild container
-        if: always()
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            stop --remove
 
   release_build:
     name: "Release build"
-    runs-on: [self-hosted, Linux, X64, DockerMgBuild]
-    timeout-minutes: 100
+    runs-on: [self-hosted, Linux, X64, Diff]
     env:
       THREADS: 24
       MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
       MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
-      OS: debian-11
-      TOOLCHAIN: v5
-      ARCH: amd
-      BUILD_TYPE: Release
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
           fetch-depth: 0
-      - name: Spin up mgbuild container
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            run
       - name: Build release binaries
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --build-type $BUILD_TYPE \
-            --threads $THREADS \
-            build-memgraph
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Initialize dependencies.
+          ./init
+
+          # Build release binaries.
+          cd build
+          cmake -DCMAKE_BUILD_TYPE=release ..
+          make -j$THREADS
       - name: Run GQL Behave tests
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph gql-behave
+          cd tests/gql_behave
+          ./continuous_integration
       - name: Save quality assurance status
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         with:
           name: "GQL Behave Status"
           path: |
@@ -364,241 +255,145 @@ jobs:
       - name: Run unit tests
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --threads $THREADS \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph unit
-      # This step will be skipped because the e2e stream tests have been disabled
-      # We need to fix this as soon as possible
-      - name: Ensure Kafka and Pulsar are up
-        if: false
-        run: |
-          cd tests/e2e/streams/kafka
-          docker-compose up -d
-          cd ../pulsar
-          docker-compose up -d
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Run unit tests.
+          cd build
+          ctest -R memgraph__unit --output-on-failure -j$THREADS
       - name: Run e2e tests
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph e2e
-      # Same as two steps prior
-      - name: Ensure Kafka and Pulsar are down
-        if: false
-        run: |
-          cd tests/e2e/streams/kafka
-          docker-compose down
-          cd ../pulsar
-          docker-compose down
+          # TODO(gitbuda): Setup mgclient and pymgclient properly.
+          cd tests
+          ./setup.sh
+          source ve3/bin/activate
+          cd e2e
+          LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory .
       - name: Run stress test (plain)
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph stress-plain
+          cd tests/stress
+          ./continuous_integration
       - name: Run stress test (SSL)
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph stress-ssl
+          cd tests/stress
+          ./continuous_integration --use-ssl
       - name: Run durability test
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph durability
+          cd tests/stress
+          source ve3/bin/activate
+          python3 durability --num-steps 5
       - name: Create enterprise DEB package
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            package-memgraph
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            copy --package
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          cd build
+
+          # create mgconsole
+          # we use the -B to force the build
+          make -j$THREADS -B mgconsole
+
+          # Create enterprise DEB package.
+          mkdir output && cd output
+          cpack -G DEB --config ../CPackConfig.cmake
       - name: Save enterprise DEB package
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         with:
           name: "Enterprise DEB package"
-          path: build/output/${{ env.OS }}/memgraph*.deb
-      - name: Copy build logs
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            copy --build-logs
+          path: build/output/memgraph*.deb
       - name: Save test data
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         if: always()
         with:
-          name: "Test data(Release build)"
-          path: build/logs
-      - name: Stop mgbuild container
-        if: always()
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            stop --remove
+          name: "Test data"
+          path: |
+            # multiple paths could be defined
+            build/logs
 
   release_jepsen_test:
     name: "Release Jepsen Test"
-    runs-on: [self-hosted, Linux, X64, DockerMgBuild]
-    timeout-minutes: 80
+    runs-on: [self-hosted, Linux, X64, Debian10, JepsenControl]
+    #continue-on-error: true
     env:
       THREADS: 24
       MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
       MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
-      OS: debian-12
-      TOOLCHAIN: v5
-      ARCH: amd
-      BUILD_TYPE: RelWithDebInfo
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
           fetch-depth: 0
-      - name: Spin up mgbuild container
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            run
       - name: Build release binaries
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --build-type $BUILD_TYPE \
-            --threads $THREADS \
-            build-memgraph
-      - name: Copy memgraph binary
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            copy --binary
-      - name: Refresh Jepsen Cluster
-        run: |
-          cd tests/jepsen
-          ./run.sh cluster-refresh
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Initialize dependencies.
+          ./init
+
+          # Build only memgraph release binarie.
+          cd build
+          cmake -DCMAKE_BUILD_TYPE=release ..
+          make -j$THREADS memgraph
       - name: Run Jepsen tests
         run: |
           cd tests/jepsen
-          ./run.sh test-all-individually --binary ../../build/memgraph --ignore-run-stdout-logs --ignore-run-stderr-logs
+          ./run.sh test --binary ../../build/memgraph --run-args "test-all --node-configs resources/node-config.edn" --ignore-run-stdout-logs --ignore-run-stderr-logs
       - name: Save Jepsen report
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         if: ${{ always() }}
         with:
           name: "Jepsen Report"
           path: tests/jepsen/Jepsen.tar.gz
-      - name: Stop mgbuild container
-        if: always()
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            stop --remove
 
   release_benchmarks:
     name: "Release benchmarks"
-    runs-on: [self-hosted, Linux, X64, DockerMgBuild, Gen7]
-    timeout-minutes: 60
+    runs-on: [self-hosted, Linux, X64, Diff, Gen7]
    env:
       THREADS: 24
       MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
       MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
-      OS: debian-11
-      TOOLCHAIN: v5
-      ARCH: amd
-      BUILD_TYPE: Release
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
           fetch-depth: 0
-      - name: Spin up mgbuild container
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            run
       - name: Build release binaries
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --build-type $BUILD_TYPE \
-            --threads $THREADS \
-            build-memgraph
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+
+          # Initialize dependencies.
+          ./init
+
+          # Build only memgraph release binaries.
+          cd build
+          cmake -DCMAKE_BUILD_TYPE=release ..
+          make -j$THREADS
       - name: Run macro benchmarks
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph macro-benchmark
+          cd tests/macro_benchmark
+          ./harness QuerySuite MemgraphRunner \
+            --groups aggregation 1000_create unwind_create dense_expand match \
+            --no-strict
 
       - name: Get branch name (merge)
         if: github.event_name != 'pull_request'
@@ -612,49 +407,29 @@ jobs:
       - name: Upload macro benchmark results
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph upload-to-bench-graph \
-              --benchmark-name "macro_benchmark" \
-              --benchmark-results "../../tests/macro_benchmark/.harness_summary" \
-              --github-run-id ${{ github.run_id }} \
-              --github-run-number ${{ github.run_number }} \
-              --head-branch-name ${{ env.BRANCH_NAME }}
+          cd tools/bench-graph-client
+          virtualenv -p python3 ve3
+          source ve3/bin/activate
+          pip install -r requirements.txt
+          ./main.py --benchmark-name "macro_benchmark" \
+            --benchmark-results-path "../../tests/macro_benchmark/.harness_summary" \
+            --github-run-id "${{ github.run_id }}" \
+            --github-run-number "${{ github.run_number }}" \
+            --head-branch-name "${{ env.BRANCH_NAME }}"
       - name: Run mgbench
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph mgbench
+          cd tests/mgbench
+          ./benchmark.py --num-workers-for-benchmark 12 --export-results benchmark_result.json pokec/medium/*/*
       - name: Upload mgbench results
         run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            --enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
-            --organization-name $MEMGRAPH_ORGANIZATION_NAME \
-            test-memgraph upload-to-bench-graph \
-              --benchmark-name "mgbench" \
-              --benchmark-results "../../tests/mgbench/benchmark_result.json" \
-              --github-run-id "${{ github.run_id }}" \
-              --github-run-number "${{ github.run_number }}" \
-              --head-branch-name "${{ env.BRANCH_NAME }}"
-      - name: Stop mgbuild container
-        if: always()
-        run: |
-          ./release/package/mgbuild.sh \
-            --toolchain $TOOLCHAIN \
-            --os $OS \
-            --arch $ARCH \
-            stop --remove
+          cd tools/bench-graph-client
+          virtualenv -p python3 ve3
+          source ve3/bin/activate
+          pip install -r requirements.txt
+          ./main.py --benchmark-name "mgbench" \
+            --benchmark-results-path "../../tests/mgbench/benchmark_result.json" \
+            --github-run-id "${{ github.run_id }}" \
+            --github-run-number "${{ github.run_number }}" \
+            --head-branch-name "${{ env.BRANCH_NAME }}"
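Taken together, the left-hand side of this workflow repeats one container lifecycle per job. Condensed into plain shell, using only values the workflow itself sets in env (OS=debian-11, TOOLCHAIN=v5, ARCH=amd, THREADS=24), the pattern is roughly:

    ./release/package/mgbuild.sh --toolchain v5 --os debian-11 --arch amd run            # spin up the build container
    ./release/package/mgbuild.sh --toolchain v5 --os debian-11 --arch amd \
      --build-type Release --threads 24 build-memgraph                                   # build inside it
    ./release/package/mgbuild.sh --toolchain v5 --os debian-11 --arch amd \
      test-memgraph unit                                                                 # run a test suite inside it
    ./release/package/mgbuild.sh --toolchain v5 --os debian-11 --arch amd stop --remove  # tear it down, even on failure

This is a condensed sketch of the invocations already shown above, not an additional documented interface of mgbuild.sh.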

View File

@@ -14,7 +14,7 @@ jobs:
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)

.github/workflows/package_all.yaml (new file, 178 added lines)
View File

@@ -0,0 +1,178 @@
name: Package All
# TODO(gitbuda): Cleanup docker container if GHA job was canceled.
on: workflow_dispatch
jobs:
centos-7:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-7
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: centos-7
path: build/output/centos-7/memgraph*.rpm
centos-9:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-9
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: centos-9
path: build/output/centos-9/memgraph*.rpm
debian-10:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-10
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: debian-10
path: build/output/debian-10/memgraph*.deb
debian-11:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: debian-11
path: build/output/debian-11/memgraph*.deb
docker:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
cd release/package
./run.sh package debian-11 --for-docker
./run.sh docker
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: docker
path: build/output/docker/memgraph*.tar.gz
ubuntu-1804:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-18.04
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: ubuntu-1804
path: build/output/ubuntu-18.04/memgraph*.deb
ubuntu-2004:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-20.04
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: ubuntu-2004
path: build/output/ubuntu-20.04/memgraph*.deb
ubuntu-2204:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: ubuntu-2204
path: build/output/ubuntu-22.04/memgraph*.deb
debian-11-platform:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11 --for-platform
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: debian-11-platform
path: build/output/debian-11/memgraph*.deb
debian-11-arm:
runs-on: [self-hosted, DockerMgBuild, ARM64, strange]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11-arm
- name: "Upload package"
uses: actions/upload-artifact@v2
with:
name: debian-11-arm
path: build/output/debian-11-arm/memgraph*.deb
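Every job in this file is gated on workflow_dispatch, so nothing here runs on push. One way to trigger it is the GitHub CLI (a sketch, assuming an authenticated gh pointed at this repository):

    gh workflow run "Package All"   # queue the workflow on the default branch
    gh run watch                    # optionally follow the run it started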

View File

@@ -1,295 +0,0 @@
name: Package memgraph
# TODO(gitbuda): Cleanup docker container if GHA job was canceled.
on:
workflow_dispatch:
inputs:
memgraph_version:
description: "Memgraph version to upload as. Leave this field empty if you don't want to upload binaries to S3. Format: 'X.Y.Z'"
required: false
build_type:
type: choice
description: "Memgraph Build type. Default value is Release"
default: 'Release'
options:
- Release
- RelWithDebInfo
target_os:
type: choice
description: "Target OS for which memgraph will be packaged. Select 'all' if you want to package for every listed OS. Default is Ubuntu 22.04"
default: 'ubuntu-22_04'
options:
- all
- amzn-2
- centos-7
- centos-9
- debian-10
- debian-11
- debian-11-arm
- debian-11-platform
- docker
- fedora-36
- ubuntu-18_04
- ubuntu-20_04
- ubuntu-22_04
- ubuntu-22_04-arm
jobs:
amzn-2:
if: ${{ github.event.inputs.target_os == 'amzn-2' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package amzn-2 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: amzn-2
path: build/output/amzn-2/memgraph*.rpm
centos-7:
if: ${{ github.event.inputs.target_os == 'centos-7' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-7 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: centos-7
path: build/output/centos-7/memgraph*.rpm
centos-9:
if: ${{ github.event.inputs.target_os == 'centos-9' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-9 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: centos-9
path: build/output/centos-9/memgraph*.rpm
debian-10:
if: ${{ github.event.inputs.target_os == 'debian-10' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-10 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-10
path: build/output/debian-10/memgraph*.deb
debian-11:
if: ${{ github.event.inputs.target_os == 'debian-11' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11
path: build/output/debian-11/memgraph*.deb
debian-11-arm:
if: ${{ github.event.inputs.target_os == 'debian-11-arm' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, ARM64, strange]
timeout-minutes: 120
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11-arm ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11-aarch64
path: build/output/debian-11-arm/memgraph*.deb
debian-11-platform:
if: ${{ github.event.inputs.target_os == 'debian-11-platform' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11 ${{ github.event.inputs.build_type }} --for-platform
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11-platform
path: build/output/debian-11/memgraph*.deb
docker:
if: ${{ github.event.inputs.target_os == 'docker' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
cd release/package
./run.sh package debian-11 ${{ github.event.inputs.build_type }} --for-docker
./run.sh docker
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: docker
path: build/output/docker/memgraph*.tar.gz
fedora-36:
if: ${{ github.event.inputs.target_os == 'fedora-36' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package fedora-36 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: fedora-36
path: build/output/fedora-36/memgraph*.rpm
ubuntu-18_04:
if: ${{ github.event.inputs.target_os == 'ubuntu-18_04' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-18.04 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-18.04
path: build/output/ubuntu-18.04/memgraph*.deb
ubuntu-20_04:
if: ${{ github.event.inputs.target_os == 'ubuntu-20_04' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-20.04 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-20.04
path: build/output/ubuntu-20.04/memgraph*.deb
ubuntu-22_04:
if: ${{ github.event.inputs.target_os == 'ubuntu-22_04' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-22.04
path: build/output/ubuntu-22.04/memgraph*.deb
ubuntu-22_04-arm:
if: ${{ github.event.inputs.target_os == 'ubuntu-22_04-arm' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, ARM64, strange]
timeout-minutes: 120
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04-arm ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-22.04-aarch64
path: build/output/ubuntu-22.04-arm/memgraph*.deb
upload-to-s3:
# only run upload if we specified version. Allows for runs without upload
if: "${{ github.event.inputs.memgraph_version != '' }}"
needs: [amzn-2, centos-7, centos-9, debian-10, debian-11, debian-11-arm, debian-11-platform, docker, fedora-36, ubuntu-18_04, ubuntu-20_04, ubuntu-22_04, ubuntu-22_04-arm]
runs-on: ubuntu-latest
steps:
- name: Download artifacts
uses: actions/download-artifact@v4
with:
# name: # if name input parameter is not provided, all artifacts are downloaded
# and put in directories named after each one.
path: build/output/release
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "download.memgraph.com"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output/release"
DEST_DIR: "memgraph/v${{ github.event.inputs.memgraph_version }}/"
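The jakejarvis/s3-sync-action used above is, in essence, a wrapper around aws s3 sync; an equivalent manual upload would look roughly like this (a sketch; MEMGRAPH_VERSION stands in for the memgraph_version workflow input, and credentials come from the same secrets):

    AWS_ACCESS_KEY_ID=... AWS_SECRET_ACCESS_KEY=... \
      aws s3 sync build/output/release \
      "s3://download.memgraph.com/memgraph/v${MEMGRAPH_VERSION}/" \
      --region eu-west-1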

View File

@@ -1,85 +0,0 @@
name: Run performance benchmarks manually
on:
workflow_dispatch:
jobs:
performance_benchmarks:
name: "Performance benchmarks"
runs-on: [self-hosted, Linux, X64, Diff, Gen7]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build only memgraph release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=release ..
make -j$THREADS
- name: Get branch name (merge)
if: github.event_name != 'pull_request'
shell: bash
run: echo "BRANCH_NAME=$(echo ${GITHUB_REF#refs/heads/} | tr / -)" >> $GITHUB_ENV
- name: Get branch name (pull request)
if: github.event_name == 'pull_request'
shell: bash
run: echo "BRANCH_NAME=$(echo ${GITHUB_HEAD_REF} | tr / -)" >> $GITHUB_ENV
- name: Run benchmarks
run: |
cd tests/mgbench
./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results benchmark_result.json pokec/medium/*/*
./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_supernode.json supernode
./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_high_write_set_property.json high_write_set_property
./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results benchmark_cartesian.json cartesian
- name: Upload benchmark results
run: |
cd tools/bench-graph-client
virtualenv -p python3 ve3
source ve3/bin/activate
pip install -r requirements.txt
./main.py --benchmark-name "mgbench" \
--benchmark-results "../../tests/mgbench/benchmark_result.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
./main.py --benchmark-name "supernode" \
--benchmark-results "../../tests/mgbench/benchmark_supernode.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
./main.py --benchmark-name "high_write_set_property" \
--benchmark-results "../../tests/mgbench/benchmark_high_write_set_property.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
./main.py --benchmark-name "cartesian" \
--benchmark-results "../../tests/mgbench/cartesian.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"

View File

@@ -1,208 +0,0 @@
name: Release build test
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}
cancel-in-progress: true
on:
workflow_dispatch:
inputs:
build_type:
type: choice
description: "Memgraph Build type. Default value is Release."
default: 'Release'
options:
- Release
- RelWithDebInfo
push:
branches:
- "release/**"
tags:
- "v*.*.*-rc*"
- "v*.*-rc*"
schedule:
# UTC
- cron: "0 22 * * *"
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}
jobs:
Debian10:
uses: ./.github/workflows/release_debian10.yaml
with:
build_type: ${{ github.event.inputs.build_type || 'Release' }}
secrets: inherit
Ubuntu20_04:
uses: ./.github/workflows/release_ubuntu2004.yaml
with:
build_type: ${{ github.event.inputs.build_type || 'Release' }}
secrets: inherit
PackageDebian10:
if: github.ref_type == 'tag'
needs: [Debian10]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-10 $BUILD_TYPE
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "deps.memgraph.io"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output"
DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-10
path: build/output/debian-10/memgraph*.deb
PackageUbuntu20_04:
if: github.ref_type == 'tag'
needs: [Ubuntu20_04]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04 $BUILD_TYPE
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "deps.memgraph.io"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output"
DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-22.04
path: build/output/ubuntu-22.04/memgraph*.deb
PackageUbuntu20_04_ARM:
if: github.ref_type == 'tag'
needs: [Ubuntu20_04]
runs-on: [self-hosted, DockerMgBuild, ARM64]
# M1 Mac mini is sometimes slower
timeout-minutes: 150
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04-arm $BUILD_TYPE
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-22.04-aarch64
path: build/output/ubuntu-22.04-arm/memgraph*.deb
PushToS3Ubuntu20_04_ARM:
if: github.ref_type == 'tag'
needs: [PackageUbuntu20_04_ARM]
runs-on: ubuntu-latest
steps:
- name: Download package
uses: actions/download-artifact@v4
with:
name: ubuntu-22.04-aarch64
path: build/output/release
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "deps.memgraph.io"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output/release"
DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
PackageDebian11:
if: github.ref_type == 'tag'
needs: [Debian10, Ubuntu20_04]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11 $BUILD_TYPE
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "deps.memgraph.io"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output"
DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11
path: build/output/debian-11/memgraph*.deb
PackageDebian11_ARM:
if: github.ref_type == 'tag'
needs: [Debian10, Ubuntu20_04]
runs-on: [self-hosted, DockerMgBuild, ARM64]
# M1 Mac mini is sometimes slower
timeout-minutes: 150
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11-arm $BUILD_TYPE
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11-aarch64
path: build/output/debian-11-arm/memgraph*.deb
PushToS3Debian11_ARM:
if: github.ref_type == 'tag'
needs: [PackageDebian11_ARM]
runs-on: ubuntu-latest
steps:
- name: Download package
uses: actions/download-artifact@v4
with:
name: debian-11-aarch64
path: build/output/release
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "deps.memgraph.io"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output/release"
DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"

.github/workflows/release_centos8.yaml (new file, 315 added lines)
View File

@@ -0,0 +1,315 @@
name: Release CentOS 8
on:
workflow_dispatch:
schedule:
- cron: "0 1 * * *"
jobs:
community_build:
name: "Community build"
runs-on: [self-hosted, Linux, X64, CentOS8]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
timeout-minutes: 960
steps:
- name: Set up repository
uses: actions/checkout@v2
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build community binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build community binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=release -DMG_ENTERPRISE=OFF ..
make -j$THREADS
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run unit tests.
cd build
ctest -R memgraph__unit --output-on-failure
coverage_build:
name: "Coverage build"
runs-on: [self-hosted, Linux, X64, CentOS8]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
steps:
- name: Set up repository
uses: actions/checkout@v2
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build coverage binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build coverage binaries.
cd build
cmake -DTEST_COVERAGE=ON ..
make -j$THREADS memgraph__unit
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run unit tests.
cd build
ctest -R memgraph__unit --output-on-failure
- name: Compute code coverage
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Compute code coverage.
cd tools/github
./coverage_convert
# Package code coverage.
cd generated
tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu
- name: Save code coverage
uses: actions/upload-artifact@v2
with:
name: "Code coverage"
path: tools/github/generated/code_coverage.tar.gz
debug_build:
name: "Debug build"
runs-on: [self-hosted, Linux, X64, CentOS8]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
steps:
- name: Set up repository
uses: actions/checkout@v2
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build debug binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build debug binaries.
cd build
cmake ..
make -j$THREADS
- name: Run leftover CTest tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run leftover CTest tests (all except unit and benchmark tests).
cd build
ctest -E "(memgraph__unit|memgraph__benchmark)" --output-on-failure
- name: Run drivers tests
run: |
./tests/drivers/run.sh
- name: Run integration tests
run: |
cd tests/integration
for name in *; do
if [ ! -d $name ]; then continue; fi
pushd $name >/dev/null
echo "Running: $name"
if [ -x prepare.sh ]; then
./prepare.sh
fi
if [ -x runner.py ]; then
./runner.py
elif [ -x runner.sh ]; then
./runner.sh
fi
echo
popd >/dev/null
done
- name: Run cppcheck and clang-format
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run cppcheck and clang-format.
cd tools/github
./cppcheck_and_clang_format diff
- name: Save cppcheck and clang-format errors
uses: actions/upload-artifact@v2
with:
name: "Code coverage"
path: tools/github/cppcheck_and_clang_format.txt
release_build:
name: "Release build"
runs-on: [self-hosted, Linux, X64, CentOS8]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
timeout-minutes: 960
steps:
- name: Set up repository
uses: actions/checkout@v2
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=release ..
make -j$THREADS
- name: Create enterprise RPM package
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
cd build
# create mgconsole
# we use the -B to force the build
make -j$THREADS -B mgconsole
# Create enterprise RPM package.
mkdir output && cd output
cpack -G RPM --config ../CPackConfig.cmake
rpmlint memgraph*.rpm
- name: Save enterprise RPM package
uses: actions/upload-artifact@v2
with:
name: "Enterprise RPM package"
path: build/output/memgraph*.rpm
- name: Run micro benchmark tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run micro benchmark tests.
cd build
# The `eval` benchmark needs a large stack limit.
ulimit -s 262144
ctest -R memgraph__benchmark -V
- name: Run macro benchmark tests
run: |
cd tests/macro_benchmark
./harness QuerySuite MemgraphRunner \
--groups aggregation 1000_create unwind_create dense_expand match \
--no-strict
- name: Run parallel macro benchmark tests
run: |
cd tests/macro_benchmark
./harness QueryParallelSuite MemgraphRunner \
--groups aggregation_parallel create_parallel bfs_parallel \
--num-database-workers 9 --num-clients-workers 30 \
--no-strict
- name: Run GQL Behave tests
run: |
cd tests/gql_behave
./continuous_integration
- name: Save quality assurance status
uses: actions/upload-artifact@v2
with:
name: "GQL Behave Status"
path: |
tests/gql_behave/gql_behave_status.csv
tests/gql_behave/gql_behave_status.html
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run unit tests.
cd build
ctest -R memgraph__unit --output-on-failure
- name: Run e2e tests
run: |
# TODO(gitbuda): Setup mgclient and pymgclient properly.
cd tests
./setup.sh
source ve3/bin/activate
cd e2e
LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory .
- name: Run stress test (plain)
run: |
cd tests/stress
./continuous_integration
- name: Run stress test (SSL)
run: |
cd tests/stress
./continuous_integration --use-ssl
- name: Run stress test (large)
run: |
cd tests/stress
./continuous_integration --large-dataset
- name: Run durability test (plain)
run: |
cd tests/stress
source ve3/bin/activate
python3 durability --num-steps 5
- name: Run durability test (large)
run: |
cd tests/stress
source ve3/bin/activate
python3 durability --num-steps 20

.github/workflows/release_debian10.yaml

@@ -1,38 +1,23 @@
 name: Release Debian 10

 on:
-  workflow_call:
-    inputs:
-      build_type:
-        type: string
-        description: "Memgraph Build type. Default value is Release."
-        default: 'Release'
   workflow_dispatch:
-    inputs:
-      build_type:
-        type: choice
-        description: "Memgraph Build type. Default value is Release."
-        default: 'Release'
-        options:
-          - Release
-          - RelWithDebInfo
-
-env:
-  OS: "Debian10"
-  THREADS: 24
-  MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
-  MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
-  BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}
+  schedule:
+    - cron: "0 1 * * *"

 jobs:
   community_build:
     name: "Community build"
     runs-on: [self-hosted, Linux, X64, Debian10]
-    timeout-minutes: 60
+    env:
+      THREADS: 24
+      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
+      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
+    timeout-minutes: 960
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
@@ -48,7 +33,7 @@ jobs:
           # Build community binaries.
           cd build
-          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DMG_ENTERPRISE=OFF ..
+          cmake -DCMAKE_BUILD_TYPE=release -DMG_ENTERPRISE=OFF ..
           make -j$THREADS

       - name: Run unit tests
@@ -67,11 +52,10 @@ jobs:
       THREADS: 24
       MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
       MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
-    timeout-minutes: 60
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
@@ -113,19 +97,22 @@ jobs:
           tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu

       - name: Save code coverage
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         with:
-          name: "Code coverage(Coverage build)-${{ env.OS }}"
+          name: "Code coverage"
           path: tools/github/generated/code_coverage.tar.gz

   debug_build:
     name: "Debug build"
     runs-on: [self-hosted, Linux, X64, Debian10]
-    timeout-minutes: 60
+    env:
+      THREADS: 24
+      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
+      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
@@ -157,6 +144,25 @@ jobs:
         run: |
           ./tests/drivers/run.sh

+      - name: Run integration tests
+        run: |
+          cd tests/integration
+          for name in *; do
+            if [ ! -d $name ]; then continue; fi
+            pushd $name >/dev/null
+            echo "Running: $name"
+            if [ -x prepare.sh ]; then
+              ./prepare.sh
+            fi
+            if [ -x runner.py ]; then
+              ./runner.py
+            elif [ -x runner.sh ]; then
+              ./runner.sh
+            fi
+            echo
+            popd >/dev/null
+          done
+
       - name: Run cppcheck and clang-format
         run: |
           # Activate toolchain.
@@ -167,49 +173,23 @@ jobs:
           ./cppcheck_and_clang_format diff

       - name: Save cppcheck and clang-format errors
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         with:
-          name: "Code coverage(Debug build)-${{ env.OS }}"
+          name: "Code coverage"
          path: tools/github/cppcheck_and_clang_format.txt

-  debug_integration_test:
-    name: "Debug integration tests"
-    runs-on: [self-hosted, Linux, X64, Debian10]
-    timeout-minutes: 60
-    steps:
-      - name: Set up repository
-        uses: actions/checkout@v4
-        with:
-          # Number of commits to fetch. `0` indicates all history for all
-          # branches and tags. (default: 1)
-          fetch-depth: 0
-      - name: Build debug binaries
-        run: |
-          # Activate toolchain.
-          source /opt/toolchain-v4/activate
-          # Initialize dependencies.
-          ./init
-          # Build debug binaries.
-          cd build
-          cmake ..
-          make -j$THREADS
-      - name: Run integration tests
-        run: |
-          tests/integration/run.sh
-
   release_build:
     name: "Release build"
     runs-on: [self-hosted, Linux, X64, Debian10]
-    timeout-minutes: 60
+    env:
+      THREADS: 24
+      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
+      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
+    timeout-minutes: 960
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
@@ -225,7 +205,7 @@ jobs:
           # Build release binaries.
           cd build
-          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
+          cmake -DCMAKE_BUILD_TYPE=release ..
           make -j$THREADS

       - name: Create enterprise DEB package
@@ -244,60 +224,11 @@ jobs:
           cpack -G DEB --config ../CPackConfig.cmake

       - name: Save enterprise DEB package
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         with:
-          name: "Enterprise DEB package-${{ env.OS}}"
+          name: "Enterprise DEB package"
           path: build/output/memgraph*.deb

-      - name: Run GQL Behave tests
-        run: |
-          cd tests
-          ./setup.sh /opt/toolchain-v4/activate
-          cd gql_behave
-          ./continuous_integration
-
-      - name: Save quality assurance status
-        uses: actions/upload-artifact@v4
-        with:
-          name: "GQL Behave Status-${{ env.OS }}"
-          path: |
-            tests/gql_behave/gql_behave_status.csv
-            tests/gql_behave/gql_behave_status.html
-
-      - name: Run unit tests
-        run: |
-          # Activate toolchain.
-          source /opt/toolchain-v4/activate
-          # Run unit tests.
-          cd build
-          ctest -R memgraph__unit --output-on-failure
-
-  release_benchmark_tests:
-    name: "Release Benchmark Tests"
-    runs-on: [self-hosted, Linux, X64, Debian10]
-    timeout-minutes: 60
-    steps:
-      - name: Set up repository
-        uses: actions/checkout@v4
-        with:
-          # Number of commits to fetch. `0` indicates all history for all
-          # branches and tags. (default: 1)
-          fetch-depth: 0
-
-      - name: Build release binaries
-        run: |
-          # Activate toolchain.
-          source /opt/toolchain-v4/activate
-          # Initialize dependencies.
-          ./init
-          # Build release binaries
-          cd build
-          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
-          make -j$THREADS
-
       - name: Run micro benchmark tests
         run: |
           # Activate toolchain.
@@ -324,79 +255,36 @@ jobs:
             --num-database-workers 9 --num-clients-workers 30 \
             --no-strict

-  release_e2e_test:
-    name: "Release End-to-end Test"
-    runs-on: [self-hosted, Linux, X64, Debian10]
-    timeout-minutes: 60
-    steps:
-      - name: Set up repository
-        uses: actions/checkout@v4
-        with:
-          # Number of commits to fetch. `0` indicates all history for all
-          # branches and tags. (default: 1)
-          fetch-depth: 0
-
-      - name: Build release binaries
-        run: |
-          # Activate toolchain.
-          source /opt/toolchain-v4/activate
-          # Initialize dependencies.
-          ./init
-          # Build release binaries
-          cd build
-          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
-          make -j$THREADS
-
-      - name: Ensure Kafka and Pulsar are up
-        run: |
-          cd tests/e2e/streams/kafka
-          docker-compose up -d
-          cd ../pulsar
-          docker-compose up -d
+      - name: Run GQL Behave tests
+        run: |
+          cd tests/gql_behave
+          ./continuous_integration
+
+      - name: Save quality assurance status
+        uses: actions/upload-artifact@v2
+        with:
+          name: "GQL Behave Status"
+          path: |
+            tests/gql_behave/gql_behave_status.csv
+            tests/gql_behave/gql_behave_status.html
+
+      - name: Run unit tests
+        run: |
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+          # Run unit tests.
+          cd build
+          ctest -R memgraph__unit --output-on-failure

       - name: Run e2e tests
         run: |
+          # TODO(gitbuda): Setup mgclient and pymgclient properly.
           cd tests
-          ./setup.sh /opt/toolchain-v4/activate
-          source ve3/bin/activate_e2e
+          ./setup.sh
+          source ve3/bin/activate
           cd e2e
-          ./run.sh
-
-      - name: Ensure Kafka and Pulsar are down
-        if: always()
-        run: |
-          cd tests/e2e/streams/kafka
-          docker-compose down
-          cd ../pulsar
-          docker-compose down
-
-  release_durability_stress_tests:
-    name: "Release durability and stress tests"
-    runs-on: [self-hosted, Linux, X64, Debian10]
-    timeout-minutes: 60
-    steps:
-      - name: Set up repository
-        uses: actions/checkout@v4
-        with:
-          # Number of commits to fetch. `0` indicates all history for all
-          # branches and tags. (default: 1)
-          fetch-depth: 0
-
-      - name: Build release binaries
-        run: |
-          # Activate toolchain.
-          source /opt/toolchain-v4/activate
-          # Initialize dependencies.
-          ./init
-          # Build release binaries.
-          cd build
-          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
-          make -j$THREADS
+          LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory .

       - name: Run stress test (plain)
         run: |
@@ -408,6 +296,11 @@ jobs:
           cd tests/stress
           ./continuous_integration --use-ssl

+      - name: Run stress test (large)
+        run: |
+          cd tests/stress
+          ./continuous_integration --large-dataset
+
       - name: Run durability test (plain)
         run: |
           cd tests/stress
@@ -423,11 +316,15 @@ jobs:
   release_jepsen_test:
     name: "Release Jepsen Test"
     runs-on: [self-hosted, Linux, X64, Debian10, JepsenControl]
+    env:
+      THREADS: 24
+      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
+      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
     timeout-minutes: 60
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
@@ -437,27 +334,23 @@ jobs:
         run: |
           # Activate toolchain.
           source /opt/toolchain-v4/activate
           # Initialize dependencies.
           ./init
           # Build only memgraph release binary.
           cd build
-          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
+          cmake -DCMAKE_BUILD_TYPE=release ..
           make -j$THREADS memgraph

-      - name: Refresh Jepsen Cluster
-        run: |
-          cd tests/jepsen
-          ./run.sh cluster-refresh
-
       - name: Run Jepsen tests
         run: |
           cd tests/jepsen
-          ./run.sh test-all-individually --binary ../../build/memgraph --ignore-run-stdout-logs --ignore-run-stderr-logs
+          ./run.sh test --binary ../../build/memgraph --run-args "test-all --node-configs resources/node-config.edn" --ignore-run-stdout-logs --ignore-run-stderr-logs

       - name: Save Jepsen report
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         if: ${{ always() }}
         with:
-          name: "Jepsen Report-${{ env.OS }}"
+          name: "Jepsen Report"
           path: tests/jepsen/Jepsen.tar.gz

(unnamed Docker image release workflow)

@@ -19,20 +19,20 @@ jobs:
       DOCKER_REPOSITORY_NAME: memgraph
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v2
+        uses: docker/setup-qemu-action@v1

       - name: Set up Docker Buildx
         id: buildx
-        uses: docker/setup-buildx-action@v2
+        uses: docker/setup-buildx-action@v1

       - name: Log in to Docker Hub
-        uses: docker/login-action@v2
+        uses: docker/login-action@v1
         with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_PASSWORD }}

       - name: Download memgraph binary
         run: |

(deleted workflow file — "Mgbench Bolt Client Publish Docker Image")

@@ -1,63 +0,0 @@
name: "Mgbench Bolt Client Publish Docker Image"
on:
workflow_dispatch:
inputs:
version:
description: "Mgbench bolt client version to publish on Dockerhub."
required: true
force_release:
type: boolean
required: false
default: false
jobs:
mgbench_docker_publish:
runs-on: ubuntu-latest
env:
DOCKER_ORGANIZATION_NAME: memgraph
DOCKER_REPOSITORY_NAME: mgbench-client
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
- name: Log in to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Check if specified version is already pushed
run: |
EXISTS=$(docker manifest inspect $DOCKER_ORGANIZATION_NAME/$DOCKER_REPOSITORY_NAME:${{ github.event.inputs.version }} > /dev/null; echo $?)
echo $EXISTS
if [[ ${EXISTS} -eq 0 ]]; then
echo 'The specified version has been already released to DockerHub.'
if [[ ${{ github.event.inputs.force_release }} = true ]]; then
echo 'Forcing the release!'
else
echo 'Stopping the release!'
exit 1
fi
else
echo 'All good the specified version has not been release to DockerHub.'
fi
- name: Build & push docker images
run: |
cd tests/mgbench
docker buildx build \
--build-arg TOOLCHAIN_VERSION=toolchain-v4 \
--platform linux/amd64,linux/arm64 \
--tag $DOCKER_ORGANIZATION_NAME/$DOCKER_REPOSITORY_NAME:${{ github.event.inputs.version }} \
--tag $DOCKER_ORGANIZATION_NAME/$DOCKER_REPOSITORY_NAME:latest \
--file Dockerfile.mgbench_client \
--push .

.github/workflows/release_ubuntu2004.yaml

@@ -1,38 +1,23 @@
 name: Release Ubuntu 20.04

 on:
-  workflow_call:
-    inputs:
-      build_type:
-        type: string
-        description: "Memgraph Build type. Default value is Release."
-        default: 'Release'
   workflow_dispatch:
-    inputs:
-      build_type:
-        type: choice
-        description: "Memgraph Build type. Default value is Release."
-        default: 'Release'
-        options:
-          - Release
-          - RelWithDebInfo
-
-env:
-  OS: "Ubuntu 20.04"
-  THREADS: 24
-  MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
-  MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
-  BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}
+  schedule:
+    - cron: "0 1 * * *"

 jobs:
   community_build:
     name: "Community build"
     runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
-    timeout-minutes: 60
+    env:
+      THREADS: 24
+      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
+      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
+    timeout-minutes: 960
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
@@ -48,7 +33,7 @@ jobs:
           # Build community binaries.
           cd build
-          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DMG_ENTERPRISE=OFF ..
+          cmake -DCMAKE_BUILD_TYPE=release -DMG_ENTERPRISE=OFF ..
           make -j$THREADS

       - name: Run unit tests
@@ -63,11 +48,14 @@ jobs:
   coverage_build:
     name: "Coverage build"
     runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
-    timeout-minutes: 60
+    env:
+      THREADS: 24
+      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
+      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
@@ -109,19 +97,22 @@ jobs:
           tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu

       - name: Save code coverage
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         with:
-          name: "Code coverage(Coverage build)-${{ env.OS }}"
+          name: "Code coverage"
           path: tools/github/generated/code_coverage.tar.gz

   debug_build:
     name: "Debug build"
     runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
-    timeout-minutes: 60
+    env:
+      THREADS: 24
+      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
+      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
@@ -153,6 +144,25 @@ jobs:
         run: |
           ./tests/drivers/run.sh

+      - name: Run integration tests
+        run: |
+          cd tests/integration
+          for name in *; do
+            if [ ! -d $name ]; then continue; fi
+            pushd $name >/dev/null
+            echo "Running: $name"
+            if [ -x prepare.sh ]; then
+              ./prepare.sh
+            fi
+            if [ -x runner.py ]; then
+              ./runner.py
+            elif [ -x runner.sh ]; then
+              ./runner.sh
+            fi
+            echo
+            popd >/dev/null
+          done
+
       - name: Run cppcheck and clang-format
         run: |
           # Activate toolchain.
@@ -163,49 +173,23 @@ jobs:
           ./cppcheck_and_clang_format diff

       - name: Save cppcheck and clang-format errors
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         with:
-          name: "Code coverage(Debug build)-${{ env.OS }}"
+          name: "Code coverage"
           path: tools/github/cppcheck_and_clang_format.txt

-  debug_integration_test:
-    name: "Debug integration tests"
-    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
-    timeout-minutes: 60
-    steps:
-      - name: Set up repository
-        uses: actions/checkout@v4
-        with:
-          # Number of commits to fetch. `0` indicates all history for all
-          # branches and tags. (default: 1)
-          fetch-depth: 0
-      - name: Build debug binaries
-        run: |
-          # Activate toolchain.
-          source /opt/toolchain-v4/activate
-          # Initialize dependencies.
-          ./init
-          # Build debug binaries.
-          cd build
-          cmake ..
-          make -j$THREADS
-      - name: Run integration tests
-        run: |
-          tests/integration/run.sh
-
   release_build:
     name: "Release build"
     runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
-    timeout-minutes: 60
+    env:
+      THREADS: 24
+      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
+      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
+    timeout-minutes: 960
     steps:
       - name: Set up repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v2
         with:
           # Number of commits to fetch. `0` indicates all history for all
           # branches and tags. (default: 1)
@@ -221,7 +205,7 @@ jobs:
           # Build release binaries.
           cd build
-          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
+          cmake -DCMAKE_BUILD_TYPE=release ..
           make -j$THREADS

       - name: Create enterprise DEB package
@@ -240,60 +224,11 @@ jobs:
           cpack -G DEB --config ../CPackConfig.cmake

       - name: Save enterprise DEB package
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@v2
         with:
-          name: "Enterprise DEB package-${{ env.OS }}"
+          name: "Enterprise DEB package"
           path: build/output/memgraph*.deb

-      - name: Run GQL Behave tests
-        run: |
-          cd tests
-          ./setup.sh /opt/toolchain-v4/activate
-          cd gql_behave
-          ./continuous_integration
-
-      - name: Save quality assurance status
-        uses: actions/upload-artifact@v4
-        with:
-          name: "GQL Behave Status-${{ env.OS }}"
-          path: |
-            tests/gql_behave/gql_behave_status.csv
-            tests/gql_behave/gql_behave_status.html
-
-      - name: Run unit tests
-        run: |
-          # Activate toolchain.
-          source /opt/toolchain-v4/activate
-          # Run unit tests.
-          cd build
-          ctest -R memgraph__unit --output-on-failure
-
-  release_benchmark_tests:
-    name: "Release Benchmark Tests"
-    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
-    timeout-minutes: 60
-    steps:
-      - name: Set up repository
-        uses: actions/checkout@v4
-        with:
-          # Number of commits to fetch. `0` indicates all history for all
-          # branches and tags. (default: 1)
-          fetch-depth: 0
-
-      - name: Build release binaries
-        run: |
-          # Activate toolchain.
-          source /opt/toolchain-v4/activate
-          # Initialize dependencies.
-          ./init
-          # Build release binaries
-          cd build
-          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
-          make -j$THREADS
-
       - name: Run micro benchmark tests
         run: |
           # Activate toolchain.
@@ -320,79 +255,36 @@ jobs:
             --num-database-workers 9 --num-clients-workers 30 \
             --no-strict

-  release_e2e_test:
-    name: "Release End-to-end Test"
-    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
-    timeout-minutes: 60
-    steps:
-      - name: Set up repository
-        uses: actions/checkout@v4
-        with:
-          # Number of commits to fetch. `0` indicates all history for all
-          # branches and tags. (default: 1)
-          fetch-depth: 0
-
-      - name: Build release binaries
-        run: |
-          # Activate toolchain.
-          source /opt/toolchain-v4/activate
-          # Initialize dependencies.
-          ./init
-          # Build release binaries
-          cd build
-          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
-          make -j$THREADS
-
-      - name: Ensure Kafka and Pulsar are up
-        run: |
-          cd tests/e2e/streams/kafka
-          docker-compose up -d
-          cd ../pulsar
-          docker-compose up -d
+      - name: Run GQL Behave tests
+        run: |
+          cd tests/gql_behave
+          ./continuous_integration
+
+      - name: Save quality assurance status
+        uses: actions/upload-artifact@v2
+        with:
+          name: "GQL Behave Status"
+          path: |
+            tests/gql_behave/gql_behave_status.csv
+            tests/gql_behave/gql_behave_status.html
+
+      - name: Run unit tests
+        run: |
+          # Activate toolchain.
+          source /opt/toolchain-v4/activate
+          # Run unit tests.
+          cd build
+          ctest -R memgraph__unit --output-on-failure

       - name: Run e2e tests
         run: |
+          # TODO(gitbuda): Setup mgclient and pymgclient properly.
           cd tests
-          ./setup.sh /opt/toolchain-v4/activate
-          source ve3/bin/activate_e2e
+          ./setup.sh
+          source ve3/bin/activate
           cd e2e
-          ./run.sh
-
-      - name: Ensure Kafka and Pulsar are down
-        if: always()
-        run: |
-          cd tests/e2e/streams/kafka
-          docker-compose down
-          cd ../pulsar
-          docker-compose down
-
-  release_durability_stress_tests:
-    name: "Release durability and stress tests"
-    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
-    timeout-minutes: 60
-    steps:
-      - name: Set up repository
-        uses: actions/checkout@v4
-        with:
-          # Number of commits to fetch. `0` indicates all history for all
-          # branches and tags. (default: 1)
-          fetch-depth: 0
-
-      - name: Build release binaries
-        run: |
-          # Activate toolchain.
-          source /opt/toolchain-v4/activate
-          # Initialize dependencies.
-          ./init
-          # Build release binaries.
-          cd build
-          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
-          make -j$THREADS
+          LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory .

       - name: Run stress test (plain)
         run: |
@@ -404,6 +296,11 @@ jobs:
           cd tests/stress
           ./continuous_integration --use-ssl

+      - name: Run stress test (large)
+        run: |
+          cd tests/stress
+          ./continuous_integration --large-dataset
+
       - name: Run durability test (plain)
         run: |
           cd tests/stress

(deleted workflow file — "Stress test large")

@@ -1,68 +0,0 @@
name: Stress test large
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}
cancel-in-progress: true
on:
workflow_dispatch:
inputs:
build_type:
type: choice
description: "Memgraph Build type. Default value is Release."
default: 'Release'
options:
- Release
- RelWithDebInfo
push:
tags:
- "v*.*.*-rc*"
- "v*.*-rc*"
schedule:
- cron: "0 22 * * *"
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}
jobs:
stress_test_large:
name: "Stress test large"
timeout-minutes: 720
strategy:
matrix:
os: [Debian10, Ubuntu20.04]
extra: [BigMemory, Gen8]
exclude:
- os: Debian10
extra: Gen8
- os: Ubuntu20.04
extra: BigMemory
runs-on: [self-hosted, Linux, X64, "${{ matrix.os }}", "${{ matrix.extra }}"]
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
make -j$THREADS
- name: Run stress test (large)
run: |
cd tests/stress
./continuous_integration --large-dataset

(deleted workflow file — "Upload Package All artifacts to S3")

@@ -1,32 +0,0 @@
name: Upload Package All artifacts to S3
on:
workflow_dispatch:
inputs:
memgraph_version:
description: "Memgraph version to upload as. Format: 'X.Y.Z'"
required: true
run_number:
description: "# of the package_all workflow run to upload artifacts from. Format: '#XYZ'"
required: true
jobs:
upload-to-s3:
runs-on: ubuntu-latest
steps:
- name: Download artifacts
uses: dawidd6/action-download-artifact@v4
with:
workflow: package_all.yaml
workflow_conclusion: success
run_number: "${{ github.event.inputs.run_number }}"
path: build/output/release
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "download.memgraph.com"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output/release"
DEST_DIR: "memgraph/v${{ github.event.inputs.memgraph_version }}/"

.gitignore

@@ -16,7 +16,8 @@
 .ycm_extra_conf.pyc
 .temp/
 Testing/
-/build*/
+build
+build/
 release/examples/build
 cmake-build-*
 cmake/DownloadProject/
@@ -33,6 +34,9 @@ TAGS
 *.fas
 *.fasl

+# LCP generated C++ files
+*.lcp.cpp
+
 src/database/distributed/serialization.hpp
 src/database/single_node_ha/serialization.hpp
 src/distributed/bfs_rpc_messages.hpp
@@ -46,11 +50,15 @@ src/distributed/pull_produce_rpc_messages.hpp
 src/distributed/storage_gc_rpc_messages.hpp
 src/distributed/token_sharing_rpc_messages.hpp
 src/distributed/updates_rpc_messages.hpp
+src/query/frontend/ast/ast.hpp
+src/query/distributed/frontend/ast/ast_serialization.hpp
 src/durability/distributed/state_delta.hpp
 src/durability/single_node/state_delta.hpp
 src/durability/single_node_ha/state_delta.hpp
+src/query/frontend/semantic/symbol.hpp
 src/query/distributed/frontend/semantic/symbol_serialization.hpp
 src/query/distributed/plan/ops.hpp
+src/query/plan/operator.hpp
 src/raft/log_entry.hpp
 src/raft/raft_rpc_messages.hpp
 src/raft/snapshot_metadata.hpp
@@ -58,7 +66,3 @@ src/raft/storage_info_rpc_messages.hpp
 src/stats/stats_rpc_messages.hpp
 src/storage/distributed/rpc/concurrent_id_mapper_rpc_messages.hpp
 src/transactions/distributed/engine_rpc_messages.hpp
-/tests/manual/js/transaction_timeout/package-lock.json
-/tests/manual/js/transaction_timeout/node_modules/
-.vscode/
-src/query/frontend/opencypher/grammar/.antlr/*

.pre-commit-config.yaml

@@ -1,35 +1,24 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v2.3.0
     hooks:
       - id: check-yaml
-        args: [--allow-multiple-documents]
       - id: end-of-file-fixer
       - id: trailing-whitespace
   - repo: https://github.com/psf/black
-    rev: 23.1.0
+    rev: 22.3.0
     hooks:
       - id: black
-  - repo: https://github.com/pycqa/isort
-    rev: 5.12.0
-    hooks:
-      - id: isort
-        name: isort (python)
-        args: ["--profile", "black"]
+        args: # arguments to configure black
+          - --line-length=120
+          - --include='\.pyi?$'
+          # these folders wont be formatted by black
+          - --exclude="""\.git |
+            \.__pycache__|
+            build|
+            libs|
+            .cache"""
   - repo: https://github.com/pre-commit/mirrors-clang-format
     rev: v13.0.0
     hooks:
      - id: clang-format
-  # - repo: local
-  #   hooks:
-  #     - id: clang-tidy
-  #       name: clang-tidy
-  #       description: Runs clang-tidy and checks for errors
-  #       entry: python ./tools/pre-commit/clang-tidy.py
-  #       language: python
-  #       files: ^src/
-  #       types: [c++, text]
-  #       fail_fast: true
-  #       require_serial: true
-  #       args: [--compile_commands_path=build]
-  #       pass_filenames: false

(deleted file — SonarQube project configuration)

@@ -1,22 +0,0 @@
# Path to sources
sonar.sources = .
# sonar.exclusions=
sonar.inclusions=src,include,query_modules
# Path to tests
sonar.tests = tests/
# sonar.test.exclusions=
# sonar.test.inclusions=
# Source encoding
# sonar.sourceEncoding=
# Exclusions for copy-paste detection
# sonar.cpd.exclusions=
# Python version (for python projects only)
# sonar.python.version=
# C++ standard version (for C++ projects only)
# If not specified, it defaults to the latest supported standard
# sonar.cfamily.reportingCppStandardOverride=c++98|c++11|c++14|c++17|c++20

(deleted file — Tantivy ADR)

@@ -1,32 +0,0 @@
# Tantivy ADR
**Author**
Marko Budiselic (github.com/gitbuda)
**Status**
APPROVED
**Date**
January 5, 2024
**Problem**
For some of Memgraph's workloads, text search is a required feature. We don't
want to build a new text search engine because that's not Memgraph's core
value.
**Criteria**
- easy integration with our C++ codebase
- ability to operate in-memory and on-disk
- sufficient features (regex, full-text search, fuzzy search, aggregations over
text data)
- production-ready
**Decision**
None of the known C++ libraries are production-ready. Recent Rust libraries, in
particular [Tantivy](https://github.com/quickwit-oss/tantivy), provide many
more features and are production-ready. We'll integrate Tantivy into the
current Memgraph codebase via
[cxx](https://github.com/dtolnay/cxx). **We select Tantivy.**
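For orientation, a minimal sketch of what the C++ side of such a cxx bridge could look like — the `text_search.rs.h` header and every bridge function below are hypothetical illustrations, not the actual Memgraph integration:

```cpp
// Hypothetical consumer of a cxx-generated bridge around tantivy::Index.
// The header name and all bridge functions are illustrative only.
#include <iostream>
#include <string>

#include "text_search.rs.h"  // would be generated by cxx from a #[cxx::bridge] module

int main() {
  // create_index/add_document/commit/search are assumed to be thin Rust
  // wrappers over Tantivy's Index, IndexWriter, and QueryParser.
  auto index = create_index("/tmp/tantivy-demo");
  add_document(*index, R"({"title": "vertex", "body": "graph database node"})");
  commit(*index);
  for (const auto &hit : search(*index, "graph")) {
    std::cout << std::string(hit) << "\n";  // each hit serialized as JSON
  }
  return 0;
}
```

The appeal of cxx here is that it generates the `.rs.h` header from the Rust bridge definition, so the C++ side stays free of hand-written unsafe FFI plumbing.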

(deleted file — NuRaft ADR)

@@ -1,34 +0,0 @@
# NuRaft ADR
**Author**
Marko Budiselic (github.com/gitbuda)
**Status**
PROPOSED
**Date**
January 10, 2024
**Problem**
In order to enhance Memgraph with the High Availability features requested by
customers, we want reliable coordinators backed by the Raft consensus
algorithm. Implementing Raft so that it is both correct and performant is a
very challenging task. Skillful Memgraph engineers have already tried three
times and failed to deliver in a reasonable timeframe each time (approximately
4 person-weeks of engineering work per attempt).
**Criteria**
- easy integration with our C++ codebase
- heavily tested in production environments
- implementation of performance optimizations on top of the canonical Raft
implementation
**Decision**
There are a few robust C++ implementations of Raft, but only as parts of other
projects or bigger libraries. **We select
[NuRaft](https://github.com/eBay/NuRaft)** because it focuses on delivering
Raft without bloatware, and it's used by
[Clickhouse](https://github.com/ClickHouse/ClickHouse) (a comparable peer to
Memgraph, and a very well-established product).
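For a sense of the integration surface, here is a sketch of bootstrapping a NuRaft server following the launcher pattern from NuRaft's public examples — `my_state_machine`, `my_state_mgr`, and `my_logger` stand in for application-provided implementations, and API details may differ between NuRaft versions:

```cpp
#include <libnuraft/nuraft.hxx>

// Sketch after NuRaft's example servers: the launcher wires together the
// application state machine, the cluster-membership storage, and the
// ASIO-based RPC layer, then returns the running raft_server.
nuraft::ptr<nuraft::raft_server> start_raft_server(
    nuraft::ptr<nuraft::state_machine> my_state_machine,
    nuraft::ptr<nuraft::state_mgr> my_state_mgr,
    nuraft::ptr<nuraft::logger> my_logger,
    int port) {
  nuraft::raft_params params;
  params.heart_beat_interval_ = 100;           // ms between leader heartbeats
  params.election_timeout_lower_bound_ = 200;  // randomized election timeout
  params.election_timeout_upper_bound_ = 400;  // (ms) to avoid split votes

  nuraft::asio_service::options asio_opts;     // networking + thread pool

  nuraft::raft_launcher launcher;
  return launcher.init(my_state_machine, my_state_mgr, my_logger, port,
                       asio_opts, params);
}
```

Most of the real work lives in the `state_machine` implementation (applying committed log entries), which is exactly the part a coordinator would customize.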

(deleted file — RocksDB ADR)

@@ -1,38 +0,0 @@
# RocksDB ADR
**Author**
Marko Budiselic (github.com/gitbuda)
**Status**
ACCEPTED
**Date**
January 23, 2024
**Problem**
Interacting with data (reads and writes) on disk in a concurrent, safe, and
fast way is a challenging task. Implementing all the low-level primitives
needed to interact with various disk hardware efficiently consumes significant
engineering time. Whenever Memgraph has to store data on disk (or any storage
system colder than RAM), the problem is how to do that in the least amount of
development time while satisfying all functional requirements (often
performance).
**Criteria**
- working efficiently in a highly concurrent environment
- easy integration with Memgraph's C++ codebase
- providing low-level key-value API
- heavily tested in production environments
- providing abstractions for the storage hardware (even for cloud-based
storage like S3)
**Decision**
There are a few robust key-value stores, but finding one that is
production-ready and compatible with Memgraph's C++ codebase is challenging.
**We select [RocksDB](https://github.com/facebook/rocksdb)** because it
delivers a robust API to manage data on disk, it's battle-tested in many
production environments (many database systems embed RocksDB), and it's the
most compatible option.
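To give a feel for the low-level key-value API the decision refers to, a minimal, self-contained RocksDB usage sketch (the `/tmp` path and the key naming are ours for illustration, not Memgraph's on-disk format):

```cpp
#include <cassert>
#include <string>

#include <rocksdb/db.h>

int main() {
  rocksdb::Options options;
  options.create_if_missing = true;

  // Open (or create) a database directory; RocksDB handles the concurrent,
  // durable disk interaction that the ADR describes.
  rocksdb::DB *db = nullptr;
  rocksdb::Status status = rocksdb::DB::Open(options, "/tmp/mg-rocksdb-demo", &db);
  assert(status.ok());

  // Durable write and point lookup through the key-value API.
  status = db->Put(rocksdb::WriteOptions(), "vertex:1", R"({"label": "Person"})");
  assert(status.ok());

  std::string value;
  status = db->Get(rocksdb::ReadOptions(), "vertex:1", &value);
  assert(status.ok() && !value.empty());

  delete db;
  return 0;
}
```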

(deleted file — Architecture Decision Records README)

@@ -1,67 +0,0 @@
# Architecture Decision Records
Also known as ADRs. This practice has become widespread in many
high-performing engineering teams. It is a technique for communicating
between software engineers. ADRs provide a clear and documented
history of architectural choices, ensuring that everyone on the
team is on the same page. This improves communication and reduces
misunderstandings. The act of recording decisions encourages
thoughtful consideration before making choices. This can lead to
more robust and better-informed architectural decisions.
Links must be created, pointing both to and from the Github Issues
and/or the Notion Program Management "Initiative" database.
ADRs are complementary to any tech specs that get written while
designing a solution. ADRs are very short and to the point, while
tech specs will include diagrams and can be quite verbose.
## HOWTO
Each ADR will be assigned a monotonically increasing unique numeric
identifier, which will be zero-padded to 3 digits. Each ADR will
be in a single markdown file containing no more than one page of
text, and the filename will start with that unique identifier,
followed by a snake case phrase summarizing the problem. For
example: `001_architecture_decision_records.md` or
`002_big_integration_cap_theorem.md`.
We want to use an ADR when:
1. Significant Impact: This includes choices that affect scalability, performance, or fundamental design principles.
1. Long-Term Ramifications: When a decision is expected to have long-term ramifications or is difficult to reverse.
1. Architectural Principles: ADRs are suitable for documenting decisions related to architectural principles, frameworks, or patterns that shape the system's structure.
1. Controversial Choices: When a decision is likely to be controversial or may require justification in the future.
The most senior engineer on a project will evaluate and decide
whether or not an ADR is needed.
## Do
1. Keep them brief and concise.
1. Explain the trade-offs.
1. Each ADR should be about one AD, not multiple ADs
1. Don't alter existing information in an ADR. Instead, amend the ADR by adding new information, or supersede the ADR by creating a new ADR.
1. Explain your organization's situation and business priorities.
1. Include rationale and considerations based on social and skills makeups of your teams.
1. Include pros and cons that are relevant, and describe them in terms that align with your needs and goals.
1. Explain what follows from making the decision. This can include the effects, outcomes, outputs, follow ups, and more.
## Don't
1. Try to guess what the executive leader wants, and then attempt to please them. Be objective.
1. Try to solve everything all at once. A pretty good solution now is MUCH BETTER than a perfect solution later. Carpe diem!
1. Hide any doubts or unanswered questions.
1. Make it a sales pitch. Everything has upsides and downsides - be authentic and honest about them.
1. Perform merely a superficial investigation. If an ADR doesn't call for some deep thinking, then it probably shouldn't exist.
1. Ignore the long-term costs such as performance, tech debt or hardware and maintenance.
1. Get tunnel vision where creative or surprising approaches are not explored.
# Template - use the format below for each new ADR
1. **Author** - who has written the ADR
1. **Status** - one of: PROPOSED, ACCEPTED, REJECTED, SUPERSEDED-BY or DEPRECATED
1. **Date** - when the status was most recently updated
1. **Problem** - a concise paragraph explaining the context
1. **Criteria** - a list of the two or three metrics by which the solution was evaluated, and their relative weights (importance)
1. **Decision** - what was chosen as the way forward, and what the consequences are of the decision

CMakeLists.txt

@@ -1,7 +1,6 @@
 # MemGraph CMake configuration

-cmake_minimum_required(VERSION 3.12)
-cmake_policy(SET CMP0076 NEW)
+cmake_minimum_required(VERSION 3.8)

 # !! IMPORTANT !! run ./project_root/init.sh before cmake command
 # to download dependencies
@@ -19,12 +18,10 @@ set_directory_properties(PROPERTIES CLEAN_NO_CUSTOM TRUE)
 # during the code coverage process
 find_program(CCACHE_FOUND ccache)
 option(USE_CCACHE "ccache:" ON)
-message(STATUS "CCache: ${USE_CCACHE}")
 if(CCACHE_FOUND AND USE_CCACHE)
   set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
   set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
-  message(STATUS "CCache: Used")
-else ()
-  message(STATUS "CCache: Not used")
 endif(CCACHE_FOUND AND USE_CCACHE)

 # choose a compiler
@@ -40,14 +37,7 @@ endif()
 # -----------------------------------------------------------------------------

-project(memgraph LANGUAGES C CXX)
-
-#TODO: upgrade to cmake 3.24 + CheckIPOSupported
-#cmake_policy(SET CMP0138 NEW)
-#include(CheckIPOSupported)
-#check_ipo_supported()
-#set(CMAKE_INTERPROCEDURAL_OPTIMIZATION_Release TRUE)
-#set(CMAKE_INTERPROCEDURAL_OPTIMIZATION_RelWithDebInfo TRUE)
+project(memgraph)

 # Install licenses.
 install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/licenses/
@@ -153,9 +143,7 @@ endif()
 # files used can be seen here:
 # https://git-scm.com/book/en/v2/Git-Internals-Git-References
 set(git_directory "${CMAKE_SOURCE_DIR}/.git")
-# Check for directory because if the repo is cloned as a git submodule, .git is
-# a file and below code doesn't work.
-if (IS_DIRECTORY "${git_directory}")
+if (EXISTS "${git_directory}")
   set_property(DIRECTORY APPEND PROPERTY
                CMAKE_CONFIGURE_DEPENDS "${git_directory}/HEAD")
   file(STRINGS "${git_directory}/HEAD" git_head_data)
@@ -170,7 +158,7 @@ endif()

 # setup CMake module path, defines path for include() and find_package()
 # https://cmake.org/cmake/help/latest/variable/CMAKE_MODULE_PATH.html
-list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
+set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${PROJECT_SOURCE_DIR}/cmake)
 # custom function definitions
 include(functions)
 # -----------------------------------------------------------------------------
@@ -196,7 +184,7 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON)
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall \
     -Werror=switch -Werror=switch-bool -Werror=return-type \
     -Werror=return-stack-address \
-    -Wno-c99-designator -Wmissing-field-initializers \
+    -Wno-c99-designator \
     -DBOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT")

 # Don't omit frame pointer in RelWithDebInfo, for additional callchain debug.
@@ -211,13 +199,8 @@ set(CMAKE_CXX_FLAGS_RELWITHDEBINFO
 # ** Static linking is allowed only for executables! **
 set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libgcc -static-libstdc++")

-# Use lld linker to speedup build and use less memory.
-add_link_options(-fuse-ld=lld)
-# NOTE: Moving to latest Clang (probably starting from 15), lld stopped to work
-# without explicit link_directories call.
-string(REPLACE ":" " " LD_LIBS $ENV{LD_LIBRARY_PATH})
-separate_arguments(LD_LIBS)
-link_directories(${LD_LIBS})
+# Use gold linker to speedup build
+set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=gold")

 # release flags
 set(CMAKE_CXX_FLAGS_RELEASE "-O2 -DNDEBUG")
@@ -240,6 +223,7 @@ else()
 endif()
 # -----------------------------------------------------------------------------

+# default build type is debug
 if (NOT CMAKE_BUILD_TYPE)
   set(CMAKE_BUILD_TYPE "Debug")
@@ -247,20 +231,16 @@ endif()
 message(STATUS "CMake build type: ${CMAKE_BUILD_TYPE}")
 # -----------------------------------------------------------------------------

-add_definitions( -DCMAKE_BUILD_TYPE_NAME="${CMAKE_BUILD_TYPE}")
-
-if (NOT MG_ARCH)
-  set(MG_ARCH_DESCR "Host architecture to build Memgraph on. Supported values are x86_64, ARM64.")
-  if (${CMAKE_HOST_SYSTEM_PROCESSOR} MATCHES "aarch64")
-    set(MG_ARCH "ARM64" CACHE STRING ${MG_ARCH_DESCR})
-  else()
-    set(MG_ARCH "x86_64" CACHE STRING ${MG_ARCH_DESCR})
-  endif()
-endif()
-message(STATUS "MG_ARCH: ${MG_ARCH}")
+set(MG_ARCH "x86_64" CACHE STRING "Host architecture to build Memgraph on. Supported values are x86_64 (default), ARM64.")

 # setup external dependencies -------------------------------------------------
+set(CMAKE_THREAD_LIBS_INIT "-lpthread")
+set(CMAKE_HAVE_THREADS_LIBRARY 1)
+set(CMAKE_USE_WIN32_THREADS_INIT 0)
+set(CMAKE_USE_PTHREADS_INIT 1)
+set(THREADS_PREFER_PTHREAD_FLAG ON)
 # threading
 find_package(Threads REQUIRED)
 # optional readline
@@ -276,6 +256,7 @@ endif()
 set(libs_dir ${CMAKE_SOURCE_DIR}/libs)
 add_subdirectory(libs EXCLUDE_FROM_ALL)

+# Optional subproject configuration -------------------------------------------
 option(TEST_COVERAGE "Generate coverage reports from running memgraph" OFF)
 option(TOOLS "Build tools binaries" ON)
 option(QUERY_MODULES "Build query modules containing custom procedures" ON)
@@ -283,8 +264,6 @@ option(ASAN "Build with Address Sanitizer. To get a reasonable performance option should be used only in Release or RelWithDebInfo build " OFF)
 option(TSAN "Build with Thread Sanitizer. To get a reasonable performance option should be used only in Release or RelWithDebInfo build " OFF)
 option(UBSAN "Build with Undefined Behaviour Sanitizer" OFF)

-# Build feature flags
-
 if (TEST_COVERAGE)
   string(TOLOWER ${CMAKE_BUILD_TYPE} lower_build_type)
   if (NOT lower_build_type STREQUAL "debug")
@@ -298,25 +277,12 @@ if (MG_ENTERPRISE)
   add_definitions(-DMG_ENTERPRISE)
 endif()

-option(ENABLE_JEMALLOC "Use jemalloc" ON)
-
-option(MG_MEMORY_PROFILE "If build should be setup for memory profiling" OFF)
-if (MG_MEMORY_PROFILE AND ENABLE_JEMALLOC)
-  message(STATUS "Jemalloc has been disabled because MG_MEMORY_PROFILE is enabled")
-  set(ENABLE_JEMALLOC OFF)
-endif ()
-if (MG_MEMORY_PROFILE AND ASAN)
-  message(STATUS "ASAN has been disabled because MG_MEMORY_PROFILE is enabled")
-  set(ASAN OFF)
-endif ()
-if (MG_MEMORY_PROFILE)
-  add_compile_definitions(MG_MEMORY_PROFILE)
-endif ()
+set(ENABLE_JEMALLOC ON)

 if (ASAN)
   message(WARNING "Disabling jemalloc as it doesn't work well with ASAN")
   set(ENABLE_JEMALLOC OFF)
-  # Enable Address sanitizer and get nicer stack traces in error messages.
+  # Enable Addres sanitizer and get nicer stack traces in error messages.
   # NOTE: AddressSanitizer uses llvm-symbolizer binary from the Clang
   # distribution to symbolize the stack traces (note that ideally the
   # llvm-symbolizer version must match the version of ASan runtime library).
@@ -337,8 +303,6 @@ if (ASAN)
 endif()

 if (TSAN)
-  message(WARNING "Disabling jemalloc as it doesn't work well with ASAN")
-  set(ENABLE_JEMALLOC OFF)
   # ThreadSanitizer generally requires all code to be compiled with -fsanitize=thread.
   # If some code (e.g. dynamic libraries) is not compiled with the flag, it can
   # lead to false positive race reports, false negative race reports and/or
@@ -354,7 +318,7 @@ if (TSAN)
   # By default ThreadSanitizer uses addr2line utility to symbolize reports.
   # llvm-symbolizer is faster, consumes less memory and produces much better
   # reports. To use it set runtime flag:
-  #   TSAN_OPTIONS="extern-symbolizer-path=~/llvm-symbolizer"
+  # TSAN_OPTIONS="extern-symbolizer-path=~/llvm-symbolizer"
   # For more runtime flags see: https://github.com/google/sanitizers/wiki/ThreadSanitizerFlags
 endif()

README.md

@@ -1,9 +1,13 @@
 <p align="center">
-  <img src="https://public-assets.memgraph.com/github-readme-images/github-memgraph-repo-banner.png">
+  <img width="400px" src="https://uploads-ssl.webflow.com/5e7ceb09657a69bdab054b3a/5e7ceb09657a6937ab054bba_Black_Original%20_Logo.png">
 </p>

 ---

+<p align="center">
+  Build modern, graph-based applications on top of your streaming data in minutes.
+</p>
+
 <p align="center">
   <a href="https://github.com/memgraph/memgraph/blob/master/licenses/APL.txt">
     <img src="https://img.shields.io/badge/license-APL-green" alt="license" title="license"/>
@@ -18,7 +22,7 @@
 <p align="center">
   <a href="https://github.com/memgraph/memgraph">
-    <img src="https://img.shields.io/github/actions/workflow/status/memgraph/memgraph/release_debian10.yaml?branch=master&label=build%20and%20test&logo=github"/>
+    <img src="https://img.shields.io/github/workflow/status/memgraph/memgraph/Release%20Ubuntu%2020.04/master" alt="build" title="build"/>
   </a>
   <a href="https://memgraph.com/docs/" alt="Documentation">
     <img src="https://img.shields.io/badge/documentation-Memgraph-orange" />
@@ -33,10 +37,9 @@
 ## :clipboard: Description

-Memgraph is an open source graph database built for real-time streaming and
-compatible with Neo4j. Whether you're a developer or a data scientist with
-interconnected data, Memgraph will get you the immediate actionable insights
-fast.
+Memgraph is a streaming graph application platform that helps you wrangle your
+streaming data, build sophisticated models that you can query in real-time, and
+develop graph applications.

 Memgraph directly connects to your streaming infrastructure. You can ingest data
 from sources like Kafka, SQL, or plain CSV files. Memgraph provides a standard
@@ -48,20 +51,8 @@ natural and effective way to model many real-world problems without relying on
 complex SQL schemas.

 Memgraph is implemented in C/C++ and leverages an in-memory first architecture
-to ensure that you're getting the [best possible
-performance](http://memgraph.com/benchgraph) consistently and without surprises.
-It's also ACID-compliant and highly available.
-
-## :zap: Features
-
-- Run Python, Rust, and C/C++ code natively, check out the
-  [MAGE](https://github.com/memgraph/mage) graph algorithm library
-- Native support for machine learning
-- Streaming support
-- Replication
-- Authentication and authorization
-- ACID compliance
+to ensure that you’re getting the best possible performance consistently and
+without surprises. It’s also ACID-compliant and highly available.

 ## :video_game: Memgraph Playground
@@ -85,49 +76,28 @@ your browser.
 ### macOS

 [![macOS](https://img.shields.io/badge/macOS-Docker-000000?style=for-the-badge&logo=macos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-on-macos-docker)
-[![macOS](https://img.shields.io/badge/lima-AACF41?style=for-the-badge&logo=macos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-on-ubuntu)

 ### Linux

 [![Linux](https://img.shields.io/badge/Linux-Docker-FCC624?style=for-the-badge&logo=linux&logoColor=black)](https://memgraph.com/docs/memgraph/install-memgraph-on-linux-docker)
 [![Debian](https://img.shields.io/badge/Debian-D70A53?style=for-the-badge&logo=debian&logoColor=white)](https://memgraph.com/docs/memgraph/install-memgraph-on-debian)
 [![Ubuntu](https://img.shields.io/badge/Ubuntu-E95420?style=for-the-badge&logo=ubuntu&logoColor=white)](https://memgraph.com/docs/memgraph/install-memgraph-on-ubuntu)
-[![Cent OS](https://img.shields.io/badge/cent%20os-002260?style=for-the-badge&logo=centos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)
-[![Fedora](https://img.shields.io/badge/fedora-0B57A4?style=for-the-badge&logo=fedora&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)
-[![RedHat](https://img.shields.io/badge/redhat-EE0000?style=for-the-badge&logo=redhat&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)
+[![Cent
+OS](https://img.shields.io/badge/cent%20os-002260?style=for-the-badge&logo=centos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)

 You can find the binaries and Docker images on the [Download
 Hub](https://memgraph.com/download) and the installation instructions in the
 [official documentation](https://memgraph.com/docs/memgraph/installation).

-## :cloud: Memgraph Cloud
-
-Check out [Memgraph Cloud](https://memgraph.com/docs/memgraph-cloud) - a cloud service fully managed on AWS and available in 6 geographic regions around the world. Memgraph Cloud allows you to create projects with Enterprise instances of MemgraphDB from your browser.
-
-<p align="left">
-  <a href="https://memgraph.com/docs/memgraph-cloud">
-    <img width="450px" alt="Memgraph Cloud" src="https://public-assets.memgraph.com/memgraph-gifs%2Fcloud.gif">
-  </a>
-</p>
-
-## :link: Connect to Memgraph
-
-[Connect to the database](https://memgraph.com/docs/memgraph/connect-to-memgraph) using Memgraph Lab, mgconsole, various drivers (Python, C/C++ and others) and WebSocket.
-
-### :microscope: Memgraph Lab
-
-Visualize graphs and play with queries to understand your data. [Memgraph Lab](https://memgraph.com/docs/memgraph-lab) is a user interface that helps you explore and manipulate the data stored in Memgraph. Visualize graphs, execute ad hoc queries, and optimize their performance.
+## :zap: Features
+
+- Run Python, Rust, and C/C++ code natively, check out the
+  [MAGE](https://github.com/memgraph/mage) graph algorithm library
+- Native support for machine learning
+- Streaming support
+- Replication
+- Authentication and authorization
+- ACID compliance
<p align="left">
<a href="https://memgraph.com/docs/memgraph-lab">
<img width="450px" alt="Memgraph Cloud" src="https://public-assets.memgraph.com/memgraph-gifs%2Flab.gif">
</a>
</p>
## :file_folder: Import data
[Import data](https://memgraph.com/docs/memgraph/import-data) into Memgraph using Kafka, RedPanda or Pulsar streams, CSV and JSON files, or Cypher commands.
## :bookmark_tabs: Documentation ## :bookmark_tabs: Documentation
@ -141,20 +111,29 @@ guide](https://memgraph.com/docs/memgraph/reference-guide/configuration).
## :trophy: Contributing ## :trophy: Contributing
Welcome to the heart of Memgraph development! We're on a mission to supercharge Memgraph, making it faster, more user-friendly, and even more powerful. We owe a big thanks to our fantastic community of contributors who help us fix bugs and bring incredible improvements to life. If you're passionate about databases and open source, here's your chance to make a difference! The main purpose of this repository is to continue evolving Memgraph, making it
faster and easier to use. Development of Memgraph happens in the open on GitHub,
### Explore Memgraph Internals and we are grateful to the community for contributing bug fixes and
improvements. Read below to learn how you can take part in improving Memgraph.
Interested in the nuts and bolts of Memgraph? Our [internals documentation](https://memgraph.notion.site/Memgraph-Internals-12b69132d67a417898972927d6870bd2) is where you can uncover the inner workings of Memgraph's architecture, learn how to build the project from scratch, and discover the secrets of effective contributions. Dive deep into the database!
### Dive into the Contributing Guide
Ready to jump into the action? Explore our [contributing guide](CONTRIBUTING.md) to get the inside scoop on how we develop Memgraph. It's your roadmap for suggesting bug fixes and enhancements. Contribute your skills and ideas!
### Code of Conduct ### Code of Conduct
Our commitment to a respectful and professional community is unwavering. Every participant in Memgraph is expected to adhere to a stringent Code of Conduct. Please carefully review [the complete text](CODE_OF_CONDUCT.md) to gain a comprehensive understanding of the behaviors that are both expected and explicitly prohibited. Memgraph has adopted a Code of Conduct that we expect project participants to
adhere to. Please read [the full text](CODE_OF_CONDUCT.md) so that you can
understand what actions will and will not be tolerated.
We maintain a zero-tolerance policy towards any violations. Our shared commitment to this Code of Conduct ensures that Memgraph remains a place where integrity and excellence are paramount. ### Contributing Guide
Read our [contributing guide](CONTRIBUTING.md) to learn about our development
process and how to propose bug fixes and improvements.
### Internals
Read our
[internal](https://memgraph.notion.site/Memgraph-Internals-12b69132d67a417898972927d6870bd2)
docs to learn more about Memgraph's architecture, how to build the project from
source and how to start contributing. All information related to the database,
can be found in the aforementioned docs.
### :scroll: License ### :scroll: License
@ -162,16 +141,8 @@ Memgraph Community is available under the [BSL
license](./licenses/BSL.txt).</br> Memgraph Enterprise is available under the license](./licenses/BSL.txt).</br> Memgraph Enterprise is available under the
[MEL license](./licenses/MEL.txt). [MEL license](./licenses/MEL.txt).
## :busts_in_silhouette: Community
- :purple_heart: [**Discord**](https://discord.gg/memgraph)
- :ocean: [**Stack Overflow**](https://stackoverflow.com/questions/tagged/memgraphdb)
- :bird: [**Twitter**](https://twitter.com/memgraphdb)
- :movie_camera:
[**YouTube**](https://www.youtube.com/channel/UCZ3HOJvHGxtQ_JHxOselBYg)
<p align="center"> <p align="center">
<a href="#"> <a href="#">
<img src="https://img.shields.io/badge/⬆️ back_to_top_⬆-white" alt="Back to top" title="Back to top"/> <img src="https://img.shields.io/badge/⬆back_to_top_⬆-white" alt="Back to top" title="Back to top"/>
</a> </a>
</p> </p>
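Both README variants above point readers at client tooling for talking to a running instance. A minimal, hedged smoke test of that flow: the Docker image name matches the install badges above, while the mgconsole flags are assumed from common usage rather than taken from this diff.

```bash
# Start a local instance (image per the install section above),
# then issue a single Cypher statement through mgconsole.
docker run -d -p 7687:7687 --name mg memgraph/memgraph
echo "CREATE (n:Greeting {msg: 'hello'}) RETURN n.msg;" | mgconsole --host 127.0.0.1 --port 7687
```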
cmake/FindJemalloc.cmake (new file)
@@ -0,0 +1,55 @@
# Try to find jemalloc library
#
# Use this module as:
# find_package(Jemalloc)
#
# or:
# find_package(Jemalloc REQUIRED)
#
# This will define the following variables:
#
# Jemalloc_FOUND True if the system has the jemalloc library.
# Jemalloc_INCLUDE_DIRS Include directories needed to use jemalloc.
# Jemalloc_LIBRARIES Libraries needed to link to jemalloc.
#
# The following cache variables may also be set:
#
# Jemalloc_INCLUDE_DIR The directory containing jemalloc/jemalloc.h.
# Jemalloc_LIBRARY The path to the jemalloc static library.
find_path(Jemalloc_INCLUDE_DIR NAMES jemalloc/jemalloc.h PATH_SUFFIXES include)
find_library(Jemalloc_LIBRARY NAMES libjemalloc.a PATH_SUFFIXES lib)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Jemalloc
FOUND_VAR Jemalloc_FOUND
REQUIRED_VARS
Jemalloc_LIBRARY
Jemalloc_INCLUDE_DIR
)
if(Jemalloc_FOUND)
set(Jemalloc_LIBRARIES ${Jemalloc_LIBRARY})
set(Jemalloc_INCLUDE_DIRS ${Jemalloc_INCLUDE_DIR})
else()
if(Jemalloc_FIND_REQUIRED)
message(FATAL_ERROR "Cannot find jemalloc!")
else()
message(WARNING "jemalloc is not found!")
endif()
endif()
if(Jemalloc_FOUND AND NOT TARGET Jemalloc::Jemalloc)
add_library(Jemalloc::Jemalloc UNKNOWN IMPORTED)
set_target_properties(Jemalloc::Jemalloc
PROPERTIES
IMPORTED_LOCATION "${Jemalloc_LIBRARY}"
INTERFACE_INCLUDE_DIRECTORIES "${Jemalloc_INCLUDE_DIR}"
)
endif()
mark_as_advanced(
Jemalloc_INCLUDE_DIR
Jemalloc_LIBRARY
)
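For orientation, a consumer build has to make the module above visible on CMAKE_MODULE_PATH before `find_package(Jemalloc REQUIRED)` can resolve the `Jemalloc::Jemalloc` target. A minimal sketch, assuming the module stays under `cmake/` as shown (the project itself may already set the module path internally):

```bash
# Configure an out-of-source build that can see cmake/FindJemalloc.cmake.
cmake -S . -B build -DCMAKE_MODULE_PATH="$PWD/cmake"
cmake --build build
```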
@@ -1,67 +0,0 @@
# Try to find jemalloc library
#
# Use this module as:
# find_package(jemalloc)
#
# or:
# find_package(jemalloc REQUIRED)
#
# This will define the following variables:
#
# JEMALLOC_FOUND True if the system has the jemalloc library.
# Jemalloc_INCLUDE_DIRS Include directories needed to use jemalloc.
# Jemalloc_LIBRARIES Libraries needed to link to jemalloc.
#
# The following cache variables may also be set:
#
# Jemalloc_INCLUDE_DIR The directory containing jemalloc/jemalloc.h.
# Jemalloc_LIBRARY The path to the jemalloc static library.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(jemalloc
FOUND_VAR JEMALLOC_FOUND
REQUIRED_VARS
JEMALLOC_LIBRARY
JEMALLOC_INCLUDE_DIR
)
if(JEMALLOC_INCLUDE_DIR)
message(STATUS "Found jemalloc include dir: ${JEMALLOC_INCLUDE_DIR}")
else()
message(WARNING "jemalloc not found!")
endif()
if(JEMALLOC_LIBRARY)
message(STATUS "Found jemalloc library: ${JEMALLOC_LIBRARY}")
else()
message(WARNING "jemalloc library not found!")
endif()
if(JEMALLOC_FOUND)
set(Jemalloc_LIBRARIES ${JEMALLOC_LIBRARY})
set(Jemalloc_INCLUDE_DIRS ${JEMALLOC_INCLUDE_DIR})
else()
if(Jemalloc_FIND_REQUIRED)
message(FATAL_ERROR "Cannot find jemalloc!")
else()
message(WARNING "jemalloc is not found!")
endif()
endif()
if(JEMALLOC_FOUND AND NOT TARGET Jemalloc::Jemalloc)
message(STATUS "JEMALLOC NOT TARGET")
add_library(Jemalloc::Jemalloc UNKNOWN IMPORTED)
set_target_properties(Jemalloc::Jemalloc
PROPERTIES
IMPORTED_LOCATION "${JEMALLOC_LIBRARY}"
INTERFACE_INCLUDE_DIRECTORIES "${JEMALLOC_INCLUDE_DIR}"
)
endif()
mark_as_advanced(
JEMALLOC_INCLUDE_DIR
JEMALLOC_LIBRARY
)
@@ -99,30 +99,10 @@ modifications:
     value: "SNAPSHOT_ISOLATION"
     override: true
-  - name: "storage_mode"
-    value: "IN_MEMORY_TRANSACTIONAL"
-    override: true
   - name: "allow_load_csv"
     value: "true"
     override: false
-  - name: "storage_parallel_index_recovery"
-    value: "false"
-    override: true
-  - name: "storage_parallel_schema_recovery"
-    value: "false"
-    override: true
-  - name: "storage_enable_schema_metadata"
-    value: "false"
-    override: true
-  - name: "query_callable_mappings_path"
-    value: "/etc/memgraph/apoc_compatibility_mappings.json"
-    override: true
 undocumented:
   - "flag_file"
   - "also_log_to_stderr"
@@ -5,10 +5,12 @@ import os
 import subprocess
 import sys
 import textwrap
 import xml.etree.ElementTree as ET
 import yaml

 SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
 CONFIG_FILE = os.path.join(SCRIPT_DIR, "flags.yaml")
 WIDTH = 80
@@ -16,21 +18,14 @@ WIDTH = 80
 def wrap_text(s, initial_indent="# "):
     return "\n#\n".join(
-        map(lambda x: textwrap.fill(x, WIDTH, initial_indent=initial_indent, subsequent_indent="# "), s.split("\n"))
-    )
+        map(lambda x: textwrap.fill(x, WIDTH, initial_indent=initial_indent,
+                                    subsequent_indent="# "), s.split("\n")))

 def extract_flags(binary_path):
     ret = {}
-    data = subprocess.run([binary_path, "--help-xml"], stdout=subprocess.PIPE).stdout.decode("utf-8")
-    # If something is printed out before the help output, it will break the
-    # XML parsing -> filter out any non-XML line, because something can be
-    # logged before the gflags output (e.g. during the global objects init).
-    # This gets called during the memgraph build phase to generate the default
-    # config file later installed under /etc/memgraph/memgraph.conf
-    # NOTE: Don't use \n in the gflags description strings.
-    # NOTE: Check here if the gflags version changes because of the XML format.
-    data = "\n".join([line for line in data.split("\n") if line.startswith("<")])
+    data = subprocess.run([binary_path, "--help-xml"],
+                          stdout=subprocess.PIPE).stdout.decode("utf-8")
     root = ET.fromstring(data)
     for child in root:
         if child.tag == "usage" and child.text.lower().count("warning"):
@@ -51,7 +46,8 @@ def apply_config_to_flags(config, flags):
     for modification in config["modifications"]:
         name = modification["name"]
         if name not in flags:
-            print("WARNING: Flag '" + name + "' missing from binary!", file=sys.stderr)
+            print("WARNING: Flag '" + name + "' missing from binary!",
+                  file=sys.stderr)
             continue
         flags[name]["default"] = modification["value"]
         flags[name]["override"] = modification["override"]
@@ -79,9 +75,8 @@ def extract_sections(flags):
         else:
             sections.append((current_section, current_flags))
     sections.append(("other", other))
-    assert set(sum(map(lambda x: x[1], sections), [])) == set(
-        flags.keys()
-    ), "The section extraction algorithm lost some flags!"
+    assert set(sum(map(lambda x: x[1], sections), [])) == set(flags.keys()), \
+        "The section extraction algorithm lost some flags!"
     return sections
@@ -94,7 +89,8 @@ def generate_config_file(sections, flags):
             helpstr = flag["meaning"] + " [" + flag["type"] + "]"
             ret += wrap_text(helpstr) + "\n"
             prefix = "# " if not flag["override"] else ""
-            ret += prefix + "--" + flag["name"].replace("_", "-") + "=" + flag["default"] + "\n\n"
+            ret += prefix + "--" + flag["name"].replace("_", "-") + \
+                "=" + flag["default"] + "\n\n"
         ret += "\n"
     ret += wrap_text(config["footer"])
     return ret.strip() + "\n"
@@ -102,9 +98,13 @@ def generate_config_file(sections, flags):
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()
-    parser.add_argument("memgraph_binary", help="path to Memgraph binary")
-    parser.add_argument("output_file", help="path where to store the generated Memgraph " "configuration file")
-    parser.add_argument("--config-file", default=CONFIG_FILE, help="path to generator configuration file")
+    parser.add_argument("memgraph_binary",
+                        help="path to Memgraph binary")
+    parser.add_argument("output_file",
+                        help="path where to store the generated Memgraph "
+                             "configuration file")
+    parser.add_argument("--config-file", default=CONFIG_FILE,
+                        help="path to generator configuration file")
     args = parser.parse_args()
     flags = extract_flags(args.memgraph_binary)
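To make the flow above concrete: the generator shells out to `<binary> --help-xml`, merges in the flags.yaml modifications, and writes the config file. A hypothetical invocation (the script's own filename and install paths are not shown in this diff), with the output shape implied by `generate_config_file` and the `storage_mode`/`allow_load_csv` entries above:

```bash
# Hypothetical paths; flags.yaml is read from next to the script by default.
python3 generate_memgraph_config.py /usr/lib/memgraph/memgraph /etc/memgraph/memgraph.conf
# override: true  -> emitted active:     --storage-mode=IN_MEMORY_TRANSACTIONAL
# override: false -> emitted commented:  # --allow-load-csv=true
```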
@@ -1,26 +0,0 @@
{
"dbms.components": "mgps.components",
"apoc.util.validate": "mgps.validate",
"db.schema.nodeTypeProperties": "schema.NodeTypeOroperties",
"db.schema.relTypeProperties": "schema.RelTypeProperties",
"apoc.coll.contains": "collections.contains",
"apoc.coll.partition": "collections.partition",
"apoc.coll.toSet": "collections.to_set",
"apoc.coll.unionAll": "collections.unionAll",
"apoc.coll.removeAll": "collections.remove_all",
"apoc.coll.union": "collections.union",
"apoc.coll.sum": "collections.sum",
"apoc.coll.pairs": "collections.pairs",
"apoc.map.fromLists": "map.from_lists",
"apoc.map.removeKeys": "map.remove_keys",
"apoc.map.merge": "map.merge",
"apoc.create.nodes": "create.nodes",
"apoc.create.removeProperties": "create.remove_properties",
"apoc.create.node": "create.node",
"apoc.create.removeLabel": "create.remove_label",
"apoc.refactor.invert": "refactor.invert",
"apoc.refactor.cloneNode": "refactor.clone_node",
"apoc.refactor.cloneSubgraph": "refactor.clone_subgraph",
"apoc.refactor.cloneSubgraphFromPath": "refactor.clone_subgraph_from_path",
"apoc.label.exists": "label.exists"
}
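This mapping file pairs with the `query_callable_mappings_path` flag listed in flags.yaml above. A hedged sketch of how the two fit together (the exact resolution behavior is inferred from the mapping, not spelled out in this diff):

```bash
# Point Memgraph at the mapping so APOC-style names resolve to native procedures.
memgraph --query-callable-mappings-path=/etc/memgraph/apoc_compatibility_mappings.json
# A query calling apoc.coll.sum(...) would then be served by collections.sum.
```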
@@ -202,29 +202,3 @@ for row in csv.reader(stream, delimiter=',', doublequote=True,
 For more information about the meaning of the above values, see:
 https://docs.python.org/3/library/csv.html#csv.Dialect
## Errors
1. [Skipping duplicate node with ID '{}'. For more details, visit:
memgr.ph/csv-import-tool.](#error-1)
2. [Skipping bad relationship with START_ID '{}'. For more details, visit:
memgr.ph/csv-import-tool.](#error-2)
3. [Skipping bad relationship with END_ID '{}'. For more details, visit:
memgr.ph/csv-import-tool.](#error-3)
## Skipping duplicate node with ID {} {#error-1}
Duplicate nodes are nodes that have an ID that is the same as another node that
was already imported. You can instruct the importer to ignore all duplicate
nodes (instead of raising an error) by using the `--skip-duplicate-nodes` flag.
## Skipping bad relationship with START_ID {} {#error-2}
A node with the id `START_ID` doesn't exist. You can instruct the importer to
ignore all bad relationships (instead of raising an error) that refer to nodes
that don't exist in the node files by using the `--skip-bad-relationships` flag.
## Skipping bad relationship with END_ID {} {#error-3}
A node with the id `END_ID` doesn't exist. You can instruct the importer to
ignore all bad relationships (instead of raising an error) that refer to nodes
that don't exist in the node files by using the `--skip-bad-relationships` flag.
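These error descriptions map one-to-one onto importer flags. Assuming Memgraph's packaged `mg_import_csv` binary (the tool's name is not given in this excerpt) and hypothetical input files, a run that tolerates both problem classes might look like:

```bash
# Both --skip-* flags suppress the errors documented above.
mg_import_csv \
  --nodes=nodes.csv \
  --relationships=relationships.csv \
  --skip-duplicate-nodes \
  --skip-bad-relationships
```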
environment/.gitignore (new file)
@@ -0,0 +1,2 @@
archives
build
@@ -1,15 +0,0 @@
# Memgraph Operating Environments
## Issues related to build toolchain
* GCC 11.2 (toolchain-v4) doesn't compile on Fedora 38 (multiple definitions of an enum)
* spdlog 1.10/11 doesn't work with fmt 10.0.0
## os
Under the `os` directory, you can find scripts to install all required system
dependencies on operating systems where Memgraph natively builds. The testing
script helps to see how to install all packages (in the case of a new package),
or make any adjustments in the overall system setup. Also, the testing script
helps check if Memgraph runs on a freshly installed operating system (with no
packages installed).
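Concretely, each script under `os/` exposes the same interface (visible in the scripts later in this diff): an action name (`list`, `check`, or `install`) plus the name of one of its dependency arrays. For example, on a matching distribution:

```bash
# Report, verify, then install the Memgraph build prerequisites.
./os/centos-9.sh list MEMGRAPH_BUILD_DEPS
./os/centos-9.sh check MEMGRAPH_BUILD_DEPS
sudo ./os/centos-9.sh install MEMGRAPH_BUILD_DEPS
```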
@@ -1,6 +1,3 @@
 *.deb
-*.deb.*
 *.rpm
-*.rpm.*
 *.tar.gz
-*.tar.gz.*
@@ -1,190 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "amzn-2"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
git gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel texinfo
curl libcurl-devel # for cmake
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel pcre2-devel automake bison # for swig
file
openssl-devel
gmp-devel
gperf
diffutils
patch
libipt libipt-devel # intel
perl # for openssl
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat xz-libs python3 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake # build system
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
openssl
libseccomp-devel
python3 python3-pip nmap-ncat # for tests
#
# IMPORTANT: python3-yaml does NOT exist on CentOS
# Install it using `pip3 install PyYAML`
#
PyYAML # Package name here does not correspond to the yum package!
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which nodejs golang custom-golang1.18.9 zip unzip java-11-openjdk-devel jdk-17 custom-maven3.9.3 # for driver tests
autoconf # for jemalloc code generation
libtool # for protobuf code generation
cyrus-sasl-devel
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
# On Fedora, yum/dnf and python3.10 use a newer glibc which is not compatible
# with ours, so we need to momentarily disable the env
local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-""}
LD_LIBRARY_PATH=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == "PyYAML" ]; then
if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
continue
fi
if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
}
install() {
cd "$DIR"
if [ "$EUID" -ne 0 ]; then
echo "Please run as root."
exit 1
fi
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
yum update -y
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == jdk-17 ]; then
if ! yum list installed jdk-17 >/dev/null 2>/dev/null; then
wget --no-check-certificate -c --header "Cookie: oraclelicense=accept-securebackup-cookie" https://download.oracle.com/java/17/latest/jdk-17_linux-x64_bin.rpm
rpm -Uvh jdk-17_linux-x64_bin.rpm
# NOTE: Set Java 11 as default.
update-alternatives --set java java-11-openjdk.x86_64
update-alternatives --set javac java-11-openjdk.x86_64
fi
continue
fi
if [ "$pkg" == libipt ]; then
if ! yum list installed libipt >/dev/null 2>/dev/null; then
yum install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-1.6.1-8.el8.x86_64.rpm
fi
continue
fi
if [ "$pkg" == libipt-devel ]; then
if ! yum list installed libipt-devel >/dev/null 2>/dev/null; then
yum install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-devel-1.6.1-8.el8.x86_64.rpm
fi
continue
fi
if [ "$pkg" == nodejs ]; then
if ! yum list installed nodejs >/dev/null 2>/dev/null; then
yum install https://rpm.nodesource.com/pub_16.x/nodistro/repo/nodesource-release-nodistro-1.noarch.rpm -y
yum install nodejs -y --setopt=nodesource-nodejs.module_hotfixes=1
fi
continue
fi
if [ "$pkg" == PyYAML ]; then
if [ -z ${SUDO_USER+x} ]; then # Running as root (e.g. Docker).
pip3 install --user PyYAML
else # Running using sudo.
sudo -H -u "$SUDO_USER" bash -c "pip3 install --user PyYAML"
fi
continue
fi
if [ "$pkg" == java-11-openjdk ]; then
amazon-linux-extras install -y java-openjdk11
continue
fi
if [ "$pkg" == java-11-openjdk-devel ]; then
amazon-linux-extras install -y java-openjdk11
yum install -y java-11-openjdk-devel
continue
fi
yum install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"
@@ -1,5 +1,7 @@
 #!/bin/bash
 set -Eeuo pipefail
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 source "$DIR/../util.sh"
@@ -18,7 +20,7 @@ TOOLCHAIN_BUILD_DEPS=(
     curl # snappy
     readline-devel # cmake and llvm
     libffi-devel libxml2-devel perl-Digest-MD5 # llvm
-    libedit-devel pcre-devel pcre2-devel automake bison # swig
+    libedit-devel pcre-devel automake bison # swig
     file
     openssl-devel
     gmp-devel
@@ -37,13 +39,12 @@ TOOLCHAIN_RUN_DEPS=(
 )
 MEMGRAPH_BUILD_DEPS=(
-    make cmake pkgconfig # build system
+    make pkgconfig # build system
     curl wget # for downloading libs
     libuuid-devel java-11-openjdk # required by antlr
     readline-devel # for memgraph console
     python3-devel # for query modules
     openssl-devel
-    openssl
     libseccomp-devel
     python3 python-virtualenv python3-pip nmap-ncat # for qa, macro_benchmark and stress tests
     #
@@ -55,21 +56,9 @@ MEMGRAPH_BUILD_DEPS=(
     sbcl # for custom Lisp C++ preprocessing
     rpm-build rpmlint # for RPM package building
     doxygen graphviz # source documentation generators
-    which mono-complete dotnet-sdk-3.1 golang custom-golang1.18.9 # for driver tests
-    nodejs zip unzip java-11-openjdk-devel jdk-17 custom-maven3.9.3 # for driver tests
+    which mono-complete dotnet-sdk-3.1 golang nodejs zip unzip java-11-openjdk-devel # for driver tests
     autoconf # for jemalloc code generation
     libtool # for protobuf code generation
-    cyrus-sasl-devel
-)
-MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
-MEMGRAPH_RUN_DEPS=(
-    logrotate openssl python3 libseccomp
-)
-NEW_DEPS=(
-    wget curl tar gzip
 )
 list() {
@@ -79,18 +68,6 @@ list() {
 check() {
     local missing=""
     for pkg in $1; do
-        if [ "$pkg" == custom-maven3.9.3 ]; then
-            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
-                missing="$pkg $missing"
-            fi
-            continue
-        fi
-        if [ "$pkg" == custom-golang1.18.9 ]; then
-            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
-                missing="$pkg $missing"
-            fi
-            continue
-        fi
         if [ "$pkg" == git ]; then
             if ! which "git" >/dev/null; then
                 missing="git $missing"
@@ -133,25 +110,7 @@ install() {
     yum update -y
     yum install -y wget python3 python3-pip
     yum install -y git
     for pkg in $1; do
-        if [ "$pkg" == custom-maven3.9.3 ]; then
-            install_custom_maven "3.9.3"
-            continue
-        fi
-        if [ "$pkg" == custom-golang1.18.9 ]; then
-            install_custom_golang "1.18.9"
-            continue
-        fi
-        if [ "$pkg" == jdk-17 ]; then
-            if ! yum list installed jdk-17 >/dev/null 2>/dev/null; then
-                wget https://download.oracle.com/java/17/latest/jdk-17_linux-x64_bin.rpm
-                rpm -ivh jdk-17_linux-x64_bin.rpm
-                update-alternatives --set java java-11-openjdk.x86_64
-                update-alternatives --set javac java-11-openjdk.x86_64
-            fi
-            continue
-        fi
         if [ "$pkg" == libipt ]; then
             if ! yum list installed libipt >/dev/null 2>/dev/null; then
                 yum install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-1.6.1-8.el8.x86_64.rpm
@@ -1,5 +1,7 @@
 #!/bin/bash
 set -Eeuo pipefail
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 source "$DIR/../util.sh"
@@ -7,17 +9,15 @@ check_operating_system "centos-9"
 check_architecture "x86_64"
 TOOLCHAIN_BUILD_DEPS=(
-    wget # used for archive download
     coreutils-common gcc gcc-c++ make # generic build tools
-    # NOTE: Pure libcurl conflicts with libcurl-minimal
-    libcurl-devel # cmake build requires it
+    wget # used for archive download
     gnupg2 # used for archive signature verification
     tar gzip bzip2 xz unzip # used for archive unpacking
     zlib-devel # zlib library used for all builds
     expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
     readline-devel # for cmake and llvm
     libffi-devel libxml2-devel # for llvm
-    libedit-devel pcre-devel pcre2-devel automake bison # for swig
+    libedit-devel pcre-devel automake bison # for swig
     file
     openssl-devel
     gmp-devel
@@ -40,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(
 MEMGRAPH_BUILD_DEPS=(
     git # source code control
-    make cmake pkgconf-pkg-config # build system
+    make pkgconf-pkg-config # build system
     wget # for downloading libs
     libuuid-devel java-11-openjdk # required by antlr
     readline-devel # for memgraph console
@@ -56,22 +56,10 @@ MEMGRAPH_BUILD_DEPS=(
     libcurl-devel # mg-requests
     rpm-build rpmlint # for RPM package building
     doxygen graphviz # source documentation generators
-    which nodejs golang custom-golang1.18.9 # for driver tests
-    zip unzip java-11-openjdk-devel java-17-openjdk java-17-openjdk-devel custom-maven3.9.3 # for driver tests
+    which nodejs golang zip unzip java-11-openjdk-devel # for driver tests
     sbcl # for custom Lisp C++ preprocessing
     autoconf # for jemalloc code generation
     libtool # for protobuf code generation
-    cyrus-sasl-devel
-)
-MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
-MEMGRAPH_RUN_DEPS=(
-    logrotate openssl python3 libseccomp
-)
-NEW_DEPS=(
-    wget curl tar gzip
 )
 list() {
@@ -81,18 +69,6 @@ list() {
 check() {
     local missing=""
     for pkg in $1; do
-        if [ "$pkg" == custom-maven3.9.3 ]; then
-            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
-                missing="$pkg $missing"
-            fi
-            continue
-        fi
-        if [ "$pkg" == custom-golang1.18.9 ]; then
-            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
-                missing="$pkg $missing"
-            fi
-            continue
-        fi
         if [ "$pkg" == "PyYAML" ]; then
             if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
                 missing="$pkg $missing"
@@ -125,20 +101,9 @@ install() {
     else
         echo "NOTE: export LANG=en_US.utf8"
     fi
-    # --nobest is used because of libipt: we install custom versions
-    # because libipt-devel is not available on CentOS 9 Stream
-    yum update -y --nobest
+    yum update -y
     yum install -y wget git python3 python3-pip
     for pkg in $1; do
-        if [ "$pkg" == custom-maven3.9.3 ]; then
-            install_custom_maven "3.9.3"
-            continue
-        fi
-        if [ "$pkg" == custom-golang1.18.9 ]; then
-            install_custom_golang "1.18.9"
-            continue
-        fi
         # Since there is no support for libipt-devel for CentOS 9 we install
         # Fedora's version of the same libs; they are the same version but released
         # for a different OS
@@ -1,10 +1,10 @@
 #!/bin/bash
 set -Eeuo pipefail
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 source "$DIR/../util.sh"
-# IMPORTANT: Deprecated since memgraph v2.12.0.
 check_operating_system "debian-10"
 check_architecture "x86_64"
@@ -24,7 +24,7 @@ TOOLCHAIN_BUILD_DEPS=(
     libgmp-dev # for gdb
     gperf # for proxygen
     git # for fbthrift
-    libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
+    libedit-dev libpcre3-dev automake bison # for swig
 )
 TOOLCHAIN_RUN_DEPS=(
@@ -40,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(
 MEMGRAPH_BUILD_DEPS=(
     git # source code control
-    make cmake pkg-config # build system
+    make pkg-config # build system
     curl wget # for downloading libs
     uuid-dev default-jre-headless # required by antlr
     libreadline-dev # for memgraph console
@@ -53,19 +53,10 @@ MEMGRAPH_BUILD_DEPS=(
     libcurl4-openssl-dev # mg-requests
     sbcl # for custom Lisp C++ preprocessing
     doxygen graphviz # source documentation generators
-    mono-runtime mono-mcs zip unzip default-jdk-headless oracle-java17-installer custom-maven3.9.3 # for driver tests
-    dotnet-sdk-3.1 golang custom-golang1.18.9 nodejs npm # for driver tests
+    mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
+    dotnet-sdk-3.1 golang nodejs npm
     autoconf # for jemalloc code generation
     libtool # for protobuf code generation
-    libsasl2-dev
-)
-MEMGRAPH_RUN_DEPS=(
-    logrotate openssl python3 libseccomp
-)
-NEW_DEPS=(
-    wget curl tar gzip
 )
 list() {
@@ -73,28 +64,7 @@ list() {
 }
 check() {
-    local missing=""
-    for pkg in $1; do
-        if [ "$pkg" == custom-maven3.9.3 ]; then
-            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
-                missing="$pkg $missing"
-            fi
-            continue
-        fi
-        if [ "$pkg" == custom-golang1.18.9 ]; then
-            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
-                missing="$pkg $missing"
-            fi
-            continue
-        fi
-        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
-            missing="$pkg $missing"
-        fi
-    done
-    if [ "$missing" != "" ]; then
-        echo "MISSING PACKAGES: $missing"
-        exit 1
-    fi
+    check_all_dpkg "$1"
 }
 install() {
@@ -105,15 +75,8 @@ deb http://deb.debian.org/debian/ buster-updates main contrib non-free
 deb-src http://deb.debian.org/debian/ buster-updates main contrib non-free
 deb http://security.debian.org/debian-security buster/updates main contrib non-free
 deb-src http://security.debian.org/debian-security buster/updates main contrib non-free
-EOF
-    apt --allow-releaseinfo-change update
-    cat >/etc/apt/sources.list.d/java.list << EOF
-deb http://ppa.launchpad.net/linuxuprising/java/ubuntu bionic main
-deb-src http://ppa.launchpad.net/linuxuprising/java/ubuntu bionic main
 EOF
     cd "$DIR"
-    apt install -y gnupg
-    apt-key adv --keyserver keyserver.ubuntu.com --recv-keys EA8CACC073C3DB2A
     apt --allow-releaseinfo-change update
     # If GitHub Actions runner is installed, append LANG to the environment.
     # Python related tests don't work without the LANG export.
@@ -122,26 +85,8 @@ EOF
     else
         echo "NOTE: export LANG=en_US.utf8"
     fi
-    apt install -y wget
     for pkg in $1; do
-        if [ "$pkg" == custom-maven3.9.3 ]; then
-            install_custom_maven "3.9.3"
-            continue
-        fi
-        if [ "$pkg" == custom-golang1.18.9 ]; then
-            install_custom_golang "1.18.9"
-            continue
-        fi
-        if [ "$pkg" == oracle-java17-installer ]; then
-            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
-                echo oracle-java17-installer shared/accepted-oracle-license-v1-3 select true | /usr/bin/debconf-set-selections
-                echo oracle-java17-installer shared/accepted-oracle-license-v1-3 seen true | /usr/bin/debconf-set-selections
-                apt install -y "$pkg"
-                update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
-                update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
-            fi
-            continue
-        fi
         if [ "$pkg" == dotnet-sdk-3.1 ]; then
             if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                 wget -nv https://packages.microsoft.com/config/debian/10/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
@@ -1,12 +1,12 @@
 #!/bin/bash
 set -Eeuo pipefail
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 source "$DIR/../util.sh"
-# IMPORTANT: Deprecated since memgraph v2.12.0.
 check_operating_system "debian-11"
-check_architecture "arm64" "aarch64"
+check_architecture "arm64"
 TOOLCHAIN_BUILD_DEPS=(
     coreutils gcc g++ build-essential make # generic build tools
@@ -18,7 +18,7 @@ TOOLCHAIN_BUILD_DEPS=(
     libcurl4-openssl-dev # for cmake
     libreadline-dev # for cmake and llvm
     libffi-dev libxml2-dev # for llvm
-    libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
+    libedit-dev libpcre3-dev automake bison # for swig
     curl # snappy
     file # for libunwind
     libssl-dev # for libevent
@@ -54,19 +54,10 @@ MEMGRAPH_BUILD_DEPS=(
     libcurl4-openssl-dev # mg-requests
     sbcl # for custom Lisp C++ preprocessing
     doxygen graphviz # source documentation generators
-    mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk custom-maven3.9.3 # for driver tests
-    golang custom-golang1.18.9 nodejs npm
+    mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
+    golang nodejs npm
     autoconf # for jemalloc code generation
     libtool # for protobuf code generation
-    libsasl2-dev
-)
-MEMGRAPH_RUN_DEPS=(
-    logrotate openssl python3 libseccomp
-)
-NEW_DEPS=(
-    wget curl tar gzip
 )
 list() {
@@ -74,28 +65,7 @@ list() {
 }
 check() {
-    local missing=""
-    for pkg in $1; do
-        if [ "$pkg" == custom-maven3.9.3 ]; then
-            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
-                missing="$pkg $missing"
-            fi
-            continue
-        fi
-        if [ "$pkg" == custom-golang1.18.9 ]; then
-            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
-                missing="$pkg $missing"
-            fi
-            continue
-        fi
-        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
-            missing="$pkg $missing"
-        fi
-    done
-    if [ "$missing" != "" ]; then
-        echo "MISSING PACKAGES: $missing"
-        exit 1
-    fi
+    check_all_dpkg "$1"
 }
 install() {
@@ -119,25 +89,7 @@ EOF
         echo "NOTE: export LANG=en_US.utf8"
     fi
     apt install -y wget
     for pkg in $1; do
-        if [ "$pkg" == custom-maven3.9.3 ]; then
-            install_custom_maven "3.9.3"
-            continue
-        fi
-        if [ "$pkg" == custom-golang1.18.9 ]; then
-            install_custom_golang "1.18.9"
-            continue
-        fi
-        if [ "$pkg" == openjdk-17-jdk ]; then
-            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
-                apt install -y "$pkg"
-                # The default Java version should be Java 11
-                update-alternatives --set java /usr/lib/jvm/java-11-openjdk-arm64/bin/java
-                update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-arm64/bin/javac
-            fi
-            continue
-        fi
         apt install -y "$pkg"
     done
 }
@@ -1,5 +1,7 @@
 #!/bin/bash
 set -Eeuo pipefail
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 source "$DIR/../util.sh"
@@ -16,7 +18,7 @@ TOOLCHAIN_BUILD_DEPS=(
     libcurl4-openssl-dev # for cmake
     libreadline-dev # for cmake and llvm
     libffi-dev libxml2-dev # for llvm
-    libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
+    libedit-dev libpcre3-dev automake bison # for swig
     curl # snappy
     file # for libunwind
     libssl-dev # for libevent
@@ -39,7 +41,7 @@ TOOLCHAIN_RUN_DEPS=(
 MEMGRAPH_BUILD_DEPS=(
     git # source code control
-    make cmake pkg-config # build system
+    make pkg-config # build system
     curl wget # for downloading libs
     uuid-dev default-jre-headless # required by antlr
     libreadline-dev # for memgraph console
@@ -52,21 +54,10 @@ MEMGRAPH_BUILD_DEPS=(
     libcurl4-openssl-dev # mg-requests
     sbcl # for custom Lisp C++ preprocessing
     doxygen graphviz # source documentation generators
-    mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk custom-maven3.9.3 # for driver tests
-    dotnet-sdk-3.1 golang custom-golang1.18.9 nodejs npm
+    mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
+    dotnet-sdk-3.1 golang nodejs npm
     autoconf # for jemalloc code generation
     libtool # for protobuf code generation
-    libsasl2-dev
-)
-MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
-MEMGRAPH_RUN_DEPS=(
-    logrotate openssl python3 libseccomp
-)
-NEW_DEPS=(
-    wget curl tar gzip
 )
 list() {
@@ -74,28 +65,7 @@ list() {
 }
 check() {
-    local missing=""
-    for pkg in $1; do
-        if [ "$pkg" == custom-maven3.9.3 ]; then
-            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
-                missing="$pkg $missing"
-            fi
-            continue
-        fi
-        if [ "$pkg" == custom-golang1.18.9 ]; then
-            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
-                missing="$pkg $missing"
-            fi
-            continue
-        fi
-        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
-            missing="$pkg $missing"
-        fi
-    done
-    if [ "$missing" != "" ]; then
-        echo "MISSING PACKAGES: $missing"
-        exit 1
-    fi
+    check_all_dpkg "$1"
 }
 install() {
@@ -119,25 +89,7 @@ EOF
         echo "NOTE: export LANG=en_US.utf8"
     fi
     apt install -y wget
     for pkg in $1; do
-        if [ "$pkg" == custom-maven3.9.3 ]; then
-            install_custom_maven "3.9.3"
-            continue
-        fi
-        if [ "$pkg" == custom-golang1.18.9 ]; then
-            install_custom_golang "1.18.9"
-            continue
-        fi
-        if [ "$pkg" == openjdk-17-jdk ]; then
-            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
-                apt install -y "$pkg"
-                # The default Java version should be Java 11
-                update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
-                update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
-            fi
-            continue
-        fi
         if [ "$pkg" == dotnet-sdk-3.1 ]; then
             if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                 wget -nv https://packages.microsoft.com/config/debian/10/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
@@ -1,134 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "debian-12"
check_architecture "arm64" "aarch64"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
wget # used for archive download
gnupg # used for archive signature verification
tar gzip bzip2 xz-utils unzip # used for archive unpacking
zlib1g-dev # zlib library used for all builds
libexpat1-dev liblzma-dev python3-dev texinfo # for gdb
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
curl # snappy
file # for libunwind
libssl-dev # for libevent
libgmp-dev
gperf # for proxygen
git # for fbthrift
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz-utils # used for archive unpacking
zlib1g # zlib library used for all builds
libexpat1 liblzma5 python3 # for gdb
libcurl4 # for cmake
file # for CPack
libreadline8 # for cmake and llvm
libffi8 libxml2 # for llvm
libssl-dev # for libevent
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
netcat # tests are using nc to wait for memgraph
python3 virtualenv python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
python3-yaml # for the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless custom-maven3.9.3 # for driver tests
dotnet-sdk-7.0 golang custom-golang1.18.9 nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
}
install() {
cd "$DIR"
apt update
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == dotnet-sdk-7.0 ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
dpkg -i packages-microsoft-prod.deb
apt-get update
apt-get install -y apt-transport-https dotnet-sdk-7.0
fi
continue
fi
apt install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"
@@ -1,136 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "debian-12"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
wget # used for archive download
gnupg # used for archive signature verification
tar gzip bzip2 xz-utils unzip # used for archive unpacking
zlib1g-dev # zlib library used for all builds
libexpat1-dev libipt-dev libbabeltrace-dev liblzma-dev python3-dev texinfo # for gdb
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
curl # snappy
file # for libunwind
libssl-dev # for libevent
libgmp-dev
gperf # for proxygen
git # for fbthrift
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz-utils # used for archive unpacking
zlib1g # zlib library used for all builds
libexpat1 libipt2 libbabeltrace1 liblzma5 python3 # for gdb
libcurl4 # for cmake
file # for CPack
libreadline8 # for cmake and llvm
libffi8 libxml2 # for llvm
libssl-dev # for libevent
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
netcat-traditional # tests are using nc to wait for memgraph
python3 virtualenv python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
python3-yaml # for the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless custom-maven3.9.3 # for driver tests
dotnet-sdk-7.0 golang custom-golang1.18.9 nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
}
install() {
cd "$DIR"
apt update
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == dotnet-sdk-7.0 ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
dpkg -i packages-microsoft-prod.deb
apt-get update
apt-get install -y apt-transport-https dotnet-sdk-7.0
fi
continue
fi
apt install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"
@@ -1,150 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
# IMPORTANT: Deprecated since memgraph v2.12.0.
check_operating_system "fedora-36"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
coreutils-common gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
curl libcurl-devel # for cmake
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel pcre2-devel automake bison # for swig
file
openssl-devel
gmp-devel
gperf
diffutils
libipt libipt-devel # intel
patch
perl # for openssl
git
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat xz-libs python3 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkgconf-pkg-config # build system
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
libseccomp-devel
python3 python3-pip python3-virtualenv python3-virtualenvwrapper python3-pyyaml nmap-ncat # for tests
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
java-11-openjdk-devel java-17-openjdk-devel custom-maven3.9.3 # for driver tests
which zip unzip
nodejs golang custom-golang1.18.9 # for driver tests
sbcl # for custom Lisp C++ preprocessing
autoconf # for jemalloc code generation
libtool # for protobuf code generation
cyrus-sasl-devel
)
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
if [ -v LD_LIBRARY_PATH ]; then
# On Fedora, yum/dnf and python3.10 use a newer glibc which is not compatible
# with ours, so we need to momentarily disable the env
local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
LD_LIBRARY_PATH=""
fi
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
if [ -v OLD_LD_LIBRARY_PATH ]; then
echo "Restoring LD_LIBRARY_PATH..."
LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
fi
}
install() {
cd "$DIR"
if [ "$EUID" -ne 0 ]; then
echo "Please run as root."
exit 1
fi
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
dnf update -y
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == java-17-openjdk-devel ]; then
if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
dnf install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java java-11-openjdk.x86_64
update-alternatives --set javac java-11-openjdk.x86_64
fi
continue
fi
dnf install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"
@@ -1,117 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "fedora-38"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
coreutils-common gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
curl libcurl-devel # for cmake
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel pcre2-devel automake bison # for swig
file
openssl-devel
gmp-devel
gperf
diffutils
libipt libipt-devel # intel
patch
perl # for openssl
git
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat xz-libs python3 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkgconf-pkg-config # build system
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
libseccomp-devel
python3 python3-pip python3-virtualenv python3-virtualenvwrapper python3-pyyaml nmap-ncat # for tests
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which nodejs golang zip unzip java-11-openjdk-devel # for driver tests
sbcl # for custom Lisp C++ preprocessing
autoconf # for jemalloc code generation
libtool # for protobuf code generation
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
if [ -v LD_LIBRARY_PATH ]; then
# On Fedora 38, yum/dnf and Python 3.11 use a newer glibc which is not compatible
# with ours, so we need to momentarily disable the env variable
local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
LD_LIBRARY_PATH=""
fi
local missing=""
for pkg in $1; do
if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
if [ -v OLD_LD_LIBRARY_PATH ]; then
echo "Restoring LD_LIBRARY_PATH..."
LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
fi
}
install() {
cd "$DIR"
if [ "$EUID" -ne 0 ]; then
echo "Please run as root."
exit 1
fi
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
dnf update -y
for pkg in $1; do
dnf install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"


@ -1,117 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "fedora-39"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
coreutils-common gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
curl libcurl-devel # for cmake
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel pcre2-devel automake bison # for swig
file
openssl-devel
gmp-devel
gperf
diffutils
libipt libipt-devel # intel
patch
perl # for openssl
git
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat xz-libs python3 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkgconf-pkg-config # build system
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
libseccomp-devel
python3 python3-pip python3-virtualenv python3-virtualenvwrapper python3-pyyaml nmap-ncat # for tests
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which nodejs golang zip unzip java-11-openjdk-devel # for driver tests
sbcl # for custom Lisp C++ preprocessing
autoconf # for jemalloc code generation
libtool # for protobuf code generation
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
if [ -v LD_LIBRARY_PATH ]; then
# On Fedora 39, yum/dnf and Python use a newer glibc which is not compatible
# with ours, so we need to momentarily disable the env variable
local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
LD_LIBRARY_PATH=""
fi
local missing=""
for pkg in $1; do
if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
if [ -v OLD_LD_LIBRARY_PATH ]; then
echo "Restoring LD_LIBRARY_PATH..."
LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
fi
}
install() {
cd "$DIR"
if [ "$EUID" -ne 0 ]; then
echo "Please run as root."
exit 1
fi
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
dnf update -y
for pkg in $1; do
dnf install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"

environment/os/macos-12.sh Executable file

@ -0,0 +1,23 @@
#!/bin/bash
brew install bash
brew install cmake
brew install clisp sbcl
brew install boost gflags fmt jemalloc openssl
brew install openssl@1.1
brew install cyrus-sasl
# cyrus-sasl is keg-only, which means it was not symlinked into /opt/homebrew,
# because macOS already provides this software and installing another version in
# parallel can cause all kinds of trouble.
# If you need to have cyrus-sasl first in your PATH, run:
# echo 'export PATH="/opt/homebrew/opt/cyrus-sasl/sbin:$PATH"' >> ~/.zshrc
# For compilers to find cyrus-sasl you may need to set:
# export LDFLAGS="-L/opt/homebrew/opt/cyrus-sasl/lib"
# export CPPFLAGS="-I/opt/homebrew/opt/cyrus-sasl/include"
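# NOTE: /opt/homebrew is the Homebrew prefix on Apple Silicon; on Intel Macs
# the prefix is /usr/local, so adjust the paths above accordingly.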
# TODO(gitbuda): memgraph::utils::SpinLock
# TODO(gitbuda): memgraph::utils::AsyncTimer
# TODO(gitbuda): memgraph::utils::RWLock
# TODO(gitbuda): memgraph::utils::ThreadSetName
# TODO(gitbuda): RocksDB 7.7.2 compiles fine


@ -1,212 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
# TODO(gitbuda): Rocky gets automatic updates -> figure out how to handle them.
check_operating_system "rocky-9.3"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
wget # used for archive download
coreutils-common gcc gcc-c++ make # generic build tools
# NOTE: Pure libcurl conflicts with libcurl-minimal
libcurl-devel # cmake build requires it
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel perl-Unicode-EastAsianWidth texinfo libbabeltrace-devel # for gdb
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel pcre2-devel automake bison # for swig
file
openssl-devel
gmp-devel
gperf
diffutils
libipt libipt-devel # intel
patch
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat xz-libs python3 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
perl # for openssl
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkgconf-pkg-config # build system
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
libseccomp-devel
python3 python3-pip python3-virtualenv nmap-ncat # for qa, macro_benchmark and stress tests
#
# IMPORTANT: python3-yaml does NOT exist on CentOS
# Install it manually using `pip3 install PyYAML`
#
PyYAML # Package name here does not correspond to the yum package!
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which nodejs golang custom-golang1.18.9 # for driver tests
zip unzip java-11-openjdk-devel java-17-openjdk java-17-openjdk-devel custom-maven3.9.3 # for driver tests
cl-asdf common-lisp-controller sbcl # for custom Lisp C++ preprocessing
autoconf # for jemalloc code generation
libtool # for protobuf code generation
cyrus-sasl-devel
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == "PyYAML" ]; then
if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == "python3-virtualenv" ]; then
continue
fi
if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
}
install() {
cd "$DIR"
if [ "$EUID" -ne 0 ]; then
echo "Please run as root."
exit 1
fi
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
yum update -y
yum install -y wget git python3 python3-pip
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == perl-Unicode-EastAsianWidth ]; then
if ! dnf list installed perl-Unicode-EastAsianWidth >/dev/null 2>/dev/null; then
dnf install -y https://dl.rockylinux.org/pub/rocky/9/CRB/x86_64/os/Packages/p/perl-Unicode-EastAsianWidth-12.0-7.el9.noarch.rpm
fi
continue
fi
if [ "$pkg" == texinfo ]; then
if ! dnf list installed texinfo >/dev/null 2>/dev/null; then
dnf install -y https://dl.rockylinux.org/pub/rocky/9/CRB/x86_64/os/Packages/t/texinfo-6.7-15.el9.x86_64.rpm
fi
continue
fi
if [ "$pkg" == libbabeltrace-devel ]; then
if ! dnf list installed libbabeltrace-devel >/dev/null 2>/dev/null; then
dnf install -y https://dl.rockylinux.org/pub/rocky/9/devel/x86_64/os/Packages/l/libbabeltrace-devel-1.5.8-10.el9.x86_64.rpm
fi
continue
fi
if [ "$pkg" == libipt-devel ]; then
if ! dnf list installed libipt-devel >/dev/null 2>/dev/null; then
dnf install -y https://dl.rockylinux.org/pub/rocky/9/devel/x86_64/os/Packages/l/libipt-devel-2.0.4-5.el9.x86_64.rpm
fi
continue
fi
if [ "$pkg" == doxygen ]; then
if ! dnf list installed doxygen >/dev/null 2>/dev/null; then
dnf install -y https://dl.rockylinux.org/pub/rocky/9/CRB/x86_64/os/Packages/d/doxygen-1.9.1-11.el9.x86_64.rpm
fi
continue
fi
if [ "$pkg" == cl-asdf ]; then
if ! dnf list installed cl-asdf >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.sysadmins.ws/el8/base/x86_64/cl-asdf-20101028-18.el8.noarch.rpm
fi
continue
fi
if [ "$pkg" == common-lisp-controller ]; then
if ! dnf list installed common-lisp-controller >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.sysadmins.ws/el8/base/x86_64/common-lisp-controller-7.4-20.el8.noarch.rpm
fi
continue
fi
if [ "$pkg" == sbcl ]; then
if ! dnf list installed sbcl >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.sysadmins.ws/el8/base/x86_64/sbcl-2.0.1-4.el8.x86_64.rpm
fi
continue
fi
if [ "$pkg" == PyYAML ]; then
if [ -z ${SUDO_USER+x} ]; then # Running as root (e.g. Docker).
pip3 install --user PyYAML
else # Running using sudo.
sudo -H -u "$SUDO_USER" bash -c "pip3 install --user PyYAML"
fi
continue
fi
if [ "$pkg" == python3-virtualenv ]; then
if [ -z ${SUDO_USER+x} ]; then # Running as root (e.g. Docker).
pip3 install virtualenv
pip3 install virtualenvwrapper
else # Running using sudo.
sudo -H -u "$SUDO_USER" bash -c "pip3 install virtualenv"
sudo -H -u "$SUDO_USER" bash -c "pip3 install virtualenvwrapper"
fi
continue
fi
yum install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"


@ -1,158 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
IFS=' '
# NOTE: docker_image_name could be a local image built on top of the release/package images.
# NOTE: each entry has to be quoted; docker_container_type, script_name and docker_image_name are separated by spaces.
# "docker_container_type script_name docker_image_name"
# docker_container_type OPTIONS (see the usage note right after the array below):
#   * mgrun -> runs a plain/empty operating system for testing the native memgraph package
#   * mgbuild -> runs the builder container to build memgraph inside it -> it's possible to create builder images using release/package/run.sh
OPERATING_SYSTEMS=(
# "mgrun amzn-2 amazonlinux:2"
# "mgrun centos-7 centos:7"
# "mgrun centos-9 dokken/centos-stream-9"
# "mgrun debian-10 debian:10"
# "mgrun debian-11 debian:11"
# "mgrun fedora-36 fedora:36"
# "mgrun ubuntu-18.04 ubuntu:18.04"
# "mgrun ubuntu-20.04 ubuntu:20.04"
# "mgrun ubuntu-22.04 ubuntu:22.04"
# "mgbuild debian-12 memgraph/memgraph-builder:v5_debian-12"
)
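# To operate on an OS, uncomment its entry above or add a new one, e.g. a
# hypothetical "mgrun ubuntu-24.04 ubuntu:24.04".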
if [ ! "$(docker info)" ]; then
echo "ERROR: Docker is required"
exit 1
fi
print_help () {
echo -e "$0 all\t\t\t\t => start + init all containers in the background"
echo -e "$0 check\t\t\t\t => check all containers"
echo -e "$0 delete\t\t\t\t => stop + remove all containers"
echo -e "$0 copy src_container dst_container => copy build package from src to dst container"
exit 1
}
# NOTE: This is an idempotent operation!
# TODO(gitbuda): Consider making docker_run always delete + start a new container or add a new function.
docker_run () {
cnt_type="$1"
if [[ "$cnt_type" != "mgbuild" && "$cnt_type" != "mgrun" ]]; then
echo "ERROR: Wrong docker_container_type -> valid options are mgbuild, mgrun"
exit 1
fi
cnt_name="$2"
cnt_image="$3"
if [ ! "$(docker ps -q -f name=$cnt_name)" ]; then
if [ "$(docker ps -aq -f status=exited -f name=$cnt_name)" ]; then
echo "Cleanup of the old exited container..."
docker rm $cnt_name
fi
if [[ "$cnt_type" == "mgbuild" ]]; then
docker run -d --volume "$SCRIPT_DIR/../../:/memgraph" --network host --name "$cnt_name" "$cnt_image"
fi
if [[ "$cnt_type" == "mgrun" ]]; then
docker run -d --volume "$SCRIPT_DIR/../../:/memgraph" --network host --name "$cnt_name" "$cnt_image" sleep infinity
fi
fi
echo "The $cnt_image container is active under $cnt_name name!"
}
docker_exec () {
cnt_name="$1"
cnt_cmd="$2"
docker exec -it "$cnt_name" bash -c "$cnt_cmd"
}
docker_stop_and_rm () {
cnt_name="$1"
if [ "$(docker ps -q -f name=$cnt_name)" ]; then
docker stop "$1"
fi
if [ "$(docker ps -aq -f status=exited -f name=$cnt_name)" ]; then
docker rm "$1"
fi
}
# TODO(gitbuda): Make the call to `install NEW_DEPS` configurable; the question is what else would be useful.
start_all () {
for script_docker_pair in "${OPERATING_SYSTEMS[@]}"; do
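# Split the "docker_container_type script_name docker_image_name" triple into fields.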
read -a script_docker <<< "$script_docker_pair"
docker_container_type="${script_docker[0]}"
script_name="${script_docker[1]}"
docker_image="${script_docker[2]}"
docker_name="${docker_container_type}_$script_name"
echo ""
echo "~~~~ OPERATING ON $docker_image as $docker_name..."
docker_run "$docker_container_type" "$docker_name" "$docker_image"
docker_exec "$docker_name" "/memgraph/environment/os/$script_name.sh install NEW_DEPS"
echo "---- DONE EVERYHING FOR $docker_image as $docker_name..."
echo ""
done
}
check_all () {
for script_docker_pair in "${OPERATING_SYSTEMS[@]}"; do
read -a script_docker <<< "$script_docker_pair"
docker_container_type="${script_docker[0]}"
script_name="${script_docker[1]}"
docker_image="${script_docker[2]}"
docker_name="${docker_container_type}_$script_name"
echo ""
echo "~~~~ OPERATING ON $docker_image as $docker_name..."
docker_exec "$docker_name" "/memgraph/environment/os/$script_name.sh check NEW_DEPS"
echo "---- DONE EVERYHING FOR $docker_image as $docker_name..."
echo ""
done
}
delete_all () {
for script_docker_pair in "${OPERATING_SYSTEMS[@]}"; do
read -a script_docker <<< "$script_docker_pair"
docker_container_type="${script_docker[0]}"
script_name="${script_docker[1]}"
docker_image="${script_docker[2]}"
docker_name="${docker_container_type}_$script_name"
docker_stop_and_rm "$docker_name"
echo "~~~~ $docker_image as $docker_name DELETED"
done
}
# TODO(gitbuda): Copying files between containers is a useful util; also support delete, and consider copying a whole folder.
# TODO(gitbuda): Add args: src_cnt dst_cnt abs_path; support both a file and a recursive folder, always delete + copy.
copy_build_package () {
src_container="$1"
dst_container="$2"
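# NOTE: docker cp cannot copy directly between two containers, so the package
# is staged through a temporary directory on the host first.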
src="$src_container:/memgraph/build/output"
tmp_dst="$SCRIPT_DIR/../../build"
mkdir -p "$tmp_dst"
rm -rf "$tmp_dst/output"
dst="$dst_container:/"
docker cp "$src" "$tmp_dst"
docker cp "$tmp_dst/output" "$dst"
}
if [ "$#" -eq 0 ]; then
print_help
else
case $1 in
all)
start_all
;;
check)
check_all
;;
delete)
delete_all
;;
copy) # src_container dst_container
if [ "$#" -ne 3 ]; then
print_help
fi
copy_build_package "$2" "$3"
;;
*)
print_help
;;
esac
fi


@ -1,10 +1,11 @@
#!/bin/bash #!/bin/bash
set -Eeuo pipefail set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh" source "$DIR/../util.sh"
check_operating_system "todo-os-name" check_operating_system "todo-os-name"
check_architecture "todo-arch-name"
TOOLCHAIN_BUILD_DEPS=( TOOLCHAIN_BUILD_DEPS=(
pkg pkg
@ -18,20 +19,6 @@ MEMGRAPH_BUILD_DEPS=(
pkg pkg
) )
MEMGRAPH_TEST_DEPS=(
pkg
)
MEMGRAPH_RUN_DEPS=(
pkg
)
# NEW_DEPS is useful when you want to test the installation of a new package.
# During the test you can put packages like wget curl tar gzip here
NEW_DEPS=(
pkg
)
list() { list() {
echo "$1" echo "$1"
} }


@ -1,10 +1,10 @@
#!/bin/bash #!/bin/bash
set -Eeuo pipefail set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh" source "$DIR/../util.sh"
# IMPORTANT: Deprecated since memgraph v2.12.0.
check_operating_system "ubuntu-18.04" check_operating_system "ubuntu-18.04"
check_architecture "x86_64" check_architecture "x86_64"
@ -25,7 +25,7 @@ TOOLCHAIN_BUILD_DEPS=(
libgmp-dev # for gdb libgmp-dev # for gdb
gperf # for proxygen gperf # for proxygen
libssl-dev libssl-dev
libedit-dev libpcre2-dev libpcre3-dev automake bison # swig libedit-dev libpcre3-dev automake bison # swig
) )
TOOLCHAIN_RUN_DEPS=( TOOLCHAIN_RUN_DEPS=(
@ -41,7 +41,7 @@ TOOLCHAIN_RUN_DEPS=(
MEMGRAPH_BUILD_DEPS=( MEMGRAPH_BUILD_DEPS=(
git # source code control git # source code control
make cmake pkg-config # build system make pkg-config # build system
curl wget # downloading libs curl wget # downloading libs
uuid-dev default-jre-headless # required by antlr uuid-dev default-jre-headless # required by antlr
libreadline-dev # memgraph console libreadline-dev # memgraph console
@ -53,19 +53,9 @@ MEMGRAPH_BUILD_DEPS=(
libcurl4-openssl-dev # mg-requests libcurl4-openssl-dev # mg-requests
sbcl # custom Lisp C++ preprocessing sbcl # custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators doxygen graphviz # source documentation generators
mono-runtime mono-mcs nodejs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # driver tests mono-runtime mono-mcs nodejs zip unzip default-jdk-headless # driver tests
custom-golang1.18.9 # for driver tests
autoconf # for jemalloc code generation autoconf # for jemalloc code generation
libtool # for protobuf code generation libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp2
)
NEW_DEPS=(
wget curl tar gzip
) )
list() { list() {
@ -73,53 +63,11 @@ list() {
} }
check() { check() {
local missing="" check_all_dpkg "$1"
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
} }
install() { install() {
apt update -y apt install -y $1
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == openjdk-17-jdk-headless ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
apt install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
fi
continue
fi
apt install -y "$pkg"
done
} }
deps=$2"[*]" deps=$2"[*]"


@ -1,5 +1,7 @@
#!/bin/bash #!/bin/bash
set -Eeuo pipefail set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh" source "$DIR/../util.sh"
@ -22,7 +24,7 @@ TOOLCHAIN_BUILD_DEPS=(
libgmp-dev # for gdb libgmp-dev # for gdb
gperf # for proxygen gperf # for proxygen
libssl-dev libssl-dev
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig libedit-dev libpcre3-dev automake bison # for swig
) )
TOOLCHAIN_RUN_DEPS=( TOOLCHAIN_RUN_DEPS=(
@ -38,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(
MEMGRAPH_BUILD_DEPS=( MEMGRAPH_BUILD_DEPS=(
git # source code control git # source code control
make cmake pkg-config # build system make pkg-config # build system
curl wget # for downloading libs curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console libreadline-dev # for memgraph console
@ -51,21 +53,10 @@ MEMGRAPH_BUILD_DEPS=(
libcurl4-openssl-dev # mg-requests libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # for driver tests mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
dotnet-sdk-3.1 golang custom-golang1.18.9 nodejs npm # for driver tests dotnet-sdk-3.1 golang nodejs npm
autoconf # for jemalloc code generation autoconf # for jemalloc code generation
libtool # for protobuf code generation libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp2
)
NEW_DEPS=(
wget curl tar gzip
) )
list() { list() {
@ -73,35 +64,12 @@ list() {
} }
check() { check() {
local missing="" check_all_dpkg "$1"
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
} }
install() { install() {
cd "$DIR" cd "$DIR"
export DEBIAN_FRONTEND=noninteractive apt update
apt update -y
apt install -y wget
# If GitHub Actions runner is installed, append LANG to the environment. # If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export. # Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then if [ -d "/home/gh/actions-runner" ]; then
@ -109,16 +77,8 @@ install() {
else else
echo "NOTE: export LANG=en_US.utf8" echo "NOTE: export LANG=en_US.utf8"
fi fi
apt install -y wget
for pkg in $1; do for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == dotnet-sdk-3.1 ]; then if [ "$pkg" == dotnet-sdk-3.1 ]; then
if ! dpkg -s dotnet-sdk-3.1 2>/dev/null >/dev/null; then if ! dpkg -s dotnet-sdk-3.1 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb wget -nv https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
@ -128,15 +88,6 @@ install() {
fi fi
continue continue
fi fi
if [ "$pkg" == openjdk-17-jdk-headless ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
apt install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
fi
continue
fi
apt install -y "$pkg" apt install -y "$pkg"
done done
} }


@ -1,144 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "ubuntu-22.04"
check_architecture "arm64" "aarch64"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
wget # used for archive download
gnupg # used for archive signature verification
tar gzip bzip2 xz-utils unzip # used for archive unpacking
zlib1g-dev # zlib library used for all builds
libexpat1-dev libbabeltrace-dev liblzma-dev python3-dev texinfo # for gdb
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
curl # snappy
file
git # for thrift
libgmp-dev # for gdb
gperf # for proxygen
libssl-dev
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz-utils # used for archive unpacking
zlib1g # zlib library used for all builds
libexpat1 libbabeltrace1 liblzma5 python3 # for gdb
libcurl4 # for cmake
libreadline8 # for cmake and llvm
libffi7 libxml2 # for llvm
libssl-dev # for libevent
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
netcat # tests are using nc to wait for memgraph
python3 python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
python3-yaml # for the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # for driver tests
dotnet-sdk-6.0 golang custom-golang1.18.9 nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp2
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
}
install() {
cd "$DIR"
apt update
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == dotnet-sdk-6.0 ]; then
if ! dpkg -s dotnet-sdk-6.0 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
dpkg -i packages-microsoft-prod.deb
apt-get update
apt-get install -y apt-transport-https dotnet-sdk-6.0
fi
continue
fi
if [ "$pkg" == openjdk-17-jdk-headless ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
apt install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java /usr/lib/jvm/java-11-openjdk-arm64/bin/java
update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-arm64/bin/javac
fi
continue
fi
apt install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"


@ -1,5 +1,7 @@
#!/bin/bash #!/bin/bash
set -Eeuo pipefail set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh" source "$DIR/../util.sh"
@ -22,7 +24,7 @@ TOOLCHAIN_BUILD_DEPS=(
libgmp-dev # for gdb libgmp-dev # for gdb
gperf # for proxygen gperf # for proxygen
libssl-dev libssl-dev
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig libedit-dev libpcre3-dev automake bison # for swig
) )
TOOLCHAIN_RUN_DEPS=( TOOLCHAIN_RUN_DEPS=(
@ -38,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(
MEMGRAPH_BUILD_DEPS=( MEMGRAPH_BUILD_DEPS=(
git # source code control git # source code control
make cmake pkg-config # build system make pkg-config # build system
curl wget # for downloading libs curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console libreadline-dev # for memgraph console
@ -51,21 +53,10 @@ MEMGRAPH_BUILD_DEPS=(
libcurl4-openssl-dev # mg-requests libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # for driver tests mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
dotnet-sdk-6.0 golang custom-golang1.18.9 nodejs npm # for driver tests dotnet-sdk-6.0 golang nodejs npm
autoconf # for jemalloc code generation autoconf # for jemalloc code generation
libtool # for protobuf code generation libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp2
)
NEW_DEPS=(
wget curl tar gzip
) )
list() { list() {
@ -73,34 +64,12 @@ list() {
} }
check() { check() {
local missing="" check_all_dpkg "$1"
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
} }
install() { install() {
cd "$DIR" cd "$DIR"
apt update -y apt update
apt install -y wget
# If GitHub Actions runner is installed, append LANG to the environment. # If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests doesn't work the LANG export. # Python related tests doesn't work the LANG export.
if [ -d "/home/gh/actions-runner" ]; then if [ -d "/home/gh/actions-runner" ]; then
@ -108,16 +77,8 @@ install() {
else else
echo "NOTE: export LANG=en_US.utf8" echo "NOTE: export LANG=en_US.utf8"
fi fi
apt install -y wget
for pkg in $1; do for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == dotnet-sdk-6.0 ]; then if [ "$pkg" == dotnet-sdk-6.0 ]; then
if ! dpkg -s dotnet-sdk-6.0 2>/dev/null >/dev/null; then if ! dpkg -s dotnet-sdk-6.0 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb wget -nv https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
@ -127,15 +88,6 @@ install() {
fi fi
continue continue
fi fi
if [ "$pkg" == openjdk-17-jdk-headless ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
apt install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
fi
continue
fi
apt install -y "$pkg" apt install -y "$pkg"
done done
} }


@ -1,5 +0,0 @@
archives
build
output
*.tar.gz
tmp_build.sh


@ -4,7 +4,7 @@ diff -ur a/CMakeLists.txt b/CMakeLists.txt
@@ -52,9 +52,9 @@ @@ -52,9 +52,9 @@
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHs-c-") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHs-c-")
add_definitions(-D_HAS_EXCEPTIONS=0) add_definitions(-D_HAS_EXCEPTIONS=0)
- # Disable RTTI. - # Disable RTTI.
- string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") - string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
@ -17,7 +17,7 @@ diff -ur a/CMakeLists.txt b/CMakeLists.txt
@@ -77,9 +77,9 @@ @@ -77,9 +77,9 @@
string(REGEX REPLACE "-fexceptions" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") string(REGEX REPLACE "-fexceptions" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions") set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
- # Disable RTTI. - # Disable RTTI.
- string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") - string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti") - set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
@ -25,5 +25,5 @@ diff -ur a/CMakeLists.txt b/CMakeLists.txt
+ # string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") + # string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+ # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti") + # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make # BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make


@ -1,48 +0,0 @@
#!/bin/bash -e
# NOTE: Copy this under memgraph/environment/toolchain/vN/tmp_build.sh, edit and test.
pushd () { command pushd "$@" > /dev/null; }
popd () { command popd "$@" > /dev/null; }
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR"
source "$DIR/../../util.sh"
DISTRO="$(operating_system)"
TOOLCHAIN_VERSION=5
NAME=toolchain-v$TOOLCHAIN_VERSION
PREFIX=/opt/$NAME
function log_tool_name () {
echo ""
echo ""
echo "#### $1 ####"
echo ""
echo ""
}
# HERE: Remove/clear dependencies from a given toolchain.
mkdir -p archives && pushd archives
# HERE: Download dependencies here.
popd
mkdir -p build
pushd build
source $PREFIX/activate
export CC=$PREFIX/bin/clang
export CXX=$PREFIX/bin/clang++
export CFLAGS="$CFLAGS -fPIC"
export PATH=$PREFIX/bin:$PATH
export LD_LIBRARY_PATH=$PREFIX/lib64
COMMON_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$PREFIX
-DCMAKE_PREFIX_PATH=$PREFIX
-DCMAKE_BUILD_TYPE=Release
-DCMAKE_C_COMPILER=$CC
-DCMAKE_CXX_COMPILER=$CXX
-DBUILD_SHARED_LIBS=OFF
-DCMAKE_CXX_STANDARD=20
-DBUILD_TESTING=OFF
-DCMAKE_REQUIRED_INCLUDES=$PREFIX/include
-DCMAKE_POSITION_INDEPENDENT_CODE=ON"
# HERE: Add dependencies to test below.
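# Example (hypothetical version/paths): test a new fmt release against the toolchain.
#   tar -xzf ../archives/fmt-9.1.0.tar.gz && pushd fmt-9.1.0
#   mkdir -p build && pushd build
#   cmake .. $COMMON_CMAKE_FLAGS && make -j$CPUS install
#   popd && popd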


@ -7,7 +7,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo ) CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR" cd "$DIR"
source "$DIR/../../util.sh" source "$DIR/../util.sh"
DISTRO="$(operating_system)" DISTRO="$(operating_system)"
# toolchain version # toolchain version
@ -30,10 +30,10 @@ LLVM_VERSION=11.0.0
SWIG_VERSION=4.0.2 # used only for LLVM compilation SWIG_VERSION=4.0.2 # used only for LLVM compilation
# Check for the dependencies. # Check for the dependencies.
echo "ALL BUILD PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)" echo "ALL BUILD PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS $DIR/../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)" echo "ALL RUN PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS $DIR/../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS
# check installation directory # check installation directory
NAME=toolchain-v$TOOLCHAIN_VERSION NAME=toolchain-v$TOOLCHAIN_VERSION
@ -442,7 +442,7 @@ In order to be able to run all of these tools you should install the following
packages: packages:
\`\`\` \`\`\`
$($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS) $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
\`\`\` \`\`\`
## Usage ## Usage


@ -7,7 +7,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo ) CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR" cd "$DIR"
source "$DIR/../../util.sh" source "$DIR/../util.sh"
DISTRO="$(operating_system)" DISTRO="$(operating_system)"
# toolchain version # toolchain version
@ -31,10 +31,10 @@ LLVM_VERSION_LONG=12.0.1-rc4
SWIG_VERSION=4.0.2 # used only for LLVM compilation SWIG_VERSION=4.0.2 # used only for LLVM compilation
# Check for the dependencies. # Check for the dependencies.
echo "ALL BUILD PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)" echo "ALL BUILD PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS $DIR/../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)" echo "ALL RUN PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS $DIR/../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS
# check installation directory # check installation directory
NAME=toolchain-v$TOOLCHAIN_VERSION NAME=toolchain-v$TOOLCHAIN_VERSION
@ -452,7 +452,7 @@ In order to be able to run all of these tools you should install the following
packages: packages:
\`\`\` \`\`\`
$($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS) $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
\`\`\` \`\`\`
## Usage ## Usage


@ -7,17 +7,9 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo ) CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR" cd "$DIR"
source "$DIR/../../util.sh" source "$DIR/../util.sh"
DISTRO="$(operating_system)" DISTRO="$(operating_system)"
function log_tool_name () {
echo ""
echo ""
echo "#### $1 ####"
echo ""
echo ""
}
for_arm=false for_arm=false
if [[ "$#" -eq 1 ]]; then if [[ "$#" -eq 1 ]]; then
if [[ "$1" == "--for-arm" ]]; then if [[ "$1" == "--for-arm" ]]; then
@ -28,11 +20,9 @@ if [[ "$#" -eq 1 ]]; then
fi fi
fi fi
TOOLCHAIN_STDCXX="${TOOLCHAIN_STDCXX:-libstdc++}" os="$1"
if [[ "$TOOLCHAIN_STDCXX" != "libstdc++" && "$TOOLCHAIN_STDCXX" != "libc++" ]]; then
echo "Only GCC (libstdc++) or LLVM (libc++) C++ standard library implementations are supported." # toolchain version
exit 1
fi
TOOLCHAIN_VERSION=4 TOOLCHAIN_VERSION=4
# package versions used # package versions used
@ -51,15 +41,11 @@ CPPCHECK_VERSION=2.6
LLVM_VERSION=13.0.0 LLVM_VERSION=13.0.0
SWIG_VERSION=4.0.2 # used only for LLVM compilation SWIG_VERSION=4.0.2 # used only for LLVM compilation
# Set the right operating system setup script. # Check for the dependencies.
ENV_SCRIPT="$DIR/../../os/$DISTRO.sh" echo "ALL BUILD PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
if [[ "$for_arm" = true ]]; then $DIR/../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
ENV_SCRIPT="$DIR/../../os/$DISTRO-arm.sh" echo "ALL RUN PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
fi $DIR/../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS
echo "ALL BUILD PACKAGES: $(${ENV_SCRIPT} list TOOLCHAIN_BUILD_DEPS)"
${ENV_SCRIPT} check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $(${ENV_SCRIPT} list TOOLCHAIN_RUN_DEPS)"
${ENV_SCRIPT} check TOOLCHAIN_RUN_DEPS
# check installation directory # check installation directory
NAME=toolchain-v$TOOLCHAIN_VERSION NAME=toolchain-v$TOOLCHAIN_VERSION
@ -113,8 +99,6 @@ if [ ! -f llvm-$LLVM_VERSION.src.tar.xz ]; then
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-tools-extra-$LLVM_VERSION.src.tar.xz wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-tools-extra-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/compiler-rt-$LLVM_VERSION.src.tar.xz wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/compiler-rt-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libunwind-$LLVM_VERSION.src.tar.xz wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libunwind-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxx-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxxabi-$LLVM_VERSION.src.tar.xz
fi fi
if [ ! -f pahole-gdb-master.zip ]; then if [ ! -f pahole-gdb-master.zip ]; then
wget https://github.com/PhilArmstrong/pahole-gdb/archive/master.zip -O pahole-gdb-master.zip wget https://github.com/PhilArmstrong/pahole-gdb/archive/master.zip -O pahole-gdb-master.zip
@ -172,8 +156,6 @@ if [ ! -f llvm-$LLVM_VERSION.src.tar.xz.sig ]; then
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-tools-extra-$LLVM_VERSION.src.tar.xz.sig wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-tools-extra-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/compiler-rt-$LLVM_VERSION.src.tar.xz.sig wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/compiler-rt-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libunwind-$LLVM_VERSION.src.tar.xz.sig wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libunwind-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxx-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxxabi-$LLVM_VERSION.src.tar.xz.sig
fi fi
# list of valid llvm gnupg keys: https://releases.llvm.org/download.html # list of valid llvm gnupg keys: https://releases.llvm.org/download.html
$GPG --keyserver $KEYSERVER --recv-keys 0x474E22316ABF4785A88C6E8EA2C794A986419D8A $GPG --keyserver $KEYSERVER --recv-keys 0x474E22316ABF4785A88C6E8EA2C794A986419D8A
@ -183,8 +165,6 @@ $GPG --verify lld-$LLVM_VERSION.src.tar.xz.sig lld-$LLVM_VERSION.src.tar.xz
$GPG --verify clang-tools-extra-$LLVM_VERSION.src.tar.xz.sig clang-tools-extra-$LLVM_VERSION.src.tar.xz $GPG --verify clang-tools-extra-$LLVM_VERSION.src.tar.xz.sig clang-tools-extra-$LLVM_VERSION.src.tar.xz
$GPG --verify compiler-rt-$LLVM_VERSION.src.tar.xz.sig compiler-rt-$LLVM_VERSION.src.tar.xz $GPG --verify compiler-rt-$LLVM_VERSION.src.tar.xz.sig compiler-rt-$LLVM_VERSION.src.tar.xz
$GPG --verify libunwind-$LLVM_VERSION.src.tar.xz.sig libunwind-$LLVM_VERSION.src.tar.xz $GPG --verify libunwind-$LLVM_VERSION.src.tar.xz.sig libunwind-$LLVM_VERSION.src.tar.xz
$GPG --verify libcxx-$LLVM_VERSION.src.tar.xz.sig libcxx-$LLVM_VERSION.src.tar.xz
$GPG --verify libcxxabi-$LLVM_VERSION.src.tar.xz.sig libcxxabi-$LLVM_VERSION.src.tar.xz
popd popd
@ -192,7 +172,7 @@ popd
mkdir -p build mkdir -p build
pushd build pushd build
log_tool_name "GCC $GCC_VERSION" # compile gcc
if [ ! -f $PREFIX/bin/gcc ]; then if [ ! -f $PREFIX/bin/gcc ]; then
if [ -d gcc-$GCC_VERSION ]; then if [ -d gcc-$GCC_VERSION ]; then
rm -rf gcc-$GCC_VERSION rm -rf gcc-$GCC_VERSION
@ -283,7 +263,7 @@ fi
export PATH=$PREFIX/bin:$PATH export PATH=$PREFIX/bin:$PATH
export LD_LIBRARY_PATH=$PREFIX/lib64 export LD_LIBRARY_PATH=$PREFIX/lib64
log_tool_name "binutils $BINUTILS_VERSION" # compile binutils
if [ ! -f $PREFIX/bin/ld.gold ]; then if [ ! -f $PREFIX/bin/ld.gold ]; then
if [ -d binutils-$BINUTILS_VERSION ]; then if [ -d binutils-$BINUTILS_VERSION ]; then
rm -rf binutils-$BINUTILS_VERSION rm -rf binutils-$BINUTILS_VERSION
@ -347,7 +327,7 @@ if [ ! -f $PREFIX/bin/ld.gold ]; then
popd && popd popd && popd
fi fi
log_tool_name "GDB $GDB_VERSION" # compile gdb
if [ ! -f $PREFIX/bin/gdb ]; then if [ ! -f $PREFIX/bin/gdb ]; then
if [ -d gdb-$GDB_VERSION ]; then if [ -d gdb-$GDB_VERSION ]; then
rm -rf gdb-$GDB_VERSION rm -rf gdb-$GDB_VERSION
@ -383,62 +363,6 @@ if [ ! -f $PREFIX/bin/gdb ]; then
--without-babeltrace \ --without-babeltrace \
--enable-tui \ --enable-tui \
--with-python=python3 --with-python=python3
elif [[ "${DISTRO}" == fedora* ]]; then
# Remove readline, gdb does not compile
env \
CC=gcc \
CXX=g++ \
CFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
CXXFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
CPPFLAGS="-Wdate-time -D_FORTIFY_SOURCE=2 -fPIC" \
LDFLAGS="-Wl,-z,relro" \
PYTHON="" \
../configure \
--build=x86_64-linux-gnu \
--host=x86_64-linux-gnu \
--prefix=$PREFIX \
--disable-maintainer-mode \
--disable-dependency-tracking \
--disable-silent-rules \
--disable-gdbtk \
--disable-shared \
--without-guile \
--with-system-gdbinit=$PREFIX/etc/gdb/gdbinit \
--with-expat \
--with-system-zlib \
--with-lzma \
--with-babeltrace \
--with-intel-pt \
--enable-tui \
--with-python=python3
elif [[ "${DISTRO}" == "amzn-2" ]]; then
# Remove readline, gdb does not compile
env \
CC=gcc \
CXX=g++ \
CFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
CXXFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
CPPFLAGS="-Wdate-time -D_FORTIFY_SOURCE=2 -fPIC" \
LDFLAGS="-Wl,-z,relro" \
PYTHON="" \
../configure \
--build=x86_64-linux-gnu \
--host=x86_64-linux-gnu \
--prefix=$PREFIX \
--disable-maintainer-mode \
--disable-dependency-tracking \
--disable-silent-rules \
--disable-gdbtk \
--disable-shared \
--without-guile \
--with-system-gdbinit=$PREFIX/etc/gdb/gdbinit \
--with-expat \
--with-system-zlib \
--with-lzma \
--with-babeltrace \
--with-intel-pt \
--enable-tui \
--with-python=python3
else else
# https://buildd.debian.org/status/fetch.php?pkg=gdb&arch=amd64&ver=8.2.1-2&stamp=1550831554&raw=0 # https://buildd.debian.org/status/fetch.php?pkg=gdb&arch=amd64&ver=8.2.1-2&stamp=1550831554&raw=0
env \ env \
@ -474,13 +398,13 @@ if [ ! -f $PREFIX/bin/gdb ]; then
popd && popd popd && popd
fi fi
log_tool_name "install pahole" # install pahole
if [ ! -d $PREFIX/share/pahole-gdb ]; then if [ ! -d $PREFIX/share/pahole-gdb ]; then
unzip ../archives/pahole-gdb-master.zip unzip ../archives/pahole-gdb-master.zip
mv pahole-gdb-master $PREFIX/share/pahole-gdb mv pahole-gdb-master $PREFIX/share/pahole-gdb
fi fi
log_tool_name "setup system gdbinit" # setup system gdbinit
if [ ! -f $PREFIX/etc/gdb/gdbinit ]; then if [ ! -f $PREFIX/etc/gdb/gdbinit ]; then
mkdir -p $PREFIX/etc/gdb mkdir -p $PREFIX/etc/gdb
cat >$PREFIX/etc/gdb/gdbinit <<EOF cat >$PREFIX/etc/gdb/gdbinit <<EOF
@ -506,7 +430,7 @@ end
EOF EOF
fi fi
log_tool_name "cmake $CMAKE_VERSION" # compile cmake
if [ ! -f $PREFIX/bin/cmake ]; then if [ ! -f $PREFIX/bin/cmake ]; then
if [ -d cmake-$CMAKE_VERSION ]; then if [ -d cmake-$CMAKE_VERSION ]; then
rm -rf cmake-$CMAKE_VERSION rm -rf cmake-$CMAKE_VERSION
@ -532,7 +456,7 @@ if [ ! -f $PREFIX/bin/cmake ]; then
popd && popd popd && popd
fi fi
log_tool_name "cppcheck $CPPCHECK_VERSION" # compile cppcheck
if [ ! -f $PREFIX/bin/cppcheck ]; then if [ ! -f $PREFIX/bin/cppcheck ]; then
if [ -d cppcheck-$CPPCHECK_VERSION ]; then if [ -d cppcheck-$CPPCHECK_VERSION ]; then
rm -rf cppcheck-$CPPCHECK_VERSION rm -rf cppcheck-$CPPCHECK_VERSION
@ -556,7 +480,7 @@ if [ ! -f $PREFIX/bin/cppcheck ]; then
popd popd
fi fi
log_tool_name "swig $SWIG_VERSION" # compile swig
if [ ! -d swig-$SWIG_VERSION/install ]; then if [ ! -d swig-$SWIG_VERSION/install ]; then
if [ -d swig-$SWIG_VERSION ]; then if [ -d swig-$SWIG_VERSION ]; then
rm -rf swig-$SWIG_VERSION rm -rf swig-$SWIG_VERSION
@ -572,7 +496,7 @@ if [ ! -d swig-$SWIG_VERSION/install ]; then
popd && popd popd && popd
fi fi
log_tool_name "LLVM $LLVM_VERSION" # compile llvm
if [ ! -f $PREFIX/bin/clang ]; then if [ ! -f $PREFIX/bin/clang ]; then
if [ -d llvm-$LLVM_VERSION ]; then if [ -d llvm-$LLVM_VERSION ]; then
rm -rf llvm-$LLVM_VERSION rm -rf llvm-$LLVM_VERSION
@ -589,19 +513,8 @@ if [ ! -f $PREFIX/bin/clang ]; then
mv compiler-rt-$LLVM_VERSION.src/ llvm-$LLVM_VERSION/projects/compiler-rt mv compiler-rt-$LLVM_VERSION.src/ llvm-$LLVM_VERSION/projects/compiler-rt
tar -xvf ../archives/libunwind-$LLVM_VERSION.src.tar.xz tar -xvf ../archives/libunwind-$LLVM_VERSION.src.tar.xz
mv libunwind-$LLVM_VERSION.src/include/mach-o llvm-$LLVM_VERSION/tools/lld/include mv libunwind-$LLVM_VERSION.src/include/mach-o llvm-$LLVM_VERSION/tools/lld/include
# The following is required because of libc++
tar -xvf ../archives/libcxx-$LLVM_VERSION.src.tar.xz
mv libcxx-$LLVM_VERSION.src llvm-$LLVM_VERSION/projects/libcxx
tar -xvf ../archives/libcxxabi-$LLVM_VERSION.src.tar.xz
mv libcxxabi-$LLVM_VERSION.src llvm-$LLVM_VERSION/projects/libcxxabi
# NOTE: We moved part of the libunwind in one of the previous step.
rm -r libunwind-$LLVM_VERSION.src
tar -xvf ../archives/libunwind-$LLVM_VERSION.src.tar.xz
mv libunwind-$LLVM_VERSION.src llvm-$LLVM_VERSION/projects/libunwind
pushd llvm-$LLVM_VERSION pushd llvm-$LLVM_VERSION
mkdir -p build && pushd build mkdir build && pushd build
# activate swig # activate swig
export PATH=$DIR/build/swig-$SWIG_VERSION/install/bin:$PATH export PATH=$DIR/build/swig-$SWIG_VERSION/install/bin:$PATH
# influenced by: https://buildd.debian.org/status/fetch.php?pkg=llvm-toolchain-7&arch=amd64&ver=1%3A7.0.1%7E%2Brc2-1%7Eexp1&stamp=1541506173&raw=0 # influenced by: https://buildd.debian.org/status/fetch.php?pkg=llvm-toolchain-7&arch=amd64&ver=1%3A7.0.1%7E%2Brc2-1%7Eexp1&stamp=1541506173&raw=0
@ -654,7 +567,7 @@ In order to be able to run all of these tools you should install the following
packages: packages:
\`\`\` \`\`\`
$(${ENV_SCRIPT} list TOOLCHAIN_RUN_DEPS) $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
\`\`\` \`\`\`
## Usage ## Usage
@ -711,7 +624,6 @@ export PS1="($NAME) \$PS1"
export LD_LIBRARY_PATH=$PREFIX/lib:$PREFIX/lib64 export LD_LIBRARY_PATH=$PREFIX/lib:$PREFIX/lib64
export CXXFLAGS=-isystem\ $PREFIX/include\ \$CXXFLAGS export CXXFLAGS=-isystem\ $PREFIX/include\ \$CXXFLAGS
export CFLAGS=-isystem\ $PREFIX/include\ \$CFLAGS export CFLAGS=-isystem\ $PREFIX/include\ \$CFLAGS
export VENV=$PREFIX
# disable root # disable root
function su () { function su () {
@ -763,7 +675,7 @@ PROXYGEN_SHA256=5360a8ccdfb2f5a6c7b3eed331ec7ab0e2c792d579c6fff499c85c516c11fe14
SNAPPY_SHA256=75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7 SNAPPY_SHA256=75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7
SNAPPY_VERSION=1.1.9 SNAPPY_VERSION=1.1.9
XZ_VERSION=5.2.5 # for LZMA XZ_VERSION=5.2.5 # for LZMA
ZLIB_VERSION=1.2.13 ZLIB_VERSION=1.2.12
ZSTD_VERSION=1.5.0 ZSTD_VERSION=1.5.0
WANGLE_SHA256=1002e9c32b6f4837f6a760016e3b3e22f3509880ef3eaad191c80dc92655f23f WANGLE_SHA256=1002e9c32b6f4837f6a760016e3b3e22f3509880ef3eaad191c80dc92655f23f
@ -908,11 +820,7 @@ source $PREFIX/activate
export CC=$PREFIX/bin/clang export CC=$PREFIX/bin/clang
export CXX=$PREFIX/bin/clang++ export CXX=$PREFIX/bin/clang++
export CFLAGS="$CFLAGS -fPIC" export CFLAGS="$CFLAGS -fPIC"
if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then export CXXFLAGS="$CXXFLAGS -fPIC"
export CXXFLAGS="$CXXFLAGS -fPIC"
else
export CXXFLAGS="$CXXFLAGS -fPIC -stdlib=libc++"
fi
COMMON_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$PREFIX COMMON_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$PREFIX
-DCMAKE_PREFIX_PATH=$PREFIX -DCMAKE_PREFIX_PATH=$PREFIX
-DCMAKE_BUILD_TYPE=Release -DCMAKE_BUILD_TYPE=Release
@ -926,7 +834,7 @@ COMMON_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$PREFIX
COMMON_CONFIGURE_FLAGS="--enable-shared=no --prefix=$PREFIX" COMMON_CONFIGURE_FLAGS="--enable-shared=no --prefix=$PREFIX"
COMMON_MAKE_INSTALL_FLAGS="-j$CPUS BUILD_SHARED=no PREFIX=$PREFIX install" COMMON_MAKE_INSTALL_FLAGS="-j$CPUS BUILD_SHARED=no PREFIX=$PREFIX install"
log_tool_name "bzip2 $BZIP2_VERSION" # install bzip2
if [ ! -f $PREFIX/include/bzlib.h ]; then if [ ! -f $PREFIX/include/bzlib.h ]; then
if [ -d bzip2-$BZIP2_VERSION ]; then if [ -d bzip2-$BZIP2_VERSION ]; then
rm -rf bzip2-$BZIP2_VERSION rm -rf bzip2-$BZIP2_VERSION
@ -937,7 +845,7 @@ if [ ! -f $PREFIX/include/bzlib.h ]; then
popd popd
fi fi
log_tool_name "fmt $FMT_VERSION" # install fmt
if [ ! -d $PREFIX/include/fmt ]; then if [ ! -d $PREFIX/include/fmt ]; then
if [ -d fmt-$FMT_VERSION ]; then if [ -d fmt-$FMT_VERSION ]; then
rm -rf fmt-$FMT_VERSION rm -rf fmt-$FMT_VERSION
@ -950,7 +858,7 @@ if [ ! -d $PREFIX/include/fmt ]; then
popd && popd popd && popd
fi fi
log_tool_name "lz4 $LZ4_VERSION" # install lz4
if [ ! -f $PREFIX/include/lz4.h ]; then if [ ! -f $PREFIX/include/lz4.h ]; then
if [ -d lz4-$LZ4_VERSION ]; then if [ -d lz4-$LZ4_VERSION ]; then
rm -rf lz4-$LZ4_VERSION rm -rf lz4-$LZ4_VERSION
@ -961,7 +869,7 @@ if [ ! -f $PREFIX/include/lz4.h ]; then
popd popd
fi fi
log_tool_name "xz $XZ_VERSION" # install xz
if [ ! -f $PREFIX/include/lzma.h ]; then if [ ! -f $PREFIX/include/lzma.h ]; then
if [ -d xz-$XZ_VERSION ]; then if [ -d xz-$XZ_VERSION ]; then
rm -rf xz-$XZ_VERSION rm -rf xz-$XZ_VERSION
@ -973,7 +881,7 @@ if [ ! -f $PREFIX/include/lzma.h ]; then
popd popd
fi fi
log_tool_name "zlib $ZLIB_VERSION" # install zlib
if [ ! -f $PREFIX/include/zlib.h ]; then if [ ! -f $PREFIX/include/zlib.h ]; then
if [ -d zlib-$ZLIB_VERSION ]; then if [ -d zlib-$ZLIB_VERSION ]; then
rm -rf zlib-$ZLIB_VERSION rm -rf zlib-$ZLIB_VERSION
@ -987,7 +895,7 @@ if [ ! -f $PREFIX/include/zlib.h ]; then
popd && popd popd && popd
fi fi
log_tool_name "zstd $ZSTD_VERSION" # install zstd
if [ ! -f $PREFIX/include/zstd.h ]; then if [ ! -f $PREFIX/include/zstd.h ]; then
if [ -d zstd-$ZSTD_VERSION ]; then if [ -d zstd-$ZSTD_VERSION ]; then
rm -rf zstd-$ZSTD_VERSION rm -rf zstd-$ZSTD_VERSION
@ -1002,8 +910,7 @@ if [ ! -f $PREFIX/include/zstd.h ]; then
popd && popd popd && popd
fi fi
# TODO(gitbuda): Freeze jemalloc version. #install jemalloc
log_tool_name "jemalloc"
if [ ! -d $PREFIX/include/jemalloc ]; then if [ ! -d $PREFIX/include/jemalloc ]; then
if [ -d jemalloc ]; then if [ -d jemalloc ]; then
rm -rf jemalloc rm -rf jemalloc
@ -1020,7 +927,7 @@ if [ ! -d $PREFIX/include/jemalloc ]; then
popd popd
fi fi
log_tool_name "BOOST $BOOST_VERSION" # install boost
if [ ! -d $PREFIX/include/boost ]; then if [ ! -d $PREFIX/include/boost ]; then
if [ -d boost_$BOOST_VERSION_UNDERSCORES ]; then if [ -d boost_$BOOST_VERSION_UNDERSCORES ]; then
rm -rf boost_$BOOST_VERSION_UNDERSCORES rm -rf boost_$BOOST_VERSION_UNDERSCORES
@ -1028,24 +935,15 @@ if [ ! -d $PREFIX/include/boost ]; then
tar -xzf ../archives/boost_$BOOST_VERSION_UNDERSCORES.tar.gz
pushd boost_$BOOST_VERSION_UNDERSCORES
./bootstrap.sh --prefix=$PREFIX --with-toolset=clang --with-python=python3 --without-icu
if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then
./b2 toolset=clang -j$CPUS install variant=release link=static cxxstd=20 --disable-icu \
-sZLIB_SOURCE="$PREFIX" -sZLIB_INCLUDE="$PREFIX/include" -sZLIB_LIBPATH="$PREFIX/lib" \
-sBZIP2_SOURCE="$PREFIX" -sBZIP2_INCLUDE="$PREFIX/include" -sBZIP2_LIBPATH="$PREFIX/lib" \
-sLZMA_SOURCE="$PREFIX" -sLZMA_INCLUDE="$PREFIX/include" -sLZMA_LIBPATH="$PREFIX/lib" \
-sZSTD_SOURCE="$PREFIX" -sZSTD_INCLUDE="$PREFIX/include" -sZSTD_LIBPATH="$PREFIX/lib"
else
./b2 toolset=clang -j$CPUS install variant=release link=static cxxstd=20 --disable-icu \
cxxflags="-stdlib=libc++" linkflags="-stdlib=libc++" \
-sZLIB_SOURCE="$PREFIX" -sZLIB_INCLUDE="$PREFIX/include" -sZLIB_LIBPATH="$PREFIX/lib" \
-sBZIP2_SOURCE="$PREFIX" -sBZIP2_INCLUDE="$PREFIX/include" -sBZIP2_LIBPATH="$PREFIX/lib" \
-sLZMA_SOURCE="$PREFIX" -sLZMA_INCLUDE="$PREFIX/include" -sLZMA_LIBPATH="$PREFIX/lib" \
-sZSTD_SOURCE="$PREFIX" -sZSTD_INCLUDE="$PREFIX/include" -sZSTD_LIBPATH="$PREFIX/lib"
fi
popd
fi
log_tool_name "double-conversion $DOUBLE_CONVERSION_VERSION" # install double-conversion
if [ ! -d $PREFIX/include/double-conversion ]; then if [ ! -d $PREFIX/include/double-conversion ]; then
if [ -d double-conversion-$DOUBLE_CONVERSION_VERSION ]; then if [ -d double-conversion-$DOUBLE_CONVERSION_VERSION ]; then
rm -rf double-conversion-$DOUBLE_CONVERSION_VERSION rm -rf double-conversion-$DOUBLE_CONVERSION_VERSION
@ -1060,8 +958,7 @@ if [ ! -d $PREFIX/include/double-conversion ]; then
popd && popd popd && popd
fi fi
# TODO(gitbuda): Freeze gflags version. # install gflags
log_tool_name "gflags"
if [ ! -d $PREFIX/include/gflags ]; then if [ ! -d $PREFIX/include/gflags ]; then
if [ -d gflags ]; then if [ -d gflags ]; then
rm -rf gflags rm -rf gflags
@ -1080,7 +977,7 @@ if [ ! -d $PREFIX/include/gflags ]; then
popd && popd popd && popd
fi fi
log_tool_name "libunwind $LIBUNWIND_VERSION" # install libunwind
if [ ! -f $PREFIX/include/libunwind.h ]; then if [ ! -f $PREFIX/include/libunwind.h ]; then
if [ -d libunwind-$LIBUNWIND_VERSION ]; then if [ -d libunwind-$LIBUNWIND_VERSION ]; then
rm -rf libunwind-$LIBUNWIND_VERSION rm -rf libunwind-$LIBUNWIND_VERSION
@ -1093,7 +990,7 @@ if [ ! -f $PREFIX/include/libunwind.h ]; then
popd popd
fi fi
log_tool_name "glog $GLOG_VERSION" # install glog
if [ ! -d $PREFIX/include/glog ]; then if [ ! -d $PREFIX/include/glog ]; then
if [ -d glog-$GLOG_VERSION ]; then if [ -d glog-$GLOG_VERSION ]; then
rm -rf glog-$GLOG_VERSION rm -rf glog-$GLOG_VERSION
@ -1107,7 +1004,7 @@ if [ ! -d $PREFIX/include/glog ]; then
popd && popd popd && popd
fi fi
log_tool_name "libevent $LIBEVENT_VERSION" # install libevent
if [ ! -d $PREFIX/include/event2 ]; then if [ ! -d $PREFIX/include/event2 ]; then
if [ -d libevent-$LIBEVENT_VERSION ]; then if [ -d libevent-$LIBEVENT_VERSION ]; then
rm -rf libevent-$LIBEVENT_VERSION rm -rf libevent-$LIBEVENT_VERSION
@ -1126,7 +1023,7 @@ if [ ! -d $PREFIX/include/event2 ]; then
popd && popd popd && popd
fi fi
log_tool_name "snappy $SNAPPY_VERSION" # install snappy
if [ ! -f $PREFIX/include/snappy.h ]; then if [ ! -f $PREFIX/include/snappy.h ]; then
if [ -d snappy-$SNAPPY_VERSION ]; then if [ -d snappy-$SNAPPY_VERSION ]; then
rm -rf snappy-$SNAPPY_VERSION rm -rf snappy-$SNAPPY_VERSION
@ -1144,7 +1041,7 @@ if [ ! -f $PREFIX/include/snappy.h ]; then
popd && popd popd && popd
fi fi
log_tool_name "libsodium $LIBSODIUM_VERSION" # install libsodium
if [ ! -f $PREFIX/include/sodium.h ]; then if [ ! -f $PREFIX/include/sodium.h ]; then
if [ -d libsodium-$LIBSODIUM_VERSION ]; then if [ -d libsodium-$LIBSODIUM_VERSION ]; then
rm -rf libsodium-$LIBSODIUM_VERSION rm -rf libsodium-$LIBSODIUM_VERSION
@ -1156,7 +1053,7 @@ if [ ! -f $PREFIX/include/sodium.h ]; then
popd popd
fi fi
log_tool_name "libaio $LIBAIO_VERSION" # install libaio
if [ ! -f $PREFIX/include/libaio.h ]; then if [ ! -f $PREFIX/include/libaio.h ]; then
if [ -d libaio-$LIBAIO_VERSION ]; then if [ -d libaio-$LIBAIO_VERSION ]; then
rm -rf libaio-$LIBAIO_VERSION rm -rf libaio-$LIBAIO_VERSION
@ -1167,121 +1064,114 @@ if [ ! -f $PREFIX/include/libaio.h ]; then
popd popd
fi fi
if [[ "${DISTRO}" != "amzn-2" ]]; then # install folly
log_tool_name "folly $FBLIBS_VERSION" if [ ! -d $PREFIX/include/folly ]; then
if [ ! -d $PREFIX/include/folly ]; then if [ -d folly-$FBLIBS_VERSION ]; then
if [ -d folly-$FBLIBS_VERSION ]; then rm -rf folly-$FBLIBS_VERSION
rm -rf folly-$FBLIBS_VERSION
fi
mkdir folly-$FBLIBS_VERSION
tar -xzf ../archives/folly-$FBLIBS_VERSION.tar.gz -C folly-$FBLIBS_VERSION
pushd folly-$FBLIBS_VERSION
patch -p1 < ../../folly.patch
# build is used by facebook builder
mkdir _build
pushd _build
cmake .. $COMMON_CMAKE_FLAGS \
-DBOOST_LINK_STATIC=ON \
-DBUILD_TESTS=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DCXX_STD="c++20"
make -j$CPUS install
popd && popd
fi fi
mkdir folly-$FBLIBS_VERSION
tar -xzf ../archives/folly-$FBLIBS_VERSION.tar.gz -C folly-$FBLIBS_VERSION
pushd folly-$FBLIBS_VERSION
patch -p1 < ../../folly.patch
# build is used by facebook builder
mkdir _build
pushd _build
cmake .. $COMMON_CMAKE_FLAGS \
-DBOOST_LINK_STATIC=ON \
-DBUILD_TESTS=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DCXX_STD="c++20"
make -j$CPUS install
popd && popd
fi
log_tool_name "fizz $FBLIBS_VERSION" # install fizz
if [ ! -d $PREFIX/include/fizz ]; then if [ ! -d $PREFIX/include/fizz ]; then
if [ -d fizz-$FBLIBS_VERSION ]; then if [ -d fizz-$FBLIBS_VERSION ]; then
rm -rf fizz-$FBLIBS_VERSION rm -rf fizz-$FBLIBS_VERSION
fi
mkdir fizz-$FBLIBS_VERSION
tar -xzf ../archives/fizz-$FBLIBS_VERSION.tar.gz -C fizz-$FBLIBS_VERSION
pushd fizz-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
cmake ../fizz $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_EXAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF
make -j$CPUS install
popd && popd
fi fi
mkdir fizz-$FBLIBS_VERSION
tar -xzf ../archives/fizz-$FBLIBS_VERSION.tar.gz -C fizz-$FBLIBS_VERSION
pushd fizz-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
cmake ../fizz $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_EXAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF
make -j$CPUS install
popd && popd
fi
log_tool_name "wangle FBLIBS_VERSION" # install wangle
if [ ! -d $PREFIX/include/wangle ]; then if [ ! -d $PREFIX/include/wangle ]; then
if [ -d wangle-$FBLIBS_VERSION ]; then if [ -d wangle-$FBLIBS_VERSION ]; then
rm -rf wangle-$FBLIBS_VERSION rm -rf wangle-$FBLIBS_VERSION
fi
mkdir wangle-$FBLIBS_VERSION
tar -xzf ../archives/wangle-$FBLIBS_VERSION.tar.gz -C wangle-$FBLIBS_VERSION
pushd wangle-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
cmake ../wangle $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_EXAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF
make -j$CPUS install
popd && popd
fi fi
mkdir wangle-$FBLIBS_VERSION
tar -xzf ../archives/wangle-$FBLIBS_VERSION.tar.gz -C wangle-$FBLIBS_VERSION
pushd wangle-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
cmake ../wangle $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_EXAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF
make -j$CPUS install
popd && popd
fi
log_tool_name "proxygen $FBLIBS_VERSION" # install proxygen
if [ ! -d $PREFIX/include/proxygen ]; then if [ ! -d $PREFIX/include/proxygen ]; then
if [ -d proxygen-$FBLIBS_VERSION ]; then if [ -d proxygen-$FBLIBS_VERSION ]; then
rm -rf proxygen-$FBLIBS_VERSION rm -rf proxygen-$FBLIBS_VERSION
fi
mkdir proxygen-$FBLIBS_VERSION
tar -xzf ../archives/proxygen-$FBLIBS_VERSION.tar.gz -C proxygen-$FBLIBS_VERSION
pushd proxygen-$FBLIBS_VERSION
patch -p1 < ../../proxygen.patch
# build is used by facebook builder
mkdir _build
pushd _build
cmake .. $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_SAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DBUILD_QUIC=OFF
make -j$CPUS install
popd && popd
fi fi
mkdir proxygen-$FBLIBS_VERSION
tar -xzf ../archives/proxygen-$FBLIBS_VERSION.tar.gz -C proxygen-$FBLIBS_VERSION
pushd proxygen-$FBLIBS_VERSION
patch -p1 < ../../proxygen.patch
# build is used by facebook builder
mkdir _build
pushd _build
cmake .. $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_SAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DBUILD_QUIC=OFF
make -j$CPUS install
popd && popd
fi
log_tool_name "flex $FBLIBS_VERSION" # install flex
if [ ! -f $PREFIX/include/FlexLexer.h ]; then if [ ! -f $PREFIX/include/FlexLexer.h ]; then
if [ -d flex-$FLEX_VERSION ]; then if [ -d flex-$FLEX_VERSION ]; then
rm -rf flex-$FLEX_VERSION rm -rf flex-$FLEX_VERSION
fi
tar -xzf ../archives/flex-$FLEX_VERSION.tar.gz
pushd flex-$FLEX_VERSION
./configure $COMMON_CONFIGURE_FLAGS
make -j$CPUS install
popd
fi fi
tar -xzf ../archives/flex-$FLEX_VERSION.tar.gz
pushd flex-$FLEX_VERSION
./configure $COMMON_CONFIGURE_FLAGS
make -j$CPUS install
popd
fi
log_tool_name "fbthrift $FBLIBS_VERSION" # install fbthrift
if [ ! -d $PREFIX/include/thrift ]; then if [ ! -d $PREFIX/include/thrift ]; then
if [ -d fbthrift-$FBLIBS_VERSION ]; then if [ -d fbthrift-$FBLIBS_VERSION ]; then
rm -rf fbthrift-$FBLIBS_VERSION rm -rf fbthrift-$FBLIBS_VERSION
fi
git clone --depth 1 --branch v$FBLIBS_VERSION https://github.com/facebook/fbthrift.git fbthrift-$FBLIBS_VERSION
pushd fbthrift-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then
CMAKE_CXX_FLAGS="-fsized-deallocation"
else
CMAKE_CXX_FLAGS="-fsized-deallocation -stdlib=libc++"
fi
cmake .. $COMMON_CMAKE_FLAGS \
-Denable_tests=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"
make -j$CPUS install
popd
fi fi
git clone --depth 1 --branch v$FBLIBS_VERSION https://github.com/facebook/fbthrift.git fbthrift-$FBLIBS_VERSION
pushd fbthrift-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
cmake .. $COMMON_CMAKE_FLAGS \
-Denable_tests=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DCMAKE_CXX_FLAGS=-fsized-deallocation
make -j$CPUS install
popd
fi fi
popd
@ -1289,7 +1179,7 @@ popd
# create toolchain archive
if [ ! -f $NAME-binaries-$DISTRO.tar.gz ]; then
DISTRO_FULL_NAME=${DISTRO}
if [[ "${DISTRO}" == centos* ]] || [[ "${DISTRO}" == fedora* ]]; then if [[ "${DISTRO}" == centos* ]]; then
if [[ "$for_arm" = "true" ]]; then
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-aarch64"
else
@ -1302,12 +1192,7 @@ if [ ! -f $NAME-binaries-$DISTRO.tar.gz ]; then
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-amd64"
fi
fi
if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then
# Pass because infra scripts assume there is no C++ standard lib in the archive name.
echo "NOTE: Not adding anything to the archive name because the GCC C++ standard lib is used."
else
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-libc++"
fi
tar --owner=root --group=root -cpvzf $NAME-binaries-$DISTRO_FULL_NAME.tar.gz -C /opt $NAME
fi

File diff suppressed because it is too large

View File

@ -1,42 +0,0 @@
#!/bin/bash -ex
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
PREFIX=/opt/toolchain-v5
# NOTE: Oftentimes, when versions in the build script are changed, something
# doesn't work. To avoid rebuilding the whole toolchain just to rebuild a
# specific lib from scratch, uncomment the relevant line below and run this
# script. Don't forget to comment it back to avoid unnecessary deletes the
# next time you run this script.
# rm -rf "$DIR/build"
# rm -rf "$DIR/output"
# rm -rf "$PREFIX/bin/gcc"
# rm -rf "$PREFIX/bin/ld.gold"
# rm -rf "$PREFIX/bin/gdb"
# rm -rf "$PREFIX/bin/cmake"
# rm -rf "$PREFIX/bin/clang"
# rm -rf "$PREFIX/include/bzlib.h"
# rm -rf "$PREFIX/include/fmt"
# rm -rf "$PREFIX/include/lz4.h"
# rm -rf "$PREFIX/include/lzma.h"
# rm -rf "$PREFIX/include/zlib.h"
# rm -rf "$PREFIX/include/zstd.h"
# rm -rf "$PREFIX/include/jemalloc"
# rm -rf "$PREFIX/include/boost"
# rm -rf "$PREFIX/include/double-conversion"
# rm -rf "$PREFIX/include/gflags"
# rm -rf "$PREFIX/include/libunwind.h"
# rm -rf "$PREFIX/include/glog"
# rm -rf "$PREFIX/include/event2"
# rm -rf "$PREFIX/include/sodium.h"
# rm -rf "$PREFIX/include/libaio.h"
# rm -rf "$PREFIX/include/FlexLexer.h"
# rm -rf "$PREFIX/include/snappy.h"
# rm -rf "$PREFIX/include/fizz"
# rm -rf "$PREFIX/include/folly"
# rm -rf "$PREFIX/include/proxygen"
# rm -rf "$PREFIX/include/wangle"
# rm -rf "$PREFIX/include/thrift"
# rm -rf "$PREFIX"

View File

@ -1,41 +0,0 @@
diff -ur a/folly/CMakeLists.txt b/folly/CMakeLists.txt
--- a/folly/CMakeLists.txt 2021-12-12 23:10:42.000000000 +0100
+++ b/folly/CMakeLists.txt 2022-02-03 15:19:41.349693134 +0100
@@ -28,7 +28,6 @@
)
add_subdirectory(experimental/exception_tracer)
-add_subdirectory(logging/example)
if (PYTHON_EXTENSIONS)
# Create tree of symbolic links in structure required for successful
diff -ur a/folly/experimental/exception_tracer/ExceptionTracerLib.cpp b/folly/experimental/exception_tracer/ExceptionTracerLib.cpp
--- a/folly/experimental/exception_tracer/ExceptionTracerLib.cpp 2021-12-12 23:10:42.000000000 +0100
+++ b/folly/experimental/exception_tracer/ExceptionTracerLib.cpp 2022-02-03 15:19:11.003368891 +0100
@@ -96,6 +96,7 @@
#define __builtin_unreachable()
#endif
+#if 0
namespace __cxxabiv1 {
void __cxa_throw(
@@ -154,5 +155,5 @@
}
} // namespace std
-
+#endif
#endif // defined(__GLIBCXX__)
diff -ur a/folly/Portability.h b/folly/Portability.h
--- a/folly/Portability.h 2021-12-12 23:10:42.000000000 +0100
+++ b/folly/Portability.h 2022-02-03 15:19:11.003368891 +0100
@@ -566,7 +566,7 @@
#define FOLLY_HAS_COROUTINES 0
#elif (__cpp_coroutines >= 201703L || __cpp_impl_coroutine >= 201902L) && \
(__has_include(<coroutine>) || __has_include(<experimental/coroutine>))
-#define FOLLY_HAS_COROUTINES 1
+#define FOLLY_HAS_COROUTINES 0
// This is mainly to workaround bugs triggered by LTO, when stack allocated
// variables in await_suspend end up on a coroutine frame.
#define FOLLY_CORO_AWAIT_SUSPEND_NONTRIVIAL_ATTRIBUTES FOLLY_NOINLINE

View File

@ -1,26 +0,0 @@
diff --git a/folly/CMakeLists.txt b/folly/CMakeLists.txt
index e0e16df..471131e 100644
--- a/folly/CMakeLists.txt
+++ b/folly/CMakeLists.txt
@@ -28,7 +28,7 @@ install(
)
add_subdirectory(experimental/exception_tracer)
-add_subdirectory(logging/example)
+# add_subdirectory(logging/example)
if (PYTHON_EXTENSIONS)
# Create tree of symbolic links in structure required for successful
diff --git a/folly/Portability.h b/folly/Portability.h
index 365ef1b..42d24b8 100644
--- a/folly/Portability.h
+++ b/folly/Portability.h
@@ -560,7 +560,7 @@ constexpr auto kCpplibVer = 0;
(defined(__cpp_coroutines) && __cpp_coroutines >= 201703L) || \
(defined(__cpp_impl_coroutine) && __cpp_impl_coroutine >= 201902L)) && \
(__has_include(<coroutine>) || __has_include(<experimental/coroutine>))
-#define FOLLY_HAS_COROUTINES 1
+#define FOLLY_HAS_COROUTINES 0
// This is mainly to workaround bugs triggered by LTO, when stack allocated
// variables in await_suspend end up on a coroutine frame.
#define FOLLY_CORO_AWAIT_SUSPEND_NONTRIVIAL_ATTRIBUTES FOLLY_NOINLINE

View File

@ -1,29 +0,0 @@
diff -ur a/CMakeLists.txt b/CMakeLists.txt
--- a/CMakeLists.txt 2021-05-05 00:53:34.000000000 +0200
+++ b/CMakeLists.txt 2022-01-27 17:18:34.758302398 +0100
@@ -52,9 +52,9 @@
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHs-c-")
add_definitions(-D_HAS_EXCEPTIONS=0)
- # Disable RTTI.
- string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
+ # # Disable RTTI.
+ # string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+ # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
else(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# Use -Wall for clang and gcc.
if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
@@ -77,9 +77,9 @@
string(REGEX REPLACE "-fexceptions" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
- # Disable RTTI.
- string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
+ # # Disable RTTI.
+ # string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+ # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make

View File

@ -1,75 +0,0 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBEzEOZIBEACxg/IuXERlDB48JBWmF4NxNUuuup1IhJAJyFGFSKh3OGAO2Ard
sNuRLjANsFXA7m7P5eTFcG+BoHHuAVYmKnI3PPZtHVLnUt4pGItPczQZ2BE1WpcI
ayjGTBJeKItX3Npqg9D/odO9WWS1i3FQPVdrLn0YH37/BA66jeMQCRo7g7GLpaNf
IrvYGsqTbxCwsmA37rpE7oyU4Yrf74HT091WBsRIoq/MelhbxTDMR8eu/dUGZQVc
Kj3lN55RepwWwUUKyqarY0zMt4HkFJ7v7yRL+Cvzy92Ouv4Wf2FlhNtEs5LE4Tax
W0PO5AEmUoKjX87SezQK0f652018b4u6Ex52cY7p+n5TII/UyoowH6+tY8UHo9yb
fStrqgNE/mY2bhA6+AwCaOUGsFzVVPTbjtxL3HacUP/jlA1h78V8VTvTs5d55iG7
jSqR9o05wje8rwNiXXK0xtiJahyNzL97Kn/DgPSqPIi45G+8nxWSPFM5eunBKRl9
vAnsvwrdPRsR6YR3uMHTuVhQX9/CY891MHkaZJ6wydWtKt3yQwJLYqwo5d4DwnUX
CduUwSKv+6RmtWI5ZmTQYOcBRcZyGKml9X9Q8iSbm6cnpFXmLrNQwCJN+D3SiYGc
MtbltZo0ysPMa6Xj5xFaYqWk/BI4iLb2Gs+ByGo/+a0Eq4XYBMOpitNniQARAQAB
tCdMYXNzZSBDb2xsaW4gPGxhc3NlLmNvbGxpbkB0dWthYW5pLm9yZz6JAlEEEwEK
ADsCGwMCHgECF4AECwkIBwMVCggFFgIDAQAWIQQ2kMJAzlG0Zw0wrRw47nV9aRhG
IAUCYEt9dQUJFxeR4wAKCRA47nV9aRhGIBNDEACxD6vJ+enZwe3IgkJh5JtLsC9b
MWCQRlPW1EVMsg96Cb5Rtron1eN1pp1TlzENJu1/C7C/VEsr9WwOPg26Men7fNf/
O21QM9IBWd/uB0Pu333WqKh92ESS5x9ST9DrG39nVGSPkQQBMuia72VrA+crPnwT
/h/u1IN6/sff5VDIU24rUiqW2Npy733dANruj7Ny0scRXVPltnVdhqwPHt6qNjC1
t+/cCnwHgW1BR1RYXBPpB42z/m29dL9rPrG0YPGWs2Bc+EATUICfEE6eIvwfciue
IJTjKT9Y9DrogJC2AYFhjC7N04OKdCB2hFs4BjexJwr4X0GJO7LhFl03c951AsIE
GHwrucRPB5bo2vmvQ8IvZn7CmtdUJzXv9JlyU6p+MIK1pz7TK6GgSOSffQIXZn6e
nUPtm9mEwuncOfmW8/ODYPs1gCWYgyiFJx8h7eEu+M4MxHSFBs7MwXf/Ae2fSp+M
P/p198qB8fC5oVBnF95qb0Qi0uc1D+Gb+gpBF+ymMb+s/VBOR3QWiym7AzBrJ62g
UnbC9jMLGnSRI+7p7raUfMTgXr5/oQoBw7ExJVltSSRrim2YH/t4CV47mO6dR9J3
1RtsTFIRNhz+07XPsETcuCV/dgqeC8fOFLt9MY17Sufhb1DcGy4urZBOIhXcpTV7
vHVj5IYH5nYOT49NRYkCOAQTAQIAIgUCTMQ5kgIbAwYLCQgHAwIGFQgCCQoLBBYC
AwECHgECF4AACgkQOO51fWkYRiAg4A/7BXKwoRaXrMbMPOW7vuVF7c2IKB2Yqzn1
vLBCwuEHkqY237lDcXY4/5LR+1gcZ3Duw1n/BRSm0FBdvyX/JTWiWNSDUkKAO/0l
T2Tg44YLrDT3bzwu8dbU9xQt6kH+SCOHvv5Oe4k79l5mro6fF3H1M0bN63x/YoFY
ojy09D7/JptY82oR4f/VdKnfZLJcCViCb0wp8SD2NkDAudKg+K+7PD8HlTWklQQg
TZdRXxVZKIJeU42aJDqnRbAhJd64YHyClhqut9F5LUmiP5qfLfNhkKDhNOwk2Blr
BGBJkSd7wPyzcX4Mun/L6YspHjbeVMt9TD7HQlo+OOd2OjAHCx6pqwkXnzeLPEaE
cPdQ1SHgrBViAxX3DNPubLP0Knw8XwFu96EuhHZgexE1W7bB4LFsJyXAc5k1PqPD
CLsAauxmvI2OfI7opG/8wyxDvNgoPjG8fZNAgY0REqPC0JnTXChH31IxUmhNotH8
tD3DDTZOHw05n5MwwUrEE9xiETVDfFQcMLfxZ9KLz+BC2g1t5LYublRgnCMNJzFg
sNUMM02CphABzl/LCLnumr0eyQQ/weV4twEhLwSDmqLYHL0EdYW0Y3CnnU9vmYxQ
cXKbstS71sEJJYBBmSBbf9GxkOY8BRNtwVwY0kPgxv1WqdVBiAFvfB+pyAsrax9B
3UeB7ZSwRD6JAhwEEAEKAAYFAlS25GwACgkQlbYYGy0z6ew92Q//ZA9/6piQtoW4
PwP/1DtWGyKU8hwR+9FG669iPk/dAG+yoEJtFMOUpg/FUFmCX8Bc4oEHsCVyLxKt
DcCVUIRcYNSFi5hTZaBEbwsOlDT37gtlfIIu34hhHRccKaLnN/N9gNMNw8wGh9xg
Q/KtxZwcbk/bZIlDkKTJkFBRAekdEGAFDWb/AZOy+LQxS8ZAh1eWkfV0i8opmK9k
gPXtLE0WSsqtYyGs58z+BFE9NH3tEUwK6jSvtuLwQl4UrICNbKthcpb8WwH6UXzb
q3QNSYVOpf/cqRdBJA6bvb/ku/xyKVL08lGmxD9v1b137R7mafDAFPTsvH2Mt/0V
YuhtWav3r1Bl9QksDxt2DTS8wiWDUBetGqOVdcw7vBrXPEWDNBmxeJXsiJ7zJlR+
9wrJOm6RV2+l1IPxu96EaPS+kTNBijKrhxb67bww8BTEWTd0wcdJmgWRkM8SIstp
IKqd0L2TFYph2/NtrBhRg+DIEPJPpSTGsUMcCEXCZPQ+cIdlQKsWpk0tZ62DlvEl
r7E+wgUSQolRfx5KrpZifiS2zQlhzdXv28CJhsVbLyw5fUAWUKIH/dCo5NKsNLk2
Lc5DH9VWnFgxAAtW290FqeK/4ulMq7Vs1dQSwyHM2Ni3QqqeaiOrh8gbSY5CMLFN
Y3HYRwuTYPa3AobsozCzBj0Zdf/6AFe5Ag0ETMQ5kgEQAL/FwKdjxgPxtSpgq1SM
zgZtTTyLqhgGD3NZfadHWHYRIL38NDV3JeTA79Y2zj2dj7KQPDT+0aqeizTV2E3j
P3iCQ53VOT4consBaQAgKexpptnS+T1DobtICFJ0GGzf0HRj6KO2zSOuOitWPWlU
wbvX7M0LLI2+hqlx0jTPqbJFZ/Za6KTtbS6xdCPVUpUqYZQpokEZcwQmUp8Q+lGo
JD2sNYCZyap63X/aAOgCGr2RXYddOH5e8vGzGW+mwtCv+WQ9Ay35mGqI5MqkbZd1
Qbuv2b1647E/QEEucfRHVbJVKGGPpFMUJtcItyyIt5jo+r9CCL4Cs47dF/9/RNwu
NvpvHXUyqMBQdWNZRMx4k/NGD/WviPi9m6mIMui6rOQsSOaqYdcUX4Nq2Orr3Oaz
2JPQdUfeI23iot1vK8hxvUCQTV3HfJghizN6spVl0yQOKBiE8miJRgrjHilH3hTb
xoo42xDkNAq+CQo3QAm1ibDxKCDq0RcWPjcCRAN/Q5MmpcodpdKkzV0yGIS4g7s5
frVrgV/kox2r4/Yxsr8K909+4H82AjTKGX/BmsQFCTAqBk6p7I0zxjIqJ/w33TZB
Q0Pn4r3WIlUPafzY6a9/LAvN1fHRxf9SpCByJsszD03Qu5f5TB8gthsdnVmTo7jj
iordEKMtw2aEMLzdWWTQ/TNVABEBAAGJAjwEGAEKACYCGwwWIQQ2kMJAzlG0Zw0w
rRw47nV9aRhGIAUCYEt9YAUJFxeRzgAKCRA47nV9aRhGIMLtD/9HuKM4pngImcuz
YwzQmdv4j26YYyh4jVsKEmVWTiRcehEgUIlrWkCu3qzd5NK+RetS7kJ8MPnzEUfj
YbpdC6yrF6n1mSrZZ4VJMkV2ev37bIgXM+Wp1mCAGbjNxQnjn9RabT/gjIqmGuRn
AP7RsSeOSuO/gO9h2Pteciz23ussTilB+8cTooQEQQZe6Kv/zukvL+ccSehLHsZ7
qVfRUAmtt8nFkXXE+s8jfLfhqstaI2/RJu5witaPcXM8Mnz2E95aASAbZy0eQot9
0Pvf07n9yuC3tueTvzvlXx3h5U3yT44tIOmzANIQjay1TGdm+RBJ2ZYyhyLawlZ2
NVUXXSp4QZZXPA0UWbF+pb7Q9cdKDNFVuvGBljuea0Yd0T2o+ibDq43HziX9ll+l
SXk9mqvW1UcDOaxWrSsm1Gc1O9g3wqH5xHAhtY8GPh/7VgAawskPkmnlkMW6pYPy
zibbeISJL1gd1jIT63y6aoVrtNoo+wYJm280ROflh4+5QOo6QJ+jm70fkXSG/qJ5
a8/qCPTHkJc/rpkL6/TDQAJURi9RhDAC0gb40HtusbN1LZEA+i0cWTmYXap+DB4Y
R4pApilpaG87M+VUokR4xpnx7vTb2MPa7Mdenvi9FEGnKXadmT8038vlfzz5GGUT
MlVin9BQPTpdA+PpRiJvKJgVDeAFOg==
=asTC
-----END PGP PUBLIC KEY BLOCK-----

View File

@ -1,18 +1,11 @@
#!/bin/bash
function operating_system() { operating_system() {
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
grep -E '^(VERSION_)?ID=' /etc/os-release | \
sort | cut -d '=' -f 2- | sed 's/"//g' | paste -s -d '-'
elif [[ "$OSTYPE" == "darwin"* ]]; then
echo "$(sw_vers -productName)-$(sw_vers -productVersion | cut -d '.' -f 1)"
else
echo "operating_system called on an unknown OS"
exit 1
fi
}
function check_operating_system() { check_operating_system() {
if [ "$(operating_system)" != "$1" ]; then if [ "$(operating_system)" != "$1" ]; then
echo "Not the right operating system!" echo "Not the right operating system!"
exit 1 exit 1
@ -21,25 +14,20 @@ function check_operating_system() {
fi fi
} }
function architecture() { architecture() {
uname -m
}
check_architecture() { check_architecture() {
local ARCH=$(architecture)
for arch in "$@"; do
if [ "${ARCH}" = "$arch" ]; then
echo "The right architecture!"
return 0
fi
done
echo "Not the right architecture!"
echo "Expected: $@"
echo "Actual: ${ARCH}"
exit 1
if [ "$(architecture)" != "$1" ]; then
echo "Not the right architecture!"
exit 1
else
echo "The right architecture."
fi
}
function check_all_yum() { check_all_yum() {
local missing="" local missing=""
for pkg in $1; do for pkg in $1; do
if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
@ -52,7 +40,7 @@ function check_all_yum() {
fi fi
} }
function check_all_dpkg() { check_all_dpkg() {
local missing="" local missing=""
for pkg in $1; do for pkg in $1; do
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
@ -65,7 +53,7 @@ function check_all_dpkg() {
fi fi
} }
function check_all_dnf() { check_all_dnf() {
local missing="" local missing=""
for pkg in $1; do for pkg in $1; do
if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
@ -77,34 +65,8 @@ function check_all_dnf() {
exit 1 exit 1
fi fi
} }
function install_all_apt() { install_all_apt() {
for pkg in $1; do
apt install -y "$pkg"
done
}
function install_custom_golang() {
# NOTE: The official https://go.dev/doc/manage-install doesn't seem to be working.
GOVERSION="$1"
GOINSTALLDIR="/opt/go$GOVERSION"
GOROOT="$GOINSTALLDIR/go" # GOPATH=$HOME/go
if [ ! -f "$GOROOT/bin/go" ]; then
curl -LO https://go.dev/dl/go$GOVERSION.linux-amd64.tar.gz
mkdir -p "$GOINSTALLDIR"
tar -C "$GOINSTALLDIR" -xzf go$GOVERSION.linux-amd64.tar.gz
fi
echo "go $GOVERSION installed under $GOROOT"
}
function install_custom_maven() {
MVNVERSION="$1"
MVNINSTALLDIR="/opt/apache-maven-$MVNVERSION"
MVNURL="https://s3.eu-west-1.amazonaws.com/deps.memgraph.io/maven/apache-maven-$MVNVERSION-bin.tar.gz"
if [ ! -f "$MVNINSTALLDIR/bin/mvn" ]; then
echo "Downloading maven from $MVNURL"
curl -LO "$MVNURL"
tar -C "/opt" -xzf "apache-maven-$MVNVERSION-bin.tar.gz"
fi
echo "maven $MVNVERSION installed under $MVNINSTALLDIR"
}

View File

@ -1,26 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"
print_help() {
echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 memgraph_logs_file_path cypherl_output_path"
exit 1
}
if [ "$#" -ne 2 ]; then
print_help
fi
INPUT="$1"
OUTPUT="$2"
if [ ! -f "$INPUT" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} memgraph_logs_file_path is not a file!"
print_help
fi
awk -v RS="Run] '" 'NR>1 { print $0 }' < "$INPUT" | sed -e "/^\[/d;" -e "s/'\([^']*\)$/;/g" > "$OUTPUT"
echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl file under $OUTPUT"
echo ""
echo "Import can be done by executing => \`cat $OUTPUT | mgconsole\`"

View File

@ -1,39 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"
print_help() {
echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 input_file_path output_file_path"
exit 1
}
if [ "$#" -ne 2 ]; then
print_help
fi
INPUT="$1"
OUTPUT="$2"
if [ ! -f "$INPUT" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} BEGIN and COMMIT are required because variables share the same name (e.g. row)"
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} CONSTRAINTS are just skipped -> ${COLOR_RED}please create constraints manually if needed${COLOR_NULL}"
echo 'CREATE INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' > "$OUTPUT"
sed -e 's/^:begin/BEGIN/g; s/^BEGIN$/BEGIN;/g;' \
-e 's/^:commit/COMMIT/g; s/^COMMIT$/COMMIT;/g;' \
-e '/^CALL/d; /^SCHEMA AWAIT/d;' \
-e 's/CREATE RANGE INDEX FOR (n:/CREATE INDEX ON :/g;' \
-e 's/) ON (n./(/g;' \
-e '/^CREATE CONSTRAINT/d; /^DROP CONSTRAINT/d;' "$INPUT" >> "$OUTPUT"
echo 'DROP INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' >> "$OUTPUT"
echo ""
echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl|.cypher file under $OUTPUT"
echo ""
echo "Please import data by executing => \`cat $OUTPUT | mgconsole\`"

View File

@ -1,61 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"
print_help() {
echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 input_file_schema_path input_file_nodes_path input_file_relationships_path input_file_cleanup_path output_file_path"
exit 1
}
if [ "$#" -ne 5 ]; then
print_help
fi
INPUT_SCHEMA="$1"
INPUT_NODES="$2"
INPUT_RELATIONSHIPS="$3"
INPUT_CLEANUP="$4"
OUTPUT="$5"
if [ ! -f "$INPUT_SCHEMA" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_NODES" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_RELATIONSHIPS" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_CLEANUP" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} BEGIN and COMMIT are required because variables share the same name (e.g. row)"
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} CONSTRAINTS are just skipped -> ${COLOR_RED}please create constraints manually if needed${COLOR_NULL}"
echo 'CREATE INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' > "$OUTPUT"
sed -e 's/CREATE RANGE INDEX FOR (n:/CREATE INDEX ON :/g;' \
-e 's/) ON (n./(/g;' \
-e '/^CREATE CONSTRAINT/d' $INPUT_SCHEMA >> "$OUTPUT"
cat "$INPUT_NODES" >> "$OUTPUT"
cat "$INPUT_RELATIONSHIPS" >> "$OUTPUT"
sed -e '/^DROP CONSTRAINT/d' "$INPUT_CLEANUP" >> "$OUTPUT"
echo 'DROP INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' >> "$OUTPUT"
echo ""
echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl|.cypher file under $OUTPUT"
echo ""
echo "Please import data by executing => \`cat $OUTPUT | mgconsole\`"

View File

@ -1,64 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"
print_help() {
echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 input_file_schema_path input_file_nodes_path input_file_relationships_path input_file_cleanup_path output_file_schema_path output_file_nodes_path output_file_relationships_path output_file_cleanup_path"
exit 1
}
if [ "$#" -ne 8 ]; then
print_help
fi
INPUT_SCHEMA="$1"
INPUT_NODES="$2"
INPUT_RELATIONSHIPS="$3"
INPUT_CLEANUP="$4"
OUTPUT_SCHEMA="$5"
OUTPUT_NODES="$6"
OUTPUT_RELATIONSHIPS="$7"
OUTPUT_CLEANUP="$8"
if [ ! -f "$INPUT_SCHEMA" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_NODES" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_RELATIONSHIPS" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_CLEANUP" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} BEGIN and COMMIT are required because variables share the same name (e.g. row)"
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} CONSTRAINTS are just skipped -> ${COLOR_RED}please create constraints manually if needed${COLOR_NULL}"
echo 'CREATE INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' > "$OUTPUT_SCHEMA"
sed -e 's/CREATE RANGE INDEX FOR (n:/CREATE INDEX ON :/g;' \
-e 's/) ON (n./(/g;' \
-e '/^CREATE CONSTRAINT/d' $INPUT_SCHEMA >> "$OUTPUT_SCHEMA"
cat "$INPUT_NODES" > "$OUTPUT_NODES"
cat "$INPUT_RELATIONSHIPS" > "$OUTPUT_RELATIONSHIPS"
sed -e '/^DROP CONSTRAINT/d' "$INPUT_CLEANUP" >> "$OUTPUT_CLEANUP"
echo 'DROP INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' >> "$OUTPUT_CLEANUP"
echo ""
echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl|.cypher files under $OUTPUT_SCHEMA, $OUTPUT_NODES, $OUTPUT_RELATIONSHIPS and $OUTPUT_CLEANUP"
echo ""
echo "Please import data by executing => \`cat $OUTPUT_SCHEMA | mgconsole\`, \`cat $OUTPUT_NODES | mgconsole\`, \`cat $OUTPUT_RELATIONSHIPS | mgconsole\` and \`cat $OUTPUT_CLEANUP | mgconsole\`"

File diff suppressed because it is too large

View File

@ -1,350 +0,0 @@
import typing
from enum import Enum
import networkx as nx
NX_LABEL_ATTR = "labels"
NX_TYPE_ATTR = "type"
SOURCE_TYPE_KAFKA = "SOURCE_TYPE_KAFKA"
SOURCE_TYPE_PULSAR = "SOURCE_TYPE_PULSAR"
"""
This module provides helpers for the mock Python API, much like _mgp.py does for mgp.py.
"""
class InvalidArgumentError(Exception):
"""
Signals that some of the arguments have invalid values.
"""
pass
class ImmutableObjectError(Exception):
pass
class LogicErrorError(Exception):
pass
class DeletedObjectError(Exception):
pass
class EdgeConstants(Enum):
I_START = 0
I_END = 1
I_KEY = 2
class Graph:
"""Wrapper around a NetworkX MultiDiGraph instance."""
__slots__ = ("nx", "_highest_vertex_id", "_highest_edge_id", "_valid")
def __init__(self, graph: nx.MultiDiGraph) -> None:
if not isinstance(graph, nx.MultiDiGraph):
raise TypeError(f"Expected 'networkx.classes.multidigraph.MultiDiGraph', got '{type(graph)}'")
self.nx = graph
self._highest_vertex_id = None
self._highest_edge_id = None
self._valid = True
@property
def vertex_ids(self):
return self.nx.nodes
def vertex_is_isolate(self, vertex_id: int) -> bool:
return nx.is_isolate(self.nx, vertex_id)
@property
def vertices(self):
return (Vertex(node_id, self) for node_id in self.nx.nodes)
def has_node(self, node_id):
return self.nx.has_node(node_id)
@property
def edges(self):
return self.nx.edges
def is_valid(self) -> bool:
return self._valid
def get_vertex_by_id(self, vertex_id: int) -> "Vertex":
return Vertex(vertex_id, self)
def invalidate(self):
self._valid = False
def is_immutable(self) -> bool:
return nx.is_frozen(self.nx)
def make_immutable(self):
self.nx = nx.freeze(self.nx)
def _new_vertex_id(self):
if self._highest_vertex_id is None:
self._highest_vertex_id = max(vertex_id for vertex_id in self.nx.nodes)
return self._highest_vertex_id + 1
def _new_edge_id(self):
if self._highest_edge_id is None:
self._highest_edge_id = max(edge[EdgeConstants.I_KEY.value] for edge in self.nx.edges(keys=True))
return self._highest_edge_id + 1
def create_vertex(self) -> "Vertex":
vertex_id = self._new_vertex_id()
self.nx.add_node(vertex_id)
self._highest_vertex_id = vertex_id
return Vertex(vertex_id, self)
def create_edge(self, from_vertex: "Vertex", to_vertex: "Vertex", edge_type: str) -> "Edge":
if from_vertex.is_deleted() or to_vertex.is_deleted():
raise DeletedObjectError("Accessing deleted object.")
edge_id = self._new_edge_id()
from_id = from_vertex.id
to_id = to_vertex.id
self.nx.add_edge(from_id, to_id, key=edge_id, type=edge_type)
self._highest_edge_id = edge_id
return Edge((from_id, to_id, edge_id), self)
def delete_vertex(self, vertex_id: int):
self.nx.remove_node(vertex_id)
def delete_edge(self, from_vertex_id: int, to_vertex_id: int, edge_id: int):
self.nx.remove_edge(from_vertex_id, to_vertex_id, edge_id)
@property
def highest_vertex_id(self) -> int:
if self._highest_vertex_id is None:
self._highest_vertex_id = max(vertex_id for vertex_id in self.nx.nodes) + 1
return self._highest_vertex_id
@property
def highest_edge_id(self) -> int:
if self._highest_edge_id is None:
self._highest_edge_id = max(edge[EdgeConstants.I_KEY.value] for edge in self.nx.edges(keys=True))
return self._highest_edge_id + 1
class Vertex:
"""Represents a graph vertex."""
__slots__ = ("_id", "_graph")
def __init__(self, id: int, graph: Graph) -> None:
if not isinstance(id, int):
raise TypeError(f"Expected 'int', got '{type(id)}'")
if not isinstance(graph, Graph):
raise TypeError(f"Expected '_mgp_mock.Graph', got '{type(graph)}'")
if not graph.nx.has_node(id):
raise IndexError(f"Unable to find vertex with ID {id}.")
self._id = id
self._graph = graph
def is_valid(self) -> bool:
return self._graph.is_valid()
def is_deleted(self) -> bool:
return not self._graph.nx.has_node(self._id) and self._id <= self._graph.highest_vertex_id
@property
def underlying_graph(self) -> Graph:
return self._graph
def underlying_graph_is_mutable(self) -> bool:
return not nx.is_frozen(self._graph.nx)
@property
def labels(self) -> typing.List[int]:
return self._graph.nx.nodes[self._id][NX_LABEL_ATTR].split(":")
def add_label(self, label: str) -> None:
if nx.is_frozen(self._graph.nx):
raise ImmutableObjectError("Cannot modify immutable object.")
self._graph.nx.nodes[self._id][NX_LABEL_ATTR] += f":{label}"
def remove_label(self, label: str) -> None:
if nx.is_frozen(self._graph.nx):
raise ImmutableObjectError("Cannot modify immutable object.")
labels = self._graph.nx.nodes[self._id][NX_LABEL_ATTR]
if labels.startswith(f"{label}:"):
labels = "\n" + labels # pseudo-string starter
self._graph.nx.nodes[self._id][NX_LABEL_ATTR] = labels.replace(f"\n{label}:", "")
elif labels.endswith(f":{label}"):
labels += "\n" # pseudo-string terminator
self._graph.nx.nodes[self._id][NX_LABEL_ATTR] = labels.replace(f":{label}\n", "")
else:
self._graph.nx.nodes[self._id][NX_LABEL_ATTR] = labels.replace(f":{label}:", ":")
@property
def id(self) -> int:
return self._id
@property
def properties(self):
return (
(key, value)
for key, value in self._graph.nx.nodes[self._id].items()
if key not in (NX_LABEL_ATTR, NX_TYPE_ATTR)
)
def get_property(self, property_name: str):
return self._graph.nx.nodes[self._id][property_name]
def set_property(self, property_name: str, value: object):
self._graph.nx.nodes[self._id][property_name] = value
@property
def in_edges(self) -> typing.Iterable["Edge"]:
return [Edge(edge, self._graph) for edge in self._graph.nx.in_edges(self._id, keys=True)]
@property
def out_edges(self) -> typing.Iterable["Edge"]:
return [Edge(edge, self._graph) for edge in self._graph.nx.out_edges(self._id, keys=True)]
class Edge:
"""Represents a graph edge."""
__slots__ = ("_edge", "_graph")
def __init__(self, edge: typing.Tuple[int, int, int], graph: Graph) -> None:
if not isinstance(edge, typing.Tuple):
raise TypeError(f"Expected 'Tuple', got '{type(edge)}'")
if not isinstance(graph, Graph):
raise TypeError(f"Expected '_mgp_mock.Graph', got '{type(graph)}'")
if not graph.nx.has_edge(*edge):
raise IndexError(f"Unable to find edge with ID {edge[EdgeConstants.I_KEY.value]}.")
self._edge = edge
self._graph = graph
def is_valid(self) -> bool:
return self._graph.is_valid()
def is_deleted(self) -> bool:
return (
not self._graph.nx.has_edge(*self._edge)
and self._edge[EdgeConstants.I_KEY.value] <= self._graph.highest_edge_id
)
def underlying_graph_is_mutable(self) -> bool:
return not nx.is_frozen(self._graph.nx)
@property
def id(self) -> int:
return self._edge[EdgeConstants.I_KEY.value]
@property
def edge(self) -> typing.Tuple[int, int, int]:
return self._edge
@property
def start_id(self) -> int:
return self._edge[EdgeConstants.I_START.value]
@property
def end_id(self) -> int:
return self._edge[EdgeConstants.I_END.value]
def get_type_name(self):
return self._graph.nx.get_edge_data(*self._edge)[NX_TYPE_ATTR]
def from_vertex(self) -> Vertex:
return Vertex(self.start_id, self._graph)
def to_vertex(self) -> Vertex:
return Vertex(self.end_id, self._graph)
@property
def properties(self):
return (
(key, value)
for key, value in self._graph.nx.edges[self._edge].items()
if key not in (NX_LABEL_ATTR, NX_TYPE_ATTR)
)
def get_property(self, property_name: str):
return self._graph.nx.edges[self._edge][property_name]
def set_property(self, property_name: str, value: object):
self._graph.nx.edges[self._edge][property_name] = value
class Path:
"""Represents a path comprised of `Vertex` and `Edge` instances."""
__slots__ = ("_vertices", "_edges", "_graph")
__create_key = object()
def __init__(self, create_key, vertex_id: int, graph: Graph) -> None:
assert create_key == Path.__create_key, "Path objects must be created using Path.make_with_start"
self._vertices = [vertex_id]
self._edges = []
self._graph = graph
@classmethod
def make_with_start(cls, vertex: Vertex) -> "Path":
if not isinstance(vertex, Vertex):
raise TypeError(f"Expected 'Vertex', got '{type(vertex)}'")
if not isinstance(vertex.underlying_graph, Graph):
raise TypeError(f"Expected '_mgp_mock.Graph', got '{type(vertex.underlying_graph)}'")
if not vertex.underlying_graph.nx.has_node(vertex._id):
raise IndexError(f"Unable to find vertex with ID {vertex._id}.")
return Path(cls.__create_key, vertex._id, vertex.underlying_graph)
def is_valid(self) -> bool:
return self._graph.is_valid()
def underlying_graph_is_mutable(self) -> bool:
return not nx.is_frozen(self._graph.nx)
def expand(self, edge: Edge):
if edge.start_id != self._vertices[-1]:
raise LogicErrorError("Logic error.")
self._vertices.append(edge.end_id)
self._edges.append((edge.start_id, edge.end_id, edge.id))
def pop(self):
if not self._edges:
raise IndexError("Path contains no relationships.")
self._vertices.pop()
self._edges.pop()
def vertex_at(self, index: int) -> Vertex:
return Vertex(self._vertices[index], self._graph)
def edge_at(self, index: int) -> Edge:
return Edge(self._edges[index], self._graph)
def size(self) -> int:
return len(self._edges)

View File

@ -1,4 +1,4 @@
// Copyright 2024 Memgraph Ltd. // Copyright 2022 Memgraph Ltd.
// //
// Use of this software is governed by the Business Source License // Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source // included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source
@ -13,96 +13,45 @@
#include <exception>
#include <iostream>
#include <sstream>
#include <string>
namespace mg_exception { namespace mg_exception {
// Instead of writing this utility function, we could have used `fmt::format`, but that's not an ideal option here
// because that would introduce dependency that would be propagated to the client code (if exceptions here would be
// used). Since the functionality here is not complex + the code is not on a critical path, we opted for a pure C++
// solution.
template <typename FirstArg, typename... Args>
std::string StringSerialize(FirstArg &&firstArg, Args &&...args) {
std::stringstream stream;
stream << std::forward<FirstArg>(firstArg);
((stream << " " << args), ...);
return stream.str();
}
struct UnknownException : public std::exception {
const char *what() const noexcept override { return "Unknown exception!"; }
};
struct NotEnoughMemoryException : public std::exception {
NotEnoughMemoryException()
: message_{
StringSerialize("Not enough memory! For more details please visit", "https://memgr.ph/memory-control")} {}
const char *what() const noexcept override { return message_.c_str(); }
private:
std::string message_;
};
struct AllocationException : public std::exception {
AllocationException()
: message_{StringSerialize("Could not allocate memory. For more details please visit",
"https://memgr.ph/memory-control")} {}
const char *what() const noexcept override { return message_.c_str(); }
private:
std::string message_;
};
struct NotEnoughMemoryException : public std::exception {
const char *what() const throw() { return "Not enough memory!"; }
};
struct UnknownException : public std::exception {
const char *what() const throw() { return "Unknown exception!"; }
};
struct AllocationException : public std::exception {
const char *what() const throw() { return "Could not allocate memory!"; }
};
struct InsufficientBufferException : public std::exception {
const char *what() const noexcept override { return "Buffer is not sufficient to process procedure!"; } const char *what() const throw() { return "Buffer is not sufficient to process procedure!"; }
};
struct OutOfRangeException : public std::exception {
const char *what() const noexcept override { return "Index out of range!"; } const char *what() const throw() { return "Index out of range!"; }
};
struct LogicException : public std::exception {
const char *what() const noexcept override { return "Logic exception, check the procedure signature!"; } const char *what() const throw() { return "Logic exception, check the procedure signature!"; }
};
struct DeletedObjectException : public std::exception {
const char *what() const noexcept override { return "Object is deleted!"; } const char *what() const throw() { return "Object is deleted!"; }
};
struct InvalidArgumentException : public std::exception {
const char *what() const noexcept override { return "Invalid argument!"; } const char *what() const throw() { return "Invalid argument!"; }
};
struct InvalidIDException : public std::exception {
InvalidIDException() : message_{"Invalid ID!"} {}
explicit InvalidIDException(std::uint64_t identifier) : message_{StringSerialize("Invalid ID =", identifier)} {}
const char *what() const noexcept override { return message_.c_str(); }
private:
std::string message_;
};
struct KeyAlreadyExistsException : public std::exception {
KeyAlreadyExistsException() : message_{"Key you are trying to set already exists!"} {}
explicit KeyAlreadyExistsException(const std::string &key)
: message_{StringSerialize("Key you are trying to set already exists! KEY = ", key)} {}
const char *what() const noexcept override { return message_.c_str(); }
private:
std::string message_;
};
struct InvalidIDException : public std::exception {
const char *what() const throw() { return "Invalid ID!"; }
};
struct KeyAlreadyExistsException : public std::exception {
const char *what() const throw() { return "Key you are trying to set already exists!"; }
};
struct ImmutableObjectException : public std::exception {
const char *what() const noexcept override { return "Object you are trying to change is immutable!"; } const char *what() const throw() { return "Object you are trying to change is immutable!"; }
};
struct ValueConversionException : public std::exception {
const char *what() const noexcept override { return "Error in value conversion!"; } const char *what() const throw() { return "Error in value conversion!"; }
};
struct SerializationException : public std::exception {
const char *what() const noexcept override { return "Error in serialization!"; } const char *what() const throw() { return "Error in serialization!"; }
};
} // namespace mg_exception
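For readers unfamiliar with the pattern in the left-hand (message-carrying) side of this diff, here is a minimal usage sketch. The header name mg_exception.hpp is an assumption; only StringSerialize and InvalidIDException from the code above are used:

    #include <cstdint>
    #include <iostream>
    #include "mg_exception.hpp"  // assumed header name for the file diffed above

    int main() {
      // StringSerialize joins its arguments with single spaces -> "Invalid ID = 42".
      std::cout << mg_exception::StringSerialize("Invalid ID =", std::uint64_t{42}) << std::endl;
      try {
        throw mg_exception::InvalidIDException(42);
      } catch (const std::exception &e) {
        std::cout << e.what() << std::endl;  // also prints "Invalid ID = 42"
      }
      return 0;
    }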

View File

@ -1,4 +1,4 @@
// Copyright 2024 Memgraph Ltd. // Copyright 2022 Memgraph Ltd.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source
@ -111,22 +111,6 @@ enum mgp_error mgp_global_aligned_alloc(size_t size_in_bytes, size_t alignment,
/// The behavior is undefined if `ptr` is not a value returned from a prior
/// mgp_global_alloc() or mgp_global_aligned_alloc().
void mgp_global_free(void *p);
/// State of the graph database.
struct mgp_graph;
/// Allocations are tracked only for the master thread. If new threads are spawned
/// inside a procedure, you can start tracking allocations for the current thread
/// too by calling the following function. This is important if you need the query
/// memory limit, or a per-procedure memory limit, to work for a given procedure.
enum mgp_error mgp_track_current_thread_allocations(struct mgp_graph *graph);
/// Once allocations are tracked for the current thread, you need to stop tracking
/// them before the thread finishes executing or is detached. Otherwise, the system
/// might slow down due to unnecessary tracking of allocations.
enum mgp_error mgp_untrack_current_thread_allocations(struct mgp_graph *graph);
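A hedged sketch of how these two calls are meant to bracket a worker thread spawned from a procedure; the function name and body are illustrative, not part of the API:

    // Illustrative only; `graph` is the mgp_graph handed to the procedure callback.
    static void worker(struct mgp_graph *graph) {
      mgp_track_current_thread_allocations(graph);    // start attributing this thread's allocations
      // ... allocation-heavy work on behalf of the procedure ...
      mgp_untrack_current_thread_allocations(graph);  // must run before the thread exits or detaches
    }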
///@}
/// @name Operations on mgp_value
@ -429,9 +413,6 @@ enum mgp_error mgp_list_copy(struct mgp_list *list, struct mgp_memory *memory, s
/// Free the memory used by the given mgp_list and contained elements.
void mgp_list_destroy(struct mgp_list *list);
/// Return whether the given mgp_list contains any deleted values.
enum mgp_error mgp_list_contains_deleted(struct mgp_list *list, int *result);
/// Append a copy of mgp_value to mgp_list if capacity allows.
/// The list copies the given value and therefore does not take ownership of the
/// original value. You still need to call mgp_value_destroy to free the
@ -472,9 +453,6 @@ enum mgp_error mgp_map_copy(struct mgp_map *map, struct mgp_memory *memory, stru
/// Free the memory used by the given mgp_map and contained items.
void mgp_map_destroy(struct mgp_map *map);
/// Return whether the given mgp_map contains any deleted values.
enum mgp_error mgp_map_contains_deleted(struct mgp_map *map, int *result);
/// Insert a new mapping from a NULL terminated character string to a value.
/// If a mapping with the same key already exists, it is *not* replaced.
/// In case of insertion, both the string and the value are copied into the map.
@ -484,18 +462,6 @@ enum mgp_error mgp_map_contains_deleted(struct mgp_map *map, int *result);
/// Return mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS if a previous mapping already exists.
enum mgp_error mgp_map_insert(struct mgp_map *map, const char *key, struct mgp_value *value);
/// Insert a mapping from a NULL terminated character string to a value.
/// If a mapping with the same key already exists, it is replaced.
/// In case of update, both the string and the value are copied into the map.
/// Therefore, the map does not take ownership of the original key nor value, so
/// you still need to free their memory explicitly.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE is returned if unable to allocate for insertion.
enum mgp_error mgp_map_update(struct mgp_map *map, const char *key, struct mgp_value *value);
// Erase a mapping by key.
// If the key doesn't exist in the map nothing happens
enum mgp_error mgp_map_erase(struct mgp_map *map, const char *key);
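A short sketch of the update/erase pair removed by this diff. mgp_value_make_int is assumed to be the integer value constructor from the same API, and error handling is elided:

    /* Sketch under assumptions: `map` and `memory` are valid, errors unchecked. */
    struct mgp_value *count;
    mgp_value_make_int(1, memory, &count);  /* assumed mgp_value constructor */
    mgp_map_update(map, "visits", count);   /* inserts, or replaces an existing mapping */
    mgp_map_erase(map, "obsolete");         /* nothing happens if the key is absent */
    mgp_value_destroy(count);               /* the map copied the value, so free ours */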
/// Get the number of items stored in mgp_map.
/// Current implementation always returns without errors.
enum mgp_error mgp_map_size(struct mgp_map *map, size_t *result);
@ -504,9 +470,6 @@ enum mgp_error mgp_map_size(struct mgp_map *map, size_t *result);
/// Result is NULL if no mapping exists.
enum mgp_error mgp_map_at(struct mgp_map *map, const char *key, struct mgp_value **result);
/// Returns true if key in map.
enum mgp_error mgp_key_exists(struct mgp_map *map, const char *key, int *result);
/// An item in the mgp_map.
struct mgp_map_item;
@ -558,9 +521,6 @@ enum mgp_error mgp_path_copy(struct mgp_path *path, struct mgp_memory *memory, s
/// Free the memory used by the given mgp_path and contained vertices and edges.
void mgp_path_destroy(struct mgp_path *path);
/// Return whether the given mgp_path contains any deleted values.
enum mgp_error mgp_path_contains_deleted(struct mgp_path *path, int *result);
/// Append an edge continuing from the last vertex on the path.
/// The edge is copied into the path. Therefore, the path does not take
/// ownership of the original edge, so you still need to free the edge memory
@ -571,10 +531,6 @@ enum mgp_error mgp_path_contains_deleted(struct mgp_path *path, int *result);
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for path extension.
enum mgp_error mgp_path_expand(struct mgp_path *path, struct mgp_edge *edge);
/// Remove the last node and the last relationship from the path.
/// Return mgp_error::MGP_ERROR_OUT_OF_RANGE if the path contains no relationships.
enum mgp_error mgp_path_pop(struct mgp_path *path);
/// Get the number of edges in a mgp_path.
/// Current implementation always returns without errors.
enum mgp_error mgp_path_size(struct mgp_path *path, size_t *result);
@ -679,12 +635,6 @@ struct mgp_vertex_id {
/// Get the ID of given vertex.
enum mgp_error mgp_vertex_get_id(struct mgp_vertex *v, struct mgp_vertex_id *result);
/// Get the in degree of given vertex.
enum mgp_error mgp_vertex_get_in_degree(struct mgp_vertex *v, size_t *result);
/// Get the out degree of given vertex.
enum mgp_error mgp_vertex_get_out_degree(struct mgp_vertex *v, size_t *result);
/// Result is non-zero if the vertex can be modified.
/// The mutability of the vertex is the same as the graph which it is part of. If a vertex is immutable, then edges
/// cannot be created or deleted, properties and labels cannot be set or removed and all of the returned edges will be
@ -702,15 +652,6 @@ enum mgp_error mgp_vertex_underlying_graph_is_mutable(struct mgp_vertex *v, int
enum mgp_error mgp_vertex_set_property(struct mgp_vertex *v, const char *property_name,
struct mgp_value *property_value);
/// Set the value of properties on a vertex.
/// When the value is `null`, then the property is removed from the vertex.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for storing the property.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `v` is immutable.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `v` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `v` has been modified by another transaction.
/// Return mgp_error::MGP_ERROR_VALUE_CONVERSION if `property_value` is vertex, edge or path.
enum mgp_error mgp_vertex_set_properties(struct mgp_vertex *v, struct mgp_map *properties);
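A sketch of the batched setter removed here, composed from the map API above. mgp_map_make_empty and mgp_value_make_int are assumed constructors from the same header, and error handling is elided:

    /* Sketch under assumptions: `v` is mutable, `memory` is valid, errors unchecked. */
    struct mgp_map *props;
    mgp_map_make_empty(memory, &props);
    struct mgp_value *age;
    mgp_value_make_int(30, memory, &age);
    mgp_map_insert(props, "age", age);
    mgp_vertex_set_properties(v, props);  /* a `null` value would remove that property */
    mgp_value_destroy(age);               /* key and value were copied into the map */
    mgp_map_destroy(props);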
/// Add the label to the vertex.
/// If the vertex already has the label, this function does nothing.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for storing the label.
@ -734,9 +675,6 @@ enum mgp_error mgp_vertex_copy(struct mgp_vertex *v, struct mgp_memory *memory,
/// Free the memory used by a mgp_vertex.
void mgp_vertex_destroy(struct mgp_vertex *v);
/// Return whether the given mgp_vertex is deleted.
enum mgp_error mgp_vertex_is_deleted(struct mgp_vertex *v, int *result);
/// Result is non-zero if given vertices are equal, otherwise 0.
enum mgp_error mgp_vertex_equal(struct mgp_vertex *v1, struct mgp_vertex *v2, int *result);
@ -831,9 +769,6 @@ enum mgp_error mgp_edge_copy(struct mgp_edge *e, struct mgp_memory *memory, stru
/// Free the memory used by a mgp_edge.
void mgp_edge_destroy(struct mgp_edge *e);
/// Return whether the given mgp_edge is deleted.
enum mgp_error mgp_edge_is_deleted(struct mgp_edge *e, int *result);
/// Result is non-zero if given edges are equal, otherwise 0.
enum mgp_error mgp_edge_equal(struct mgp_edge *e1, struct mgp_edge *e2, int *result);
@ -867,15 +802,6 @@ enum mgp_error mgp_edge_get_property(struct mgp_edge *e, const char *property_na
/// Return mgp_error::MGP_ERROR_VALUE_CONVERSION if `property_value` is vertex, edge or path.
enum mgp_error mgp_edge_set_property(struct mgp_edge *e, const char *property_name, struct mgp_value *property_value);
/// Set the value of properties on an edge.
/// When the value is `null`, then the property is removed from the edge.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for storing the property.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `e` is immutable.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `e` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `e` has been modified by another transaction.
/// Return mgp_error::MGP_ERROR_VALUE_CONVERSION if `property_value` is vertex, edge or path.
enum mgp_error mgp_edge_set_properties(struct mgp_edge *e, struct mgp_map *properties);
/// Start iterating over properties stored in the given edge.
/// The properties of the edge are copied when the iterator is created, therefore later changes won't affect them.
/// Resulting mgp_properties_iterator needs to be deallocated with
@@ -885,113 +811,21 @@ enum mgp_error mgp_edge_set_properties(struct mgp_edge *e, struct mgp_map *prope
enum mgp_error mgp_edge_iter_properties(struct mgp_edge *e, struct mgp_memory *memory,
                                        struct mgp_properties_iterator **result);
/// State of the graph database.
struct mgp_graph;
/// Get the vertex corresponding to given ID, or NULL if no such vertex exists.
/// Resulting vertex must be freed using mgp_vertex_destroy.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate the vertex.
enum mgp_error mgp_graph_get_vertex_by_id(struct mgp_graph *g, struct mgp_vertex_id id, struct mgp_memory *memory,
                                          struct mgp_vertex **result);
/// Result is non-zero if the index with the given name exists.
/// The current implementation always returns without errors.
enum mgp_error mgp_graph_has_text_index(struct mgp_graph *graph, const char *index_name, int *result);
/// Available modes of searching text indices.
MGP_ENUM_CLASS text_search_mode{
SPECIFIED_PROPERTIES,
REGEX,
ALL_PROPERTIES,
};
/// Search the named text index for the given query. The result is a map with the "search_results" and "error_msg" keys.
/// The "search_results" key contains the vertices whose text-indexed properties match the given query.
/// In case of a Tantivy error, the "search_results" key is absent, and "error_msg" contains the error message.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if there's an allocation error while constructing the results map.
/// Return mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS if the same key is being created in the results map more than once.
enum mgp_error mgp_graph_search_text_index(struct mgp_graph *graph, const char *index_name, const char *search_query,
enum text_search_mode search_mode, struct mgp_memory *memory,
struct mgp_map **result);
/// Aggregate over the results of a search over the named text index. The result is a map with the "aggregation_results"
/// and "error_msg" keys.
/// The "aggregation_results" key contains the vertices whose text-indexed properties match the given query.
/// In case of a Tantivy error, the "aggregation_results" key is absent, and "error_msg" contains the error message.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if there's an allocation error while constructing the results map.
/// Return mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS if the same key is being created in the results map more than once.
enum mgp_error mgp_graph_aggregate_over_text_index(struct mgp_graph *graph, const char *index_name,
const char *search_query, const char *aggregation_query,
struct mgp_memory *memory, struct mgp_map **result);
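A note on consuming these results: both calls hand back a map that carries either the payload key or "error_msg", never a C error code. A minimal Python-side sketch of unpacking that convention, assuming the map reaches the procedure as a dict-like value (this diff shows no Python wrapper for text search, so the helper below is purely illustrative):

```python
def unpack_text_search(result_map):
    # Per the contract above: a Tantivy failure yields only "error_msg";
    # otherwise "search_results" holds the matching vertices.
    if "error_msg" in result_map:
        raise RuntimeError(result_map["error_msg"])
    return list(result_map["search_results"])
```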
/// Creates label index for given label.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If the label index already exists, result will be 0, otherwise 1.
enum mgp_error mgp_create_label_index(struct mgp_graph *graph, const char *label, int *result);
/// Drop label index.
enum mgp_error mgp_drop_label_index(struct mgp_graph *graph, const char *label, int *result);
/// List all label indices.
enum mgp_error mgp_list_all_label_indices(struct mgp_graph *graph, struct mgp_memory *memory, struct mgp_list **result);
/// Creates label-property index for given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If the label-property index already exists, result will be 0, otherwise 1.
enum mgp_error mgp_create_label_property_index(struct mgp_graph *graph, const char *label, const char *property,
                                               int *result);
/// Drops label-property index for given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If dropping the label-property index failed, result will be 0, otherwise 1.
enum mgp_error mgp_drop_label_property_index(struct mgp_graph *graph, const char *label, const char *property,
                                             int *result);
/// List all label+property indices.
enum mgp_error mgp_list_all_label_property_indices(struct mgp_graph *graph, struct mgp_memory *memory,
                                                   struct mgp_list **result);
/// Creates existence constraint for given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If creating the existence constraint failed, result will be 0, otherwise 1.
enum mgp_error mgp_create_existence_constraint(struct mgp_graph *graph, const char *label, const char *property,
                                               int *result);
/// Drops existence constraint for given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If dropping the existence constraint failed, result will be 0, otherwise 1.
enum mgp_error mgp_drop_existence_constraint(struct mgp_graph *graph, const char *label, const char *property,
                                             int *result);
/// List all existence constraints.
enum mgp_error mgp_list_all_existence_constraints(struct mgp_graph *graph, struct mgp_memory *memory,
                                                  struct mgp_list **result);
/// Creates unique constraint for given label and properties.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If creating the unique constraint failed, result will be 0, otherwise 1.
enum mgp_error mgp_create_unique_constraint(struct mgp_graph *graph, const char *label, struct mgp_value *properties,
                                            int *result);
/// Drops unique constraint for given label and properties.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If dropping the unique constraint failed, result will be 0, otherwise 1.
enum mgp_error mgp_drop_unique_constraint(struct mgp_graph *graph, const char *label, struct mgp_value *properties,
                                          int *result);
/// List all unique constraints.
enum mgp_error mgp_list_all_unique_constraints(struct mgp_graph *graph, struct mgp_memory *memory,
                                               struct mgp_list **result);
/// Result is non-zero if the graph can be modified.
/// If a graph is immutable, then vertices cannot be created or deleted, and all of the returned vertices will be
/// immutable also. The same applies for edges.
/// Current implementation always returns without errors.
enum mgp_error mgp_graph_is_mutable(struct mgp_graph *graph, int *result);
/// Result is non-zero if the graph is in transactional storage mode.
/// If a graph is not in transactional mode (i.e. analytical mode), then vertices and edges can be missing
/// because changes from other transactions are visible.
/// Current implementation always returns without errors.
enum mgp_error mgp_graph_is_transactional(struct mgp_graph *graph, int *result);
/// Add a new vertex to the graph.
/// Resulting vertex must be freed using mgp_vertex_destroy.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
@@ -1018,29 +852,6 @@ enum mgp_error mgp_graph_detach_delete_vertex(struct mgp_graph *graph, struct mg
enum mgp_error mgp_graph_create_edge(struct mgp_graph *graph, struct mgp_vertex *from, struct mgp_vertex *to,
                                     struct mgp_edge_type type, struct mgp_memory *memory, struct mgp_edge **result);
/// Change the source ("from") vertex of an edge.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate a mgp_edge.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `from` or `to` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `from` or `to` has been modified by another transaction.
enum mgp_error mgp_graph_edge_set_from(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_vertex *new_from,
struct mgp_memory *memory, struct mgp_edge **result);
/// Change the destination ("to") vertex of an edge.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate a mgp_edge.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `from` or `to` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `from` or `to` has been modified by another transaction.
enum mgp_error mgp_graph_edge_set_to(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_vertex *new_to,
struct mgp_memory *memory, struct mgp_edge **result);
/// Change the type of an edge.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `edge`, its source or destination vertex has been modified by
/// another transaction.
enum mgp_error mgp_graph_edge_change_type(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_edge_type new_type,
struct mgp_memory *memory, struct mgp_edge **result);
/// Delete an edge from the graph.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `edge`, its source or destination vertex has been modified by
@@ -1507,13 +1318,6 @@ MGP_ENUM_CLASS mgp_log_level{
/// to allocate global resources.
typedef void (*mgp_proc_cb)(struct mgp_list *, struct mgp_graph *, struct mgp_result *, struct mgp_memory *);
/// Cleanup for a query module read procedure. Can't be invoked through OpenCypher. Cleans batched stream.
typedef void (*mgp_proc_cleanup)();
/// Initializer for a query module batched read procedure. Can't be invoked through OpenCypher. Initializes batched
/// stream.
typedef void (*mgp_proc_initializer)(struct mgp_list *, struct mgp_graph *, struct mgp_memory *);
/// Register a read-only procedure to a module.
///
/// The `name` must be a sequence of digits, underscores, lowercase and
@@ -1538,30 +1342,6 @@ enum mgp_error mgp_module_add_read_procedure(struct mgp_module *module, const ch
enum mgp_error mgp_module_add_write_procedure(struct mgp_module *module, const char *name, mgp_proc_cb cb,
                                              struct mgp_proc **result);
/// Register a readable batched procedure to a module.
///
/// The `name` must be a valid identifier, following the same rules as the
/// procedure `name` in mgp_module_add_read_procedure.
///
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for mgp_proc.
/// Return mgp_error::MGP_ERROR_INVALID_ARGUMENT if `name` is not a valid procedure name.
/// Return mgp_error::MGP_ERROR_LOGIC_ERROR if a procedure with the same name was already registered.
enum mgp_error mgp_module_add_batch_read_procedure(struct mgp_module *module, const char *name, mgp_proc_cb cb,
mgp_proc_initializer initializer, mgp_proc_cleanup cleanup,
struct mgp_proc **result);
/// Register a writeable batched procedure to a module.
///
/// The `name` must be a valid identifier, following the same rules as the
/// procedure `name` in mgp_module_add_read_procedure.
///
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for mgp_proc.
/// Return mgp_error::MGP_ERROR_INVALID_ARGUMENT if `name` is not a valid procedure name.
/// Return mgp_error::MGP_ERROR_LOGIC_ERROR if a procedure with the same name was already registered.
enum mgp_error mgp_module_add_batch_write_procedure(struct mgp_module *module, const char *name, mgp_proc_cb cb,
mgp_proc_initializer initializer, mgp_proc_cleanup cleanup,
struct mgp_proc **result);
/// Add a required argument to a procedure.
///
/// The order of adding arguments will correspond to the order the procedure
@@ -1637,10 +1417,7 @@ enum mgp_error mgp_log(enum mgp_log_level log_level, const char *output);
/// @{
/// Return non-zero if the currently executing procedure should abort as soon as
-/// possible. If non-zero the reasons are:
+/// possible.
/// (1) The transaction was requested to be terminated
/// (2) The server is gracefully shutting down
/// (3) The transaction has hit its timeout threshold
///
/// Procedures which perform heavyweight processing run the risk of running too
/// long and going over the query execution time limit. To prevent this, such
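The same abort conditions (termination request, graceful shutdown, transaction timeout) are surfaced to Python query modules as well. A minimal sketch of the cooperative-cancellation pattern, assuming the Python wrapper's `check_must_abort()` helper, which raises once the C-level check reports non-zero; the workload itself is illustrative:

```python
import mgp

@mgp.read_proc
def long_scan(ctx: mgp.ProcCtx) -> mgp.Record(count=int):
    count = 0
    for _vertex in ctx.graph.vertices:
        # Stop promptly on termination, shutdown, or timeout.
        ctx.check_must_abort()
        count += 1
    return mgp.Record(count=count)
```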
include/mgp.py
@@ -479,12 +479,6 @@ class Properties:
        except KeyError:
            return False
    def set_properties(self, properties: dict) -> None:
        if not self._vertex_or_edge.is_valid():
            raise InvalidContextError()
        self._vertex_or_edge.set_properties(properties)
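A usage sketch for this removed helper, inside a write procedure holding a mutable vertex (names and values are illustrative):

```python
vertex.properties.set_properties({"name": "Alice", "age": 22})
```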
class EdgeType:
    """Type of an Edge."""
@@ -983,24 +977,6 @@ class Path:
        self._vertices = None
        self._edges = None
    def pop(self):
        """
        Remove the last node and the last relationship from the path.

        Raises:
            InvalidContextError: If using an invalid `Path` instance
            OutOfRangeError: If the path contains no relationships.

        Examples:
            ```path.pop()```
        """
        if not self.is_valid():
            raise InvalidContextError()
        self._path.pop()
        # Invalidate our cached tuples
        self._vertices = None
        self._edges = None
    @property
    def vertices(self) -> typing.Tuple[Vertex, ...]:
        """
@@ -1041,10 +1017,6 @@ class Path:
        self._edges = tuple(Edge(self._path.edge_at(i)) for i in range(num_edges))
        return self._edges

    @property
    def length(self) -> int:
        return self._path.size()
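Together, the removed `pop()` and `length` members supported building a path incrementally with backtracking. A usage sketch, with `start_vertex` and `edge` as illustrative stand-ins:

```python
path = mgp.Path(start_vertex)  # a path containing a single vertex
path.expand(edge)              # append a relationship and its end vertex
assert path.length == 1
path.pop()                     # drop the last relationship again
assert path.length == 0
```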
class Record:
    """Represents a record of resulting field values."""
@@ -1311,7 +1283,7 @@ class Graph:
            raise InvalidContextError()
        self._graph.detach_delete_vertex(vertex._vertex)

-    def create_edge(self, from_vertex: Vertex, to_vertex: Vertex, edge_type: EdgeType) -> Edge:
+    def create_edge(self, from_vertex: Vertex, to_vertex: Vertex, edge_type: EdgeType) -> None:
        """
        Create an edge.
@@ -1320,16 +1292,13 @@
            to_vertex: `Vertex' to where edge is directed.
            edge_type: `EdgeType` defines the type of edge.
        Returns:
            Created `Edge`.
        Raises:
            ImmutableObjectError: If `graph` is immutable.
            UnableToAllocateError: If unable to allocate an edge.
            DeletedObjectError: If `from_vertex` or `to_vertex` has been deleted.
            SerializationError: If `from_vertex` or `to_vertex` has been modified by another transaction.
        Examples:
-            ```edge = graph.create_edge(from_vertex, vertex, edge_type)```
+            ```graph.create_edge(from_vertex, vertex, edge_type)```
        """
        if not self.is_valid():
            raise InvalidContextError()
@@ -1430,13 +1399,6 @@ class UnsupportedTypingError(Exception):
        super().__init__("Unsupported typing annotation '{}'".format(type_))

class UnequalTypesError(Exception):
    """Signals a typing annotation is not equal between types"""

    def __init__(self, type1_: typing.Any, type2_: typing.Any):
        super().__init__(f"Unequal typing annotation '{type1_}' and '{type2_}'")

def _typing_to_cypher_type(type_):
    """Convert typing annotation to a _mgp.CypherType instance."""
    simple_types = {
@@ -1549,72 +1511,6 @@ def _typing_to_cypher_type(type_):
    return parse_typing(str(type_))
def _is_typing_same(type1_, type2_):
    """Check whether two typing annotations denote the same type."""
    simple_types = {
        typing.Any: 1,
        object: 2,
        list: 3,
        Any: 4,
        bool: 5,
        str: 6,
        int: 7,
        float: 8,
        Number: 9,
        Map: 10,
        Vertex: 11,
        Edge: 12,
        Path: 13,
        Date: 14,
        LocalTime: 15,
        LocalDateTime: 16,
        Duration: 17,
    }
    try:
        return simple_types[type1_] == simple_types[type2_]
    except KeyError:
        pass
    if sys.version_info < (3, 8):
        # skip type checks
        return True
    # Read origin and args from each annotation separately.
    complex_type1 = typing.get_origin(type1_)
    type_args1 = typing.get_args(type1_)
    complex_type2 = typing.get_origin(type2_)
    type_args2 = typing.get_args(type2_)
    if complex_type2 != complex_type1:
        raise UnequalTypesError(type1_, type2_)
    if complex_type1 == typing.Union:
        contains_none_arg1 = type(None) in type_args1
        contains_none_arg2 = type(None) in type_args2
        if contains_none_arg1 != contains_none_arg2:
            raise UnequalTypesError(type1_, type2_)
        if contains_none_arg1:
            types1 = tuple(t for t in type_args1 if t is not type(None))  # noqa E721
            types2 = tuple(t for t in type_args2 if t is not type(None))  # noqa E721
            if len(types1) != len(types2):
                raise UnequalTypesError(types1, types2)
            if len(types1) == 1:
                (type_arg1,) = types1
                (type_arg2,) = types2
            else:
                type_arg1 = typing.Union.__getitem__(types1)
                type_arg2 = typing.Union.__getitem__(types2)
            return _is_typing_same(type_arg1, type_arg2)
    elif complex_type1 == list:
        (type_arg1,) = type_args1
        (type_arg2,) = type_args2
        return _is_typing_same(type_arg1, type_arg2)
    # skip type checks
    return True
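A quick illustration of what this predicate accepts and rejects on Python 3.8+ (below 3.8 it skips the check and returns True); the values are illustrative:

```python
import typing

assert _is_typing_same(int, int)
assert _is_typing_same(typing.Optional[int], typing.Optional[int])
assert not _is_typing_same(typing.List[int], typing.List[str])
```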
# Procedure registration
@@ -1774,92 +1670,6 @@ def write_proc(func: typing.Callable[..., Record]):
    return _register_proc(func, True)
def _register_batch_proc(
    func: typing.Callable[..., Record], initializer: typing.Callable, cleanup: typing.Callable, is_write: bool
):
    raise_if_does_not_meet_requirements(func)
    register_func = _mgp.Module.add_batch_write_procedure if is_write else _mgp.Module.add_batch_read_procedure
    func_sig = inspect.signature(func)
    func_params = tuple(func_sig.parameters.values())

    initializer_sig = inspect.signature(initializer)
    initializer_params = tuple(initializer_sig.parameters.values())

    assert (
        func_params and initializer_params or not func_params and not initializer_params
    ), "Both function params and initializer params must exist or not exist"

    assert len(func_params) == len(initializer_params), "Number of params must be same"

    assert initializer_sig.return_annotation is initializer_sig.empty, "Initializer can't return anything"

    if func_params and func_params[0].annotation is ProcCtx:
        assert (
            initializer_params and initializer_params[0].annotation is ProcCtx
        ), "Initializer must have mgp.ProcCtx as first parameter"

        @wraps(func)
        def wrapper_func(graph, args):
            return func(ProcCtx(graph), *args)

        @wraps(initializer)
        def wrapper_initializer(graph, args):
            return initializer(ProcCtx(graph), *args)

        func_params = func_params[1:]
        initializer_params = initializer_params[1:]
        mgp_proc = register_func(_mgp._MODULE, wrapper_func, wrapper_initializer, cleanup)
    else:

        @wraps(func)
        def wrapper_func(graph, args):
            return func(*args)

        @wraps(initializer)
        def wrapper_initializer(graph, args):
            return initializer(*args)

        mgp_proc = register_func(_mgp._MODULE, wrapper_func, wrapper_initializer, cleanup)

    for func_param, initializer_param in zip(func_params, initializer_params):
        func_param_name = func_param.name
        func_param_type_ = func_param.annotation
        if func_param_type_ is func_param.empty:
            func_param_type_ = object
        initializer_param_type_ = initializer_param.annotation
        if initializer_param.annotation is initializer_param.empty:
            initializer_param_type_ = object

        assert _is_typing_same(
            func_param_type_, initializer_param_type_
        ), "Types of initializer and function must be same"

        func_cypher_type = _typing_to_cypher_type(func_param_type_)
        if func_param.default is func_param.empty:
            mgp_proc.add_arg(func_param_name, func_cypher_type)
        else:
            mgp_proc.add_opt_arg(func_param_name, func_cypher_type, func_param.default)

    if func_sig.return_annotation is not func_sig.empty:
        record = func_sig.return_annotation
        if not isinstance(record, Record):
            raise TypeError("Expected '{}' to return 'mgp.Record', got '{}'".format(func.__name__, type(record)))
        for name, type_ in record.fields.items():
            if isinstance(type_, Deprecated):
                cypher_type = _typing_to_cypher_type(type_.field_type)
                mgp_proc.add_deprecated_result(name, cypher_type)
            else:
                mgp_proc.add_result(name, _typing_to_cypher_type(type_))
    return func


def add_batch_write_proc(func: typing.Callable[..., Record], initializer: typing.Callable, cleanup: typing.Callable):
    return _register_batch_proc(func, initializer, cleanup, True)


def add_batch_read_proc(func: typing.Callable[..., Record], initializer: typing.Callable, cleanup: typing.Callable):
    return _register_batch_proc(func, initializer, cleanup, False)
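The contract enforced by the asserts above: the batch function and its initializer share a parameter list, the initializer returns nothing, and the cleanup takes no arguments. A minimal registration sketch; the function bodies and the end-of-stream convention are illustrative, not taken from this diff:

```python
import mgp

def init_stream(ctx: mgp.ProcCtx, limit: int):
    ...  # allocate per-stream state; signature mirrors the batch function

def next_batch(ctx: mgp.ProcCtx, limit: int) -> mgp.Record(value=int):
    ...  # emit the next chunk of records on each invocation

def close_stream():
    ...  # release whatever init_stream allocated; takes no parameters

mgp.add_batch_read_proc(next_batch, init_stream, close_stream)
```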
class InvalidMessageError(Exception):
    """
    Signals using a message instance outside of the registered transformation.
init
@@ -1,20 +1,17 @@
-#!/bin/bash -e
+#!/opt/homebrew/Cellar/bash/5.1.16/bin/bash -e
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
cd "$DIR"
source "$DIR/environment/util.sh"
DISTRO=$(operating_system)
ARCHITECTURE=$(architecture)
function print_help () {
    echo "Usage: $0 [OPTION]"
    echo -e "Check for missing packages and setup the project.\n"
    echo "Optional arguments:"
    echo -e "  -h\tdisplay this help and exit"
    echo -e "  --without-libs-setup\tskip the step for setting up libs"
-    echo -e "  --ci\tscript is being run inside ci"
+    echo -e "  --wsl-quicklisp-proxy \"host:port\"\tquicklisp HTTP proxy (this flag + HTTP proxy are required on WSL)"
}

function setup_virtualenv () {
@@ -35,22 +32,28 @@ function setup_virtualenv () {
    popd > /dev/null
}

wsl_quicklisp_proxy=""
setup_libs=true
ci=false
if [[ $# -eq 1 && "$1" == "-h" ]]; then
    print_help
    exit 0
else
    while(($#)); do
        case "$1" in
            --wsl-quicklisp-proxy)
                shift
                if [[ $# -eq 0 ]]; then
                    echo "Missing proxy URL"
                    print_help
                    exit 1
                fi
                wsl_quicklisp_proxy=":proxy \"http://$1/\""
                shift
            ;;
            --without-libs-setup)
                shift
                setup_libs=false
            ;;
            --ci)
                shift
                ci=true
            ;;
            *)
                # unknown option
                echo "Invalid argument provided: $1"
@@ -61,83 +64,82 @@ else
    done
fi

-if [ "${ARCHITECTURE}" = "arm64" ] || [ "${ARCHITECTURE}" = "aarch64" ]; then
-    OS_SCRIPT=$DIR/environment/os/$DISTRO-arm.sh
-else
-    OS_SCRIPT=$DIR/environment/os/$DISTRO.sh
-fi
-echo "ALL BUILD PACKAGES: $($OS_SCRIPT list MEMGRAPH_BUILD_DEPS)"
-$OS_SCRIPT check MEMGRAPH_BUILD_DEPS
-echo "All packages are in-place..."
+# DISTRO=$(operating_system)
+# ARCHITECTURE=$(architecture)
+# if [ "${ARCHITECTURE}" = "arm64" ] || [ "${ARCHITECTURE}" = "aarch64" ]; then
+#     OS_SCRIPT=$DIR/environment/os/$DISTRO-arm.sh
+# else
+#     OS_SCRIPT=$DIR/environment/os/$DISTRO.sh
+# fi
+# echo "ALL BUILD PACKAGES: $($OS_SCRIPT list MEMGRAPH_BUILD_DEPS)"
+# $OS_SCRIPT check MEMGRAPH_BUILD_DEPS
+# echo "All packages are in-place..."

# create a default build directory
mkdir -p ./build
# quicklisp package manager for Common Lisp
quicklisp_install_dir="$HOME/quicklisp"
if [[ -v QUICKLISP_HOME ]]; then
quicklisp_install_dir="${QUICKLISP_HOME}"
fi
if [[ ! -f "${quicklisp_install_dir}/setup.lisp" ]]; then
wget -nv https://beta.quicklisp.org/quicklisp.lisp -O quicklisp.lisp || exit 1
echo \
"
(load \"${DIR}/quicklisp.lisp\")
(quicklisp-quickstart:install $wsl_quicklisp_proxy :path \"${quicklisp_install_dir}\")
" | sbcl --script || exit 1
rm -rf quicklisp.lisp || exit 1
fi
# TODO(gitbuda): -T doesn't work on Mac
ln -fs "$DIR/src/lisp" "${quicklisp_install_dir}/local-projects/lcp"
# Install LCP dependencies
# TODO: We should at some point cache or have a mirror of packages we use.
# TODO: move the installation of LCP's dependencies into ./setup.sh
echo \
"
(load \"${quicklisp_install_dir}/setup.lisp\")
(ql:quickload '(:lcp :lcp/test) :silent t)
" | sbcl --script
if [[ "$setup_libs" == "true" ]]; then if [[ "$setup_libs" == "true" ]]; then
# Setup libs (download). # Setup libs (download).
cd libs cd libs
./cleanup.sh ./cleanup.sh
./setup.sh ./setup.sh
cd .. cd ..
fi fi
-# Fix for centos 7 during release
-if [[ "$ci" == "false" ]]; then
-    if [ "${DISTRO}" = "centos-7" ] || [ "${DISTRO}" = "debian-11" ] || [ "${DISTRO}" = "amzn-2" ]; then
-        if python3 -m pip show virtualenv >/dev/null 2>/dev/null; then
-            python3 -m pip uninstall -y virtualenv
-        fi
-        python3 -m pip install virtualenv
-    fi
-fi
-
-# setup gql_behave dependencies
-setup_virtualenv tests/gql_behave
-
-# setup stress dependencies
-setup_virtualenv tests/stress
-
-# setup integration/ldap dependencies
-setup_virtualenv tests/integration/ldap
-
-# Setup tests dependencies.
-# NOTE: This is commented out because of the build order (at the time of
-# execution mgclient is not built yet) which makes this setup fail. mgclient
-# is built during the make phase. The tests/setup.sh is called under GHA CI
-# jobs.
-# cd tests
-# ./setup.sh
-# cd ..
-# TODO(gitbuda): Remove setup_virtualenv, replace it with tests/ve3. Take care
-# of the build order because tests/setup.py builds pymgclient which depends on
-# mgclient which is built after this script by calling make.
-
-echo "Done installing dependencies for Memgraph"
-
-echo "Linking git hooks OR skip if .git folder is not there"
-if [ -d "$DIR/.git" ]; then
-    for hook in $(find $DIR/.githooks -type f -printf "%f\n"); do
-        ln -s -f "$DIR/.githooks/$hook" "$DIR/.git/hooks/$hook"
-        echo "Added $hook hook"
-    done;
-else
-    echo "WARNING: .git folder not present, skip adding hooks"
-fi
-
-# Install precommit hook except on old operating systems because we don't
-# develop on them -> pre-commit hook not required -> we can use latest
-# packages.
-if [[ "$ci" == "false" ]]; then
-    if [ "${DISTRO}" != "centos-7" ] && [ "$DISTRO" != "debian-10" ] && [ "${DISTRO}" != "ubuntu-18.04" ] && [ "${DISTRO}" != "amzn-2" ]; then
-        python3 -m pip install pre-commit
-        python3 -m pre_commit install
-        # Install py format tools for usage during the development.
-        echo "Install black formatter"
-        python3 -m pip install black==23.1.*
-        echo "Install isort"
-        python3 -m pip install isort==5.12.*
-    fi
-fi
-
-# Link `include/mgp.py` with `release/mgp/mgp.py`
-ln -v -f include/mgp.py release/mgp/mgp.py
+# # setup gql_behave dependencies
+# setup_virtualenv tests/gql_behave
+#
+# # setup stress dependencies
+# setup_virtualenv tests/stress
+#
+# # setup integration/ldap dependencies
+# setup_virtualenv tests/integration/ldap
+#
+# # Setup tests dependencies.
+# # cd tests
+# # ./setup.sh
+# # cd ..
+# # TODO(gitbuda): Remove setup_virtualenv, replace it with tests/ve3. Take care
+# # of the build order because tests/setup.py builds pymgclient which depends on
+# # mgclient which is built after this script by calling make.
+#
+# echo "Done installing dependencies for Memgraph"
+#
+# echo "Linking git hooks"
+# for hook in $(find $DIR/.githooks -type f -printf "%f\n"); do
+# ln -s -f "$DIR/.githooks/$hook" "$DIR/.git/hooks/$hook"
+# echo "Added $hook hook"
+# done;
+#
+# # Install precommit hook
+# python3 -m pip install pre-commit
+# python3 -m pre_commit install
+#
+# ln -v -f include/mgp.py release/mgp/mgp.py
+# # Link `include/mgp.py` with `release/mgp/mgp.py`
libs/.gitignore
@@ -6,5 +6,3 @@
!__main.cpp
!pulsar.patch
!antlr4.10.1.patch
!rocksdb8.1.1.patch
!nuraft2.1.0.patch
libs/CMakeLists.txt
@@ -4,8 +4,7 @@
include(GNUInstallDirs)
include(ProcessorCount)
ProcessorCount(NPROC)
-if (NPROC EQUAL 0)
+if(NPROC EQUAL 0)
  set(NPROC 1)
endif()
@@ -13,10 +12,9 @@ find_package(Boost 1.78 REQUIRED)
find_package(BZip2 1.0.6 REQUIRED)
find_package(Threads REQUIRED)
set(GFLAGS_NOTHREADS OFF)
# NOTE: config/generate.py depends on the gflags help XML format.
find_package(gflags REQUIRED)
-find_package(fmt 8.0.1 REQUIRED)
+find_package(fmt 8.0.1)
find_package(Jemalloc REQUIRED)
find_package(ZLIB 1.2.11 REQUIRED)

set(LIB_DIR ${CMAKE_CURRENT_SOURCE_DIR})
@@ -25,27 +23,23 @@ set(LIB_DIR ${CMAKE_CURRENT_SOURCE_DIR})
function(import_header_library name include_dir)
  add_library(${name} INTERFACE IMPORTED GLOBAL)
  set_property(TARGET ${name} PROPERTY
               INTERFACE_INCLUDE_DIRECTORIES ${include_dir})
  string(TOUPPER ${name} _upper_name)
  set(${_upper_name}_INCLUDE_DIR ${include_dir} CACHE FILEPATH
      "Path to ${name} include directory" FORCE)
  mark_as_advanced(${_upper_name}_INCLUDE_DIR)
  add_library(lib::${name} ALIAS ${name})
endfunction(import_header_library)

function(import_library name type location include_dir)
  add_library(${name} ${type} IMPORTED GLOBAL)
-  if (${ARGN})
+  if(${ARGN})
    # Optional argument is the name of the external project that we need to
    # depend on.
    add_dependencies(${name} ${ARGN0})
  else()
    add_dependencies(${name} ${name}-proj)
  endif()
  set_property(TARGET ${name} PROPERTY IMPORTED_LOCATION ${location})
  # We need to create the include directory first in order to be able to add it
  # as an include directory. The header files in the include directory will be
  # generated later during the build process.
@@ -65,59 +59,43 @@ function(add_external_project name)
  set(options NO_C_COMPILER)
  set(one_value_kwargs SOURCE_DIR BUILD_IN_SOURCE)
  set(multi_value_kwargs CMAKE_ARGS DEPENDS INSTALL_COMMAND BUILD_COMMAND
                         CONFIGURE_COMMAND)
  cmake_parse_arguments(KW "${options}" "${one_value_kwargs}" "${multi_value_kwargs}" ${ARGN})
  set(source_dir ${CMAKE_CURRENT_SOURCE_DIR}/${name})
-  if (KW_SOURCE_DIR)
+  if(KW_SOURCE_DIR)
    set(source_dir ${KW_SOURCE_DIR})
  endif()
  set(build_in_source 0)
-  if (KW_BUILD_IN_SOURCE)
+  if(KW_BUILD_IN_SOURCE)
    set(build_in_source ${KW_BUILD_IN_SOURCE})
  endif()
-  if (NOT KW_NO_C_COMPILER)
+  if(NOT KW_NO_C_COMPILER)
    set(KW_CMAKE_ARGS -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} ${KW_CMAKE_ARGS})
  endif()
  ExternalProject_Add(${name}-proj DEPENDS ${KW_DEPENDS}
                      PREFIX ${source_dir} SOURCE_DIR ${source_dir}
                      BUILD_IN_SOURCE ${build_in_source}
                      CONFIGURE_COMMAND ${KW_CONFIGURE_COMMAND}
                      CMAKE_ARGS -DCMAKE_BUILD_TYPE=Release
                                 -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
                                 -DCMAKE_INSTALL_PREFIX=${source_dir}
                                 ${KW_CMAKE_ARGS}
                      INSTALL_COMMAND ${KW_INSTALL_COMMAND}
                      BUILD_COMMAND ${KW_BUILD_COMMAND})
endfunction(add_external_project)

# Calls `add_external_project`, sets NAME_LIBRARY, NAME_INCLUDE_DIR variables
# and adds the library via `import_library`.
macro(import_external_library name type library_location include_dir)
  add_external_project(${name} ${ARGN})
  string(TOUPPER ${name} _upper_name)
  set(${_upper_name}_LIBRARY ${library_location} CACHE FILEPATH
      "Path to ${name} library" FORCE)
  set(${_upper_name}_INCLUDE_DIR ${include_dir} CACHE FILEPATH
      "Path to ${name} include directory" FORCE)
  mark_as_advanced(${_upper_name}_LIBRARY ${_upper_name}_INCLUDE_DIR)
  import_library(${name} ${type} ${${_upper_name}_LIBRARY} ${${_upper_name}_INCLUDE_DIR})
endmacro(import_external_library)

macro(set_path_external_library name type library_location include_dir)
  string(TOUPPER ${name} _upper_name)
  set(${_upper_name}_LIBRARY ${library_location} CACHE FILEPATH
      "Path to ${name} library" FORCE)
  set(${_upper_name}_INCLUDE_DIR ${include_dir} CACHE FILEPATH
      "Path to ${name} include directory" FORCE)
-  mark_as_advanced(${name}_LIBRARY ${name}_INCLUDE_DIR)
-endmacro(set_path_external_library)
+  mark_as_advanced(${_upper_name}_LIBRARY ${_upper_name}_INCLUDE_DIR)
+  import_library(${name} ${type} ${${_upper_name}_LIBRARY} ${${_upper_name}_INCLUDE_DIR})
+endmacro(import_external_library)
# setup antlr
import_external_library(antlr4 STATIC
@@ -125,10 +103,10 @@ import_external_library(antlr4 STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/antlr4/runtime/Cpp/include/antlr4-runtime
  SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/antlr4/runtime/Cpp
  CMAKE_ARGS # http://stackoverflow.com/questions/37096062/get-a-basic-c-program-to-compile-using-clang-on-ubuntu-16/38385967#38385967
             -DWITH_LIBCXX=OFF # because of debian bug
             -DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=true
             -DCMAKE_CXX_STANDARD=20
  -DANTLR_BUILD_CPP_TESTS=OFF
  BUILD_COMMAND $(MAKE) antlr4_static
  INSTALL_COMMAND $(MAKE) install)
@@ -136,7 +114,6 @@ import_external_library(antlr4 STATIC
import_external_library(benchmark STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/benchmark/${CMAKE_INSTALL_LIBDIR}/libbenchmark.a
  ${CMAKE_CURRENT_SOURCE_DIR}/benchmark/include
  # Skip testing. The tests don't compile with Clang 8.
  CMAKE_ARGS -DBENCHMARK_ENABLE_TESTING=OFF)
@@ -152,15 +129,15 @@ add_subdirectory(rapidcheck EXCLUDE_FROM_ALL)
# setup google test
add_external_project(gtest SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/googletest)
set(GTEST_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/googletest/include
    CACHE PATH "Path to gtest and gmock include directory" FORCE)
set(GMOCK_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgmock.a
    CACHE FILEPATH "Path to gmock library" FORCE)
set(GMOCK_MAIN_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgmock_main.a
    CACHE FILEPATH "Path to gmock_main library" FORCE)
set(GTEST_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgtest.a
    CACHE FILEPATH "Path to gtest library" FORCE)
set(GTEST_MAIN_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgtest_main.a
    CACHE FILEPATH "Path to gtest_main library" FORCE)
mark_as_advanced(GTEST_INCLUDE_DIR GMOCK_LIBRARY GMOCK_MAIN_LIBRARY GTEST_LIBRARY GTEST_MAIN_LIBRARY)
import_library(gtest STATIC ${GTEST_LIBRARY} ${GTEST_INCLUDE_DIR} gtest-proj)
import_library(gtest_main STATIC ${GTEST_MAIN_LIBRARY} ${GTEST_INCLUDE_DIR} gtest-proj)
@@ -178,10 +155,10 @@ import_external_library(rocksdb STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/rocksdb/lib/librocksdb.a
  ${CMAKE_CURRENT_SOURCE_DIR}/rocksdb/include
  CMAKE_ARGS -DUSE_RTTI=ON
             -DWITH_TESTS=OFF
             -DGFLAGS_NOTHREADS=OFF
             -DCMAKE_INSTALL_LIBDIR=lib
             -DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=true
  BUILD_COMMAND $(MAKE) rocksdb)

# Setup libbcrypt
@@ -190,8 +167,8 @@ import_external_library(libbcrypt STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/libbcrypt
  CONFIGURE_COMMAND sed s/-Wcast-align// -i ${CMAKE_CURRENT_SOURCE_DIR}/libbcrypt/crypt_blowfish/Makefile
  BUILD_COMMAND make -C ${CMAKE_CURRENT_SOURCE_DIR}/libbcrypt
                CC=${CMAKE_C_COMPILER}
                CXX=${CMAKE_CXX_COMPILER}
  INSTALL_COMMAND true)

# Setup mgclient
@@ -199,16 +176,16 @@ import_external_library(mgclient STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/mgclient/lib/libmgclient.a
  ${CMAKE_CURRENT_SOURCE_DIR}/mgclient/include
  CMAKE_ARGS -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
             -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
             -DBUILD_TESTING=OFF
             -DBUILD_CPP_BINDINGS=ON)
find_package(OpenSSL REQUIRED)
target_link_libraries(mgclient INTERFACE ${OPENSSL_LIBRARIES})

add_external_project(mgconsole
  SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/mgconsole
  CMAKE_ARGS
  -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_BINARY_DIR}
  BUILD_COMMAND $(MAKE) mgconsole)
add_custom_target(mgconsole DEPENDS mgconsole-proj)
@@ -225,15 +202,14 @@ import_external_library(librdkafka STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/librdkafka/lib/librdkafka.a
  ${CMAKE_CURRENT_SOURCE_DIR}/librdkafka/include
  CMAKE_ARGS -DRDKAFKA_BUILD_STATIC=ON
             -DRDKAFKA_BUILD_EXAMPLES=OFF
             -DRDKAFKA_BUILD_TESTS=OFF
             -DWITH_ZSTD=OFF
             -DENABLE_LZ4_EXT=OFF
             -DCMAKE_INSTALL_LIBDIR=lib
             -DWITH_SSL=ON
             # If we want SASL, we need to install it on build machines
             -DWITH_SASL=OFF)
target_link_libraries(librdkafka INTERFACE ${OPENSSL_LIBRARIES} ZLIB::ZLIB)

import_library(librdkafka++ STATIC
@@ -254,24 +230,24 @@ import_external_library(pulsar STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/pulsar/install/include
  BUILD_IN_SOURCE 1
  CONFIGURE_COMMAND cmake pulsar-client-cpp
                    -DCMAKE_INSTALL_PREFIX=${CMAKE_CURRENT_SOURCE_DIR}/pulsar/install
                    -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
                    -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
                    -DBUILD_DYNAMIC_LIB=OFF
                    -DBUILD_STATIC_LIB=ON
                    -DBUILD_TESTS=OFF
                    -DLINK_STATIC=ON
                    -DPROTOC_PATH=${PROTOBUF_ROOT}/bin/protoc
                    -DBOOST_ROOT=${BOOST_ROOT}
                    -DCMAKE_PREFIX_PATH=${PROTOBUF_ROOT}
                    -DProtobuf_INCLUDE_DIRS=${PROTOBUF_ROOT}/include
                    -DBUILD_PYTHON_WRAPPER=OFF
                    -DBUILD_PERF_TOOLS=OFF
                    -DUSE_LOG4CXX=OFF
  BUILD_COMMAND $(MAKE) pulsarStaticWithDeps)
add_dependencies(pulsar-proj protobuf)
-if(${MG_ARCH} STREQUAL "ARM64")
+if (${MG_ARCH} STREQUAL "ARM64")
  set(MG_LIBRDTSC_CMAKE_ARGS -DLIBRDTSC_ARCH_x86=OFF -DLIBRDTSC_ARCH_ARM64=ON)
endif()
@@ -280,52 +256,3 @@ import_external_library(librdtsc STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/librdtsc/include
  CMAKE_ARGS ${MG_LIBRDTSC_CMAKE_ARGS}
  BUILD_COMMAND $(MAKE) rdtsc)
# setup ctre
import_header_library(ctre ${CMAKE_CURRENT_SOURCE_DIR})
# setup absl (cmake sub_directory tolerant)
set(ABSL_PROPAGATE_CXX_STD ON)
add_subdirectory(absl EXCLUDE_FROM_ALL)
# set Jemalloc
set_path_external_library(jemalloc STATIC
${CMAKE_CURRENT_SOURCE_DIR}/jemalloc/lib/libjemalloc.a
${CMAKE_CURRENT_SOURCE_DIR}/jemalloc/include/)
import_header_library(rangev3 ${CMAKE_CURRENT_SOURCE_DIR}/rangev3/include)
ExternalProject_Add(mgcxx-proj
PREFIX mgcxx-proj
GIT_REPOSITORY https://github.com/memgraph/mgcxx
GIT_TAG "v0.0.4"
CMAKE_ARGS
"-DCMAKE_INSTALL_PREFIX=<INSTALL_DIR>"
"-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}"
"-DENABLE_TESTS=OFF"
"-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}"
"-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}"
INSTALL_DIR "${PROJECT_BINARY_DIR}/mgcxx"
)
ExternalProject_Get_Property(mgcxx-proj install_dir)
set(MGCXX_ROOT ${install_dir})
add_library(tantivy_text_search STATIC IMPORTED GLOBAL)
add_dependencies(tantivy_text_search mgcxx-proj)
set_property(TARGET tantivy_text_search PROPERTY IMPORTED_LOCATION ${MGCXX_ROOT}/lib/libtantivy_text_search.a)
add_library(mgcxx_text_search STATIC IMPORTED GLOBAL)
add_dependencies(mgcxx_text_search mgcxx-proj)
set_property(TARGET mgcxx_text_search PROPERTY IMPORTED_LOCATION ${MGCXX_ROOT}/lib/libmgcxx_text_search.a)
# We need to create the include directory first in order to be able to add it
# as an include directory. The header files in the include directory will be
# generated later during the build process.
file(MAKE_DIRECTORY ${MGCXX_ROOT}/include)
set_property(TARGET mgcxx_text_search PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${MGCXX_ROOT}/include)
# Setup NuRaft
import_external_library(nuraft STATIC
${CMAKE_CURRENT_SOURCE_DIR}/nuraft/lib/libnuraft.a
${CMAKE_CURRENT_SOURCE_DIR}/nuraft/include/)
find_package(OpenSSL REQUIRED)
target_link_libraries(nuraft INTERFACE ${OPENSSL_LIBRARIES})
libs/librdtsc.patch
@@ -5,7 +5,7 @@ index ee9b58c..31359a9 100644
@@ -48,7 +48,7 @@ option(LIBRDTSC_USE_PMU "Enables PMU usage on ARM platforms" OFF)
# | Library Build and Install Properties |
# +--------------------------------------------------------+
-add_library(rdtsc SHARED
+add_library(rdtsc
src/cycles.c
@@ -14,7 +14,7 @@ index ee9b58c..31359a9 100644
@@ -72,15 +72,6 @@ target_include_directories(rdtsc
PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include
)
-# Install directory changes depending on build mode
-if (CMAKE_BUILD_TYPE MATCHES "^[Dd]ebug")
- # During debug, the library will be installed into a local directory
@@ -27,15 +27,3 @@ index ee9b58c..31359a9 100644
# Specifying what to export when installing (GNUInstallDirs required)
install(TARGETS rdtsc
EXPORT librstsc-config
diff --git a/include/librdtsc/common_timer.h b/include/librdtsc/common_timer.h
index a6922d8..080dc77 100644
--- a/include/librdtsc/common_timer.h
+++ b/include/librdtsc/common_timer.h
@@ -2,6 +2,7 @@
#define LIBRDTSC_COMMON_TIMER_H
#include <librdtsc/common.h>
+#include <librdtsc/cycles.h>
extern uint64_t rdtsc_get_tsc_freq_arch();
extern uint64_t rdtsc_get_tsc_freq();
libs/nuraft2.1.0.patch
@@ -1,24 +0,0 @@
diff --git a/include/libnuraft/asio_service_options.hxx b/include/libnuraft/asio_service_options.hxx
index 8fe1ec9..9497355 100644
--- a/include/libnuraft/asio_service_options.hxx
+++ b/include/libnuraft/asio_service_options.hxx
@@ -17,6 +17,7 @@ limitations under the License.
#pragma once
+#include <cstdint>
#include <functional>
#include <string>
#include <system_error>
diff --git a/include/libnuraft/callback.hxx b/include/libnuraft/callback.hxx
index 7b71624..d48c1e2 100644
--- a/include/libnuraft/callback.hxx
+++ b/include/libnuraft/callback.hxx
@@ -18,6 +18,7 @@ limitations under the License.
#ifndef _CALLBACK_H_
#define _CALLBACK_H_
+#include <cstdint>
#include <functional>
#include <string>
libs/rocksdb.patch
@@ -0,0 +1,21 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 6761929..6a369af 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -220,6 +220,7 @@ else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -momit-leaf-frame-pointer")
endif()
endif()
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-deprecated-copy -Wno-unused-but-set-variable")
endif()
include(CheckCCompilerFlag)
@@ -997,7 +998,7 @@ if(NOT WIN32 OR ROCKSDB_INSTALL_ON_WINDOWS)
if(ROCKSDB_BUILD_SHARED)
install(
- TARGETS ${ROCKSDB_SHARED_LIB}
+ TARGETS ${ROCKSDB_SHARED_LIB} OPTIONAL
EXPORT RocksDBTargets
COMPONENT runtime
ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
libs/rocksdb8.1.1.patch
@@ -1,13 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 598c728..816c705 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1242,7 +1242,7 @@ if(NOT WIN32 OR ROCKSDB_INSTALL_ON_WINDOWS)
if(ROCKSDB_BUILD_SHARED)
install(
- TARGETS ${ROCKSDB_SHARED_LIB}
+ TARGETS ${ROCKSDB_SHARED_LIB} OPTIONAL
EXPORT RocksDBTargets
COMPONENT runtime
ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
libs/setup.sh
@@ -1,4 +1,4 @@
-#!/bin/bash -e
+#!/opt/homebrew/Cellar/bash/5.1.16/bin/bash -e
# Download external dependencies.
# Don't forget to add/update the license in release/third-party-licenses of added/updated libs!
@@ -71,8 +71,8 @@ file_get_try_double () {
    if [ -z "$primary_url" ]; then echo "Primary should not be empty." && exit 1; fi
    if [ -z "$secondary_url" ]; then echo "Secondary should not be empty." && exit 1; fi
    filename="$(basename "$secondary_url")"
-    # Redirect primary/cache to /dev/null to make it less confusing for a new contributor because only CI has access to the cache.
-    wget -nv "$primary_url" -O "$filename" >/dev/null 2>&1 || wget -nv "$secondary_url" -O "$filename" || exit 1
+    wget -nv "$primary_url" -O "$filename" || wget -nv "$secondary_url" -O "$filename" || exit 1
+    echo ""
}

repo_clone_try_double () {
@@ -86,8 +86,8 @@ repo_clone_try_double () {
    if [ -z "$secondary_url" ]; then echo "Secondary should not be empty." && exit 1; fi
    if [ -z "$folder_name" ]; then echo "Clone folder should not be empty." && exit 1; fi
    if [ -z "$ref" ]; then echo "Git clone ref should not be empty." && exit 1; fi
-    # Redirect primary/cache to /dev/null to make it less confusing for a new contributor because only CI has access to the cache.
-    clone "$primary_url" "$folder_name" "$ref" "$shallow" >/dev/null 2>&1 || clone "$secondary_url" "$folder_name" "$ref" "$shallow" || exit 1
+    clone "$primary_url" "$folder_name" "$ref" "$shallow" || clone "$secondary_url" "$folder_name" "$ref" "$shallow" || exit 1
+    echo ""
}

# List all dependencies.
@@ -117,17 +117,11 @@ declare -A primary_urls=(
  ["mgconsole"]="http://$local_cache_host/git/mgconsole.git"
  ["spdlog"]="http://$local_cache_host/git/spdlog"
  ["nlohmann"]="http://$local_cache_host/file/nlohmann/json/4f8fba14066156b73f1189a2b8bd568bde5284c5/single_include/nlohmann/json.hpp"
-  ["neo4j"]="http://$local_cache_host/file/neo4j-community-5.6.0-unix.tar.gz"
+  ["neo4j"]="http://$local_cache_host/file/neo4j-community-3.2.3-unix.tar.gz"
  ["librdkafka"]="http://$local_cache_host/git/librdkafka.git"
  ["protobuf"]="http://$local_cache_host/git/protobuf.git"
  ["pulsar"]="http://$local_cache_host/git/pulsar.git"
  ["librdtsc"]="http://$local_cache_host/git/librdtsc.git"
["ctre"]="http://$local_cache_host/file/hanickadot/compile-time-regular-expressions/v3.7.2/single-header/ctre.hpp"
["absl"]="http://$local_cache_host/git/abseil-cpp.git"
["jemalloc"]="http://$local_cache_host/git/jemalloc.git"
["range-v3"]="http://$local_cache_host/git/range-v3.git"
["nuraft"]="http://$local_cache_host/git/NuRaft.git"
["asio"]="http://$local_cache_host/git/asio.git"
)

# The goal of secondary urls is to have links to the "source of truth" of
@@ -145,20 +139,14 @@ declare -A secondary_urls=(
  ["rocksdb"]="https://github.com/facebook/rocksdb.git"
  ["mgclient"]="https://github.com/memgraph/mgclient.git"
  ["pymgclient"]="https://github.com/memgraph/pymgclient.git"
-  ["mgconsole"]="https://github.com/memgraph/mgconsole.git"
+  ["mgconsole"]="http://github.com/memgraph/mgconsole.git"
  ["spdlog"]="https://github.com/gabime/spdlog"
  ["nlohmann"]="https://raw.githubusercontent.com/nlohmann/json/4f8fba14066156b73f1189a2b8bd568bde5284c5/single_include/nlohmann/json.hpp"
-  ["neo4j"]="https://dist.neo4j.org/neo4j-community-5.6.0-unix.tar.gz"
+  ["neo4j"]="https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/neo4j-community-3.2.3-unix.tar.gz"
  ["librdkafka"]="https://github.com/edenhill/librdkafka.git"
  ["protobuf"]="https://github.com/protocolbuffers/protobuf.git"
  ["pulsar"]="https://github.com/apache/pulsar.git"
  ["librdtsc"]="https://github.com/gabrieleara/librdtsc.git"
["ctre"]="https://raw.githubusercontent.com/hanickadot/compile-time-regular-expressions/v3.7.2/single-header/ctre.hpp"
["absl"]="https://github.com/abseil/abseil-cpp.git"
["jemalloc"]="https://github.com/jemalloc/jemalloc.git"
["range-v3"]="https://github.com/ericniebler/range-v3.git"
["nuraft"]="https://github.com/eBay/NuRaft.git"
["asio"]="https://github.com/chriskohlhoff/asio.git"
) )
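The repo_clone_try_double and file_get_try_double helpers used below are defined earlier in this script; roughly, they fetch from the local-cache URL first and fall back to the public "source of truth" URL. A minimal sketch of that pattern, assuming these argument orders (the trailing shallow-clone flag some call sites pass is omitted):

# Sketch only, not the script's actual definitions: try the local cache,
# then fall back to the public URL on any failure.
repo_clone_try_double () {
    local primary="$1" secondary="$2" folder="$3" ref="$4"
    if ! git clone "$primary" "$folder"; then
        rm -rf "$folder"
        git clone "$secondary" "$folder"
    fi
    git -C "$folder" checkout "$ref"
}
file_get_try_double () {
    local primary="$1" secondary="$2"
    wget -nv "$primary" || wget -nv "$secondary"
}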
# antlr # antlr
@ -170,11 +158,12 @@ pushd antlr4
git apply ../antlr4.10.1.patch git apply ../antlr4.10.1.patch
popd popd
cppitertools_ref="v2.1" # 2021-01-15 # cppitertools v2.0 2019-12-23
cppitertools_ref="cb3635456bdb531121b82b4d2e3afc7ae1f56d47"
repo_clone_try_double "${primary_urls[cppitertools]}" "${secondary_urls[cppitertools]}" "cppitertools" "$cppitertools_ref" repo_clone_try_double "${primary_urls[cppitertools]}" "${secondary_urls[cppitertools]}" "cppitertools" "$cppitertools_ref"
# rapidcheck # rapidcheck
rapidcheck_tag="1c91f40e64d87869250cfb610376c629307bf77d" # (2023-08-15) rapidcheck_tag="7bc7d302191a4f3d0bf005692677126136e02f60" # (2020-05-04)
repo_clone_try_double "${primary_urls[rapidcheck]}" "${secondary_urls[rapidcheck]}" "rapidcheck" "$rapidcheck_tag" repo_clone_try_double "${primary_urls[rapidcheck]}" "${secondary_urls[rapidcheck]}" "rapidcheck" "$rapidcheck_tag"
# google benchmark # google benchmark
@ -182,7 +171,7 @@ benchmark_tag="v1.6.0"
repo_clone_try_double "${primary_urls[gbenchmark]}" "${secondary_urls[gbenchmark]}" "benchmark" "$benchmark_tag" true repo_clone_try_double "${primary_urls[gbenchmark]}" "${secondary_urls[gbenchmark]}" "benchmark" "$benchmark_tag" true
# google test # google test
googletest_tag="v1.14.0" googletest_tag="release-1.8.0"
repo_clone_try_double "${primary_urls[gtest]}" "${secondary_urls[gtest]}" "googletest" "$googletest_tag" true repo_clone_try_double "${primary_urls[gtest]}" "${secondary_urls[gtest]}" "googletest" "$googletest_tag" true
# libbcrypt # libbcrypt
@ -191,9 +180,9 @@ repo_clone_try_double "${primary_urls[libbcrypt]}" "${secondary_urls[libbcrypt]}
# neo4j # neo4j
file_get_try_double "${primary_urls[neo4j]}" "${secondary_urls[neo4j]}" file_get_try_double "${primary_urls[neo4j]}" "${secondary_urls[neo4j]}"
tar -xzf neo4j-community-5.6.0-unix.tar.gz tar -xzf neo4j-community-3.2.3-unix.tar.gz
mv neo4j-community-5.6.0 neo4j mv neo4j-community-3.2.3 neo4j
rm neo4j-community-5.6.0-unix.tar.gz rm neo4j-community-3.2.3-unix.tar.gz
# nlohmann json # nlohmann json
# We wget header instead of cloning repo since repo is huge (lots of test data). # We wget header instead of cloning repo since repo is huge (lots of test data).
@ -203,26 +192,26 @@ cd json
file_get_try_double "${primary_urls[nlohmann]}" "${secondary_urls[nlohmann]}" file_get_try_double "${primary_urls[nlohmann]}" "${secondary_urls[nlohmann]}"
cd .. cd ..
rocksdb_tag="v8.1.1" # (2023-04-21) rocksdb_tag="v6.14.6" # (2020-10-14)
repo_clone_try_double "${primary_urls[rocksdb]}" "${secondary_urls[rocksdb]}" "rocksdb" "$rocksdb_tag" true repo_clone_try_double "${primary_urls[rocksdb]}" "${secondary_urls[rocksdb]}" "rocksdb" "$rocksdb_tag" true
pushd rocksdb pushd rocksdb
git apply ../rocksdb8.1.1.patch git apply ../rocksdb.patch
popd popd
# mgclient # mgclient
mgclient_tag="v1.4.0" # (2022-06-14) mgclient_tag="v1.4.0" # (2022-06-14)
repo_clone_try_double "${primary_urls[mgclient]}" "${secondary_urls[mgclient]}" "mgclient" "$mgclient_tag" repo_clone_try_double "${primary_urls[mgclient]}" "${secondary_urls[mgclient]}" "mgclient" "$mgclient_tag"
sed -i 's/\${CMAKE_INSTALL_LIBDIR}/lib/' mgclient/src/CMakeLists.txt sed -e 's/\${CMAKE_INSTALL_LIBDIR}/lib/' mgclient/src/CMakeLists.txt
# pymgclient # pymgclient
pymgclient_tag="4f85c179e56302d46a1e3e2cf43509db65f062b3" # (2021-01-15) pymgclient_tag="4f85c179e56302d46a1e3e2cf43509db65f062b3" # (2021-01-15)
repo_clone_try_double "${primary_urls[pymgclient]}" "${secondary_urls[pymgclient]}" "pymgclient" "$pymgclient_tag" repo_clone_try_double "${primary_urls[pymgclient]}" "${secondary_urls[pymgclient]}" "pymgclient" "$pymgclient_tag"
# mgconsole # mgconsole
mgconsole_tag="v1.4.0" # (2023-05-21) mgconsole_tag="v1.1.0" # (2021-10-07)
repo_clone_try_double "${primary_urls[mgconsole]}" "${secondary_urls[mgconsole]}" "mgconsole" "$mgconsole_tag" true repo_clone_try_double "${primary_urls[mgconsole]}" "${secondary_urls[mgconsole]}" "mgconsole" "$mgconsole_tag" true
spdlog_tag="v1.12.0" # (2022-11-02) spdlog_tag="v1.9.2" # (2021-08-12)
repo_clone_try_double "${primary_urls[spdlog]}" "${secondary_urls[spdlog]}" "spdlog" "$spdlog_tag" true repo_clone_try_double "${primary_urls[spdlog]}" "${secondary_urls[spdlog]}" "spdlog" "$spdlog_tag" true
# librdkafka # librdkafka
@ -244,51 +233,9 @@ git apply ../pulsar.patch
popd popd
#librdtsc #librdtsc
# TODO(gitbuda): __always_inline doesn't work on Apple Clang 14
librdtsc_tag="v0.3" librdtsc_tag="v0.3"
repo_clone_try_double "${primary_urls[librdtsc]}" "${secondary_urls[librdtsc]}" "librdtsc" "$librdtsc_tag" true repo_clone_try_double "${primary_urls[librdtsc]}" "${secondary_urls[librdtsc]}" "librdtsc" "$librdtsc_tag" true
pushd librdtsc pushd librdtsc
git apply ../librdtsc.patch git apply ../librdtsc.patch
popd popd
#ctre
mkdir -p ctre
cd ctre
file_get_try_double "${primary_urls[ctre]}" "${secondary_urls[ctre]}"
cd ..
# abseil 20230125.3
absl_ref="20230125.3"
repo_clone_try_double "${primary_urls[absl]}" "${secondary_urls[absl]}" "absl" "$absl_ref"
# jemalloc ea6b3e973b477b8061e0076bb257dbd7f3faa756
JEMALLOC_COMMIT_VERSION="5.2.1"
repo_clone_try_double "${primary_urls[jemalloc]}" "${secondary_urls[jemalloc]}" "jemalloc" "$JEMALLOC_COMMIT_VERSION"
# This is a hack so that the CMake setup in libs/ can set the path and so
# that FindJemalloc can use Jemalloc_INCLUDE_DIR.
pushd jemalloc
./autogen.sh
# --disable-cxx: skip jemalloc's C++ integration (operator new/delete).
# --with-lg-page=12 / --with-lg-hugepage=21: assume 4KiB pages and 2MiB huge pages.
# --enable-shared=no: build only the static library, installed under $working_dir.
# --with-malloc-conf: bake these options in as jemalloc's build-time defaults.
MALLOC_CONF="background_thread:true,retain:false,percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:5000,dirty_decay_ms:5000" \
./configure \
    --disable-cxx \
    --with-lg-page=12 \
    --with-lg-hugepage=21 \
    --enable-shared=no --prefix=$working_dir \
    --with-malloc-conf="background_thread:true,retain:false,percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:5000,dirty_decay_ms:5000"
make -j$CPUS install
popd
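Options baked in with --with-malloc-conf are only build-time defaults; jemalloc still reads the MALLOC_CONF environment variable at run time, so individual runs can override them. A hypothetical example (the binary name is just a placeholder):

# Print allocator statistics on exit and relax the muzzy page decay,
# overriding the baked-in 5000ms value for this run only.
MALLOC_CONF="stats_print:true,muzzy_decay_ms:10000" ./memgraph --version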
#range-v3 release-0.12.0
range_v3_ref="release-0.12.0"
repo_clone_try_double "${primary_urls[range-v3]}" "${secondary_urls[range-v3]}" "rangev3" "$range_v3_ref"
# NuRaft
nuraft_tag="v2.1.0"
repo_clone_try_double "${primary_urls[nuraft]}" "${secondary_urls[nuraft]}" "nuraft" "$nuraft_tag" true
pushd nuraft
git apply ../nuraft2.1.0.patch
asio_tag="asio-1-29-0"
repo_clone_try_double "${primary_urls[asio]}" "${secondary_urls[asio]}" "asio" "$asio_tag" true
./prepare.sh
popd

View File

@ -36,7 +36,7 @@ ADDITIONAL USE GRANT: You may use the Licensed Work in accordance with the
3. using the Licensed Work to create a work or solution 3. using the Licensed Work to create a work or solution
which competes (or might reasonably be expected to which competes (or might reasonably be expected to
compete) with the Licensed Work. compete) with the Licensed Work.
CHANGE DATE: 2028-21-01 CHANGE DATE: 2026-27-04
CHANGE LICENSE: Apache License, Version 2.0 CHANGE LICENSE: Apache License, Version 2.0
For information about alternative licensing arrangements, please visit: https://memgraph.com/legal. For information about alternative licensing arrangements, please visit: https://memgraph.com/legal.

View File

@ -2,8 +2,8 @@ MEMGRAPH
ENTERPRISE LICENCE AGREEMENT ENTERPRISE LICENCE AGREEMENT
Memgraph Limited is registered in England under registration 10195084 and has its registered office at 90a High Street, Memgraph Limited is registered in England under registration 10195084 and has its registered office at Suite 4,
Hertfordshire, Berkhamsted, HP4 2BL United Kingdom ("Memgraph"). Ironstone House, Ironstone Way, Brixworth, Northampton, NN6 9UD (“Memgraph”).
Memgraph agrees to license and/or grant you (the “Customer”) access to the Software ( as defined below) and provide Memgraph agrees to license and/or grant you (the “Customer”) access to the Software ( as defined below) and provide

View File

@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
https://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,218 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--- LLVM Exceptions to the Apache 2.0 License ----
As an exception, if, as a result of your compiling your source code, portions
of this Software are embedded into an Object form of such source code, you
may redistribute such embedded portions in such Object form without complying
with the conditions of Sections 4(a), 4(b) and 4(d) of the License.
In addition, if you combine or link compiled forms of this Software with
software that is licensed under the GPLv2 ("Combined Software") and if a
court of competent jurisdiction determines that the patent provision (Section
3), the indemnity provision (Section 9) or other Section of the License
conflicts with the conditions of the GPLv2, you may retroactively and
prospectively choose to deem waived or otherwise exclude such Section(s) of
the License, but only in their entirety and only with respect to the Combined
Software.

View File

@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View File

@ -1,12 +0,0 @@
[tool.black]
line-length = 120
include = '\.pyi?$'
extend-exclude = '''
/(
| .git
| .__pycache__
| build
| libs
| .cache
)/
'''

View File

@ -6,8 +6,6 @@ project(memgraph_query_modules)
disallow_in_source_build() disallow_in_source_build()
find_package(fmt REQUIRED)
# Everything that is installed here should be under the "query_modules" component. # Everything that is installed here should be under the "query_modules" component.
set(CMAKE_INSTALL_DEFAULT_COMPONENT_NAME "query_modules") set(CMAKE_INSTALL_DEFAULT_COMPONENT_NAME "query_modules")
string(TOLOWER ${CMAKE_BUILD_TYPE} lower_build_type) string(TOLOWER ${CMAKE_BUILD_TYPE} lower_build_type)
@ -15,7 +13,6 @@ string(TOLOWER ${CMAKE_BUILD_TYPE} lower_build_type)
add_library(example_c SHARED example.c) add_library(example_c SHARED example.c)
target_include_directories(example_c PRIVATE ${CMAKE_SOURCE_DIR}/include) target_include_directories(example_c PRIVATE ${CMAKE_SOURCE_DIR}/include)
target_compile_options(example_c PRIVATE -Wall) target_compile_options(example_c PRIVATE -Wall)
target_link_libraries(example_c PRIVATE -static-libgcc -static-libstdc++)
# Strip C example in release build. # Strip C example in release build.
if (lower_build_type STREQUAL "release") if (lower_build_type STREQUAL "release")
add_custom_command(TARGET example_c POST_BUILD add_custom_command(TARGET example_c POST_BUILD
@ -31,7 +28,6 @@ install(FILES example.c DESTINATION lib/memgraph/query_modules/src)
add_library(example_cpp SHARED example.cpp) add_library(example_cpp SHARED example.cpp)
target_include_directories(example_cpp PRIVATE ${CMAKE_SOURCE_DIR}/include) target_include_directories(example_cpp PRIVATE ${CMAKE_SOURCE_DIR}/include)
target_compile_options(example_cpp PRIVATE -Wall) target_compile_options(example_cpp PRIVATE -Wall)
target_link_libraries(example_cpp PRIVATE -static-libgcc -static-libstdc++)
# Strip C++ example in release build. # Strip C++ example in release build.
if (lower_build_type STREQUAL "release") if (lower_build_type STREQUAL "release")
add_custom_command(TARGET example_cpp POST_BUILD add_custom_command(TARGET example_cpp POST_BUILD
@ -44,43 +40,9 @@ install(PROGRAMS $<TARGET_FILE:example_cpp>
# Also install the source of the example, so user can read it. # Also install the source of the example, so user can read it.
install(FILES example.cpp DESTINATION lib/memgraph/query_modules/src) install(FILES example.cpp DESTINATION lib/memgraph/query_modules/src)
add_library(schema SHARED schema.cpp)
target_include_directories(schema PRIVATE ${CMAKE_SOURCE_DIR}/include)
target_compile_options(schema PRIVATE -Wall)
target_link_libraries(schema PRIVATE -static-libgcc -static-libstdc++)
# Strip the C++ schema module in release build.
if (lower_build_type STREQUAL "release")
  add_custom_command(TARGET schema POST_BUILD
                     COMMAND strip -s $<TARGET_FILE:schema>
                     COMMENT "Stripping symbols and sections from the C++ schema module")
endif()
install(PROGRAMS $<TARGET_FILE:schema>
        DESTINATION lib/memgraph/query_modules
        RENAME schema.so)
# Also install the source of the module, so the user can read it.
install(FILES schema.cpp DESTINATION lib/memgraph/query_modules/src)
add_library(text SHARED text_search_module.cpp)
target_include_directories(text PRIVATE ${CMAKE_SOURCE_DIR}/include)
target_compile_options(text PRIVATE -Wall)
target_link_libraries(text PRIVATE -static-libgcc -static-libstdc++ fmt::fmt)
# Strip the C++ text_search module in release build.
if (lower_build_type STREQUAL "release")
  add_custom_command(TARGET text POST_BUILD
                     COMMAND strip -s $<TARGET_FILE:text>
                     COMMENT "Stripping symbols and sections from the C++ text_search module")
endif()
install(PROGRAMS $<TARGET_FILE:text>
        DESTINATION lib/memgraph/query_modules
        RENAME text.so)
# Also install the source of the module, so the user can read it.
install(FILES text_search_module.cpp DESTINATION lib/memgraph/query_modules/src)
# Install the Python example and modules # Install the Python example and modules
install(FILES example.py DESTINATION lib/memgraph/query_modules RENAME py_example.py) install(FILES example.py DESTINATION lib/memgraph/query_modules RENAME py_example.py)
install(FILES graph_analyzer.py DESTINATION lib/memgraph/query_modules) install(FILES graph_analyzer.py DESTINATION lib/memgraph/query_modules)
install(FILES mgp_networkx.py DESTINATION lib/memgraph/query_modules) install(FILES mgp_networkx.py DESTINATION lib/memgraph/query_modules)
install(FILES nxalg.py DESTINATION lib/memgraph/query_modules) install(FILES nxalg.py DESTINATION lib/memgraph/query_modules)
install(FILES wcc.py DESTINATION lib/memgraph/query_modules) install(FILES wcc.py DESTINATION lib/memgraph/query_modules)
install(FILES mgps.py DESTINATION lib/memgraph/query_modules)
install(FILES convert.py DESTINATION lib/memgraph/query_modules)

View File

@ -1,10 +0,0 @@
from json import loads
import mgp
@mgp.function
def str2object(string: str) -> mgp.Any:
    if string:
        return loads(string)
    return None
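A function registered with @mgp.function becomes callable from openCypher as <module>.<function> once the file is installed into the query_modules directory. A hypothetical smoke test, assuming a running Memgraph instance and the mgconsole client built above:

echo "RETURN convert.str2object('{\"answer\": 42}') AS value;" | mgconsole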

Some files were not shown because too many files have changed in this diff