Compare commits

798 Commits

Author SHA1 Message Date
Marko Budiselic
ebb7b9c61d Add WIP 2023-03-26 16:14:59 +02:00
Marko Budiselic
9b42d4f4f8 Add some TODOs 2023-02-22 14:11:46 +01:00
Marko Budiselic
3095cbd956 Add some initial measurements 2023-02-21 22:40:07 +01:00
János Benjamin Antal
51ed451b82 Run query performance benchmark on CI/CD 2023-02-21 21:46:49 +01:00
János Benjamin Antal
c344645978 Add shebang to runner 2023-02-21 21:43:05 +01:00
János Benjamin Antal
4cb63e44d9 Download benchmark data 2023-02-21 21:37:00 +01:00
János Benjamin Antal
c3042906c5 Add base of query performance test runner 2023-02-21 14:03:17 +01:00
János Benjamin Antal
92da012f9e Add flag to binary to control data directory 2023-02-21 14:02:17 +01:00
János Benjamin Antal
c7c488bb46 Add flag to output results in json 2023-02-20 14:53:13 +01:00
János Benjamin Antal
d2156683d3 Support multiple benchmark query files 2023-02-20 13:46:13 +01:00
János Benjamin Antal
b56b1521a6 Extract file reading logic into separate function 2023-02-20 13:01:33 +01:00
János Benjamin Antal
c50f2c2621 Fix e2e tests 2023-02-20 12:35:01 +01:00
János Benjamin Antal
f194160d7c Add detailed description about the query_performance binary 2023-02-17 17:33:24 +01:00
János Benjamin Antal
13acd22f3b Merge branch 'MG-access-control-benchmark-fixes' into MG-mf-query-performance-test 2023-02-15 15:00:50 +01:00
János Benjamin Antal
a2ce9c4396 Trigger CI 2023-02-15 13:59:31 +01:00
János Benjamin Antal
6d4dff7e6e Suppress clang-tidy warnings 2023-02-15 13:53:48 +01:00
János Benjamin Antal
6fc0b9ff02 Remove reference to non-existing file 2023-02-15 09:32:59 +01:00
János Benjamin Antal
c88bb50dff Merge branch 'MG-access-control-benchmark-fixes' into MG-mf-query-performance-test 2023-02-15 09:08:01 +01:00
János Benjamin Antal
3b0d531343 Suppress clang-tidy warning 2023-02-15 09:07:48 +01:00
János Benjamin Antal
0344ba5e58 Merge branch 'MG-access-control-benchmark-fixes' into MG-mf-query-performance-test 2023-02-15 08:38:01 +01:00
János Benjamin Antal
a17010ed16 Suppress clang-tidy warnings 2023-02-15 08:37:45 +01:00
János Benjamin Antal
3a59bee80c Make profiling easier 2023-02-14 14:33:41 +01:00
Josipmrden
6f730f9a91 Do not pull vertices one by one 2023-02-14 14:11:01 +01:00
János Benjamin Antal
fe14a8674c Include both v2 and v3 in query performance test 2023-02-13 23:33:01 +01:00
János Benjamin Antal
1bc93b64f4 Make it possible to compile v2 and v3 interpreter into a single binary 2023-02-13 23:32:33 +01:00
János Benjamin Antal
155388c0a1 Add benchmarking executable 2023-02-09 17:35:04 +01:00
János Benjamin Antal
74f53369c0 Add two more queries to simple benchmark 2023-02-09 13:17:55 +01:00
János Benjamin Antal
2b3141879b Make the output table nicer for comparing results 2023-02-09 13:17:46 +01:00
János Benjamin Antal
53f95ed1a7 Format python file 2023-02-09 13:17:15 +01:00
János Benjamin Antal
b678e6a63b Handle bool flags properly in benchmark runner 2023-02-09 12:42:02 +01:00
János Benjamin Antal
563035645c Add command line flag to opt for using MultiFrame 2023-02-09 12:41:41 +01:00
János Benjamin Antal
12bc78ca2d Add command line flag to determine MultiFrame size 2023-02-09 12:41:17 +01:00
János Benjamin Antal
a9a388ce44 Use parametrized queries for vertex creation 2023-02-08 13:52:51 +01:00
János Benjamin Antal
6bc2e6d8b6 Merge remote-tracking branch 'origin/project-pineapples' into MG-access-control-benchmark-fixes 2023-02-08 13:48:50 +01:00
János Benjamin Antal
a02abc8f79
Merge pull request #763 from memgraph/T1235-MG-implement-EdgeUniquenessFilter-with-PullMultiple
Implement edge uniqueness filter with pull multiple
2023-02-08 13:40:15 +01:00
gvolfing
096d1ce5f4 Invert boolean logic when checking for unique edges 2023-02-08 12:57:21 +01:00
gvolfing
657279949a Fix compile error 2023-02-08 12:13:46 +01:00
gvolfing
25226cca92
Update src/query/v2/plan/operator.cpp
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2023-02-08 11:41:43 +01:00
János Benjamin Antal
292a55f4ff Add new line at the end of dataset file 2023-02-08 11:28:19 +01:00
gvolfing
37f19867b0 Make EdgeUniquenessFilterCursor impl simpler 2023-02-07 08:25:50 +01:00
János Benjamin Antal
b26c7d09ef Ignore non-value-equality property filters for ScanByPrimaryKey 2023-02-06 16:39:42 +01:00
János Benjamin Antal
4bad8c0d1e Filter edges on types 2023-02-06 16:14:39 +01:00
János Benjamin Antal
2b01f2280c Add TODO about failing query 2023-02-06 16:14:21 +01:00
gvolfing
ac59e7f7e0 Move loop variable incrementation into the same place 2023-02-06 15:54:07 +01:00
gvolfing
7e99f32adb Remove commented-out, useless code 2023-02-06 15:47:18 +01:00
gvolfing
a1a612899c Merge branch 'project-pineapples' into T1235-MG-implement-EdgeUniquenessFilter-with-PullMultiple 2023-02-06 13:53:51 +01:00
gvolfing
2219dee6f6 Add initial impl for EdgeUniquenessFilterCursor::PullMultiple 2023-02-06 12:49:32 +01:00
János Benjamin Antal
7bf8550c86 Merge remote-tracking branch 'origin/project-pineapples' into MG-access-control-benchmark-fixes 2023-02-06 08:18:25 +01:00
János Benjamin Antal
8b392ecc97
Merge pull request #752 from memgraph/T1226-MG-Implement-scanbyprimarykey-with-multiframe
Implement ScanByPrimaryKey with multiframe
2023-02-06 08:13:43 +01:00
gvolfing
8e315875f2
Update src/query/v2/plan/operator.cpp
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
2023-02-02 07:44:47 +01:00
János Benjamin Antal
41183b328b Invalidate consumed frames in ScanByPrimaryKey 2023-02-01 17:19:02 +01:00
János Benjamin Antal
c9a0c15c16 Make frames invalid on consumption 2023-02-01 16:17:06 +01:00
János Benjamin Antal
50327254e0 Make queries into a single line 2023-02-01 14:59:50 +01:00
János Benjamin Antal
24ae6069f0 Split edge creation into batches 2023-02-01 14:31:56 +01:00
János Benjamin Antal
7be66f0c54 Add unwind based dataset creator 2023-02-01 14:24:04 +01:00
János Benjamin Antal
b136cd71d2 Fix DistributedCreatedNodeCursor in case of UNWIND 2023-02-01 14:22:47 +01:00
János Benjamin Antal
a38401130e Set vertex id in Expand properly 2023-02-01 13:24:58 +01:00
gvolfing
bf93b53e7d Fix compile error due to wrong aggregate field name 2023-02-01 12:36:27 +01:00
gvolfing
4be4a86d0a
Apply suggestions from code review
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
2023-02-01 11:39:48 +01:00
János Benjamin Antal
9a805bff8b Merge remote-tracking branch 'origin/T1226-MG-Implement-scanbyprimarykey-with-multiframe' into MG-create-properties 2023-02-01 11:37:06 +01:00
gvolfing
7d63236f87 Set the default pull-mechanism back to single-pull 2023-01-31 17:36:52 +01:00
gvolfing
4f8c241934 Merge branch 'project-pineapples' into T1226-MG-Implement-scanbyprimarykey-with-multiframe 2023-01-31 17:33:12 +01:00
gvolfing
60b71cc2c1 Rework ScanByPrimaryKey operator - multiframe 2023-01-31 17:30:31 +01:00
János Benjamin Antal
da28a29c7f Pass properties when creating vertices 2023-01-31 17:09:41 +01:00
János Benjamin Antal
303362d41c
Merge pull request #751 from memgraph/T1229-MG-implement-unwind-with-multiframe
Add implementation of `UNWIND` with `MultiFrame`
2023-01-31 15:06:46 +01:00
gvolfing
272e710510 Merge branch 'T1229-MG-implement-unwind-with-multiframe' into T1226-MG-Implement-scanbyprimarykey-with-multiframe 2023-01-31 08:58:27 +01:00
gvolfing
0e3229756f Merge branch 'project-pineapples' into T1226-MG-Implement-scanbyprimarykey-with-multiframe 2023-01-31 08:49:22 +01:00
János Benjamin Antal
e96eed81b1
Merge branch 'project-pineapples' into T1229-MG-implement-unwind-with-multiframe 2023-01-30 15:56:44 +01:00
János Benjamin Antal
7fa7586940
Merge pull request #729 from memgraph/T1216-MG-implement-aggregate
Implement Aggregate with `MultiFrame`
2023-01-30 15:56:30 +01:00
gvolfing
436e41f71f Init POC of ScanByPrimaryKey multiframe 2023-01-30 13:06:05 +01:00
János Benjamin Antal
fd047e7303 Unify even more logic 2023-01-27 17:10:21 +01:00
János Benjamin Antal
9214c715e2 Address review comment 2023-01-27 17:05:03 +01:00
János Benjamin Antal
44fc2d01c7 Unify logic between multi and single frame pull 2023-01-27 17:04:40 +01:00
János Benjamin Antal
d36c0cc424 Refactor AggregateCursor::ProcessAll 2023-01-27 16:52:46 +01:00
János Benjamin Antal
2ecf580ae7 Eliminate warnings 2023-01-27 16:50:58 +01:00
János Benjamin Antal
c12a5a9019 Make multi-create queries work 2023-01-27 16:50:16 +01:00
János Benjamin Antal
883922dba5 Eliminate warning about deprecated macro 2023-01-27 13:11:55 +01:00
János Benjamin Antal
c7591887a8 Merge branch 'project-pineapples' into T1216-MG-implement-aggregate 2023-01-27 09:07:14 +01:00
János Benjamin Antal
33454c7d8e Add implementation 2023-01-27 08:43:10 +01:00
János Benjamin Antal
f67422f8b9
Merge pull request #742 from memgraph/T1223-MG-implement-filter-with-multiframe
Make access control benchmark run with `MultiFrame`
2023-01-26 21:19:36 +01:00
János Benjamin Antal
23297c2afb Remove unnecessary function 2023-01-26 19:47:56 +01:00
János Benjamin Antal
3d35a10783
Merge branch 'project-pineapples' into T1223-MG-implement-filter-with-multiframe 2023-01-26 17:31:19 +01:00
János Benjamin Antal
999015a250
Merge pull request #738 from memgraph/T1214-MG-implement-expand-with-multiframe
Implement expand with `MultiFrame`
2023-01-26 17:30:55 +01:00
János Benjamin Antal
7d3d52c067
Merge branch 'project-pineapples' into T1214-MG-implement-expand-with-multiframe 2023-01-25 16:19:56 +01:00
János Benjamin Antal
6d4401dc92
Merge pull request #707 from memgraph/T1165-MG-add-property-based-high-level-query-test
Add property based high level query tests
2023-01-25 16:19:22 +01:00
János Benjamin Antal
0d00ae74dd
Merge branch 'project-pineapples' into T1214-MG-implement-expand-with-multiframe 2023-01-25 15:27:19 +01:00
János Benjamin Antal
16b78e4eb3
Merge branch 'project-pineapples' into T1165-MG-add-property-based-high-level-query-test 2023-01-25 15:14:26 +01:00
János Benjamin Antal
d44910bc9a
Merge pull request #710 from memgraph/T1190-MG-Implement-ScanAll-and-ScanAllByLabel-with-MultiFrame_2
Implement scan all and scan all by label with `MultiFrame`
2023-01-25 15:09:11 +01:00
gvolfing
54c1efab7f Merge branch 'project-pineapples' into 'T1165-MG-add-property-based-high-level-query-test' 2023-01-25 13:40:16 +01:00
János Benjamin Antal
7a0c9beba5
Merge branch 'project-pineapples' into T1190-MG-Implement-ScanAll-and-ScanAllByLabel-with-MultiFrame_2 2023-01-25 13:17:28 +01:00
János Benjamin Antal
e13faf41e9
Merge pull request #663 from memgraph/T1167-MG-create-scanbyprimarykey-operator
Create ScanAllByPrimaryKey operator
2023-01-25 12:51:04 +01:00
gvolfing
e24a6a86e4 Apply changes from code-review 2023-01-25 12:42:44 +01:00
gvolfing
3bc9c571a0 Make the use of RequestRouter more restricted 2023-01-25 07:02:03 +01:00
János Benjamin Antal
7972b3af43 Merge branch 'T1214-MG-implement-expand-with-multiframe' into T1223-MG-implement-filter-with-multiframe 2023-01-24 17:25:30 +01:00
János Benjamin Antal
e2a1029120 Fix simulation test 2023-01-24 17:24:10 +01:00
János Benjamin Antal
7cb07672ff Make DistributedExpandCursor handle existing nodes with MultiFrame 2023-01-24 17:17:47 +01:00
gvolfing
36fccc32c2 Address PR comments 2023-01-24 16:59:38 +01:00
János Benjamin Antal
4908af5a18 Make DistributedExpand operator handle repeated vertices 2023-01-24 16:33:15 +01:00
János Benjamin Antal
fa8eee2043 Use destination vertex as other end for out edges 2023-01-24 16:19:24 +01:00
gvolfing
b4ae8aea95 Apply suggestions from code review 2023-01-24 15:59:50 +01:00
gvolfing
27ff18733c NOLINTNEXTLINE(cppcoreguidelines-macro-usage) 2023-01-23 15:08:51 +01:00
gvolfing
0aa7daf002 NOLINTNEXTLINE(bugprone-macro-parentheses) 2023-01-23 14:34:16 +01:00
gvolfing
b341a8d7dd Merge branch 'project-pineapples' into T1165-MG-add-property-based-high-level-query-test 2023-01-23 14:12:42 +01:00
gvolfing
b7dbc7267b Appease clang-tidy bugprone-macro-parentheses 2023-01-23 14:09:25 +01:00
gvolfing
1951d781d0 Appease clang-tidy 2023-01-23 13:43:51 +01:00
gvolfing
e65f585fc6 Appease clang-tidy 2023-01-23 13:22:27 +01:00
gvolfing
fa86d4c989 Offer sacrifice to the lisp gods 2023-01-23 12:51:00 +01:00
gvolfing
fcbacdc80d Rename ScanAllByPrimaryKey operator, fix e2e fail
Rename ScanAllByPrimaryKey operator to ScanByPrimaryKey. Make the
LabelIndexExist function use the same functionality as PrimaryKeyExists
again, for now. Previously it was just returning false, and before that
it used the same implementation as PrimaryKeyExists. The change to false
broke some existing e2e tests that relied on a label-based indexing
operator being instantiated.
2023-01-23 11:56:58 +01:00
gvolfing
69fa4e8c8d Return false from unimplemented function so the benchmark tests can run 2023-01-23 10:43:41 +01:00
gvolfing
ea646e1803 Add missing mock implementation to MockedRequestRouter 2023-01-23 10:06:52 +01:00
gvolfing
89d26c36c9 Merge branch 'project-pineapples' into T1167-MG-create-scanbyprimarykey-operator 2023-01-23 08:42:14 +01:00
gvolfing
cf76e0e19b React to PR comments 2023-01-23 08:27:44 +01:00
János Benjamin Antal
900ece8109 Add PullMultiple to DeleteCursor 2023-01-20 23:04:52 +01:00
János Benjamin Antal
544c75c212 Add explanation about limitations of current implementation 2023-01-20 23:04:33 +01:00
János Benjamin Antal
0285b56915 Fix compilation error 2023-01-20 23:01:23 +01:00
János Benjamin Antal
515a52130e Prevent moving from valid frames during defragmentation of MultiFrame 2023-01-20 22:12:24 +01:00
János Benjamin Antal
55b5d76092 Add docs to PullMultiple 2023-01-20 21:38:51 +01:00
János Benjamin Antal
0eee3ad7b7 Fix DistributedExpandCursor 2023-01-20 21:38:35 +01:00
János Benjamin Antal
c9299a6c72 Turn the scan all cursor into an automaton 2023-01-20 21:37:57 +01:00
János Benjamin Antal
de99025c39 Implement PullMultiple for FilterCursor 2023-01-20 15:36:38 +01:00
János Benjamin Antal
be39fac72e Add return value to PullMultiple
Because the `FilterCursor` might push down the same multiframe multiple
times, it is easier if each cursor maintains whether it put any new data
on the `MultiFrame` or not. This way each cursor can decide easily
whether it has to do more work or not.
2023-01-20 15:36:24 +01:00
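
The contract this commit describes can be sketched as follows; `Frame`, `MultiFrame`, and the `Matches` predicate are illustrative stand-ins rather than Memgraph's actual definitions:

```cpp
#include <vector>

struct Frame { bool valid = false; };

struct MultiFrame {
  std::vector<Frame> frames;
};

struct Cursor {
  virtual ~Cursor() = default;
  // Returns whether this cursor put any new data on the MultiFrame, so the
  // caller can decide whether it has to do more work.
  virtual bool PullMultiple(MultiFrame &multi_frame) = 0;
};

struct FilterCursor : Cursor {
  explicit FilterCursor(Cursor &input) : input_(&input) {}

  bool PullMultiple(MultiFrame &multi_frame) override {
    bool populated_any = false;
    // The same MultiFrame may be pushed down multiple times: keep pulling
    // from the input until the filter lets something through or the input
    // reports it produced nothing new.
    while (!populated_any && input_->PullMultiple(multi_frame)) {
      for (auto &frame : multi_frame.frames) {
        if (!frame.valid) continue;
        if (Matches(frame)) {
          populated_any = true;
        } else {
          frame.valid = false;  // filtered out
        }
      }
    }
    return populated_any;
  }

 private:
  static bool Matches(const Frame & /*frame*/) { return true; }  // stand-in predicate
  Cursor *input_;
};
```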
gvolfing
5eea3ceee4
Apply suggestions from code review
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
2023-01-20 06:22:37 +01:00
gvolfing
cc643aac69 Deal with unprotected comma in MOCKED_METHOD 2023-01-19 17:45:15 +01:00
János Benjamin Antal
e7f10ec8f4 Remove duplicated definition 2023-01-19 17:44:03 +01:00
gvolfing
6fe244b209 Add missing MOCK_METHODS to MockedRequestRouter 2023-01-19 17:16:29 +01:00
János Benjamin Antal
ede6281e00 Fix unit tests 2023-01-19 16:58:11 +01:00
János Benjamin Antal
70c8ed9180 Merge branch 'T1190-MG-Implement-ScanAll-and-ScanAllByLabel-with-MultiFrame_2' into T1214-MG-implement-expand-with-multiframe 2023-01-19 16:56:23 +01:00
János Benjamin Antal
59b39e03cb Merge remote-tracking branch 'origin/project-pineapples' into T1190-MG-Implement-ScanAll-and-ScanAllByLabel-with-MultiFrame_2 2023-01-19 16:24:13 +01:00
gvolfing
96ea113a69 Merge branch 'project-pineapples' into T1165-MG-add-property-based-high-level-query-test 2023-01-19 16:22:46 +01:00
János Benjamin Antal
a31c7cce08
Merge pull request #702 from memgraph/T1189-MG-implement-create-node-cursor-mf
Implement CreateNode cursor with MultiFrame
2023-01-19 16:22:30 +01:00
János Benjamin Antal
8f6fac3cde Make arguments const 2023-01-19 14:35:25 +01:00
gvolfing
a0ada914ab Fix segfault 2023-01-19 13:10:53 +01:00
János Benjamin Antal
ca62fa5123 Fetch properties of destination vertex 2023-01-19 11:42:24 +01:00
János Benjamin Antal
d1548c9253 Eliminate fully 2023-01-19 10:43:54 +01:00
János Benjamin Antal
52baaf8030 Detect when no work should be done because of lack of input data in cursors 2023-01-19 10:43:07 +01:00
János Benjamin Antal
a0274bbdd9 Prevent reexecution of cursor when no output symbols are present 2023-01-19 10:42:09 +01:00
János Benjamin Antal
c5c6fd9b86 Merge branch 'T1190-MG-Implement-ScanAll-and-ScanAllByLabel-with-MultiFrame_2' into T1214-MG-implement-expand-with-multiframe 2023-01-18 22:22:21 +01:00
János Benjamin Antal
586999475b Merge remote-tracking branch 'origin/T1189-MG-implement-create-node-cursor-mf' into T1190-MG-Implement-ScanAll-and-ScanAllByLabel-with-MultiFrame_2 2023-01-18 22:22:05 +01:00
János Benjamin Antal
e888464de2 Implement automaton for ExpandOneCursor 2023-01-18 17:32:22 +01:00
gvolfing
94a536a2b9 Fix hanging conditionvariable 2023-01-18 16:03:34 +01:00
János Benjamin Antal
e42c60d555
Merge branch 'project-pineapples' into T1189-MG-implement-create-node-cursor-mf 2023-01-18 13:37:36 +01:00
János Benjamin Antal
00d6b42c37
Merge pull request #699 from memgraph/T1191-MG-implement-create-expand-with-multiframe 2023-01-18 13:37:24 +01:00
János Benjamin Antal
f39a937323 Add first, but buggy implementation 2023-01-18 13:31:35 +01:00
János Benjamin Antal
575361827e Add comment about invalid usage of MultiFrame 2023-01-17 21:01:54 +01:00
János Benjamin Antal
901da4c9b3 Update InvalidFramesPopulator to follow the conventions 2023-01-17 21:01:22 +01:00
János Benjamin Antal
81675106fd Use tests namespace for tests 2023-01-17 20:33:14 +01:00
János Benjamin Antal
7fb828bca3 Update outdated comments 2023-01-17 20:32:00 +01:00
János Benjamin Antal
c04cfc5596 Merge branch 'T1191-MG-implement-create-expand-with-multiframe' into T1189-MG-implement-create-node-cursor-mf 2023-01-17 20:29:52 +01:00
János Benjamin Antal
a3b1676c42 Separate include blocks 2023-01-17 20:25:28 +01:00
János Benjamin Antal
b38a9b9c90 Use tests namespace for tests 2023-01-17 20:25:10 +01:00
János Benjamin Antal
57690c5390 Refactor DistributedScanAllAndFilterCursor 2023-01-17 08:34:08 +01:00
János Benjamin Antal
d11d5c3fa9 Make special member functions of MultiFrame iterators consistent 2023-01-17 08:33:40 +01:00
János Benjamin Antal
36891c119b Remove unnecessary state from DistributedScanAllAndFilterCursor 2023-01-17 07:17:53 +01:00
János Benjamin Antal
b91b16de96 Fix Interpreter::PullMultiple for queries that return some values 2023-01-17 07:06:25 +01:00
gvolfing
cfdc728d64 Merge branch 'project-pineapples' into T1165-MG-add-property-based-high-level-query-test 2023-01-16 16:26:33 +01:00
gvolfing
40835b8c9c General clean-up 2023-01-16 14:54:20 +01:00
gvolfing
d1fe73c987 Conform simulation to test GetProperties correctly 2023-01-16 14:05:13 +01:00
gvolfing
3257d46d18 Remove assertion 2023-01-16 13:44:02 +01:00
gvolfing
9be5ee1ae9 Make GetProperties return the PrimaryKeys as well, when queried for all properties like ScanAll 2023-01-16 13:26:33 +01:00
gvolfing
fdd89e0e81 Replace ScanVertices with GetProperties request, in the case of ScanAllByPrimaryKey operator 2023-01-16 11:27:41 +01:00
János Benjamin Antal
82203fa1ca Merge branch 'T1189-MG-implement-create-node-cursor-mf' into T1190-MG-Implement-ScanAll-and-ScanAllByLabel-with-MultiFrame_2 2023-01-16 10:44:58 +01:00
János Benjamin Antal
775e950dba Update unit tests to test the new logic 2023-01-16 10:16:12 +01:00
János Benjamin Antal
920ad277a5 Add assertion about primary label 2023-01-16 09:03:35 +01:00
János Benjamin Antal
392f6e2b73 Reduce the number of node infos to a maximum of one 2023-01-16 08:57:23 +01:00
János Benjamin Antal
e40f7f507b Fix pull logic for multiframe 2023-01-16 08:40:43 +01:00
János Benjamin Antal
c139856b2a Fix unit tests 2023-01-15 18:52:36 +01:00
János Benjamin Antal
b30137ab7a Improve unit tests to catch bug 2023-01-15 18:39:58 +01:00
János Benjamin Antal
b2b9b1d5cb Eliminate warnings about deprecated methods 2023-01-15 18:25:48 +01:00
János Benjamin Antal
ace1eb401f Make unit tests compile with new gtest version 2023-01-15 18:25:32 +01:00
gvolfing
668f7857b1 Pass the correct expression to the operator 2023-01-13 12:38:50 +01:00
gvolfing
d22c962af4 Turn RequestRouter into an interface again 2023-01-12 21:25:40 +01:00
gvolfing
4c25a4dfbd Remove unnecessary comments 2023-01-12 20:17:35 +01:00
gvolfing
61d84bd622 Set the type of the frame size to size_t from int64_t 2023-01-12 16:58:09 +01:00
gvolfing
3b06db5a02 Clang-tidy 2023-01-12 16:13:27 +01:00
gvolfing
afde0c6926 Remove commented-out code, conform to clang-tidy 2023-01-12 15:45:14 +01:00
gvolfing
41bb988fe9 Fix failing benchmark tests and implement cursor
The benchmarking tests were failing because of the incorrect
implementation of the ScanAllByPrimaryKeyCursor. The previous
implementation caused the currently allocatable 1M edge ids to run out
very quickly, causing the tests to freeze.
2023-01-12 14:14:59 +01:00
János Benjamin Antal
d7bd2cc754 Eliminate copying expands 2023-01-12 09:05:29 +01:00
János Benjamin Antal
599b133a55 Fix edge direction when creating edges 2023-01-12 09:04:18 +01:00
jeremy
c38a80ccd7 Merge branch 'T1190-MG-Implement-ScanAll-and-ScanAllByLabel-with-MultiFrame_2' into T1216-MG-implement-aggregate 2023-01-03 14:58:48 +01:00
jeremy
dee33b2072 Merge branch 'T1189-MG-implement-create-node-cursor-mf' into T1190-MG-Implement-ScanAll-and-ScanAllByLabel-with-MultiFrame_2 2023-01-03 14:58:21 +01:00
jeremy
c283c6e6ea Merge branch 'T1191-MG-implement-create-expand-with-multiframe' into T1189-MG-implement-create-node-cursor-mf 2023-01-03 14:57:02 +01:00
Jeremy B
65113bc55b
Merge branch 'project-pineapples' into T1191-MG-implement-create-expand-with-multiframe 2023-01-03 14:55:56 +01:00
gvolfing
32ea124d4b Merge branch 'project-pineapples' into T1167-MG-create-scanbyprimarykey-operator 2023-01-02 13:37:07 +01:00
gvolfing
68175bc97c Init basic cursor class
Conform clang-tidy and modify PullMultiple behavior
2023-01-02 13:05:44 +01:00
jeremy
9589dd97b6 Impl and correct aggregate 2022-12-30 16:21:41 +01:00
Jure Bajic
41f5c00f5f
Merge pull request #680 from memgraph/T1185-MG-replace-skip-list
- Replace `SkipList` with `std::map` as vertex container
- Replace `SkipList` with `std::map` as edge container
- Replace `SkipList` with `std::set` as `LabelPropertyIndex` container
- Replace `SkipList` with `std::set` as `LabelIndex` container
- Remove `KeyStore` and `LexicographiOrderedVertex`
2022-12-20 16:36:55 +01:00
jbajic
e82955895a Leave a TODO 2022-12-20 16:14:01 +01:00
jeremy
751c27f792 Get rid of attribute has_valid_frames_ 2022-12-20 10:12:50 +01:00
jeremy
7e217e94b3 Tests: CreateMultiFrame creates invalid frames
Test uses real "once" instead of mocked version
2022-12-19 15:44:01 +01:00
gvolfing
1ebde8be74 Merge branch 'project-pineapples' into T1165-MG-add-property-based-high-level-query-test 2022-12-15 17:21:39 +01:00
gvolfing
32231fe49a Move the implementation of AllocateInitialEdgeIds into the child class 2022-12-15 17:10:27 +01:00
gvolfing
fa39c6740b Apply review comments 2022-12-15 17:02:01 +01:00
jeremy
1aa40e5e3f Add const to method 2022-12-15 16:24:45 +01:00
gvolfing
ae57fa3199
Apply suggestions from code review
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-12-15 15:25:46 +01:00
jbajic
a0e0791aa1 Address review comments 2022-12-15 14:52:13 +01:00
jeremy
311994a36d Impl of version more memory friendly 2022-12-15 14:40:20 +01:00
jeremy
54ce79baa0 Add empty line 2022-12-15 14:40:20 +01:00
jeremy
83306d21de Revert changes 2022-12-15 14:40:20 +01:00
jeremy
ac16348fff Remove unused variable 2022-12-15 14:40:20 +01:00
jeremy
af812d1311 Implement scanAll MultiFrame version 2022-12-15 14:40:20 +01:00
gvolfing
f36b96744c Apply post-merge fixes 2022-12-15 11:04:20 +01:00
gvolfing
14000d727f Merge branch 'project-pineapples' into T1167-MG-create-scanbyprimarykey-operator 2022-12-15 10:26:36 +01:00
Kostas Kyrimis
a9eca651df Address GH comments and fix a bug in ValidFramesModifier postincrement 2022-12-14 18:26:40 +02:00
gvolfing
3604046f68 Implement cypher query based simulation testing
Make the Interpreter able to handle SimulatorTransport as well. This
includes changes that make it possible to use the different transport
types in a semi-polymorphic way by introducing factory methods in the
RequestRouter. The reason for this solution is that the classes
representing the different transport types have member function
templates, which we cannot make virtual. This solution seemed to be the
least convoluted. In the testing itself it is now possible to pass a set
of Cypher queries to the interpreter, which runs them against the
individual shards that are managed and started up by the MachineManager,
with the different entities communicating over the simulated network.
2022-12-14 13:55:45 +01:00
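
The factory-method workaround mentioned here can be illustrated with a small sketch; all names below (`LocalTransport`, `ScanVertices`, `MakeRequestRouter`) are hypothetical, but the constraint is real: member function templates cannot be virtual in C++, so the transport type is erased behind a concrete router built by a factory.

```cpp
#include <memory>
#include <string>

struct LocalTransport {
  template <typename TRequest>
  void Send(const TRequest & /*request*/) { /* real network send */ }
};

struct SimulatorTransport {
  template <typename TRequest>
  void Send(const TRequest & /*request*/) { /* deliver over the simulated network */ }
};

// The interface exposes only non-template virtuals, which the interpreter
// can call without knowing the transport type.
struct RequestRouterInterface {
  virtual ~RequestRouterInterface() = default;
  virtual void ScanVertices(const std::string &label) = 0;
};

template <typename TTransport>
struct RequestRouter : RequestRouterInterface {
  TTransport transport;
  void ScanVertices(const std::string &label) override {
    transport.Send(label);  // the template call lives in the concrete class
  }
};

// Factory method: picks the concrete transport, returns the interface.
template <typename TTransport>
std::unique_ptr<RequestRouterInterface> MakeRequestRouter() {
  return std::make_unique<RequestRouter<TTransport>>();
}

int main() {
  auto router = MakeRequestRouter<SimulatorTransport>();
  router->ScanVertices("Person");
}
```

The interpreter only ever sees RequestRouterInterface, so tests can swap in the simulated transport without touching query code.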
jbajic
817433d342 Revert to approximate 2022-12-13 13:32:14 +01:00
jbajic
76dcf3ad0f Use std::set in LabelIndex 2022-12-13 13:21:52 +01:00
Kostas Kyrimis
04450dada7 Simplify tests 2022-12-12 19:23:40 +02:00
Kostas Kyrimis
8c2b2f4be2 Merge branch 'T1191-MG-implement-create-expand-with-multiframe' into T1189-MG-implement-create-node-cursor-mf 2022-12-12 19:17:43 +02:00
Kostas Kyrimis
70200919cd Merge branch 'project-pineapples' into T1191-MG-implement-create-expand-with-multiframe 2022-12-12 19:16:05 +02:00
Kostas Kyrimis
f04ed3c137 Simplify Mocks and test 2022-12-12 19:15:49 +02:00
Kostas Kyrimis
2e4e975102 Update GoogleTest lib to latest release version 1.12.1 2022-12-12 19:15:28 +02:00
Jure Bajic
c24c699c78
Merge branch 'project-pineapples' into T1185-MG-replace-skip-list 2022-12-12 08:01:47 -08:00
Tyler Neely
070225df28
Merge pull request #708 from memgraph/tyler_shard_struct
[project-pineapples <-] Make coordinator::Shard into a proper struct named ShardMetadata
2022-12-12 16:51:54 +01:00
Tyler Neely
1170e6762f Rename coordinator::Shard to coordinator::ShardMetadata to avoid conflation with storage::v3::Shard 2022-12-12 15:22:17 +00:00
Tyler Neely
d6f1505582 Make Shard into a proper struct that can contain additional metadata 2022-12-12 15:14:48 +00:00
jbajic
edb122cb33 Fix benchmark tests 2022-12-12 16:01:20 +01:00
jbajic
b0c4544287 Add asserts 2022-12-12 15:46:41 +01:00
Jure Bajic
0d856bee45
Merge branch 'project-pineapples' into T1185-MG-replace-skip-list 2022-12-12 05:45:09 -08:00
jbajic
c3e19498da Replace LabelPropertyIndex with std::set 2022-12-12 14:44:58 +01:00
Jure Bajic
03d994318e
Merge pull request #668 from memgraph/T1173-MG-benchmark-datastructures
- Add benchmarks comparing insert, remove, find and contains for std::map, std::set and utils::SkipList
- Create a python plot script
- Create a bash helper script for executing and plotting benchmarks
2022-12-12 04:11:55 -08:00
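
A benchmark of that shape could look like the following Google Benchmark sketch; the real suite also covers remove, find, contains, and utils::SkipList (Memgraph's own container, omitted here), so treat this as an assumed minimal form:

```cpp
#include <benchmark/benchmark.h>

#include <map>
#include <set>

static void BM_MapInsert(benchmark::State &state) {
  for (auto _ : state) {
    std::map<int, int> map;
    for (int i = 0; i < state.range(0); ++i) map.emplace(i, i);
    benchmark::DoNotOptimize(map);  // keep the container from being elided
  }
}
BENCHMARK(BM_MapInsert)->Range(1 << 10, 1 << 16);

static void BM_SetInsert(benchmark::State &state) {
  for (auto _ : state) {
    std::set<int> set;
    for (int i = 0; i < state.range(0); ++i) set.insert(i);
    benchmark::DoNotOptimize(set);
  }
}
BENCHMARK(BM_SetInsert)->Range(1 << 10, 1 << 16);

BENCHMARK_MAIN();
```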
Jure Bajic
4888605972
Merge branch 'project-pineapples' into T1173-MG-benchmark-datastructures 2022-12-12 03:46:49 -08:00
jbajic
a90a2d86c9 Fix edge test 2022-12-12 12:46:13 +01:00
jbajic
0cf440519e Introduce VertexData 2022-12-12 12:39:49 +01:00
Jeremy B
9c41e702e6
Merge pull request #703 from memgraph/T1190-correct-ValidFramesConsumer-begin
[🍍 < T1190] Correct implementation of begin iterators
Correct implementation of MultiFrame iterators::begin. If no Frames are valid, we need to return end()
2022-12-09 12:56:31 +01:00
jeremy
50f76b926b Make MultiFrame pointer instead of ref inside impl of iterators 2022-12-09 12:20:01 +01:00
jeremy
0353262cc2 Correct impl of begin iterators 2022-12-09 12:10:48 +01:00
Kostas Kyrimis
4ed20f0247 Add prototype for CreateNode multiframe 2022-12-08 18:46:30 +02:00
Jure Bajic
50fb8fe3c7
Merge branch 'project-pineapples' into T1173-MG-benchmark-datastructures 2022-12-08 04:41:34 -08:00
Jure Bajic
32322d39b8
Merge branch 'project-pineapples' into T1185-MG-replace-skip-list 2022-12-08 04:40:02 -08:00
Kostas Kyrimis
89f42ef73e Add CreateExpand PullMultiple and prototype mocks for testing 2022-12-07 19:03:30 +02:00
Kostas Kyrimis
0d19d347f8
Merge pull request #695 from memgraph/T1163-remove-template-from-frame 2022-12-06 13:57:50 +02:00
Kostas Kyrimis
a5520f5eae
Merge branch 'project-pineapples' into T1163-remove-template-from-frame 2022-12-06 13:24:55 +02:00
Jure Bajic
eb154d1310
Merge branch 'project-pineapples' into T1173-MG-benchmark-datastructures 2022-12-06 03:08:26 -08:00
Tyler Neely
59c94c90e6
Merge pull request #690 from memgraph/tyler_full_async_request_router
[project-pineapples <-] full async request router
2022-12-06 11:50:59 +01:00
jbajic
a7f5212c6e Merge branch 'T1173-MG-benchmark-datastructures' of github.com:memgraph/memgraph into T1173-MG-benchmark-datastructures 2022-12-06 11:47:57 +01:00
jbajic
b4d6ca2233 Use bool as data 2022-12-06 11:47:48 +01:00
Tyler Neely
5d3d67cbd0 Rename unsent_requests to requests_to_be_sent in RequestRouter 2022-12-06 10:32:57 +00:00
Tyler Neely
675c2fe24a
Update src/io/rsm/rsm_client.hpp
Co-authored-by: Jeremy B <97525434+42jeremy@users.noreply.github.com>
2022-12-06 11:31:46 +01:00
Tyler Neely
b288f06cb7
Update src/io/rsm/rsm_client.hpp
Co-authored-by: Jeremy B <97525434+42jeremy@users.noreply.github.com>
2022-12-06 11:31:40 +01:00
Jure Bajic
5beb7c0966
Merge branch 'project-pineapples' into T1173-MG-benchmark-datastructures 2022-12-05 07:00:30 -08:00
jbajic
e0b7d7abeb Address review comments 2022-12-05 16:00:17 +01:00
Jure Bajic
9bab26fb10
Apply suggestions from code review
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-12-05 15:50:40 +01:00
Jure Bajic
c39f264684
Merge branch 'project-pineapples' into T1185-MG-replace-skip-list 2022-12-05 06:37:43 -08:00
jbajic
2488895362 Rename ApproximateVertexCount to VertexCount 2022-12-05 15:37:10 +01:00
jbajic
a20edf2b74 Fix bounds 2022-12-05 15:30:39 +01:00
Tyler Neely
25713405df
Update src/io/simulator/simulator_transport.hpp
Co-authored-by: gvolfing <107616712+gvolfing@users.noreply.github.com>
2022-12-05 15:26:29 +01:00
Tyler Neely
2a81ce5640
Update src/io/simulator/simulator.hpp
Co-authored-by: gvolfing <107616712+gvolfing@users.noreply.github.com>
2022-12-05 15:26:18 +01:00
Tyler Neely
747b8a21cd Fix bug with polling redirected requests 2022-12-05 14:20:06 +00:00
jbajic
65e9ceb779 Use multimap as index structure 2022-12-05 14:52:51 +01:00
Jeremy B
6338690b00
Merge branch 'project-pineapples' into T1163-remove-template-from-frame 2022-12-05 14:45:54 +01:00
Tyler Neely
ca3f748325 Apply clang-tidy feedback 2022-12-05 13:43:20 +00:00
Tyler Neely
1b458ebc41 Complete migration of GetProperties to new request style 2022-12-05 13:26:44 +00:00
Tyler Neely
6efe074313 Update GetProperties to use the correct style of request driving in the RequestRouter 2022-12-05 13:15:12 +00:00
Tyler Neely
3c72af0c10
Merge branch 'project-pineapples' into tyler_full_async_request_router 2022-12-05 14:09:23 +01:00
jeremy
6d3f9ab695 Removing template from class Frame 2022-12-05 14:07:53 +01:00
Kostas Kyrimis
e8240df29e
Merge pull request #694 from memgraph/T1188-MG-remove-unused-txt 2022-12-05 13:18:38 +02:00
Kostas Kyrimis
ec73ee666c Remove unused jba.txt 2022-12-05 13:02:33 +02:00
János Benjamin Antal
1d7f61dd0b
Merge pull request #676 from memgraph/T1163-MG-add-multiframe-and-some-operators
Implement MultiFrame with iterators
2022-12-05 11:51:11 +01:00
jeremy
7f9eceadb3 Remove un-needed frame modification in Once
This is not needed and would be incorrect with the optional
2022-12-05 11:19:47 +01:00
jeremy
f4428af210 Remove reference function on operator++() & 2022-12-05 11:05:47 +01:00
jeremy
9f5af97044 Merge branch 'T1163-MG-add-multiframe-and-some-operators' of github.com:memgraph/memgraph into T1163-MG-add-multiframe-and-some-operators 2022-12-05 10:38:06 +01:00
jeremy
c7c0234889 Add default constructor to iterators 2022-12-05 10:38:01 +01:00
Jeremy B
68ae729b07
Update src/query/v2/multiframe.hpp
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
2022-12-05 10:34:58 +01:00
Tyler Neely
2e33f8275b
Merge branch 'project-pineapples' into tyler_full_async_request_router 2022-12-02 19:05:15 +01:00
Tyler Neely
9a62503803 Tick the simulator forward from Notify::Await in a similar way that Future::Wait does 2022-12-02 18:04:38 +00:00
Jure Bajic
75b598d014
Merge branch 'project-pineapples' into T1173-MG-benchmark-datastructures 2022-12-02 07:28:39 -08:00
Kostas Kyrimis
98ae30d861 Merge branch 'project-pineapples' into T1163-MG-add-multiframe-and-some-operators 2022-12-02 15:12:02 +02:00
Kostas Kyrimis
6b8a5fd41d Make all variants of multiframe iterators model ForwardIterator concept properly 2022-12-02 15:11:51 +02:00
jbajic
4a3f950cf9 Fix indices 2022-12-02 10:17:26 +01:00
Kostas Kyrimis
146ed5756c
Merge pull request #666 from memgraph/T1172-MG-implement-get-properties-on-qe 2022-12-01 19:14:20 +02:00
Tyler Neely
438b519703 Apply clang-tidy feedback 2022-12-01 16:26:41 +00:00
Kostas Kyrimis
0ad702175f Fix expression evaluator mocked request router 2022-12-01 18:24:51 +02:00
jeremy
be3797e0a1 Remove unused param 2022-12-01 17:01:08 +01:00
Tyler Neely
cdde7ca670
Merge branch 'project-pineapples' into tyler_full_async_request_router 2022-12-01 17:00:09 +01:00
jeremy
d0e1d86df3 Remove unused param 2022-12-01 16:57:09 +01:00
Tyler Neely
366a4e2b9a Add support for efficiently executing multiple asynchronous requests out-of-order from the RequestRouter 2022-12-01 15:56:16 +00:00
Kostas Kyrimis
439eae3a72 Merge branch 'project-pineapples' into T1172-MG-implement-get-properties-on-qe 2022-12-01 17:42:01 +02:00
Kostas Kyrimis
c15e75b48c Remove old shard request manager header 2022-12-01 17:40:58 +02:00
Kostas Kyrimis
2120645d6a Remove dead code in request_router simulation test 2022-12-01 17:39:12 +02:00
Jeremy B
616b79ce6c
Merge branch 'project-pineapples' into T1163-MG-add-multiframe-and-some-operators 2022-12-01 16:36:59 +01:00
jeremy
13cabcaab5 Re-implement ValidFramesReader and iterators 2022-12-01 16:31:21 +01:00
Kostas Kyrimis
f1ea76a3d7 Merge branch 'T0919-MG-implement-get-properties-storage' into T1172-MG-implement-get-properties-on-qe 2022-12-01 17:19:00 +02:00
Kostas Kyrimis
18b3c1e8b1
Merge pull request #652 from memgraph/T0919-MG-implement-get-properties-storage
Implements GetProperties request handler on shard_rsm
2022-12-01 17:15:15 +02:00
jeremy
db45845619 format 2022-12-01 15:52:35 +01:00
Jeremy B
4bbf3c95ca
Update src/query/v2/multiframe.cpp
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
2022-12-01 15:49:17 +01:00
jeremy
e11856acf8 Merge branch 'T1163-MG-add-multiframe-and-some-operators' of github.com:memgraph/memgraph into T1163-MG-add-multiframe-and-some-operators 2022-12-01 15:19:40 +01:00
Jeremy B
5cd0d5137e
Update src/query/v2/multiframe.hpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-12-01 15:18:50 +01:00
jeremy
8eec8399a3 Remove unneeded "this" 2022-12-01 15:04:51 +01:00
jeremy
d18d4f198e Merge branch 'T1163-MG-add-multiframe-and-some-operators' of github.com:memgraph/memgraph into T1163-MG-add-multiframe-and-some-operators 2022-12-01 14:55:45 +01:00
jeremy
29347c83e7 Remove unneeded tag 2022-12-01 14:54:26 +01:00
Jeremy B
452722f4f8
Update src/query/v2/multiframe.hpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-12-01 14:52:30 +01:00
jbajic
730bac6b74 Replace edges skiplist 2022-12-01 14:32:33 +01:00
Jeremy B
6c441b80ec
Update src/query/v2/multiframe.hpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-12-01 14:20:57 +01:00
Jeremy B
ee9ba1a7f8
Update src/query/v2/multiframe.hpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-12-01 14:20:26 +01:00
Jeremy B
d0c960e900
Update src/query/v2/multiframe.hpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-12-01 14:20:19 +01:00
jeremy
00fd69c170 Add statement to ignore clang warning 2022-12-01 14:19:41 +01:00
Jeremy B
54907d2a1a
Update src/query/v2/multiframe.cpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-12-01 14:19:15 +01:00
jeremy
23bfd7f4fc Updated OnceCursor 2022-12-01 13:45:24 +01:00
Kostas Kyrimis
f8cbaaf362 Allow requests with zero properties 2022-12-01 14:41:21 +02:00
jbajic
4e7d8c3ba2 Replace LabelPropertyIndex skiplist with std::map 2022-12-01 12:06:01 +01:00
jeremy
e5d892683c Keep wrapper as ptr instead of ref 2022-12-01 11:14:54 +01:00
jbajic
d4bdedd9e8 Fix GetProperty 2022-11-30 17:12:09 +01:00
jeremy
9f9a81455f Change type size_t->int64_t 2022-11-30 16:56:35 +01:00
jeremy
b0b8c0a5c9 Add noexcept to basic functions 2022-11-30 16:49:32 +01:00
Kostas Kyrimis
8af635c8d7 Fix clang-tidy warnings 2022-11-30 17:44:37 +02:00
jeremy
5e64b19745 Replace pull_count_->did_pull_ 2022-11-30 16:30:55 +01:00
jeremy
38f3a4cacb Use range for loop instead of idx based 2022-11-30 16:28:01 +01:00
Kostas Kyrimis
3f3d6c52a3 Merge branch 'project-pineapples' into T0919-MG-implement-get-properties-storage 2022-11-30 17:25:05 +02:00
Kostas Kyrimis
94ef57c459 Fix small bugs 2022-11-30 17:24:46 +02:00
jeremy
56556f7c2d Update incorrect de-referencing 2022-11-30 16:22:39 +01:00
jeremy
a2027fc6ac Remove default_frame 2022-11-30 16:09:06 +01:00
jeremy
deb31e4b77 Multiframe only expects size of frame instead of default frame 2022-11-30 16:00:24 +01:00
jeremy
072bc58b1e Reverse condition in while() + comment 2022-11-30 14:50:28 +01:00
jbajic
31f907cb53 Remove keystore 2022-11-30 14:40:12 +01:00
jbajic
9af20c295c Fix VerticesContainer being passed by value 2022-11-30 14:39:52 +01:00
jeremy
969b8f0da7 Remove un-necessary internal_ptr 2022-11-30 14:32:19 +01:00
jeremy
55008a2927 Rename func ResetAllFramesInvalid->MakeAllFramesInvalid 2022-11-30 14:17:01 +01:00
Jure Bajic
494f6ac25f
Merge branch 'project-pineapples' into T1185-MG-replace-skip-list 2022-11-30 05:12:43 -08:00
jeremy
02ca6734c1 Correct comment to follow common style 2022-11-30 14:11:38 +01:00
jbajic
976e6ff0a6 Replace skiplist with std::set 2022-11-30 14:11:22 +01:00
Kostas Kyrimis
9621532d3d Prototype suggested changes and polish PR 2022-11-30 14:59:00 +02:00
gvolfing
7e8b4921b4 Make query_v2_plan unit tests available again.
The query_v2_plan unit tests were needed in order to properly test
whether the created logical plan of the new operator works properly. To
achieve this, v2 versions of several files were created, in which the
old utilities were replaced with new ones, like query::v2 and
storage::v3. A new fake db accessor was also created in order to be able
to test the ScanAllByPrimaryKey operator.
2022-11-30 13:16:04 +01:00
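
The fake-accessor idea can be sketched roughly like this; the `FakeDbAccessor` members below are illustrative stand-ins, chosen only to show how a planner test can be fed index existence and cardinalities without a real storage engine:

```cpp
#include <cstdint>
#include <map>
#include <string>

// A fake accessor only has to answer the questions the planner asks:
// which indices exist and roughly how many vertices they cover.
struct FakeDbAccessor {
  std::map<std::string, int64_t> label_index_counts;

  void SetIndexCount(const std::string &label, int64_t count) {
    label_index_counts[label] = count;
  }

  bool LabelIndexExists(const std::string &label) const {
    return label_index_counts.count(label) > 0;
  }

  // Planner cost estimation only needs approximate cardinalities.
  int64_t VerticesCount(const std::string &label) const {
    auto it = label_index_counts.find(label);
    return it == label_index_counts.end() ? 0 : it->second;
  }
};
```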
jeremy
a10c254caa Add // NOLINTNEXTLINE to correct place 2022-11-30 11:43:16 +01:00
jeremy
9c0c0a2d1c Add clang warning suppress 2022-11-30 10:14:07 +01:00
Jure Bajic
0450163c5e
Merge branch 'project-pineapples' into T1173-MG-benchmark-datastructures 2022-11-30 00:17:13 -08:00
Jeremy B
62ee6f0e05
Merge branch 'project-pineapples' into T1163-MG-add-multiframe-and-some-operators 2022-11-29 17:34:16 +01:00
jeremy
00a4127e4e Remove incorrect = default 2022-11-29 17:15:52 +01:00
Tyler Neely
53040c6758
Merge pull request #651 from memgraph/T1157-MG-concurrent-RsmClient-requests
Support concurrent RsmClient requests
2022-11-29 17:11:25 +01:00
Tyler Neely
04124a1e9b Make AsyncRequestToken arguments const, reserve size in response vectors 2022-11-29 15:50:35 +00:00
jeremy
f107ef8aea Default destructor in header 2022-11-29 16:12:50 +01:00
Tyler Neely
b3605c9ab1 Fix typo in new simplified RequestRouter::CreateExpand method 2022-11-29 15:11:35 +00:00
Tyler Neely
7df3a743b9 Simplify and parallelize CreateExpand 2022-11-29 15:07:59 +00:00
Tyler Neely
f8215306e8 A variety of small code clean-ups, remove overloaded methods 2022-11-29 15:06:01 +00:00
Tyler Neely
8f08d986cb Make method names clear for RequestRouter requests, avoid unnecessary overloading 2022-11-29 14:47:32 +00:00
jeremy
cc3bcf1dc2 Clang tidy 2022-11-29 15:43:24 +01:00
Tyler Neely
9144d2dccd Remove bug-prone inverted ownership of ExecutionState as a consideration of operators 2022-11-29 14:30:59 +00:00
jeremy
9faa206f95 Clang tidy 2022-11-29 15:03:03 +01:00
jbajic
1b97bb0856 Remove redundant line from skiplist 2022-11-29 14:57:53 +01:00
Jure Bajic
f54701cc96
Merge branch 'project-pineapples' into T1173-MG-benchmark-datastructures 2022-11-29 05:55:29 -08:00
jbajic
1f98d33fa6 Remove b++ tree 2022-11-29 14:40:44 +01:00
jeremy
86f7b82bdc Clang tidy 2022-11-29 14:26:17 +01:00
jeremy
8c5edaaeb9 Update type 2022-11-29 14:07:15 +01:00
jbajic
632db4175a Revert storage changes 2022-11-29 14:04:12 +01:00
jeremy
3c0e38aacb Adapt comment 2022-11-29 13:54:26 +01:00
jeremy
aace5db8cc Adapt comment 2022-11-29 13:53:08 +01:00
jeremy
bc32a3d305 Adapt comment 2022-11-29 13:52:29 +01:00
jeremy
cead1bcb21 Rename ValidFramesInvalidator->ValidFramesConsumer 2022-11-29 13:49:37 +01:00
Tyler Neely
ec529da8d2 Address clang-tidy feedback 2022-11-29 12:28:19 +00:00
jbajic
ce5f1c2f17 Add memory runner for insert 2022-11-29 13:27:28 +01:00
Tyler Neely
1b77e029ca Use the Async methods under the hood for the blocking RsmClient::Send*Request methods for code-reuse 2022-11-29 11:33:49 +00:00
jeremy
16c30d61aa Merge branch 'T1163-MG-add-multiframe-and-some-operators' of github.com:memgraph/memgraph into T1163-MG-add-multiframe-and-some-operators 2022-11-29 12:30:11 +01:00
Jeremy B
b8de2c9c7a
Merge branch 'project-pineapples' into T1163-MG-add-multiframe-and-some-operators 2022-11-29 12:30:00 +01:00
jeremy
cd0aaeb5e9 Merge branch 'T1163-MG-add-multiframe-and-some-operators' of github.com:memgraph/memgraph into T1163-MG-add-multiframe-and-some-operators 2022-11-29 12:29:59 +01:00
Jeremy B
cf388d80fc
Update src/query/v2/plan/operator.lcp 2022-11-29 12:29:52 +01:00
Tyler Neely
a308ee501a Unify the driving of read and write requests in the RequestRouter 2022-11-29 11:25:29 +00:00
jbajic
884831ece5 Use pk 2022-11-29 11:56:01 +01:00
Tyler Neely
aa7d362296 Merge branch 'project-pineapples' of github.com:memgraph/memgraph into T1157-MG-concurrent-RsmClient-requests 2022-11-29 10:13:42 +00:00
jeremy
e946eb50d2 Add version ValidFramesModifier to distinguish between reading-only and reading+modifying 2022-11-29 11:05:11 +01:00
jeremy
8f19ce88d9 Rename ItOnNonConstInvalidFrames->InvalidFramesPopulator 2022-11-29 10:40:52 +01:00
jeremy
7c37ed2313 Rename ItOnNonConstValidFrames -> ValidFramesInvalidator 2022-11-29 10:38:01 +01:00
jeremy
11119e5406 Rename ItOnConstValidFrames->ValidFramesReader 2022-11-29 10:32:52 +01:00
Tyler Neely
7d52eedb21
Merge pull request #674 from memgraph/tyler_rename_ShardRequestManager_to_RequestRouter
[project-pineapples <- ] Rename ShardRequestManager to RequestRouter
2022-11-29 10:29:42 +01:00
Tyler Neely
4f18fa7431 Fix LCP that broke with invisible merge conflict 2022-11-29 09:07:18 +00:00
Tyler Neely
9f10c3ea06 Merge branch 'project-pineapples' of github.com:memgraph/memgraph into tyler_rename_ShardRequestManager_to_RequestRouter 2022-11-29 08:57:28 +00:00
Jeremy B
18b3550dbe
Merge branch 'project-pineapples' into T1163-MG-add-multiframe-and-some-operators 2022-11-28 17:34:07 +01:00
jeremy
b244c4d6ee Impl of Multiframe and iterators 2022-11-28 17:32:29 +01:00
Kostas Kyrimis
4dc639a05a
Merge pull request #644 from memgraph/T1159-MG-Add-memgraph-functions
Add mg-functions lib, expression evaluator units and e2e tests
2022-11-28 18:18:58 +02:00
Kostas Kyrimis
50df0d4d53 Fix python imports and replace const char array with constexpr 2022-11-28 17:31:41 +02:00
jbajic
5f5d839f0c Update analyzer script 2022-11-28 16:28:36 +01:00
gvolfing
22e3164e60 Add missing license 2022-11-28 16:18:28 +01:00
Kostas Kyrimis
8fd7327fbd Address GH comments 2022-11-28 15:38:12 +02:00
jbajic
500691318a Add analyze script 2022-11-28 14:15:41 +01:00
Tyler Neely
9fc7f9dced Standardize RequestRouter variable names as request_router 2022-11-28 13:03:07 +00:00
Tyler Neely
82db1d4ad8 Rename ShardRequestManager to RequestRouter 2022-11-28 12:38:38 +00:00
Kostas Kyrimis
ed0b67dfdb Fix compilation issues of the merge 2022-11-28 13:38:12 +02:00
Kostas Kyrimis
0f34c49e21 Merge branch 'project-pineapples' into T1159-MG-Add-memgraph-functions 2022-11-28 13:18:51 +02:00
Tyler Neely
f4d0c7769e
Merge pull request #670 from memgraph/tyler_set_proper_namespace_for_ShardRequestManager
Change the namespace of ShardRequestManager to query::v2 instead of msgs
2022-11-28 11:31:09 +01:00
Tyler Neely
6d3c04bd61 Address clang-tidy feedback 2022-11-28 10:09:59 +00:00
Tyler Neely
a8dc6fd41e Change the namespace of ShardRequestManager to query::v2 instead of msgs 2022-11-28 09:43:56 +00:00
Tyler Neely
de84d4d6ea Merge branch 'project-pineapples' of github.com:memgraph/memgraph into T1157-MG-concurrent-RsmClient-requests 2022-11-28 08:53:21 +00:00
jbajic
36a7abb170 Add remove benchmark 2022-11-27 22:50:36 +01:00
jbajic
41b06a0a37 Extract common functionalities 2022-11-27 22:28:23 +01:00
jbajic
cceab46a7c Add find benchmark 2022-11-27 21:59:18 +01:00
jbajic
243fa5e4b2 Add insert benchmark 2022-11-27 21:21:55 +01:00
Kostas Kyrimis
6f4996de0e Fix broken merge and address GH comments 2022-11-25 16:20:38 +02:00
Kostas Kyrimis
01d5953bb6 Merge branch 'project-pineapples' into T1159-MG-Add-memgraph-functions 2022-11-25 15:47:44 +02:00
János Benjamin Antal
6b64fd5ce5
Merge pull request #664 from memgraph/MG-implicit-fallthrough-as-error
Make implicit-fallthrough a compilation error
2022-11-24 19:32:59 +01:00
jbajic
c5138c8d58 Add b+ tree 2022-11-24 17:45:43 +01:00
jbajic
8fe1b8d7fc Add std::map and skiplist benchamrk 2022-11-24 17:44:41 +01:00
Kostas Kyrimis
7a3caa320c WiP 2022-11-24 15:29:51 +02:00
Kostas Kyrimis
07a8ac0db8 Merge branch 'project-pineapples' into T0919-MG-implement-get-properties-storage 2022-11-24 15:29:05 +02:00
János Benjamin Antal
3b798ab313
Merge branch 'project-pineapples' into MG-implicit-fallthrough-as-error 2022-11-24 11:16:57 +01:00
János Benjamin Antal
d44c1836c7
Merge pull request #649 from memgraph/T0995-MG-implement-top-error-handling-storage
Implement top level error handling in storage
2022-11-24 11:14:59 +01:00
János Benjamin Antal
c4327cfb00 Make implicit-fallthrough a compilation error 2022-11-24 11:13:55 +01:00
gvolfing
a65ea4fe01 Conform clang-tidy 2022-11-24 09:37:47 +01:00
jbajic
d820d0a9e5 Fix clang tidy errors 2022-11-23 22:29:03 +01:00
jbajic
7aa68164f0 Merge branch 'project-pineapples' into T0995-MG-implement-top-error-handling-storage 2022-11-23 21:55:08 +01:00
jbajic
d6b444c38b Log transaction id 2022-11-23 18:02:00 +01:00
János Benjamin Antal
c647134916
Merge pull request #611 from memgraph/T1083-MG-limit-and-order-expand-one_v3
Add Limit and OrderBy to ExpandOne
2022-11-23 17:37:32 +01:00
jbajic
b3eec92525 Merge branch 'project-pineapples' into T0995-MG-implement-top-error-handling-storage 2022-11-23 16:11:58 +01:00
gvolfing
1b73ca4860 Remove ScanAllById operator 2022-11-23 16:04:09 +01:00
jbajic
ab5fc05fd7 Address review comments 2022-11-23 15:32:28 +01:00
gvolfing
814c5eb397 Add the possibility of primary-key based indexing
Add a new possibility to base our indexing solution on. Add a
ScanAllOperator that represents the semantics and integrate its use
through index_lookup.
2022-11-23 15:15:26 +01:00
jeremy
e77843f2ec Merge branch 'project-pineapples' into T1083-MG-limit-and-order-expand-one_v3 2022-11-23 14:47:29 +01:00
János Benjamin Antal
40e145e7d0
Merge pull request #646 from memgraph/T1116-MG-update-delete-vertex-labels
Update/Delete vertex labels
2022-11-23 14:35:11 +01:00
Kostas Kyrimis
2ff81ebf04 Address missed GH comments && fix broken merge 2022-11-23 15:16:14 +02:00
jeremy
c3c68cc2ce Merge branch 'T1083-MG-limit-and-order-expand-one_v3' of github.com:memgraph/memgraph into T1083-MG-limit-and-order-expand-one_v3 2022-11-23 13:55:02 +01:00
jeremy
6d86801be0 Extract logic to convert OrderingDirection to Ordering 2022-11-23 13:54:54 +01:00
Kostas Kyrimis
407418e8f5 Merge branch 'project-pineapples' into T1159-MG-Add-memgraph-functions 2022-11-23 14:53:53 +02:00
Kostas Kyrimis
6d544e4fc0 Address GH comments 2022-11-23 14:51:38 +02:00
Jeremy B
56e2ad4546
Update src/storage/v3/shard_rsm.cpp
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
2022-11-23 13:24:35 +01:00
jbajic
9b19dd57d3 Remove storage include 2022-11-23 13:19:25 +01:00
Tyler Neely
5045323b1d Merge branch 'project-pineapples' of github.com:memgraph/memgraph into T1157-MG-concurrent-RsmClient-requests 2022-11-23 11:31:14 +00:00
Jeremy B
aa146d28f8
Merge branch 'project-pineapples' into T1083-MG-limit-and-order-expand-one_v3 2022-11-23 12:26:58 +01:00
János Benjamin Antal
1cd10ab409
Merge branch 'project-pineapples' into T1116-MG-update-delete-vertex-labels 2022-11-23 12:15:34 +01:00
János Benjamin Antal
36390608aa
Merge pull request #648 from memgraph/MG-remove-unused-storage-files
Remove `storage.hpp` and `storage.cpp` files
2022-11-23 12:15:09 +01:00
Jure Bajic
e565fc6e3a
Merge branch 'project-pineapples' into T0995-MG-implement-top-error-handling-storage 2022-11-23 02:33:07 -08:00
jbajic
ce0f1a09f7 Remove unused methods from accessors 2022-11-23 11:32:53 +01:00
jbajic
accf015dcf Fix clang-tidy errors 2022-11-23 11:03:17 +01:00
Jeremy B
84800cff76
Merge branch 'project-pineapples' into T1083-MG-limit-and-order-expand-one_v3 2022-11-23 10:57:05 +01:00
jbajic
9fade5ebac Fix tests 2022-11-23 10:37:33 +01:00
Jure Bajic
75606dfeb0
Merge branch 'project-pineapples' into T1116-MG-update-delete-vertex-labels 2022-11-23 00:34:27 -08:00
János Benjamin Antal
9c36d8928a
Merge branch 'project-pineapples' into MG-remove-unused-storage-files 2022-11-23 09:19:14 +01:00
János Benjamin Antal
d926c4c4a4
Merge pull request #657 from memgraph/tyler_simulator_determinism_test
Improve simulator determinism
2022-11-23 09:14:25 +01:00
Kostas Kyrimis
9ec72bd969 Add GetProperties shard handler and tests 2022-11-22 19:44:22 +02:00
jeremy
85034ddcbe Rename variable in function def 2022-11-22 17:29:02 +01:00
Tyler Neely
ea533f43fc Print out the simulator seed when we exceed the configured abort_time 2022-11-22 16:06:57 +00:00
Tyler Neely
e0086e5666 Use spdlog::trace instead of info for simulator-related messages 2022-11-22 16:06:35 +00:00
Tyler Neely
c0a103e851 Do not advance the clock with every message, as this prevents messages of a certain request depth from ever completing 2022-11-22 16:00:06 +00:00
jeremy
3a171376d7 OrderBy in Expand has two members to differentiate vertices vs edges 2022-11-22 16:47:25 +01:00
jbajic
cd7b33f23f use comparison between ErrorCode and ShardError 2022-11-22 16:22:42 +01:00
jbajic
d82cfb349e Use ShardResult 2022-11-22 15:00:32 +01:00
jeremy
6801d6ff09 Remove duplicate using statement 2022-11-22 14:49:54 +01:00
jbajic
f1e360469a Expand SHARD_ERROR with fmt format 2022-11-22 14:44:28 +01:00
jeremy
307cce9e21 Remove unused struct 2022-11-22 14:23:24 +01:00
jeremy
662fa2e6d2 Remove uneeded using statement 2022-11-22 14:22:19 +01:00
jeremy
c0cb53e156 Replace if by switch 2022-11-22 14:20:22 +01:00
jeremy
742017548f Merge branch 'T1083-MG-limit-and-order-expand-one_v3' of github.com:memgraph/memgraph into T1083-MG-limit-and-order-expand-one_v3 2022-11-22 14:11:48 +01:00
Jeremy B
bbbd722eeb
Update src/storage/v3/request_helper.hpp
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-11-22 14:11:29 +01:00
jeremy
f463d9f59f Merge branch 'T1083-MG-limit-and-order-expand-one_v3' of github.com:memgraph/memgraph into T1083-MG-limit-and-order-expand-one_v3 2022-11-22 14:10:07 +01:00
Jeremy B
a6f3937692
Update src/storage/v3/request_helper.hpp
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-11-22 14:10:02 +01:00
jbajic
5717dfb165 Add ErrorCode to SHARD_ERROR macro 2022-11-22 14:05:04 +01:00
jeremy
1101c2444c Make ConvertPropertyMap expect ref and not rvalue 2022-11-22 14:01:16 +01:00
jbajic
2a6dc7bb93 Remove success 2022-11-22 13:49:12 +01:00
jbajic
d080e260e6 Remove redundant code 2022-11-22 13:27:02 +01:00
jbajic
37f5fb29ea Introduce BoltResult 2022-11-22 13:15:32 +01:00
Tyler Neely
c8c72de6ac Use duration_cast to ensure that we are retrieving milliseconds inside SimulatorStats::operator<< 2022-11-22 11:30:24 +00:00
Tyler Neely
0b19b62b12 Set the abort_time for raft tests to 1 simulated hour 2022-11-22 11:25:24 +00:00
jbajic
bffef1a653 Use experimental source location 2022-11-22 11:13:23 +01:00
Tyler Neely
66f39f2681 Add elapsed time to the SimulatorStats 2022-11-22 08:55:48 +00:00
Tyler Neely
081c3e5bed Capitalize unsigned integer literal 2022-11-21 13:16:35 +00:00
jeremy
ce8bc522d0 Clang warning 2022-11-21 12:23:04 +01:00
Tyler Neely
0f66ae31dd Use explicit unsigned integer in right shift operation 2022-11-21 11:11:39 +00:00
jeremy
e9e42a0614 add * token to variable declaration 2022-11-21 11:50:56 +01:00
jeremy
1a67dec302 Update test to use OrderBy and Limit on Expand 2022-11-21 11:45:35 +01:00
jeremy
4eb673c7b9 Add const to variable 2022-11-21 11:31:22 +01:00
Tyler Neely
71dcba331e Increment simulator time by up to 30ms in ticks 2022-11-21 10:10:45 +00:00
Tyler Neely
e43f4e2181 Sort simulator in_flight_ messages based on a stable sort of the sender address 2022-11-21 10:08:42 +00:00
jeremy
b2050d55ce Add const 2022-11-21 10:59:38 +01:00
jeremy
2fc1aeb087 Remove unneeded using statements 2022-11-21 10:59:16 +01:00
jeremy
86e5b44c1c Remove Shard::Accessor (unused) 2022-11-21 10:42:15 +01:00
jeremy
1b0db5289d OrderByVertices only keeps OrderBy expressions which correspond to vertices 2022-11-21 10:41:08 +01:00
Tyler Neely
45badbe21f Use unsigned integer literals for bit shifting in the NewShardUuid function 2022-11-18 17:22:50 +00:00
Tyler Neely
04420a84c7 Fix incorrect usage of IncrementServerCountAndWaitForQuiescentState in the shard_rsm.cpp simulation test 2022-11-18 16:54:38 +00:00
Tyler Neely
ce45a548c7 Significantly improve the determinism of the coordinator, UUID generation, the machine manager, the shard manager, and the cluster property test 2022-11-18 16:42:18 +00:00
Tyler Neely
3ad8489735 Run raft sim with random seeds over time, but allow a seed to be easily replayed using the RunWithSeed function 2022-11-18 10:34:21 +00:00
Tyler Neely
7115a7e75b Apply clang-tidy fixes 2022-11-18 10:24:19 +00:00
Tyler Neely
0f32407bdc Add compare header to histogram collector header 2022-11-18 10:20:45 +00:00
Tyler Neely
a37e7e4aff Add assert to ensure TestAddress will not be higher than the uchar max 2022-11-18 10:19:55 +00:00
Tyler Neely
6b9a617df0 Streamline simulator tick condition variable notification. Advance time more aggressively 2022-11-18 09:20:15 +00:00
Tyler Neely
923325b8fa Progress the simulator clock even when there are messages to deliver 2022-11-18 09:04:29 +00:00
Tyler Neely
f6017697d6 Make raft tests fully deterministic for rng_seeds between 0 and 500 at 1% message loss 2022-11-17 21:32:55 +00:00
Tyler Neely
9c3d683942 Explicitly join test threads before collecting test stats 2022-11-17 21:28:17 +00:00
Tyler Neely
cf73ed529d Block messages from being delivered upon ShutDown 2022-11-17 21:27:48 +00:00
Tyler Neely
262df5c6a2 Avoid unordered_map in Raft code for more determinism 2022-11-17 21:24:13 +00:00
Tyler Neely
098084314e Make TestAddress deterministically sortable 2022-11-17 21:22:41 +00:00
Tyler Neely
12880fc71a Don't advance the simulator handle from server threads themselves 2022-11-17 18:27:12 +00:00
Tyler Neely
80d6776210 Improve simulator determinism 2022-11-17 17:36:46 +00:00
jeremy
3840c14846 Remove nocommit comment 2022-11-17 14:33:08 +01:00
jeremy
fe03f5b206 Update include to full path
Add auto to variable declaration
2022-11-17 14:11:25 +01:00
jeremy
5f88e75571 Remove double declaration 2022-11-17 14:10:49 +01:00
jeremy
27495ef43a Merge branch 'T1083-MG-limit-and-order-expand-one_v3' of github.com:memgraph/memgraph into T1083-MG-limit-and-order-expand-one_v3 2022-11-17 13:34:08 +01:00
jeremy
a499bf6dfd Rename variable 2022-11-17 13:33:11 +01:00
Jeremy B
49652d0a61
Update src/storage/v3/shard_rsm.cpp
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
2022-11-17 13:32:48 +01:00
jeremy
6e44c2295d Remove template from OrderByEdges + move to impl file 2022-11-17 13:30:39 +01:00
jeremy
d482f7da90 Merge branch 'T1083-MG-limit-and-order-expand-one_v3' of github.com:memgraph/memgraph into T1083-MG-limit-and-order-expand-one_v3 2022-11-17 12:59:37 +01:00
Jeremy B
77ab07d991
Update src/storage/v3/request_helper.hpp
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
2022-11-17 12:58:39 +01:00
jeremy
38b0b308ce Remove unnecessary reserve 2022-11-17 12:58:28 +01:00
jeremy
2f55491271 Use std::SameAsAnyOf instead of is_same_v 2022-11-17 12:45:15 +01:00
Jeremy B
68e51e73ba
Update src/storage/v3/shard_rsm.cpp
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
2022-11-17 12:42:36 +01:00
jeremy
a17a6aea5a rename variable vertice->vertex 2022-11-17 12:27:12 +01:00
jbajic
ec4804b72a Move ErrorCode to common 2022-11-17 11:03:03 +01:00
jbajic
e98ef634de Ignore warning 2022-11-16 21:03:36 +01:00
jeremy
c4e22ffde3 Remove unnecessary tag 2022-11-16 18:51:57 +01:00
jeremy
b3ef0ccd71 Moving function from shard_rsm to helper files 2022-11-16 18:50:22 +01:00
jbajic
15fc3c0834 Fix error 2022-11-16 18:12:25 +01:00
jbajic
9261fabe60 Adapt for unfound object 2022-11-16 17:41:43 +01:00
jbajic
7bdcd8f9f4 Add shard_error in response 2022-11-16 14:48:06 +01:00
jeremy
bd11225d23 Use ref instead of optional
Use ref instead of ptr
Rename variable for clarity
2022-11-16 14:14:35 +01:00
jbajic
1f159b5a80 Fix schema test 2022-11-16 08:58:06 +01:00
jbajic
14ddd7254d Fix communication and memgraph 2022-11-15 19:37:24 +01:00
jbajic
8629ee5ebc Fix query v2 2022-11-15 19:30:34 +01:00
jbajic
1c94c59a24 Fix tests 2022-11-15 19:29:56 +01:00
Tyler Neely
631d18465b Allow the RsmClient to store multiple in-flight requests. Update the ShardRequestManager to use the new request tokens and refactor some bug-prone aspects of it 2022-11-15 17:52:38 +00:00
jbajic
3f97a13493 Revert TError to Error 2022-11-15 18:30:25 +01:00
jbajic
5656a24c96 Remove unused exception 2022-11-15 18:25:25 +01:00
jbajic
9c05910e68 Adapt storage to new error handling 2022-11-15 18:25:12 +01:00
jbajic
3d66bbd988 Remove shard_operation_result 2022-11-15 18:24:40 +01:00
jbajic
07032887a4 Adapt mg-expr 2022-11-15 18:24:28 +01:00
Kostas Kyrimis
9e81fe791c Fix clang-tidy warnings, remove commented-out code, and add HasLabel
functions in ShardRequestManager to avoid throwing bad optional in the
expression evaluator
2022-11-15 19:20:01 +02:00
jeremy
cca4e97bcf Remove un-needed argument from OrderByEdges 2022-11-15 13:37:43 +01:00
Kostas Kyrimis
213583f916 Add expression evaluator && awesome_memgraph_functions unit tests 2022-11-14 20:46:20 +02:00
Kostas Kyrimis
94bc671551 Add awesome memgraph functions e2e tests 2022-11-14 20:46:19 +02:00
Kostas Kyrimis
02ef954e51 Add mg-functions lib and integrated with SE and QE 2022-11-14 20:46:16 +02:00
jeremy
131d7f2a74 OrderByElements: no longer templated over vertex/edge types. For edges, we always need access to the corresponding vertex_accessor (example of a sorting expression needing both: "vertex.map[edge]")
ComputeExpression: use an assert instead of an if check
2022-11-14 18:21:03 +01:00
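The design choice above, sketched in isolation: an edge sort key may reference the source vertex too, so the comparator has to see both objects, which is why edges are sorted as (vertex, edge) pairs. Types and names here are illustrative, not the actual storage API:

```cpp
#include <algorithm>
#include <string>
#include <tuple>
#include <vector>

// Illustrative stand-ins for the real accessors.
struct VertexAccessor { int priority; };
struct EdgeAccessor { std::string type; };

struct EdgeWithVertex {
  VertexAccessor vertex;  // kept alongside the edge so the sort
  EdgeAccessor edge;      // expression can reference both
};

// Sort edges by an expression that needs the source vertex as well.
void OrderByEdges(std::vector<EdgeWithVertex> &elems) {
  std::sort(elems.begin(), elems.end(), [](const auto &a, const auto &b) {
    // Example key: a vertex property first, then an edge property.
    return std::tie(a.vertex.priority, a.edge.type) <
           std::tie(b.vertex.priority, b.edge.type);
  });
}
```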
jbajic
618237cc96 Adapt schema validator 2022-11-14 14:25:20 +01:00
jbajic
ef755e466c Add ShardError 2022-11-14 14:15:29 +01:00
Jure Bajic
a9c5d40721
Update src/utils/template_utils.hpp
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
2022-11-14 10:32:18 +01:00
jbajic
b4c24f4506 Remove storage.hpp and storage.cpp files 2022-11-14 10:23:48 +01:00
jbajic
0462b8fc8f Address review comments 2022-11-14 09:15:03 +01:00
jbajic
6eabceca4a Add unit tests for UpdateVertex 2022-11-14 09:02:55 +01:00
jeremy
2045f54577 Correct merge issue 2022-11-09 21:36:41 +01:00
Jeremy B
18009c06b6
Merge branch 'project-pineapples' into T1083-MG-limit-and-order-expand-one_v3 2022-11-09 16:47:06 +01:00
Jure Bajic
db7b2aa59a
Merge branch 'project-pineapples' into T1116-MG-update-delete-vertex-labels 2022-11-09 07:24:26 -08:00
János Benjamin Antal
5c0e41ed44
Merge pull request #593 from memgraph/T1079-MG-add-simple-query-to-benchmark_v2
Add new dataset for mgbench
2022-11-09 16:21:56 +01:00
Jure Bajic
12e9c1a739
Merge branch 'project-pineapples' into T1116-MG-update-delete-vertex-labels 2022-11-09 07:08:04 -08:00
jbajic
a030419565 Move LogResultError to helper 2022-11-09 16:07:27 +01:00
jeremy
6df2db0d19 Remove comment to force GitHub workflow 2022-11-09 16:02:59 +01:00
jeremy
968584a8fc Add comment to force GitHub workflow 2022-11-09 16:02:25 +01:00
jbajic
8636788ab2 Fix typos 2022-11-09 16:00:25 +01:00
jbajic
691f6af36d Remove redundant code 2022-11-09 15:52:08 +01:00
jbajic
23f1536eac Add tests 2022-11-09 15:42:09 +01:00
jeremy
33add3ecd0 Force formatting 2022-11-09 15:38:51 +01:00
János Benjamin Antal
91b5092c71
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-09 13:05:30 +01:00
János Benjamin Antal
c0f576c187
Merge pull request #645 from memgraph/MG-add-python-checks-pineapples
Add python checks
2022-11-09 13:05:15 +01:00
jbajic
a2735c8953 Add label add/remove in UpdateVertex req 2022-11-09 12:10:46 +01:00
jbajic
51fb4a6e7a Remove empty line 2022-11-09 11:32:54 +01:00
jbajic
cba183898a Update actions versions 2022-11-09 10:40:46 +01:00
jbajic
5f5fe7eb29 Add python checks 2022-11-09 10:39:29 +01:00
jeremy
2087877df2 Add more checks in test 2022-11-09 08:48:51 +01:00
Jeremy B
efa4378fb4
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-09 08:26:25 +01:00
jeremy
cad0e80d00 Update test 2022-11-08 17:42:31 +01:00
János Benjamin Antal
3c07a5dc04
Merge pull request #639 from memgraph/T1145-MG-handle-insertions-of-vertices-that-already-exist-in-a-better-way
Return an error for insertions of vertices that already exist
2022-11-08 17:11:22 +01:00
Jeremy B
e442963466
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-08 17:00:30 +01:00
János Benjamin Antal
32fe4d94d9
Merge branch 'project-pineapples' into T1145-MG-handle-insertions-of-vertices-that-already-exist-in-a-better-way 2022-11-08 16:35:38 +01:00
János Benjamin Antal
94b66a4e81
Merge pull request #638 from memgraph/T1148-MG-fix-expand-one-source-vertex
Fix setting the primary key of source vertex in ExpandOne
2022-11-08 16:35:30 +01:00
jeremy
b2f3fab693 Remove comment 2022-11-08 15:06:27 +01:00
jeremy
d7ce7cea13 Merge branch 'project-pineapples' into T1083-MG-limit-and-order-expand-one_v3 2022-11-08 15:02:56 +01:00
jeremy
61b9457718 Remove split-files logic from test code 2022-11-08 11:35:54 +01:00
János Benjamin Antal
159b30ba5f
Merge branch 'project-pineapples' into T1148-MG-fix-expand-one-source-vertex 2022-11-08 08:20:25 +01:00
gvolfing
b67e5b9a6c Merge branch 'project-pineapples' into T1145-MG-handle-insertions-of-vertices-that-already-exist-in-a-better-way 2022-11-08 07:53:45 +01:00
gvolfing
79756ae6fb Modify unit test
Instead of creating an exception that would only be used in this
file, just assert the type of the error that the double vertex insertion
operation should yield.
2022-11-08 07:31:01 +01:00
jeremy
a54bcb9819 Remove un-necessary rm in workflow 2022-11-07 17:32:09 +01:00
jeremy
c16f948de9 Delete cache folder before running benchmark test 2022-11-07 13:08:35 +01:00
jeremy
2a7ed1ad82 Add single e2e benchmark test 2022-11-07 12:55:08 +01:00
jeremy
5201db46d2 Add assert for split_file 2022-11-07 12:15:57 +01:00
gvolfing
91550128a5 Conform unit test with the new error-handling 2022-11-07 11:46:24 +01:00
jeremy
d5966101ad Merge branch 'T1079-MG-add-simple-query-to-benchmark_v2' of github.com:memgraph/memgraph into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-07 10:55:48 +01:00
jbajic
88487e2513 Extract vertex creation from FillUpSourceVertexSecondaryLabels 2022-11-07 10:54:28 +01:00
jeremy
b10b1eb239 Correct shard configuration 2022-11-07 10:33:24 +01:00
jeremy
dca94f42bb Update key type in shard configuration 2022-11-07 10:14:49 +01:00
gvolfing
39b40ecf00 Extend the Error enum instead of using a separate type
The error indicating that a vertex was already inserted into the
skip-list was represented by the struct AlreadyInseertedElement. Instead
of using that struct, extend the memgraph::storage::v3::Error scoped
enum and use it to represent the double-insertion error.
2022-11-07 10:00:34 +01:00
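The pattern in this commit, for readers unfamiliar with it: the double-insertion case becomes one more enumerator of the existing scoped error enum, so callers handle it through the same channel as every other storage error. A minimal sketch; the enumerator names other than the double-insertion case are invented for illustration:

```cpp
#include <cstdint>
#include <iostream>

namespace memgraph::storage::v3 {
// Existing scoped error enum, extended with one more case instead of
// introducing a separate single-use struct.
enum class Error : uint8_t {
  SERIALIZATION_ERROR,      // illustrative
  NONEXISTENT_OBJECT,       // illustrative
  VERTEX_ALREADY_INSERTED,  // the new double-insertion case
};
}  // namespace memgraph::storage::v3

int main() {
  using memgraph::storage::v3::Error;
  Error e = Error::VERTEX_ALREADY_INSERTED;
  if (e == Error::VERTEX_ALREADY_INSERTED) {
    std::cout << "vertex already exists in the skip-list\n";
  }
}
```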
jeremy
baacc52a65 Add support for split file configuration 2022-11-07 09:54:14 +01:00
jeremy
5273d319e2 Add split file for access control 2022-11-07 09:53:29 +01:00
Jeremy B
23838b50ea
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-05 14:46:33 +01:00
Tyler Neely
d85fb94bc7
Merge pull request #634 from memgraph/T1122-MG-ShardManager-ThreadPool
Add shard manager thread pool
2022-11-04 16:41:53 +01:00
jbajic
4d2036249e Fix e2e test 2022-11-04 16:37:35 +01:00
Jure Bajic
d23643d4ff
Merge branch 'project-pineapples' into T1148-MG-fix-expand-one-source-vertex 2022-11-04 16:19:55 +01:00
jbajic
e52ce1e363 Add expandone test 2022-11-04 16:18:18 +01:00
Tyler Neely
1abfe28806 Correctly use a moved transport interface while constructing CoordinatorWorker 2022-11-04 15:11:32 +00:00
jeremy
d17970f6d9 Update default value for --datasets 2022-11-04 16:04:45 +01:00
jeremy
9e72c7cb54 Add extra safety check in case we call dataset.py 2022-11-04 15:57:26 +01:00
Tyler Neely
528e30a9be Avoid warning for not using captured this. Use std::move instead of forward where appropriate 2022-11-04 14:13:15 +00:00
Tyler Neely
bb7c7f7627 Make popped messages non-const to allow for RVO 2022-11-04 14:07:49 +00:00
jbajic
bab5e1386a Address review comments 2022-11-04 15:05:55 +01:00
gvolfing
3d954e7abc Restructure SchemaResult type and its usage
Rename SchemaResult to ShardOperationResult, move it into a separate
header, and add a new type to the underlying variant that indicates that
the vertex the user would like to insert into the skip-list already
exists.
2022-11-04 15:04:25 +01:00
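Roughly, the restructured type is a variant whose error alternative can now also say "already inserted". A hedged sketch with std::variant; the alternative set and everything beyond the ShardOperationResult name are assumptions:

```cpp
#include <iostream>
#include <variant>

// Illustrative error set; the real enum lives in storage v3.
enum class ShardError { SCHEMA_VIOLATION, VERTEX_ALREADY_INSERTED };

// Result of a shard operation: either a value or a shard error.
template <typename T>
using ShardOperationResult = std::variant<T, ShardError>;

ShardOperationResult<int> InsertVertex(bool duplicate) {
  if (duplicate) return ShardError::VERTEX_ALREADY_INSERTED;
  return 42;  // e.g. an internal id
}

int main() {
  auto res = InsertVertex(/*duplicate=*/true);
  if (auto *err = std::get_if<ShardError>(&res)) {
    std::cout << "insert failed, already inserted: "
              << (*err == ShardError::VERTEX_ALREADY_INSERTED) << '\n';
  }
}
```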
Tyler Neely
fa5c9a2568 Make items popped from *worker::Queue const. Use std::move instead of std::forward in one place 2022-11-04 11:33:02 +00:00
Tyler Neely
c745f8c877 Fix build after breaking code suggestion 2022-11-04 11:32:17 +00:00
Tyler Neely
486231b1b9
Update src/machine_manager/machine_manager.hpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-11-04 12:28:01 +01:00
Tyler Neely
43ad5855c4
Update src/machine_manager/machine_manager.hpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-11-04 12:27:40 +01:00
Tyler Neely
24864ff7d2
Update src/coordinator/coordinator_worker.hpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-11-04 12:17:57 +01:00
Tyler Neely
8598f6edf4 Fix a race condition that happens when logging from a detached thread in the cluster property test. Improve the ShardManager dtor and log statements 2022-11-04 11:14:39 +00:00
jeremy
e41073bc2c Update script to need single argument for local dataset 2022-11-04 09:17:09 +01:00
jeremy
14e3e72565 Correct badly written range 2022-11-04 08:52:47 +01:00
gvolfing
1cee7ecb8a Make ShardRsm aware of trying to write the same vertex into the skip-list 2022-11-04 08:12:37 +01:00
Jeremy B
690a389563
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-03 17:43:47 +01:00
Tyler Neely
9203616283
Merge branch 'project-pineapples' into T1122-MG-ShardManager-ThreadPool 2022-11-03 17:01:07 +01:00
Tyler Neely
d198819bf1
Merge pull request #637 from memgraph/tyler_remove_storage_v2_durability_test
Remove unit tests for storage v2
2022-11-03 17:00:46 +01:00
jbajic
3a8f01af79 Fix expand one source vertex pk setting 2022-11-03 16:29:28 +01:00
Tyler Neely
7e6ec8bb26 Capture this instead of all references in scope for call to std::visit to route CoordinatorWorker messages 2022-11-03 14:38:16 +00:00
Tyler Neely
6138d76690 Remove unit tests for storage v2 2022-11-03 14:21:17 +00:00
Tyler Neely
25fdb1a1f0 Make the high_density_shard_create_scan test run much faster 2022-11-03 13:48:11 +00:00
Tyler Neely
2de1d6c359 Fix UB due to integer overflow 2022-11-03 13:27:45 +00:00
Tyler Neely
0364311dd0 Log latency histograms in the high_density_shard_create_scan test 2022-11-03 12:44:47 +00:00
Tyler Neely
9235515dab Reduce high_density_shard_create_scan shard+thread combinations. Log Raft write request demangled names 2022-11-03 12:38:12 +00:00
Tyler Neely
b83fb287ad Apply feedback from clang-tidy 2022-11-03 11:00:07 +00:00
jeremy
70dc19dfdb Mgbench: apply filtering on results from client 2022-11-03 11:03:21 +01:00
Tyler Neely
b685a21171 Merge branch 'project-pineapples' of github.com:memgraph/memgraph into T1122-MG-ShardManager-ThreadPool 2022-11-03 09:42:28 +00:00
Tyler Neely
4db83b8159 Add a few safety checks to the new concurrent Queue structures and the promise maps in the transport layer 2022-11-03 09:41:28 +00:00
Tyler Neely
dd8dd4f6c4 Restructure responsibilities for assigning request ids to the transport handles. Simplify promise tracking to avoid replier addresses, enabling eventual direct server return (DSR) 2022-11-03 09:31:06 +00:00
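Condensed, the restructuring above keys in-flight promises purely by a request id handed out by the transport handle, so a response can be matched without knowing who replied, which is the precondition for direct server return. A simplified sketch; the class and method names are invented for illustration:

```cpp
#include <cstdint>
#include <future>
#include <iostream>
#include <string>
#include <unordered_map>
#include <utility>

// The transport handle owns id assignment and the promise map.
class TransportHandle {
  uint64_t next_request_id_ = 0;
  std::unordered_map<uint64_t, std::promise<std::string>> promises_;

 public:
  // Issue a request: the handle picks the id and stores the promise.
  std::pair<uint64_t, std::future<std::string>> Send() {
    uint64_t id = next_request_id_++;
    std::promise<std::string> p;
    auto fut = p.get_future();
    promises_.emplace(id, std::move(p));
    return {id, std::move(fut)};
  }

  // Deliver a response: only the id is needed, not the replier address,
  // which is what leaves room for direct server return later.
  void Deliver(uint64_t id, std::string payload) {
    auto it = promises_.find(id);
    if (it == promises_.end()) return;  // late or unknown response
    it->second.set_value(std::move(payload));
    promises_.erase(it);
  }
};

int main() {
  TransportHandle handle;
  auto [id, fut] = handle.Send();
  handle.Deliver(id, "pong");
  std::cout << fut.get() << '\n';
}
```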
Tyler Neely
fa1ddfea12 Fix a bug where the MachineManager's destructor became incorrect after being moved 2022-11-02 17:45:27 +00:00
Tyler Neely
a815ec9617 Handle Coordinator work on a separate thread, unblocking the MachineManager to route additional messages to Shards 2022-11-02 17:15:52 +00:00
Tyler Neely
78528bd609 Avoid the ShutDown explicit acknowledgement due to jthread already blocking on this 2022-11-02 17:12:21 +00:00
Tyler Neely
6239f4fc3e Simplify usage of PromiseKey in LocalTransportHandle to avoid replier address 2022-11-02 17:11:26 +00:00
Jeremy B
c5eb3ff2c0
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-02 17:01:08 +01:00
János Benjamin Antal
9f71ce0f78
Merge pull request #635 from memgraph/MG-expand-fixes 2022-11-02 15:55:49 +01:00
János Benjamin Antal
589dd36bf2 Make function parameter constant 2022-11-02 14:44:54 +01:00
János Benjamin Antal
57e7169203 Eliminate dangling reference from lambda 2022-11-02 14:22:01 +01:00
János Benjamin Antal
bb3b053375 Create edge in the correct direction 2022-11-02 14:22:01 +01:00
János Benjamin Antal
a96f489756 Count the number of requests per operator 2022-11-02 14:22:01 +01:00
jeremy
e909e7d2d8 Format 2022-11-02 14:18:04 +01:00
Jeremy B
2d91a7fd1e
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-02 14:14:24 +01:00
Jeremy B
1d18f1197f
Update tests/mgbench/dataset_creator.py
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-11-02 14:14:16 +01:00
Jeremy B
edeebf46ec
Update tests/mgbench/benchmark.py
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-11-02 14:13:58 +01:00
Jeremy B
1148fe9aad
Update tests/mgbench/datasets.py
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-11-02 14:13:40 +01:00
János Benjamin Antal
c92c795b1a
Merge pull request #632 from memgraph/MG-improve-parameters 2022-11-02 14:06:30 +01:00
János Benjamin Antal
0fee412f92
Merge branch 'project-pineapples' into MG-improve-parameters 2022-11-02 13:37:35 +01:00
Tyler Neely
7596e85358 When message conversion fails, demangle the concrete type name and log a useful error message 2022-11-02 12:37:34 +00:00
Jeremy B
014836ca3c
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-02 12:51:12 +01:00
Tyler Neely
68654b5a19 Merge branch 'project-pineapples' of github.com:memgraph/memgraph into T1122-MG-ShardManager-ThreadPool 2022-11-02 10:55:18 +00:00
János Benjamin Antal
4d96bf5006
Merge pull request #628 from memgraph/T1138-MG-fix-edge-id-allocator
Fix passing of edge IdAllocator not by value but as pointer
2022-11-02 10:40:35 +01:00
Marko Budiselić
9bb70eb0b9
Merge branch 'project-pineapples' into T1138-MG-fix-edge-id-allocator 2022-11-01 19:38:10 +01:00
Jeremy B
7076788dd5
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-01 18:51:57 +01:00
Tyler Neely
ee4be9aa5b
Merge pull request #631 from memgraph/tyler_remove_shard_map_from_machine_manager
Remove redundant ShardMap copy from MachineManager to avoid race conditions
2022-11-01 18:33:37 +01:00
Jeremy B
96a2ad4b63
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-01 18:21:36 +01:00
Tyler Neely
84509fa477 Fix merge conflict with MachineManager constructor 2022-11-01 17:07:14 +00:00
Tyler Neely
19b5fe3caf
Merge branch 'project-pineapples' into tyler_remove_shard_map_from_machine_manager 2022-11-01 17:57:03 +01:00
Tyler Neely
74db669e23
Merge pull request #622 from memgraph/tyler_1k_shard_unit_test
Add test for 1k shards, 1k create vertices, and then a scan all
2022-11-01 17:56:42 +01:00
Tyler Neely
599802033f
Reformat TODO comment 2022-11-01 17:21:15 +01:00
Tyler Neely
6dd57426f8 Revert broken code suggestion 2022-11-01 15:51:37 +00:00
Tyler Neely
3c9f0c48e9
Merge branch 'project-pineapples' into tyler_1k_shard_unit_test 2022-11-01 16:31:29 +01:00
Tyler Neely
9a1258a708
Update tests/unit/1k_shards_1k_create_scanall.cpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-11-01 16:31:14 +01:00
Tyler Neely
c55ca836a4
Update tests/unit/1k_shards_1k_create_scanall.cpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-11-01 16:31:06 +01:00
Tyler Neely
5674ef4016
Update tests/unit/1k_shards_1k_create_scanall.cpp
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-11-01 16:30:37 +01:00
Tyler Neely
77c2afc9e7 Add TODO about ID mapping improvements 2022-11-01 15:27:35 +00:00
Tyler Neely
27292dd921
Merge branch 'project-pineapples' into tyler_remove_shard_map_from_machine_manager 2022-11-01 16:20:54 +01:00
Jeremy B
e4f1fd8647
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-11-01 16:06:44 +01:00
János Benjamin Antal
4608af9d00
Merge branch 'project-pineapples' into MG-improve-parameters 2022-11-01 15:56:16 +01:00
János Benjamin Antal
50e72a7c28
Merge branch 'project-pineapples' into T1138-MG-fix-edge-id-allocator 2022-11-01 15:56:12 +01:00
János Benjamin Antal
17a104677d
Merge pull request #617 from memgraph/T1105-MG-profile-query-in-distributed 2022-11-01 15:55:52 +01:00
Tyler Neely
a6add80fc9 Use static RSM partitioning function for achieving a smooth Shard->ShardWorker distribution 2022-11-01 14:52:38 +00:00
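"Static partitioning" here can be read as: a shard's worker is a pure function of the shard's identity, so the assignment is stable and roughly uniform with no coordination. A hedged sketch of one such function; hashing a shard id modulo the worker count is an assumption, not necessarily the exact function used:

```cpp
#include <cstddef>
#include <functional>
#include <iostream>
#include <string>

// Deterministically map a shard to one of N worker threads.
size_t PartitionShard(const std::string &shard_id, size_t num_workers) {
  return std::hash<std::string>{}(shard_id) % num_workers;
}

int main() {
  const size_t workers = 4;
  for (const auto *id : {"shard-a", "shard-b", "shard-c"}) {
    std::cout << id << " -> worker " << PartitionShard(id, workers) << '\n';
  }
}
```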
Tyler Neely
5d3eaf6a55 Rename 1k_shards_1k_create_scanall test to high_density_shard_create_scan 2022-11-01 14:02:07 +00:00
Tyler Neely
a13f260236 Parameterize shard worker threads in the MachineConfig and simplify test output 2022-11-01 13:57:57 +00:00
jeremy
1f778ba5f3 Add possibility to pass extra test args to MGBench 2022-11-01 14:14:55 +01:00
jeremy
787987168c Make benchmark work with any custom datasets 2022-11-01 12:51:01 +01:00
János Benjamin Antal
7b4b1ba8ed Speed up Parameters::AtTokenPosition 2022-11-01 11:51:35 +01:00
Tyler Neely
bb1e8aa164 Improve histogram output by adding a pretty table printing function 2022-11-01 10:46:25 +00:00
Tyler Neely
27a1311966 Merge branch 'tyler_1k_shard_unit_test' of github.com:memgraph/memgraph into T1122-MG-ShardManager-ThreadPool 2022-11-01 09:49:15 +00:00
János Benjamin Antal
30ff6487f2 Merge remote-tracking branch 'origin/project-pineapples' into T1105-MG-profile-query-in-distributed 2022-11-01 10:32:44 +01:00
Tyler Neely
119da2d7a7
Update CMakeLists.txt 2022-11-01 09:51:43 +01:00
Tyler Neely
fa972813d2
Update src/coordinator/shard_map.cpp
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-10-31 19:05:19 +01:00
Tyler Neely
951b058116 Complete migration from single-threaded ShardManager to multi-threaded ShardWorker processing 2022-10-31 18:04:30 +00:00
Tyler Neely
d0cad6e6ba Temporarily duplicate shard management logic from ShardManager in ShardWorker 2022-10-31 17:25:08 +00:00
Tyler Neely
6138277972 Merge branch 'tyler_remove_shard_map_from_machine_manager' of github.com:memgraph/memgraph into T1122-MG-ShardManager-ThreadPool 2022-10-31 17:14:01 +00:00
Tyler Neely
2f77eb96ff Remove redundant ShardMap copy from MachineManager to avoid race conditions 2022-10-31 16:49:46 +00:00
Tyler Neely
d7bc93c55f Fill Cron next time promise from each worker thread 2022-10-31 16:17:34 +00:00
Tyler Neely
039d5f51d3
Merge branch 'project-pineapples' into T1122-MG-ShardManager-ThreadPool 2022-10-31 17:06:36 +01:00
Tyler Neely
cebe6f62fa Implement skeleton worker threadpool for the ShardManager 2022-10-31 16:03:47 +00:00
jbajic
6834ce01fe Fix edge id allocator 2022-10-31 16:26:56 +01:00
jeremy
4c5cd1f847 Add possibility to run MgBench against a local file 2022-10-31 16:10:04 +01:00
János Benjamin Antal
f9e2a66961 Add const qualifier to function parameters 2022-10-31 15:53:04 +01:00
János Benjamin Antal
d5700ab5ff Use [[unlikely]] attribute 2022-10-31 15:53:04 +01:00
Tyler Neely
b0aaba6765
Merge branch 'project-pineapples' into tyler_1k_shard_unit_test 2022-10-31 15:47:38 +01:00
jeremy
e1f18f3733 Update location of Pokec datasets on aws 2022-10-31 15:19:34 +01:00
jeremy
c90b38faf0 Update aws address for datasets 2022-10-31 14:49:32 +01:00
jeremy
acbf3c764c Remove arg from __init__ 2022-10-31 13:35:41 +01:00
jeremy
37df41796f Merge branch 'T1079-MG-add-simple-query-to-benchmark_v2' of github.com:memgraph/memgraph into T1079-MG-add-simple-query-to-benchmark_v2 2022-10-31 13:02:12 +01:00
jeremy
f04e1cda4b Add function 2022-10-31 13:02:05 +01:00
Jeremy B
f28ba89584
Update tests/mgbench/dataset_creator.py
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-10-31 13:01:42 +01:00
jeremy
60485311c8 Merge branch 'T1079-MG-add-simple-query-to-benchmark_v2' of github.com:memgraph/memgraph into T1079-MG-add-simple-query-to-benchmark_v2 2022-10-31 12:57:46 +01:00
Jeremy B
36a1c43851
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-10-31 12:57:41 +01:00
Jeremy B
bae8c084b1
Update tests/mgbench/datasets.py
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-10-31 12:56:02 +01:00
Jeremy B
2898120eeb
Update tests/mgbench/datasets.py
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-10-31 12:55:42 +01:00
jeremy
d62a45752a Remove unused variable 2022-10-31 12:55:38 +01:00
jeremy
c5ee6ffbc2 Update dataset_creator script 2022-10-31 12:41:28 +01:00
jeremy
5ef08f841a Update Dataset creation script 2022-10-31 11:56:20 +01:00
Kostas Kyrimis
59c7d81ae8
[🍍 < T1086-MG] Test distributed operators e2e (#607)
* Fix Explain queries
* Add Vertex/Edge accessor support for properties
* Fix projections
* Fix expansions to fetch destination vertex properties
* Fix improper use of ShardMap on bolt and replaced it with the ShardRequestManager 
* Add NameToId mappers on ShardRequestManager
* Add e2e tests for operators
* Fix OPTIONAL MATCH
2022-10-31 11:52:20 +02:00
jeremy
03c095e780 Update assert 2022-10-31 10:52:06 +01:00
Jeremy B
b542e49b3e
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-10-31 09:44:34 +01:00
Jeremy B
ddb30f49ea
Update datasets.py 2022-10-31 09:43:22 +01:00
János Benjamin Antal
d920d7c293
Merge branch 'project-pineapples' into T1105-MG-profile-query-in-distributed 2022-10-31 09:27:05 +01:00
jeremy
b2e9717ec3 Factor HandleRead(msgs::ExpandOneRequest.. 2022-10-28 15:31:29 +02:00
jeremy
e0f6c951c1 Add possibility to orderByElement on vector<VertexAccessor> 2022-10-28 15:31:15 +02:00
jeremy
7e35c71c58 Merge branch 'T1083-MG-limit-and-order-expand-one_v3' of github.com:memgraph/memgraph into T1083-MG-limit-and-order-expand-one_v3 2022-10-28 13:22:54 +02:00
Jeremy B
b6814b7a49
Update src/storage/v3/shard_rsm.cpp
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-10-28 13:22:49 +02:00
jeremy
903e29a081 Merge branch 'T1083-MG-limit-and-order-expand-one_v3' of github.com:memgraph/memgraph into T1083-MG-limit-and-order-expand-one_v3 2022-10-28 13:22:12 +02:00
Jeremy B
1c17692a26
Update src/storage/v3/shard_rsm.cpp
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-10-28 13:22:07 +02:00
jeremy
34fbaa0aee Merge branch 'T1083-MG-limit-and-order-expand-one_v3' of github.com:memgraph/memgraph into T1083-MG-limit-and-order-expand-one_v3 2022-10-28 13:21:52 +02:00
Jeremy B
476e2670d5
Update src/storage/v3/request_helper.cpp
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-10-28 13:21:48 +02:00
jeremy
be7aa55686 Add std::move 2022-10-28 13:21:43 +02:00
jeremy
46388ad35c Correct compilation 2022-10-28 13:10:00 +02:00
jeremy
0d5ee49e19 Correct test expectation 2022-10-28 12:55:56 +02:00
jeremy
74181114c2 Remove un-necessary variable 2022-10-28 12:55:47 +02:00
jeremy
009c1b4074 Replace include 2022-10-28 12:55:23 +02:00
jeremy
d0b8b27c29 Rename ordered->sorted 2022-10-28 12:52:37 +02:00
jeremy
79c2ae206f Update FillEdges usage (for compilation) 2022-10-28 11:24:41 +02:00
Tyler Neely
6b0168cb3d Add LocalTransport::ResponseLatencies 2022-10-28 08:51:12 +00:00
Tyler Neely
03cf264b76 Add test for 1k shards, 1k create vertices, and then a scan all 2022-10-28 08:29:27 +00:00
Tyler Neely
9448e23dc9 Check-in basic shard scheduler skeleton 2022-10-27 13:36:53 +00:00
Jeremy B
fa6129dc2b
Merge branch 'project-pineapples' into T1083-MG-limit-and-order-expand-one_v3 2022-10-26 15:59:15 +02:00
Tyler Neely
eafccaea84
Add a logarithmically bucketing histogram and record response latencies per message type (#616) 2022-10-26 15:57:11 +02:00
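The idea behind a logarithmically bucketing histogram: bucket width grows with magnitude, so a fixed, small number of buckets covers nanoseconds through seconds with bounded relative error. A minimal sketch that buckets by the bit width of the value; the real collector in the PR is more refined:

```cpp
#include <array>
#include <bit>
#include <cstddef>
#include <cstdint>
#include <iostream>

// One bucket per power of two: bucket i holds values in [2^i, 2^(i+1)).
class LogHistogram {
  std::array<uint64_t, 64> counts_{};

 public:
  void Measure(uint64_t value) {
    // 0 is folded into the first bucket.
    ++counts_[value == 0 ? 0 : std::bit_width(value) - 1];
  }

  void Print() const {
    for (size_t i = 0; i < counts_.size(); ++i) {
      if (counts_[i] == 0) continue;
      std::cout << "bucket " << i << " (>= " << (1ULL << i)
                << "): " << counts_[i] << '\n';
    }
  }
};

int main() {
  LogHistogram h;
  for (uint64_t v : {3, 5, 900, 1000, 70000}) h.Measure(v);
  h.Print();
}
```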
János Benjamin Antal
0c4f591b3c
Merge branch 'project-pineapples' into T1105-MG-profile-query-in-distributed 2022-10-26 14:48:27 +02:00
Jeremy B
aeed7c32f9
Merge branch 'project-pineapples' into T1083-MG-limit-and-order-expand-one_v3 2022-10-26 11:32:44 +02:00
János Benjamin Antal
534e365271 Suppress warning about exception escape for destructor 2022-10-26 11:22:27 +02:00
János Benjamin Antal
281ae158ec Make ReadTSC noexcept 2022-10-26 11:21:27 +02:00
János Benjamin Antal
39c9c215b1 Suppress clang-tidy warnings for 2022-10-26 11:14:24 +02:00
János Benjamin Antal
f6e78ce6da
Fix TODOs in storage engine (#614)
Fixed various TODOs that were easy to fix to improve the code quality of the
newly implemented storage.
2022-10-26 10:40:35 +02:00
Jeremy B
8e7118efde
Merge branch 'project-pineapples' into T1083-MG-limit-and-order-expand-one_v3 2022-10-26 08:39:21 +02:00
János Benjamin Antal
3cf79f5bbf
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-10-25 21:24:20 +02:00
János Benjamin Antal
6a31c49432 Flatten nested conditional statements 2022-10-25 20:30:29 +02:00
János Benjamin Antal
fea92f4829 Merge remote-tracking branch 'origin/project-pineapples' into T1105-MG-profile-query-in-distributed 2022-10-25 20:28:54 +02:00
János Benjamin Antal
1703cd039d Populate custom data of profile query with request wait times 2022-10-25 20:15:06 +02:00
gvolfing
ca2351124b
Make primary labels act as label indices (#605)
Because of the lexicographical sharding, the primary labels themselves are
acting as indexes. If a primary label is specified in a MATCH query we can
safely narrow the range of shards we have to scan through based on that label.
This PR introduces the necessary changes in order to achieve that.
2022-10-25 19:48:17 +02:00
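The mechanics behind the PR above, roughly: shards are ranges over lexicographically ordered primary keys under a primary label, so a MATCH on that label only needs the shards registered for it. A toy sketch of the narrowing step; the data structures are invented for illustration:

```cpp
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Toy shard map: primary label -> shards (named by their key range).
using ShardMap = std::map<std::string, std::vector<std::string>>;

// MATCH (n:Label ...) only has to touch shards under that primary label,
// instead of scanning every shard in the cluster.
std::vector<std::string> ShardsFor(const ShardMap &shards,
                                   const std::string &label) {
  auto it = shards.find(label);
  return it == shards.end() ? std::vector<std::string>{} : it->second;
}

int main() {
  ShardMap shards{{"Person", {"p[0..k)", "p[k..z)"}}, {"City", {"c[0..z)"}}};
  for (const auto &s : ShardsFor(shards, "Person")) std::cout << s << '\n';
}
```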
János Benjamin Antal
5784c0d473 Return the custom data for profile queries 2022-10-25 15:26:42 +02:00
János Benjamin Antal
8ebc704819 Fix profile queries with ScanAll 2022-10-25 14:37:18 +02:00
János Benjamin Antal
5939fb2b0c Start transaction properly 2022-10-25 14:35:26 +02:00
János Benjamin Antal
ee64684b0b Add ScopedCustomProfile 2022-10-25 13:37:57 +02:00
János Benjamin Antal
1e4c02f8a5 Make profile query work 2022-10-25 13:37:27 +02:00
János Benjamin Antal
8353228ba7 Upgrade json lib to 3.11.2 2022-10-25 13:10:03 +02:00
jeremy
cdab8828e4 remove else 2022-10-25 12:30:14 +02:00
jeremy
c1d0fddaac Remove unnecessary else 2022-10-25 11:31:23 +02:00
jeremy
18423ce34d remove includes 2022-10-25 11:01:04 +02:00
jeremy
b4f68e7a60 remove includes 2022-10-25 10:56:16 +02:00
jeremy
e901c1fdb7 Refactor code 2022-10-25 10:45:33 +02:00
Jeremy B
016b3ee0d2
Merge branch 'project-pineapples' into T1083-MG-limit-and-order-expand-one_v3 2022-10-25 10:30:22 +02:00
János Benjamin Antal
332afadf21
Split file parsing (#600)
Add temporary support for split files. This is only temporary solution until we
get the shard splitting implemented.
2022-10-25 10:27:13 +02:00
Tyler Neely
acc655f4fd
Model-based testing of simulated full cluster (#584)
This PR adds support for generating randomized workloads that will be executed
against a simulated cluster, as well as against a correctness model. Initially
this just generates ScanAll and CreateVertex requests, and anything that it
creates, it also inserts into a `std::set`, and when we do a ScanAll, it asserts
that we get the same number of requests back. This will become much more
sophisticated over time, but it's already hitting pay-dirt.
2022-10-24 19:54:09 +02:00
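The model-based loop described above is simple to state: feed the same random operations to the system under test and to a trivially correct model, then compare the observable results. A sketch using the PR's own example of CreateVertex and ScanAll checked against a `std::set`; the cluster stand-in is invented for illustration:

```cpp
#include <cassert>
#include <random>
#include <set>
#include <vector>

// Stand-in for the simulated cluster; only the observable surface matters.
struct FakeCluster {
  std::vector<int> vertices;
  void CreateVertex(int key) { vertices.push_back(key); }
  size_t ScanAllCount() const { return vertices.size(); }
};

int main() {
  std::mt19937 rng(42);  // replayable seed, as in the simulator tests
  std::uniform_int_distribution<int> key(0, 1'000'000);

  FakeCluster sut;
  std::set<int> model;  // the correctness model from the PR description

  for (int i = 0; i < 1000; ++i) {
    int k = key(rng);
    if (model.insert(k).second) sut.CreateVertex(k);  // apply to both
    // Invariant: a ScanAll must see exactly what the model contains.
    assert(sut.ScanAllCount() == model.size());
  }
}
```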
jeremy
862af55266 Remove #NoCommit 2022-10-24 16:39:38 +02:00
jeremy
33c9ccee66 Adapt test 2022-10-24 16:38:28 +02:00
jeremy
8b9e7e2c65 Correct behavior of batch limit (was size_t) 2022-10-24 16:23:42 +02:00
jeremy
51e6802aa7 Safeguard in case ComputeExpression is called without opt 2022-10-24 15:44:57 +02:00
jeremy
386a0c5686 add comment 2022-10-24 15:44:37 +02:00
jeremy
994aab8774 Merge branch 'T1083-MG-limit-and-order-expand-one_v3' of github.com:memgraph/memgraph into T1083-MG-limit-and-order-expand-one_v3 2022-10-24 12:08:38 +02:00
jeremy
8112957a35 Merge branch 'project-pineapples' into T1083-MG-limit-and-order-expand-one_v3 2022-10-24 12:08:02 +02:00
jeremy
b82e8748ad Attempt impl 2022-10-24 12:03:51 +02:00
Jeremy B
cb00b43ca7
Merge branch 'project-pineapples' into T1083-MG-limit-and-order-expand-one_v3 2022-10-21 18:17:34 +02:00
jeremy
55e0dbca80 Add limit to ExpandOne
Add missing pragma
Add test
Merge conflicts
2022-10-21 16:32:49 +02:00
Tyler Neely
12e7a261aa
operator<< implementations for a few foundational types (#604) 2022-10-21 15:25:40 +02:00
Marko Budiselić
1c3bb969e9
Decouple interactive planning manual test (#585) 2022-10-21 13:34:13 +02:00
Jure Bajic
e5437080c5
Handle OrderBy in ScanVertices request (#594)
- Refactor shard_rsm and move function into expr.hpp/expr.cpp and request_helper.hpp/request_helper.cpp
2022-10-21 12:33:40 +02:00
Tyler Neely
17090dd8ac
Load SPDLOG_LEVEL environment variable in unit tests binaries (#606)
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
2022-10-20 18:27:52 +02:00
gvolfing
281cebd386
Add filtering capabilities to ExpandOne request (#581) 2022-10-20 17:51:44 +02:00
János Benjamin Antal
f89a2bbf42
Make ExpandOne work in the query engine (#589) 2022-10-20 11:35:00 +02:00
gvolfing
85b8ce9101
Implement filtering capabilities for ScanAll (#578)
The ScanVertices request was not able to utilize filtering capabilities
before. With these modifications it is now able to filter the scanned
vertices based on the filter_expressions field in the
ScanVerticesRequest message type.
2022-10-19 16:09:00 +02:00
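In outline, a filtered scan evaluates each filter expression against the candidate vertex and keeps it only if all pass. A hedged sketch with predicates standing in for the parsed filter_expressions; the real code parses expression strings on the shard side:

```cpp
#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Toy vertex: property name -> integer value.
using Vertex = std::map<std::string, int>;
// Stand-in for a parsed filter expression.
using Filter = std::function<bool(const Vertex &)>;

std::vector<Vertex> ScanVertices(const std::vector<Vertex> &all,
                                 const std::vector<Filter> &filters) {
  std::vector<Vertex> out;
  for (const auto &v : all) {
    bool keep = true;
    for (const auto &f : filters) {
      if (!f(v)) { keep = false; break; }  // every filter must pass
    }
    if (keep) out.push_back(v);
  }
  return out;
}

int main() {
  std::vector<Vertex> vertices{{{"age", 17}}, {{"age", 30}}, {{"age", 64}}};
  std::vector<Filter> filters{
      [](const Vertex &v) { return v.at("age") >= 18; }};
  std::cout << ScanVertices(vertices, filters).size() << " vertices pass\n";
}
```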
Jure Bajic
6bb40a7f49
Create ExpandOne request (#580)
Refactor CreateEdge into CreateExpand
2022-10-19 13:55:46 +02:00
Jeremy B
1707ee648e
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-10-19 08:47:13 +02:00
gvolfing
07f34838bd
Add e2e test to check ScanAll on new architecture (#603)
Add testing capabilities similar to what existed before on the older
architecture. This test specifically checks whether we can create
vertices and return them with ScanAll. There is a hard-coded 3s wait in
the test, which is needed because the MachineManager needs this time to
initialize itself.
2022-10-18 21:38:56 +02:00
Jeremy B
b6f8b5e130
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-10-18 15:49:19 +02:00
jeremy
58243f4a26 Rename User->File
Use parser for argument instead of simple variable in script
2022-10-18 15:47:13 +02:00
gvolfing
d06132cb33
Make ShardRequestManager work with futures (#588)
The communication between the ShardRequestManager and the RsmClient
used to be direct. In this PR this changes into a future-based
communication type. The RsmClient stores state about the currently
processed future (either a read or write request) and exposes blocking
and non-blocking functionality to obtain the filled future. The
ShardRequestManager (for now) will send off the set of requests present
in the ExecutionState and block on each of them until the requests are
completed or the set of paginated responses (caused by, for example,
the batch-limit in ScanAll) is ready for the next round.
2022-10-18 09:53:37 +02:00
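The flow the PR describes, condensed: the client returns a future immediately, and the request manager fans out to every shard, then blocks on each future until all (possibly paginated) responses are in. A simplified sketch using std::future; the names only approximate the PR's vocabulary:

```cpp
#include <future>
#include <iostream>
#include <string>
#include <vector>

// Stand-in for RsmClient: sending yields a future, filled asynchronously.
std::future<std::string> SendReadRequest(int shard) {
  return std::async(std::launch::async, [shard] {
    return "shard " + std::to_string(shard) + " page";
  });
}

int main() {
  // ShardRequestManager side: fan out to every shard in the execution
  // state, then block on each future until all responses are in.
  std::vector<std::future<std::string>> in_flight;
  for (int shard = 0; shard < 3; ++shard) {
    in_flight.push_back(SendReadRequest(shard));
  }
  for (auto &fut : in_flight) {
    std::cout << fut.get() << '\n';  // blocking; a non-blocking poll also exists
  }
}
```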
Jeremy B
f063c1b1ad
Merge branch 'project-pineapples' into T1079-MG-add-simple-query-to-benchmark_v2 2022-10-17 11:41:55 +02:00
Marko Budiselić
5347c06d76
Add shutdown of LocalSystem during the shutdown process (#592) 2022-10-14 11:59:50 +02:00
Tyler Neely
4634ac484a
Optimize simulator RNG usage (#590)
This causes instructions for `basic_request.cpp` to drop from 281 million to 28 million
2022-10-12 19:19:06 +03:00
jeremy
a5dc818e19 Add new dataset for mgbench 2022-10-12 16:35:47 +02:00
Jeremy B
4cb3b064c4
Add filter to scan all (#575)
Add several versions of ScanAll with filters.
Add helper function to transform an expression into string that can be parsed again once on the storage.
2022-10-12 11:46:59 +02:00
Jure Bajic
23171e76b6
Integrate bolt server (#572)
* Use query-v2 in the main executable
* Set up machine manager in memgraph
* Add `ShardRequestManager` to `Interpreter`
* Make vertex creation work
* Make scan all work
* Add edge type map in shard request manager
* Send schema over request
* Empty out DbAccessor
* Store shard mapping at creation
* Remove failing CI steps

Cooltura is the best place in Zagreb!

Co-authored-by: János Benjamin Antal <benjamin.antal@memgraph.io>
2022-10-11 16:31:46 +02:00
Jeremy B
6fd64d31f2
Update usage of Shard (#574)
Updating tests to use new constructor of Shard
Commenting test shard_request_manager
2022-10-05 11:56:36 +02:00
gvolfing
87111b2f89
Implement missing message handlers (#563)
Implement the missing message handlers with basic functionality. The
implementation does not include any capabilities to filter based on
expressions.
2022-10-03 15:31:06 +02:00
Tyler Neely
b5c7078c7d
Stitch request manager with shard (#570)
Fix various bugs
2022-09-23 20:07:41 +02:00
Kostas Kyrimis
925835b080
Implement query engine client (#531)
- Add shard request manager
2022-09-22 15:05:43 +02:00
Tyler Neely
ce788f5f65
Machine manager and shard stitch (#569) 2022-09-22 13:55:16 +02:00
János Benjamin Antal
b8186bea2e
Lexicographically ordered storage (#454) 2022-09-21 18:58:31 +02:00
János Benjamin Antal
3c4856dcb7 Merge remote-tracking branch 'origin/project-pineapples' into E118-MG-lexicographically-ordered-storage 2022-09-21 18:50:00 +02:00
Marko Budiselić
b4d6dc0930
Add proper transaction handling (#550) 2022-09-21 18:25:51 +02:00
Jure Bajic
817161a915
Implement expression evaluator in storage v3 (#534)
- Enable `mg-expr` in mg-storage-v3
- Adapt id_mapper
- Change conversion function from PropertyValue to TypedValue
- Remove memgraph functions
- Enable expression tests for storage
2022-09-21 17:26:55 +02:00
Tyler Neely
58eb2caf0f Add machine manager prototype (#533) 2022-09-21 16:57:25 +02:00
gvolfing
ecda71168c
Implement message based actions (#538)
Create shard-side handlers for basic messages

Implement the handlers for CreateVertices, CreateEdges, and ScanAll. Use
or modify the defined messages to interact with individual Shards and
test their behavior. Shards are currently owned by ShardRsm
instances. The two top-level dispatching functions, Read() and Apply(),
are responsible for read and write operations respectively. Currently
there are a handful of messages that are defined but not yet utilized;
these will be used in the near future, as will a couple of handler
functions with empty implementations.
2022-09-20 11:15:19 +02:00
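The two top-level entry points can be pictured as variants of message types dispatched with std::visit: Read() for read messages, Apply() for writes. A condensed sketch; the message types are invented for illustration:

```cpp
#include <iostream>
#include <variant>

// Illustrative message types handled on the shard side.
struct ScanAllRequest {};
struct CreateVerticesRequest { int count; };

using ReadRequest = std::variant<ScanAllRequest>;
using WriteRequest = std::variant<CreateVerticesRequest>;

struct ShardRsm {
  // Dispatch read operations.
  void Read(const ReadRequest &req) {
    std::visit([](const ScanAllRequest &) { std::cout << "scan all\n"; }, req);
  }
  // Dispatch write operations.
  void Apply(const WriteRequest &req) {
    std::visit([](const CreateVerticesRequest &r) {
      std::cout << "create " << r.count << " vertices\n";
    }, req);
  }
};

int main() {
  ShardRsm shard;
  shard.Read(ScanAllRequest{});
  shard.Apply(CreateVerticesRequest{3});
}
```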
János Benjamin Antal
8e1f83acc9
Reference vertices by id in edges (#532) 2022-09-14 13:11:14 +02:00
jbajic
f910cb770c Merge branch 'project-pineapples' into E118-MG-lexicographically-ordered-storage 2022-09-09 12:39:28 +02:00
Kostas Kyrimis
38d0b89b04
Implement expression evaluator library (#486) 2022-09-07 18:15:32 +03:00
Jure Bajic
a2a6a3855b
Remove primary label from vertex (#529)
- Remove the primary label from vertex
- Pass vertex_validator instead of schema validator
- Fix vertex ctors
2022-09-07 13:36:10 +02:00
Jure Bajic
e9f0360fb3
Enable indices for storage v3 (#528)
- Enable indices in storage v3
- Add new test cases
- Change `CreateVertexAndValidate` to call `SetProperty` and `AddLabel`
2022-09-06 22:48:25 +02:00
János Benjamin Antal
fe4955447e
Merge branch 'project-pineapples' into E118-MG-lexicographically-ordered-storage 2022-09-05 14:59:01 +02:00
Tyler Neely
1631c20df2
In memory shard map (#505) 2022-09-05 15:50:54 +03:00
János Benjamin Antal
947baedbe6 Merge remote-tracking branch 'origin/project-pineapples' into E118-MG-lexicographically-ordered-storage 2022-09-02 11:41:03 +02:00
Kostas Kyrimis
c6447eb48b
Add shard requests responses (#526) 2022-09-01 18:54:47 +03:00
Jure Bajic
7e84744d07
Split storage and shards (#519)
- Rename storage to shard
- Add primary label and range for shard
- Remove id_mapper functionality from shard
- Adapt tests
2022-09-01 09:10:40 +02:00
Tyler Neely
c0d03888f4
Implement basic raft version (#498) 2022-08-30 15:07:34 +02:00
János Benjamin Antal
efb3c8d03d
Remove multi-threaded related logic and variables (#460)
* Remove logic that was necessary for optimal multi-threaded performance, such
  as accumulating deleted objects in local containers and appending them to a
  global one, and handling overlapping locking.
* Remove background GC thread.
* Remove mutexes, locks and atomics throughout storage.
2022-08-30 13:41:53 +02:00
Jure Bajic
95dbc022c0
Integrate schema and keystore (#497)
- Integrate schema and keystore on vertex creation
- Add GC test for storage v3
- Add tests for accessors
- Fix all tests related to this except for query v2
- Fix labels not returning primary label
2022-08-29 14:38:25 +02:00
Tyler Neely
a40403e3ce
Add local transport (#512)
* Create LocalTransport Io provider for sending messages to components on the same machine
* Move src/io/simulation/message_conversion.hpp to src/io/message_conversion.hpp for use in other Io providers
2022-08-29 13:49:51 +02:00
Tyler Neely
14c9e68456
Transport prototype (#466) 2022-08-12 08:24:32 +02:00
jbajic
68b26275a3 Merge branch 'project-pineapples' into E118-MG-lexicographically-ordered-storage 2022-08-08 11:34:46 +02:00
jbajic
5012824e05 Address review comments 2022-08-04 11:45:16 +02:00
Jure Bajic
a12a1ea358
Move schema to storage v3 and query v2
* Move schema to storage v3

* Remove schema from v2

* Move schema to query v2

* Remove schema from query v1

* Make glue v2

* Move schema related tests to newer versions of query and storage

* Fix typo in CMake

* Fix interpreter test

* Fix clang tidy errors

* Change temp dir name
2022-08-04 09:50:02 +02:00
János Benjamin Antal
2891041468
Make storage use KeyStore (#455) 2022-08-03 18:10:58 +02:00
János Benjamin Antal
cc5ee6a496 Merge remote-tracking branch 'origin/project-pineapples' into E118-MG-lexicographically-ordered-storage 2022-08-02 08:19:43 +02:00
jbajic
f57f30c8cf Merge branch 'project-pineapples' into E112-MG-implement-partial-schema 2022-08-01 10:46:11 +02:00
Jure Bajic
462daf3a2b
Enforce schema on vertex creation
- Separating schema definition from schema validation
- Updating vertex_accessor and db_accessors with necessary methods
- Adding a primary label to Vertex
- Adding schema tests
- Updating existing tests for storage v3, and deprecating old:
  - interpreter => interpreter_v2
  - query_plan_accumulate_aggregate => storage_v3_query_plan_accumulate_aggregate
  - query_plan_create_set_remove_delete => storage_v3_query_plan_create_set_remove_delete
  - query_plan_bag_semantics => storage_v3_query_plan_bag_semantics
  - query_plan_edge_cases => storage_v3_query_plan_edge_cases
  - query_plan_v2_create_set_remove_delete => storage_v3_query_plan_v2_create_set_remove_delete
  - query_plan_match_filter_return => storage_v3_query_plan_match_filter_return
2022-07-29 13:38:17 +02:00
Marko Budiselic
2009fefc8a Merge branch 'master' into project-pineapples 2022-07-28 15:36:51 +02:00
Marko Budiselić
eb3f96d1f6
Bring changes from master to project-pineapples (#477)
* Fix aggregation functions on `null` and group-by inputs (#448)
* Upgrade Antrl to 4.10.1 and remove antlr_lock (#441)
* Update clang-tidy job (#476)
* Add parser stress test (#463)

NOTE: Doing this to have buildable comments on the project-pineapples branch

Co-authored-by: gvolfing <107616712+gvolfing@users.noreply.github.com>
Co-authored-by: Jure Bajic <jure.bajic@memgraph.com>
2022-07-28 15:36:17 +02:00
jbajic
264b233053 Merge branch 'project-pineapples' into E112-MG-implement-partial-schema 2022-07-22 11:48:45 +02:00
Jure Bajic
2ceaf59767
Create query engine v2 (#444)
Create version v2 of the query engine.
Adjust CMake and lisp files
Connect query engine v2 with storage engine v3
2022-07-19 12:28:19 +02:00
János Benjamin Antal
c0bee760bf git 2022-07-18 08:21:04 +02:00
Jure Bajic
3f4f66b57f
Create schema DDL expressions
* Add initial schema implementation

* Add index to schema

* List schemas and enable multiple properties

* Implement SchemaTypes

* Apply suggestions from code review

Co-authored-by: Jeremy B <97525434+42jeremy@users.noreply.github.com>
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>

* Address review comments

* Remove Map and List

* Add schema operations in storage

* Add create and show schema queries

* Add privileges for schema

* Add missing keywords into lexer

* Add drop schema query

* Add schema visitors

* Update metadata

* Add PrepareSchemaQuery function

* Implement show schemas

* Add show schema query

* Fix schema visitor

* Add common schema type

* Fix grammar

* Temporary create ddl logic

* Fix naming for schemaproperty type to schema type

* Rename schemaproperty to schemapropertytype

* Enable Create schema ddl

* Override visitPropertyType

* Add initial schema implementation

* Add initial schema implementation

* Add index to schema

* List schemas and enable multiple properties

* Implement SchemaTypes

* Apply suggestions from code review

Co-authored-by: Jeremy B <97525434+42jeremy@users.noreply.github.com>
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>

* Address review comments

* Remove Map and List

* Apply suggestions from code review

Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>

Co-authored-by: Jeremy B <97525434+42jeremy@users.noreply.github.com>
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>

* Add verification on creation and deletion

* Rename DeleteSchema to DropSchema

* Remove list and map from lexer

* Fix grammar with schemaTypeMap

* Add privilege and cypher visitor tests

* Catch repeating type name in schema definition

* Fix conflicting keywords

* Add notifications

* Drop float support

* Finish interpreter tests

* Fix tests

* Fix clang tidy errors

* Fix GetSchema

* Replace for with transform

* Add cloning of schema_property_map

* Address review comments

* Rename SchemaPropertyType to SchemaType

* Remove inline

* Assert of schema properties

Co-authored-by: Jeremy B <97525434+42jeremy@users.noreply.github.com>
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-07-11 09:20:15 +02:00
Jure Bajic
2998f92595 Add initial schema implementation
* Add initial schema implementation

* Add index to schema

* List schemas and enable multiple properties

* Implement SchemaTypes

* Apply suggestions from code review

Co-authored-by: Jeremy B <97525434+42jeremy@users.noreply.github.com>
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>

* Address review comments

* Remove Map and List

* Apply suggestions from code review

Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>

Co-authored-by: Jeremy B <97525434+42jeremy@users.noreply.github.com>
Co-authored-by: János Benjamin Antal <antaljanosbenjamin@users.noreply.github.com>
Co-authored-by: Kostas Kyrimis  <kostaskyrim@gmail.com>
2022-07-08 10:33:43 +02:00
János Benjamin Antal
1bdc32ba5d
Copy storage v2 to create storage v3 (#416)
* Copy storage v2 to v3

* Integrate v3 to cmake

* Fix clang-tidy warnings

* Add dummy unit test for storage-v3 to trigger build for code analysis builds
2022-07-05 08:20:59 +02:00
János Benjamin Antal
21870a0e7e Merge branch 'master' into project-pineapples 2022-06-23 14:49:14 +02:00
1722 changed files with 109531 additions and 187976 deletions

View File

@@ -1,7 +1,6 @@
---
BasedOnStyle: Google
---
Language: Cpp
BasedOnStyle: Google
Standard: "c++20"
UseTab: Never
DerivePointerAlignment: false

View File

@@ -6,7 +6,7 @@ Checks: '*,
-altera-unroll-loops,
-android-*,
-cert-err58-cpp,
-cppcoreguidelines-avoid-do-while,
-cert-str34-c,
-cppcoreguidelines-avoid-c-arrays,
-cppcoreguidelines-avoid-goto,
-cppcoreguidelines-avoid-magic-numbers,
@@ -50,6 +50,7 @@ Checks: '*,
-misc-non-private-member-variables-in-classes,
-modernize-avoid-c-arrays,
-modernize-concat-nested-namespaces,
-modernize-loop-convert,
-modernize-pass-by-value,
-modernize-use-equals-default,
-modernize-use-nodiscard,
@@ -61,11 +62,10 @@ Checks: '*,
-readability-implicit-bool-conversion,
-readability-magic-numbers,
-readability-named-parameter,
-readability-identifier-length,
-misc-no-recursion,
-concurrency-mt-unsafe,
-bugprone-easily-swappable-parameters,
-bugprone-unchecked-optional-access'
-bugprone-easily-swappable-parameters'
WarningsAsErrors: ''
HeaderFilterRegex: 'src/.*'
AnalyzeTemporaryDtors: false

View File

@@ -33,4 +33,4 @@ for file in $modified_files; do
fi
done;
exit ${FAIL}
return ${FAIL}

View File

@@ -1,17 +1,19 @@
---
name: Bug report
about: Create a report to help us improve
title: ""
title: "[BUG] "
labels: bug
assignees: gitbuda, antonio2368
---
**Memgraph version**
Which version did you use?
**Environment**
Some information about the environment you are using Memgraph on: operating
system, architecture (ARM, x86), how do you connect, with or without docker,
which driver etc.
system, how do you connect, with or without docker, which driver etc.
**Describe the bug**
A clear and concise description of what the bug is.
@@ -20,7 +22,6 @@ A clear and concise description of what the bug is.
Steps to reproduce the behavior:
1. Run the following query '...'
2. Click on '....'
3. ... IDEALLY: link to the workload info (DATASET & QUERIES) ...
**Expected behavior**
A clear and concise description of what you expected to happen.
@@ -31,11 +32,3 @@ your problem.
**Additional context**
Add any other context about the problem here.
**Verification Environment**
Once we fix it, what do you need to verify the fix?
Do you need:
* Plain memgraph package -> for which Linux?
* Plain memgraph Docker image?
* Which architecture do you use ARM | x86?
* Full Memgraph platform?

View File

@@ -1,28 +1,11 @@
### Description
Please briefly explain the changes you made here.
Please delete either the [master < EPIC] or [master < Task] part, depending on what are your needs.
[master < Epic] PR
- [ ] Check, and update documentation if necessary
- [ ] Update [changelog](https://docs.memgraph.com/memgraph/changelog)
- [ ] Write E2E tests
- [ ] Compare the [benchmarking results](https://bench-graph.memgraph.com/) between the master branch and the Epic branch
- [ ] Provide the full content or a guide for the final git message
- [FINAL GIT MESSAGE]
[master < Task] PR
- [ ] Check, and update documentation if necessary
- [ ] Update [changelog](https://docs.memgraph.com/memgraph/changelog)
- [ ] Provide the full content or a guide for the final git message
- **[FINAL GIT MESSAGE]**
### Documentation checklist
- [ ] Add the documentation label tag
- [ ] Add the bug / feature label tag
- [ ] Add the milestone for which this feature is intended
- If not known, set for a later milestone
- [ ] Write a release note, including added/changed clauses
- **[Release note text]**
- [ ] Link the documentation PR here
- **[Documentation PR link]**
- [ ] Tag someone from docs team in the comments

View File

@@ -3,7 +3,7 @@ name: Daily Benchmark
on:
workflow_dispatch:
schedule:
- cron: "0 22 * * *"
- cron: "0 1 * * *"
jobs:
release_benchmarks:
@@ -16,7 +16,7 @@ jobs:
steps:
- name: Set up repository
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@@ -59,7 +59,7 @@ jobs:
source ve3/bin/activate
pip install -r requirements.txt
./main.py --benchmark-name "macro_benchmark" \
--benchmark-results "../../tests/macro_benchmark/.harness_summary" \
--benchmark-results-path "../../tests/macro_benchmark/.harness_summary" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
@@ -67,13 +67,7 @@ jobs:
- name: Run mgbench
run: |
cd tests/mgbench
./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results benchmark_pokec.json pokec/medium/*/*
./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_supernode.json supernode
./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_high_write_set_property.json high_write_set_property
./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results cartesian.json cartesian
./benchmark.py --num-workers-for-benchmark 12 --export-results benchmark_result.json pokec/medium/*/*
- name: Upload mgbench results
run: |
@@ -82,25 +76,7 @@ jobs:
source ve3/bin/activate
pip install -r requirements.txt
./main.py --benchmark-name "mgbench" \
--benchmark-results "../../tests/mgbench/benchmark_pokec.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
./main.py --benchmark-name "supernode" \
--benchmark-results "../../tests/mgbench/benchmark_supernode.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
./main.py --benchmark-name "high_write_set_property" \
--benchmark-results "../../tests/mgbench/benchmark_high_write_set_property.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
./main.py --benchmark-name "cartesian" \
--benchmark-results "../../tests/mgbench/cartesian.json" \
--benchmark-results-path "../../tests/mgbench/benchmark_result.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"

View File

@@ -14,97 +14,62 @@ on:
- "**/*.md"
- ".clang-format"
- "CODEOWNERS"
- "licenses/*"
jobs:
community_build:
name: "Community build"
runs-on: [self-hosted, Linux, X64, DockerMgBuild]
timeout-minutes: 60
runs-on: [self-hosted, Linux, X64, Diff]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-11
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: RelWithDebInfo
steps:
- name: Set up repository
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Spin up mgbuild container
- name: Build community binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run
# Activate toolchain.
source /opt/toolchain-v4/activate
- name: Build release binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph --community
# Initialize dependencies.
./init
# Build community binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=release -DMG_ENTERPRISE=OFF ..
make -j$THREADS
- name: Run unit tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph unit
# Activate toolchain.
source /opt/toolchain-v4/activate
- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
# Run unit tests.
cd build
ctest -R memgraph__unit --output-on-failure -j$THREADS
code_analysis:
name: "Code analysis"
runs-on: [self-hosted, Linux, X64, DockerMgBuild]
timeout-minutes: 60
runs-on: [self-hosted, Linux, X64, Diff]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-11
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: Debug
steps:
- name: Set up repository
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Spin up mgbuild container
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run
# This is also needed if we want do to comparison against other branches
# See https://github.community/t/checkout-code-fails-when-it-runs-lerna-run-test-since-master/17920
- name: Fetch all history for all tags and branches
@@ -112,13 +77,10 @@ jobs:
- name: Initialize deps
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph --init-only
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
- name: Set base branch
if: ${{ github.event_name == 'pull_request' }}
@@ -132,473 +94,188 @@ jobs:
- name: Python code analysis
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph code-analysis --base-branch "${{ env.BASE_BRANCH }}"
CHANGED_FILES=$(git diff -U0 ${{ env.BASE_BRANCH }}... --name-only)
for file in ${CHANGED_FILES}; do
echo ${file}
if [[ ${file} == *.py ]]; then
python3 -m black --check --diff ${file}
python3 -m isort --check-only --profile "black" --diff ${file}
fi
done
- name: Build combined ASAN, UBSAN and coverage binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph --coverage --asan --ubsan
# Activate toolchain.
source /opt/toolchain-v4/activate
cd build
cmake -DTEST_COVERAGE=ON -DASAN=ON -DUBSAN=ON ..
make -j$THREADS memgraph__unit
- name: Run unit tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph unit-coverage
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run unit tests. It is restricted to 2 threads intentionally, because higher concurrency makes the timing related tests unstable.
cd build
LSAN_OPTIONS=suppressions=$PWD/../tools/lsan.supp UBSAN_OPTIONS=halt_on_error=1 ctest -R memgraph__unit --output-on-failure -j2
- name: Compute code coverage
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph code-coverage
# Activate toolchain.
source /opt/toolchain-v4/activate
# Compute code coverage.
cd tools/github
./coverage_convert
# Package code coverage.
cd generated
tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu
- name: Save code coverage
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: "Code coverage(Code analysis)"
name: "Code coverage"
path: tools/github/generated/code_coverage.tar.gz
- name: Set base branch
if: ${{ github.event_name == 'pull_request' }}
run: |
echo "BASE_BRANCH=origin/${{ github.base_ref }}" >> $GITHUB_ENV
- name: Set base branch # if we manually dispatch or push to master
if: ${{ github.event_name != 'pull_request' }}
run: |
echo "BASE_BRANCH=origin/master" >> $GITHUB_ENV
- name: Run clang-tidy
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph clang-tidy --base-branch "${{ env.BASE_BRANCH }}"
source /opt/toolchain-v4/activate
- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
# Restrict clang-tidy results only to the modified parts
git diff -U0 ${{ env.BASE_BRANCH }}... -- src | ./tools/github/clang-tidy/clang-tidy-diff.py -p 1 -j $THREADS -extra-arg="-DMG_CLANG_TIDY_CHECK" -path build | tee ./build/clang_tidy_output.txt
# Fail if any warning is reported
! cat ./build/clang_tidy_output.txt | ./tools/github/clang-tidy/grep_error_lines.sh > /dev/null
debug_build:
name: "Debug build"
runs-on: [self-hosted, Linux, X64, DockerMgBuild]
timeout-minutes: 100
runs-on: [self-hosted, Linux, X64, Diff]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-11
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: Debug
steps:
- name: Set up repository
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Spin up mgbuild container
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run
- name: Build debug binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph
- name: Run simulation tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run simulation tests.
cd build
ctest -R memgraph__simulation --output-on-failure -j$THREADS
- name: Run leftover CTest tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph leftover-CTest
- name: Run single benchmark test
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run single benchmark test.
cd tests/mgbench
./benchmark.py accesscontrol/small --num-workers-for-import 1 --test-system-arg "split-file splitfiles/accesscontrol_small.shard_configuration bolt-num-workers 1"
- name: Run drivers tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph drivers
- name: Run HA driver tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph drivers-high-availability
- name: Run integration tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph integration
- name: Run cppcheck and clang-format
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph cppcheck-and-clang-format
- name: Save cppcheck and clang-format errors
uses: actions/upload-artifact@v4
with:
name: "Code coverage(Debug build)"
path: tools/github/cppcheck_and_clang_format.txt
- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
release_build:
name: "Release build"
runs-on: [self-hosted, Linux, X64, DockerMgBuild]
timeout-minutes: 100
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-11
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: Release
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Spin up mgbuild container
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run
- name: Build release binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph
- name: Run GQL Behave tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph gql-behave
- name: Save quality assurance status
uses: actions/upload-artifact@v4
with:
name: "GQL Behave Status"
path: |
tests/gql_behave/gql_behave_status.csv
tests/gql_behave/gql_behave_status.html
- name: Run unit tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph unit
# This step will be skipped because the e2e stream tests have been disabled
# We need to fix this as soon as possible
- name: Ensure Kafka and Pulsar are up
if: false
run: |
cd tests/e2e/streams/kafka
docker-compose up -d
cd ../pulsar
docker-compose up -d
- name: Run simulation tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run simulation tests.
cd build
ctest -R memgraph__simulation --output-on-failure -j$THREADS
- name: Run single benchmark test
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run single benchmark test.
cd tests/mgbench
./benchmark.py accesscontrol/small --num-workers-for-import 1 --test-system-arg "split-file splitfiles/accesscontrol_small.shard_configuration bolt-num-workers 1"
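# --test-system-arg appears to forward the quoted flags to the memgraph
# instance under test: split-file points at the shard configuration and
# bolt-num-workers limits Bolt to a single worker (a reading based on the
# flag names rather than documented semantics).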
- name: Run e2e tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph e2e
# TODO(gitbuda): Setup mgclient and pymgclient properly.
cd tests
./setup.sh
source ve3/bin/activate
cd e2e
LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory ./distributed_queries
# Same as two steps prior
- name: Ensure Kafka and Pulsar are down
if: false
run: |
cd tests/e2e/streams/kafka
docker-compose down
cd ../pulsar
docker-compose down
- name: Run query performance tests
run: |
cd tests/manual
./query_performance_runner.py
- name: Run stress test (plain)
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph stress-plain
- name: Run stress test (SSL)
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph stress-ssl
- name: Run durability test
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph durability
- name: Create enterprise DEB package
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
package-memgraph
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
copy --package
- name: Save enterprise DEB package
uses: actions/upload-artifact@v4
with:
name: "Enterprise DEB package"
path: build/output/${{ env.OS }}/memgraph*.deb
- name: Copy build logs
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
copy --build-logs
- name: Save test data
uses: actions/upload-artifact@v4
if: always()
with:
name: "Test data(Release build)"
path: build/logs
- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
release_jepsen_test:
name: "Release Jepsen Test"
runs-on: [self-hosted, Linux, X64, DockerMgBuild]
timeout-minutes: 80
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-12
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: RelWithDebInfo
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Spin up mgbuild container
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run
- name: Build release binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph
- name: Copy memgraph binary
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
copy --binary
- name: Refresh Jepsen Cluster
run: |
cd tests/jepsen
./run.sh cluster-refresh
- name: Run Jepsen tests
run: |
cd tests/jepsen
./run.sh test-all-individually --binary ../../build/memgraph --ignore-run-stdout-logs --ignore-run-stderr-logs
- name: Save Jepsen report
uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: "Jepsen Report"
path: tests/jepsen/Jepsen.tar.gz
- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
release_benchmarks:
name: "Release benchmarks"
runs-on: [self-hosted, Linux, X64, DockerMgBuild, Gen7]
timeout-minutes: 60
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-11
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: Release
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Spin up mgbuild container
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run
- name: Build release binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph
- name: Run macro benchmarks
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph macro-benchmark
- name: Get branch name (merge)
if: github.event_name != 'pull_request'
@ -612,49 +289,12 @@ jobs:
- name: Upload macro benchmark results
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph upload-to-bench-graph \
--benchmark-name "macro_benchmark" \
--benchmark-results "../../tests/macro_benchmark/.harness_summary" \
--github-run-id ${{ github.run_id }} \
--github-run-number ${{ github.run_number }} \
--head-branch-name ${{ env.BRANCH_NAME }}
- name: Run mgbench
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph mgbench
- name: Upload mgbench results
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph upload-to-bench-graph \
--benchmark-name "mgbench" \
--benchmark-results "../../tests/mgbench/benchmark_result.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
cd tools/bench-graph-client
virtualenv -p python3 ve3
source ve3/bin/activate
pip install -r requirements.txt
./main.py --benchmark-name "query_performance" \
--benchmark-results-path "../../build/tests/manual/query_performance_benchmark/summary.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"


@ -14,7 +14,7 @@ jobs:
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@ -39,7 +39,7 @@ jobs:
source /opt/toolchain-v4/activate
# The results are also written to standard output in order to retain them in the logs
./tools/github/clang-tidy/run-clang-tidy.py -p build -j $THREADS -extra-arg="-DMG_CLANG_TIDY_CHECK" -clang-tidy-binary=/opt/toolchain-v4/bin/clang-tidy "$PWD/src/*" |
tee ./build/full_clang_tidy_output.txt
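# -extra-arg="-DMG_CLANG_TIDY_CHECK" defines the macro during analysis only,
# presumably so sources can gate code behind #ifdef MG_CLANG_TIDY_CHECK.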
- name: Summarize clang-tidy results

.github/workflows/package_all.yaml vendored Normal file

@ -0,0 +1,178 @@
name: Package All
# TODO(gitbuda): Cleanup docker container if GHA job was canceled.
on: workflow_dispatch
jobs:
centos-7:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-7
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: centos-7
path: build/output/centos-7/memgraph*.rpm
centos-9:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-9
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: centos-9
path: build/output/centos-9/memgraph*.rpm
debian-10:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-10
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: debian-10
path: build/output/debian-10/memgraph*.deb
debian-11:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: debian-11
path: build/output/debian-11/memgraph*.deb
docker:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
cd release/package
./run.sh package debian-11 --for-docker
./run.sh docker
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: docker
path: build/output/docker/memgraph*.tar.gz
ubuntu-1804:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-18.04
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: ubuntu-1804
path: build/output/ubuntu-18.04/memgraph*.deb
ubuntu-2004:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-20.04
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: ubuntu-2004
path: build/output/ubuntu-20.04/memgraph*.deb
ubuntu-2204:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: ubuntu-2204
path: build/output/ubuntu-22.04/memgraph*.deb
debian-11-platform:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11 --for-platform
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: debian-11-platform
path: build/output/debian-11/memgraph*.deb
debian-11-arm:
runs-on: [self-hosted, DockerMgBuild, ARM64, strange]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11-arm
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: debian-11-arm
path: build/output/debian-11-arm/memgraph*.deb


@ -1,295 +0,0 @@
name: Package memgraph
# TODO(gitbuda): Cleanup docker container if GHA job was canceled.
on:
workflow_dispatch:
inputs:
memgraph_version:
description: "Memgraph version to upload as. Leave this field empty if you don't want to upload binaries to S3. Format: 'X.Y.Z'"
required: false
build_type:
type: choice
description: "Memgraph Build type. Default value is Release"
default: 'Release'
options:
- Release
- RelWithDebInfo
target_os:
type: choice
description: "Target OS for which memgraph will be packaged. Select 'all' if you want to package for every listed OS. Default is Ubuntu 22.04"
default: 'ubuntu-22_04'
options:
- all
- amzn-2
- centos-7
- centos-9
- debian-10
- debian-11
- debian-11-arm
- debian-11-platform
- docker
- fedora-36
- ubuntu-18_04
- ubuntu-20_04
- ubuntu-22_04
- ubuntu-22_04-arm
jobs:
amzn-2:
if: ${{ github.event.inputs.target_os == 'amzn-2' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package amzn-2 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: amzn-2
path: build/output/amzn-2/memgraph*.rpm
centos-7:
if: ${{ github.event.inputs.target_os == 'centos-7' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-7 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: centos-7
path: build/output/centos-7/memgraph*.rpm
centos-9:
if: ${{ github.event.inputs.target_os == 'centos-9' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-9 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: centos-9
path: build/output/centos-9/memgraph*.rpm
debian-10:
if: ${{ github.event.inputs.target_os == 'debian-10' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-10 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-10
path: build/output/debian-10/memgraph*.deb
debian-11:
if: ${{ github.event.inputs.target_os == 'debian-11' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11
path: build/output/debian-11/memgraph*.deb
debian-11-arm:
if: ${{ github.event.inputs.target_os == 'debian-11-arm' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, ARM64, strange]
timeout-minutes: 120
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11-arm ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11-aarch64
path: build/output/debian-11-arm/memgraph*.deb
debian-11-platform:
if: ${{ github.event.inputs.target_os == 'debian-11-platform' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11 ${{ github.event.inputs.build_type }} --for-platform
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11-platform
path: build/output/debian-11/memgraph*.deb
docker:
if: ${{ github.event.inputs.target_os == 'docker' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
cd release/package
./run.sh package debian-11 ${{ github.event.inputs.build_type }} --for-docker
./run.sh docker
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: docker
path: build/output/docker/memgraph*.tar.gz
fedora-36:
if: ${{ github.event.inputs.target_os == 'fedora-36' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package fedora-36 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: fedora-36
path: build/output/fedora-36/memgraph*.rpm
ubuntu-18_04:
if: ${{ github.event.inputs.target_os == 'ubuntu-18_04' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-18.04 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-18.04
path: build/output/ubuntu-18.04/memgraph*.deb
ubuntu-20_04:
if: ${{ github.event.inputs.target_os == 'ubuntu-20_04' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-20.04 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-20.04
path: build/output/ubuntu-20.04/memgraph*.deb
ubuntu-22_04:
if: ${{ github.event.inputs.target_os == 'ubuntu-22_04' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-22.04
path: build/output/ubuntu-22.04/memgraph*.deb
ubuntu-22_04-arm:
if: ${{ github.event.inputs.target_os == 'ubuntu-22_04-arm' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, ARM64, strange]
timeout-minutes: 120
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04-arm ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-22.04-aarch64
path: build/output/ubuntu-22.04-arm/memgraph*.deb
upload-to-s3:
# Only run the upload when a version is specified; this allows runs without uploading.
if: "${{ github.event.inputs.memgraph_version != '' }}"
needs: [amzn-2, centos-7, centos-9, debian-10, debian-11, debian-11-arm, debian-11-platform, docker, fedora-36, ubuntu-18_04, ubuntu-20_04, ubuntu-22_04, ubuntu-22_04-arm]
runs-on: ubuntu-latest
steps:
- name: Download artifacts
uses: actions/download-artifact@v4
with:
# name: # if name input parameter is not provided, all artifacts are downloaded
# and put in directories named after each one.
path: build/output/release
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "download.memgraph.com"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output/release"
DEST_DIR: "memgraph/v${{ github.event.inputs.memgraph_version }}/"


@ -1,85 +0,0 @@
name: Run performance benchmarks manually
on:
workflow_dispatch:
jobs:
performance_benchmarks:
name: "Performance benchmarks"
runs-on: [self-hosted, Linux, X64, Diff, Gen7]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build only memgraph release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=release ..
make -j$THREADS
- name: Get branch name (merge)
if: github.event_name != 'pull_request'
shell: bash
run: echo "BRANCH_NAME=$(echo ${GITHUB_REF#refs/heads/} | tr / -)" >> $GITHUB_ENV
- name: Get branch name (pull request)
if: github.event_name == 'pull_request'
shell: bash
run: echo "BRANCH_NAME=$(echo ${GITHUB_HEAD_REF} | tr / -)" >> $GITHUB_ENV
- name: Run benchmarks
run: |
cd tests/mgbench
./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results benchmark_result.json pokec/medium/*/*
./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_supernode.json supernode
./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_high_write_set_property.json high_write_set_property
./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results benchmark_cartesian.json cartesian
- name: Upload benchmark results
run: |
cd tools/bench-graph-client
virtualenv -p python3 ve3
source ve3/bin/activate
pip install -r requirements.txt
./main.py --benchmark-name "mgbench" \
--benchmark-results "../../tests/mgbench/benchmark_result.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
./main.py --benchmark-name "supernode" \
--benchmark-results "../../tests/mgbench/benchmark_supernode.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
./main.py --benchmark-name "high_write_set_property" \
--benchmark-results "../../tests/mgbench/benchmark_high_write_set_property.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
./main.py --benchmark-name "cartesian" \
--benchmark-results "../../tests/mgbench/cartesian.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"


@ -1,208 +0,0 @@
name: Release build test
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}
cancel-in-progress: true
on:
workflow_dispatch:
inputs:
build_type:
type: choice
description: "Memgraph Build type. Default value is Release."
default: 'Release'
options:
- Release
- RelWithDebInfo
push:
branches:
- "release/**"
tags:
- "v*.*.*-rc*"
- "v*.*-rc*"
schedule:
# UTC
- cron: "0 22 * * *"
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}
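# `||` falls back to 'Release' when the run was not manually dispatched,
# because github.event.inputs is empty on push and schedule triggers.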
jobs:
Debian10:
uses: ./.github/workflows/release_debian10.yaml
with:
build_type: ${{ github.event.inputs.build_type || 'Release' }}
secrets: inherit
Ubuntu20_04:
uses: ./.github/workflows/release_ubuntu2004.yaml
with:
build_type: ${{ github.event.inputs.build_type || 'Release' }}
secrets: inherit
PackageDebian10:
if: github.ref_type == 'tag'
needs: [Debian10]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-10 $BUILD_TYPE
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "deps.memgraph.io"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output"
DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-10
path: build/output/debian-10/memgraph*.deb
PackageUbuntu20_04:
if: github.ref_type == 'tag'
needs: [Ubuntu20_04]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04 $BUILD_TYPE
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "deps.memgraph.io"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output"
DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-22.04
path: build/output/ubuntu-22.04/memgraph*.deb
PackageUbuntu20_04_ARM:
if: github.ref_type == 'tag'
needs: [Ubuntu20_04]
runs-on: [self-hosted, DockerMgBuild, ARM64]
# M1 Mac mini is sometimes slower
timeout-minutes: 150
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04-arm $BUILD_TYPE
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: ubuntu-22.04-aarch64
path: build/output/ubuntu-22.04-arm/memgraph*.deb
PushToS3Ubuntu20_04_ARM:
if: github.ref_type == 'tag'
needs: [PackageUbuntu20_04_ARM]
runs-on: ubuntu-latest
steps:
- name: Download package
uses: actions/download-artifact@v4
with:
name: ubuntu-22.04-aarch64
path: build/output/release
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "deps.memgraph.io"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output/release"
DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
PackageDebian11:
if: github.ref_type == 'tag'
needs: [Debian10, Ubuntu20_04]
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11 $BUILD_TYPE
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "deps.memgraph.io"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output"
DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11
path: build/output/debian-11/memgraph*.deb
PackageDebian11_ARM:
if: github.ref_type == 'tag'
needs: [Debian10, Ubuntu20_04]
runs-on: [self-hosted, DockerMgBuild, ARM64]
# M1 Mac mini is sometimes slower
timeout-minutes: 150
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11-arm $BUILD_TYPE
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11-aarch64
path: build/output/debian-11-arm/memgraph*.deb
PushToS3Debian11_ARM:
if: github.ref_type == 'tag'
needs: [PackageDebian11_ARM]
runs-on: ubuntu-latest
steps:
- name: Download package
uses: actions/download-artifact@v4
with:
name: debian-11-aarch64
path: build/output/release
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "deps.memgraph.io"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output/release"
DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"

.github/workflows/release_centos8.yaml vendored Normal file

@ -0,0 +1,315 @@
name: Release CentOS 8
on:
workflow_dispatch:
schedule:
- cron: "0 1 * * *"
jobs:
community_build:
name: "Community build"
runs-on: [self-hosted, Linux, X64, CentOS8]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
timeout-minutes: 960
steps:
- name: Set up repository
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build community binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build community binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=release -DMG_ENTERPRISE=OFF ..
make -j$THREADS
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run unit tests.
cd build
ctest -R memgraph__unit --output-on-failure
coverage_build:
name: "Coverage build"
runs-on: [self-hosted, Linux, X64, CentOS8]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
steps:
- name: Set up repository
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build coverage binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build coverage binaries.
cd build
cmake -DTEST_COVERAGE=ON ..
make -j$THREADS memgraph__unit
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run unit tests.
cd build
ctest -R memgraph__unit --output-on-failure
- name: Compute code coverage
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Compute code coverage.
cd tools/github
./coverage_convert
# Package code coverage.
cd generated
tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu
- name: Save code coverage
uses: actions/upload-artifact@v3
with:
name: "Code coverage"
path: tools/github/generated/code_coverage.tar.gz
debug_build:
name: "Debug build"
runs-on: [self-hosted, Linux, X64, CentOS8]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
steps:
- name: Set up repository
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build debug binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build debug binaries.
cd build
cmake ..
make -j$THREADS
- name: Run leftover CTest tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run leftover CTest tests (all except unit and benchmark tests).
cd build
ctest -E "(memgraph__unit|memgraph__benchmark)" --output-on-failure
- name: Run drivers tests
run: |
./tests/drivers/run.sh
- name: Run integration tests
run: |
cd tests/integration
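# Each subdirectory is one integration test: run prepare.sh if present,
# then whichever of runner.py or runner.sh is executable.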
for name in *; do
if [ ! -d $name ]; then continue; fi
pushd $name >/dev/null
echo "Running: $name"
if [ -x prepare.sh ]; then
./prepare.sh
fi
if [ -x runner.py ]; then
./runner.py
elif [ -x runner.sh ]; then
./runner.sh
fi
echo
popd >/dev/null
done
- name: Run cppcheck and clang-format
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run cppcheck and clang-format.
cd tools/github
./cppcheck_and_clang_format diff
- name: Save cppcheck and clang-format errors
uses: actions/upload-artifact@v3
with:
name: "Code coverage"
path: tools/github/cppcheck_and_clang_format.txt
release_build:
name: "Release build"
runs-on: [self-hosted, Linux, X64, CentOS8]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
timeout-minutes: 960
steps:
- name: Set up repository
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=release ..
make -j$THREADS
- name: Create enterprise RPM package
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
cd build
# Create mgconsole.
# Use -B to force the build.
make -j$THREADS -B mgconsole
# Create enterprise RPM package.
mkdir output && cd output
cpack -G RPM --config ../CPackConfig.cmake
rpmlint memgraph*.rpm
- name: Save enterprise RPM package
uses: actions/upload-artifact@v3
with:
name: "Enterprise RPM package"
path: build/output/memgraph*.rpm
- name: Run micro benchmark tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run micro benchmark tests.
cd build
# The `eval` benchmark needs a large stack limit.
ulimit -s 262144
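# ulimit -s takes KiB, so 262144 is roughly a 256 MiB stack.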
ctest -R memgraph__benchmark -V
- name: Run macro benchmark tests
run: |
cd tests/macro_benchmark
./harness QuerySuite MemgraphRunner \
--groups aggregation 1000_create unwind_create dense_expand match \
--no-strict
- name: Run parallel macro benchmark tests
run: |
cd tests/macro_benchmark
./harness QueryParallelSuite MemgraphRunner \
--groups aggregation_parallel create_parallel bfs_parallel \
--num-database-workers 9 --num-clients-workers 30 \
--no-strict
- name: Run GQL Behave tests
run: |
cd tests/gql_behave
./continuous_integration
- name: Save quality assurance status
uses: actions/upload-artifact@v3
with:
name: "GQL Behave Status"
path: |
tests/gql_behave/gql_behave_status.csv
tests/gql_behave/gql_behave_status.html
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run unit tests.
cd build
ctest -R memgraph__unit --output-on-failure
- name: Run e2e tests
run: |
# TODO(gitbuda): Setup mgclient and pymgclient properly.
cd tests
./setup.sh
source ve3/bin/activate
cd e2e
LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory .
- name: Run stress test (plain)
run: |
cd tests/stress
./continuous_integration
- name: Run stress test (SSL)
run: |
cd tests/stress
./continuous_integration --use-ssl
- name: Run stress test (large)
run: |
cd tests/stress
./continuous_integration --large-dataset
- name: Run durability test (plain)
run: |
cd tests/stress
source ve3/bin/activate
python3 durability --num-steps 5
- name: Run durability test (large)
run: |
cd tests/stress
source ve3/bin/activate
python3 durability --num-steps 20


@ -1,38 +1,23 @@
name: Release Debian 10
on:
workflow_call:
inputs:
build_type:
type: string
description: "Memgraph Build type. Default value is Release."
default: 'Release'
workflow_dispatch:
inputs:
build_type:
type: choice
description: "Memgraph Build type. Default value is Release."
default: 'Release'
options:
- Release
- RelWithDebInfo
env:
OS: "Debian10"
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}
schedule:
- cron: "0 1 * * *"
jobs:
community_build:
name: "Community build"
runs-on: [self-hosted, Linux, X64, Debian10]
timeout-minutes: 60
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
timeout-minutes: 960
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@ -48,7 +33,7 @@ jobs:
# Build community binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DMG_ENTERPRISE=OFF ..
make -j$THREADS
- name: Run unit tests
@ -67,11 +52,10 @@ jobs:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
timeout-minutes: 60
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@ -113,19 +97,22 @@ jobs:
tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu
- name: Save code coverage
uses: actions/upload-artifact@v4
with:
name: "Code coverage(Coverage build)-${{ env.OS }}"
name: "Code coverage"
path: tools/github/generated/code_coverage.tar.gz
debug_build:
name: "Debug build"
runs-on: [self-hosted, Linux, X64, Debian10]
timeout-minutes: 60
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@ -157,6 +144,25 @@ jobs:
run: |
./tests/drivers/run.sh
- name: Run integration tests
run: |
cd tests/integration
for name in *; do
if [ ! -d $name ]; then continue; fi
pushd $name >/dev/null
echo "Running: $name"
if [ -x prepare.sh ]; then
./prepare.sh
fi
if [ -x runner.py ]; then
./runner.py
elif [ -x runner.sh ]; then
./runner.sh
fi
echo
popd >/dev/null
done
- name: Run cppcheck and clang-format
run: |
# Activate toolchain.
@ -167,49 +173,23 @@ jobs:
./cppcheck_and_clang_format diff
- name: Save cppcheck and clang-format errors
uses: actions/upload-artifact@v4
with:
name: "Code coverage(Debug build)-${{ env.OS }}"
name: "Code coverage"
path: tools/github/cppcheck_and_clang_format.txt
debug_integration_test:
name: "Debug integration tests"
runs-on: [self-hosted, Linux, X64, Debian10]
timeout-minutes: 60
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build debug binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build debug binaries.
cd build
cmake ..
make -j$THREADS
- name: Run integration tests
run: |
tests/integration/run.sh
release_build:
name: "Release build"
runs-on: [self-hosted, Linux, X64, Debian10]
timeout-minutes: 60
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
timeout-minutes: 960
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@ -225,7 +205,7 @@ jobs:
# Build release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
make -j$THREADS
- name: Create enterprise DEB package
@ -244,60 +224,11 @@ jobs:
cpack -G DEB --config ../CPackConfig.cmake
- name: Save enterprise DEB package
uses: actions/upload-artifact@v4
with:
name: "Enterprise DEB package-${{ env.OS}}"
name: "Enterprise DEB package"
path: build/output/memgraph*.deb
- name: Run GQL Behave tests
run: |
cd tests
./setup.sh /opt/toolchain-v4/activate
cd gql_behave
./continuous_integration
- name: Save quality assurance status
uses: actions/upload-artifact@v4
with:
name: "GQL Behave Status-${{ env.OS }}"
path: |
tests/gql_behave/gql_behave_status.csv
tests/gql_behave/gql_behave_status.html
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run unit tests.
cd build
ctest -R memgraph__unit --output-on-failure
release_benchmark_tests:
name: "Release Benchmark Tests"
runs-on: [self-hosted, Linux, X64, Debian10]
timeout-minutes: 60
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build release binaries
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
make -j$THREADS
- name: Run micro benchmark tests
run: |
# Activate toolchain.
@ -324,79 +255,36 @@ jobs:
--num-database-workers 9 --num-clients-workers 30 \
--no-strict
release_e2e_test:
name: "Release End-to-end Test"
runs-on: [self-hosted, Linux, X64, Debian10]
timeout-minutes: 60
- name: Run GQL Behave tests
run: |
cd tests/gql_behave
./continuous_integration
steps:
- name: Set up repository
uses: actions/checkout@v4
- name: Save quality assurance status
uses: actions/upload-artifact@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
name: "GQL Behave Status"
path: |
tests/gql_behave/gql_behave_status.csv
tests/gql_behave/gql_behave_status.html
- name: Build release binaries
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build release binaries
# Run unit tests.
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
make -j$THREADS
- name: Ensure Kafka and Pulsar are up
run: |
cd tests/e2e/streams/kafka
docker-compose up -d
cd ../pulsar
docker-compose up -d
ctest -R memgraph__unit --output-on-failure
- name: Run e2e tests
run: |
# TODO(gitbuda): Setup mgclient and pymgclient properly.
cd tests
./setup.sh /opt/toolchain-v4/activate
source ve3/bin/activate_e2e
./setup.sh
source ve3/bin/activate
cd e2e
./run.sh
- name: Ensure Kafka and Pulsar are down
if: always()
run: |
cd tests/e2e/streams/kafka
docker-compose down
cd ../pulsar
docker-compose down
release_durability_stress_tests:
name: "Release durability and stress tests"
runs-on: [self-hosted, Linux, X64, Debian10]
timeout-minutes: 60
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
make -j$THREADS
LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory .
- name: Run stress test (plain)
run: |
@ -408,6 +296,11 @@ jobs:
cd tests/stress
./continuous_integration --use-ssl
- name: Run stress test (large)
run: |
cd tests/stress
./continuous_integration --large-dataset
- name: Run durability test (plain)
run: |
cd tests/stress
@ -423,11 +316,15 @@ jobs:
release_jepsen_test:
name: "Release Jepsen Test"
runs-on: [self-hosted, Linux, X64, Debian10, JepsenControl]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
timeout-minutes: 60
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@ -437,27 +334,23 @@ jobs:
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build only memgraph release binary.
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
make -j$THREADS memgraph
- name: Refresh Jepsen Cluster
run: |
cd tests/jepsen
./run.sh cluster-refresh
- name: Run Jepsen tests
run: |
cd tests/jepsen
./run.sh test-all-individually --binary ../../build/memgraph --ignore-run-stdout-logs --ignore-run-stderr-logs
- name: Save Jepsen report
uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: "Jepsen Report-${{ env.OS }}"
name: "Jepsen Report"
path: tests/jepsen/Jepsen.tar.gz


@ -19,20 +19,20 @@ jobs:
DOCKER_REPOSITORY_NAME: memgraph
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
- name: Log in to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Download memgraph binary
run: |


@ -1,63 +0,0 @@
name: "Mgbench Bolt Client Publish Docker Image"
on:
workflow_dispatch:
inputs:
version:
description: "Mgbench bolt client version to publish on Dockerhub."
required: true
force_release:
type: boolean
required: false
default: false
jobs:
mgbench_docker_publish:
runs-on: ubuntu-latest
env:
DOCKER_ORGANIZATION_NAME: memgraph
DOCKER_REPOSITORY_NAME: mgbench-client
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v2
- name: Log in to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Check if specified version is already pushed
run: |
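# `docker manifest inspect` exits 0 only when the tag already exists on
# Docker Hub, so EXISTS captures that exit status.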
EXISTS=$(docker manifest inspect $DOCKER_ORGANIZATION_NAME/$DOCKER_REPOSITORY_NAME:${{ github.event.inputs.version }} > /dev/null; echo $?)
echo $EXISTS
if [[ ${EXISTS} -eq 0 ]]; then
echo 'The specified version has already been released to DockerHub.'
if [[ ${{ github.event.inputs.force_release }} = true ]]; then
echo 'Forcing the release!'
else
echo 'Stopping the release!'
exit 1
fi
else
echo 'All good, the specified version has not been released to DockerHub.'
fi
- name: Build & push docker images
run: |
cd tests/mgbench
docker buildx build \
--build-arg TOOLCHAIN_VERSION=toolchain-v4 \
--platform linux/amd64,linux/arm64 \
--tag $DOCKER_ORGANIZATION_NAME/$DOCKER_REPOSITORY_NAME:${{ github.event.inputs.version }} \
--tag $DOCKER_ORGANIZATION_NAME/$DOCKER_REPOSITORY_NAME:latest \
--file Dockerfile.mgbench_client \
--push .


@ -1,38 +1,23 @@
name: Release Ubuntu 20.04
on:
workflow_call:
inputs:
build_type:
type: string
description: "Memgraph Build type. Default value is Release."
default: 'Release'
workflow_dispatch:
inputs:
build_type:
type: choice
description: "Memgraph Build type. Default value is Release."
default: 'Release'
options:
- Release
- RelWithDebInfo
env:
OS: "Ubuntu 20.04"
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}
schedule:
- cron: "0 1 * * *"
jobs:
community_build:
name: "Community build"
runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
timeout-minutes: 60
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
timeout-minutes: 960
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@ -48,7 +33,7 @@ jobs:
# Build community binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DMG_ENTERPRISE=OFF ..
make -j$THREADS
- name: Run unit tests
@ -63,11 +48,14 @@ jobs:
coverage_build:
name: "Coverage build"
runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
timeout-minutes: 60
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@ -109,19 +97,22 @@ jobs:
tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu
- name: Save code coverage
uses: actions/upload-artifact@v4
with:
name: "Code coverage(Coverage build)-${{ env.OS }}"
name: "Code coverage"
path: tools/github/generated/code_coverage.tar.gz
debug_build:
name: "Debug build"
runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
timeout-minutes: 60
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@ -153,6 +144,25 @@ jobs:
run: |
./tests/drivers/run.sh
- name: Run integration tests
run: |
cd tests/integration
for name in *; do
if [ ! -d $name ]; then continue; fi
pushd $name >/dev/null
echo "Running: $name"
if [ -x prepare.sh ]; then
./prepare.sh
fi
if [ -x runner.py ]; then
./runner.py
elif [ -x runner.sh ]; then
./runner.sh
fi
echo
popd >/dev/null
done
- name: Run cppcheck and clang-format
run: |
# Activate toolchain.
@ -163,49 +173,23 @@ jobs:
./cppcheck_and_clang_format diff
- name: Save cppcheck and clang-format errors
uses: actions/upload-artifact@v4
with:
name: "Code coverage(Debug build)-${{ env.OS }}"
name: "Code coverage"
path: tools/github/cppcheck_and_clang_format.txt
debug_integration_test:
name: "Debug integration tests"
runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
timeout-minutes: 60
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build debug binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build debug binaries.
cd build
cmake ..
make -j$THREADS
- name: Run integration tests
run: |
tests/integration/run.sh
release_build:
name: "Release build"
runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
timeout-minutes: 60
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
timeout-minutes: 960
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@ -221,7 +205,7 @@ jobs:
# Build release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
make -j$THREADS
- name: Create enterprise DEB package
@ -240,60 +224,11 @@ jobs:
cpack -G DEB --config ../CPackConfig.cmake
- name: Save enterprise DEB package
uses: actions/upload-artifact@v4
with:
name: "Enterprise DEB package-${{ env.OS }}"
name: "Enterprise DEB package"
path: build/output/memgraph*.deb
- name: Run GQL Behave tests
run: |
cd tests
./setup.sh /opt/toolchain-v4/activate
cd gql_behave
./continuous_integration
- name: Save quality assurance status
uses: actions/upload-artifact@v4
with:
name: "GQL Behave Status-${{ env.OS }}"
path: |
tests/gql_behave/gql_behave_status.csv
tests/gql_behave/gql_behave_status.html
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Run unit tests.
cd build
ctest -R memgraph__unit --output-on-failure
release_benchmark_tests:
name: "Release Benchmark Tests"
runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
timeout-minutes: 60
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build release binaries
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
make -j$THREADS
- name: Run micro benchmark tests
run: |
# Activate toolchain.
@ -320,79 +255,36 @@ jobs:
--num-database-workers 9 --num-clients-workers 30 \
--no-strict
release_e2e_test:
name: "Release End-to-end Test"
runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
timeout-minutes: 60
- name: Run GQL Behave tests
run: |
cd tests/gql_behave
./continuous_integration
steps:
- name: Set up repository
uses: actions/checkout@v4
- name: Save quality assurance status
uses: actions/upload-artifact@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
name: "GQL Behave Status"
path: |
tests/gql_behave/gql_behave_status.csv
tests/gql_behave/gql_behave_status.html
- name: Build release binaries
- name: Run unit tests
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build release binaries
# Run unit tests.
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
make -j$THREADS
- name: Ensure Kafka and Pulsar are up
run: |
cd tests/e2e/streams/kafka
docker-compose up -d
cd ../pulsar
docker-compose up -d
ctest -R memgraph__unit --output-on-failure
- name: Run e2e tests
run: |
# TODO(gitbuda): Setup mgclient and pymgclient properly.
cd tests
./setup.sh /opt/toolchain-v4/activate
source ve3/bin/activate_e2e
./setup.sh
source ve3/bin/activate
cd e2e
./run.sh
- name: Ensure Kafka and Pulsar are down
if: always()
run: |
cd tests/e2e/streams/kafka
docker-compose down
cd ../pulsar
docker-compose down
release_durability_stress_tests:
name: "Release durability and stress tests"
runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
timeout-minutes: 60
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
make -j$THREADS
LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory .
- name: Run stress test (plain)
run: |
@ -404,6 +296,11 @@ jobs:
cd tests/stress
./continuous_integration --use-ssl
- name: Run stress test (large)
run: |
cd tests/stress
./continuous_integration --large-dataset
- name: Run durability test (plain)
run: |
cd tests/stress

View File

@ -1,68 +0,0 @@
name: Stress test large
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}
cancel-in-progress: true
on:
workflow_dispatch:
inputs:
build_type:
type: choice
description: "Memgraph Build type. Default value is Release."
default: 'Release'
options:
- Release
- RelWithDebInfo
push:
tags:
- "v*.*.*-rc*"
- "v*.*-rc*"
schedule:
- cron: "0 22 * * *"
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}
jobs:
stress_test_large:
name: "Stress test large"
timeout-minutes: 720
strategy:
matrix:
os: [Debian10, Ubuntu20.04]
extra: [BigMemory, Gen8]
exclude:
- os: Debian10
extra: Gen8
- os: Ubuntu20.04
extra: BigMemory
runs-on: [self-hosted, Linux, X64, "${{ matrix.os }}", "${{ matrix.extra }}"]
steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0
- name: Build release binaries
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init
# Build release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
make -j$THREADS
- name: Run stress test (large)
run: |
cd tests/stress
./continuous_integration --large-dataset
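The `workflow_dispatch` trigger above also allows manual runs with a chosen build type. A hedged sketch using the GitHub CLI (assuming `gh` is authenticated against this repository):
```bash
# Manually dispatch the large stress test with a RelWithDebInfo build;
# omitting -f build_type falls back to the 'Release' default.
gh workflow run "Stress test large" -f build_type=RelWithDebInfo
```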

View File

@ -1,32 +0,0 @@
name: Upload Package All artifacts to S3
on:
workflow_dispatch:
inputs:
memgraph_version:
description: "Memgraph version to upload as. Format: 'X.Y.Z'"
required: true
run_number:
description: "# of the package_all workflow run to upload artifacts from. Format: '#XYZ'"
required: true
jobs:
upload-to-s3:
runs-on: ubuntu-latest
steps:
- name: Download artifacts
uses: dawidd6/action-download-artifact@v4
with:
workflow: package_all.yaml
workflow_conclusion: success
run_number: "${{ github.event.inputs.run_number }}"
path: build/output/release
- name: Upload to S3
uses: jakejarvis/s3-sync-action@v0.5.1
env:
AWS_S3_BUCKET: "download.memgraph.com"
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
AWS_REGION: "eu-west-1"
SOURCE_DIR: "build/output/release"
DEST_DIR: "memgraph/v${{ github.event.inputs.memgraph_version }}/"
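Both inputs are required, so a manual dispatch must supply them. A minimal sketch with the GitHub CLI (the version and run number below are placeholders, not real releases):
```bash
# Re-upload the artifacts produced by a given package_all run under a
# versioned S3 prefix; values are illustrative only.
gh workflow run "Upload Package All artifacts to S3" \
  -f memgraph_version=2.0.0 \
  -f run_number="#123"
```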

26
.gitignore vendored
View File

@ -16,12 +16,15 @@
.ycm_extra_conf.pyc
.temp/
Testing/
/build*/
build
build/
release/examples/build
cmake-build-*
cmake/DownloadProject/
dist/
src/query/frontend/opencypher/generated/
src/query/v2/frontend/opencypher/generated/
src/parser/opencypher/generated
tags
ve/
ve3/
@ -33,6 +36,9 @@ TAGS
*.fas
*.fasl
# LCP generated C++ files
*.lcp.cpp
src/database/distributed/serialization.hpp
src/database/single_node_ha/serialization.hpp
src/distributed/bfs_rpc_messages.hpp
@ -46,11 +52,25 @@ src/distributed/pull_produce_rpc_messages.hpp
src/distributed/storage_gc_rpc_messages.hpp
src/distributed/token_sharing_rpc_messages.hpp
src/distributed/updates_rpc_messages.hpp
src/query/v2/frontend/ast/ast.hpp
src/query/frontend/ast/ast.hpp
src/storage/v3/bindings/ast/ast.hpp
src/query/distributed/frontend/ast/ast_serialization.hpp
src/query/v2/distributed/frontend/ast/ast_serialization.hpp
src/durability/distributed/state_delta.hpp
src/durability/single_node/state_delta.hpp
src/durability/single_node_ha/state_delta.hpp
src/query/frontend/semantic/symbol.hpp
src/query/v2/frontend/semantic/symbol.hpp
src/expr/semantic/symbol.hpp
src/query/distributed/frontend/semantic/symbol_serialization.hpp
src/query/v2/distributed/frontend/semantic/symbol_serialization.hpp
src/query/distributed/plan/ops.hpp
src/query/v2/distributed/plan/ops.hpp
src/query/plan/operator.hpp
src/query/v2/plan/operator.hpp
src/parser/opencypher/generated
src/expr/semantic/symbol.hpp
src/raft/log_entry.hpp
src/raft/raft_rpc_messages.hpp
src/raft/snapshot_metadata.hpp
@ -58,7 +78,3 @@ src/raft/storage_info_rpc_messages.hpp
src/stats/stats_rpc_messages.hpp
src/storage/distributed/rpc/concurrent_id_mapper_rpc_messages.hpp
src/transactions/distributed/engine_rpc_messages.hpp
/tests/manual/js/transaction_timeout/package-lock.json
/tests/manual/js/transaction_timeout/node_modules/
.vscode/
src/query/frontend/opencypher/grammar/.antlr/*

View File

@ -3,7 +3,6 @@ repos:
rev: v4.4.0
hooks:
- id: check-yaml
args: [--allow-multiple-documents]
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/psf/black
@ -20,16 +19,3 @@ repos:
rev: v13.0.0
hooks:
- id: clang-format
# - repo: local
# hooks:
# - id: clang-tidy
# name: clang-tidy
# description: Runs clang-tidy and checks for errors
# entry: python ./tools/pre-commit/clang-tidy.py
# language: python
# files: ^src/
# types: [c++, text]
# fail_fast: true
# require_serial: true
# args: [--compile_commands_path=build]
# pass_filenames: false
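For reference, these hooks fire on every commit once the git hook is installed; a minimal sketch of the usual workflow (assuming `pre-commit` is installed via pip):
```bash
pip install pre-commit        # install the tool itself
pre-commit install            # register the commit-time git hook
pre-commit run --all-files    # run check-yaml, black, clang-format, ... on the whole tree
```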

View File

@ -1,22 +0,0 @@
# Path to sources
sonar.sources = .
# sonar.exclusions=
sonar.inclusions=src,include,query_modules
# Path to tests
sonar.tests = tests/
# sonar.test.exclusions=
# sonar.test.inclusions=
# Source encoding
# sonar.sourceEncoding=
# Exclusions for copy-paste detection
# sonar.cpd.exclusions=
# Python version (for python projects only)
# sonar.python.version=
# C++ standard version (for C++ projects only)
# If not specified, it defaults to the latest supported standard
# sonar.cfamily.reportingCppStandardOverride=c++98|c++11|c++14|c++17|c++20

View File

@ -1,32 +0,0 @@
# Tantivy ADR
**Author**
Marko Budiselic (github.com/gitbuda)
**Status**
APPROVED
**Date**
January 5, 2024
**Problem**
For some of Memgraph's workloads, text search is a required feature. We don't
want to build a new text search engine because that's not Memgraph's core
value.
**Criteria**
- easy integration with our C++ codebase
- ability to operate in-memory and on-disk
- sufficient features (regex, full-text search, fuzzy search, aggregations over
text data)
- production-ready
**Decision**
All known C++ libraries are not production-ready. Recent Rust libraries, in
particular [Tantivy](https://github.com/quickwit-oss/tantivy), seem to provide
many more features, and Tantivy is production-ready. We'll integrate Tantivy
into the current Memgraph codebase via
[cxx](https://github.com/dtolnay/cxx). **We select Tantivy.**

View File

@ -1,34 +0,0 @@
# NuRaft ADR
**Author**
Marko Budiselic (github.com/gitbuda)
**Status**
PROPOSED
**Date**
January 10, 2024
**Problem**
To give Memgraph the High Availability features requested by customers, we want
reliable coordinators backed by the Raft consensus algorithm. Implementing Raft
correctly and performantly is a very challenging task: skillful Memgraph
engineers have already tried three times, and each attempt failed to deliver in
a reasonable timeframe (approximately 4 person-weeks of engineering work each time).
**Criteria**
- easy integration with our C++ codebase
- heavily tested in production environments
- implementation of performance optimizations on top of the canonical Raft
implementation
**Decision**
There are a few robust C++ implementations of Raft, but only as parts of other
projects or bigger libraries. **We select
[NuRaft](https://github.com/eBay/NuRaft)** because it focuses on delivering
Raft without bloatware, and it's used by
[Clickhouse](https://github.com/ClickHouse/ClickHouse) (a comparable peer to
Memgraph and a very well-established product).

View File

@ -1,38 +0,0 @@
# RocksDB ADR
**Author**
Marko Budiselic (github.com/gitbuda)
**Status**
ACCEPTED
**Date**
January 23, 2024
**Problem**
Interacting with data (reads and writes) on disk in a concurrent, safe, and
fast way is a challenging task. Implementing all low-level primitives to
interact with various disk hardware efficiently consumes significant
engineering effort. Whenever Memgraph has to store data on disk (or any
other storage system colder than RAM), the problem is how to do that in the
least amount of development time while satisfying all functional requirements
(often performance).
**Criteria**
- working efficiently in a highly concurrent environment
- easy integration with Memgraph's C++ codebase
- providing low-level key-value API
- heavily tested in production environments
- providing abstractions for the storage hardware (even for cloud-based
storages like S3)
**Decision**
There are a few robust key-value stores, but finding one that is
production-ready and compatible with Memgraph's C++ codebase is challenging.
**We select [RocksDB](https://github.com/facebook/rocksdb)** because it
delivers a robust API to manage data on disk; it's battle-tested in many
production environments (many database systems embed RocksDB), and
it's the most compatible option.

View File

@ -1,67 +0,0 @@
# Architecture Decision Records
Also known as ADRs, this practice has become widespread in many
high-performing engineering teams. It is a technique for communicating
between software engineers. ADRs provide a clear and documented
history of architectural choices, ensuring that everyone on the
team is on the same page. This improves communication and reduces
misunderstandings. The act of recording decisions encourages
thoughtful consideration before making choices, which can lead to
more robust and better-informed architectural decisions.
Links must be created, pointing both to and from the Github Issues
and/or the Notion Program Management "Initiative" database.
ADRs are complementary to any tech specs that get written while
designing a solution. ADRs are very short and to the point, while
tech specs will include diagrams and can be quite verbose.
## HOWTO
Each ADR will be assigned a monotonically increasing unique numeric
identifier, which will be zero-padded to 3 digits. Each ADR will
be in a single markdown file containing no more than one page of
text, and the filename will start with that unique identifier,
followed by a snake case phrase summarizing the problem. For
example: `001_architecture_decision_records.md` or
`002_big_integration_cap_theorem.md`.
We want to use an ADR when:
1. Significant Impact: This includes choices that affect scalability, performance, or fundamental design principles.
1. Long-Term Ramifications: When a decision is expected to have long-term ramifications or is difficult to reverse.
1. Architectural Principles: ADRs are suitable for documenting decisions related to architectural principles, frameworks, or patterns that shape the system's structure.
1. Controversial Choices: When a decision is likely to be controversial or may require justification in the future.
The most senior engineer on a project will evaluate and decide
whether or not an ADR is needed.
## Do
1. Keep them brief and concise.
1. Explain the trade-offs.
1. Each ADR should be about one AD, not multiple ADs
1. Don't alter existing information in an ADR. Instead, amend the ADR by adding new information, or supersede the ADR by creating a new ADR.
1. Explain your organization's situation and business priorities.
1. Include rationale and considerations based on social and skills makeups of your teams.
1. Include pros and cons that are relevant, and describe them in terms that align with your needs and goals.
1. Explain what follows from making the decision. This can include the effects, outcomes, outputs, follow ups, and more.
## Don't
1. Try to guess what the executive leader wants, and then attempt to please them. Be objective.
1. Try to solve everything all at once. A pretty good solution now is MUCH BETTER than a perfect solution later. Carpe diem!
1. Hide any doubts or unanswered questions.
1. Make it a sales pitch. Everything has upsides and downsides - be authentic and honest about them.
1. Perform merely a superficial investigation. If an ADR doesn't call for some deep thinking, then it probably shouldn't exist.
1. Ignore the long-term costs such as performance, tech debt or hardware and maintenance.
1. Get tunnel vision where creative or surprising approaches are not explored.
# Template - use the format below for each new ADR
1. **Author** - who has written the ADR
1. **Status** - one of: PROPOSED, ACCEPTED, REJECTED, SUPERSEDED-BY or DEPRECATED
1. **Date** - when the status was most recently updated
1. **Problem** - a concise paragraph explaining the context
1. **Criteria** - a list of the two or three metrics by which the solution was evaluated, and their relative weights (importance)
1. **Decision** - what was chosen as the way forward, and what the consequences are of the decision

View File

@ -1,7 +1,6 @@
# MemGraph CMake configuration
cmake_minimum_required(VERSION 3.12)
cmake_policy(SET CMP0076 NEW)
cmake_minimum_required(VERSION 3.8)
# !! IMPORTANT !! run ./project_root/init.sh before cmake command
# to download dependencies
@ -19,12 +18,10 @@ set_directory_properties(PROPERTIES CLEAN_NO_CUSTOM TRUE)
# during the code coverage process
find_program(CCACHE_FOUND ccache)
option(USE_CCACHE "ccache:" ON)
message(STATUS "CCache: ${USE_CCACHE}")
if(CCACHE_FOUND AND USE_CCACHE)
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
message(STATUS "CCache: Used")
else ()
message(STATUS "CCache: Not used")
endif(CCACHE_FOUND AND USE_CCACHE)
# choose a compiler
@ -40,14 +37,7 @@ endif()
# -----------------------------------------------------------------------------
project(memgraph LANGUAGES C CXX)
#TODO: upgrade to cmake 3.24 + CheckIPOSupported
#cmake_policy(SET CMP0138 NEW)
#include(CheckIPOSupported)
#check_ipo_supported()
#set(CMAKE_INTERPROCEDURAL_OPTIMIZATION_Release TRUE)
#set(CMAKE_INTERPROCEDURAL_OPTIMIZATION_RelWithDebInfo TRUE)
project(memgraph)
# Install licenses.
install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/licenses/
@ -153,9 +143,7 @@ endif()
# files used can be seen here:
# https://git-scm.com/book/en/v2/Git-Internals-Git-References
set(git_directory "${CMAKE_SOURCE_DIR}/.git")
# Check for directory because if the repo is cloned as a git submodule, .git is
# a file and below code doesn't work.
if (IS_DIRECTORY "${git_directory}")
if (EXISTS "${git_directory}")
set_property(DIRECTORY APPEND PROPERTY
CMAKE_CONFIGURE_DEPENDS "${git_directory}/HEAD")
file(STRINGS "${git_directory}/HEAD" git_head_data)
@ -170,7 +158,7 @@ endif()
# setup CMake module path, defines path for include() and find_package()
# https://cmake.org/cmake/help/latest/variable/CMAKE_MODULE_PATH.html
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${PROJECT_SOURCE_DIR}/cmake)
# custom function definitions
include(functions)
# -----------------------------------------------------------------------------
@ -194,9 +182,10 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON)
# c99-designator is disabled because of required mixture of designated and
# non-designated initializers in Python Query Module code (`py_module.cpp`).
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall \
-Werror=switch -Werror=switch-bool -Werror=return-type \
-Werror=switch -Werror=switch-bool -Werror=implicit-fallthrough \
-Werror=return-type \
-Werror=return-stack-address \
-Wno-c99-designator -Wmissing-field-initializers \
-Wno-c99-designator \
-DBOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT")
# Don't omit frame pointer in RelWithDebInfo, for additional callchain debug.
@ -211,13 +200,8 @@ set(CMAKE_CXX_FLAGS_RELWITHDEBINFO
# ** Static linking is allowed only for executables! **
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libgcc -static-libstdc++")
# Use lld linker to speedup build and use less memory.
add_link_options(-fuse-ld=lld)
# NOTE: Moving to latest Clang (probably starting from 15), lld stopped to work
# without explicit link_directories call.
string(REPLACE ":" " " LD_LIBS $ENV{LD_LIBRARY_PATH})
separate_arguments(LD_LIBS)
link_directories(${LD_LIBS})
# Use gold linker to speedup build
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=gold")
# release flags
set(CMAKE_CXX_FLAGS_RELEASE "-O2 -DNDEBUG")
@ -240,6 +224,7 @@ else()
endif()
# -----------------------------------------------------------------------------
# default build type is debug
if (NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE "Debug")
@ -247,17 +232,7 @@ endif()
message(STATUS "CMake build type: ${CMAKE_BUILD_TYPE}")
# -----------------------------------------------------------------------------
add_definitions( -DCMAKE_BUILD_TYPE_NAME="${CMAKE_BUILD_TYPE}")
if (NOT MG_ARCH)
set(MG_ARCH_DESCR "Host architecture to build Memgraph on. Supported values are x86_64, ARM64.")
if (${CMAKE_HOST_SYSTEM_PROCESSOR} MATCHES "aarch64")
set(MG_ARCH "ARM64" CACHE STRING ${MG_ARCH_DESCR})
else()
set(MG_ARCH "x86_64" CACHE STRING ${MG_ARCH_DESCR})
endif()
endif()
message(STATUS "MG_ARCH: ${MG_ARCH}")
set(MG_ARCH "x86_64" CACHE STRING "Host architecture to build Memgraph on. Supported values are x86_64 (default), ARM64.")
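The replaced block auto-detects the host architecture instead of hard-coding x86_64, while still exposing `MG_ARCH` as a cache variable. A quick sketch of overriding the detection at configure time:
```bash
# Without -DMG_ARCH, the value is derived from CMAKE_HOST_SYSTEM_PROCESSOR
# (aarch64 -> ARM64, anything else -> x86_64).
cmake -DMG_ARCH=ARM64 -DCMAKE_BUILD_TYPE=Release ..
```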
# setup external dependencies -------------------------------------------------
@ -276,6 +251,7 @@ endif()
set(libs_dir ${CMAKE_SOURCE_DIR}/libs)
add_subdirectory(libs EXCLUDE_FROM_ALL)
# Optional subproject configuration -------------------------------------------
option(TEST_COVERAGE "Generate coverage reports from running memgraph" OFF)
option(TOOLS "Build tools binaries" ON)
option(QUERY_MODULES "Build query modules containing custom procedures" ON)
@ -283,8 +259,6 @@ option(ASAN "Build with Address Sanitizer. To get a reasonable performance optio
option(TSAN "Build with Thread Sanitizer. To get a reasonable performance option should be used only in Release or RelWithDebInfo build " OFF)
option(UBSAN "Build with Undefined Behaviour Sanitizer" OFF)
# Build feature flags
if (TEST_COVERAGE)
string(TOLOWER ${CMAKE_BUILD_TYPE} lower_build_type)
if (NOT lower_build_type STREQUAL "debug")
@ -298,25 +272,12 @@ if (MG_ENTERPRISE)
add_definitions(-DMG_ENTERPRISE)
endif()
option(ENABLE_JEMALLOC "Use jemalloc" ON)
option(MG_MEMORY_PROFILE "If build should be setup for memory profiling" OFF)
if (MG_MEMORY_PROFILE AND ENABLE_JEMALLOC)
message(STATUS "Jemalloc has been disabled because MG_MEMORY_PROFILE is enabled")
set(ENABLE_JEMALLOC OFF)
endif ()
if (MG_MEMORY_PROFILE AND ASAN)
message(STATUS "ASAN has been disabled because MG_MEMORY_PROFILE is enabled")
set(ASAN OFF)
endif ()
if (MG_MEMORY_PROFILE)
add_compile_definitions(MG_MEMORY_PROFILE)
endif ()
set(ENABLE_JEMALLOC ON)
if (ASAN)
message(WARNING "Disabling jemalloc as it doesn't work well with ASAN")
set(ENABLE_JEMALLOC OFF)
# Enable Address sanitizer and get nicer stack traces in error messages.
# Enable Addres sanitizer and get nicer stack traces in error messages.
# NOTE: AddressSanitizer uses llvm-symbolizer binary from the Clang
# distribution to symbolize the stack traces (note that ideally the
# llvm-symbolizer version must match the version of ASan runtime library).
@ -337,8 +298,6 @@ if (ASAN)
endif()
if (TSAN)
message(WARNING "Disabling jemalloc as it doesn't work well with ASAN")
set(ENABLE_JEMALLOC OFF)
# ThreadSanitizer generally requires all code to be compiled with -fsanitize=thread.
# If some code (e.g. dynamic libraries) is not compiled with the flag, it can
# lead to false positive race reports, false negative race reports and/or
@ -354,7 +313,7 @@ if (TSAN)
# By default ThreadSanitizer uses addr2line utility to symbolize reports.
# llvm-symbolizer is faster, consumes less memory and produces much better
# reports. To use it set runtime flag:
# TSAN_OPTIONS="extern-symbolizer-path=~/llvm-symbolizer"
# TSAN_OPTIONS="extern-symbolizer-path=~/llvm-symbolizer"
# For more runtime flags see: https://github.com/google/sanitizers/wiki/ThreadSanitizerFlags
endif()

111
README.md
View File

@ -1,9 +1,13 @@
<p align="center">
<img src="https://public-assets.memgraph.com/github-readme-images/github-memgraph-repo-banner.png">
<img width="400px" src="https://uploads-ssl.webflow.com/5e7ceb09657a69bdab054b3a/5e7ceb09657a6937ab054bba_Black_Original%20_Logo.png">
</p>
---
<p align="center">
Build modern, graph-based applications on top of your streaming data in minutes.
</p>
<p align="center">
<a href="https://github.com/memgraph/memgraph/blob/master/licenses/APL.txt">
<img src="https://img.shields.io/badge/license-APL-green" alt="license" title="license"/>
@ -18,7 +22,7 @@
<p align="center">
<a href="https://github.com/memgraph/memgraph">
<img src="https://img.shields.io/github/actions/workflow/status/memgraph/memgraph/release_debian10.yaml?branch=master&label=build%20and%20test&logo=github"/>
<img src="https://img.shields.io/github/workflow/status/memgraph/memgraph/Release%20Ubuntu%2020.04/master" alt="build" title="build"/>
</a>
<a href="https://memgraph.com/docs/" alt="Documentation">
<img src="https://img.shields.io/badge/documentation-Memgraph-orange" />
@ -33,10 +37,9 @@
## :clipboard: Description
Memgraph is an open source graph database built for real-time streaming and
compatible with Neo4j. Whether you're a developer or a data scientist with
interconnected data, Memgraph will get you the immediate actionable insights
fast.
Memgraph is a streaming graph application platform that helps you wrangle your
streaming data, build sophisticated models that you can query in real-time, and
develop graph applications.
Memgraph directly connects to your streaming infrastructure. You can ingest data
from sources like Kafka, SQL, or plain CSV files. Memgraph provides a standard
@ -48,20 +51,8 @@ natural and effective way to model many real-world problems without relying on
complex SQL schemas.
Memgraph is implemented in C/C++ and leverages an in-memory first architecture
to ensure that you're getting the [best possible
performance](http://memgraph.com/benchgraph) consistently and without surprises.
It's also ACID-compliant and highly available.
## :zap: Features
- Run Python, Rust, and C/C++ code natively, check out the
[MAGE](https://github.com/memgraph/mage) graph algorithm library
- Native support for machine learning
- Streaming support
- Replication
- Authentication and authorization
- ACID compliance
to ensure that you're getting the best possible performance consistently and
without surprises. It's also ACID-compliant and highly available.
## :video_game: Memgraph Playground
@ -85,49 +76,28 @@ your browser.
### macOS
[![macOS](https://img.shields.io/badge/macOS-Docker-000000?style=for-the-badge&logo=macos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-on-macos-docker)
[![macOS](https://img.shields.io/badge/lima-AACF41?style=for-the-badge&logo=macos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-on-ubuntu)
### Linux
[![Linux](https://img.shields.io/badge/Linux-Docker-FCC624?style=for-the-badge&logo=linux&logoColor=black)](https://memgraph.com/docs/memgraph/install-memgraph-on-linux-docker)
[![Debian](https://img.shields.io/badge/Debian-D70A53?style=for-the-badge&logo=debian&logoColor=white)](https://memgraph.com/docs/memgraph/install-memgraph-on-debian)
[![Ubuntu](https://img.shields.io/badge/Ubuntu-E95420?style=for-the-badge&logo=ubuntu&logoColor=white)](https://memgraph.com/docs/memgraph/install-memgraph-on-ubuntu)
[![Cent OS](https://img.shields.io/badge/cent%20os-002260?style=for-the-badge&logo=centos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)
[![Fedora](https://img.shields.io/badge/fedora-0B57A4?style=for-the-badge&logo=fedora&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)
[![RedHat](https://img.shields.io/badge/redhat-EE0000?style=for-the-badge&logo=redhat&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)
[![Cent
OS](https://img.shields.io/badge/cent%20os-002260?style=for-the-badge&logo=centos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)
You can find the binaries and Docker images on the [Download
Hub](https://memgraph.com/download) and the installation instructions in the
[official documentation](https://memgraph.com/docs/memgraph/installation).
## :zap: Features
## :cloud: Memgraph Cloud
Check out [Memgraph Cloud](https://memgraph.com/docs/memgraph-cloud) - a cloud service fully managed on AWS and available in 6 geographic regions around the world. Memgraph Cloud allows you to create projects with Enterprise instances of MemgraphDB from your browser.
<p align="left">
<a href="https://memgraph.com/docs/memgraph-cloud">
<img width="450px" alt="Memgraph Cloud" src="https://public-assets.memgraph.com/memgraph-gifs%2Fcloud.gif">
</a>
</p>
## :link: Connect to Memgraph
[Connect to the database](https://memgraph.com/docs/memgraph/connect-to-memgraph) using Memgraph Lab, mgconsole, various drivers (Python, C/C++ and others) and WebSocket.
### :microscope: Memgraph Lab
Visualize graphs and play with queries to understand your data. [Memgraph Lab](https://memgraph.com/docs/memgraph-lab) is a user interface that helps you explore and manipulate the data stored in Memgraph. Visualize graphs, execute ad hoc queries, and optimize their performance.
<p align="left">
<a href="https://memgraph.com/docs/memgraph-lab">
<img width="450px" alt="Memgraph Cloud" src="https://public-assets.memgraph.com/memgraph-gifs%2Flab.gif">
</a>
</p>
## :file_folder: Import data
[Import data](https://memgraph.com/docs/memgraph/import-data) into Memgraph using Kafka, RedPanda or Pulsar streams, CSV and JSON files, or Cypher commands.
- Run Python, Rust, and C/C++ code natively, check out the
[MAGE](https://github.com/memgraph/mage) graph algorithm library
- Native support for machine learning
- Streaming support
- Replication
- Authentication and authorization
- ACID compliance
## :bookmark_tabs: Documentation
@ -141,20 +111,29 @@ guide](https://memgraph.com/docs/memgraph/reference-guide/configuration).
## :trophy: Contributing
Welcome to the heart of Memgraph development! We're on a mission to supercharge Memgraph, making it faster, more user-friendly, and even more powerful. We owe a big thanks to our fantastic community of contributors who help us fix bugs and bring incredible improvements to life. If you're passionate about databases and open source, here's your chance to make a difference!
### Explore Memgraph Internals
Interested in the nuts and bolts of Memgraph? Our [internals documentation](https://memgraph.notion.site/Memgraph-Internals-12b69132d67a417898972927d6870bd2) is where you can uncover the inner workings of Memgraph's architecture, learn how to build the project from scratch, and discover the secrets of effective contributions. Dive deep into the database!
### Dive into the Contributing Guide
Ready to jump into the action? Explore our [contributing guide](CONTRIBUTING.md) to get the inside scoop on how we develop Memgraph. It's your roadmap for suggesting bug fixes and enhancements. Contribute your skills and ideas!
The main purpose of this repository is to continue evolving Memgraph, making it
faster and easier to use. Development of Memgraph happens in the open on GitHub,
and we are grateful to the community for contributing bug fixes and
improvements. Read below to learn how you can take part in improving Memgraph.
### Code of Conduct
Our commitment to a respectful and professional community is unwavering. Every participant in Memgraph is expected to adhere to a stringent Code of Conduct. Please carefully review [the complete text](CODE_OF_CONDUCT.md) to gain a comprehensive understanding of the behaviors that are both expected and explicitly prohibited.
Memgraph has adopted a Code of Conduct that we expect project participants to
adhere to. Please read [the full text](CODE_OF_CONDUCT.md) so that you can
understand what actions will and will not be tolerated.
We maintain a zero-tolerance policy towards any violations. Our shared commitment to this Code of Conduct ensures that Memgraph remains a place where integrity and excellence are paramount.
### Contributing Guide
Read our [contributing guide](CONTRIBUTING.md) to learn about our development
process and how to propose bug fixes and improvements.
### Internals
Read our
[internal](https://memgraph.notion.site/Memgraph-Internals-12b69132d67a417898972927d6870bd2)
docs to learn more about Memgraph's architecture, how to build the project from
source and how to start contributing. All information related to the database,
can be found in the aforementioned docs.
### :scroll: License
@ -162,16 +141,8 @@ Memgraph Community is available under the [BSL
license](./licenses/BSL.txt).</br> Memgraph Enterprise is available under the
[MEL license](./licenses/MEL.txt).
## :busts_in_silhouette: Community
- :purple_heart: [**Discord**](https://discord.gg/memgraph)
- :ocean: [**Stack Overflow**](https://stackoverflow.com/questions/tagged/memgraphdb)
- :bird: [**Twitter**](https://twitter.com/memgraphdb)
- :movie_camera:
[**YouTube**](https://www.youtube.com/channel/UCZ3HOJvHGxtQ_JHxOselBYg)
<p align="center">
<a href="#">
<img src="https://img.shields.io/badge/⬆️ back_to_top_⬆-white" alt="Back to top" title="Back to top"/>
<img src="https://img.shields.io/badge/⬆back_to_top_⬆-white" alt="Back to top" title="Back to top"/>
</a>
</p>

55
cmake/FindJemalloc.cmake Normal file
View File

@ -0,0 +1,55 @@
# Try to find jemalloc library
#
# Use this module as:
# find_package(Jemalloc)
#
# or:
# find_package(Jemalloc REQUIRED)
#
# This will define the following variables:
#
# Jemalloc_FOUND True if the system has the jemalloc library.
# Jemalloc_INCLUDE_DIRS Include directories needed to use jemalloc.
# Jemalloc_LIBRARIES Libraries needed to link to jemalloc.
#
# The following cache variables may also be set:
#
# Jemalloc_INCLUDE_DIR The directory containing jemalloc/jemalloc.h.
# Jemalloc_LIBRARY The path to the jemalloc static library.
find_path(Jemalloc_INCLUDE_DIR NAMES jemalloc/jemalloc.h PATH_SUFFIXES include)
find_library(Jemalloc_LIBRARY NAMES libjemalloc.a PATH_SUFFIXES lib)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Jemalloc
FOUND_VAR Jemalloc_FOUND
REQUIRED_VARS
Jemalloc_LIBRARY
Jemalloc_INCLUDE_DIR
)
if(Jemalloc_FOUND)
set(Jemalloc_LIBRARIES ${Jemalloc_LIBRARY})
set(Jemalloc_INCLUDE_DIRS ${Jemalloc_INCLUDE_DIR})
else()
if(Jemalloc_FIND_REQUIRED)
message(FATAL_ERROR "Cannot find jemalloc!")
else()
message(WARNING "jemalloc is not found!")
endif()
endif()
if(Jemalloc_FOUND AND NOT TARGET Jemalloc::Jemalloc)
add_library(Jemalloc::Jemalloc UNKNOWN IMPORTED)
set_target_properties(Jemalloc::Jemalloc
PROPERTIES
IMPORTED_LOCATION "${Jemalloc_LIBRARY}"
INTERFACE_INCLUDE_DIRECTORIES "${Jemalloc_INCLUDE_DIR}"
)
endif()
mark_as_advanced(
Jemalloc_INCLUDE_DIR
Jemalloc_LIBRARY
)
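The module exposes an imported target, so consumers don't need to touch the cache variables directly. A minimal usage sketch (the `my_target` name is hypothetical):
```cmake
# Assumes cmake/ is on CMAKE_MODULE_PATH, as CMakeLists.txt already sets up.
find_package(Jemalloc REQUIRED)
add_executable(my_target main.cpp)
# The imported target carries both the static library and its include dir.
target_link_libraries(my_target PRIVATE Jemalloc::Jemalloc)
```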

View File

@ -1,67 +0,0 @@
# Try to find jemalloc library
#
# Use this module as:
# find_package(jemalloc)
#
# or:
# find_package(jemalloc REQUIRED)
#
# This will define the following variables:
#
# JEMALLOC_FOUND True if the system has the jemalloc library.
# Jemalloc_INCLUDE_DIRS Include directories needed to use jemalloc.
# Jemalloc_LIBRARIES Libraries needed to link to jemalloc.
#
# The following cache variables may also be set:
#
# Jemalloc_INCLUDE_DIR The directory containing jemalloc/jemalloc.h.
# Jemalloc_LIBRARY The path to the jemalloc static library.
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(jemalloc
FOUND_VAR JEMALLOC_FOUND
REQUIRED_VARS
JEMALLOC_LIBRARY
JEMALLOC_INCLUDE_DIR
)
if(JEMALLOC_INCLUDE_DIR)
message(STATUS "Found jemalloc include dir: ${JEMALLOC_INCLUDE_DIR}")
else()
message(WARNING "jemalloc not found!")
endif()
if(JEMALLOC_LIBRARY)
message(STATUS "Found jemalloc library: ${JEMALLOC_LIBRARY}")
else()
message(WARNING "jemalloc library not found!")
endif()
if(JEMALLOC_FOUND)
set(Jemalloc_LIBRARIES ${JEMALLOC_LIBRARY})
set(Jemalloc_INCLUDE_DIRS ${JEMALLOC_INCLUDE_DIR})
else()
if(Jemalloc_FIND_REQUIRED)
message(FATAL_ERROR "Cannot find jemalloc!")
else()
message(WARNING "jemalloc is not found!")
endif()
endif()
if(JEMALLOC_FOUND AND NOT TARGET Jemalloc::Jemalloc)
message(STATUS "JEMALLOC NOT TARGET")
add_library(Jemalloc::Jemalloc UNKNOWN IMPORTED)
set_target_properties(Jemalloc::Jemalloc
PROPERTIES
IMPORTED_LOCATION "${JEMALLOC_LIBRARY}"
INTERFACE_INCLUDE_DIRECTORIES "${JEMALLOC_INCLUDE_DIR}"
)
endif()
mark_as_advanced(
JEMALLOC_INCLUDE_DIR
JEMALLOC_LIBRARY
)

View File

@ -83,13 +83,9 @@ modifications:
value: "true"
override: true
- name: "query_modules_directory"
value: "/usr/lib/memgraph/query_modules"
override: true
- name: "auth_module_executable"
value: "/usr/lib/memgraph/auth_module/example.py"
override: false
# - name: "query_modules_directory"
# value: "/usr/lib/memgraph/query_modules"
# override: true
- name: "memory_limit"
value: "0"
@ -99,30 +95,10 @@ modifications:
value: "SNAPSHOT_ISOLATION"
override: true
- name: "storage_mode"
value: "IN_MEMORY_TRANSACTIONAL"
override: true
- name: "allow_load_csv"
value: "true"
override: false
- name: "storage_parallel_index_recovery"
value: "false"
override: true
- name: "storage_parallel_schema_recovery"
value: "false"
override: true
- name: "storage_enable_schema_metadata"
value: "false"
override: true
- name: "query_callable_mappings_path"
value: "/etc/memgraph/apoc_compatibility_mappings.json"
override: true
undocumented:
- "flag_file"
- "also_log_to_stderr"

View File

@ -5,10 +5,12 @@ import os
import subprocess
import sys
import textwrap
import xml.etree.ElementTree as ET
import yaml
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
CONFIG_FILE = os.path.join(SCRIPT_DIR, "flags.yaml")
WIDTH = 80
@ -16,21 +18,14 @@ WIDTH = 80
def wrap_text(s, initial_indent="# "):
return "\n#\n".join(
map(lambda x: textwrap.fill(x, WIDTH, initial_indent=initial_indent, subsequent_indent="# "), s.split("\n"))
)
map(lambda x: textwrap.fill(x, WIDTH, initial_indent=initial_indent,
subsequent_indent="# "), s.split("\n")))
def extract_flags(binary_path):
ret = {}
data = subprocess.run([binary_path, "--help-xml"], stdout=subprocess.PIPE).stdout.decode("utf-8")
# If something is printed out before the help output, it will break the
# XML parsing -> filter out any line that is not XML because something
# can be logged before the gflags output (e.g. during the global objects init).
# This gets called during the memgraph build phase to generate the default
# config file later installed under /etc/memgraph/memgraph.conf
# NOTE: Don't use \n in the gflags description strings.
# NOTE: Check here if gflags version changes because of the XML format.
data = "\n".join([line for line in data.split("\n") if line.startswith("<")])
data = subprocess.run([binary_path, "--help-xml"],
stdout=subprocess.PIPE).stdout.decode("utf-8")
root = ET.fromstring(data)
for child in root:
if child.tag == "usage" and child.text.lower().count("warning"):
@ -51,7 +46,8 @@ def apply_config_to_flags(config, flags):
for modification in config["modifications"]:
name = modification["name"]
if name not in flags:
print("WARNING: Flag '" + name + "' missing from binary!", file=sys.stderr)
print("WARNING: Flag '" + name + "' missing from binary!",
file=sys.stderr)
continue
flags[name]["default"] = modification["value"]
flags[name]["override"] = modification["override"]
@ -79,9 +75,8 @@ def extract_sections(flags):
else:
sections.append((current_section, current_flags))
sections.append(("other", other))
assert set(sum(map(lambda x: x[1], sections), [])) == set(
flags.keys()
), "The section extraction algorithm lost some flags!"
assert set(sum(map(lambda x: x[1], sections), [])) == set(flags.keys()), \
"The section extraction algorithm lost some flags!"
return sections
@ -94,7 +89,8 @@ def generate_config_file(sections, flags):
helpstr = flag["meaning"] + " [" + flag["type"] + "]"
ret += wrap_text(helpstr) + "\n"
prefix = "# " if not flag["override"] else ""
ret += prefix + "--" + flag["name"].replace("_", "-") + "=" + flag["default"] + "\n\n"
ret += prefix + "--" + flag["name"].replace("_", "-") + \
"=" + flag["default"] + "\n\n"
ret += "\n"
ret += wrap_text(config["footer"])
return ret.strip() + "\n"
@ -102,9 +98,13 @@ def generate_config_file(sections, flags):
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("memgraph_binary", help="path to Memgraph binary")
parser.add_argument("output_file", help="path where to store the generated Memgraph " "configuration file")
parser.add_argument("--config-file", default=CONFIG_FILE, help="path to generator configuration file")
parser.add_argument("memgraph_binary",
help="path to Memgraph binary")
parser.add_argument("output_file",
help="path where to store the generated Memgraph "
"configuration file")
parser.add_argument("--config-file", default=CONFIG_FILE,
help="path to generator configuration file")
args = parser.parse_args()
flags = extract_flags(args.memgraph_binary)

View File

@ -1,26 +0,0 @@
{
"dbms.components": "mgps.components",
"apoc.util.validate": "mgps.validate",
"db.schema.nodeTypeProperties": "schema.NodeTypeOroperties",
"db.schema.relTypeProperties": "schema.RelTypeProperties",
"apoc.coll.contains": "collections.contains",
"apoc.coll.partition": "collections.partition",
"apoc.coll.toSet": "collections.to_set",
"apoc.coll.unionAll": "collections.unionAll",
"apoc.coll.removeAll": "collections.remove_all",
"apoc.coll.union": "collections.union",
"apoc.coll.sum": "collections.sum",
"apoc.coll.pairs": "collections.pairs",
"apoc.map.fromLists": "map.from_lists",
"apoc.map.removeKeys": "map.remove_keys",
"apoc.map.merge": "map.merge",
"apoc.create.nodes": "create.nodes",
"apoc.create.removeProperties": "create.remove_properties",
"apoc.create.node": "create.node",
"apoc.create.removeLabel": "create.remove_label",
"apoc.refactor.invert": "refactor.invert",
"apoc.refactor.cloneNode": "refactor.clone_node",
"apoc.refactor.cloneSubgraph": "refactor.clone_subgraph",
"apoc.refactor.cloneSubgraphFromPath": "refactor.clone_subgraph_from_path",
"apoc.label.exists": "label.exists"
}

View File

@ -1,230 +0,0 @@
# CSV Import Tool Documentation
CSV is a universal and very versatile data format used to store large quantities
of data. Each Memgraph database instance has a CSV import tool installed called
`mg_import_csv`. The CSV import tool should be used for initial bulk ingestion
of data into the database. Upon ingestion, the CSV importer creates a snapshot
that will be used by the database to recover its state on its next startup.
If you are already familiar with the Neo4j bulk import tool, then using the
`mg_import_csv` tool should be easy. The CSV import tool is fully compatible
with the [Neo4j CSV
format](https://neo4j.com/docs/operations-manual/current/tools/import/). If you
already have a pipeline set-up for Neo4j, you should only replace `neo4j-admin
import` with `mg_import_csv`.
## CSV File Format
Each row of a CSV file represents a single entry that should be imported into
the database. Both nodes and relationships can be imported into the database
using CSV files.
Each set of CSV files must have a header that describes the data that is stored
in the CSV files. Each field in the CSV header is in the format
`<name>[:<type>]`, which specifies the name and, optionally, the type to be
used for that column. The type defaults to `string` (see the following
chapter).
Each CSV field must be divided using the delimiter and each CSV field can either
be quoted or unquoted. When the field is quoted, the first and last character in
the field *must* be the quote character. If the field isn't quoted, and a quote
character appears in it, it is treated as a regular character. If a quote
character appears inside a quoted string then the quote character must be
doubled in order to escape it. Line feeds and carriage returns are ignored in
the CSV file. Also, the file can't contain a NULL character.
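For example (a small sketch with the default `,` delimiter and `"` quote character), a quote inside a quoted field is escaped by doubling it:
```plaintext
name:string,nickname:string
"Doe, John","John ""Johnny"" Doe"
```
This imports `name` as `Doe, John` and `nickname` as `John "Johnny" Doe`.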
## Properties
Both nodes and relationships can have properties added to them. When importing
properties, the CSV importer uses the name specified in the header of the
corresponding CSV column for the name of the property. A property is designated
by specifying one of the following types in the header:
- `integer`, `int`, `long`, `byte`, `short`: creates an integer property
- `float`, `double`: creates a float property
- `boolean`, `bool`: creates a boolean property
- `string`, `char`: creates a string property
When importing a boolean value, the CSV field should contain exactly the text
`true` to import a `True` boolean value. All other text values are treated as a
boolean value `False`.
If you want to import an array of values, you can do so by appending `[]` to any
of the above types. The values of the array are then determined by splitting
the raw CSV value using the array delimiter character.
Assuming that the array delimiter is `;`, the following example:
```plaintext
first_name,last_name:string,number:integer,aliases:string[]
John,Doe,1,Johnny;Jo;J-man
Melissa,Doe,2,Mel
```
Will yield these results:
```plaintext
CREATE ({first_name: "John", last_name: "Doe", number: 1, aliases: ["Johnny", "Jo", "J-man"]});
CREATE ({first_name: "Melissa", last_name: "Doe", number: 2, aliases: ["Mel"]});
```
### Nodes
When importing nodes, several more types can be specified in the header of the
CSV file (along with all property types):
- `ID`: id of the node that should be used as the node ID when importing
relationships
- `LABEL`: designates that the field contains additional labels for the node
- `IGNORE`: designates that the field should be ignored
The `ID` field type sets the internal ID that will be used for the node when
creating relationships. It is optional and nodes that don't have an ID value
specified will be imported, but can't be connected to any relationships. If you
want to save the ID value as a property in the database, just specify a name for
the ID (`user_id:ID`). If you just want to use the ID during the import, leave
out the name of the field (`:ID`). The `ID` field also supports creating
separate ID spaces. The ID space is specified with the ID space name appended
to the `ID` type in parentheses (`ID(user)`). That allows you to have the same
IDs (by value) for multiple different node files (for example, numbers from 1 to
N). The IDs in each ID space will be treated as an independent set of IDs that
don't interfere with IDs in another ID space.
The `LABEL` field type adds additional labels to the node. The value is treated
as an array type so that multiple additional labels can be specified for each
node. The value is split using the array delimiter (`--array-delimiter` flag).
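A small illustrative node file combining these types (values are made up, and the array delimiter is assumed to be the default `;`):
```plaintext
user_id:ID(user),name:string,:LABEL
1,Alice,Person;Admin
2,Bob,Person
```
This stores each ID as the `user_id` property, places the IDs in the `user` ID space, and attaches one or more labels per node.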
### Relationships
In order to be able to import relationships, you must import the nodes in the
same invocation of `mg_import_csv` that is used to import the relationships.
When importing relationships, several more types can be specified in the header
of the CSV file (along with all property types):
- `START_ID`: id of the start node that should be connected with the
relationship
- `END_ID`: id of the end node that should be connected with the relationship
- `TYPE`: designates the type of the relationship
- `IGNORE`: designates that the field should be ignored
The `START_ID` field type sets the start node that should be connected with the
relationship to the end node. The field *must* be specified and the node ID
must be one of the node IDs that were specified in the node CSV files. The name
of this field is ignored. If the node ID is in an ID space, you can specify the
ID space for it in the same way as for the node ID (`START_ID(user)`).
The `END_ID` field type sets the end node that should be connected with the
relationship to the start node. The field *must* be specified and the node ID
must be one of the node IDs that were specified in the node CSV files. The name
of this field is ignored. If the node ID is in an ID space, you can specify the
ID space for it in the same way as for the node ID (`END_ID(user)`).
The `TYPE` field type sets the type of the relationship. Each relationship
*must* have a relationship type, but it doesn't necessarily need to be specified
in the CSV file; it can also be set externally for the whole CSV file. The name
of this field is ignored.
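A matching relationship file for the node sketch above could look like this (again illustrative only):
```plaintext
:START_ID(user),:END_ID(user),:TYPE,since:integer
1,2,KNOWS,2015
```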
## CSV Importer Flags
The importer has many command line options that allow you to customize the way
the importer loads your data.
The two main flags that are used to specify the input CSV files are `--nodes`
and `--relationships`. A basic description of these flags is provided in the
table, and a more detailed explanation can be found further below.
| Flag | Description |
|-----------------------| -------------- |
|`--nodes` | Used to supply the CSV files containing nodes to the importer. |
|`--relationships` | Used to supply the CSV files containing relationships to the importer.|
|`--delimiter` | Sets the delimiter that should be used when splitting the CSV fields (default `,`)|
|`--quote` | Sets the quote character that should be used to quote a CSV field (default `"`)|
|`--array-delimiter` | Sets the delimiter that should be used when splitting array values (default `;`)|
|`--id-type` | Specifies which data type should be used to store the supplied <br /> node IDs when storing them as properties (if the field name is supplied). <br /> The supported values are either `STRING` or `INTEGER`. (default `STRING`)|
|`--ignore-empty-strings` | Instructs the importer to treat all empty strings as `Null` values <br /> instead of an empty string value (default `false`)|
|`--ignore-extra-columns` | Instructs the importer to ignore all columns (instead of raising an error) <br /> that aren't specified after the last specified column in the CSV header. (default `false`) |
| `--skip-bad-relationships`| Instructs the importer to ignore all relationships (instead of raising an error) <br /> that refer to nodes that don't exist in the node files. (default `false`) |
|`--skip-duplicate-nodes` | Instructs the importer to ignore all duplicate nodes (instead of raising an error). <br /> Duplicate nodes are nodes that have an ID that is the same as another node that was already imported. (default `false`) |
| `--trim-strings`| Instructs the importer to trim all of the loaded CSV field values before processing them further. <br /> Trimming the fields removes all leading and trailing whitespace from them. (default `false`) |
The `--nodes` and `--relationships` flags are used to pass the CSV files
containing nodes and relationships to the importer. Multiple files can be
specified in each supplied `--nodes` or `--relationships` flag. Files that are
supplied in one `--nodes` or `--relationships` flag are treated by the CSV
parser as one big CSV file. Only the first line of the first file is parsed for
the CSV header, all other files (and rows) are treated as data. This is useful
when you have a very large CSV file and don't want to edit its first line just
to add a CSV header. Instead, you can specify the header in a separate file
(e.g. `users_header.csv` or `friendships_header.csv`) and have the data intact
in the large file (e.g. `users.csv` or `friendships.csv`). Also, you can supply
additional labels for each set of node files.
The format of `--nodes` flag is:
`[<label>[:<label>]...=]<file>[,<file>][,<file>]...`. Take note that only the
first `<file>` part is mandatory; all other parts of the flag value are
optional. Multiple `--nodes` flags can be supplied to describe multiple sets of
different node files. For the importer to work, at least one `--nodes` flag
*must* be supplied.
The format of `--relationships` flag is: `[<type>=]<file>[,<file>][,<file>]...`.
Take note that only the first `<file>` part is mandatory; all other parts of the
flag value are optional. Multiple `--relationships` flags can be supplied to
describe multiple sets of different relationship files. The `--relationships`
flag isn't mandatory.
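Putting the flags together, a hedged example invocation (all file names are placeholders):
```bash
mg_import_csv \
  --nodes Person=users_header.csv,users.csv \
  --relationships KNOWS=friendships_header.csv,friendships.csv \
  --array-delimiter ';' \
  --skip-duplicate-nodes
```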
## CSV Parser Logic
The CSV parser uses the same logic as the standard Python CSV parser. The data
is parsed in the same way as the following snippet:
```python
import csv
for row in csv.reader(stream, strict=True):
# process 'row'
```
Python uses 'excel' as the default dialect when parsing CSV files and the
default settings for the CSV parser are:
- delimiter: `','`
- doublequote: `True`
- escapechar: `None`
- lineterminator: `'\r\n'`
- quotechar: `'"'`
- skipinitialspace: `False`
The above snippet can be expanded to:
```python
import csv
for row in csv.reader(stream, delimiter=',', doublequote=True,
escapechar=None, lineterminator='\r\n',
quotechar='"', skipinitialspace=False,
strict=True):
# process 'row'
```
For more information about the meaning of the above values, see:
https://docs.python.org/3/library/csv.html#csv.Dialect
## Errors
1. [Skipping duplicate node with ID '{}'. For more details, visit:
memgr.ph/csv-import-tool.](#error-1)
2. [Skipping bad relationship with START_ID '{}'. For more details, visit:
memgr.ph/csv-import-tool.](#error-2)
3. [Skipping bad relationship with END_ID '{}'. For more details, visit:
memgr.ph/csv-import-tool.](#error-3)
## Skipping duplicate node with ID {} {#error-1}
Duplicate nodes are nodes that have an ID that is the same as another node that
was already imported. You can instruct the importer to ignore all duplicate
nodes (instead of raising an error) by using the `--skip-duplicate-nodes` flag.
## Skipping bad relationship with START_ID {} {#error-2}
A node with the id `START_ID` doesn't exist. You can instruct the importer to
ignore all bad relationships (instead of raising an error) that refer to nodes
that don't exist in the node files by using the `--skip-bad-relationships` flag.
## Skipping bad relationship with END_ID {} {#error-3}
A node with the id `END_ID` doesn't exist. You can instruct the importer to
ignore all bad relationships (instead of raising an error) that refer to nodes
that don't exist in the node files by using the `--skip-bad-relationships` flag.

2
environment/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
archives
build

View File

@ -1,15 +0,0 @@
# Memgraph Operating Environments
## Issues related to build toolchain
* GCC 11.2 (toolchain-v4) doesn't compile on Fedora 38 (multiple definitions of enum issue)
* spdlog 1.10/11 doesn't work with fmt 10.0.0
## os
Under the `os` directory, you can find scripts to install all required system
dependencies on operating systems where Memgraph builds natively. The testing
script shows how to install all packages (in the case of a new package) and how
to make any adjustments to the overall system setup. The testing script also
helps check whether Memgraph runs on a freshly installed operating system (with
no packages preinstalled).

View File

@ -1,6 +1,3 @@
*.deb
*.deb.*
*.rpm
*.rpm.*
*.tar.gz
*.tar.gz.*

View File

@ -1,190 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "amzn-2"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
git gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel texinfo
curl libcurl-devel # for cmake
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel pcre2-devel automake bison # for swig
file
openssl-devel
gmp-devel
gperf
diffutils
patch
libipt libipt-devel # intel
perl # for openssl
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat xz-libs python3 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake # build system
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
openssl
libseccomp-devel
python3 python3-pip nmap-ncat # for tests
#
# IMPORTANT: python3-yaml does NOT exist on CentOS
# Install it using `pip3 install PyYAML`
#
PyYAML # Package name here does not correspond to the yum package!
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which nodejs golang custom-golang1.18.9 zip unzip java-11-openjdk-devel jdk-17 custom-maven3.9.3 # for driver tests
autoconf # for jemalloc code generation
libtool # for protobuf code generation
cyrus-sasl-devel
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
# On Fedora, yum/dnf and python10 use a newer glibc which is not compatible
# with ours, so we need to momentarily disable the env
local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-""}
LD_LIBRARY_PATH=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == "PyYAML" ]; then
if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
continue
fi
if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
}
install() {
cd "$DIR"
if [ "$EUID" -ne 0 ]; then
echo "Please run as root."
exit 1
fi
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
yum update -y
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == jdk-17 ]; then
if ! yum list installed jdk-17 >/dev/null 2>/dev/null; then
wget --no-check-certificate -c --header "Cookie: oraclelicense=accept-securebackup-cookie" https://download.oracle.com/java/17/latest/jdk-17_linux-x64_bin.rpm
rpm -Uvh jdk-17_linux-x64_bin.rpm
# NOTE: Set Java 11 as default.
update-alternatives --set java java-11-openjdk.x86_64
update-alternatives --set javac java-11-openjdk.x86_64
fi
continue
fi
if [ "$pkg" == libipt ]; then
if ! yum list installed libipt >/dev/null 2>/dev/null; then
yum install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-1.6.1-8.el8.x86_64.rpm
fi
continue
fi
if [ "$pkg" == libipt-devel ]; then
if ! yum list installed libipt-devel >/dev/null 2>/dev/null; then
yum install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-devel-1.6.1-8.el8.x86_64.rpm
fi
continue
fi
if [ "$pkg" == nodejs ]; then
if ! yum list installed nodejs >/dev/null 2>/dev/null; then
yum install https://rpm.nodesource.com/pub_16.x/nodistro/repo/nodesource-release-nodistro-1.noarch.rpm -y
yum install nodejs -y --setopt=nodesource-nodejs.module_hotfixes=1
fi
continue
fi
if [ "$pkg" == PyYAML ]; then
if [ -z ${SUDO_USER+x} ]; then # Running as root (e.g. Docker).
pip3 install --user PyYAML
else # Running using sudo.
sudo -H -u "$SUDO_USER" bash -c "pip3 install --user PyYAML"
fi
continue
fi
if [ "$pkg" == java-11-openjdk ]; then
amazon-linux-extras install -y java-openjdk11
continue
fi
if [ "$pkg" == java-11-openjdk-devel ]; then
amazon-linux-extras install -y java-openjdk11
yum install -y java-11-openjdk-devel
continue
fi
yum install -y "$pkg"
done
}
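# Dispatch: $1 names the function to run (list/check/install) and $2 names one
# of the dependency arrays above; "${!deps}" is bash indirect expansion, so
# e.g. $2=MEMGRAPH_BUILD_DEPS expands to the contents of MEMGRAPH_BUILD_DEPS[*].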
deps=$2"[*]"
"$1" "${!deps}"

View File

@ -1,5 +1,7 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
@ -18,7 +20,7 @@ TOOLCHAIN_BUILD_DEPS=(
curl # snappy
readline-devel # cmake and llvm
libffi-devel libxml2-devel perl-Digest-MD5 # llvm
libedit-devel pcre-devel pcre2-devel automake bison # swig
libedit-devel pcre-devel automake bison # swig
file
openssl-devel
gmp-devel
@ -37,13 +39,12 @@ TOOLCHAIN_RUN_DEPS=(
)
MEMGRAPH_BUILD_DEPS=(
make cmake pkgconfig # build system
make pkgconfig # build system
curl wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
openssl
libseccomp-devel
python3 python-virtualenv python3-pip nmap-ncat # for qa, macro_benchmark and stress tests
#
@ -55,21 +56,9 @@ MEMGRAPH_BUILD_DEPS=(
sbcl # for custom Lisp C++ preprocessing
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which mono-complete dotnet-sdk-3.1 golang custom-golang1.18.9 # for driver tests
nodejs zip unzip java-11-openjdk-devel jdk-17 custom-maven3.9.3 # for driver tests
which mono-complete dotnet-sdk-3.1 golang nodejs zip unzip java-11-openjdk-devel # for driver tests
autoconf # for jemalloc code generation
libtool # for protobuf code generation
cyrus-sasl-devel
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
@ -79,18 +68,6 @@ list() {
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == git ]; then
if ! which "git" >/dev/null; then
missing="git $missing"
@ -133,25 +110,7 @@ install() {
yum update -y
yum install -y wget python3 python3-pip
yum install -y git
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == jdk-17 ]; then
if ! yum list installed jdk-17 >/dev/null 2>/dev/null; then
wget https://download.oracle.com/java/17/latest/jdk-17_linux-x64_bin.rpm
rpm -ivh jdk-17_linux-x64_bin.rpm
update-alternatives --set java java-11-openjdk.x86_64
update-alternatives --set javac java-11-openjdk.x86_64
fi
continue
fi
if [ "$pkg" == libipt ]; then
if ! yum list installed libipt >/dev/null 2>/dev/null; then
yum install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-1.6.1-8.el8.x86_64.rpm

View File

@ -1,5 +1,7 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
@ -7,17 +9,15 @@ check_operating_system "centos-9"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
wget # used for archive download
coreutils-common gcc gcc-c++ make # generic build tools
# NOTE: Pure libcurl conflicts with libcurl-minimal
libcurl-devel # cmake build requires it
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel pcre2-devel automake bison # for swig
libedit-devel pcre-devel automake bison # for swig
file
openssl-devel
gmp-devel
@ -40,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkgconf-pkg-config # build system
make pkgconf-pkg-config # build system
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
@ -56,22 +56,10 @@ MEMGRAPH_BUILD_DEPS=(
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which nodejs golang custom-golang1.18.9 # for driver tests
zip unzip java-11-openjdk-devel java-17-openjdk java-17-openjdk-devel custom-maven3.9.3 # for driver tests
which nodejs golang zip unzip java-11-openjdk-devel # for driver tests
sbcl # for custom Lisp C++ preprocessing
autoconf # for jemalloc code generation
libtool # for protobuf code generation
cyrus-sasl-devel
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
@ -81,18 +69,6 @@ list() {
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == "PyYAML" ]; then
if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
@ -125,20 +101,9 @@ install() {
else
echo "NOTE: export LANG=en_US.utf8"
fi
# --nobest is used because we install custom versions of libipt,
# since libipt-devel is not available on CentOS 9 Stream
yum update -y --nobest
yum update -y
yum install -y wget git python3 python3-pip
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
# Since there is no libipt-devel support for CentOS 9, we install
# Fedora's version of the same libs; they are the same version but released
# for a different OS

View File

@ -1,10 +1,10 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
# IMPORTANT: Deprecated since memgraph v2.12.0.
check_operating_system "debian-10"
check_architecture "x86_64"
@ -24,7 +24,7 @@ TOOLCHAIN_BUILD_DEPS=(
libgmp-dev # for gdb
gperf # for proxygen
git # for fbthrift
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
libedit-dev libpcre3-dev automake bison # for swig
)
TOOLCHAIN_RUN_DEPS=(
@ -40,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkg-config # build system
make pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
@ -53,19 +53,10 @@ MEMGRAPH_BUILD_DEPS=(
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless oracle-java17-installer custom-maven3.9.3 # for driver tests
dotnet-sdk-3.1 golang custom-golang1.18.9 nodejs npm # for driver tests
mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
dotnet-sdk-3.1 golang nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
@ -73,28 +64,7 @@ list() {
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
check_all_dpkg "$1"
}
install() {
@ -105,15 +75,8 @@ deb http://deb.debian.org/debian/ buster-updates main contrib non-free
deb-src http://deb.debian.org/debian/ buster-updates main contrib non-free
deb http://security.debian.org/debian-security buster/updates main contrib non-free
deb-src http://security.debian.org/debian-security buster/updates main contrib non-free
EOF
apt --allow-releaseinfo-change update
cat >/etc/apt/sources.list.d/java.list << EOF
deb http://ppa.launchpad.net/linuxuprising/java/ubuntu bionic main
deb-src http://ppa.launchpad.net/linuxuprising/java/ubuntu bionic main
EOF
cd "$DIR"
apt install -y gnupg
apt-key adv --keyserver keyserver.ubuntu.com --recv-keys EA8CACC073C3DB2A
apt --allow-releaseinfo-change update
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
@ -122,26 +85,8 @@ EOF
else
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == oracle-java17-installer ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
echo oracle-java17-installer shared/accepted-oracle-license-v1-3 select true | /usr/bin/debconf-set-selections
echo oracle-java17-installer shared/accepted-oracle-license-v1-3 seen true | /usr/bin/debconf-set-selections
apt install -y "$pkg"
update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
fi
continue
fi
if [ "$pkg" == dotnet-sdk-3.1 ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/debian/10/packages-microsoft-prod.deb -O packages-microsoft-prod.deb

View File

@ -1,12 +1,12 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
# IMPORTANT: Deprecated since memgraph v2.12.0.
check_operating_system "debian-11"
check_architecture "arm64" "aarch64"
check_architecture "arm64"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
@ -18,7 +18,7 @@ TOOLCHAIN_BUILD_DEPS=(
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
libedit-dev libpcre3-dev automake bison # for swig
curl # snappy
file # for libunwind
libssl-dev # for libevent
@ -54,19 +54,10 @@ MEMGRAPH_BUILD_DEPS=(
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk custom-maven3.9.3 # for driver tests
golang custom-golang1.18.9 nodejs npm
mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
golang nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
@ -74,28 +65,7 @@ list() {
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
check_all_dpkg "$1"
}
install() {
@ -119,25 +89,7 @@ EOF
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == openjdk-17-jdk ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
apt install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java /usr/lib/jvm/java-11-openjdk-arm64/bin/java
update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-arm64/bin/javac
fi
continue
fi
apt install -y "$pkg"
done
}

View File

@ -1,5 +1,7 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
@ -16,7 +18,7 @@ TOOLCHAIN_BUILD_DEPS=(
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
libedit-dev libpcre3-dev automake bison # for swig
curl # snappy
file # for libunwind
libssl-dev # for libevent
@ -39,7 +41,7 @@ TOOLCHAIN_RUN_DEPS=(
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkg-config # build system
make pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
@ -52,21 +54,10 @@ MEMGRAPH_BUILD_DEPS=(
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk custom-maven3.9.3 # for driver tests
dotnet-sdk-3.1 golang custom-golang1.18.9 nodejs npm
mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
dotnet-sdk-3.1 golang nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
@ -74,28 +65,7 @@ list() {
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
check_all_dpkg "$1"
}
install() {
@ -119,25 +89,7 @@ EOF
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == openjdk-17-jdk ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
apt install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
fi
continue
fi
if [ "$pkg" == dotnet-sdk-3.1 ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/debian/10/packages-microsoft-prod.deb -O packages-microsoft-prod.deb

View File

@ -1,134 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "debian-12"
check_architecture "arm64" "aarch64"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
wget # used for archive download
gnupg # used for archive signature verification
tar gzip bzip2 xz-utils unzip # used for archive unpacking
zlib1g-dev # zlib library used for all builds
libexpat1-dev liblzma-dev python3-dev texinfo # for gdb
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
curl # snappy
file # for libunwind
libssl-dev # for libevent
libgmp-dev
gperf # for proxygen
git # for fbthrift
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz-utils # used for archive unpacking
zlib1g # zlib library used for all builds
libexpat1 liblzma5 python3 # for gdb
libcurl4 # for cmake
file # for CPack
libreadline8 # for cmake and llvm
libffi8 libxml2 # for llvm
libssl-dev # for libevent
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
netcat # tests are using nc to wait for memgraph
python3 virtualenv python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
python3-yaml # for the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless custom-maven3.9.3 # for driver tests
dotnet-sdk-7.0 golang custom-golang1.18.9 nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
}
install() {
cd "$DIR"
apt update
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == dotnet-sdk-7.0 ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
dpkg -i packages-microsoft-prod.deb
apt-get update
apt-get install -y apt-transport-https dotnet-sdk-7.0
fi
continue
fi
apt install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"

View File

@ -1,136 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "debian-12"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
wget # used for archive download
gnupg # used for archive signature verification
tar gzip bzip2 xz-utils unzip # used for archive unpacking
zlib1g-dev # zlib library used for all builds
libexpat1-dev libipt-dev libbabeltrace-dev liblzma-dev python3-dev texinfo # for gdb
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
curl # snappy
file # for libunwind
libssl-dev # for libevent
libgmp-dev
gperf # for proxygen
git # for fbthrift
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz-utils # used for archive unpacking
zlib1g # zlib library used for all builds
libexpat1 libipt2 libbabeltrace1 liblzma5 python3 # for gdb
libcurl4 # for cmake
file # for CPack
libreadline8 # for cmake and llvm
libffi8 libxml2 # for llvm
libssl-dev # for libevent
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
netcat-traditional # tests are using nc to wait for memgraph
python3 virtualenv python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
python3-yaml # for the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless custom-maven3.9.3 # for driver tests
dotnet-sdk-7.0 golang custom-golang1.18.9 nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
}
install() {
cd "$DIR"
apt update
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == dotnet-sdk-7.0 ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
dpkg -i packages-microsoft-prod.deb
apt-get update
apt-get install -y apt-transport-https dotnet-sdk-7.0
fi
continue
fi
apt install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"

View File

@ -1,150 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
# IMPORTANT: Deprecated since memgraph v2.12.0.
check_operating_system "fedora-36"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
coreutils-common gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
curl libcurl-devel # for cmake
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel pcre2-devel automake bison # for swig
file
openssl-devel
gmp-devel
gperf
diffutils
libipt libipt-devel # intel
patch
perl # for openssl
git
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat xz-libs python3 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkgconf-pkg-config # build system
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
libseccomp-devel
python3 python3-pip python3-virtualenv python3-virtualenvwrapper python3-pyyaml nmap-ncat # for tests
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
java-11-openjdk-devel java-17-openjdk-devel custom-maven3.9.3 # for driver tests
which zip unzip
nodejs golang custom-golang1.18.9 # for driver tests
sbcl # for custom Lisp C++ preprocessing
autoconf # for jemalloc code generation
libtool # for protobuf code generation
cyrus-sasl-devel
)
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
if [ -v LD_LIBRARY_PATH ]; then
# On Fedora, yum/dnf and python10 use a newer glibc which is not compatible
# with ours, so we need to momentarily disable the env
local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
LD_LIBRARY_PATH=""
fi
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
if [ -v OLD_LD_LIBRARY_PATH ]; then
echo "Restoring LD_LIBRARY_PATH..."
LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
fi
}
install() {
cd "$DIR"
if [ "$EUID" -ne 0 ]; then
echo "Please run as root."
exit 1
fi
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
dnf update -y
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == java-17-openjdk-devel ]; then
if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
dnf install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java java-11-openjdk.x86_64
update-alternatives --set javac java-11-openjdk.x86_64
fi
continue
fi
dnf install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"

View File

@ -1,117 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "fedora-38"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
coreutils-common gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
curl libcurl-devel # for cmake
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel pcre2-devel automake bison # for swig
file
openssl-devel
gmp-devel
gperf
diffutils
libipt libipt-devel # intel
patch
perl # for openssl
git
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat xz-libs python3 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkgconf-pkg-config # build system
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
libseccomp-devel
python3 python3-pip python3-virtualenv python3-virtualenvwrapper python3-pyyaml nmap-ncat # for tests
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which nodejs golang zip unzip java-11-openjdk-devel # for driver tests
sbcl # for custom Lisp C++ preprocessing
autoconf # for jemalloc code generation
libtool # for protobuf code generation
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
if [ -v LD_LIBRARY_PATH ]; then
# On Fedora 38, yum/dnf and python11 use a newer glibc which is not compatible
# with ours, so we need to momentarily disable the env
local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
LD_LIBRARY_PATH=""
fi
local missing=""
for pkg in $1; do
if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
if [ -v OLD_LD_LIBRARY_PATH ]; then
echo "Restoring LD_LIBRARY_PATH..."
LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
fi
}
install() {
cd "$DIR"
if [ "$EUID" -ne 0 ]; then
echo "Please run as root."
exit 1
fi
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
dnf update -y
for pkg in $1; do
dnf install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"

View File

@ -1,117 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "fedora-39"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
coreutils-common gcc gcc-c++ make # generic build tools
wget # used for archive download
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
curl libcurl-devel # for cmake
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel pcre2-devel automake bison # for swig
file
openssl-devel
gmp-devel
gperf
diffutils
libipt libipt-devel # intel
patch
perl # for openssl
git
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat xz-libs python3 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make pkgconf-pkg-config # build system
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
libseccomp-devel
python3 python3-pip python3-virtualenv python3-virtualenvwrapper python3-pyyaml nmap-ncat # for tests
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which nodejs golang zip unzip java-11-openjdk-devel # for driver tests
sbcl # for custom Lisp C++ preprocessing
autoconf # for jemalloc code generation
libtool # for protobuf code generation
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
if [ -v LD_LIBRARY_PATH ]; then
# On Fedora 38, yum/dnf and python11 use a newer glibc which is not compatible
# with ours, so we need to momentarily disable the env
local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
LD_LIBRARY_PATH=""
fi
local missing=""
for pkg in $1; do
if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
if [ -v OLD_LD_LIBRARY_PATH ]; then
echo "Restoring LD_LIBRARY_PATH..."
LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
fi
}
install() {
cd "$DIR"
if [ "$EUID" -ne 0 ]; then
echo "Please run as root."
exit 1
fi
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
dnf update -y
for pkg in $1; do
dnf install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"

View File

@ -1,212 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
# TODO(gitbuda): Rocky gets automatic updates -> figure out how to handle them.
check_operating_system "rocky-9.3"
check_architecture "x86_64"
TOOLCHAIN_BUILD_DEPS=(
wget # used for archive download
coreutils-common gcc gcc-c++ make # generic build tools
# NOTE: Pure libcurl conflicts with libcurl-minimal
libcurl-devel # cmake build requires it
gnupg2 # used for archive signature verification
tar gzip bzip2 xz unzip # used for archive unpacking
zlib-devel # zlib library used for all builds
expat-devel xz-devel python3-devel perl-Unicode-EastAsianWidth texinfo libbabeltrace-devel # for gdb
readline-devel # for cmake and llvm
libffi-devel libxml2-devel # for llvm
libedit-devel pcre-devel pcre2-devel automake bison # for swig
file
openssl-devel
gmp-devel
gperf
diffutils
libipt libipt-devel # intel
patch
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz # used for archive unpacking
zlib # zlib library used for all builds
expat xz-libs python3 # for gdb
readline # for cmake and llvm
libffi libxml2 # for llvm
openssl-devel
perl # for openssl
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkgconf-pkg-config # build system
wget # for downloading libs
libuuid-devel java-11-openjdk # required by antlr
readline-devel # for memgraph console
python3-devel # for query modules
openssl-devel
libseccomp-devel
python3 python3-pip python3-virtualenv nmap-ncat # for qa, macro_benchmark and stress tests
#
# IMPORTANT: python3-yaml does NOT exist on CentOS
# Install it manually using `pip3 install PyYAML`
#
PyYAML # Package name here does not correspond to the yum package!
libcurl-devel # mg-requests
rpm-build rpmlint # for RPM package building
doxygen graphviz # source documentation generators
which nodejs golang custom-golang1.18.9 # for driver tests
zip unzip java-11-openjdk-devel java-17-openjdk java-17-openjdk-devel custom-maven3.9.3 # for driver tests
cl-asdf common-lisp-controller sbcl # for custom Lisp C++ preprocessing
autoconf # for jemalloc code generation
libtool # for protobuf code generation
cyrus-sasl-devel
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == "PyYAML" ]; then
if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == "python3-virtualenv" ]; then
continue
fi
if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
}
install() {
cd "$DIR"
if [ "$EUID" -ne 0 ]; then
echo "Please run as root."
exit 1
fi
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
yum update -y
yum install -y wget git python3 python3-pip
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == perl-Unicode-EastAsianWidth ]; then
if ! dnf list installed perl-Unicode-EastAsianWidth >/dev/null 2>/dev/null; then
dnf install -y https://dl.rockylinux.org/pub/rocky/9/CRB/x86_64/os/Packages/p/perl-Unicode-EastAsianWidth-12.0-7.el9.noarch.rpm
fi
continue
fi
if [ "$pkg" == texinfo ]; then
if ! dnf list installed texinfo >/dev/null 2>/dev/null; then
dnf install -y https://dl.rockylinux.org/pub/rocky/9/CRB/x86_64/os/Packages/t/texinfo-6.7-15.el9.x86_64.rpm
fi
continue
fi
if [ "$pkg" == libbabeltrace-devel ]; then
if ! dnf list installed libbabeltrace-devel >/dev/null 2>/dev/null; then
dnf install -y https://dl.rockylinux.org/pub/rocky/9/devel/x86_64/os/Packages/l/libbabeltrace-devel-1.5.8-10.el9.x86_64.rpm
fi
continue
fi
if [ "$pkg" == libipt-devel ]; then
if ! dnf list installed libipt-devel >/dev/null 2>/dev/null; then
dnf install -y https://dl.rockylinux.org/pub/rocky/9/devel/x86_64/os/Packages/l/libipt-devel-2.0.4-5.el9.x86_64.rpm
fi
continue
fi
if [ "$pkg" == doxygen ]; then
if ! dnf list installed doxygen >/dev/null 2>/dev/null; then
dnf install -y https://dl.rockylinux.org/pub/rocky/9/CRB/x86_64/os/Packages/d/doxygen-1.9.1-11.el9.x86_64.rpm
fi
continue
fi
if [ "$pkg" == cl-asdf ]; then
if ! dnf list installed cl-asdf >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.sysadmins.ws/el8/base/x86_64/cl-asdf-20101028-18.el8.noarch.rpm
fi
continue
fi
if [ "$pkg" == common-lisp-controller ]; then
if ! dnf list installed common-lisp-controller >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.sysadmins.ws/el8/base/x86_64/common-lisp-controller-7.4-20.el8.noarch.rpm
fi
continue
fi
if [ "$pkg" == sbcl ]; then
if ! dnf list installed sbcl >/dev/null 2>/dev/null; then
dnf install -y https://pkgs.sysadmins.ws/el8/base/x86_64/sbcl-2.0.1-4.el8.x86_64.rpm
fi
continue
fi
if [ "$pkg" == PyYAML ]; then
if [ -z ${SUDO_USER+x} ]; then # Running as root (e.g. Docker).
pip3 install --user PyYAML
else # Running using sudo.
sudo -H -u "$SUDO_USER" bash -c "pip3 install --user PyYAML"
fi
continue
fi
if [ "$pkg" == python3-virtualenv ]; then
if [ -z ${SUDO_USER+x} ]; then # Running as root (e.g. Docker).
pip3 install virtualenv
pip3 install virtualenvwrapper
else # Running using sudo.
sudo -H -u "$SUDO_USER" bash -c "pip3 install virtualenv"
sudo -H -u "$SUDO_USER" bash -c "pip3 install virtualenvwrapper"
fi
continue
fi
yum install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"

View File

@ -1,158 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
IFS=' '
# NOTE: docker_image_name could be a local image built on top of the release/package images.
# NOTE: each line has to be quoted; separate docker_container_type, script_name and docker_image_name with a space.
# "docker_container_type script_name docker_image_name"
# docker_container_type OPTIONS:
# * mgrun -> runs a plain/empty operating system for testing the native memgraph package
# * mgbuild -> runs the builder container to build memgraph inside it -> builder images can be created using release/package/run.sh
OPERATING_SYSTEMS=(
# "mgrun amzn-2 amazonlinux:2"
# "mgrun centos-7 centos:7"
# "mgrun centos-9 dokken/centos-stream-9"
# "mgrun debian-10 debian:10"
# "mgrun debian-11 debian:11"
# "mgrun fedora-36 fedora:36"
# "mgrun ubuntu-18.04 ubuntu:18.04"
# "mgrun ubuntu-20.04 ubuntu:20.04"
# "mgrun ubuntu-22.04 ubuntu:22.04"
# "mgbuild debian-12 memgraph/memgraph-builder:v5_debian-12"
)
if [ ! "$(docker info)" ]; then
echo "ERROR: Docker is required"
exit 1
fi
print_help () {
echo -e "$0 all\t\t\t\t => start + init all containers in the background"
echo -e "$0 check\t\t\t\t => check all containers"
echo -e "$0 delete\t\t\t\t => stop + remove all containers"
echo -e "$0 copy src_container dst_container => copy build package from src to dst container"
exit 1
}
# NOTE: This is an idempotent operation!
# TODO(gitbuda): Consider making docker_run always delete + start a new container or add a new function.
docker_run () {
cnt_type="$1"
if [[ "$cnt_type" != "mgbuild" && "$cnt_type" != "mgrun" ]]; then
echo "ERROR: Wrong docker_container_type -> valid options are mgbuild, mgrun"
exit 1
fi
cnt_name="$2"
cnt_image="$3"
if [ ! "$(docker ps -q -f name=$cnt_name)" ]; then
if [ "$(docker ps -aq -f status=exited -f name=$cnt_name)" ]; then
echo "Cleanup of the old exited container..."
docker rm $cnt_name
fi
if [[ "$cnt_type" == "mgbuild" ]]; then
docker run -d --volume "$SCRIPT_DIR/../../:/memgraph" --network host --name "$cnt_name" "$cnt_image"
fi
if [[ "$cnt_type" == "mgrun" ]]; then
docker run -d --volume "$SCRIPT_DIR/../../:/memgraph" --network host --name "$cnt_name" "$cnt_image" sleep infinity
fi
fi
echo "The $cnt_image container is active under $cnt_name name!"
}
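# Usage sketch (image name taken from the commented OPERATING_SYSTEMS list above):
#   docker_run mgbuild mgbuild_debian-12 memgraph/memgraph-builder:v5_debian-12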
docker_exec () {
cnt_name="$1"
cnt_cmd="$2"
docker exec -it "$cnt_name" bash -c "$cnt_cmd"
}
docker_stop_and_rm () {
cnt_name="$1"
if [ "$(docker ps -q -f name=$cnt_name)" ]; then
docker stop "$1"
fi
if [ "$(docker ps -aq -f status=exited -f name=$cnt_name)" ]; then
docker rm "$1"
fi
}
# TODO(gitbuda): Make the call to `install NEW_DEPS` configurable; the question is what else would be useful.
start_all () {
for script_docker_pair in "${OPERATING_SYSTEMS[@]}"; do
read -a script_docker <<< "$script_docker_pair"
docker_container_type="${script_docker[0]}"
script_name="${script_docker[1]}"
docker_image="${script_docker[2]}"
docker_name="${docker_container_type}_$script_name"
echo ""
echo "~~~~ OPERATING ON $docker_image as $docker_name..."
docker_run "$docker_container_type" "$docker_name" "$docker_image"
docker_exec "$docker_name" "/memgraph/environment/os/$script_name.sh install NEW_DEPS"
echo "---- DONE EVERYHING FOR $docker_image as $docker_name..."
echo ""
done
}
check_all () {
for script_docker_pair in "${OPERATING_SYSTEMS[@]}"; do
read -a script_docker <<< "$script_docker_pair"
docker_container_type="${script_docker[0]}"
script_name="${script_docker[1]}"
docker_image="${script_docker[2]}"
docker_name="${docker_container_type}_$script_name"
echo ""
echo "~~~~ OPERATING ON $docker_image as $docker_name..."
docker_exec "$docker_name" "/memgraph/environment/os/$script_name.sh check NEW_DEPS"
echo "---- DONE EVERYHING FOR $docker_image as $docker_name..."
echo ""
done
}
delete_all () {
for script_docker_pair in "${OPERATING_SYSTEMS[@]}"; do
read -a script_docker <<< "$script_docker_pair"
docker_container_type="${script_docker[0]}"
script_name="${script_docker[1]}"
docker_image="${script_docker[2]}"
docker_name="${docker_container_type}_$script_name"
docker_stop_and_rm "$docker_name"
echo "~~~~ $docker_image as $docker_name DELETED"
done
}
# TODO(gitbuda): Copying a file between containers is a useful util; add delete as well, and consider copying a whole folder.
# TODO(gitbuda): Add args: src_cnt dst_cnt abs_path; support both a file and a recursive folder, always delete + copy.
copy_build_package () {
src_container="$1"
dst_container="$2"
src="$src_container:/memgraph/build/output"
tmp_dst="$SCRIPT_DIR/../../build"
mkdir -p "$tmp_dst"
rm -rf "$tmp_dst/output"
dst="$dst_container:/"
docker cp "$src" "$tmp_dst"
docker cp "$tmp_dst/output" "$dst"
}
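# Usage sketch (container names follow the ${docker_container_type}_${script_name}
# convention used by start_all): $0 copy mgbuild_debian-12 mgrun_ubuntu-22.04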
if [ "$#" -eq 0 ]; then
print_help
else
case $1 in
all)
start_all
;;
check)
check_all
;;
delete)
delete_all
;;
copy) # src_container dst_container
if [ "$#" -ne 3 ]; then
print_help
fi
copy_build_package "$2" "$3"
;;
*)
print_help
;;
esac
fi

View File

@ -1,10 +1,11 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "todo-os-name"
check_architecture "todo-arch-name"
TOOLCHAIN_BUILD_DEPS=(
pkg
@ -18,20 +19,6 @@ MEMGRAPH_BUILD_DEPS=(
pkg
)
MEMGRAPH_TEST_DEPS=(
pkg
)
MEMGRAPH_RUN_DEPS=(
pkg
)
# NEW_DEPS is useful when you want to test the installation of a new package.
# During the test you can put packages like wget curl tar gzip here
NEW_DEPS=(
pkg
)
list() {
echo "$1"
}

View File

@ -1,10 +1,10 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
# IMPORTANT: Deprecated since memgraph v2.12.0.
check_operating_system "ubuntu-18.04"
check_architecture "x86_64"
@ -25,7 +25,7 @@ TOOLCHAIN_BUILD_DEPS=(
libgmp-dev # for gdb
gperf # for proxygen
libssl-dev
libedit-dev libpcre2-dev libpcre3-dev automake bison # swig
libedit-dev libpcre3-dev automake bison # swig
)
TOOLCHAIN_RUN_DEPS=(
@ -41,7 +41,7 @@ TOOLCHAIN_RUN_DEPS=(
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkg-config # build system
make pkg-config # build system
curl wget # downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # memgraph console
@ -53,19 +53,9 @@ MEMGRAPH_BUILD_DEPS=(
libcurl4-openssl-dev # mg-requests
sbcl # custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs nodejs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # driver tests
custom-golang1.18.9 # for driver tests
mono-runtime mono-mcs nodejs zip unzip default-jdk-headless # driver tests
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp2
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
@ -73,53 +63,11 @@ list() {
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
check_all_dpkg "$1"
}
install() {
apt update -y
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == openjdk-17-jdk-headless ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
apt install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
fi
continue
fi
apt install -y "$pkg"
done
apt install -y $1
}
deps=$2"[*]"

View File

@ -1,5 +1,7 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
@ -22,7 +24,7 @@ TOOLCHAIN_BUILD_DEPS=(
libgmp-dev # for gdb
gperf # for proxygen
libssl-dev
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
libedit-dev libpcre3-dev automake bison # for swig
)
TOOLCHAIN_RUN_DEPS=(
@ -38,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkg-config # build system
make pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
@ -51,21 +53,10 @@ MEMGRAPH_BUILD_DEPS=(
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # for driver tests
dotnet-sdk-3.1 golang custom-golang1.18.9 nodejs npm # for driver tests
mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
dotnet-sdk-3.1 golang nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp2
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
@ -73,35 +64,12 @@ list() {
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
check_all_dpkg "$1"
}
install() {
cd "$DIR"
export DEBIAN_FRONTEND=noninteractive
apt update -y
apt install -y wget
apt update
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
@ -109,16 +77,8 @@ install() {
else
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == dotnet-sdk-3.1 ]; then
if ! dpkg -s dotnet-sdk-3.1 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
@ -128,15 +88,6 @@ install() {
fi
continue
fi
if [ "$pkg" == openjdk-17-jdk-headless ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
apt install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
fi
continue
fi
apt install -y "$pkg"
done
}

View File

@ -1,144 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
check_operating_system "ubuntu-22.04"
check_architecture "arm64" "aarch64"
TOOLCHAIN_BUILD_DEPS=(
coreutils gcc g++ build-essential make # generic build tools
wget # used for archive download
gnupg # used for archive signature verification
tar gzip bzip2 xz-utils unzip # used for archive unpacking
zlib1g-dev # zlib library used for all builds
libexpat1-dev libbabeltrace-dev liblzma-dev python3-dev texinfo # for gdb
libcurl4-openssl-dev # for cmake
libreadline-dev # for cmake and llvm
libffi-dev libxml2-dev # for llvm
curl # snappy
file
git # for thrift
libgmp-dev # for gdb
gperf # for proxygen
libssl-dev
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
)
TOOLCHAIN_RUN_DEPS=(
make # generic build tools
tar gzip bzip2 xz-utils # used for archive unpacking
zlib1g # zlib library used for all builds
libexpat1 libbabeltrace1 liblzma5 python3 # for gdb
libcurl4 # for cmake
libreadline8 # for cmake and llvm
libffi7 libxml2 # for llvm
libssl-dev # for libevent
)
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
libpython3-dev python3-dev # for query modules
libssl-dev
libseccomp-dev
netcat # tests are using nc to wait for memgraph
python3 python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
python3-yaml # for the configuration generator
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # for driver tests
dotnet-sdk-6.0 golang custom-golang1.18.9 nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp2
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
echo "$1"
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
}
install() {
cd "$DIR"
apt update
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
else
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == dotnet-sdk-6.0 ]; then
if ! dpkg -s dotnet-sdk-6.0 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
dpkg -i packages-microsoft-prod.deb
apt-get update
apt-get install -y apt-transport-https dotnet-sdk-6.0
fi
continue
fi
if [ "$pkg" == openjdk-17-jdk-headless ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
apt install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java /usr/lib/jvm/java-11-openjdk-arm64/bin/java
update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-arm64/bin/javac
fi
continue
fi
apt install -y "$pkg"
done
}
deps=$2"[*]"
"$1" "${!deps}"

View File

@ -1,5 +1,7 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"
@ -22,7 +24,7 @@ TOOLCHAIN_BUILD_DEPS=(
libgmp-dev # for gdb
gperf # for proxygen
libssl-dev
libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
libedit-dev libpcre3-dev automake bison # for swig
)
TOOLCHAIN_RUN_DEPS=(
@ -38,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(
MEMGRAPH_BUILD_DEPS=(
git # source code control
make cmake pkg-config # build system
make pkg-config # build system
curl wget # for downloading libs
uuid-dev default-jre-headless # required by antlr
libreadline-dev # for memgraph console
@ -51,21 +53,10 @@ MEMGRAPH_BUILD_DEPS=(
libcurl4-openssl-dev # mg-requests
sbcl # for custom Lisp C++ preprocessing
doxygen graphviz # source documentation generators
mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # for driver tests
dotnet-sdk-6.0 golang custom-golang1.18.9 nodejs npm # for driver tests
mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
dotnet-sdk-6.0 golang nodejs npm
autoconf # for jemalloc code generation
libtool # for protobuf code generation
libsasl2-dev
)
MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"
MEMGRAPH_RUN_DEPS=(
logrotate openssl python3 libseccomp2
)
NEW_DEPS=(
wget curl tar gzip
)
list() {
@ -73,34 +64,12 @@ list() {
}
check() {
local missing=""
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
missing="$pkg $missing"
fi
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
missing="$pkg $missing"
fi
continue
fi
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
missing="$pkg $missing"
fi
done
if [ "$missing" != "" ]; then
echo "MISSING PACKAGES: $missing"
exit 1
fi
check_all_dpkg "$1"
}
install() {
cd "$DIR"
apt update -y
apt install -y wget
apt update
# If GitHub Actions runner is installed, append LANG to the environment.
# Python related tests don't work without the LANG export.
if [ -d "/home/gh/actions-runner" ]; then
@ -108,16 +77,8 @@ install() {
else
echo "NOTE: export LANG=en_US.utf8"
fi
apt install -y wget
for pkg in $1; do
if [ "$pkg" == custom-maven3.9.3 ]; then
install_custom_maven "3.9.3"
continue
fi
if [ "$pkg" == custom-golang1.18.9 ]; then
install_custom_golang "1.18.9"
continue
fi
if [ "$pkg" == dotnet-sdk-6.0 ]; then
if ! dpkg -s dotnet-sdk-6.0 2>/dev/null >/dev/null; then
wget -nv https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
@ -127,15 +88,6 @@ install() {
fi
continue
fi
if [ "$pkg" == openjdk-17-jdk-headless ]; then
if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
apt install -y "$pkg"
# The default Java version should be Java 11
update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
fi
continue
fi
apt install -y "$pkg"
done
}

View File

@ -1,5 +0,0 @@
archives
build
output
*.tar.gz
tmp_build.sh

View File

@ -4,7 +4,7 @@ diff -ur a/CMakeLists.txt b/CMakeLists.txt
@@ -52,9 +52,9 @@
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHs-c-")
add_definitions(-D_HAS_EXCEPTIONS=0)
- # Disable RTTI.
- string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
@ -17,7 +17,7 @@ diff -ur a/CMakeLists.txt b/CMakeLists.txt
@@ -77,9 +77,9 @@
string(REGEX REPLACE "-fexceptions" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
- # Disable RTTI.
- string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
@ -25,5 +25,5 @@ diff -ur a/CMakeLists.txt b/CMakeLists.txt
+ # string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+ # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make

View File

@ -1,48 +0,0 @@
#!/bin/bash -e
# NOTE: Copy this under memgraph/environment/toolchain/vN/tmp_build.sh, edit and test.
pushd () { command pushd "$@" > /dev/null; }
popd () { command popd "$@" > /dev/null; }
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR"
source "$DIR/../../util.sh"
DISTRO="$(operating_system)"
TOOLCHAIN_VERSION=5
NAME=toolchain-v$TOOLCHAIN_VERSION
PREFIX=/opt/$NAME
function log_tool_name () {
echo ""
echo ""
echo "#### $1 ####"
echo ""
echo ""
}
# HERE: Remove/clear dependencies from a given toolchain.
mkdir -p archives && pushd archives
# HERE: Download dependencies here.
popd
mkdir -p build
pushd build
source $PREFIX/activate
export CC=$PREFIX/bin/clang
export CXX=$PREFIX/bin/clang++
export CFLAGS="$CFLAGS -fPIC"
export PATH=$PREFIX/bin:$PATH
export LD_LIBRARY_PATH=$PREFIX/lib64
COMMON_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$PREFIX
-DCMAKE_PREFIX_PATH=$PREFIX
-DCMAKE_BUILD_TYPE=Release
-DCMAKE_C_COMPILER=$CC
-DCMAKE_CXX_COMPILER=$CXX
-DBUILD_SHARED_LIBS=OFF
-DCMAKE_CXX_STANDARD=20
-DBUILD_TESTING=OFF
-DCMAKE_REQUIRED_INCLUDES=$PREFIX/include
-DCMAKE_POSITION_INDEPENDENT_CODE=ON"
# HERE: Add dependencies to test below.
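# Illustrative example of what goes below this marker, following the pattern
# the real build script uses for each library (name and version hypothetical):
#   tar -xzf ../archives/somelib-1.0.tar.gz
#   pushd somelib-1.0 && mkdir -p _build && pushd _build
#   cmake .. $COMMON_CMAKE_FLAGS
#   make -j$CPUS install
#   popd && popd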

View File

@ -7,7 +7,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR"
source "$DIR/../../util.sh"
source "$DIR/../util.sh"
DISTRO="$(operating_system)"
# toolchain version
@ -30,10 +30,10 @@ LLVM_VERSION=11.0.0
SWIG_VERSION=4.0.2 # used only for LLVM compilation
# Check for the dependencies.
echo "ALL BUILD PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS
echo "ALL BUILD PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS
# check installation directory
NAME=toolchain-v$TOOLCHAIN_VERSION
@ -442,7 +442,7 @@ In order to be able to run all of these tools you should install the following
packages:
\`\`\`
$($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
$($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
\`\`\`
## Usage

View File

@ -7,7 +7,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR"
source "$DIR/../../util.sh"
source "$DIR/../util.sh"
DISTRO="$(operating_system)"
# toolchain version
@ -31,10 +31,10 @@ LLVM_VERSION_LONG=12.0.1-rc4
SWIG_VERSION=4.0.2 # used only for LLVM compilation
# Check for the dependencies.
echo "ALL BUILD PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS
echo "ALL BUILD PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS
# check installation directory
NAME=toolchain-v$TOOLCHAIN_VERSION
@ -452,7 +452,7 @@ In order to be able to run all of these tools you should install the following
packages:
\`\`\`
$($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
$($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
\`\`\`
## Usage

View File

@ -7,17 +7,9 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR"
source "$DIR/../../util.sh"
source "$DIR/../util.sh"
DISTRO="$(operating_system)"
function log_tool_name () {
echo ""
echo ""
echo "#### $1 ####"
echo ""
echo ""
}
for_arm=false
if [[ "$#" -eq 1 ]]; then
if [[ "$1" == "--for-arm" ]]; then
@ -28,11 +20,9 @@ if [[ "$#" -eq 1 ]]; then
fi
fi
TOOLCHAIN_STDCXX="${TOOLCHAIN_STDCXX:-libstdc++}"
if [[ "$TOOLCHAIN_STDCXX" != "libstdc++" && "$TOOLCHAIN_STDCXX" != "libc++" ]]; then
echo "Only GCC (libstdc++) or LLVM (libc++) C++ standard library implementations are supported."
exit 1
fi
os="$1"
# toolchain version
TOOLCHAIN_VERSION=4
# package versions used
@ -51,15 +41,11 @@ CPPCHECK_VERSION=2.6
LLVM_VERSION=13.0.0
SWIG_VERSION=4.0.2 # used only for LLVM compilation
# Set the right operating system setup script.
ENV_SCRIPT="$DIR/../../os/$DISTRO.sh"
if [[ "$for_arm" = true ]]; then
ENV_SCRIPT="$DIR/../../os/$DISTRO-arm.sh"
fi
echo "ALL BUILD PACKAGES: $(${ENV_SCRIPT} list TOOLCHAIN_BUILD_DEPS)"
${ENV_SCRIPT} check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $(${ENV_SCRIPT} list TOOLCHAIN_RUN_DEPS)"
${ENV_SCRIPT} check TOOLCHAIN_RUN_DEPS
# Check for the dependencies.
echo "ALL BUILD PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS
# check installation directory
NAME=toolchain-v$TOOLCHAIN_VERSION
@ -113,8 +99,6 @@ if [ ! -f llvm-$LLVM_VERSION.src.tar.xz ]; then
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-tools-extra-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/compiler-rt-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libunwind-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxx-$LLVM_VERSION.src.tar.xz
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxxabi-$LLVM_VERSION.src.tar.xz
fi
if [ ! -f pahole-gdb-master.zip ]; then
wget https://github.com/PhilArmstrong/pahole-gdb/archive/master.zip -O pahole-gdb-master.zip
@ -172,8 +156,6 @@ if [ ! -f llvm-$LLVM_VERSION.src.tar.xz.sig ]; then
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-tools-extra-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/compiler-rt-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libunwind-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxx-$LLVM_VERSION.src.tar.xz.sig
wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxxabi-$LLVM_VERSION.src.tar.xz.sig
fi
# list of valid llvm gnupg keys: https://releases.llvm.org/download.html
$GPG --keyserver $KEYSERVER --recv-keys 0x474E22316ABF4785A88C6E8EA2C794A986419D8A
@ -183,8 +165,6 @@ $GPG --verify lld-$LLVM_VERSION.src.tar.xz.sig lld-$LLVM_VERSION.src.tar.xz
$GPG --verify clang-tools-extra-$LLVM_VERSION.src.tar.xz.sig clang-tools-extra-$LLVM_VERSION.src.tar.xz
$GPG --verify compiler-rt-$LLVM_VERSION.src.tar.xz.sig compiler-rt-$LLVM_VERSION.src.tar.xz
$GPG --verify libunwind-$LLVM_VERSION.src.tar.xz.sig libunwind-$LLVM_VERSION.src.tar.xz
$GPG --verify libcxx-$LLVM_VERSION.src.tar.xz.sig libcxx-$LLVM_VERSION.src.tar.xz
$GPG --verify libcxxabi-$LLVM_VERSION.src.tar.xz.sig libcxxabi-$LLVM_VERSION.src.tar.xz
popd
@ -192,7 +172,7 @@ popd
mkdir -p build
pushd build
log_tool_name "GCC $GCC_VERSION"
# compile gcc
if [ ! -f $PREFIX/bin/gcc ]; then
if [ -d gcc-$GCC_VERSION ]; then
rm -rf gcc-$GCC_VERSION
@ -283,7 +263,7 @@ fi
export PATH=$PREFIX/bin:$PATH
export LD_LIBRARY_PATH=$PREFIX/lib64
log_tool_name "binutils $BINUTILS_VERSION"
# compile binutils
if [ ! -f $PREFIX/bin/ld.gold ]; then
if [ -d binutils-$BINUTILS_VERSION ]; then
rm -rf binutils-$BINUTILS_VERSION
@ -347,7 +327,7 @@ if [ ! -f $PREFIX/bin/ld.gold ]; then
popd && popd
fi
log_tool_name "GDB $GDB_VERSION"
# compile gdb
if [ ! -f $PREFIX/bin/gdb ]; then
if [ -d gdb-$GDB_VERSION ]; then
rm -rf gdb-$GDB_VERSION
@ -383,62 +363,6 @@ if [ ! -f $PREFIX/bin/gdb ]; then
--without-babeltrace \
--enable-tui \
--with-python=python3
elif [[ "${DISTRO}" == fedora* ]]; then
# Remove readline, gdb does not compile
env \
CC=gcc \
CXX=g++ \
CFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
CXXFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
CPPFLAGS="-Wdate-time -D_FORTIFY_SOURCE=2 -fPIC" \
LDFLAGS="-Wl,-z,relro" \
PYTHON="" \
../configure \
--build=x86_64-linux-gnu \
--host=x86_64-linux-gnu \
--prefix=$PREFIX \
--disable-maintainer-mode \
--disable-dependency-tracking \
--disable-silent-rules \
--disable-gdbtk \
--disable-shared \
--without-guile \
--with-system-gdbinit=$PREFIX/etc/gdb/gdbinit \
--with-expat \
--with-system-zlib \
--with-lzma \
--with-babeltrace \
--with-intel-pt \
--enable-tui \
--with-python=python3
elif [[ "${DISTRO}" == "amzn-2" ]]; then
# Remove readline, gdb does not compile
env \
CC=gcc \
CXX=g++ \
CFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
CXXFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
CPPFLAGS="-Wdate-time -D_FORTIFY_SOURCE=2 -fPIC" \
LDFLAGS="-Wl,-z,relro" \
PYTHON="" \
../configure \
--build=x86_64-linux-gnu \
--host=x86_64-linux-gnu \
--prefix=$PREFIX \
--disable-maintainer-mode \
--disable-dependency-tracking \
--disable-silent-rules \
--disable-gdbtk \
--disable-shared \
--without-guile \
--with-system-gdbinit=$PREFIX/etc/gdb/gdbinit \
--with-expat \
--with-system-zlib \
--with-lzma \
--with-babeltrace \
--with-intel-pt \
--enable-tui \
--with-python=python3
else
# https://buildd.debian.org/status/fetch.php?pkg=gdb&arch=amd64&ver=8.2.1-2&stamp=1550831554&raw=0
env \
@ -474,13 +398,13 @@ if [ ! -f $PREFIX/bin/gdb ]; then
popd && popd
fi
log_tool_name "install pahole"
# install pahole
if [ ! -d $PREFIX/share/pahole-gdb ]; then
unzip ../archives/pahole-gdb-master.zip
mv pahole-gdb-master $PREFIX/share/pahole-gdb
fi
log_tool_name "setup system gdbinit"
# setup system gdbinit
if [ ! -f $PREFIX/etc/gdb/gdbinit ]; then
mkdir -p $PREFIX/etc/gdb
cat >$PREFIX/etc/gdb/gdbinit <<EOF
@ -506,7 +430,7 @@ end
EOF
fi
log_tool_name "cmake $CMAKE_VERSION"
# compile cmake
if [ ! -f $PREFIX/bin/cmake ]; then
if [ -d cmake-$CMAKE_VERSION ]; then
rm -rf cmake-$CMAKE_VERSION
@ -532,7 +456,7 @@ if [ ! -f $PREFIX/bin/cmake ]; then
popd && popd
fi
log_tool_name "cppcheck $CPPCHECK_VERSION"
# compile cppcheck
if [ ! -f $PREFIX/bin/cppcheck ]; then
if [ -d cppcheck-$CPPCHECK_VERSION ]; then
rm -rf cppcheck-$CPPCHECK_VERSION
@ -556,7 +480,7 @@ if [ ! -f $PREFIX/bin/cppcheck ]; then
popd
fi
log_tool_name "swig $SWIG_VERSION"
# compile swig
if [ ! -d swig-$SWIG_VERSION/install ]; then
if [ -d swig-$SWIG_VERSION ]; then
rm -rf swig-$SWIG_VERSION
@ -572,7 +496,7 @@ if [ ! -d swig-$SWIG_VERSION/install ]; then
popd && popd
fi
log_tool_name "LLVM $LLVM_VERSION"
# compile llvm
if [ ! -f $PREFIX/bin/clang ]; then
if [ -d llvm-$LLVM_VERSION ]; then
rm -rf llvm-$LLVM_VERSION
@ -589,19 +513,8 @@ if [ ! -f $PREFIX/bin/clang ]; then
mv compiler-rt-$LLVM_VERSION.src/ llvm-$LLVM_VERSION/projects/compiler-rt
tar -xvf ../archives/libunwind-$LLVM_VERSION.src.tar.xz
mv libunwind-$LLVM_VERSION.src/include/mach-o llvm-$LLVM_VERSION/tools/lld/include
# The following is required because of libc++
tar -xvf ../archives/libcxx-$LLVM_VERSION.src.tar.xz
mv libcxx-$LLVM_VERSION.src llvm-$LLVM_VERSION/projects/libcxx
tar -xvf ../archives/libcxxabi-$LLVM_VERSION.src.tar.xz
mv libcxxabi-$LLVM_VERSION.src llvm-$LLVM_VERSION/projects/libcxxabi
# NOTE: We moved part of the libunwind in one of the previous step.
rm -r libunwind-$LLVM_VERSION.src
tar -xvf ../archives/libunwind-$LLVM_VERSION.src.tar.xz
mv libunwind-$LLVM_VERSION.src llvm-$LLVM_VERSION/projects/libunwind
pushd llvm-$LLVM_VERSION
mkdir -p build && pushd build
mkdir build && pushd build
# activate swig
export PATH=$DIR/build/swig-$SWIG_VERSION/install/bin:$PATH
# influenced by: https://buildd.debian.org/status/fetch.php?pkg=llvm-toolchain-7&arch=amd64&ver=1%3A7.0.1%7E%2Brc2-1%7Eexp1&stamp=1541506173&raw=0
@ -654,7 +567,7 @@ In order to be able to run all of these tools you should install the following
packages:
\`\`\`
$($DIR/../../os/$ENV_SCRIPT.sh list TOOLCHAIN_RUN_DEPS)
$($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
\`\`\`
## Usage
@ -711,7 +624,6 @@ export PS1="($NAME) \$PS1"
export LD_LIBRARY_PATH=$PREFIX/lib:$PREFIX/lib64
export CXXFLAGS=-isystem\ $PREFIX/include\ \$CXXFLAGS
export CFLAGS=-isystem\ $PREFIX/include\ \$CFLAGS
export VENV=$PREFIX
# disable root
function su () {
@ -763,7 +675,7 @@ PROXYGEN_SHA256=5360a8ccdfb2f5a6c7b3eed331ec7ab0e2c792d579c6fff499c85c516c11fe14
SNAPPY_SHA256=75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7
SNAPPY_VERSION=1.1.9
XZ_VERSION=5.2.5 # for LZMA
ZLIB_VERSION=1.2.13
ZLIB_VERSION=1.2.12
ZSTD_VERSION=1.5.0
WANGLE_SHA256=1002e9c32b6f4837f6a760016e3b3e22f3509880ef3eaad191c80dc92655f23f
@ -908,11 +820,7 @@ source $PREFIX/activate
export CC=$PREFIX/bin/clang
export CXX=$PREFIX/bin/clang++
export CFLAGS="$CFLAGS -fPIC"
if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then
export CXXFLAGS="$CXXFLAGS -fPIC"
else
export CXXFLAGS="$CXXFLAGS -fPIC -stdlib=libc++"
fi
export CXXFLAGS="$CXXFLAGS -fPIC"
COMMON_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$PREFIX
-DCMAKE_PREFIX_PATH=$PREFIX
-DCMAKE_BUILD_TYPE=Release
@ -926,7 +834,7 @@ COMMON_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$PREFIX
COMMON_CONFIGURE_FLAGS="--enable-shared=no --prefix=$PREFIX"
COMMON_MAKE_INSTALL_FLAGS="-j$CPUS BUILD_SHARED=no PREFIX=$PREFIX install"
log_tool_name "bzip2 $BZIP2_VERSION"
# install bzip2
if [ ! -f $PREFIX/include/bzlib.h ]; then
if [ -d bzip2-$BZIP2_VERSION ]; then
rm -rf bzip2-$BZIP2_VERSION
@ -937,7 +845,7 @@ if [ ! -f $PREFIX/include/bzlib.h ]; then
popd
fi
log_tool_name "fmt $FMT_VERSION"
# install fmt
if [ ! -d $PREFIX/include/fmt ]; then
if [ -d fmt-$FMT_VERSION ]; then
rm -rf fmt-$FMT_VERSION
@ -950,7 +858,7 @@ if [ ! -d $PREFIX/include/fmt ]; then
popd && popd
fi
log_tool_name "lz4 $LZ4_VERSION"
# install lz4
if [ ! -f $PREFIX/include/lz4.h ]; then
if [ -d lz4-$LZ4_VERSION ]; then
rm -rf lz4-$LZ4_VERSION
@ -961,7 +869,7 @@ if [ ! -f $PREFIX/include/lz4.h ]; then
popd
fi
log_tool_name "xz $XZ_VERSION"
# install xz
if [ ! -f $PREFIX/include/lzma.h ]; then
if [ -d xz-$XZ_VERSION ]; then
rm -rf xz-$XZ_VERSION
@ -973,7 +881,7 @@ if [ ! -f $PREFIX/include/lzma.h ]; then
popd
fi
log_tool_name "zlib $ZLIB_VERSION"
# install zlib
if [ ! -f $PREFIX/include/zlib.h ]; then
if [ -d zlib-$ZLIB_VERSION ]; then
rm -rf zlib-$ZLIB_VERSION
@ -987,7 +895,7 @@ if [ ! -f $PREFIX/include/zlib.h ]; then
popd && popd
fi
log_tool_name "zstd $ZSTD_VERSION"
# install zstd
if [ ! -f $PREFIX/include/zstd.h ]; then
if [ -d zstd-$ZSTD_VERSION ]; then
rm -rf zstd-$ZSTD_VERSION
@ -1002,8 +910,7 @@ if [ ! -f $PREFIX/include/zstd.h ]; then
popd && popd
fi
# TODO(gitbuda): Freeze jemalloc version.
log_tool_name "jemalloc"
#install jemalloc
if [ ! -d $PREFIX/include/jemalloc ]; then
if [ -d jemalloc ]; then
rm -rf jemalloc
@ -1020,7 +927,7 @@ if [ ! -d $PREFIX/include/jemalloc ]; then
popd
fi
log_tool_name "BOOST $BOOST_VERSION"
# install boost
if [ ! -d $PREFIX/include/boost ]; then
if [ -d boost_$BOOST_VERSION_UNDERSCORES ]; then
rm -rf boost_$BOOST_VERSION_UNDERSCORES
@ -1028,24 +935,15 @@ if [ ! -d $PREFIX/include/boost ]; then
tar -xzf ../archives/boost_$BOOST_VERSION_UNDERSCORES.tar.gz
pushd boost_$BOOST_VERSION_UNDERSCORES
./bootstrap.sh --prefix=$PREFIX --with-toolset=clang --with-python=python3 --without-icu
if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then
./b2 toolset=clang -j$CPUS install variant=release link=static cxxstd=20 --disable-icu \
-sZLIB_SOURCE="$PREFIX" -sZLIB_INCLUDE="$PREFIX/include" -sZLIB_LIBPATH="$PREFIX/lib" \
-sBZIP2_SOURCE="$PREFIX" -sBZIP2_INCLUDE="$PREFIX/include" -sBZIP2_LIBPATH="$PREFIX/lib" \
-sLZMA_SOURCE="$PREFIX" -sLZMA_INCLUDE="$PREFIX/include" -sLZMA_LIBPATH="$PREFIX/lib" \
-sZSTD_SOURCE="$PREFIX" -sZSTD_INCLUDE="$PREFIX/include" -sZSTD_LIBPATH="$PREFIX/lib"
else
./b2 toolset=clang -j$CPUS install variant=release link=static cxxstd=20 --disable-icu \
cxxflags="-stdlib=libc++" linkflags="-stdlib=libc++" \
-sZLIB_SOURCE="$PREFIX" -sZLIB_INCLUDE="$PREFIX/include" -sZLIB_LIBPATH="$PREFIX/lib" \
-sBZIP2_SOURCE="$PREFIX" -sBZIP2_INCLUDE="$PREFIX/include" -sBZIP2_LIBPATH="$PREFIX/lib" \
-sLZMA_SOURCE="$PREFIX" -sLZMA_INCLUDE="$PREFIX/include" -sLZMA_LIBPATH="$PREFIX/lib" \
-sZSTD_SOURCE="$PREFIX" -sZSTD_INCLUDE="$PREFIX/include" -sZSTD_LIBPATH="$PREFIX/lib"
fi
./b2 toolset=clang -j$CPUS install variant=release link=static cxxstd=20 --disable-icu \
-sZLIB_SOURCE="$PREFIX" -sZLIB_INCLUDE="$PREFIX/include" -sZLIB_LIBPATH="$PREFIX/lib" \
-sBZIP2_SOURCE="$PREFIX" -sBZIP2_INCLUDE="$PREFIX/include" -sBZIP2_LIBPATH="$PREFIX/lib" \
-sLZMA_SOURCE="$PREFIX" -sLZMA_INCLUDE="$PREFIX/include" -sLZMA_LIBPATH="$PREFIX/lib" \
-sZSTD_SOURCE="$PREFIX" -sZSTD_INCLUDE="$PREFIX/include" -sZSTD_LIBPATH="$PREFIX/lib"
popd
fi
log_tool_name "double-conversion $DOUBLE_CONVERSION_VERSION"
# install double-conversion
if [ ! -d $PREFIX/include/double-conversion ]; then
if [ -d double-conversion-$DOUBLE_CONVERSION_VERSION ]; then
rm -rf double-conversion-$DOUBLE_CONVERSION_VERSION
@ -1060,8 +958,7 @@ if [ ! -d $PREFIX/include/double-conversion ]; then
popd && popd
fi
# TODO(gitbuda): Freeze gflags version.
log_tool_name "gflags"
# install gflags
if [ ! -d $PREFIX/include/gflags ]; then
if [ -d gflags ]; then
rm -rf gflags
@ -1080,7 +977,7 @@ if [ ! -d $PREFIX/include/gflags ]; then
popd && popd
fi
log_tool_name "libunwind $LIBUNWIND_VERSION"
# install libunwind
if [ ! -f $PREFIX/include/libunwind.h ]; then
if [ -d libunwind-$LIBUNWIND_VERSION ]; then
rm -rf libunwind-$LIBUNWIND_VERSION
@ -1093,7 +990,7 @@ if [ ! -f $PREFIX/include/libunwind.h ]; then
popd
fi
log_tool_name "glog $GLOG_VERSION"
# install glog
if [ ! -d $PREFIX/include/glog ]; then
if [ -d glog-$GLOG_VERSION ]; then
rm -rf glog-$GLOG_VERSION
@ -1107,7 +1004,7 @@ if [ ! -d $PREFIX/include/glog ]; then
popd && popd
fi
log_tool_name "libevent $LIBEVENT_VERSION"
# install libevent
if [ ! -d $PREFIX/include/event2 ]; then
if [ -d libevent-$LIBEVENT_VERSION ]; then
rm -rf libevent-$LIBEVENT_VERSION
@ -1126,7 +1023,7 @@ if [ ! -d $PREFIX/include/event2 ]; then
popd && popd
fi
log_tool_name "snappy $SNAPPY_VERSION"
# install snappy
if [ ! -f $PREFIX/include/snappy.h ]; then
if [ -d snappy-$SNAPPY_VERSION ]; then
rm -rf snappy-$SNAPPY_VERSION
@ -1144,7 +1041,7 @@ if [ ! -f $PREFIX/include/snappy.h ]; then
popd && popd
fi
log_tool_name "libsodium $LIBSODIUM_VERSION"
# install libsodium
if [ ! -f $PREFIX/include/sodium.h ]; then
if [ -d libsodium-$LIBSODIUM_VERSION ]; then
rm -rf libsodium-$LIBSODIUM_VERSION
@ -1156,7 +1053,7 @@ if [ ! -f $PREFIX/include/sodium.h ]; then
popd
fi
log_tool_name "libaio $LIBAIO_VERSION"
# install libaio
if [ ! -f $PREFIX/include/libaio.h ]; then
if [ -d libaio-$LIBAIO_VERSION ]; then
rm -rf libaio-$LIBAIO_VERSION
@ -1167,121 +1064,114 @@ if [ ! -f $PREFIX/include/libaio.h ]; then
popd
fi
if [[ "${DISTRO}" != "amzn-2" ]]; then
log_tool_name "folly $FBLIBS_VERSION"
if [ ! -d $PREFIX/include/folly ]; then
if [ -d folly-$FBLIBS_VERSION ]; then
rm -rf folly-$FBLIBS_VERSION
fi
mkdir folly-$FBLIBS_VERSION
tar -xzf ../archives/folly-$FBLIBS_VERSION.tar.gz -C folly-$FBLIBS_VERSION
pushd folly-$FBLIBS_VERSION
patch -p1 < ../../folly.patch
# build is used by facebook builder
mkdir _build
pushd _build
cmake .. $COMMON_CMAKE_FLAGS \
-DBOOST_LINK_STATIC=ON \
-DBUILD_TESTS=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DCXX_STD="c++20"
make -j$CPUS install
popd && popd
# install folly
if [ ! -d $PREFIX/include/folly ]; then
if [ -d folly-$FBLIBS_VERSION ]; then
rm -rf folly-$FBLIBS_VERSION
fi
mkdir folly-$FBLIBS_VERSION
tar -xzf ../archives/folly-$FBLIBS_VERSION.tar.gz -C folly-$FBLIBS_VERSION
pushd folly-$FBLIBS_VERSION
patch -p1 < ../../folly.patch
# build is used by facebook builder
mkdir _build
pushd _build
cmake .. $COMMON_CMAKE_FLAGS \
-DBOOST_LINK_STATIC=ON \
-DBUILD_TESTS=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DCXX_STD="c++20"
make -j$CPUS install
popd && popd
fi
log_tool_name "fizz $FBLIBS_VERSION"
if [ ! -d $PREFIX/include/fizz ]; then
if [ -d fizz-$FBLIBS_VERSION ]; then
rm -rf fizz-$FBLIBS_VERSION
fi
mkdir fizz-$FBLIBS_VERSION
tar -xzf ../archives/fizz-$FBLIBS_VERSION.tar.gz -C fizz-$FBLIBS_VERSION
pushd fizz-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
cmake ../fizz $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_EXAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF
make -j$CPUS install
popd && popd
# install fizz
if [ ! -d $PREFIX/include/fizz ]; then
if [ -d fizz-$FBLIBS_VERSION ]; then
rm -rf fizz-$FBLIBS_VERSION
fi
mkdir fizz-$FBLIBS_VERSION
tar -xzf ../archives/fizz-$FBLIBS_VERSION.tar.gz -C fizz-$FBLIBS_VERSION
pushd fizz-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
cmake ../fizz $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_EXAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF
make -j$CPUS install
popd && popd
fi
log_tool_name "wangle FBLIBS_VERSION"
if [ ! -d $PREFIX/include/wangle ]; then
if [ -d wangle-$FBLIBS_VERSION ]; then
rm -rf wangle-$FBLIBS_VERSION
fi
mkdir wangle-$FBLIBS_VERSION
tar -xzf ../archives/wangle-$FBLIBS_VERSION.tar.gz -C wangle-$FBLIBS_VERSION
pushd wangle-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
cmake ../wangle $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_EXAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF
make -j$CPUS install
popd && popd
# install wangle
if [ ! -d $PREFIX/include/wangle ]; then
if [ -d wangle-$FBLIBS_VERSION ]; then
rm -rf wangle-$FBLIBS_VERSION
fi
mkdir wangle-$FBLIBS_VERSION
tar -xzf ../archives/wangle-$FBLIBS_VERSION.tar.gz -C wangle-$FBLIBS_VERSION
pushd wangle-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
cmake ../wangle $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_EXAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF
make -j$CPUS install
popd && popd
fi
log_tool_name "proxygen $FBLIBS_VERSION"
if [ ! -d $PREFIX/include/proxygen ]; then
if [ -d proxygen-$FBLIBS_VERSION ]; then
rm -rf proxygen-$FBLIBS_VERSION
fi
mkdir proxygen-$FBLIBS_VERSION
tar -xzf ../archives/proxygen-$FBLIBS_VERSION.tar.gz -C proxygen-$FBLIBS_VERSION
pushd proxygen-$FBLIBS_VERSION
patch -p1 < ../../proxygen.patch
# build is used by facebook builder
mkdir _build
pushd _build
cmake .. $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_SAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DBUILD_QUIC=OFF
make -j$CPUS install
popd && popd
# install proxygen
if [ ! -d $PREFIX/include/proxygen ]; then
if [ -d proxygen-$FBLIBS_VERSION ]; then
rm -rf proxygen-$FBLIBS_VERSION
fi
mkdir proxygen-$FBLIBS_VERSION
tar -xzf ../archives/proxygen-$FBLIBS_VERSION.tar.gz -C proxygen-$FBLIBS_VERSION
pushd proxygen-$FBLIBS_VERSION
patch -p1 < ../../proxygen.patch
# build is used by facebook builder
mkdir _build
pushd _build
cmake .. $COMMON_CMAKE_FLAGS \
-DBUILD_TESTS=OFF \
-DBUILD_SAMPLES=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DBUILD_QUIC=OFF
make -j$CPUS install
popd && popd
fi
log_tool_name "flex $FBLIBS_VERSION"
if [ ! -f $PREFIX/include/FlexLexer.h ]; then
if [ -d flex-$FLEX_VERSION ]; then
rm -rf flex-$FLEX_VERSION
fi
tar -xzf ../archives/flex-$FLEX_VERSION.tar.gz
pushd flex-$FLEX_VERSION
./configure $COMMON_CONFIGURE_FLAGS
make -j$CPUS install
popd
# install flex
if [ ! -f $PREFIX/include/FlexLexer.h ]; then
if [ -d flex-$FLEX_VERSION ]; then
rm -rf flex-$FLEX_VERSION
fi
tar -xzf ../archives/flex-$FLEX_VERSION.tar.gz
pushd flex-$FLEX_VERSION
./configure $COMMON_CONFIGURE_FLAGS
make -j$CPUS install
popd
fi
log_tool_name "fbthrift $FBLIBS_VERSION"
if [ ! -d $PREFIX/include/thrift ]; then
if [ -d fbthrift-$FBLIBS_VERSION ]; then
rm -rf fbthrift-$FBLIBS_VERSION
fi
git clone --depth 1 --branch v$FBLIBS_VERSION https://github.com/facebook/fbthrift.git fbthrift-$FBLIBS_VERSION
pushd fbthrift-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then
CMAKE_CXX_FLAGS="-fsized-deallocation"
else
CMAKE_CXX_FLAGS="-fsized-deallocation -stdlib=libc++"
fi
cmake .. $COMMON_CMAKE_FLAGS \
-Denable_tests=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"
make -j$CPUS install
popd
# install fbthrift
if [ ! -d $PREFIX/include/thrift ]; then
if [ -d fbthrift-$FBLIBS_VERSION ]; then
rm -rf fbthrift-$FBLIBS_VERSION
fi
git clone --depth 1 --branch v$FBLIBS_VERSION https://github.com/facebook/fbthrift.git fbthrift-$FBLIBS_VERSION
pushd fbthrift-$FBLIBS_VERSION
# build is used by facebook builder
mkdir _build
pushd _build
cmake .. $COMMON_CMAKE_FLAGS \
-Denable_tests=OFF \
-DGFLAGS_NOTHREADS=OFF \
-DCMAKE_CXX_FLAGS=-fsized-deallocation
make -j$CPUS install
popd
fi
popd
@ -1289,7 +1179,7 @@ popd
# create toolchain archive
if [ ! -f $NAME-binaries-$DISTRO.tar.gz ]; then
DISTRO_FULL_NAME=${DISTRO}
if [[ "${DISTRO}" == centos* ]] || [[ "${DISTRO}" == fedora* ]]; then
if [[ "${DISTRO}" == centos* ]]; then
if [[ "$for_arm" = "true" ]]; then
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-aarch64"
else
@ -1302,12 +1192,7 @@ if [ ! -f $NAME-binaries-$DISTRO.tar.gz ]; then
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-amd64"
fi
fi
if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then
# Pass because infra scripts assume there is no C++ standard lib in the name.
echo "NOTE: Not adding anything to the archive name that GCC C++ standard lib is used."
else
DISTRO_FULL_NAME="$DISTRO_FULL_NAME-libc++"
fi
tar --owner=root --group=root -cpvzf $NAME-binaries-$DISTRO_FULL_NAME.tar.gz -C /opt $NAME
fi

File diff suppressed because it is too large

View File

@ -1,42 +0,0 @@
#!/bin/bash -ex
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
PREFIX=/opt/toolchain-v5
# NOTE: Oftentimes, when versions in the build script are changed, something
# doesn't work. To avoid rebuilding the whole toolchain and instead rebuild a
# specific lib from scratch, just uncomment the relevant line below and run
# this script. Don't forget to comment it back to avoid unnecessary deletes
# the next time you run this script.
# rm -rf "$DIR/build"
# rm -rf "$DIR/output"
# rm -rf "$PREFIX/bin/gcc"
# rm -rf "$PREFIX/bin/ld.gold"
# rm -rf "$PREFIX/bin/gdb"
# rm -rf "$PREFIX/bin/cmake"
# rm -rf "$PREFIX/bin/clang"
# rm -rf "$PREFIX/include/bzlib.h"
# rm -rf "$PREFIX/include/fmt"
# rm -rf "$PREFIX/include/lz4.h"
# rm -rf "$PREFIX/include/lzma.h"
# rm -rf "$PREFIX/include/zlib.h"
# rm -rf "$PREFIX/include/zstd.h"
# rm -rf "$PREFIX/include/jemalloc"
# rm -rf "$PREFIX/include/boost"
# rm -rf "$PREFIX/include/double-conversion"
# rm -rf "$PREFIX/include/gflags"
# rm -rf "$PREFIX/include/libunwind.h"
# rm -rf "$PREFIX/include/glog"
# rm -rf "$PREFIX/include/event2"
# rm -rf "$PREFIX/include/sodium.h"
# rm -rf "$PREFIX/include/libaio.h"
# rm -rf "$PREFIX/include/FlexLexer.h"
# rm -rf "$PREFIX/include/snappy.h"
# rm -rf "$PREFIX/include/fizz"
# rm -rf "$PREFIX/include/folly"
# rm -rf "$PREFIX/include/proxygen"
# rm -rf "$PREFIX/include/wangle"
# rm -rf "$PREFIX/include/thrift"
# rm -rf "$PREFIX"

View File

@ -1,41 +0,0 @@
diff -ur a/folly/CMakeLists.txt b/folly/CMakeLists.txt
--- a/folly/CMakeLists.txt 2021-12-12 23:10:42.000000000 +0100
+++ b/folly/CMakeLists.txt 2022-02-03 15:19:41.349693134 +0100
@@ -28,7 +28,6 @@
)
add_subdirectory(experimental/exception_tracer)
-add_subdirectory(logging/example)
if (PYTHON_EXTENSIONS)
# Create tree of symbolic links in structure required for successful
diff -ur a/folly/experimental/exception_tracer/ExceptionTracerLib.cpp b/folly/experimental/exception_tracer/ExceptionTracerLib.cpp
--- a/folly/experimental/exception_tracer/ExceptionTracerLib.cpp 2021-12-12 23:10:42.000000000 +0100
+++ b/folly/experimental/exception_tracer/ExceptionTracerLib.cpp 2022-02-03 15:19:11.003368891 +0100
@@ -96,6 +96,7 @@
#define __builtin_unreachable()
#endif
+#if 0
namespace __cxxabiv1 {
void __cxa_throw(
@@ -154,5 +155,5 @@
}
} // namespace std
-
+#endif
#endif // defined(__GLIBCXX__)
diff -ur a/folly/Portability.h b/folly/Portability.h
--- a/folly/Portability.h 2021-12-12 23:10:42.000000000 +0100
+++ b/folly/Portability.h 2022-02-03 15:19:11.003368891 +0100
@@ -566,7 +566,7 @@
#define FOLLY_HAS_COROUTINES 0
#elif (__cpp_coroutines >= 201703L || __cpp_impl_coroutine >= 201902L) && \
(__has_include(<coroutine>) || __has_include(<experimental/coroutine>))
-#define FOLLY_HAS_COROUTINES 1
+#define FOLLY_HAS_COROUTINES 0
// This is mainly to workaround bugs triggered by LTO, when stack allocated
// variables in await_suspend end up on a coroutine frame.
#define FOLLY_CORO_AWAIT_SUSPEND_NONTRIVIAL_ATTRIBUTES FOLLY_NOINLINE

View File

@ -1,26 +0,0 @@
diff --git a/folly/CMakeLists.txt b/folly/CMakeLists.txt
index e0e16df..471131e 100644
--- a/folly/CMakeLists.txt
+++ b/folly/CMakeLists.txt
@@ -28,7 +28,7 @@ install(
)
add_subdirectory(experimental/exception_tracer)
-add_subdirectory(logging/example)
+# add_subdirectory(logging/example)
if (PYTHON_EXTENSIONS)
# Create tree of symbolic links in structure required for successful
diff --git a/folly/Portability.h b/folly/Portability.h
index 365ef1b..42d24b8 100644
--- a/folly/Portability.h
+++ b/folly/Portability.h
@@ -560,7 +560,7 @@ constexpr auto kCpplibVer = 0;
(defined(__cpp_coroutines) && __cpp_coroutines >= 201703L) || \
(defined(__cpp_impl_coroutine) && __cpp_impl_coroutine >= 201902L)) && \
(__has_include(<coroutine>) || __has_include(<experimental/coroutine>))
-#define FOLLY_HAS_COROUTINES 1
+#define FOLLY_HAS_COROUTINES 0
// This is mainly to workaround bugs triggered by LTO, when stack allocated
// variables in await_suspend end up on a coroutine frame.
#define FOLLY_CORO_AWAIT_SUSPEND_NONTRIVIAL_ATTRIBUTES FOLLY_NOINLINE

View File

@ -1,29 +0,0 @@
diff -ur a/CMakeLists.txt b/CMakeLists.txt
--- a/CMakeLists.txt 2021-05-05 00:53:34.000000000 +0200
+++ b/CMakeLists.txt 2022-01-27 17:18:34.758302398 +0100
@@ -52,9 +52,9 @@
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHs-c-")
add_definitions(-D_HAS_EXCEPTIONS=0)
- # Disable RTTI.
- string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
+ # # Disable RTTI.
+ # string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+ # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
else(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# Use -Wall for clang and gcc.
if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
@@ -77,9 +77,9 @@
string(REGEX REPLACE "-fexceptions" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")
- # Disable RTTI.
- string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
+ # # Disable RTTI.
+ # string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+ # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make

View File

@ -1,75 +0,0 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----
mQINBEzEOZIBEACxg/IuXERlDB48JBWmF4NxNUuuup1IhJAJyFGFSKh3OGAO2Ard
sNuRLjANsFXA7m7P5eTFcG+BoHHuAVYmKnI3PPZtHVLnUt4pGItPczQZ2BE1WpcI
ayjGTBJeKItX3Npqg9D/odO9WWS1i3FQPVdrLn0YH37/BA66jeMQCRo7g7GLpaNf
IrvYGsqTbxCwsmA37rpE7oyU4Yrf74HT091WBsRIoq/MelhbxTDMR8eu/dUGZQVc
Kj3lN55RepwWwUUKyqarY0zMt4HkFJ7v7yRL+Cvzy92Ouv4Wf2FlhNtEs5LE4Tax
W0PO5AEmUoKjX87SezQK0f652018b4u6Ex52cY7p+n5TII/UyoowH6+tY8UHo9yb
fStrqgNE/mY2bhA6+AwCaOUGsFzVVPTbjtxL3HacUP/jlA1h78V8VTvTs5d55iG7
jSqR9o05wje8rwNiXXK0xtiJahyNzL97Kn/DgPSqPIi45G+8nxWSPFM5eunBKRl9
vAnsvwrdPRsR6YR3uMHTuVhQX9/CY891MHkaZJ6wydWtKt3yQwJLYqwo5d4DwnUX
CduUwSKv+6RmtWI5ZmTQYOcBRcZyGKml9X9Q8iSbm6cnpFXmLrNQwCJN+D3SiYGc
MtbltZo0ysPMa6Xj5xFaYqWk/BI4iLb2Gs+ByGo/+a0Eq4XYBMOpitNniQARAQAB
tCdMYXNzZSBDb2xsaW4gPGxhc3NlLmNvbGxpbkB0dWthYW5pLm9yZz6JAlEEEwEK
ADsCGwMCHgECF4AECwkIBwMVCggFFgIDAQAWIQQ2kMJAzlG0Zw0wrRw47nV9aRhG
IAUCYEt9dQUJFxeR4wAKCRA47nV9aRhGIBNDEACxD6vJ+enZwe3IgkJh5JtLsC9b
MWCQRlPW1EVMsg96Cb5Rtron1eN1pp1TlzENJu1/C7C/VEsr9WwOPg26Men7fNf/
O21QM9IBWd/uB0Pu333WqKh92ESS5x9ST9DrG39nVGSPkQQBMuia72VrA+crPnwT
/h/u1IN6/sff5VDIU24rUiqW2Npy733dANruj7Ny0scRXVPltnVdhqwPHt6qNjC1
t+/cCnwHgW1BR1RYXBPpB42z/m29dL9rPrG0YPGWs2Bc+EATUICfEE6eIvwfciue
IJTjKT9Y9DrogJC2AYFhjC7N04OKdCB2hFs4BjexJwr4X0GJO7LhFl03c951AsIE
GHwrucRPB5bo2vmvQ8IvZn7CmtdUJzXv9JlyU6p+MIK1pz7TK6GgSOSffQIXZn6e
nUPtm9mEwuncOfmW8/ODYPs1gCWYgyiFJx8h7eEu+M4MxHSFBs7MwXf/Ae2fSp+M
P/p198qB8fC5oVBnF95qb0Qi0uc1D+Gb+gpBF+ymMb+s/VBOR3QWiym7AzBrJ62g
UnbC9jMLGnSRI+7p7raUfMTgXr5/oQoBw7ExJVltSSRrim2YH/t4CV47mO6dR9J3
1RtsTFIRNhz+07XPsETcuCV/dgqeC8fOFLt9MY17Sufhb1DcGy4urZBOIhXcpTV7
vHVj5IYH5nYOT49NRYkCOAQTAQIAIgUCTMQ5kgIbAwYLCQgHAwIGFQgCCQoLBBYC
AwECHgECF4AACgkQOO51fWkYRiAg4A/7BXKwoRaXrMbMPOW7vuVF7c2IKB2Yqzn1
vLBCwuEHkqY237lDcXY4/5LR+1gcZ3Duw1n/BRSm0FBdvyX/JTWiWNSDUkKAO/0l
T2Tg44YLrDT3bzwu8dbU9xQt6kH+SCOHvv5Oe4k79l5mro6fF3H1M0bN63x/YoFY
ojy09D7/JptY82oR4f/VdKnfZLJcCViCb0wp8SD2NkDAudKg+K+7PD8HlTWklQQg
TZdRXxVZKIJeU42aJDqnRbAhJd64YHyClhqut9F5LUmiP5qfLfNhkKDhNOwk2Blr
BGBJkSd7wPyzcX4Mun/L6YspHjbeVMt9TD7HQlo+OOd2OjAHCx6pqwkXnzeLPEaE
cPdQ1SHgrBViAxX3DNPubLP0Knw8XwFu96EuhHZgexE1W7bB4LFsJyXAc5k1PqPD
CLsAauxmvI2OfI7opG/8wyxDvNgoPjG8fZNAgY0REqPC0JnTXChH31IxUmhNotH8
tD3DDTZOHw05n5MwwUrEE9xiETVDfFQcMLfxZ9KLz+BC2g1t5LYublRgnCMNJzFg
sNUMM02CphABzl/LCLnumr0eyQQ/weV4twEhLwSDmqLYHL0EdYW0Y3CnnU9vmYxQ
cXKbstS71sEJJYBBmSBbf9GxkOY8BRNtwVwY0kPgxv1WqdVBiAFvfB+pyAsrax9B
3UeB7ZSwRD6JAhwEEAEKAAYFAlS25GwACgkQlbYYGy0z6ew92Q//ZA9/6piQtoW4
PwP/1DtWGyKU8hwR+9FG669iPk/dAG+yoEJtFMOUpg/FUFmCX8Bc4oEHsCVyLxKt
DcCVUIRcYNSFi5hTZaBEbwsOlDT37gtlfIIu34hhHRccKaLnN/N9gNMNw8wGh9xg
Q/KtxZwcbk/bZIlDkKTJkFBRAekdEGAFDWb/AZOy+LQxS8ZAh1eWkfV0i8opmK9k
gPXtLE0WSsqtYyGs58z+BFE9NH3tEUwK6jSvtuLwQl4UrICNbKthcpb8WwH6UXzb
q3QNSYVOpf/cqRdBJA6bvb/ku/xyKVL08lGmxD9v1b137R7mafDAFPTsvH2Mt/0V
YuhtWav3r1Bl9QksDxt2DTS8wiWDUBetGqOVdcw7vBrXPEWDNBmxeJXsiJ7zJlR+
9wrJOm6RV2+l1IPxu96EaPS+kTNBijKrhxb67bww8BTEWTd0wcdJmgWRkM8SIstp
IKqd0L2TFYph2/NtrBhRg+DIEPJPpSTGsUMcCEXCZPQ+cIdlQKsWpk0tZ62DlvEl
r7E+wgUSQolRfx5KrpZifiS2zQlhzdXv28CJhsVbLyw5fUAWUKIH/dCo5NKsNLk2
Lc5DH9VWnFgxAAtW290FqeK/4ulMq7Vs1dQSwyHM2Ni3QqqeaiOrh8gbSY5CMLFN
Y3HYRwuTYPa3AobsozCzBj0Zdf/6AFe5Ag0ETMQ5kgEQAL/FwKdjxgPxtSpgq1SM
zgZtTTyLqhgGD3NZfadHWHYRIL38NDV3JeTA79Y2zj2dj7KQPDT+0aqeizTV2E3j
P3iCQ53VOT4consBaQAgKexpptnS+T1DobtICFJ0GGzf0HRj6KO2zSOuOitWPWlU
wbvX7M0LLI2+hqlx0jTPqbJFZ/Za6KTtbS6xdCPVUpUqYZQpokEZcwQmUp8Q+lGo
JD2sNYCZyap63X/aAOgCGr2RXYddOH5e8vGzGW+mwtCv+WQ9Ay35mGqI5MqkbZd1
Qbuv2b1647E/QEEucfRHVbJVKGGPpFMUJtcItyyIt5jo+r9CCL4Cs47dF/9/RNwu
NvpvHXUyqMBQdWNZRMx4k/NGD/WviPi9m6mIMui6rOQsSOaqYdcUX4Nq2Orr3Oaz
2JPQdUfeI23iot1vK8hxvUCQTV3HfJghizN6spVl0yQOKBiE8miJRgrjHilH3hTb
xoo42xDkNAq+CQo3QAm1ibDxKCDq0RcWPjcCRAN/Q5MmpcodpdKkzV0yGIS4g7s5
frVrgV/kox2r4/Yxsr8K909+4H82AjTKGX/BmsQFCTAqBk6p7I0zxjIqJ/w33TZB
Q0Pn4r3WIlUPafzY6a9/LAvN1fHRxf9SpCByJsszD03Qu5f5TB8gthsdnVmTo7jj
iordEKMtw2aEMLzdWWTQ/TNVABEBAAGJAjwEGAEKACYCGwwWIQQ2kMJAzlG0Zw0w
rRw47nV9aRhGIAUCYEt9YAUJFxeRzgAKCRA47nV9aRhGIMLtD/9HuKM4pngImcuz
YwzQmdv4j26YYyh4jVsKEmVWTiRcehEgUIlrWkCu3qzd5NK+RetS7kJ8MPnzEUfj
YbpdC6yrF6n1mSrZZ4VJMkV2ev37bIgXM+Wp1mCAGbjNxQnjn9RabT/gjIqmGuRn
AP7RsSeOSuO/gO9h2Pteciz23ussTilB+8cTooQEQQZe6Kv/zukvL+ccSehLHsZ7
qVfRUAmtt8nFkXXE+s8jfLfhqstaI2/RJu5witaPcXM8Mnz2E95aASAbZy0eQot9
0Pvf07n9yuC3tueTvzvlXx3h5U3yT44tIOmzANIQjay1TGdm+RBJ2ZYyhyLawlZ2
NVUXXSp4QZZXPA0UWbF+pb7Q9cdKDNFVuvGBljuea0Yd0T2o+ibDq43HziX9ll+l
SXk9mqvW1UcDOaxWrSsm1Gc1O9g3wqH5xHAhtY8GPh/7VgAawskPkmnlkMW6pYPy
zibbeISJL1gd1jIT63y6aoVrtNoo+wYJm280ROflh4+5QOo6QJ+jm70fkXSG/qJ5
a8/qCPTHkJc/rpkL6/TDQAJURi9RhDAC0gb40HtusbN1LZEA+i0cWTmYXap+DB4Y
R4pApilpaG87M+VUokR4xpnx7vTb2MPa7Mdenvi9FEGnKXadmT8038vlfzz5GGUT
MlVin9BQPTpdA+PpRiJvKJgVDeAFOg==
=asTC
-----END PGP PUBLIC KEY BLOCK-----

View File

@ -1,18 +1,11 @@
#!/bin/bash
function operating_system() {
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
grep -E '^(VERSION_)?ID=' /etc/os-release | \
sort | cut -d '=' -f 2- | sed 's/"//g' | paste -s -d '-'
elif [[ "$OSTYPE" == "darwin"* ]]; then
echo "$(sw_vers -productName)-$(sw_vers -productVersion | cut -d '.' -f 1)"
else
echo "operating_system called on an unknown OS"
exit 1
fi
operating_system() {
grep -E '^(VERSION_)?ID=' /etc/os-release | \
sort | cut -d '=' -f 2- | sed 's/"//g' | paste -s -d '-'
}
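# Illustration: on Ubuntu 22.04, /etc/os-release contains ID=ubuntu and
# VERSION_ID="22.04"; sorting the two lines, cutting off the keys, stripping
# the quotes and joining with '-' yields "ubuntu-22.04", which is exactly the
# per-distro script name consumed elsewhere as $DIR/../os/$DISTRO.sh.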
function check_operating_system() {
check_operating_system() {
if [ "$(operating_system)" != "$1" ]; then
echo "Not the right operating system!"
exit 1
@ -21,25 +14,20 @@ function check_operating_system() {
fi
}
function architecture() {
architecture() {
uname -m
}
check_architecture() {
local ARCH=$(architecture)
for arch in "$@"; do
if [ "${ARCH}" = "$arch" ]; then
echo "The right architecture!"
return 0
fi
done
echo "Not the right architecture!"
echo "Expected: $@"
echo "Actual: ${ARCH}"
exit 1
if [ "$(architecture)" != "$1" ]; then
echo "Not the right architecture!"
exit 1
else
echo "The right architecture."
fi
}
function check_all_yum() {
check_all_yum() {
local missing=""
for pkg in $1; do
if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
@ -52,7 +40,7 @@ function check_all_yum() {
fi
}
function check_all_dpkg() {
check_all_dpkg() {
local missing=""
for pkg in $1; do
if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
@ -65,7 +53,7 @@ function check_all_dpkg() {
fi
}
function check_all_dnf() {
check_all_dnf() {
local missing=""
for pkg in $1; do
if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
@ -77,34 +65,8 @@ function check_all_dnf() {
exit 1
fi
}
function install_all_apt() {
install_all_apt() {
for pkg in $1; do
apt install -y "$pkg"
done
}
function install_custom_golang() {
# NOTE: The official https://go.dev/doc/manage-install doesn't seem to be working.
GOVERSION="$1"
GOINSTALLDIR="/opt/go$GOVERSION"
GOROOT="$GOINSTALLDIR/go" # GOPATH=$HOME/go
if [ ! -f "$GOROOT/bin/go" ]; then
curl -LO https://go.dev/dl/go$GOVERSION.linux-amd64.tar.gz
mkdir -p "$GOINSTALLDIR"
tar -C "$GOINSTALLDIR" -xzf go$GOVERSION.linux-amd64.tar.gz
fi
echo "go $GOVERSION installed under $GOROOT"
}
function install_custom_maven() {
MVNVERSION="$1"
MVNINSTALLDIR="/opt/apache-maven-$MVNVERSION"
MVNURL="https://s3.eu-west-1.amazonaws.com/deps.memgraph.io/maven/apache-maven-$MVNVERSION-bin.tar.gz"
if [ ! -f "$MVNINSTALLDIR/bin/mvn" ]; then
echo "Downloading maven from $MVNURL"
curl -LO "$MVNURL"
tar -C "/opt" -xzf "apache-maven-$MVNVERSION-bin.tar.gz"
fi
echo "maven $MVNVERSION installed under $MVNINSTALLDIR"
}
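These two helpers back the `custom-*` pseudo-package names intercepted in the per-OS install loops above: `custom-golang1.18.9` routes to `install_custom_golang "1.18.9"` (unpacked under `/opt/go1.18.9/go`) and `custom-maven3.9.3` to `install_custom_maven "3.9.3"` (under `/opt/apache-maven-3.9.3`). A sketch of how a consumer would put the tools on `PATH` afterwards, which the helpers themselves deliberately do not do:

```bash
# Illustrative only; the helpers install under /opt but leave PATH alone.
export PATH="/opt/go1.18.9/go/bin:/opt/apache-maven-3.9.3/bin:$PATH"
go version
mvn -version
```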

View File

@ -1,26 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"
print_help() {
echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 memgraph_logs_file_path cypherl_output_path"
exit 1
}
if [ "$#" -ne 2 ]; then
print_help
fi
INPUT="$1"
OUTPUT="$2"
if [ ! -f "$INPUT" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} memgraph_logs_file_path is not a file!"
print_help
fi
awk -v RS="Run] '" 'NR>1 { print $0 }' < "$INPUT" | sed -e "/^\[/d;" -e "s/'\([^']*\)$/;/g" > "$OUTPUT"
echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl file under $OUTPUT"
echo ""
echo "Import can be done by executing => \`cat $OUTPUT | mgconsole\`"

View File

@ -1,39 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"
print_help() {
echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 input_file_path output_file_path"
exit 1
}
if [ "$#" -ne 2 ]; then
print_help
fi
INPUT="$1"
OUTPUT="$2"
if [ ! -f "$INPUT" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} BEGIN and COMMIT are required because variables share the same name (e.g. row)"
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} CONSTRAINTS are just skipped -> ${COLOR_RED}please create constraints manually if needed${COLOR_NULL}"
echo 'CREATE INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' > "$OUTPUT"
sed -e 's/^:begin/BEGIN/g; s/^BEGIN$/BEGIN;/g;' \
-e 's/^:commit/COMMIT/g; s/^COMMIT$/COMMIT;/g;' \
-e '/^CALL/d; /^SCHEMA AWAIT/d;' \
-e 's/CREATE RANGE INDEX FOR (n:/CREATE INDEX ON :/g;' \
-e 's/) ON (n./(/g;' \
-e '/^CREATE CONSTRAINT/d; /^DROP CONSTRAINT/d;' "$INPUT" >> "$OUTPUT"
echo 'DROP INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' >> "$OUTPUT"
echo ""
echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl|.cypher file under $OUTPUT"
echo ""
echo "Please import data by executing => \`cat $OUTPUT | mgconsole\`"

View File

@ -1,61 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"
print_help() {
echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 input_file_schema_path input_file_nodes_path input_file_relationships_path input_file_cleanup_path output_file_path"
exit 1
}
if [ "$#" -ne 5 ]; then
print_help
fi
INPUT_SCHEMA="$1"
INPUT_NODES="$2"
INPUT_RELATIONSHIPS="$3"
INPUT_CLEANUP="$4"
OUTPUT="$5"
if [ ! -f "$INPUT_SCHEMA" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_NODES" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_RELATIONSHIPS" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_CLEANUP" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} BEGIN and COMMIT are required because variables share the same name (e.g. row)"
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} CONSTRAINTS are just skipped -> ${COLOR_RED}please create constraints manually if needed${COLOR_NULL}"
echo 'CREATE INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' > "$OUTPUT"
sed -e 's/CREATE RANGE INDEX FOR (n:/CREATE INDEX ON :/g;' \
-e 's/) ON (n./(/g;' \
-e '/^CREATE CONSTRAINT/d' $INPUT_SCHEMA >> "$OUTPUT"
cat "$INPUT_NODES" >> "$OUTPUT"
cat "$INPUT_RELATIONSHIPS" >> "$OUTPUT"
sed -e '/^DROP CONSTRAINT/d' "$INPUT_CLEANUP" >> "$OUTPUT"
echo 'DROP INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' >> "$OUTPUT"
echo ""
echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl|.cypher file under $OUTPUT"
echo ""
echo "Please import data by executing => \`cat $OUTPUT | mgconsole\`"

View File

@ -1,64 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"
print_help() {
echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 input_file_schema_path input_file_nodes_path input_file_relationships_path input_file_cleanup_path output_file_schema_path output_file_nodes_path output_file_relationships_path output_file_cleanup_path"
exit 1
}
if [ "$#" -ne 8 ]; then
print_help
fi
INPUT_SCHEMA="$1"
INPUT_NODES="$2"
INPUT_RELATIONSHIPS="$3"
INPUT_CLEANUP="$4"
OUTPUT_SCHEMA="$5"
OUTPUT_NODES="$6"
OUTPUT_RELATIONSHIPS="$7"
OUTPUT_CLEANUP="$8"
if [ ! -f "$INPUT_SCHEMA" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_NODES" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_RELATIONSHIPS" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
if [ ! -f "$INPUT_CLEANUP" ]; then
echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
print_help
fi
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} BEGIN and COMMIT are required because variables share the same name (e.g. row)"
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} CONSTRAINTS are just skipped -> ${COLOR_RED}please create constraints manually if needed${COLOR_NULL}"
echo 'CREATE INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' > "$OUTPUT_SCHEMA"
sed -e 's/CREATE RANGE INDEX FOR (n:/CREATE INDEX ON :/g;' \
-e 's/) ON (n./(/g;' \
-e '/^CREATE CONSTRAINT/d' $INPUT_SCHEMA >> "$OUTPUT_SCHEMA"
cat "$INPUT_NODES" > "$OUTPUT_NODES"
cat "$INPUT_RELATIONSHIPS" > "$OUTPUT_RELATIONSHIPS"
sed -e '/^DROP CONSTRAINT/d' "$INPUT_CLEANUP" >> "$OUTPUT_CLEANUP"
echo 'DROP INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' >> "$OUTPUT_CLEANUP"
echo ""
echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl|.cypher files under $OUTPUT_SCHEMA, $OUTPUT_NODES, $OUTPUT_RELATIONSHIPS and $OUTPUT_CLEANUP"
echo ""
echo "Please import data by executing => \`cat $OUTPUT_SCHEMA | mgconsole\`, \`cat $OUTPUT_NODES | mgconsole\`, \`cat $OUTPUT_RELATIONSHIPS | mgconsole\` and \`cat $OUTPUT_CLEANUP | mgconsole\`"

View File

@ -1,869 +0,0 @@
// Copyright 2024 Memgraph Ltd.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source
// License, and you may not use this file except in compliance with the Business Source License.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.
/// @file _mgp.hpp
///
/// The file contains methods that connect mg procedures with the outside code.
/// Methods like mapping a graph into memory or assigning new mg results or
/// their properties are implemented here.
#pragma once
#include "mg_exceptions.hpp"
#include "mg_procedure.h"
namespace mgp {
namespace {
inline void MgExceptionHandle(mgp_error result_code) {
switch (result_code) {
case mgp_error::MGP_ERROR_UNKNOWN_ERROR:
throw mg_exception::UnknownException();
case mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE:
throw mg_exception::AllocationException();
case mgp_error::MGP_ERROR_INSUFFICIENT_BUFFER:
throw mg_exception::InsufficientBufferException();
case mgp_error::MGP_ERROR_OUT_OF_RANGE:
throw mg_exception::OutOfRangeException();
case mgp_error::MGP_ERROR_LOGIC_ERROR:
throw mg_exception::LogicException();
case mgp_error::MGP_ERROR_DELETED_OBJECT:
throw mg_exception::DeletedObjectException();
case mgp_error::MGP_ERROR_INVALID_ARGUMENT:
throw mg_exception::InvalidArgumentException();
case mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS:
throw mg_exception::KeyAlreadyExistsException();
case mgp_error::MGP_ERROR_IMMUTABLE_OBJECT:
throw mg_exception::ImmutableObjectException();
case mgp_error::MGP_ERROR_VALUE_CONVERSION:
throw mg_exception::ValueConversionException();
case mgp_error::MGP_ERROR_SERIALIZATION_ERROR:
throw mg_exception::SerializationException();
default:
return;
}
}
template <typename TResult, typename TFunc, typename... TArgs>
TResult MgInvoke(TFunc func, TArgs... args) {
TResult result{};
auto result_code = func(args..., &result);
MgExceptionHandle(result_code);
return result;
}
template <typename TFunc, typename... TArgs>
inline void MgInvokeVoid(TFunc func, TArgs... args) {
auto result_code = func(args...);
MgExceptionHandle(result_code);
}
} // namespace
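// Illustrative expansion (not part of the original header): a call such as
//   MgInvoke<int64_t>(mgp_value_get_int, val)
// used by value_get_int below is equivalent to writing by hand:
//   int64_t result{};
//   mgp_error rc = mgp_value_get_int(val, &result);  // out-param appended last
//   MgExceptionHandle(rc);  // maps any MGP_ERROR_* code to a C++ exception
//   return result;
// MgInvokeVoid does the same for C API functions without an out-parameter.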
// mgp_value
// Make value
inline mgp_value *value_make_null(mgp_memory *memory) { return MgInvoke<mgp_value *>(mgp_value_make_null, memory); }
inline mgp_value *value_make_bool(int val, mgp_memory *memory) {
return MgInvoke<mgp_value *>(mgp_value_make_bool, val, memory);
}
inline mgp_value *value_make_int(int64_t val, mgp_memory *memory) {
return MgInvoke<mgp_value *>(mgp_value_make_int, val, memory);
}
inline mgp_value *value_make_double(double val, mgp_memory *memory) {
return MgInvoke<mgp_value *>(mgp_value_make_double, val, memory);
}
inline mgp_value *value_make_string(const char *val, mgp_memory *memory) {
return MgInvoke<mgp_value *>(mgp_value_make_string, val, memory);
}
inline mgp_value *value_make_list(mgp_list *val) { return MgInvoke<mgp_value *>(mgp_value_make_list, val); }
inline mgp_value *value_make_map(mgp_map *val) { return MgInvoke<mgp_value *>(mgp_value_make_map, val); }
inline mgp_value *value_make_vertex(mgp_vertex *val) { return MgInvoke<mgp_value *>(mgp_value_make_vertex, val); }
inline mgp_value *value_make_edge(mgp_edge *val) { return MgInvoke<mgp_value *>(mgp_value_make_edge, val); }
inline mgp_value *value_make_path(mgp_path *val) { return MgInvoke<mgp_value *>(mgp_value_make_path, val); }
inline mgp_value *value_make_date(mgp_date *val) { return MgInvoke<mgp_value *>(mgp_value_make_date, val); }
inline mgp_value *value_make_local_time(mgp_local_time *val) {
return MgInvoke<mgp_value *>(mgp_value_make_local_time, val);
}
inline mgp_value *value_make_local_date_time(mgp_local_date_time *val) {
return MgInvoke<mgp_value *>(mgp_value_make_local_date_time, val);
}
inline mgp_value *value_make_duration(mgp_duration *val) { return MgInvoke<mgp_value *>(mgp_value_make_duration, val); }
// Copy value
// TODO: implement within MGP API
// with primitive types ({bool, int, double, string}), create a new identical value
// otherwise call mgp_##TYPE_copy and convert type
inline mgp_value *value_copy(mgp_value *val, mgp_memory *memory) {
return MgInvoke<mgp_value *>(mgp_value_copy, val, memory);
}
// Destroy value
inline void value_destroy(mgp_value *val) { mgp_value_destroy(val); }
// Get value of type
inline mgp_value_type value_get_type(mgp_value *val) { return MgInvoke<mgp_value_type>(mgp_value_get_type, val); }
inline bool value_get_bool(mgp_value *val) { return MgInvoke<int>(mgp_value_get_bool, val); }
inline int64_t value_get_int(mgp_value *val) { return MgInvoke<int64_t>(mgp_value_get_int, val); }
inline double value_get_double(mgp_value *val) { return MgInvoke<double>(mgp_value_get_double, val); }
inline double value_get_numeric(mgp_value *val) {
if (MgInvoke<int>(mgp_value_is_int, val)) {
return static_cast<double>(value_get_int(val));
}
return value_get_double(val);
}
inline const char *value_get_string(mgp_value *val) { return MgInvoke<const char *>(mgp_value_get_string, val); }
inline mgp_list *value_get_list(mgp_value *val) { return MgInvoke<mgp_list *>(mgp_value_get_list, val); }
inline mgp_map *value_get_map(mgp_value *val) { return MgInvoke<mgp_map *>(mgp_value_get_map, val); }
inline mgp_vertex *value_get_vertex(mgp_value *val) { return MgInvoke<mgp_vertex *>(mgp_value_get_vertex, val); }
inline mgp_edge *value_get_edge(mgp_value *val) { return MgInvoke<mgp_edge *>(mgp_value_get_edge, val); }
inline mgp_path *value_get_path(mgp_value *val) { return MgInvoke<mgp_path *>(mgp_value_get_path, val); }
inline mgp_date *value_get_date(mgp_value *val) { return MgInvoke<mgp_date *>(mgp_value_get_date, val); }
inline mgp_local_time *value_get_local_time(mgp_value *val) {
return MgInvoke<mgp_local_time *>(mgp_value_get_local_time, val);
}
inline mgp_local_date_time *value_get_local_date_time(mgp_value *val) {
return MgInvoke<mgp_local_date_time *>(mgp_value_get_local_date_time, val);
}
inline mgp_duration *value_get_duration(mgp_value *val) {
return MgInvoke<mgp_duration *>(mgp_value_get_duration, val);
}
// Check type of value
inline bool value_is_null(mgp_value *val) { return MgInvoke<int>(mgp_value_is_null, val); }
inline bool value_is_bool(mgp_value *val) { return MgInvoke<int>(mgp_value_is_bool, val); }
inline bool value_is_int(mgp_value *val) { return MgInvoke<int>(mgp_value_is_int, val); }
inline bool value_is_double(mgp_value *val) { return MgInvoke<int>(mgp_value_is_double, val); }
inline bool value_is_numeric(mgp_value *val) { return value_is_int(val) || value_is_double(val); }
inline bool value_is_string(mgp_value *val) { return MgInvoke<int>(mgp_value_is_string, val); }
inline bool value_is_list(mgp_value *val) { return MgInvoke<int>(mgp_value_is_list, val); }
inline bool value_is_map(mgp_value *val) { return MgInvoke<int>(mgp_value_is_map, val); }
inline bool value_is_vertex(mgp_value *val) { return MgInvoke<int>(mgp_value_is_vertex, val); }
inline bool value_is_edge(mgp_value *val) { return MgInvoke<int>(mgp_value_is_edge, val); }
inline bool value_is_path(mgp_value *val) { return MgInvoke<int>(mgp_value_is_path, val); }
inline bool value_is_date(mgp_value *val) { return MgInvoke<int>(mgp_value_is_date, val); }
inline bool value_is_local_time(mgp_value *val) { return MgInvoke<int>(mgp_value_is_local_time, val); }
inline bool value_is_local_date_time(mgp_value *val) { return MgInvoke<int>(mgp_value_is_local_date_time, val); }
inline bool value_is_duration(mgp_value *val) { return MgInvoke<int>(mgp_value_is_duration, val); }
// Get type
inline mgp_type *type_any() { return MgInvoke<mgp_type *>(mgp_type_any); }
inline mgp_type *type_bool() { return MgInvoke<mgp_type *>(mgp_type_bool); }
inline mgp_type *type_string() { return MgInvoke<mgp_type *>(mgp_type_string); }
inline mgp_type *type_int() { return MgInvoke<mgp_type *>(mgp_type_int); }
inline mgp_type *type_float() { return MgInvoke<mgp_type *>(mgp_type_float); }
inline mgp_type *type_number() { return MgInvoke<mgp_type *>(mgp_type_number); }
inline mgp_type *type_list(mgp_type *element_type) { return MgInvoke<mgp_type *>(mgp_type_list, element_type); }
inline mgp_type *type_map() { return MgInvoke<mgp_type *>(mgp_type_map); }
inline mgp_type *type_node() { return MgInvoke<mgp_type *>(mgp_type_node); }
inline mgp_type *type_relationship() { return MgInvoke<mgp_type *>(mgp_type_relationship); }
inline mgp_type *type_path() { return MgInvoke<mgp_type *>(mgp_type_path); }
inline mgp_type *type_date() { return MgInvoke<mgp_type *>(mgp_type_date); }
inline mgp_type *type_local_time() { return MgInvoke<mgp_type *>(mgp_type_local_time); }
inline mgp_type *type_local_date_time() { return MgInvoke<mgp_type *>(mgp_type_local_date_time); }
inline mgp_type *type_duration() { return MgInvoke<mgp_type *>(mgp_type_duration); }
inline mgp_type *type_nullable(mgp_type *type) { return MgInvoke<mgp_type *>(mgp_type_nullable, type); }
inline bool create_label_index(mgp_graph *graph, const char *label) {
return MgInvoke<int>(mgp_create_label_index, graph, label);
}
inline bool drop_label_index(mgp_graph *graph, const char *label) {
return MgInvoke<int>(mgp_drop_label_index, graph, label);
}
inline mgp_list *list_all_label_indices(mgp_graph *graph, mgp_memory *memory) {
return MgInvoke<mgp_list *>(mgp_list_all_label_indices, graph, memory);
}
inline bool create_label_property_index(mgp_graph *graph, const char *label, const char *property) {
return MgInvoke<int>(mgp_create_label_property_index, graph, label, property);
}
inline bool drop_label_property_index(mgp_graph *graph, const char *label, const char *property) {
return MgInvoke<int>(mgp_drop_label_property_index, graph, label, property);
}
inline mgp_list *list_all_label_property_indices(mgp_graph *graph, mgp_memory *memory) {
return MgInvoke<mgp_list *>(mgp_list_all_label_property_indices, graph, memory);
}
inline bool create_existence_constraint(mgp_graph *graph, const char *label, const char *property) {
return MgInvoke<int>(mgp_create_existence_constraint, graph, label, property);
}
inline bool drop_existence_constraint(mgp_graph *graph, const char *label, const char *property) {
return MgInvoke<int>(mgp_drop_existence_constraint, graph, label, property);
}
inline mgp_list *list_all_existence_constraints(mgp_graph *graph, mgp_memory *memory) {
return MgInvoke<mgp_list *>(mgp_list_all_existence_constraints, graph, memory);
}
inline bool create_unique_constraint(mgp_graph *memgraph_graph, const char *label, mgp_value *properties) {
return MgInvoke<int>(mgp_create_unique_constraint, memgraph_graph, label, properties);
}
inline bool drop_unique_constraint(mgp_graph *memgraph_graph, const char *label, mgp_value *properties) {
return MgInvoke<int>(mgp_drop_unique_constraint, memgraph_graph, label, properties);
}
inline mgp_list *list_all_unique_constraints(mgp_graph *graph, mgp_memory *memory) {
return MgInvoke<mgp_list *>(mgp_list_all_unique_constraints, graph, memory);
}
// mgp_graph
inline bool graph_is_transactional(mgp_graph *graph) { return MgInvoke<int>(mgp_graph_is_transactional, graph); }
inline bool graph_is_mutable(mgp_graph *graph) { return MgInvoke<int>(mgp_graph_is_mutable, graph); }
inline mgp_vertex *graph_create_vertex(mgp_graph *graph, mgp_memory *memory) {
return MgInvoke<mgp_vertex *>(mgp_graph_create_vertex, graph, memory);
}
inline void graph_delete_vertex(mgp_graph *graph, mgp_vertex *vertex) {
MgInvokeVoid(mgp_graph_delete_vertex, graph, vertex);
}
inline void graph_detach_delete_vertex(mgp_graph *graph, mgp_vertex *vertex) {
MgInvokeVoid(mgp_graph_detach_delete_vertex, graph, vertex);
}
inline mgp_edge *graph_create_edge(mgp_graph *graph, mgp_vertex *from, mgp_vertex *to, mgp_edge_type type,
mgp_memory *memory) {
return MgInvoke<mgp_edge *>(mgp_graph_create_edge, graph, from, to, type, memory);
}
inline mgp_edge *graph_edge_set_from(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_vertex *new_from,
mgp_memory *memory) {
return MgInvoke<mgp_edge *>(mgp_graph_edge_set_from, graph, e, new_from, memory);
}
inline mgp_edge *graph_edge_set_to(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_vertex *new_to,
mgp_memory *memory) {
return MgInvoke<mgp_edge *>(mgp_graph_edge_set_to, graph, e, new_to, memory);
}
inline mgp_edge *graph_edge_change_type(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_edge_type new_type,
mgp_memory *memory) {
return MgInvoke<mgp_edge *>(mgp_graph_edge_change_type, graph, e, new_type, memory);
}
inline void graph_delete_edge(mgp_graph *graph, mgp_edge *edge) { MgInvokeVoid(mgp_graph_delete_edge, graph, edge); }
inline mgp_vertex *graph_get_vertex_by_id(mgp_graph *g, mgp_vertex_id id, mgp_memory *memory) {
return MgInvoke<mgp_vertex *>(mgp_graph_get_vertex_by_id, g, id, memory);
}
inline bool graph_has_text_index(mgp_graph *graph, const char *index_name) {
return MgInvoke<int>(mgp_graph_has_text_index, graph, index_name);
}
inline mgp_map *graph_search_text_index(mgp_graph *graph, const char *index_name, const char *search_query,
text_search_mode search_mode, mgp_memory *memory) {
return MgInvoke<mgp_map *>(mgp_graph_search_text_index, graph, index_name, search_query, search_mode, memory);
}
inline mgp_map *graph_aggregate_over_text_index(mgp_graph *graph, const char *index_name, const char *search_query,
const char *aggregation_query, mgp_memory *memory) {
return MgInvoke<mgp_map *>(mgp_graph_aggregate_over_text_index, graph, index_name, search_query, aggregation_query,
memory);
}
inline mgp_vertices_iterator *graph_iter_vertices(mgp_graph *g, mgp_memory *memory) {
return MgInvoke<mgp_vertices_iterator *>(mgp_graph_iter_vertices, g, memory);
}
// mgp_vertices_iterator
inline void vertices_iterator_destroy(mgp_vertices_iterator *it) { mgp_vertices_iterator_destroy(it); }
inline mgp_vertex *vertices_iterator_get(mgp_vertices_iterator *it) {
return MgInvoke<mgp_vertex *>(mgp_vertices_iterator_get, it);
}
inline mgp_vertex *vertices_iterator_next(mgp_vertices_iterator *it) {
return MgInvoke<mgp_vertex *>(mgp_vertices_iterator_next, it);
}
// mgp_edges_iterator
inline void edges_iterator_destroy(mgp_edges_iterator *it) { mgp_edges_iterator_destroy(it); }
inline mgp_edge *edges_iterator_get(mgp_edges_iterator *it) { return MgInvoke<mgp_edge *>(mgp_edges_iterator_get, it); }
inline mgp_edge *edges_iterator_next(mgp_edges_iterator *it) {
return MgInvoke<mgp_edge *>(mgp_edges_iterator_next, it);
}
// mgp_properties_iterator
inline void properties_iterator_destroy(mgp_properties_iterator *it) { mgp_properties_iterator_destroy(it); }
inline mgp_property *properties_iterator_get(mgp_properties_iterator *it) {
return MgInvoke<mgp_property *>(mgp_properties_iterator_get, it);
}
inline mgp_property *properties_iterator_next(mgp_properties_iterator *it) {
return MgInvoke<mgp_property *>(mgp_properties_iterator_next, it);
}
// Container {mgp_list, mgp_map} methods
// mgp_list
inline mgp_list *list_make_empty(size_t capacity, mgp_memory *memory) {
return MgInvoke<mgp_list *>(mgp_list_make_empty, capacity, memory);
}
inline mgp_list *list_copy(mgp_list *list, mgp_memory *memory) {
return MgInvoke<mgp_list *>(mgp_list_copy, list, memory);
}
inline void list_destroy(mgp_list *list) { mgp_list_destroy(list); }
inline bool list_contains_deleted(mgp_list *list) { return MgInvoke<int>(mgp_list_contains_deleted, list); }
inline void list_append(mgp_list *list, mgp_value *val) { MgInvokeVoid(mgp_list_append, list, val); }
inline void list_append_extend(mgp_list *list, mgp_value *val) { MgInvokeVoid(mgp_list_append_extend, list, val); }
inline size_t list_size(mgp_list *list) { return MgInvoke<size_t>(mgp_list_size, list); }
inline size_t list_capacity(mgp_list *list) { return MgInvoke<size_t>(mgp_list_capacity, list); }
inline mgp_value *list_at(mgp_list *list, size_t index) { return MgInvoke<mgp_value *>(mgp_list_at, list, index); }
// mgp_map
inline mgp_map *map_make_empty(mgp_memory *memory) { return MgInvoke<mgp_map *>(mgp_map_make_empty, memory); }
inline mgp_map *map_copy(mgp_map *map, mgp_memory *memory) { return MgInvoke<mgp_map *>(mgp_map_copy, map, memory); }
inline void map_destroy(mgp_map *map) { mgp_map_destroy(map); }
inline bool map_contains_deleted(mgp_map *map) { return MgInvoke<int>(mgp_map_contains_deleted, map); }
inline void map_insert(mgp_map *map, const char *key, mgp_value *value) {
MgInvokeVoid(mgp_map_insert, map, key, value);
}
inline void map_update(mgp_map *map, const char *key, mgp_value *value) {
MgInvokeVoid(mgp_map_update, map, key, value);
}
inline void map_erase(mgp_map *map, const char *key) { MgInvokeVoid(mgp_map_erase, map, key); }
inline size_t map_size(mgp_map *map) { return MgInvoke<size_t>(mgp_map_size, map); }
inline mgp_value *map_at(mgp_map *map, const char *key) { return MgInvoke<mgp_value *>(mgp_map_at, map, key); }
inline bool key_exists(mgp_map *map, const char *key) { return MgInvoke<int>(mgp_key_exists, map, key); }
inline const char *map_item_key(mgp_map_item *item) { return MgInvoke<const char *>(mgp_map_item_key, item); }
inline mgp_value *map_item_value(mgp_map_item *item) { return MgInvoke<mgp_value *>(mgp_map_item_value, item); }
inline mgp_map_items_iterator *map_iter_items(mgp_map *map, mgp_memory *memory) {
return MgInvoke<mgp_map_items_iterator *>(mgp_map_iter_items, map, memory);
}
inline void map_items_iterator_destroy(mgp_map_items_iterator *it) { mgp_map_items_iterator_destroy(it); }
inline mgp_map_item *map_items_iterator_get(mgp_map_items_iterator *it) {
return MgInvoke<mgp_map_item *>(mgp_map_items_iterator_get, it);
}
inline mgp_map_item *map_items_iterator_next(mgp_map_items_iterator *it) {
return MgInvoke<mgp_map_item *>(mgp_map_items_iterator_next, it);
}
// mgp_vertex
inline mgp_vertex_id vertex_get_id(mgp_vertex *v) { return MgInvoke<mgp_vertex_id>(mgp_vertex_get_id, v); }
inline size_t vertex_get_in_degree(mgp_vertex *v) { return MgInvoke<size_t>(mgp_vertex_get_in_degree, v); }
inline size_t vertex_get_out_degree(mgp_vertex *v) { return MgInvoke<size_t>(mgp_vertex_get_out_degree, v); }
inline mgp_vertex *vertex_copy(mgp_vertex *v, mgp_memory *memory) {
return MgInvoke<mgp_vertex *>(mgp_vertex_copy, v, memory);
}
inline void vertex_destroy(mgp_vertex *v) { mgp_vertex_destroy(v); }
inline bool vertex_is_deleted(mgp_vertex *v) { return MgInvoke<int>(mgp_vertex_is_deleted, v); }
inline bool vertex_equal(mgp_vertex *v1, mgp_vertex *v2) { return MgInvoke<int>(mgp_vertex_equal, v1, v2); }
inline size_t vertex_labels_count(mgp_vertex *v) { return MgInvoke<size_t>(mgp_vertex_labels_count, v); }
inline mgp_label vertex_label_at(mgp_vertex *v, size_t index) {
return MgInvoke<mgp_label>(mgp_vertex_label_at, v, index);
}
inline bool vertex_has_label(mgp_vertex *v, mgp_label label) { return MgInvoke<int>(mgp_vertex_has_label, v, label); }
inline bool vertex_has_label_named(mgp_vertex *v, const char *label_name) {
return MgInvoke<int>(mgp_vertex_has_label_named, v, label_name);
}
inline void vertex_add_label(mgp_vertex *vertex, mgp_label label) { MgInvokeVoid(mgp_vertex_add_label, vertex, label); }
inline void vertex_remove_label(mgp_vertex *vertex, mgp_label label) {
MgInvokeVoid(mgp_vertex_remove_label, vertex, label);
}
inline mgp_value *vertex_get_property(mgp_vertex *v, const char *property_name, mgp_memory *memory) {
return MgInvoke<mgp_value *>(mgp_vertex_get_property, v, property_name, memory);
}
inline void vertex_set_property(mgp_vertex *v, const char *property_name, mgp_value *property_value) {
MgInvokeVoid(mgp_vertex_set_property, v, property_name, property_value);
}
inline void vertex_set_properties(mgp_vertex *v, struct mgp_map *properties) {
MgInvokeVoid(mgp_vertex_set_properties, v, properties);
}
inline mgp_properties_iterator *vertex_iter_properties(mgp_vertex *v, mgp_memory *memory) {
return MgInvoke<mgp_properties_iterator *>(mgp_vertex_iter_properties, v, memory);
}
inline mgp_edges_iterator *vertex_iter_in_edges(mgp_vertex *v, mgp_memory *memory) {
return MgInvoke<mgp_edges_iterator *>(mgp_vertex_iter_in_edges, v, memory);
}
inline mgp_edges_iterator *vertex_iter_out_edges(mgp_vertex *v, mgp_memory *memory) {
return MgInvoke<mgp_edges_iterator *>(mgp_vertex_iter_out_edges, v, memory);
}
// mgp_edge
inline mgp_edge_id edge_get_id(mgp_edge *e) { return MgInvoke<mgp_edge_id>(mgp_edge_get_id, e); }
inline mgp_edge *edge_copy(mgp_edge *e, mgp_memory *memory) { return MgInvoke<mgp_edge *>(mgp_edge_copy, e, memory); }
inline void edge_destroy(mgp_edge *e) { mgp_edge_destroy(e); }
inline bool edge_is_deleted(mgp_edge *e) { return MgInvoke<int>(mgp_edge_is_deleted, e); }
inline bool edge_equal(mgp_edge *e1, mgp_edge *e2) { return MgInvoke<int>(mgp_edge_equal, e1, e2); }
inline mgp_edge_type edge_get_type(mgp_edge *e) { return MgInvoke<mgp_edge_type>(mgp_edge_get_type, e); }
inline mgp_vertex *edge_get_from(mgp_edge *e) { return MgInvoke<mgp_vertex *>(mgp_edge_get_from, e); }
inline mgp_vertex *edge_get_to(mgp_edge *e) { return MgInvoke<mgp_vertex *>(mgp_edge_get_to, e); }
inline mgp_value *edge_get_property(mgp_edge *e, const char *property_name, mgp_memory *memory) {
return MgInvoke<mgp_value *>(mgp_edge_get_property, e, property_name, memory);
}
inline void edge_set_property(mgp_edge *e, const char *property_name, mgp_value *property_value) {
MgInvokeVoid(mgp_edge_set_property, e, property_name, property_value);
}
inline void edge_set_properties(mgp_edge *e, struct mgp_map *properties) {
MgInvokeVoid(mgp_edge_set_properties, e, properties);
}
inline mgp_properties_iterator *edge_iter_properties(mgp_edge *e, mgp_memory *memory) {
return MgInvoke<mgp_properties_iterator *>(mgp_edge_iter_properties, e, memory);
}
// mgp_path
inline mgp_path *path_make_with_start(mgp_vertex *vertex, mgp_memory *memory) {
return MgInvoke<mgp_path *>(mgp_path_make_with_start, vertex, memory);
}
inline mgp_path *path_copy(mgp_path *path, mgp_memory *memory) {
return MgInvoke<mgp_path *>(mgp_path_copy, path, memory);
}
inline void path_destroy(mgp_path *path) { mgp_path_destroy(path); }
inline bool path_contains_deleted(mgp_path *path) { return MgInvoke<int>(mgp_path_contains_deleted, path); }
inline void path_expand(mgp_path *path, mgp_edge *edge) { MgInvokeVoid(mgp_path_expand, path, edge); }
inline void path_pop(mgp_path *path) { MgInvokeVoid(mgp_path_pop, path); }
inline size_t path_size(mgp_path *path) { return MgInvoke<size_t>(mgp_path_size, path); }
inline mgp_vertex *path_vertex_at(mgp_path *path, size_t index) {
return MgInvoke<mgp_vertex *>(mgp_path_vertex_at, path, index);
}
inline mgp_edge *path_edge_at(mgp_path *path, size_t index) {
return MgInvoke<mgp_edge *>(mgp_path_edge_at, path, index);
}
inline bool path_equal(mgp_path *p1, mgp_path *p2) { return MgInvoke<int>(mgp_path_equal, p1, p2); }
// Temporal type {mgp_date, mgp_local_time, mgp_local_date_time, mgp_duration} methods
// mgp_date
inline mgp_date *date_from_string(const char *string, mgp_memory *memory) {
return MgInvoke<mgp_date *>(mgp_date_from_string, string, memory);
}
inline mgp_date *date_from_parameters(mgp_date_parameters *parameters, mgp_memory *memory) {
return MgInvoke<mgp_date *>(mgp_date_from_parameters, parameters, memory);
}
inline mgp_date *date_copy(mgp_date *date, mgp_memory *memory) {
return MgInvoke<mgp_date *>(mgp_date_copy, date, memory);
}
inline void date_destroy(mgp_date *date) { mgp_date_destroy(date); }
inline bool date_equal(mgp_date *first, mgp_date *second) { return MgInvoke<int>(mgp_date_equal, first, second); }
inline int date_get_year(mgp_date *date) { return MgInvoke<int>(mgp_date_get_year, date); }
inline int date_get_month(mgp_date *date) { return MgInvoke<int>(mgp_date_get_month, date); }
inline int date_get_day(mgp_date *date) { return MgInvoke<int>(mgp_date_get_day, date); }
inline int64_t date_timestamp(mgp_date *date) { return MgInvoke<int64_t>(mgp_date_timestamp, date); }
inline mgp_date *date_now(mgp_memory *memory) { return MgInvoke<mgp_date *>(mgp_date_now, memory); }
inline mgp_date *date_add_duration(mgp_date *date, mgp_duration *dur, mgp_memory *memory) {
return MgInvoke<mgp_date *>(mgp_date_add_duration, date, dur, memory);
}
inline mgp_date *date_sub_duration(mgp_date *date, mgp_duration *dur, mgp_memory *memory) {
return MgInvoke<mgp_date *>(mgp_date_sub_duration, date, dur, memory);
}
inline mgp_duration *date_diff(mgp_date *first, mgp_date *second, mgp_memory *memory) {
return MgInvoke<mgp_duration *>(mgp_date_diff, first, second, memory);
}
// mgp_local_time
inline mgp_local_time *local_time_from_string(const char *string, mgp_memory *memory) {
return MgInvoke<mgp_local_time *>(mgp_local_time_from_string, string, memory);
}
inline mgp_local_time *local_time_from_parameters(mgp_local_time_parameters *parameters, mgp_memory *memory) {
return MgInvoke<mgp_local_time *>(mgp_local_time_from_parameters, parameters, memory);
}
inline mgp_local_time *local_time_copy(mgp_local_time *local_time, mgp_memory *memory) {
return MgInvoke<mgp_local_time *>(mgp_local_time_copy, local_time, memory);
}
inline void local_time_destroy(mgp_local_time *local_time) { mgp_local_time_destroy(local_time); }
inline bool local_time_equal(mgp_local_time *first, mgp_local_time *second) {
return MgInvoke<int>(mgp_local_time_equal, first, second);
}
inline int local_time_get_hour(mgp_local_time *local_time) {
return MgInvoke<int>(mgp_local_time_get_hour, local_time);
}
inline int local_time_get_minute(mgp_local_time *local_time) {
return MgInvoke<int>(mgp_local_time_get_minute, local_time);
}
inline int local_time_get_second(mgp_local_time *local_time) {
return MgInvoke<int>(mgp_local_time_get_second, local_time);
}
inline int local_time_get_millisecond(mgp_local_time *local_time) {
return MgInvoke<int>(mgp_local_time_get_millisecond, local_time);
}
inline int local_time_get_microsecond(mgp_local_time *local_time) {
return MgInvoke<int>(mgp_local_time_get_microsecond, local_time);
}
inline int64_t local_time_timestamp(mgp_local_time *local_time) {
return MgInvoke<int64_t>(mgp_local_time_timestamp, local_time);
}
inline mgp_local_time *local_time_now(mgp_memory *memory) {
return MgInvoke<mgp_local_time *>(mgp_local_time_now, memory);
}
inline mgp_local_time *local_time_add_duration(mgp_local_time *local_time, mgp_duration *dur, mgp_memory *memory) {
return MgInvoke<mgp_local_time *>(mgp_local_time_add_duration, local_time, dur, memory);
}
inline mgp_local_time *local_time_sub_duration(mgp_local_time *local_time, mgp_duration *dur, mgp_memory *memory) {
return MgInvoke<mgp_local_time *>(mgp_local_time_sub_duration, local_time, dur, memory);
}
inline mgp_duration *local_time_diff(mgp_local_time *first, mgp_local_time *second, mgp_memory *memory) {
return MgInvoke<mgp_duration *>(mgp_local_time_diff, first, second, memory);
}
// mgp_local_date_time
inline mgp_local_date_time *local_date_time_from_string(const char *string, mgp_memory *memory) {
return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_from_string, string, memory);
}
inline mgp_local_date_time *local_date_time_from_parameters(mgp_local_date_time_parameters *parameters,
mgp_memory *memory) {
return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_from_parameters, parameters, memory);
}
inline mgp_local_date_time *local_date_time_copy(mgp_local_date_time *local_date_time, mgp_memory *memory) {
return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_copy, local_date_time, memory);
}
inline void local_date_time_destroy(mgp_local_date_time *local_date_time) {
mgp_local_date_time_destroy(local_date_time);
}
inline bool local_date_time_equal(mgp_local_date_time *first, mgp_local_date_time *second) {
return MgInvoke<int>(mgp_local_date_time_equal, first, second);
}
inline int local_date_time_get_year(mgp_local_date_time *local_date_time) {
return MgInvoke<int>(mgp_local_date_time_get_year, local_date_time);
}
inline int local_date_time_get_month(mgp_local_date_time *local_date_time) {
return MgInvoke<int>(mgp_local_date_time_get_month, local_date_time);
}
inline int local_date_time_get_day(mgp_local_date_time *local_date_time) {
return MgInvoke<int>(mgp_local_date_time_get_day, local_date_time);
}
inline int local_date_time_get_hour(mgp_local_date_time *local_date_time) {
return MgInvoke<int>(mgp_local_date_time_get_hour, local_date_time);
}
inline int local_date_time_get_minute(mgp_local_date_time *local_date_time) {
return MgInvoke<int>(mgp_local_date_time_get_minute, local_date_time);
}
inline int local_date_time_get_second(mgp_local_date_time *local_date_time) {
return MgInvoke<int>(mgp_local_date_time_get_second, local_date_time);
}
inline int local_date_time_get_millisecond(mgp_local_date_time *local_date_time) {
return MgInvoke<int>(mgp_local_date_time_get_millisecond, local_date_time);
}
inline int local_date_time_get_microsecond(mgp_local_date_time *local_date_time) {
return MgInvoke<int>(mgp_local_date_time_get_microsecond, local_date_time);
}
inline int64_t local_date_time_timestamp(mgp_local_date_time *local_date_time) {
return MgInvoke<int64_t>(mgp_local_date_time_timestamp, local_date_time);
}
inline mgp_local_date_time *local_date_time_now(mgp_memory *memory) {
return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_now, memory);
}
inline mgp_local_date_time *local_date_time_add_duration(mgp_local_date_time *local_date_time, mgp_duration *dur,
mgp_memory *memory) {
return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_add_duration, local_date_time, dur, memory);
}
inline mgp_local_date_time *local_date_time_sub_duration(mgp_local_date_time *local_date_time, mgp_duration *dur,
mgp_memory *memory) {
return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_sub_duration, local_date_time, dur, memory);
}
inline mgp_duration *local_date_time_diff(mgp_local_date_time *first, mgp_local_date_time *second, mgp_memory *memory) {
return MgInvoke<mgp_duration *>(mgp_local_date_time_diff, first, second, memory);
}
// mgp_duration
inline mgp_duration *duration_from_string(const char *string, mgp_memory *memory) {
return MgInvoke<mgp_duration *>(mgp_duration_from_string, string, memory);
}
inline mgp_duration *duration_from_parameters(mgp_duration_parameters *parameters, mgp_memory *memory) {
return MgInvoke<mgp_duration *>(mgp_duration_from_parameters, parameters, memory);
}
inline mgp_duration *duration_from_microseconds(int64_t microseconds, mgp_memory *memory) {
return MgInvoke<mgp_duration *>(mgp_duration_from_microseconds, microseconds, memory);
}
inline mgp_duration *duration_copy(mgp_duration *duration, mgp_memory *memory) {
return MgInvoke<mgp_duration *>(mgp_duration_copy, duration, memory);
}
inline void duration_destroy(mgp_duration *duration) { mgp_duration_destroy(duration); }
inline int64_t duration_get_microseconds(mgp_duration *duration) {
return MgInvoke<int64_t>(mgp_duration_get_microseconds, duration);
}
inline bool duration_equal(mgp_duration *first, mgp_duration *second) {
return MgInvoke<int>(mgp_duration_equal, first, second);
}
inline mgp_duration *duration_neg(mgp_duration *duration, mgp_memory *memory) {
return MgInvoke<mgp_duration *>(mgp_duration_neg, duration, memory);
}
inline mgp_duration *duration_add(mgp_duration *first, mgp_duration *second, mgp_memory *memory) {
return MgInvoke<mgp_duration *>(mgp_duration_add, first, second, memory);
}
inline mgp_duration *duration_sub(mgp_duration *first, mgp_duration *second, mgp_memory *memory) {
return MgInvoke<mgp_duration *>(mgp_duration_sub, first, second, memory);
}
// Procedure
inline mgp_proc *module_add_read_procedure(mgp_module *module, const char *name, mgp_proc_cb cb) {
return MgInvoke<mgp_proc *>(mgp_module_add_read_procedure, module, name, cb);
}
inline mgp_proc *module_add_write_procedure(mgp_module *module, const char *name, mgp_proc_cb cb) {
return MgInvoke<mgp_proc *>(mgp_module_add_write_procedure, module, name, cb);
}
inline mgp_proc *module_add_batch_read_procedure(mgp_module *module, const char *name, mgp_proc_cb cb,
mgp_proc_initializer initializer, mgp_proc_cleanup cleanup) {
return MgInvoke<mgp_proc *>(mgp_module_add_batch_read_procedure, module, name, cb, initializer, cleanup);
}
inline mgp_proc *module_add_batch_write_procedure(mgp_module *module, const char *name, mgp_proc_cb cb,
mgp_proc_initializer initializer, mgp_proc_cleanup cleanup) {
return MgInvoke<mgp_proc *>(mgp_module_add_batch_write_procedure, module, name, cb, initializer, cleanup);
}
inline void proc_add_arg(mgp_proc *proc, const char *name, mgp_type *type) {
MgInvokeVoid(mgp_proc_add_arg, proc, name, type);
}
inline void proc_add_opt_arg(mgp_proc *proc, const char *name, mgp_type *type, mgp_value *default_value) {
MgInvokeVoid(mgp_proc_add_opt_arg, proc, name, type, default_value);
}
inline void proc_add_result(mgp_proc *proc, const char *name, mgp_type *type) {
MgInvokeVoid(mgp_proc_add_result, proc, name, type);
}
inline void proc_add_deprecated_result(mgp_proc *proc, const char *name, mgp_type *type) {
MgInvokeVoid(mgp_proc_add_deprecated_result, proc, name, type);
}
inline int must_abort(mgp_graph *graph) { return mgp_must_abort(graph); }
// mgp_result
inline void result_set_error_msg(mgp_result *res, const char *error_msg) {
MgInvokeVoid(mgp_result_set_error_msg, res, error_msg);
}
inline mgp_result_record *result_new_record(mgp_result *res) {
return MgInvoke<mgp_result_record *>(mgp_result_new_record, res);
}
inline void result_record_insert(mgp_result_record *record, const char *field_name, mgp_value *val) {
MgInvokeVoid(mgp_result_record_insert, record, field_name, val);
}
// Function
inline mgp_func *module_add_function(mgp_module *module, const char *name, mgp_func_cb cb) {
return MgInvoke<mgp_func *>(mgp_module_add_function, module, name, cb);
}
inline void func_add_arg(mgp_func *func, const char *name, mgp_type *type) {
MgInvokeVoid(mgp_func_add_arg, func, name, type);
}
inline void func_add_opt_arg(mgp_func *func, const char *name, mgp_type *type, mgp_value *default_value) {
MgInvokeVoid(mgp_func_add_opt_arg, func, name, type, default_value);
}
inline void func_result_set_error_msg(mgp_func_result *res, const char *msg, mgp_memory *memory) {
MgInvokeVoid(mgp_func_result_set_error_msg, res, msg, memory);
}
inline void func_result_set_value(mgp_func_result *res, mgp_value *value, mgp_memory *memory) {
MgInvokeVoid(mgp_func_result_set_value, res, value, memory);
}
} // namespace mgp
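// A minimal sketch of how these wrappers are typically combined (illustrative;
// `callback` is a hypothetical mgp_proc_cb, and `value_make_int` is assumed to
// be among the value constructors defined earlier in this header):
//
//   void callback(mgp_list *args, mgp_graph *graph, mgp_result *result, mgp_memory *memory) {
//     mgp_result_record *record = mgp::result_new_record(result);
//     mgp::result_record_insert(record, "value", mgp::value_make_int(42, memory));
//   }
//
//   extern "C" int mgp_init_module(mgp_module *module, mgp_memory *memory) {
//     mgp_proc *proc = mgp::module_add_read_procedure(module, "answer", callback);
//     mgp::proc_add_result(proc, "value", mgp::type_int());
//     return 0;
//   }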

@ -1,350 +0,0 @@
import typing
from enum import Enum
import networkx as nx
NX_LABEL_ATTR = "labels"
NX_TYPE_ATTR = "type"
SOURCE_TYPE_KAFKA = "SOURCE_TYPE_KAFKA"
SOURCE_TYPE_PULSAR = "SOURCE_TYPE_PULSAR"
"""
This module provides helpers for the mock Python API, much like _mgp.py does for mgp.py.
"""
class InvalidArgumentError(Exception):
"""
Signals that some of the arguments have invalid values.
"""
pass
class ImmutableObjectError(Exception):
pass
class LogicErrorError(Exception):
pass
class DeletedObjectError(Exception):
pass
class EdgeConstants(Enum):
I_START = 0
I_END = 1
I_KEY = 2
class Graph:
"""Wrapper around a NetworkX MultiDiGraph instance."""
__slots__ = ("nx", "_highest_vertex_id", "_highest_edge_id", "_valid")
def __init__(self, graph: nx.MultiDiGraph) -> None:
if not isinstance(graph, nx.MultiDiGraph):
raise TypeError(f"Expected 'networkx.classes.multidigraph.MultiDiGraph', got '{type(graph)}'")
self.nx = graph
self._highest_vertex_id = None
self._highest_edge_id = None
self._valid = True
@property
def vertex_ids(self):
return self.nx.nodes
def vertex_is_isolate(self, vertex_id: int) -> bool:
return nx.is_isolate(self.nx, vertex_id)
@property
def vertices(self):
return (Vertex(node_id, self) for node_id in self.nx.nodes)
def has_node(self, node_id):
return self.nx.has_node(node_id)
@property
def edges(self):
return self.nx.edges
def is_valid(self) -> bool:
return self._valid
def get_vertex_by_id(self, vertex_id: int) -> "Vertex":
return Vertex(vertex_id, self)
def invalidate(self):
self._valid = False
def is_immutable(self) -> bool:
return nx.is_frozen(self.nx)
def make_immutable(self):
self.nx = nx.freeze(self.nx)
def _new_vertex_id(self):
if self._highest_vertex_id is None:
self._highest_vertex_id = max(vertex_id for vertex_id in self.nx.nodes)
return self._highest_vertex_id + 1
def _new_edge_id(self):
if self._highest_edge_id is None:
self._highest_edge_id = max(edge[EdgeConstants.I_KEY.value] for edge in self.nx.edges(keys=True))
return self._highest_edge_id + 1
def create_vertex(self) -> "Vertex":
vertex_id = self._new_vertex_id()
self.nx.add_node(vertex_id)
self._highest_vertex_id = vertex_id
return Vertex(vertex_id, self)
def create_edge(self, from_vertex: "Vertex", to_vertex: "Vertex", edge_type: str) -> "Edge":
if from_vertex.is_deleted() or to_vertex.is_deleted():
raise DeletedObjectError("Accessing deleted object.")
edge_id = self._new_edge_id()
from_id = from_vertex.id
to_id = to_vertex.id
self.nx.add_edge(from_id, to_id, key=edge_id, type=edge_type)
self._highest_edge_id = edge_id
return Edge((from_id, to_id, edge_id), self)
def delete_vertex(self, vertex_id: int):
self.nx.remove_node(vertex_id)
def delete_edge(self, from_vertex_id: int, to_vertex_id: int, edge_id: int):
self.nx.remove_edge(from_vertex_id, to_vertex_id, edge_id)
@property
def highest_vertex_id(self) -> int:
if self._highest_vertex_id is None:
self._highest_vertex_id = max(vertex_id for vertex_id in self.nx.nodes) + 1
return self._highest_vertex_id
@property
def highest_edge_id(self) -> int:
if self._highest_edge_id is None:
self._highest_edge_id = max(edge[EdgeConstants.I_KEY.value] for edge in self.nx.edges(keys=True))
return self._highest_edge_id + 1
class Vertex:
"""Represents a graph vertex."""
__slots__ = ("_id", "_graph")
def __init__(self, id: int, graph: Graph) -> None:
if not isinstance(id, int):
raise TypeError(f"Expected 'int', got '{type(id)}'")
if not isinstance(graph, Graph):
raise TypeError(f"Expected '_mgp_mock.Graph', got '{type(graph)}'")
if not graph.nx.has_node(id):
raise IndexError(f"Unable to find vertex with ID {id}.")
self._id = id
self._graph = graph
def is_valid(self) -> bool:
return self._graph.is_valid()
def is_deleted(self) -> bool:
return not self._graph.nx.has_node(self._id) and self._id <= self._graph.highest_vertex_id
@property
def underlying_graph(self) -> Graph:
return self._graph
def underlying_graph_is_mutable(self) -> bool:
return not nx.is_frozen(self._graph.nx)
@property
def labels(self) -> typing.List[int]:
return self._graph.nx.nodes[self._id][NX_LABEL_ATTR].split(":")
def add_label(self, label: str) -> None:
if nx.is_frozen(self._graph.nx):
raise ImmutableObjectError("Cannot modify immutable object.")
self._graph.nx.nodes[self._id][NX_LABEL_ATTR] += f":{label}"
    def remove_label(self, label: str) -> None:
        if nx.is_frozen(self._graph.nx):
            raise ImmutableObjectError("Cannot modify immutable object.")
        labels = self._graph.nx.nodes[self._id][NX_LABEL_ATTR]
        if labels == label:  # the vertex's only label
            self._graph.nx.nodes[self._id][NX_LABEL_ATTR] = ""
        elif labels.startswith(f"{label}:"):
            labels = "\n" + labels  # pseudo-string starter
            self._graph.nx.nodes[self._id][NX_LABEL_ATTR] = labels.replace(f"\n{label}:", "")
        elif labels.endswith(f":{label}"):
            labels += "\n"  # pseudo-string terminator
            self._graph.nx.nodes[self._id][NX_LABEL_ATTR] = labels.replace(f":{label}\n", "")
        else:
            self._graph.nx.nodes[self._id][NX_LABEL_ATTR] = labels.replace(f":{label}:", ":")
@property
def id(self) -> int:
return self._id
@property
def properties(self):
return (
(key, value)
for key, value in self._graph.nx.nodes[self._id].items()
if key not in (NX_LABEL_ATTR, NX_TYPE_ATTR)
)
def get_property(self, property_name: str):
return self._graph.nx.nodes[self._id][property_name]
def set_property(self, property_name: str, value: object):
self._graph.nx.nodes[self._id][property_name] = value
@property
def in_edges(self) -> typing.Iterable["Edge"]:
return [Edge(edge, self._graph) for edge in self._graph.nx.in_edges(self._id, keys=True)]
@property
def out_edges(self) -> typing.Iterable["Edge"]:
return [Edge(edge, self._graph) for edge in self._graph.nx.out_edges(self._id, keys=True)]
class Edge:
"""Represents a graph edge."""
__slots__ = ("_edge", "_graph")
def __init__(self, edge: typing.Tuple[int, int, int], graph: Graph) -> None:
if not isinstance(edge, typing.Tuple):
raise TypeError(f"Expected 'Tuple', got '{type(edge)}'")
if not isinstance(graph, Graph):
raise TypeError(f"Expected '_mgp_mock.Graph', got '{type(graph)}'")
if not graph.nx.has_edge(*edge):
raise IndexError(f"Unable to find edge with ID {edge[EdgeConstants.I_KEY.value]}.")
self._edge = edge
self._graph = graph
def is_valid(self) -> bool:
return self._graph.is_valid()
def is_deleted(self) -> bool:
return (
not self._graph.nx.has_edge(*self._edge)
and self._edge[EdgeConstants.I_KEY.value] <= self._graph.highest_edge_id
)
def underlying_graph_is_mutable(self) -> bool:
return not nx.is_frozen(self._graph.nx)
@property
def id(self) -> int:
return self._edge[EdgeConstants.I_KEY.value]
@property
def edge(self) -> typing.Tuple[int, int, int]:
return self._edge
@property
def start_id(self) -> int:
return self._edge[EdgeConstants.I_START.value]
@property
def end_id(self) -> int:
return self._edge[EdgeConstants.I_END.value]
def get_type_name(self):
return self._graph.nx.get_edge_data(*self._edge)[NX_TYPE_ATTR]
def from_vertex(self) -> Vertex:
return Vertex(self.start_id, self._graph)
def to_vertex(self) -> Vertex:
return Vertex(self.end_id, self._graph)
@property
def properties(self):
return (
(key, value)
for key, value in self._graph.nx.edges[self._edge].items()
if key not in (NX_LABEL_ATTR, NX_TYPE_ATTR)
)
def get_property(self, property_name: str):
return self._graph.nx.edges[self._edge][property_name]
def set_property(self, property_name: str, value: object):
self._graph.nx.edges[self._edge][property_name] = value
class Path:
"""Represents a path comprised of `Vertex` and `Edge` instances."""
__slots__ = ("_vertices", "_edges", "_graph")
__create_key = object()
def __init__(self, create_key, vertex_id: int, graph: Graph) -> None:
assert create_key == Path.__create_key, "Path objects must be created using Path.make_with_start"
self._vertices = [vertex_id]
self._edges = []
self._graph = graph
@classmethod
def make_with_start(cls, vertex: Vertex) -> "Path":
if not isinstance(vertex, Vertex):
raise TypeError(f"Expected 'Vertex', got '{type(vertex)}'")
if not isinstance(vertex.underlying_graph, Graph):
raise TypeError(f"Expected '_mgp_mock.Graph', got '{type(vertex.underlying_graph)}'")
if not vertex.underlying_graph.nx.has_node(vertex._id):
raise IndexError(f"Unable to find vertex with ID {vertex._id}.")
return Path(cls.__create_key, vertex._id, vertex.underlying_graph)
def is_valid(self) -> bool:
return self._graph.is_valid()
def underlying_graph_is_mutable(self) -> bool:
return not nx.is_frozen(self._graph.nx)
def expand(self, edge: Edge):
if edge.start_id != self._vertices[-1]:
raise LogicErrorError("Logic error.")
self._vertices.append(edge.end_id)
self._edges.append((edge.start_id, edge.end_id, edge.id))
def pop(self):
if not self._edges:
raise IndexError("Path contains no relationships.")
self._vertices.pop()
self._edges.pop()
def vertex_at(self, index: int) -> Vertex:
return Vertex(self._vertices[index], self._graph)
def edge_at(self, index: int) -> Edge:
return Edge(self._edges[index], self._graph)
def size(self) -> int:
return len(self._edges)

@ -1,108 +0,0 @@
// Copyright 2024 Memgraph Ltd.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source
// License, and you may not use this file except in compliance with the Business Source License.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.
#pragma once
#include <exception>
#include <iostream>
#include <sstream>
#include <string>
namespace mg_exception {
// Instead of writing this utility function, we could have used `fmt::format`, but that's not an ideal option here
// because it would introduce a dependency that would be propagated to the client code (if the exceptions here are
// used). Since the functionality is not complex and the code is not on a critical path, we opted for a pure C++
// solution.
template <typename FirstArg, typename... Args>
std::string StringSerialize(FirstArg &&firstArg, Args &&...args) {
std::stringstream stream;
stream << std::forward<FirstArg>(firstArg);
((stream << " " << args), ...);
return stream.str();
}
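// For instance (illustrative), the call below yields "Invalid ID = 42": the
// first argument is streamed as-is and every subsequent argument is prefixed
// with a single space.
//   std::string message = StringSerialize("Invalid ID =", 42);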
struct UnknownException : public std::exception {
const char *what() const noexcept override { return "Unknown exception!"; }
};
struct NotEnoughMemoryException : public std::exception {
NotEnoughMemoryException()
: message_{
StringSerialize("Not enough memory! For more details please visit", "https://memgr.ph/memory-control")} {}
const char *what() const noexcept override { return message_.c_str(); }
private:
std::string message_;
};
struct AllocationException : public std::exception {
AllocationException()
: message_{StringSerialize("Could not allocate memory. For more details please visit",
"https://memgr.ph/memory-control")} {}
const char *what() const noexcept override { return message_.c_str(); }
private:
std::string message_;
};
struct InsufficientBufferException : public std::exception {
const char *what() const noexcept override { return "Buffer is not sufficient to process procedure!"; }
};
struct OutOfRangeException : public std::exception {
const char *what() const noexcept override { return "Index out of range!"; }
};
struct LogicException : public std::exception {
const char *what() const noexcept override { return "Logic exception, check the procedure signature!"; }
};
struct DeletedObjectException : public std::exception {
const char *what() const noexcept override { return "Object is deleted!"; }
};
struct InvalidArgumentException : public std::exception {
const char *what() const noexcept override { return "Invalid argument!"; }
};
struct InvalidIDException : public std::exception {
InvalidIDException() : message_{"Invalid ID!"} {}
explicit InvalidIDException(std::uint64_t identifier) : message_{StringSerialize("Invalid ID =", identifier)} {}
const char *what() const noexcept override { return message_.c_str(); }
private:
std::string message_;
};
struct KeyAlreadyExistsException : public std::exception {
KeyAlreadyExistsException() : message_{"Key you are trying to set already exists!"} {}
explicit KeyAlreadyExistsException(const std::string &key)
: message_{StringSerialize("Key you are trying to set already exists! KEY = ", key)} {}
const char *what() const noexcept override { return message_.c_str(); }
private:
std::string message_;
};
struct ImmutableObjectException : public std::exception {
const char *what() const noexcept override { return "Object you are trying to change is immutable!"; }
};
struct ValueConversionException : public std::exception {
const char *what() const noexcept override { return "Error in value conversion!"; }
};
struct SerializationException : public std::exception {
const char *what() const noexcept override { return "Error in serialization!"; }
};
} // namespace mg_exception

@ -1,4 +1,4 @@
// Copyright 2024 Memgraph Ltd.
// Copyright 2022 Memgraph Ltd.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source
@ -37,19 +37,12 @@ extern "C" {
/// All functions return an error code that can be used to figure out whether the API call was successful or not. In
/// case of failure, the specific error code can be used to identify the reason for the failure.
MGP_ENUM_CLASS MGP_NODISCARD mgp_error{
MGP_ERROR_NO_ERROR,
MGP_ERROR_UNKNOWN_ERROR,
MGP_ERROR_UNABLE_TO_ALLOCATE,
MGP_ERROR_INSUFFICIENT_BUFFER,
MGP_ERROR_OUT_OF_RANGE,
MGP_ERROR_LOGIC_ERROR,
MGP_ERROR_DELETED_OBJECT,
MGP_ERROR_INVALID_ARGUMENT,
MGP_ERROR_KEY_ALREADY_EXISTS,
MGP_ERROR_IMMUTABLE_OBJECT,
MGP_ERROR_VALUE_CONVERSION,
MGP_ERROR_SERIALIZATION_ERROR,
MGP_ERROR_AUTHORIZATION_ERROR,
MGP_ERROR_NO_ERROR, MGP_ERROR_UNKNOWN_ERROR,
MGP_ERROR_UNABLE_TO_ALLOCATE, MGP_ERROR_INSUFFICIENT_BUFFER,
MGP_ERROR_OUT_OF_RANGE, MGP_ERROR_LOGIC_ERROR,
MGP_ERROR_DELETED_OBJECT, MGP_ERROR_INVALID_ARGUMENT,
MGP_ERROR_KEY_ALREADY_EXISTS, MGP_ERROR_IMMUTABLE_OBJECT,
MGP_ERROR_VALUE_CONVERSION, MGP_ERROR_SERIALIZATION_ERROR,
};
///@}
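/// A minimal sketch of the error-handling pattern described above (illustrative,
/// using the mgp_list API declared later in this header):
///   struct mgp_list *list = NULL;
///   if (mgp_list_make_empty(8, memory, &list) != MGP_ERROR_NO_ERROR) {
///     /* the call failed; report the error and bail out */
///   }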
@ -111,22 +104,6 @@ enum mgp_error mgp_global_aligned_alloc(size_t size_in_bytes, size_t alignment,
/// The behavior is undefined if `ptr` is not a value returned from a prior
/// mgp_global_alloc() or mgp_global_aligned_alloc().
void mgp_global_free(void *p);
/// State of the graph database.
struct mgp_graph;
/// Allocations are tracked only for the master thread. If new threads are spawned
/// inside a procedure, calling the following function
/// starts tracking allocations for the current thread as well. This
/// is important if you need the query memory limit, or a per-procedure memory
/// limit, to apply to the given procedure.
enum mgp_error mgp_track_current_thread_allocations(struct mgp_graph *graph);
/// Once allocations are tracked for the current thread, you need to stop tracking
/// them before the thread finishes execution or is detached.
/// Otherwise the system may slow down due to unnecessary tracking of
/// allocations.
enum mgp_error mgp_untrack_current_thread_allocations(struct mgp_graph *graph);
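/// A minimal sketch of the pattern described above (illustrative; the thread
/// body is hypothetical):
///   std::thread worker([&] {
///     mgp_track_current_thread_allocations(graph);
///     /* ... allocating work ... */
///     mgp_untrack_current_thread_allocations(graph);
///   });
///   worker.join();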
///@}
/// @name Operations on mgp_value
@ -187,8 +164,6 @@ enum mgp_value_type {
MGP_VALUE_TYPE_DURATION,
};
enum mgp_error mgp_value_copy(struct mgp_value *val, struct mgp_memory *memory, struct mgp_value **result);
/// Free the memory used by the given mgp_value instance.
void mgp_value_destroy(struct mgp_value *val);
@ -424,14 +399,9 @@ enum mgp_error mgp_value_get_duration(struct mgp_value *val, struct mgp_duration
/// mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE is returned if unable to allocate a mgp_list.
enum mgp_error mgp_list_make_empty(size_t capacity, struct mgp_memory *memory, struct mgp_list **result);
enum mgp_error mgp_list_copy(struct mgp_list *list, struct mgp_memory *memory, struct mgp_list **result);
/// Free the memory used by the given mgp_list and contained elements.
void mgp_list_destroy(struct mgp_list *list);
/// Return whether the given mgp_list contains any deleted values.
enum mgp_error mgp_list_contains_deleted(struct mgp_list *list, int *result);
/// Append a copy of mgp_value to mgp_list if capacity allows.
/// The list copies the given value and therefore does not take ownership of the
/// original value. You still need to call mgp_value_destroy to free the
@ -467,14 +437,9 @@ enum mgp_error mgp_list_at(struct mgp_list *list, size_t index, struct mgp_value
/// mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE is returned if unable to allocate a mgp_map.
enum mgp_error mgp_map_make_empty(struct mgp_memory *memory, struct mgp_map **result);
enum mgp_error mgp_map_copy(struct mgp_map *map, struct mgp_memory *memory, struct mgp_map **result);
/// Free the memory used by the given mgp_map and contained items.
void mgp_map_destroy(struct mgp_map *map);
/// Return whether the given mgp_map contains any deleted values.
enum mgp_error mgp_map_contains_deleted(struct mgp_map *map, int *result);
/// Insert a new mapping from a NULL terminated character string to a value.
/// If a mapping with the same key already exists, it is *not* replaced.
/// In case of insertion, both the string and the value are copied into the map.
@ -484,18 +449,6 @@ enum mgp_error mgp_map_contains_deleted(struct mgp_map *map, int *result);
/// Return mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS if a previous mapping already exists.
enum mgp_error mgp_map_insert(struct mgp_map *map, const char *key, struct mgp_value *value);
/// Insert a mapping from a NULL terminated character string to a value.
/// If a mapping with the same key already exists, it is replaced.
/// In case of update, both the string and the value are copied into the map.
/// Therefore, the map does not take ownership of the original key nor value, so
/// you still need to free their memory explicitly.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate for insertion.
enum mgp_error mgp_map_update(struct mgp_map *map, const char *key, struct mgp_value *value);
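/// A minimal sketch of the copy semantics described above (illustrative):
///   mgp_map_insert(map, "count", val);  /* MGP_ERROR_KEY_ALREADY_EXISTS if "count" is taken */
///   mgp_map_update(map, "count", val);  /* replaces any existing mapping */
///   mgp_value_destroy(val);             /* the map holds its own copy of `val` */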
/// Erase a mapping by key.
/// If the key doesn't exist in the map, nothing happens.
enum mgp_error mgp_map_erase(struct mgp_map *map, const char *key);
/// Get the number of items stored in mgp_map.
/// Current implementation always returns without errors.
enum mgp_error mgp_map_size(struct mgp_map *map, size_t *result);
@ -504,9 +457,6 @@ enum mgp_error mgp_map_size(struct mgp_map *map, size_t *result);
/// Result is NULL if no mapping exists.
enum mgp_error mgp_map_at(struct mgp_map *map, const char *key, struct mgp_value **result);
/// Result is non-zero if the key exists in the map.
enum mgp_error mgp_key_exists(struct mgp_map *map, const char *key, int *result);
/// An item in the mgp_map.
struct mgp_map_item;
@ -558,9 +508,6 @@ enum mgp_error mgp_path_copy(struct mgp_path *path, struct mgp_memory *memory, s
/// Free the memory used by the given mgp_path and contained vertices and edges.
void mgp_path_destroy(struct mgp_path *path);
/// Return whether the given mgp_path contains any deleted values.
enum mgp_error mgp_path_contains_deleted(struct mgp_path *path, int *result);
/// Append an edge continuing from the last vertex on the path.
/// The edge is copied into the path. Therefore, the path does not take
/// ownership of the original edge, so you still need to free the edge memory
@ -571,10 +518,6 @@ enum mgp_error mgp_path_contains_deleted(struct mgp_path *path, int *result);
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for path extension.
enum mgp_error mgp_path_expand(struct mgp_path *path, struct mgp_edge *edge);
/// Remove the last node and the last relationship from the path.
/// Return mgp_error::MGP_ERROR_OUT_OF_RANGE if the path contains no relationships.
enum mgp_error mgp_path_pop(struct mgp_path *path);
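/// A minimal sketch (illustrative): extend a path by one relationship, then undo it.
///   mgp_path_expand(path, edge);  /* `edge` must continue from the last vertex on the path */
///   mgp_path_pop(path);           /* removes the last vertex and relationship again */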
/// Get the number of edges in a mgp_path.
/// Current implementation always returns without errors.
enum mgp_error mgp_path_size(struct mgp_path *path, size_t *result);
@ -679,12 +622,6 @@ struct mgp_vertex_id {
/// Get the ID of given vertex.
enum mgp_error mgp_vertex_get_id(struct mgp_vertex *v, struct mgp_vertex_id *result);
/// Get the in degree of given vertex.
enum mgp_error mgp_vertex_get_in_degree(struct mgp_vertex *v, size_t *result);
/// Get the out degree of given vertex.
enum mgp_error mgp_vertex_get_out_degree(struct mgp_vertex *v, size_t *result);
/// Result is non-zero if the vertex can be modified.
/// The mutability of the vertex is the same as the graph which it is part of. If a vertex is immutable, then edges
/// cannot be created or deleted, properties and labels cannot be set or removed and all of the returned edges will be
@ -702,15 +639,6 @@ enum mgp_error mgp_vertex_underlying_graph_is_mutable(struct mgp_vertex *v, int
enum mgp_error mgp_vertex_set_property(struct mgp_vertex *v, const char *property_name,
struct mgp_value *property_value);
/// Set the value of properties on a vertex.
/// When the value is `null`, then the property is removed from the vertex.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for storing the property.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `v` is immutable.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `v` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `v` has been modified by another transaction.
/// Return mgp_error::MGP_ERROR_VALUE_CONVERSION if any value in `properties` is a vertex, edge or path.
enum mgp_error mgp_vertex_set_properties(struct mgp_vertex *v, struct mgp_map *properties);
/// Add the label to the vertex.
/// If the vertex already has the label, this function does nothing.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for storing the label.
@ -734,9 +662,6 @@ enum mgp_error mgp_vertex_copy(struct mgp_vertex *v, struct mgp_memory *memory,
/// Free the memory used by a mgp_vertex.
void mgp_vertex_destroy(struct mgp_vertex *v);
/// Return whether the given mgp_vertex is deleted.
enum mgp_error mgp_vertex_is_deleted(struct mgp_vertex *v, int *result);
/// Result is non-zero if given vertices are equal, otherwise 0.
enum mgp_error mgp_vertex_equal(struct mgp_vertex *v1, struct mgp_vertex *v2, int *result);
@ -831,9 +756,6 @@ enum mgp_error mgp_edge_copy(struct mgp_edge *e, struct mgp_memory *memory, stru
/// Free the memory used by a mgp_edge.
void mgp_edge_destroy(struct mgp_edge *e);
/// Return whether the given mgp_edge is deleted.
enum mgp_error mgp_edge_is_deleted(struct mgp_edge *e, int *result);
/// Result is non-zero if given edges are equal, otherwise 0.
enum mgp_error mgp_edge_equal(struct mgp_edge *e1, struct mgp_edge *e2, int *result);
@ -867,15 +789,6 @@ enum mgp_error mgp_edge_get_property(struct mgp_edge *e, const char *property_na
/// Return mgp_error::MGP_ERROR_VALUE_CONVERSION if `property_value` is vertex, edge or path.
enum mgp_error mgp_edge_set_property(struct mgp_edge *e, const char *property_name, struct mgp_value *property_value);
/// Set the value of properties on an edge.
/// When a value is `null`, the corresponding property is removed from the edge.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for storing the property.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `e` is immutable.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `e` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `e` has been modified by another transaction.
/// Return mgp_error::MGP_ERROR_VALUE_CONVERSION if any value in `properties` is a vertex, edge or path.
enum mgp_error mgp_edge_set_properties(struct mgp_edge *e, struct mgp_map *properties);
/// Start iterating over properties stored in the given edge.
/// The properties of the edge are copied when the iterator is created, therefore later changes won't affect them.
/// Resulting mgp_properties_iterator needs to be deallocated with
@ -885,113 +798,21 @@ enum mgp_error mgp_edge_set_properties(struct mgp_edge *e, struct mgp_map *prope
enum mgp_error mgp_edge_iter_properties(struct mgp_edge *e, struct mgp_memory *memory,
struct mgp_properties_iterator **result);
/// State of the graph database.
struct mgp_graph;
/// Get the vertex corresponding to given ID, or NULL if no such vertex exists.
/// Resulting vertex must be freed using mgp_vertex_destroy.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate the vertex.
enum mgp_error mgp_graph_get_vertex_by_id(struct mgp_graph *g, struct mgp_vertex_id id, struct mgp_memory *memory,
struct mgp_vertex **result);
/// Result is non-zero if the index with the given name exists.
/// The current implementation always returns without errors.
enum mgp_error mgp_graph_has_text_index(struct mgp_graph *graph, const char *index_name, int *result);
/// Available modes of searching text indices.
MGP_ENUM_CLASS text_search_mode{
SPECIFIED_PROPERTIES,
REGEX,
ALL_PROPERTIES,
};
/// Search the named text index for the given query. The result is a map with the "search_results" and "error_msg" keys.
/// The "search_results" key contains the vertices whose text-indexed properties match the given query.
/// In case of a Tantivy error, the "search_results" key is absent, and "error_msg" contains the error message.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if there's an allocation error while constructing the results map.
/// Return mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS if the same key is being created in the results map more than once.
enum mgp_error mgp_graph_search_text_index(struct mgp_graph *graph, const char *index_name, const char *search_query,
enum text_search_mode search_mode, struct mgp_memory *memory,
struct mgp_map **result);
/// Aggregate over the results of a search over the named text index. The result is a map with the "aggregation_results"
/// and "error_msg" keys.
/// The "aggregation_results" key contains the vertices whose text-indexed properties match the given query.
/// In case of a Tantivy error, the "aggregation_results" key is absent, and "error_msg" contains the error message.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if there's an allocation error while constructing the results map.
/// Return mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS if the same key is being created in the results map more than once.
enum mgp_error mgp_graph_aggregate_over_text_index(struct mgp_graph *graph, const char *index_name,
const char *search_query, const char *aggregation_query,
struct mgp_memory *memory, struct mgp_map **result);
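/// A minimal sketch of a text search (illustrative; the index name and query
/// string are hypothetical):
///   struct mgp_map *results = NULL;
///   if (mgp_graph_search_text_index(graph, "docs_index", "keyword", ALL_PROPERTIES,
///                                   memory, &results) == MGP_ERROR_NO_ERROR) {
///     struct mgp_value *hits = NULL;
///     mgp_map_at(results, "search_results", &hits);
///     /* if `hits` is NULL, inspect the "error_msg" key instead */
///   }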
/// Creates label index for given label.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If the label index already exists, the result will be 0, otherwise 1.
enum mgp_error mgp_create_label_index(struct mgp_graph *graph, const char *label, int *result);
/// Drop label index.
enum mgp_error mgp_drop_label_index(struct mgp_graph *graph, const char *label, int *result);
/// List all label indices.
enum mgp_error mgp_list_all_label_indices(struct mgp_graph *graph, struct mgp_memory *memory, struct mgp_list **result);
/// Creates label-property index for given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If the label-property index already exists, the result will be 0, otherwise 1.
enum mgp_error mgp_create_label_property_index(struct mgp_graph *graph, const char *label, const char *property,
int *result);
/// Drops label-property index for given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If dropping the label-property index failed, the result will be 0, otherwise 1.
enum mgp_error mgp_drop_label_property_index(struct mgp_graph *graph, const char *label, const char *property,
int *result);
/// List all label+property indices.
enum mgp_error mgp_list_all_label_property_indices(struct mgp_graph *graph, struct mgp_memory *memory,
struct mgp_list **result);
/// Creates existence constraint for given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If creating the existence constraint failed, the result will be 0, otherwise 1.
enum mgp_error mgp_create_existence_constraint(struct mgp_graph *graph, const char *label, const char *property,
int *result);
/// Drops existence constraint for given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If dropping the existence constraint failed, the result will be 0, otherwise 1.
enum mgp_error mgp_drop_existence_constraint(struct mgp_graph *graph, const char *label, const char *property,
int *result);
/// List all existence constraints.
enum mgp_error mgp_list_all_existence_constraints(struct mgp_graph *graph, struct mgp_memory *memory,
struct mgp_list **result);
/// Creates unique constraint for given label and properties.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If creating the unique constraint failed, the result will be 0, otherwise 1.
enum mgp_error mgp_create_unique_constraint(struct mgp_graph *graph, const char *label, struct mgp_value *properties,
int *result);
/// Drops unique constraint for given label and properties.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If dropping the unique constraint failed, the result will be 0, otherwise 1.
enum mgp_error mgp_drop_unique_constraint(struct mgp_graph *graph, const char *label, struct mgp_value *properties,
int *result);
/// List all unique constraints.
enum mgp_error mgp_list_all_unique_constraints(struct mgp_graph *graph, struct mgp_memory *memory,
struct mgp_list **result);
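/// A minimal sketch (illustrative; the "Person" label is hypothetical):
///   int created = 0;
///   mgp_create_label_index(graph, "Person", &created);
///   /* created == 1 if the index was created, 0 if it already existed */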
/// Result is non-zero if the graph can be modified.
/// If a graph is immutable, then vertices cannot be created or deleted, and all of the returned vertices will be
/// immutable also. The same applies for edges.
/// Current implementation always returns without errors.
enum mgp_error mgp_graph_is_mutable(struct mgp_graph *graph, int *result);
/// Result is non-zero if the graph is in transactional storage mode.
/// If a graph is not in transactional mode (i.e. analytical mode), then vertices and edges can be missing
/// because changes from other transactions are visible.
/// Current implementation always returns without errors.
enum mgp_error mgp_graph_is_transactional(struct mgp_graph *graph, int *result);
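/// A minimal sketch combining the two checks above (illustrative):
///   int is_mutable = 0;
///   mgp_graph_is_mutable(graph, &is_mutable);
///   if (is_mutable) {
///     /* vertices and edges may be created, deleted and modified */
///   }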
/// Add a new vertex to the graph.
/// Resulting vertex must be freed using mgp_vertex_destroy.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
@ -1018,29 +839,6 @@ enum mgp_error mgp_graph_detach_delete_vertex(struct mgp_graph *graph, struct mg
enum mgp_error mgp_graph_create_edge(struct mgp_graph *graph, struct mgp_vertex *from, struct mgp_vertex *to,
struct mgp_edge_type type, struct mgp_memory *memory, struct mgp_edge **result);
/// Change an edge's source ("from") vertex.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate a mgp_edge.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `from` or `to` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `from` or `to` has been modified by another transaction.
enum mgp_error mgp_graph_edge_set_from(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_vertex *new_from,
struct mgp_memory *memory, struct mgp_edge **result);
/// Change an edge's destination ("to") vertex.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate a mgp_edge.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `from` or `to` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `from` or `to` has been modified by another transaction.
enum mgp_error mgp_graph_edge_set_to(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_vertex *new_to,
struct mgp_memory *memory, struct mgp_edge **result);
/// Change an edge's type.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `edge`, its source or destination vertex has been modified by
/// another transaction.
enum mgp_error mgp_graph_edge_change_type(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_edge_type new_type,
struct mgp_memory *memory, struct mgp_edge **result);
/// Delete an edge from the graph.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `edge`, its source or destination vertex has been modified by
@ -1494,12 +1292,6 @@ struct mgp_proc;
/// Describes a Memgraph magic function.
struct mgp_func;
/// All available log levels that can be used in mgp_log function
MGP_ENUM_CLASS mgp_log_level{
MGP_LOG_LEVEL_TRACE, MGP_LOG_LEVEL_DEBUG, MGP_LOG_LEVEL_INFO,
MGP_LOG_LEVEL_WARN, MGP_LOG_LEVEL_ERROR, MGP_LOG_LEVEL_CRITICAL,
};
/// Entry-point for a query module read procedure, invoked through openCypher.
///
/// Passed in arguments will not live longer than the callback's execution.
@ -1507,13 +1299,6 @@ MGP_ENUM_CLASS mgp_log_level{
/// to allocate global resources.
typedef void (*mgp_proc_cb)(struct mgp_list *, struct mgp_graph *, struct mgp_result *, struct mgp_memory *);
/// Cleanup for a query module read procedure. Can't be invoked through OpenCypher. Cleans batched stream.
typedef void (*mgp_proc_cleanup)();
/// Initializer for a query module batched read procedure. Can't be invoked through OpenCypher. Initializes batched
/// stream.
typedef void (*mgp_proc_initializer)(struct mgp_list *, struct mgp_graph *, struct mgp_memory *);
/// Register a read-only procedure to a module.
///
/// The `name` must be a sequence of digits, underscores, lowercase and
@ -1538,30 +1323,6 @@ enum mgp_error mgp_module_add_read_procedure(struct mgp_module *module, const ch
enum mgp_error mgp_module_add_write_procedure(struct mgp_module *module, const char *name, mgp_proc_cb cb,
struct mgp_proc **result);
/// Register a readable batched procedure to a module.
///
/// The `name` must be a valid identifier, following the same rules as the
/// procedure `name` in mgp_module_add_read_procedure.
///
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for mgp_proc.
/// Return mgp_error::MGP_ERROR_INVALID_ARGUMENT if `name` is not a valid procedure name.
/// Return mgp_error::MGP_ERROR_LOGIC_ERROR if a procedure with the same name was already registered.
enum mgp_error mgp_module_add_batch_read_procedure(struct mgp_module *module, const char *name, mgp_proc_cb cb,
mgp_proc_initializer initializer, mgp_proc_cleanup cleanup,
struct mgp_proc **result);
/// Register a writeable batched procedure to a module.
///
/// The `name` must be a valid identifier, following the same rules as the
/// procedure `name` in mgp_module_add_read_procedure.
///
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for mgp_proc.
/// Return mgp_error::MGP_ERROR_INVALID_ARGUMENT if `name` is not a valid procedure name.
/// Return mgp_error::MGP_ERROR_LOGIC_ERROR if a procedure with the same name was already registered.
enum mgp_error mgp_module_add_batch_write_procedure(struct mgp_module *module, const char *name, mgp_proc_cb cb,
mgp_proc_initializer initializer, mgp_proc_cleanup cleanup,
struct mgp_proc **result);
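For example, registering a batched read procedure might look as follows; `my_batch`, `my_init`, and `my_cleanup` are hypothetical callbacks matching the mgp_proc_cb, mgp_proc_initializer, and mgp_proc_cleanup typedefs above:
struct mgp_proc *proc = NULL;
enum mgp_error err =
    mgp_module_add_batch_read_procedure(module, "my_batch", my_batch, my_init, my_cleanup, &proc);
if (err == MGP_ERROR_INVALID_ARGUMENT) {
  /* "my_batch" is not a valid procedure name */
} else if (err == MGP_ERROR_LOGIC_ERROR) {
  /* a procedure with the same name was already registered */
}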
/// Add a required argument to a procedure.
///
/// The order of adding arguments will correspond to the order the procedure
@ -1625,9 +1386,6 @@ enum mgp_error mgp_proc_add_result(struct mgp_proc *proc, const char *name, stru
/// Return mgp_error::MGP_ERROR_INVALID_ARGUMENT if `name` is not a valid result name.
/// Return mgp_error::MGP_ERROR_LOGIC_ERROR if a result field with the same name was already added.
enum mgp_error mgp_proc_add_deprecated_result(struct mgp_proc *proc, const char *name, struct mgp_type *type);
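A hedged sketch of declaring a result field; it assumes the mgp_type_string accessor with an out-parameter, which is declared outside this excerpt:
struct mgp_type *string_type = NULL;
if (mgp_type_string(&string_type) != MGP_ERROR_NO_ERROR) return;
/* Fails with MGP_ERROR_LOGIC_ERROR if "message" was already added to `proc`. */
enum mgp_error err = mgp_proc_add_result(proc, "message", string_type);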
/// Log a message on a certain level.
enum mgp_error mgp_log(enum mgp_log_level log_level, const char *output);
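Usage is a single call; like the rest of the API it reports failure through the returned mgp_error:
if (mgp_log(MGP_LOG_LEVEL_INFO, "module initialized") != MGP_ERROR_NO_ERROR) {
  /* the message could not be logged, e.g. allocation failed */
}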
///@}
/// @name Execution
@ -1637,10 +1395,7 @@ enum mgp_error mgp_log(enum mgp_log_level log_level, const char *output);
/// @{
/// Return non-zero if the currently executing procedure should abort as soon as
/// possible. If non-zero the reasons are:
/// (1) The transaction was requested to be terminated
/// (2) The server is gracefully shutting down
/// (3) The transaction has hit its timeout threshold
/// possible.
///
/// Procedures which perform heavyweight processing run the risk of running too
/// long and going over the query execution time limit. To prevent this, such
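A typical cooperative-cancellation loop, sketched under the assumption that mgp_must_abort returns non-zero when the procedure should stop (its exact declaration falls outside this excerpt); `items`, `n_items`, and `process` are illustrative:
for (size_t i = 0; i < n_items; ++i) {
  if (mgp_must_abort(graph)) return;  /* terminated, shutting down, or timed out */
  process(items[i]);                  /* stand-in for the heavyweight work */
}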
@ -1757,10 +1512,6 @@ enum mgp_error mgp_module_add_transformation(struct mgp_module *module, const ch
///
///@{
/// State of the database that is exposed to magic functions. Currently it is unused, but it enables extending the
/// functionality of magic functions in the future without breaking the API.
struct mgp_func_context;
/// Add a required argument to a function.
///
/// The order of the added arguments corresponds to the signature of the openCypher function.
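A minimal sketch, assuming the mgp_func_add_arg and mgp_type_int declarations that follow in the full header; the "limit" argument name is illustrative:
struct mgp_type *int_type = NULL;
if (mgp_type_int(&int_type) != MGP_ERROR_NO_ERROR) return;
/* Arguments must be added in the order they appear in the openCypher call. */
enum mgp_error err = mgp_func_add_arg(func, "limit", int_type);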

(Three file diffs suppressed because they are too large.)

init

@ -5,16 +5,13 @@ cd "$DIR"
source "$DIR/environment/util.sh"
DISTRO=$(operating_system)
ARCHITECTURE=$(architecture)
function print_help () {
echo "Usage: $0 [OPTION]"
echo -e "Check for missing packages and setup the project.\n"
echo "Optional arguments:"
echo -e " -h\tdisplay this help and exit"
echo -e " --without-libs-setup\tskip the step for setting up libs"
echo -e " --ci\tscript is being run inside ci"
echo -e " --wsl-quicklisp-proxy \"host:port\"\tquicklist HTTP proxy (this flag + HTTP proxy are required on WSL)"
}
function setup_virtualenv () {
@ -35,22 +32,28 @@ function setup_virtualenv () {
popd > /dev/null
}
wsl_quicklisp_proxy=""
setup_libs=true
ci=false
if [[ $# -eq 1 && "$1" == "-h" ]]; then
print_help
exit 0
else
while(($#)); do
case "$1" in
--wsl-quicklisp-proxy)
shift
if [[ $# -eq 0 ]]; then
echo "Missing proxy URL"
print_help
exit 1
fi
wsl_quicklisp_proxy=":proxy \"http://$1/\""
shift
;;
--without-libs-setup)
shift
setup_libs=false
;;
--ci)
shift
ci=true
;;
*)
# unknown option
echo "Invalid argument provided: $1"
@ -61,6 +64,8 @@ else
done
fi
DISTRO=$(operating_system)
ARCHITECTURE=$(architecture)
if [ "${ARCHITECTURE}" = "arm64" ] || [ "${ARCHITECTURE}" = "aarch64" ]; then
OS_SCRIPT=$DIR/environment/os/$DISTRO-arm.sh
else
@ -73,22 +78,37 @@ echo "All packages are in-place..."
# create a default build directory
mkdir -p ./build
if [[ "$setup_libs" == "true" ]]; then
# Setup libs (download).
cd libs
./cleanup.sh
./setup.sh
cd ..
# quicklisp package manager for Common Lisp
quicklisp_install_dir="$HOME/quicklisp"
if [[ -v QUICKLISP_HOME ]]; then
quicklisp_install_dir="${QUICKLISP_HOME}"
fi
# Fix for centos 7 during release
if [[ "$ci" == "false" ]]; then
if [ "${DISTRO}" = "centos-7" ] || [ "${DISTRO}" = "debian-11" ] || [ "${DISTRO}" = "amzn-2" ]; then
if python3 -m pip show virtualenv >/dev/null 2>/dev/null; then
python3 -m pip uninstall -y virtualenv
fi
python3 -m pip install virtualenv
fi
if [[ ! -f "${quicklisp_install_dir}/setup.lisp" ]]; then
wget -nv https://beta.quicklisp.org/quicklisp.lisp -O quicklisp.lisp || exit 1
echo \
"
(load \"${DIR}/quicklisp.lisp\")
(quicklisp-quickstart:install $wsl_quicklisp_proxy :path \"${quicklisp_install_dir}\")
" | sbcl --script || exit 1
rm -rf quicklisp.lisp || exit 1
fi
ln -Tfs "$DIR/src/lisp" "${quicklisp_install_dir}/local-projects/lcp"
# Install LCP dependencies
# TODO: We should at some point cache or have a mirror of packages we use.
# TODO: move the installation of LCP's dependencies into ./setup.sh
echo \
"
(load \"${quicklisp_install_dir}/setup.lisp\")
(ql:quickload '(:lcp :lcp/test) :silent t)
" | sbcl --script
if [[ "$setup_libs" == "true" ]]; then
# Setup libs (download).
cd libs
./cleanup.sh
./setup.sh
cd ..
fi
# setup gql_behave dependencies
@ -101,10 +121,6 @@ setup_virtualenv tests/stress
setup_virtualenv tests/integration/ldap
# Setup tests dependencies.
# NOTE: This is commented out because of the build order (at the time of
# execution mgclient is not built yet), which makes this setup fail. mgclient
# is built during the make phase. The tests/setup.sh is called under GHA CI
# jobs.
# cd tests
# ./setup.sh
# cd ..
@ -114,30 +130,22 @@ setup_virtualenv tests/integration/ldap
echo "Done installing dependencies for Memgraph"
echo "Linking git hooks OR skip if .git folder is not there"
if [ -d "$DIR/.git" ]; then
for hook in $(find $DIR/.githooks -type f -printf "%f\n"); do
ln -s -f "$DIR/.githooks/$hook" "$DIR/.git/hooks/$hook"
echo "Added $hook hook"
done;
else
echo "WARNING: .git folder not present, skip adding hooks"
fi
echo "Linking git hooks"
for hook in $(find $DIR/.githooks -type f -printf "%f\n"); do
ln -s -f "$DIR/.githooks/$hook" "$DIR/.git/hooks/$hook"
echo "Added $hook hook"
done;
# Install precommit hook
python3 -m pip install pre-commit
python3 -m pre_commit install
# Install py format tools
echo "Install black formatter"
python3 -m pip install black==22.10.*
echo "Install isort"
python3 -m pip install isort==5.10.*
# Install the pre-commit hook except on old operating systems: we don't
# develop on them, so the pre-commit hook is not required and we can use the
# latest packages.
if [[ "$ci" == "false" ]]; then
if [ "${DISTRO}" != "centos-7" ] && [ "$DISTRO" != "debian-10" ] && [ "${DISTRO}" != "ubuntu-18.04" ] && [ "${DISTRO}" != "amzn-2" ]; then
python3 -m pip install pre-commit
python3 -m pre_commit install
# Install py format tools for usage during the development.
echo "Install black formatter"
python3 -m pip install black==23.1.*
echo "Install isort"
python3 -m pip install isort==5.12.*
fi
fi
# Link `include/mgp.py` with `release/mgp/mgp.py`
ln -v -f include/mgp.py release/mgp/mgp.py

libs/.gitignore vendored

@ -6,5 +6,3 @@
!__main.cpp
!pulsar.patch
!antlr4.10.1.patch
!rocksdb8.1.1.patch
!nuraft2.1.0.patch


@ -4,8 +4,7 @@ include(GNUInstallDirs)
include(ProcessorCount)
ProcessorCount(NPROC)
if(NPROC EQUAL 0)
if (NPROC EQUAL 0)
set(NPROC 1)
endif()
@ -13,10 +12,9 @@ find_package(Boost 1.78 REQUIRED)
find_package(BZip2 1.0.6 REQUIRED)
find_package(Threads REQUIRED)
set(GFLAGS_NOTHREADS OFF)
# NOTE: config/generate.py depends on the gflags help XML format.
find_package(gflags REQUIRED)
find_package(fmt 8.0.1 REQUIRED)
find_package(fmt 8.0.1)
find_package(Jemalloc REQUIRED)
find_package(ZLIB 1.2.11 REQUIRED)
set(LIB_DIR ${CMAKE_CURRENT_SOURCE_DIR})
@ -25,27 +23,23 @@ set(LIB_DIR ${CMAKE_CURRENT_SOURCE_DIR})
function(import_header_library name include_dir)
add_library(${name} INTERFACE IMPORTED GLOBAL)
set_property(TARGET ${name} PROPERTY
INTERFACE_INCLUDE_DIRECTORIES ${include_dir})
INTERFACE_INCLUDE_DIRECTORIES ${include_dir})
string(TOUPPER ${name} _upper_name)
set(${_upper_name}_INCLUDE_DIR ${include_dir} CACHE FILEPATH
"Path to ${name} include directory" FORCE)
"Path to ${name} include directory" FORCE)
mark_as_advanced(${_upper_name}_INCLUDE_DIR)
add_library(lib::${name} ALIAS ${name})
endfunction(import_header_library)
function(import_library name type location include_dir)
add_library(${name} ${type} IMPORTED GLOBAL)
if(${ARGN})
if (${ARGN})
# Optional argument is the name of the external project that we need to
# depend on.
add_dependencies(${name} ${ARGN0})
else()
add_dependencies(${name} ${name}-proj)
endif()
set_property(TARGET ${name} PROPERTY IMPORTED_LOCATION ${location})
# We need to create the include directory first in order to be able to add it
# as an include directory. The header files in the include directory will be
# generated later during the build process.
@ -65,59 +59,43 @@ function(add_external_project name)
set(options NO_C_COMPILER)
set(one_value_kwargs SOURCE_DIR BUILD_IN_SOURCE)
set(multi_value_kwargs CMAKE_ARGS DEPENDS INSTALL_COMMAND BUILD_COMMAND
CONFIGURE_COMMAND)
CONFIGURE_COMMAND)
cmake_parse_arguments(KW "${options}" "${one_value_kwargs}" "${multi_value_kwargs}" ${ARGN})
set(source_dir ${CMAKE_CURRENT_SOURCE_DIR}/${name})
if(KW_SOURCE_DIR)
if (KW_SOURCE_DIR)
set(source_dir ${KW_SOURCE_DIR})
endif()
set(build_in_source 0)
if(KW_BUILD_IN_SOURCE)
if (KW_BUILD_IN_SOURCE)
set(build_in_source ${KW_BUILD_IN_SOURCE})
endif()
if(NOT KW_NO_C_COMPILER)
if (NOT KW_NO_C_COMPILER)
set(KW_CMAKE_ARGS -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} ${KW_CMAKE_ARGS})
endif()
ExternalProject_Add(${name}-proj DEPENDS ${KW_DEPENDS}
PREFIX ${source_dir} SOURCE_DIR ${source_dir}
BUILD_IN_SOURCE ${build_in_source}
CONFIGURE_COMMAND ${KW_CONFIGURE_COMMAND}
CMAKE_ARGS -DCMAKE_BUILD_TYPE=Release
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_INSTALL_PREFIX=${source_dir}
${KW_CMAKE_ARGS}
INSTALL_COMMAND ${KW_INSTALL_COMMAND}
BUILD_COMMAND ${KW_BUILD_COMMAND})
PREFIX ${source_dir} SOURCE_DIR ${source_dir}
BUILD_IN_SOURCE ${build_in_source}
CONFIGURE_COMMAND ${KW_CONFIGURE_COMMAND}
CMAKE_ARGS -DCMAKE_BUILD_TYPE=Release
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_INSTALL_PREFIX=${source_dir}
${KW_CMAKE_ARGS}
INSTALL_COMMAND ${KW_INSTALL_COMMAND}
BUILD_COMMAND ${KW_BUILD_COMMAND})
endfunction(add_external_project)
# Calls `add_external_project`, sets NAME_LIBRARY, NAME_INCLUDE_DIR variables
# and adds the library via `import_library`.
macro(import_external_library name type library_location include_dir)
add_external_project(${name} ${ARGN})
string(TOUPPER ${name} _upper_name)
set(${_upper_name}_LIBRARY ${library_location} CACHE FILEPATH
"Path to ${name} library" FORCE)
set(${_upper_name}_INCLUDE_DIR ${include_dir} CACHE FILEPATH
"Path to ${name} include directory" FORCE)
mark_as_advanced(${_upper_name}_LIBRARY ${_upper_name}_INCLUDE_DIR)
import_library(${name} ${type} ${${_upper_name}_LIBRARY} ${${_upper_name}_INCLUDE_DIR})
endmacro(import_external_library)
macro(set_path_external_library name type library_location include_dir)
string(TOUPPER ${name} _upper_name)
set(${_upper_name}_LIBRARY ${library_location} CACHE FILEPATH
"Path to ${name} library" FORCE)
set(${_upper_name}_INCLUDE_DIR ${include_dir} CACHE FILEPATH
"Path to ${name} include directory" FORCE)
mark_as_advanced(${name}_LIBRARY ${name}_INCLUDE_DIR)
endmacro(set_path_external_library)
mark_as_advanced(${_upper_name}_LIBRARY ${_upper_name}_INCLUDE_DIR)
import_library(${name} ${type} ${${_upper_name}_LIBRARY} ${${_upper_name}_INCLUDE_DIR})
endmacro(import_external_library)
# setup antlr
import_external_library(antlr4 STATIC
@ -125,10 +103,10 @@ import_external_library(antlr4 STATIC
${CMAKE_CURRENT_SOURCE_DIR}/antlr4/runtime/Cpp/include/antlr4-runtime
SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/antlr4/runtime/Cpp
CMAKE_ARGS # http://stackoverflow.com/questions/37096062/get-a-basic-c-program-to-compile-using-clang-on-ubuntu-16/38385967#38385967
-DWITH_LIBCXX=OFF # because of debian bug
-DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=true
-DCMAKE_CXX_STANDARD=20
-DANTLR_BUILD_CPP_TESTS=OFF
-DWITH_LIBCXX=OFF # because of debian bug
-DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=true
-DCMAKE_CXX_STANDARD=20
-DANTLR_BUILD_CPP_TESTS=OFF
BUILD_COMMAND $(MAKE) antlr4_static
INSTALL_COMMAND $(MAKE) install)
@ -136,7 +114,6 @@ import_external_library(antlr4 STATIC
import_external_library(benchmark STATIC
${CMAKE_CURRENT_SOURCE_DIR}/benchmark/${CMAKE_INSTALL_LIBDIR}/libbenchmark.a
${CMAKE_CURRENT_SOURCE_DIR}/benchmark/include
# Skip testing. The tests don't compile with Clang 8.
CMAKE_ARGS -DBENCHMARK_ENABLE_TESTING=OFF)
@ -152,15 +129,15 @@ add_subdirectory(rapidcheck EXCLUDE_FROM_ALL)
# setup google test
add_external_project(gtest SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/googletest)
set(GTEST_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/googletest/include
CACHE PATH "Path to gtest and gmock include directory" FORCE)
CACHE PATH "Path to gtest and gmock include directory" FORCE)
set(GMOCK_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgmock.a
CACHE FILEPATH "Path to gmock library" FORCE)
CACHE FILEPATH "Path to gmock library" FORCE)
set(GMOCK_MAIN_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgmock_main.a
CACHE FILEPATH "Path to gmock_main library" FORCE)
CACHE FILEPATH "Path to gmock_main library" FORCE)
set(GTEST_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgtest.a
CACHE FILEPATH "Path to gtest library" FORCE)
CACHE FILEPATH "Path to gtest library" FORCE)
set(GTEST_MAIN_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgtest_main.a
CACHE FILEPATH "Path to gtest_main library" FORCE)
CACHE FILEPATH "Path to gtest_main library" FORCE)
mark_as_advanced(GTEST_INCLUDE_DIR GMOCK_LIBRARY GMOCK_MAIN_LIBRARY GTEST_LIBRARY GTEST_MAIN_LIBRARY)
import_library(gtest STATIC ${GTEST_LIBRARY} ${GTEST_INCLUDE_DIR} gtest-proj)
import_library(gtest_main STATIC ${GTEST_MAIN_LIBRARY} ${GTEST_INCLUDE_DIR} gtest-proj)
@ -178,10 +155,10 @@ import_external_library(rocksdb STATIC
${CMAKE_CURRENT_SOURCE_DIR}/rocksdb/lib/librocksdb.a
${CMAKE_CURRENT_SOURCE_DIR}/rocksdb/include
CMAKE_ARGS -DUSE_RTTI=ON
-DWITH_TESTS=OFF
-DGFLAGS_NOTHREADS=OFF
-DCMAKE_INSTALL_LIBDIR=lib
-DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=true
-DWITH_TESTS=OFF
-DGFLAGS_NOTHREADS=OFF
-DCMAKE_INSTALL_LIBDIR=lib
-DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=true
BUILD_COMMAND $(MAKE) rocksdb)
# Setup libbcrypt
@ -190,8 +167,8 @@ import_external_library(libbcrypt STATIC
${CMAKE_CURRENT_SOURCE_DIR}/libbcrypt
CONFIGURE_COMMAND sed s/-Wcast-align// -i ${CMAKE_CURRENT_SOURCE_DIR}/libbcrypt/crypt_blowfish/Makefile
BUILD_COMMAND make -C ${CMAKE_CURRENT_SOURCE_DIR}/libbcrypt
CC=${CMAKE_C_COMPILER}
CXX=${CMAKE_CXX_COMPILER}
CC=${CMAKE_C_COMPILER}
CXX=${CMAKE_CXX_COMPILER}
INSTALL_COMMAND true)
# Setup mgclient
@ -199,16 +176,16 @@ import_external_library(mgclient STATIC
${CMAKE_CURRENT_SOURCE_DIR}/mgclient/lib/libmgclient.a
${CMAKE_CURRENT_SOURCE_DIR}/mgclient/include
CMAKE_ARGS -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DBUILD_TESTING=OFF
-DBUILD_CPP_BINDINGS=ON)
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DBUILD_TESTING=OFF
-DBUILD_CPP_BINDINGS=ON)
find_package(OpenSSL REQUIRED)
target_link_libraries(mgclient INTERFACE ${OPENSSL_LIBRARIES})
add_external_project(mgconsole
SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/mgconsole
CMAKE_ARGS
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_BINARY_DIR}
-DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_BINARY_DIR}
BUILD_COMMAND $(MAKE) mgconsole)
add_custom_target(mgconsole DEPENDS mgconsole-proj)
@ -225,15 +202,14 @@ import_external_library(librdkafka STATIC
${CMAKE_CURRENT_SOURCE_DIR}/librdkafka/lib/librdkafka.a
${CMAKE_CURRENT_SOURCE_DIR}/librdkafka/include
CMAKE_ARGS -DRDKAFKA_BUILD_STATIC=ON
-DRDKAFKA_BUILD_EXAMPLES=OFF
-DRDKAFKA_BUILD_TESTS=OFF
-DWITH_ZSTD=OFF
-DENABLE_LZ4_EXT=OFF
-DCMAKE_INSTALL_LIBDIR=lib
-DWITH_SSL=ON
# If we want SASL, we need to install it on build machines
-DWITH_SASL=OFF)
-DRDKAFKA_BUILD_EXAMPLES=OFF
-DRDKAFKA_BUILD_TESTS=OFF
-DWITH_ZSTD=OFF
-DENABLE_LZ4_EXT=OFF
-DCMAKE_INSTALL_LIBDIR=lib
-DWITH_SSL=ON
# If we want SASL, we need to install it on build machines
-DWITH_SASL=OFF)
target_link_libraries(librdkafka INTERFACE ${OPENSSL_LIBRARIES} ZLIB::ZLIB)
import_library(librdkafka++ STATIC
@ -254,24 +230,24 @@ import_external_library(pulsar STATIC
${CMAKE_CURRENT_SOURCE_DIR}/pulsar/install/include
BUILD_IN_SOURCE 1
CONFIGURE_COMMAND cmake pulsar-client-cpp
-DCMAKE_INSTALL_PREFIX=${CMAKE_CURRENT_SOURCE_DIR}/pulsar/install
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-DBUILD_DYNAMIC_LIB=OFF
-DBUILD_STATIC_LIB=ON
-DBUILD_TESTS=OFF
-DLINK_STATIC=ON
-DPROTOC_PATH=${PROTOBUF_ROOT}/bin/protoc
-DBOOST_ROOT=${BOOST_ROOT}
-DCMAKE_PREFIX_PATH=${PROTOBUF_ROOT}
-DProtobuf_INCLUDE_DIRS=${PROTOBUF_ROOT}/include
-DBUILD_PYTHON_WRAPPER=OFF
-DBUILD_PERF_TOOLS=OFF
-DUSE_LOG4CXX=OFF
BUILD_COMMAND $(MAKE) pulsarStaticWithDeps)
-DCMAKE_INSTALL_PREFIX=${CMAKE_CURRENT_SOURCE_DIR}/pulsar/install
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
-DBUILD_DYNAMIC_LIB=OFF
-DBUILD_STATIC_LIB=ON
-DBUILD_TESTS=OFF
-DLINK_STATIC=ON
-DPROTOC_PATH=${PROTOBUF_ROOT}/bin/protoc
-DBOOST_ROOT=${BOOST_ROOT}
-DCMAKE_PREFIX_PATH=${PROTOBUF_ROOT}
-DProtobuf_INCLUDE_DIRS=${PROTOBUF_ROOT}/include
-DBUILD_PYTHON_WRAPPER=OFF
-DBUILD_PERF_TOOLS=OFF
-DUSE_LOG4CXX=OFF
BUILD_COMMAND $(MAKE) pulsarStaticWithDeps)
add_dependencies(pulsar-proj protobuf)
if(${MG_ARCH} STREQUAL "ARM64")
if (${MG_ARCH} STREQUAL "ARM64")
set(MG_LIBRDTSC_CMAKE_ARGS -DLIBRDTSC_ARCH_x86=OFF -DLIBRDTSC_ARCH_ARM64=ON)
endif()
@ -280,52 +256,3 @@ import_external_library(librdtsc STATIC
${CMAKE_CURRENT_SOURCE_DIR}/librdtsc/include
CMAKE_ARGS ${MG_LIBRDTSC_CMAKE_ARGS}
BUILD_COMMAND $(MAKE) rdtsc)
# setup ctre
import_header_library(ctre ${CMAKE_CURRENT_SOURCE_DIR})
# setup absl (cmake sub_directory tolerant)
set(ABSL_PROPAGATE_CXX_STD ON)
add_subdirectory(absl EXCLUDE_FROM_ALL)
# set Jemalloc
set_path_external_library(jemalloc STATIC
${CMAKE_CURRENT_SOURCE_DIR}/jemalloc/lib/libjemalloc.a
${CMAKE_CURRENT_SOURCE_DIR}/jemalloc/include/)
import_header_library(rangev3 ${CMAKE_CURRENT_SOURCE_DIR}/rangev3/include)
ExternalProject_Add(mgcxx-proj
PREFIX mgcxx-proj
GIT_REPOSITORY https://github.com/memgraph/mgcxx
GIT_TAG "v0.0.4"
CMAKE_ARGS
"-DCMAKE_INSTALL_PREFIX=<INSTALL_DIR>"
"-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}"
"-DENABLE_TESTS=OFF"
"-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}"
"-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}"
INSTALL_DIR "${PROJECT_BINARY_DIR}/mgcxx"
)
ExternalProject_Get_Property(mgcxx-proj install_dir)
set(MGCXX_ROOT ${install_dir})
add_library(tantivy_text_search STATIC IMPORTED GLOBAL)
add_dependencies(tantivy_text_search mgcxx-proj)
set_property(TARGET tantivy_text_search PROPERTY IMPORTED_LOCATION ${MGCXX_ROOT}/lib/libtantivy_text_search.a)
add_library(mgcxx_text_search STATIC IMPORTED GLOBAL)
add_dependencies(mgcxx_text_search mgcxx-proj)
set_property(TARGET mgcxx_text_search PROPERTY IMPORTED_LOCATION ${MGCXX_ROOT}/lib/libmgcxx_text_search.a)
# We need to create the include directory first in order to be able to add it
# as an include directory. The header files in the include directory will be
# generated later during the build process.
file(MAKE_DIRECTORY ${MGCXX_ROOT}/include)
set_property(TARGET mgcxx_text_search PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${MGCXX_ROOT}/include)
# Setup NuRaft
import_external_library(nuraft STATIC
${CMAKE_CURRENT_SOURCE_DIR}/nuraft/lib/libnuraft.a
${CMAKE_CURRENT_SOURCE_DIR}/nuraft/include/)
find_package(OpenSSL REQUIRED)
target_link_libraries(nuraft INTERFACE ${OPENSSL_LIBRARIES})


@ -5,7 +5,7 @@ index ee9b58c..31359a9 100644
@@ -48,7 +48,7 @@ option(LIBRDTSC_USE_PMU "Enables PMU usage on ARM platforms" OFF)
# | Library Build and Install Properties |
# +--------------------------------------------------------+
-add_library(rdtsc SHARED
+add_library(rdtsc
src/cycles.c
@ -14,7 +14,7 @@ index ee9b58c..31359a9 100644
@@ -72,15 +72,6 @@ target_include_directories(rdtsc
PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include
)
-# Install directory changes depending on build mode
-if (CMAKE_BUILD_TYPE MATCHES "^[Dd]ebug")
- # During debug, the library will be installed into a local directory
@ -27,15 +27,3 @@ index ee9b58c..31359a9 100644
# Specifying what to export when installing (GNUInstallDirs required)
install(TARGETS rdtsc
EXPORT librstsc-config
diff --git a/include/librdtsc/common_timer.h b/include/librdtsc/common_timer.h
index a6922d8..080dc77 100644
--- a/include/librdtsc/common_timer.h
+++ b/include/librdtsc/common_timer.h
@@ -2,6 +2,7 @@
#define LIBRDTSC_COMMON_TIMER_H
#include <librdtsc/common.h>
+#include <librdtsc/cycles.h>
extern uint64_t rdtsc_get_tsc_freq_arch();
extern uint64_t rdtsc_get_tsc_freq();


@ -1,24 +0,0 @@
diff --git a/include/libnuraft/asio_service_options.hxx b/include/libnuraft/asio_service_options.hxx
index 8fe1ec9..9497355 100644
--- a/include/libnuraft/asio_service_options.hxx
+++ b/include/libnuraft/asio_service_options.hxx
@@ -17,6 +17,7 @@ limitations under the License.
#pragma once
+#include <cstdint>
#include <functional>
#include <string>
#include <system_error>
diff --git a/include/libnuraft/callback.hxx b/include/libnuraft/callback.hxx
index 7b71624..d48c1e2 100644
--- a/include/libnuraft/callback.hxx
+++ b/include/libnuraft/callback.hxx
@@ -18,6 +18,7 @@ limitations under the License.
#ifndef _CALLBACK_H_
#define _CALLBACK_H_
+#include <cstdint>
#include <functional>
#include <string>

libs/rocksdb.patch

@ -0,0 +1,21 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 6761929..6a369af 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -220,6 +220,7 @@ else()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -momit-leaf-frame-pointer")
endif()
endif()
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-deprecated-copy -Wno-unused-but-set-variable")
endif()
include(CheckCCompilerFlag)
@@ -997,7 +998,7 @@ if(NOT WIN32 OR ROCKSDB_INSTALL_ON_WINDOWS)
if(ROCKSDB_BUILD_SHARED)
install(
- TARGETS ${ROCKSDB_SHARED_LIB}
+ TARGETS ${ROCKSDB_SHARED_LIB} OPTIONAL
EXPORT RocksDBTargets
COMPONENT runtime
ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"


@ -1,13 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 598c728..816c705 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1242,7 +1242,7 @@ if(NOT WIN32 OR ROCKSDB_INSTALL_ON_WINDOWS)
if(ROCKSDB_BUILD_SHARED)
install(
- TARGETS ${ROCKSDB_SHARED_LIB}
+ TARGETS ${ROCKSDB_SHARED_LIB} OPTIONAL
EXPORT RocksDBTargets
COMPONENT runtime
ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"


@ -71,8 +71,8 @@ file_get_try_double () {
if [ -z "$primary_url" ]; then echo "Primary should not be empty." && exit 1; fi
if [ -z "$secondary_url" ]; then echo "Secondary should not be empty." && exit 1; fi
filename="$(basename "$secondary_url")"
# Redirect primary/cache to /dev/null to make it less confusing for a new contributor because only CI has access to the cache.
wget -nv "$primary_url" -O "$filename" >/dev/null 2>&1 || wget -nv "$secondary_url" -O "$filename" || exit 1
wget -nv "$primary_url" -O "$filename" || wget -nv "$secondary_url" -O "$filename" || exit 1
echo ""
}
repo_clone_try_double () {
@ -86,8 +86,8 @@ repo_clone_try_double () {
if [ -z "$secondary_url" ]; then echo "Secondary should not be empty." && exit 1; fi
if [ -z "$folder_name" ]; then echo "Clone folder should not be empty." && exit 1; fi
if [ -z "$ref" ]; then echo "Git clone ref should not be empty." && exit 1; fi
# Redirect primary/cache to /dev/null to make it less confusing for a new contributor because only CI has access to the cache.
clone "$primary_url" "$folder_name" "$ref" "$shallow" >/dev/null 2>&1 || clone "$secondary_url" "$folder_name" "$ref" "$shallow" || exit 1
clone "$primary_url" "$folder_name" "$ref" "$shallow" || clone "$secondary_url" "$folder_name" "$ref" "$shallow" || exit 1
echo ""
}
# List all dependencies.
@ -116,18 +116,12 @@ declare -A primary_urls=(
["pymgclient"]="http://$local_cache_host/git/pymgclient.git"
["mgconsole"]="http://$local_cache_host/git/mgconsole.git"
["spdlog"]="http://$local_cache_host/git/spdlog"
["nlohmann"]="http://$local_cache_host/file/nlohmann/json/4f8fba14066156b73f1189a2b8bd568bde5284c5/single_include/nlohmann/json.hpp"
["neo4j"]="http://$local_cache_host/file/neo4j-community-5.6.0-unix.tar.gz"
["nlohmann"]="http://$local_cache_host/file/nlohmann/json/9d69186291aca4f0137b69c1dee313b391ff564c/single_include/nlohmann/json.hpp"
["neo4j"]="http://$local_cache_host/file/neo4j-community-3.2.3-unix.tar.gz"
["librdkafka"]="http://$local_cache_host/git/librdkafka.git"
["protobuf"]="http://$local_cache_host/git/protobuf.git"
["pulsar"]="http://$local_cache_host/git/pulsar.git"
["librdtsc"]="http://$local_cache_host/git/librdtsc.git"
["ctre"]="http://$local_cache_host/file/hanickadot/compile-time-regular-expressions/v3.7.2/single-header/ctre.hpp"
["absl"]="http://$local_cache_host/git/abseil-cpp.git"
["jemalloc"]="http://$local_cache_host/git/jemalloc.git"
["range-v3"]="http://$local_cache_host/git/range-v3.git"
["nuraft"]="http://$local_cache_host/git/NuRaft.git"
["asio"]="http://$local_cache_host/git/asio.git"
)
# The goal of secondary urls is to have links to the "source of truth" of
@ -145,20 +139,14 @@ declare -A secondary_urls=(
["rocksdb"]="https://github.com/facebook/rocksdb.git"
["mgclient"]="https://github.com/memgraph/mgclient.git"
["pymgclient"]="https://github.com/memgraph/pymgclient.git"
["mgconsole"]="https://github.com/memgraph/mgconsole.git"
["mgconsole"]="http://github.com/memgraph/mgconsole.git"
["spdlog"]="https://github.com/gabime/spdlog"
["nlohmann"]="https://raw.githubusercontent.com/nlohmann/json/4f8fba14066156b73f1189a2b8bd568bde5284c5/single_include/nlohmann/json.hpp"
["neo4j"]="https://dist.neo4j.org/neo4j-community-5.6.0-unix.tar.gz"
["nlohmann"]="https://raw.githubusercontent.com/nlohmann/json/9d69186291aca4f0137b69c1dee313b391ff564c/single_include/nlohmann/json.hpp"
["neo4j"]="https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/neo4j-community-3.2.3-unix.tar.gz"
["librdkafka"]="https://github.com/edenhill/librdkafka.git"
["protobuf"]="https://github.com/protocolbuffers/protobuf.git"
["pulsar"]="https://github.com/apache/pulsar.git"
["librdtsc"]="https://github.com/gabrieleara/librdtsc.git"
["ctre"]="https://raw.githubusercontent.com/hanickadot/compile-time-regular-expressions/v3.7.2/single-header/ctre.hpp"
["absl"]="https://github.com/abseil/abseil-cpp.git"
["jemalloc"]="https://github.com/jemalloc/jemalloc.git"
["range-v3"]="https://github.com/ericniebler/range-v3.git"
["nuraft"]="https://github.com/eBay/NuRaft.git"
["asio"]="https://github.com/chriskohlhoff/asio.git"
)
# antlr
@ -170,11 +158,12 @@ pushd antlr4
git apply ../antlr4.10.1.patch
popd
cppitertools_ref="v2.1" # 2021-01-15
# cppitertools v2.0 2019-12-23
cppitertools_ref="cb3635456bdb531121b82b4d2e3afc7ae1f56d47"
repo_clone_try_double "${primary_urls[cppitertools]}" "${secondary_urls[cppitertools]}" "cppitertools" "$cppitertools_ref"
# rapidcheck
rapidcheck_tag="1c91f40e64d87869250cfb610376c629307bf77d" # (2023-08-15)
rapidcheck_tag="7bc7d302191a4f3d0bf005692677126136e02f60" # (2020-05-04)
repo_clone_try_double "${primary_urls[rapidcheck]}" "${secondary_urls[rapidcheck]}" "rapidcheck" "$rapidcheck_tag"
# google benchmark
@ -182,7 +171,7 @@ benchmark_tag="v1.6.0"
repo_clone_try_double "${primary_urls[gbenchmark]}" "${secondary_urls[gbenchmark]}" "benchmark" "$benchmark_tag" true
# google test
googletest_tag="v1.14.0"
googletest_tag="release-1.12.1"
repo_clone_try_double "${primary_urls[gtest]}" "${secondary_urls[gtest]}" "googletest" "$googletest_tag" true
# libbcrypt
@ -191,9 +180,9 @@ repo_clone_try_double "${primary_urls[libbcrypt]}" "${secondary_urls[libbcrypt]}
# neo4j
file_get_try_double "${primary_urls[neo4j]}" "${secondary_urls[neo4j]}"
tar -xzf neo4j-community-5.6.0-unix.tar.gz
mv neo4j-community-5.6.0 neo4j
rm neo4j-community-5.6.0-unix.tar.gz
tar -xzf neo4j-community-3.2.3-unix.tar.gz
mv neo4j-community-3.2.3 neo4j
rm neo4j-community-3.2.3-unix.tar.gz
# nlohmann json
# We wget header instead of cloning repo since repo is huge (lots of test data).
@ -203,10 +192,10 @@ cd json
file_get_try_double "${primary_urls[nlohmann]}" "${secondary_urls[nlohmann]}"
cd ..
rocksdb_tag="v8.1.1" # (2023-04-21)
rocksdb_tag="v6.14.6" # (2020-10-14)
repo_clone_try_double "${primary_urls[rocksdb]}" "${secondary_urls[rocksdb]}" "rocksdb" "$rocksdb_tag" true
pushd rocksdb
git apply ../rocksdb8.1.1.patch
git apply ../rocksdb.patch
popd
# mgclient
@ -219,10 +208,10 @@ pymgclient_tag="4f85c179e56302d46a1e3e2cf43509db65f062b3" # (2021-01-15)
repo_clone_try_double "${primary_urls[pymgclient]}" "${secondary_urls[pymgclient]}" "pymgclient" "$pymgclient_tag"
# mgconsole
mgconsole_tag="v1.4.0" # (2023-05-21)
mgconsole_tag="v1.1.0" # (2021-10-07)
repo_clone_try_double "${primary_urls[mgconsole]}" "${secondary_urls[mgconsole]}" "mgconsole" "$mgconsole_tag" true
spdlog_tag="v1.12.0" # (2022-11-02)
spdlog_tag="v1.9.2" # (2021-08-12)
repo_clone_try_double "${primary_urls[spdlog]}" "${secondary_urls[spdlog]}" "spdlog" "$spdlog_tag" true
# librdkafka
@ -249,46 +238,3 @@ repo_clone_try_double "${primary_urls[librdtsc]}" "${secondary_urls[librdtsc]}"
pushd librdtsc
git apply ../librdtsc.patch
popd
#ctre
mkdir -p ctre
cd ctre
file_get_try_double "${primary_urls[ctre]}" "${secondary_urls[ctre]}"
cd ..
# abseil 20230125.3
absl_ref="20230125.3"
repo_clone_try_double "${primary_urls[absl]}" "${secondary_urls[absl]}" "absl" "$absl_ref"
# jemalloc ea6b3e973b477b8061e0076bb257dbd7f3faa756
JEMALLOC_COMMIT_VERSION="5.2.1"
repo_clone_try_double "${primary_urls[jemalloc]}" "${secondary_urls[jemalloc]}" "jemalloc" "$JEMALLOC_COMMIT_VERSION"
# This is a hack for cmake in libs to set the path, and for FindJemalloc to use Jemalloc_INCLUDE_DIR
pushd jemalloc
./autogen.sh
MALLOC_CONF="background_thread:true,retain:false,percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:5000,dirty_decay_ms:5000" \
./configure \
--disable-cxx \
--with-lg-page=12 \
--with-lg-hugepage=21 \
--enable-shared=no --prefix=$working_dir \
--with-malloc-conf="background_thread:true,retain:false,percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:5000,dirty_decay_ms:5000"
make -j$CPUS install
popd
#range-v3 release-0.12.0
range_v3_ref="release-0.12.0"
repo_clone_try_double "${primary_urls[range-v3]}" "${secondary_urls[range-v3]}" "rangev3" "$range_v3_ref"
# NuRaft
nuraft_tag="v2.1.0"
repo_clone_try_double "${primary_urls[nuraft]}" "${secondary_urls[nuraft]}" "nuraft" "$nuraft_tag" true
pushd nuraft
git apply ../nuraft2.1.0.patch
asio_tag="asio-1-29-0"
repo_clone_try_double "${primary_urls[asio]}" "${secondary_urls[asio]}" "asio" "$asio_tag" true
./prepare.sh
popd


@ -36,7 +36,7 @@ ADDITIONAL USE GRANT: You may use the Licensed Work in accordance with the
3. using the Licensed Work to create a work or solution
which competes (or might reasonably be expected to
compete) with the Licensed Work.
CHANGE DATE: 2028-01-21
CHANGE DATE: 2026-04-27
CHANGE LICENSE: Apache License, Version 2.0
For information about alternative licensing arrangements, please visit: https://memgraph.com/legal.


@ -2,8 +2,8 @@ MEMGRAPH
ENTERPRISE LICENCE AGREEMENT
Memgraph Limited is registered in England under registration 10195084 and has its registered office at 90a High Street,
Hertfordshire, Berkhamsted, HP4 2BL United Kingdom ("Memgraph").
Memgraph Limited is registered in England under registration 10195084 and has its registered office at Suite 4,
Ironstone House, Ironstone Way, Brixworth, Northampton, NN6 9UD (“Memgraph”).
Memgraph agrees to license and/or grant you (the “Customer”) access to the Software (as defined below) and provide


@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
https://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@ -1,218 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--- LLVM Exceptions to the Apache 2.0 License ----
As an exception, if, as a result of your compiling your source code, portions
of this Software are embedded into an Object form of such source code, you
may redistribute such embedded portions in such Object form without complying
with the conditions of Sections 4(a), 4(b) and 4(d) of the License.
In addition, if you combine or link compiled forms of this Software with
software that is licensed under the GPLv2 ("Combined Software") and if a
court of competent jurisdiction determines that the patent provision (Section
3), the indemnity provision (Section 9) or other Section of the License
conflicts with the conditions of the GPLv2, you may retroactively and
prospectively choose to deem waived or otherwise exclude such Section(s) of
the License, but only in their entirety and only with respect to the Combined
Software.


@ -1,202 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@@ -6,81 +6,32 @@ project(memgraph_query_modules)
 disallow_in_source_build()
-find_package(fmt REQUIRED)
 # Everything that is installed here, should be under the "query_modules" component.
 set(CMAKE_INSTALL_DEFAULT_COMPONENT_NAME "query_modules")
-string(TOLOWER ${CMAKE_BUILD_TYPE} lower_build_type)
-add_library(example_c SHARED example.c)
-target_include_directories(example_c PRIVATE ${CMAKE_SOURCE_DIR}/include)
-target_compile_options(example_c PRIVATE -Wall)
-target_link_libraries(example_c PRIVATE -static-libgcc -static-libstdc++)
-# Strip C example in release build.
+add_library(example SHARED example.c)
+target_include_directories(example PRIVATE ${CMAKE_SOURCE_DIR}/include)
+target_compile_options(example PRIVATE -Wall)
+# Strip the library in release build.
+string(TOLOWER ${CMAKE_BUILD_TYPE} lower_build_type)
 if (lower_build_type STREQUAL "release")
-  add_custom_command(TARGET example_c POST_BUILD
-                     COMMAND strip -s $<TARGET_FILE:example_c>
-                     COMMENT "Stripping symbols and sections from the C example module")
+  add_custom_command(TARGET example POST_BUILD
+                     COMMAND strip -s $<TARGET_FILE:example>
+                     COMMENT "Stripping symbols and sections from example module")
 endif()
-install(PROGRAMS $<TARGET_FILE:example_c>
+install(PROGRAMS $<TARGET_FILE:example>
         DESTINATION lib/memgraph/query_modules
-        RENAME example_c.so)
+        RENAME example.so)
 # Also install the source of the example, so user can read it.
 install(FILES example.c DESTINATION lib/memgraph/query_modules/src)
-add_library(example_cpp SHARED example.cpp)
-target_include_directories(example_cpp PRIVATE ${CMAKE_SOURCE_DIR}/include)
-target_compile_options(example_cpp PRIVATE -Wall)
-target_link_libraries(example_cpp PRIVATE -static-libgcc -static-libstdc++)
-# Strip C++ example in release build.
-if (lower_build_type STREQUAL "release")
-  add_custom_command(TARGET example_cpp POST_BUILD
-                     COMMAND strip -s $<TARGET_FILE:example_cpp>
-                     COMMENT "Stripping symbols and sections from the C++ example module")
-endif()
-install(PROGRAMS $<TARGET_FILE:example_cpp>
-        DESTINATION lib/memgraph/query_modules
-        RENAME example_cpp.so)
-# Also install the source of the example, so user can read it.
-install(FILES example.cpp DESTINATION lib/memgraph/query_modules/src)
-add_library(schema SHARED schema.cpp)
-target_include_directories(schema PRIVATE ${CMAKE_SOURCE_DIR}/include)
-target_compile_options(schema PRIVATE -Wall)
-target_link_libraries(schema PRIVATE -static-libgcc -static-libstdc++)
-# Strip C++ example in release build.
-if (lower_build_type STREQUAL "release")
-  add_custom_command(TARGET schema POST_BUILD
-                     COMMAND strip -s $<TARGET_FILE:schema>
-                     COMMENT "Stripping symbols and sections from the C++ schema module")
-endif()
-install(PROGRAMS $<TARGET_FILE:schema>
-        DESTINATION lib/memgraph/query_modules
-        RENAME schema.so)
-# Also install the source of the example, so user can read it.
-install(FILES schema.cpp DESTINATION lib/memgraph/query_modules/src)
-add_library(text SHARED text_search_module.cpp)
-target_include_directories(text PRIVATE ${CMAKE_SOURCE_DIR}/include)
-target_compile_options(text PRIVATE -Wall)
-target_link_libraries(text PRIVATE -static-libgcc -static-libstdc++ fmt::fmt)
-# Strip C++ example in release build.
-if (lower_build_type STREQUAL "release")
-  add_custom_command(TARGET text POST_BUILD
-                     COMMAND strip -s $<TARGET_FILE:text>
-                     COMMENT "Stripping symbols and sections from the C++ text_search module")
-endif()
-install(PROGRAMS $<TARGET_FILE:text>
-        DESTINATION lib/memgraph/query_modules
-        RENAME text.so)
-# Also install the source of the example, so user can read it.
-install(FILES text_search_module.cpp DESTINATION lib/memgraph/query_modules/src)
-# Install the Python example and modules
+# Install the Python example
 install(FILES example.py DESTINATION lib/memgraph/query_modules RENAME py_example.py)
+# Install the Python modules
 install(FILES graph_analyzer.py DESTINATION lib/memgraph/query_modules)
 install(FILES mgp_networkx.py DESTINATION lib/memgraph/query_modules)
 install(FILES nxalg.py DESTINATION lib/memgraph/query_modules)
 install(FILES wcc.py DESTINATION lib/memgraph/query_modules)
 install(FILES mgps.py DESTINATION lib/memgraph/query_modules)
-install(FILES convert.py DESTINATION lib/memgraph/query_modules)
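
Everything installed under lib/memgraph/query_modules is discovered by Memgraph at startup and exposed as a query module named after the file. A minimal sketch of verifying what got loaded, assuming a local Memgraph with default settings, the neo4j Python driver as the Bolt client, and the mg.procedures() built-in (field names here are taken on trust from Memgraph's docs):

from neo4j import GraphDatabase  # assumption: any Bolt-speaking client would do

# List every registered query-module procedure and its signature.
with GraphDatabase.driver("bolt://localhost:7687", auth=("", "")) as driver:
    with driver.session() as session:
        for record in session.run("CALL mg.procedures() YIELD name, signature"):
            print(record["name"], "-", record["signature"])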

@@ -1,10 +0,0 @@
from json import loads

import mgp


@mgp.function
def str2object(string: str) -> mgp.Any:
    if string:
        return loads(string)
    return None
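
str2object is a thin wrapper around json.loads, so when the file is installed as convert.py (per the install rule above) it surfaces as a Cypher function. A hedged sketch of calling it from Python over Bolt; the convert.str2object name and the no-auth localhost connection are assumptions:

from neo4j import GraphDatabase  # assumption: neo4j driver used as the Bolt client

with GraphDatabase.driver("bolt://localhost:7687", auth=("", "")) as driver:
    with driver.session() as session:
        # Functions (unlike procedures) are used inline inside expressions.
        obj = session.run(
            """RETURN convert.str2object('{"answer": 42}') AS obj"""
        ).single()["obj"]
        print(obj)  # {'answer': 42}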

@@ -1,127 +0,0 @@
// Copyright 2023 Memgraph Ltd.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source
// License, and you may not use this file except in compliance with the Business Source License.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.
#include <mgp.hpp>

void ProcImpl(std::vector<mgp::Value> arguments, mgp::Graph graph, mgp::RecordFactory record_factory) {
  auto record = record_factory.NewRecord();
  record.Insert("out", true);
}

void SampleReadProc(mgp_list *args, mgp_graph *memgraph_graph, mgp_result *result, mgp_memory *memory) {
  try {
    // The commented-out way of assigning the memory pointer still works, but
    // it is deprecated because of concurrency issues; use the guard instead.
    // mgp::memory = memory;
    mgp::MemoryDispatcherGuard guard(memory);
    std::vector<mgp::Value> arguments;
    for (size_t i = 0; i < mgp::list_size(args); i++) {
      auto arg = mgp::Value(mgp::list_at(args, i));
      arguments.push_back(arg);
    }
    ProcImpl(arguments, mgp::Graph(memgraph_graph), mgp::RecordFactory(result));
  } catch (const std::exception &e) {
    mgp::result_set_error_msg(result, e.what());
    return;
  }
}

void AddXNodes(mgp_list *args, mgp_graph *memgraph_graph, mgp_result *result, mgp_memory *memory) {
  // The commented-out way of assigning the memory pointer still works, but
  // it is deprecated because of concurrency issues; use the guard instead.
  // mgp::memory = memory;
  mgp::MemoryDispatcherGuard guard(memory);
  auto graph = mgp::Graph(memgraph_graph);
  std::vector<mgp::Value> arguments;
  for (size_t i = 0; i < mgp::list_size(args); i++) {
    auto arg = mgp::Value(mgp::list_at(args, i));
    arguments.push_back(arg);
  }
  for (int i = 0; i < arguments[0].ValueInt(); i++) {
    graph.CreateNode();
  }
}

void Multiply(mgp_list *args, mgp_func_context *ctx, mgp_func_result *res, mgp_memory *memory) {
  // The commented-out way of assigning the memory pointer still works, but
  // it is deprecated because of concurrency issues; use the guard instead.
  // mgp::memory = memory;
  mgp::MemoryDispatcherGuard guard(memory);
  std::vector<mgp::Value> arguments;
  for (size_t i = 0; i < mgp::list_size(args); i++) {
    auto arg = mgp::Value(mgp::list_at(args, i));
    arguments.push_back(arg);
  }
  auto result = mgp::Result(res);
  auto first = arguments[0].ValueInt();
  auto second = arguments[1].ValueInt();
  result.SetValue(first * second);
}

extern "C" int mgp_init_module(struct mgp_module *module, struct mgp_memory *memory) {
  try {
    // The commented-out way of assigning the memory pointer still works, but
    // it is deprecated because of concurrency issues; use the guard instead.
    // mgp::memory = memory;
    mgp::MemoryDispatcherGuard guard(memory);
    mgp::AddProcedure(SampleReadProc, "return_true", mgp::ProcedureType::Read,
                      {mgp::Parameter("param_1", mgp::Type::Int), mgp::Parameter("param_2", mgp::Type::Double, 2.3)},
                      {mgp::Return("out", mgp::Type::Bool)}, module, memory);
  } catch (const std::exception &e) {
    return 1;
  }
  try {
    // Same deprecation note as above; prefer the guard.
    // mgp::memory = memory;
    mgp::MemoryDispatcherGuard guard(memory);
    mgp::AddProcedure(AddXNodes, "add_x_nodes", mgp::ProcedureType::Write, {mgp::Parameter("param_1", mgp::Type::Int)},
                      {}, module, memory);
  } catch (const std::exception &e) {
    return 1;
  }
  try {
    // Same deprecation note as above; prefer the guard.
    // mgp::memory = memory;
    mgp::MemoryDispatcherGuard guard(memory);
    mgp::AddFunction(Multiply, "multiply",
                     {mgp::Parameter("int", mgp::Type::Int), mgp::Parameter("int", mgp::Type::Int, (int64_t)3)}, module,
                     memory);
  } catch (const std::exception &e) {
    return 1;
  }
  return 0;
}

extern "C" int mgp_shutdown_module() { return 0; }

@@ -1,8 +0,0 @@
import mgp


@mgp.read_proc
def components(
    context: mgp.ProcCtx,
) -> mgp.Record(versions=list, edition=str, name=str):
    return mgp.Record(versions=["5.9.0"], edition="community", name="Memgraph")
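
And the same pattern for this read procedure; assuming the file ships as mgps.py (matching the install rules earlier), it would be callable as mgps.components:

from neo4j import GraphDatabase  # assumption: neo4j driver used as the Bolt client

with GraphDatabase.driver("bolt://localhost:7687", auth=("", "")) as driver:
    with driver.session() as session:
        record = session.run(
            "CALL mgps.components() YIELD versions, edition, name "
            "RETURN versions, edition, name"
        ).single()
        print(record["name"], record["edition"], record["versions"])  # Memgraph community ['5.9.0']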
