Compare commits: master ... add-manual (798 commits)
[Commit table: 798 commits listed by bare SHA only, from ebb7b9c61d through 21870a0e7e; the Author and Date columns were empty in the capture.]
.clang-format

@@ -1,7 +1,6 @@
---
BasedOnStyle: Google
---
Language: Cpp
BasedOnStyle: Google
Standard: "c++20"
UseTab: Never
DerivePointerAlignment: false
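The configured style can be checked locally before committing. A minimal sketch (the file path is hypothetical; `--dry-run` and `--Werror` require clang-format 10 or newer):

```bash
# Report style violations without modifying the file; non-zero exit on any diff.
clang-format --dry-run --Werror src/query/interpreter.cpp

# Or apply the configured style in place.
clang-format -i src/query/interpreter.cpp
```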
.clang-tidy

@@ -6,7 +6,7 @@ Checks: '*,
-altera-unroll-loops,
-android-*,
-cert-err58-cpp,
-cppcoreguidelines-avoid-do-while,
-cert-str34-c,
-cppcoreguidelines-avoid-c-arrays,
-cppcoreguidelines-avoid-goto,
-cppcoreguidelines-avoid-magic-numbers,
@@ -50,6 +50,7 @@ Checks: '*,
-misc-non-private-member-variables-in-classes,
-modernize-avoid-c-arrays,
-modernize-concat-nested-namespaces,
-modernize-loop-convert,
-modernize-pass-by-value,
-modernize-use-equals-default,
-modernize-use-nodiscard,
@@ -61,11 +62,10 @@ Checks: '*,
-readability-implicit-bool-conversion,
-readability-magic-numbers,
-readability-named-parameter,
-readability-identifier-length,
-misc-no-recursion,
-concurrency-mt-unsafe,
-bugprone-easily-swappable-parameters,
-bugprone-unchecked-optional-access'
-bugprone-easily-swappable-parameters'

WarningsAsErrors: ''
HeaderFilterRegex: 'src/.*'
AnalyzeTemporaryDtors: false
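Note the leading `-` on each entry is part of the `Checks` string (it disables a check), not a diff marker: the config enables everything with `*` and then opts out check by check. A minimal local invocation against this config might look like the following (file path hypothetical; `-p` must point at a directory containing `compile_commands.json`):

```bash
# clang-tidy picks up the nearest .clang-tidy file automatically.
clang-tidy -p build src/utils/timer.cpp

# Show which checks remain enabled after the '-...' exclusions.
clang-tidy -p build --list-checks src/utils/timer.cpp
```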
@@ -33,4 +33,4 @@ for file in $modified_files; do
  fi
done;

exit ${FAIL}
return ${FAIL}
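The functional change in this hunk is the last line: `exit` terminates the whole process, which is correct for a standalone script, while `return` is only valid inside a function or a sourced file. A sketch of the surrounding pattern, with the per-file check body assumed (the real check sits outside the hunk):

```bash
#!/bin/bash
FAIL=0
modified_files=$(git diff --cached --name-only)
for file in $modified_files; do
  # Hypothetical per-file check; the script's actual test is not shown above.
  if ! clang-format --dry-run --Werror "$file" >/dev/null 2>&1; then
    echo "check failed: $file"
    FAIL=1
  fi
done

# `return ${FAIL}` here would error with "can only be used in a function or
# sourced script" when the file is executed directly rather than sourced.
exit ${FAIL}
```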
.github/ISSUE_TEMPLATE/bug_report.md (17 changes, vendored)
@@ -1,17 +1,19 @@
---
name: Bug report
about: Create a report to help us improve
title: ""
title: "[BUG] "
labels: bug
assignees: gitbuda, antonio2368

---


**Memgraph version**
Which version did you use?

**Environment**
Some information about the environment you are using Memgraph on: operating
system, architecture (ARM, x86), how do you connect, with or without docker,
which driver etc.
system, how do you connect, with or without docker, which driver etc.

**Describe the bug**
A clear and concise description of what the bug is.
@@ -20,7 +22,6 @@ A clear and concise description of what the bug is.
Steps to reproduce the behavior:
1. Run the following query '...'
2. Click on '....'
3. ... IDEALLY: link to the workload info (DATASET & QUERIES) ...

**Expected behavior**
A clear and concise description of what you expected to happen.
@@ -31,11 +32,3 @@ your problem.

**Additional context**
Add any other context about the problem here.

**Verification Environment**
Once we fix it, what do you need to verify the fix?
Do you need:
* Plain memgraph package -> for which Linux?
* Plain memgraph Docker image?
* Which architecture do you use ARM | x86?
* Full Memgraph platform?
.github/pull_request_template.md (25 changes, vendored)
@@ -1,28 +1,11 @@
### Description

Please briefly explain the changes you made here.


Please delete either the [master < EPIC] or [master < Task] part, depending on what are your needs.

[master < Epic] PR
- [ ] Check, and update documentation if necessary
- [ ] Update [changelog](https://docs.memgraph.com/memgraph/changelog)
- [ ] Write E2E tests
- [ ] Compare the [benchmarking results](https://bench-graph.memgraph.com/) between the master branch and the Epic branch
- [ ] Provide the full content or a guide for the final git message
- [FINAL GIT MESSAGE]

[master < Task] PR
- [ ] Check, and update documentation if necessary
- [ ] Update [changelog](https://docs.memgraph.com/memgraph/changelog)
- [ ] Provide the full content or a guide for the final git message
- **[FINAL GIT MESSAGE]**


### Documentation checklist
- [ ] Add the documentation label tag
- [ ] Add the bug / feature label tag
- [ ] Add the milestone for which this feature is intended
- If not known, set for a later milestone
- [ ] Write a release note, including added/changed clauses
- **[Release note text]**
- [ ] Link the documentation PR here
- **[Documentation PR link]**
- [ ] Tag someone from docs team in the comments
.github/workflows/daily_benchmark.yaml (34 changes, vendored)
@@ -3,7 +3,7 @@ name: Daily Benchmark
on:
workflow_dispatch:
schedule:
- cron: "0 22 * * *"
- cron: "0 1 * * *"

jobs:
release_benchmarks:
@@ -16,7 +16,7 @@ jobs:

steps:
- name: Set up repository
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@@ -59,7 +59,7 @@ jobs:
source ve3/bin/activate
pip install -r requirements.txt
./main.py --benchmark-name "macro_benchmark" \
--benchmark-results "../../tests/macro_benchmark/.harness_summary" \
--benchmark-results-path "../../tests/macro_benchmark/.harness_summary" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
@@ -67,13 +67,7 @@
- name: Run mgbench
run: |
cd tests/mgbench
./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results benchmark_pokec.json pokec/medium/*/*

./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_supernode.json supernode

./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_high_write_set_property.json high_write_set_property

./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results cartesian.json cartesian
./benchmark.py --num-workers-for-benchmark 12 --export-results benchmark_result.json pokec/medium/*/*

- name: Upload mgbench results
run: |
@@ -82,25 +76,7 @@
source ve3/bin/activate
pip install -r requirements.txt
./main.py --benchmark-name "mgbench" \
--benchmark-results "../../tests/mgbench/benchmark_pokec.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"

./main.py --benchmark-name "supernode" \
--benchmark-results "../../tests/mgbench/benchmark_supernode.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"

./main.py --benchmark-name "high_write_set_property" \
--benchmark-results "../../tests/mgbench/benchmark_high_write_set_property.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"

./main.py --benchmark-name "cartesian" \
--benchmark-results "../../tests/mgbench/cartesian.json" \
--benchmark-results-path "../../tests/mgbench/benchmark_result.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
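The substantive changes here are the consolidation of several mgbench runs and the bench-graph client flag rename from `--benchmark-results` to `--benchmark-results-path`. Reproducing one run-and-upload cycle locally would look roughly like this (run id/number and branch name are placeholder values; paths as in the workflow):

```bash
cd tests/mgbench
./benchmark.py vendor-native --num-workers-for-benchmark 12 \
    --export-results benchmark_pokec.json "pokec/medium/*/*"

cd ../../tools/bench-graph-client
virtualenv -p python3 ve3
source ve3/bin/activate
pip install -r requirements.txt
./main.py --benchmark-name "mgbench" \
    --benchmark-results "../../tests/mgbench/benchmark_pokec.json" \
    --github-run-id "0" \
    --github-run-number "0" \
    --head-branch-name "local-test"
```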
.github/workflows/diff.yaml (654 changes, vendored)
@@ -14,97 +14,62 @@ on:
- "**/*.md"
- ".clang-format"
- "CODEOWNERS"
- "licenses/*"

jobs:
community_build:
name: "Community build"
runs-on: [self-hosted, Linux, X64, DockerMgBuild]
timeout-minutes: 60
runs-on: [self-hosted, Linux, X64, Diff]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-11
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: RelWithDebInfo

steps:
- name: Set up repository
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0

- name: Spin up mgbuild container
- name: Build community binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run
# Activate toolchain.
source /opt/toolchain-v4/activate

- name: Build release binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph --community
# Initialize dependencies.
./init

# Build community binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=release -DMG_ENTERPRISE=OFF ..
make -j$THREADS

- name: Run unit tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph unit
# Activate toolchain.
source /opt/toolchain-v4/activate

- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
# Run unit tests.
cd build
ctest -R memgraph__unit --output-on-failure -j$THREADS
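On the master side, every job follows the same container lifecycle: `run` spins up the mgbuild container, `build-memgraph`/`test-memgraph` execute inside it, and a final `stop --remove` (guarded by `if: always()`) tears it down. Condensed into one shell sequence, using only flags that appear above (the license and organization values come from the secrets-backed env vars):

```bash
# Sketch of the mgbuild container flow the new steps implement.
MGBUILD="./release/package/mgbuild.sh --toolchain v5 --os debian-11 --arch amd"
$MGBUILD run                                    # spin up the build container
$MGBUILD --build-type RelWithDebInfo --threads 24 build-memgraph --community
$MGBUILD --threads 24 \
    --enterprise-license "$MEMGRAPH_ENTERPRISE_LICENSE" \
    --organization-name "$MEMGRAPH_ORGANIZATION_NAME" \
    test-memgraph unit
$MGBUILD stop --remove                          # always clean up, as in the workflow
```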
code_analysis:
name: "Code analysis"
runs-on: [self-hosted, Linux, X64, DockerMgBuild]
timeout-minutes: 60
runs-on: [self-hosted, Linux, X64, Diff]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-11
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: Debug

steps:
- name: Set up repository
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0

- name: Spin up mgbuild container
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run

# This is also needed if we want do to comparison against other branches
# See https://github.community/t/checkout-code-fails-when-it-runs-lerna-run-test-since-master/17920
- name: Fetch all history for all tags and branches
@@ -112,13 +77,10 @@ jobs:

- name: Initialize deps
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph --init-only
# Activate toolchain.
source /opt/toolchain-v4/activate
# Initialize dependencies.
./init

- name: Set base branch
if: ${{ github.event_name == 'pull_request' }}
@@ -132,473 +94,188 @@ jobs:

- name: Python code analysis
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph code-analysis --base-branch "${{ env.BASE_BRANCH }}"
CHANGED_FILES=$(git diff -U0 ${{ env.BASE_BRANCH }}... --name-only)
for file in ${CHANGED_FILES}; do
echo ${file}
if [[ ${file} == *.py ]]; then
python3 -m black --check --diff ${file}
python3 -m isort --check-only --profile "black" --diff ${file}
fi
done

- name: Build combined ASAN, UBSAN and coverage binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph --coverage --asan --ubsan
# Activate toolchain.
source /opt/toolchain-v4/activate

cd build
cmake -DTEST_COVERAGE=ON -DASAN=ON -DUBSAN=ON ..
make -j$THREADS memgraph__unit

- name: Run unit tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph unit-coverage
# Activate toolchain.
source /opt/toolchain-v4/activate

# Run unit tests. It is restricted to 2 threads intentionally, because higher concurrency makes the timing related tests unstable.
cd build
LSAN_OPTIONS=suppressions=$PWD/../tools/lsan.supp UBSAN_OPTIONS=halt_on_error=1 ctest -R memgraph__unit --output-on-failure -j2

- name: Compute code coverage
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph code-coverage
# Activate toolchain.
source /opt/toolchain-v4/activate

# Compute code coverage.
cd tools/github
./coverage_convert

# Package code coverage.
cd generated
tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu

- name: Save code coverage
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: "Code coverage(Code analysis)"
name: "Code coverage"
path: tools/github/generated/code_coverage.tar.gz

- name: Set base branch
if: ${{ github.event_name == 'pull_request' }}
run: |
echo "BASE_BRANCH=origin/${{ github.base_ref }}" >> $GITHUB_ENV

- name: Set base branch # if we manually dispatch or push to master
if: ${{ github.event_name != 'pull_request' }}
run: |
echo "BASE_BRANCH=origin/master" >> $GITHUB_ENV

- name: Run clang-tidy
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph clang-tidy --base-branch "${{ env.BASE_BRANCH }}"
source /opt/toolchain-v4/activate

- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
# Restrict clang-tidy results only to the modified parts
git diff -U0 ${{ env.BASE_BRANCH }}... -- src | ./tools/github/clang-tidy/clang-tidy-diff.py -p 1 -j $THREADS -extra-arg="-DMG_CLANG_TIDY_CHECK" -path build | tee ./build/clang_tidy_output.txt

# Fail if any warning is reported
! cat ./build/clang_tidy_output.txt | ./tools/github/clang-tidy/grep_error_lines.sh > /dev/null
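The leading `!` on the last line does the real work in that step: `grep_error_lines.sh` exits zero when it finds warnings, so negating it makes the command (and a CI step ending with it) fail exactly when warnings exist. The same idiom in isolation, with a hypothetical pattern standing in for the helper script:

```bash
# grep exits 0 on a match; the leading `!` inverts that, so this line
# fails precisely when the log contains a warning or error.
! grep -E "(warning|error):" clang_tidy_output.txt > /dev/null
```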
debug_build:
name: "Debug build"
runs-on: [self-hosted, Linux, X64, DockerMgBuild]
timeout-minutes: 100
runs-on: [self-hosted, Linux, X64, Diff]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-11
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: Debug

steps:
- name: Set up repository
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0

- name: Spin up mgbuild container
- name: Build debug binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run
# Activate toolchain.
source /opt/toolchain-v4/activate

- name: Build release binaries
# Initialize dependencies.
./init

# Build debug binaries.
cd build
cmake ..
make -j$THREADS

- name: Run simulation tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph
# Activate toolchain.
source /opt/toolchain-v4/activate

- name: Run leftover CTest tests
# Run simulation tests.
cd build
ctest -R memgraph__simulation --output-on-failure -j$THREADS

- name: Run single benchmark test
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph leftover-CTest
# Activate toolchain.
source /opt/toolchain-v4/activate

- name: Run drivers tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph drivers

- name: Run HA driver tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph drivers-high-availability

- name: Run integration tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph integration

- name: Run cppcheck and clang-format
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph cppcheck-and-clang-format

- name: Save cppcheck and clang-format errors
uses: actions/upload-artifact@v4
with:
name: "Code coverage(Debug build)"
path: tools/github/cppcheck_and_clang_format.txt

- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
# Run simulation tests.
cd tests/mgbench
./benchmark.py accesscontrol/small --num-workers-for-import 1 --test-system-arg "split-file splitfiles/accesscontrol_small.shard_configuration bolt-num-workers 1"
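All of the CTest-based steps in these jobs select suites by regex with `-R`. To reproduce a single CI suite locally, the same selection works from the build directory (a sketch; test names as they appear in the workflow above):

```bash
cd build
# List which tests the regex would select, without running them.
ctest -N -R memgraph__simulation
# Run them, echoing output only for failures.
ctest -R memgraph__simulation --output-on-failure -j"$(nproc)"
```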
release_build:
name: "Release build"
runs-on: [self-hosted, Linux, X64, DockerMgBuild]
timeout-minutes: 100
runs-on: [self-hosted, Linux, X64, Diff]
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-11
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: Release

steps:
- name: Set up repository
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0

- name: Spin up mgbuild container
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run

- name: Build release binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph
# Activate toolchain.
source /opt/toolchain-v4/activate

- name: Run GQL Behave tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph gql-behave
# Initialize dependencies.
./init

- name: Save quality assurance status
uses: actions/upload-artifact@v4
with:
name: "GQL Behave Status"
path: |
tests/gql_behave/gql_behave_status.csv
tests/gql_behave/gql_behave_status.html
# Build release binaries.
cd build
cmake -DCMAKE_BUILD_TYPE=release ..
make -j$THREADS

- name: Run unit tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--threads $THREADS \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph unit
# Activate toolchain.
source /opt/toolchain-v4/activate

# This step will be skipped because the e2e stream tests have been disabled
# We need to fix this as soon as possible
- name: Ensure Kafka and Pulsar are up
if: false
# Run unit tests.
cd build
ctest -R memgraph__unit --output-on-failure -j$THREADS

- name: Run simulation tests
run: |
cd tests/e2e/streams/kafka
docker-compose up -d
cd ../pulsar
docker-compose up -d
# Activate toolchain.
source /opt/toolchain-v4/activate

# Run simulation tests.
cd build
ctest -R memgraph__simulation --output-on-failure -j$THREADS

- name: Run single benchmark test
run: |
# Activate toolchain.
source /opt/toolchain-v4/activate

# Run simulation tests.
cd tests/mgbench
./benchmark.py accesscontrol/small --num-workers-for-import 1 --test-system-arg "split-file splitfiles/accesscontrol_small.shard_configuration bolt-num-workers 1"

- name: Run e2e tests
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph e2e
# TODO(gitbuda): Setup mgclient and pymgclient properly.
cd tests
./setup.sh
source ve3/bin/activate
cd e2e
LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory ./distributed_queries

# Same as two steps prior
- name: Ensure Kafka and Pulsar are down
if: false
- name: Run query performance tests
run: |
cd tests/e2e/streams/kafka
docker-compose down
cd ../pulsar
docker-compose down

- name: Run stress test (plain)
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph stress-plain

- name: Run stress test (SSL)
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph stress-ssl

- name: Run durability test
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph durability

- name: Create enterprise DEB package
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
package-memgraph

./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
copy --package

- name: Save enterprise DEB package
uses: actions/upload-artifact@v4
with:
name: "Enterprise DEB package"
path: build/output/${{ env.OS }}/memgraph*.deb

- name: Copy build logs
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
copy --build-logs

- name: Save test data
uses: actions/upload-artifact@v4
if: always()
with:
name: "Test data(Release build)"
path: build/logs

- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
release_jepsen_test:
name: "Release Jepsen Test"
runs-on: [self-hosted, Linux, X64, DockerMgBuild]
timeout-minutes: 80
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-12
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: RelWithDebInfo

steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0

- name: Spin up mgbuild container
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run

- name: Build release binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph

- name: Copy memgraph binary
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
copy --binary

- name: Refresh Jepsen Cluster
run: |
cd tests/jepsen
./run.sh cluster-refresh

- name: Run Jepsen tests
run: |
cd tests/jepsen
./run.sh test-all-individually --binary ../../build/memgraph --ignore-run-stdout-logs --ignore-run-stderr-logs

- name: Save Jepsen report
uses: actions/upload-artifact@v4
if: ${{ always() }}
with:
name: "Jepsen Report"
path: tests/jepsen/Jepsen.tar.gz

- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
release_benchmarks:
name: "Release benchmarks"
runs-on: [self-hosted, Linux, X64, DockerMgBuild, Gen7]
timeout-minutes: 60
env:
THREADS: 24
MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
OS: debian-11
TOOLCHAIN: v5
ARCH: amd
BUILD_TYPE: Release

steps:
- name: Set up repository
uses: actions/checkout@v4
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
fetch-depth: 0

- name: Spin up mgbuild container
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
run

- name: Build release binaries
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--build-type $BUILD_TYPE \
--threads $THREADS \
build-memgraph

- name: Run macro benchmarks
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph macro-benchmark
cd tests/manual
./query_performance_runner.py

- name: Get branch name (merge)
if: github.event_name != 'pull_request'
@@ -612,49 +289,12 @@ jobs:

- name: Upload macro benchmark results
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph upload-to-bench-graph \
--benchmark-name "macro_benchmark" \
--benchmark-results "../../tests/macro_benchmark/.harness_summary" \
--github-run-id ${{ github.run_id }} \
--github-run-number ${{ github.run_number }} \
--head-branch-name ${{ env.BRANCH_NAME }}

- name: Run mgbench
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph mgbench

- name: Upload mgbench results
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
--enterprise-license $MEMGRAPH_ENTERPRISE_LICENSE \
--organization-name $MEMGRAPH_ORGANIZATION_NAME \
test-memgraph upload-to-bench-graph \
--benchmark-name "mgbench" \
--benchmark-results "../../tests/mgbench/benchmark_result.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"

- name: Stop mgbuild container
if: always()
run: |
./release/package/mgbuild.sh \
--toolchain $TOOLCHAIN \
--os $OS \
--arch $ARCH \
stop --remove
cd tools/bench-graph-client
virtualenv -p python3 ve3
source ve3/bin/activate
pip install -r requirements.txt
./main.py --benchmark-name "query_performance" \
--benchmark-results-path "../../build/tests/manual/query_performance_benchmark/summary.json" \
--github-run-id "${{ github.run_id }}" \
--github-run-number "${{ github.run_number }}" \
--head-branch-name "${{ env.BRANCH_NAME }}"
.github/workflows/full_clang_tidy.yaml (4 changes, vendored)
@@ -14,7 +14,7 @@ jobs:

steps:
- name: Set up repository
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
# Number of commits to fetch. `0` indicates all history for all
# branches and tags. (default: 1)
@@ -39,7 +39,7 @@ jobs:
source /opt/toolchain-v4/activate

# The results are also written to standard output in order to retain them in the logs
./tools/github/clang-tidy/run-clang-tidy.py -p build -j $THREADS -clang-tidy-binary=/opt/toolchain-v4/bin/clang-tidy "$PWD/src/*" |
./tools/github/clang-tidy/run-clang-tidy.py -p build -j $THREADS -extra-arg="-DMG_CLANG_TIDY_CHECK" -clang-tidy-binary=/opt/toolchain-v4/bin/clang-tidy "$PWD/src/*" |
tee ./build/full_clang_tidy_output.txt

- name: Summarize clang-tidy results
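The only functional edit here is threading `-extra-arg="-DMG_CLANG_TIDY_CHECK"` into the clang-tidy invocation, which defines a preprocessor symbol the sources can use to detect an analysis build. The standalone equivalent for a single file would be (file path hypothetical):

```bash
# -extra-arg appends one flag to each compile command from the database.
clang-tidy -p build -extra-arg=-DMG_CLANG_TIDY_CHECK src/utils/timer.cpp
```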
.github/workflows/package_all.yaml (178 lines, new file, vendored)
@@ -0,0 +1,178 @@
name: Package All

# TODO(gitbuda): Cleanup docker container if GHA job was canceled.

on: workflow_dispatch

jobs:
centos-7:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-7
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: centos-7
path: build/output/centos-7/memgraph*.rpm

centos-9:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-9
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: centos-9
path: build/output/centos-9/memgraph*.rpm

debian-10:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-10
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: debian-10
path: build/output/debian-10/memgraph*.deb

debian-11:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: debian-11
path: build/output/debian-11/memgraph*.deb

docker:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
cd release/package
./run.sh package debian-11 --for-docker
./run.sh docker
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: docker
path: build/output/docker/memgraph*.tar.gz

ubuntu-1804:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-18.04
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: ubuntu-1804
path: build/output/ubuntu-18.04/memgraph*.deb

ubuntu-2004:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-20.04
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: ubuntu-2004
path: build/output/ubuntu-20.04/memgraph*.deb

ubuntu-2204:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package ubuntu-22.04
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: ubuntu-2204
path: build/output/ubuntu-22.04/memgraph*.deb

debian-11-platform:
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11 --for-platform
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: debian-11-platform
path: build/output/debian-11/memgraph*.deb

debian-11-arm:
runs-on: [self-hosted, DockerMgBuild, ARM64, strange]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v3
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11-arm
- name: "Upload package"
uses: actions/upload-artifact@v3
with:
name: debian-11-arm
path: build/output/debian-11-arm/memgraph*.deb
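Each job is the same four steps with a different distro argument, so any single package can be reproduced locally with the commands the jobs run (artifacts land under `build/output/<os>/`); the Docker variant chains two invocations:

```bash
# One distro package:
./release/package/run.sh package debian-11

# Docker image: package for Debian 11 first, then build the image.
cd release/package
./run.sh package debian-11 --for-docker
./run.sh docker
```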
.github/workflows/package_memgraph.yaml (295 changes, vendored)
@@ -1,295 +0,0 @@
name: Package memgraph

# TODO(gitbuda): Cleanup docker container if GHA job was canceled.

on:
workflow_dispatch:
inputs:
memgraph_version:
description: "Memgraph version to upload as. Leave this field empty if you don't want to upload binaries to S3. Format: 'X.Y.Z'"
required: false
build_type:
type: choice
description: "Memgraph Build type. Default value is Release"
default: 'Release'
options:
- Release
- RelWithDebInfo
target_os:
type: choice
description: "Target OS for which memgraph will be packaged. Select 'all' if you want to package for every listed OS. Default is Ubuntu 22.04"
default: 'ubuntu-22_04'
options:
- all
- amzn-2
- centos-7
- centos-9
- debian-10
- debian-11
- debian-11-arm
- debian-11-platform
- docker
- fedora-36
- ubuntu-18_04
- ubuntu-20_04
- ubuntu-22_04
- ubuntu-22_04-arm

jobs:
amzn-2:
if: ${{ github.event.inputs.target_os == 'amzn-2' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package amzn-2 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: amzn-2
path: build/output/amzn-2/memgraph*.rpm

centos-7:
if: ${{ github.event.inputs.target_os == 'centos-7' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-7 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: centos-7
path: build/output/centos-7/memgraph*.rpm

centos-9:
if: ${{ github.event.inputs.target_os == 'centos-9' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package centos-9 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: centos-9
path: build/output/centos-9/memgraph*.rpm

debian-10:
if: ${{ github.event.inputs.target_os == 'debian-10' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-10 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-10
path: build/output/debian-10/memgraph*.deb

debian-11:
if: ${{ github.event.inputs.target_os == 'debian-11' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11
path: build/output/debian-11/memgraph*.deb

debian-11-arm:
if: ${{ github.event.inputs.target_os == 'debian-11-arm' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, ARM64, strange]
timeout-minutes: 120
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11-arm ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11-aarch64
path: build/output/debian-11-arm/memgraph*.deb

debian-11-platform:
if: ${{ github.event.inputs.target_os == 'debian-11-platform' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package debian-11 ${{ github.event.inputs.build_type }} --for-platform
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: debian-11-platform
path: build/output/debian-11/memgraph*.deb

docker:
if: ${{ github.event.inputs.target_os == 'docker' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
cd release/package
./run.sh package debian-11 ${{ github.event.inputs.build_type }} --for-docker
./run.sh docker
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: docker
path: build/output/docker/memgraph*.tar.gz

fedora-36:
if: ${{ github.event.inputs.target_os == 'fedora-36' || github.event.inputs.target_os == 'all' }}
runs-on: [self-hosted, DockerMgBuild, X64]
timeout-minutes: 60
steps:
- name: "Set up repository"
uses: actions/checkout@v4
with:
fetch-depth: 0 # Required because of release/get_version.py
- name: "Build package"
run: |
./release/package/run.sh package fedora-36 ${{ github.event.inputs.build_type }}
- name: "Upload package"
uses: actions/upload-artifact@v4
with:
name: fedora-36
path: build/output/fedora-36/memgraph*.rpm
|
||||
|
||||
ubuntu-18_04:
|
||||
if: ${{ github.event.inputs.target_os == 'ubuntu-18_04' || github.event.inputs.target_os == 'all' }}
|
||||
runs-on: [self-hosted, DockerMgBuild, X64]
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
- name: "Set up repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Required because of release/get_version.py
|
||||
- name: "Build package"
|
||||
run: |
|
||||
./release/package/run.sh package ubuntu-18.04 ${{ github.event.inputs.build_type }}
|
||||
- name: "Upload package"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ubuntu-18.04
|
||||
path: build/output/ubuntu-18.04/memgraph*.deb
|
||||
|
||||
ubuntu-20_04:
|
||||
if: ${{ github.event.inputs.target_os == 'ubuntu-20_04' || github.event.inputs.target_os == 'all' }}
|
||||
runs-on: [self-hosted, DockerMgBuild, X64]
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
- name: "Set up repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Required because of release/get_version.py
|
||||
- name: "Build package"
|
||||
run: |
|
||||
./release/package/run.sh package ubuntu-20.04 ${{ github.event.inputs.build_type }}
|
||||
- name: "Upload package"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ubuntu-20.04
|
||||
path: build/output/ubuntu-20.04/memgraph*.deb
|
||||
|
||||
ubuntu-22_04:
|
||||
if: ${{ github.event.inputs.target_os == 'ubuntu-22_04' || github.event.inputs.target_os == 'all' }}
|
||||
runs-on: [self-hosted, DockerMgBuild, X64]
|
||||
timeout-minutes: 60
|
||||
steps:
|
||||
- name: "Set up repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Required because of release/get_version.py
|
||||
- name: "Build package"
|
||||
run: |
|
||||
./release/package/run.sh package ubuntu-22.04 ${{ github.event.inputs.build_type }}
|
||||
- name: "Upload package"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ubuntu-22.04
|
||||
path: build/output/ubuntu-22.04/memgraph*.deb
|
||||
|
||||
ubuntu-22_04-arm:
|
||||
if: ${{ github.event.inputs.target_os == 'ubuntu-22_04-arm' || github.event.inputs.target_os == 'all' }}
|
||||
runs-on: [self-hosted, DockerMgBuild, ARM64, strange]
|
||||
timeout-minutes: 120
|
||||
steps:
|
||||
- name: "Set up repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Required because of release/get_version.py
|
||||
- name: "Build package"
|
||||
run: |
|
||||
./release/package/run.sh package ubuntu-22.04-arm ${{ github.event.inputs.build_type }}
|
||||
- name: "Upload package"
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ubuntu-22.04-aarch64
|
||||
path: build/output/ubuntu-22.04-arm/memgraph*.deb
|
||||
|
||||
upload-to-s3:
|
||||
# only run upload if we specified version. Allows for runs without upload
|
||||
if: "${{ github.event.inputs.memgraph_version != '' }}"
|
||||
needs: [amzn-2, centos-7, centos-9, debian-10, debian-11, debian-11-arm, debian-11-platform, docker, fedora-36, ubuntu-18_04, ubuntu-20_04, ubuntu-22_04, ubuntu-22_04-arm]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
# name: # if name input parameter is not provided, all artifacts are downloaded
|
||||
# and put in directories named after each one.
|
||||
path: build/output/release
|
||||
- name: Upload to S3
|
||||
uses: jakejarvis/s3-sync-action@v0.5.1
|
||||
env:
|
||||
AWS_S3_BUCKET: "download.memgraph.com"
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
|
||||
AWS_REGION: "eu-west-1"
|
||||
SOURCE_DIR: "build/output/release"
|
||||
DEST_DIR: "memgraph/v${{ github.event.inputs.memgraph_version }}/"
|
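The thirteen per-OS jobs above are copies of one four-step pattern (checkout, run.sh package <os>, upload-artifact) that differ only in the OS label, runner labels, artifact name, and package extension. The same shape can be expressed once with a build matrix; the sketch below is an illustration under assumptions, not the file's actual structure: it assumes run.sh accepts the matrix value verbatim, glosses over the underscore-vs-dot naming of the inputs (ubuntu-22_04 vs ubuntu-22.04), and leaves out the ARM and docker variants, which need different runner labels and extra flags.

jobs:
  package:
    # Job-level `if` is evaluated per matrix combination, so 'all' selects every OS.
    if: ${{ github.event.inputs.target_os == matrix.target_os || github.event.inputs.target_os == 'all' }}
    strategy:
      fail-fast: false
      matrix:
        target_os: [amzn-2, centos-7, centos-9, debian-10, debian-11, fedora-36, ubuntu-18.04, ubuntu-20.04, ubuntu-22.04]
    runs-on: [self-hosted, DockerMgBuild, X64]
    timeout-minutes: 60
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Required because of release/get_version.py
      - name: "Build package"
        run: ./release/package/run.sh package ${{ matrix.target_os }} ${{ github.event.inputs.build_type }}
      - name: "Upload package"
        uses: actions/upload-artifact@v4
        with:
          name: ${{ matrix.target_os }}
          path: build/output/${{ matrix.target_os }}/memgraph*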
85  .github/workflows/performance_benchmarks.yaml  vendored
@@ -1,85 +0,0 @@
name: Run performance benchmarks manually

on:
  workflow_dispatch:

jobs:
  performance_benchmarks:
    name: "Performance benchmarks"
    runs-on: [self-hosted, Linux, X64, Diff, Gen7]
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build release binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Initialize dependencies.
          ./init

          # Build only memgraph release binaries.
          cd build
          cmake -DCMAKE_BUILD_TYPE=release ..
          make -j$THREADS

      - name: Get branch name (merge)
        if: github.event_name != 'pull_request'
        shell: bash
        run: echo "BRANCH_NAME=$(echo ${GITHUB_REF#refs/heads/} | tr / -)" >> $GITHUB_ENV

      - name: Get branch name (pull request)
        if: github.event_name == 'pull_request'
        shell: bash
        run: echo "BRANCH_NAME=$(echo ${GITHUB_HEAD_REF} | tr / -)" >> $GITHUB_ENV

      - name: Run benchmarks
        run: |
          cd tests/mgbench
          ./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results benchmark_result.json pokec/medium/*/*

          ./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_supernode.json supernode

          ./benchmark.py vendor-native --num-workers-for-benchmark 1 --export-results benchmark_high_write_set_property.json high_write_set_property

          ./benchmark.py vendor-native --num-workers-for-benchmark 12 --export-results benchmark_cartesian.json cartesian

      - name: Upload benchmark results
        run: |
          cd tools/bench-graph-client
          virtualenv -p python3 ve3
          source ve3/bin/activate
          pip install -r requirements.txt
          ./main.py --benchmark-name "mgbench" \
                    --benchmark-results "../../tests/mgbench/benchmark_result.json" \
                    --github-run-id "${{ github.run_id }}" \
                    --github-run-number "${{ github.run_number }}" \
                    --head-branch-name "${{ env.BRANCH_NAME }}"

          ./main.py --benchmark-name "supernode" \
                    --benchmark-results "../../tests/mgbench/benchmark_supernode.json" \
                    --github-run-id "${{ github.run_id }}" \
                    --github-run-number "${{ github.run_number }}" \
                    --head-branch-name "${{ env.BRANCH_NAME }}"

          ./main.py --benchmark-name "high_write_set_property" \
                    --benchmark-results "../../tests/mgbench/benchmark_high_write_set_property.json" \
                    --github-run-id "${{ github.run_id }}" \
                    --github-run-number "${{ github.run_number }}" \
                    --head-branch-name "${{ env.BRANCH_NAME }}"

          ./main.py --benchmark-name "cartesian" \
                    --benchmark-results "../../tests/mgbench/cartesian.json" \
                    --github-run-id "${{ github.run_id }}" \
                    --github-run-number "${{ github.run_number }}" \
                    --head-branch-name "${{ env.BRANCH_NAME }}"
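A note on the two "Get branch name" steps above: on push events the branch arrives as refs/heads/<name> in GITHUB_REF, while on pull requests the head branch is in GITHUB_HEAD_REF, and in both cases / is rewritten to - so the value is safe to embed in file names and benchmark labels. A minimal single-step sketch of the same normalization, assuming a runner version new enough to expose github.ref_name:

      - name: Get branch name
        shell: bash
        run: |
          # github.head_ref is only set on pull_request events; fall back to the push ref.
          RAW="${{ github.head_ref || github.ref_name }}"
          echo "BRANCH_NAME=${RAW//\//-}" >> $GITHUB_ENV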
208  .github/workflows/release_build_test.yaml  vendored
@@ -1,208 +0,0 @@
name: Release build test
concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}
  cancel-in-progress: true

on:
  workflow_dispatch:
    inputs:
      build_type:
        type: choice
        description: "Memgraph Build type. Default value is Release."
        default: 'Release'
        options:
          - Release
          - RelWithDebInfo

  push:
    branches:
      - "release/**"
    tags:
      - "v*.*.*-rc*"
      - "v*.*-rc*"
  schedule:
    # UTC
    - cron: "0 22 * * *"

env:
  THREADS: 24
  MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
  MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
  BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}

jobs:
  Debian10:
    uses: ./.github/workflows/release_debian10.yaml
    with:
      build_type: ${{ github.event.inputs.build_type || 'Release' }}
    secrets: inherit

  Ubuntu20_04:
    uses: ./.github/workflows/release_ubuntu2004.yaml
    with:
      build_type: ${{ github.event.inputs.build_type || 'Release' }}
    secrets: inherit

  PackageDebian10:
    if: github.ref_type == 'tag'
    needs: [Debian10]
    runs-on: [self-hosted, DockerMgBuild, X64]
    timeout-minutes: 60
    steps:
      - name: "Set up repository"
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Required because of release/get_version.py
      - name: "Build package"
        run: |
          ./release/package/run.sh package debian-10 $BUILD_TYPE
      - name: Upload to S3
        uses: jakejarvis/s3-sync-action@v0.5.1
        env:
          AWS_S3_BUCKET: "deps.memgraph.io"
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: "eu-west-1"
          SOURCE_DIR: "build/output"
          DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
      - name: "Upload package"
        uses: actions/upload-artifact@v4
        with:
          name: debian-10
          path: build/output/debian-10/memgraph*.deb

  PackageUbuntu20_04:
    if: github.ref_type == 'tag'
    needs: [Ubuntu20_04]
    runs-on: [self-hosted, DockerMgBuild, X64]
    timeout-minutes: 60
    steps:
      - name: "Set up repository"
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Required because of release/get_version.py
      - name: "Build package"
        run: |
          ./release/package/run.sh package ubuntu-22.04 $BUILD_TYPE
      - name: Upload to S3
        uses: jakejarvis/s3-sync-action@v0.5.1
        env:
          AWS_S3_BUCKET: "deps.memgraph.io"
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: "eu-west-1"
          SOURCE_DIR: "build/output"
          DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
      - name: "Upload package"
        uses: actions/upload-artifact@v4
        with:
          name: ubuntu-22.04
          path: build/output/ubuntu-22.04/memgraph*.deb

  PackageUbuntu20_04_ARM:
    if: github.ref_type == 'tag'
    needs: [Ubuntu20_04]
    runs-on: [self-hosted, DockerMgBuild, ARM64]
    # M1 Mac mini is sometimes slower
    timeout-minutes: 150
    steps:
      - name: "Set up repository"
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Required because of release/get_version.py
      - name: "Build package"
        run: |
          ./release/package/run.sh package ubuntu-22.04-arm $BUILD_TYPE
      - name: "Upload package"
        uses: actions/upload-artifact@v4
        with:
          name: ubuntu-22.04-aarch64
          path: build/output/ubuntu-22.04-arm/memgraph*.deb

  PushToS3Ubuntu20_04_ARM:
    if: github.ref_type == 'tag'
    needs: [PackageUbuntu20_04_ARM]
    runs-on: ubuntu-latest
    steps:
      - name: Download package
        uses: actions/download-artifact@v4
        with:
          name: ubuntu-22.04-aarch64
          path: build/output/release
      - name: Upload to S3
        uses: jakejarvis/s3-sync-action@v0.5.1
        env:
          AWS_S3_BUCKET: "deps.memgraph.io"
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: "eu-west-1"
          SOURCE_DIR: "build/output/release"
          DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"

  PackageDebian11:
    if: github.ref_type == 'tag'
    needs: [Debian10, Ubuntu20_04]
    runs-on: [self-hosted, DockerMgBuild, X64]
    timeout-minutes: 60
    steps:
      - name: "Set up repository"
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Required because of release/get_version.py
      - name: "Build package"
        run: |
          ./release/package/run.sh package debian-11 $BUILD_TYPE
      - name: Upload to S3
        uses: jakejarvis/s3-sync-action@v0.5.1
        env:
          AWS_S3_BUCKET: "deps.memgraph.io"
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: "eu-west-1"
          SOURCE_DIR: "build/output"
          DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
      - name: "Upload package"
        uses: actions/upload-artifact@v4
        with:
          name: debian-11
          path: build/output/debian-11/memgraph*.deb

  PackageDebian11_ARM:
    if: github.ref_type == 'tag'
    needs: [Debian10, Ubuntu20_04]
    runs-on: [self-hosted, DockerMgBuild, ARM64]
    # M1 Mac mini is sometimes slower
    timeout-minutes: 150
    steps:
      - name: "Set up repository"
        uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Required because of release/get_version.py
      - name: "Build package"
        run: |
          ./release/package/run.sh package debian-11-arm $BUILD_TYPE
      - name: "Upload package"
        uses: actions/upload-artifact@v4
        with:
          name: debian-11-aarch64
          path: build/output/debian-11-arm/memgraph*.deb

  PushToS3Debian11_ARM:
    if: github.ref_type == 'tag'
    needs: [PackageDebian11_ARM]
    runs-on: ubuntu-latest
    steps:
      - name: Download package
        uses: actions/download-artifact@v4
        with:
          name: debian-11-aarch64
          path: build/output/release
      - name: Upload to S3
        uses: jakejarvis/s3-sync-action@v0.5.1
        env:
          AWS_S3_BUCKET: "deps.memgraph.io"
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: "eu-west-1"
          SOURCE_DIR: "build/output/release"
          DEST_DIR: "memgraph-unofficial/${{ github.ref_name }}/"
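Debian10 and Ubuntu20_04 above are reusable-workflow calls rather than inline jobs: uses: points at another workflow file in the same repository, with: passes the declared inputs, and secrets: inherit forwards the caller's secrets without enumerating them. For that to work, the called file has to declare a matching workflow_call trigger, as in this minimal sketch of the callee side (file contents illustrative only):

on:
  workflow_call:
    inputs:
      build_type:
        type: string
        default: 'Release'

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo "Building with ${{ inputs.build_type }}"

The release_debian10.yaml diff further down removes exactly such a workflow_call block, which is consistent with this caller existing only on the master side of the comparison.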
315  .github/workflows/release_centos8.yaml  vendored  Normal file
@@ -0,0 +1,315 @@
name: Release CentOS 8

on:
  workflow_dispatch:
  schedule:
    - cron: "0 1 * * *"

jobs:
  community_build:
    name: "Community build"
    runs-on: [self-hosted, Linux, X64, CentOS8]
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
    timeout-minutes: 960

    steps:
      - name: Set up repository
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build community binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Initialize dependencies.
          ./init

          # Build community binaries.
          cd build
          cmake -DCMAKE_BUILD_TYPE=release -DMG_ENTERPRISE=OFF ..
          make -j$THREADS

      - name: Run unit tests
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Run unit tests.
          cd build
          ctest -R memgraph__unit --output-on-failure

  coverage_build:
    name: "Coverage build"
    runs-on: [self-hosted, Linux, X64, CentOS8]
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}

    steps:
      - name: Set up repository
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build coverage binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Initialize dependencies.
          ./init

          # Build coverage binaries.
          cd build
          cmake -DTEST_COVERAGE=ON ..
          make -j$THREADS memgraph__unit

      - name: Run unit tests
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Run unit tests.
          cd build
          ctest -R memgraph__unit --output-on-failure

      - name: Compute code coverage
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Compute code coverage.
          cd tools/github
          ./coverage_convert

          # Package code coverage.
          cd generated
          tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu

      - name: Save code coverage
        uses: actions/upload-artifact@v3
        with:
          name: "Code coverage"
          path: tools/github/generated/code_coverage.tar.gz

  debug_build:
    name: "Debug build"
    runs-on: [self-hosted, Linux, X64, CentOS8]
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}

    steps:
      - name: Set up repository
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build debug binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Initialize dependencies.
          ./init

          # Build debug binaries.
          cd build
          cmake ..
          make -j$THREADS

      - name: Run leftover CTest tests
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Run leftover CTest tests (all except unit and benchmark tests).
          cd build
          ctest -E "(memgraph__unit|memgraph__benchmark)" --output-on-failure

      - name: Run drivers tests
        run: |
          ./tests/drivers/run.sh

      - name: Run integration tests
        run: |
          cd tests/integration
          for name in *; do
            if [ ! -d $name ]; then continue; fi
            pushd $name >/dev/null
            echo "Running: $name"
            if [ -x prepare.sh ]; then
              ./prepare.sh
            fi
            if [ -x runner.py ]; then
              ./runner.py
            elif [ -x runner.sh ]; then
              ./runner.sh
            fi
            echo
            popd >/dev/null
          done

      - name: Run cppcheck and clang-format
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Run cppcheck and clang-format.
          cd tools/github
          ./cppcheck_and_clang_format diff

      - name: Save cppcheck and clang-format errors
        uses: actions/upload-artifact@v3
        with:
          name: "Code coverage"
          path: tools/github/cppcheck_and_clang_format.txt

  release_build:
    name: "Release build"
    runs-on: [self-hosted, Linux, X64, CentOS8]
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
    timeout-minutes: 960

    steps:
      - name: Set up repository
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build release binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Initialize dependencies.
          ./init

          # Build release binaries.
          cd build
          cmake -DCMAKE_BUILD_TYPE=release ..
          make -j$THREADS

      - name: Create enterprise RPM package
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          cd build

          # create mgconsole
          # we use the -B to force the build
          make -j$THREADS -B mgconsole

          # Create enterprise RPM package.
          mkdir output && cd output
          cpack -G RPM --config ../CPackConfig.cmake
          rpmlint memgraph*.rpm

      - name: Save enterprise RPM package
        uses: actions/upload-artifact@v3
        with:
          name: "Enterprise RPM package"
          path: build/output/memgraph*.rpm

      - name: Run micro benchmark tests
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Run micro benchmark tests.
          cd build
          # The `eval` benchmark needs a large stack limit.
          ulimit -s 262144
          ctest -R memgraph__benchmark -V

      - name: Run macro benchmark tests
        run: |
          cd tests/macro_benchmark
          ./harness QuerySuite MemgraphRunner \
            --groups aggregation 1000_create unwind_create dense_expand match \
            --no-strict

      - name: Run parallel macro benchmark tests
        run: |
          cd tests/macro_benchmark
          ./harness QueryParallelSuite MemgraphRunner \
            --groups aggregation_parallel create_parallel bfs_parallel \
            --num-database-workers 9 --num-clients-workers 30 \
            --no-strict

      - name: Run GQL Behave tests
        run: |
          cd tests/gql_behave
          ./continuous_integration

      - name: Save quality assurance status
        uses: actions/upload-artifact@v3
        with:
          name: "GQL Behave Status"
          path: |
            tests/gql_behave/gql_behave_status.csv
            tests/gql_behave/gql_behave_status.html

      - name: Run unit tests
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Run unit tests.
          cd build
          ctest -R memgraph__unit --output-on-failure

      - name: Run e2e tests
        run: |
          # TODO(gitbuda): Setup mgclient and pymgclient properly.
          cd tests
          ./setup.sh
          source ve3/bin/activate
          cd e2e
          LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory .

      - name: Run stress test (plain)
        run: |
          cd tests/stress
          ./continuous_integration

      - name: Run stress test (SSL)
        run: |
          cd tests/stress
          ./continuous_integration --use-ssl

      - name: Run stress test (large)
        run: |
          cd tests/stress
          ./continuous_integration --large-dataset

      - name: Run durability test (plain)
        run: |
          cd tests/stress
          source ve3/bin/activate
          python3 durability --num-steps 5

      - name: Run durability test (large)
        run: |
          cd tests/stress
          source ve3/bin/activate
          python3 durability --num-steps 20
265  .github/workflows/release_debian10.yaml  vendored
@@ -1,38 +1,23 @@
name: Release Debian 10

on:
  workflow_call:
    inputs:
      build_type:
        type: string
        description: "Memgraph Build type. Default value is Release."
        default: 'Release'
  workflow_dispatch:
    inputs:
      build_type:
        type: choice
        description: "Memgraph Build type. Default value is Release."
        default: 'Release'
        options:
          - Release
          - RelWithDebInfo

env:
  OS: "Debian10"
  THREADS: 24
  MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
  MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
  BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}
  schedule:
    - cron: "0 1 * * *"

jobs:
  community_build:
    name: "Community build"
    runs-on: [self-hosted, Linux, X64, Debian10]
    timeout-minutes: 60
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
    timeout-minutes: 960

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
@@ -48,7 +33,7 @@ jobs:

          # Build community binaries.
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DMG_ENTERPRISE=OFF ..
          cmake -DCMAKE_BUILD_TYPE=release -DMG_ENTERPRISE=OFF ..
          make -j$THREADS

      - name: Run unit tests
@@ -67,11 +52,10 @@
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
    timeout-minutes: 60

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
@@ -113,19 +97,22 @@ jobs:
          tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu

      - name: Save code coverage
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        with:
          name: "Code coverage(Coverage build)-${{ env.OS }}"
          name: "Code coverage"
          path: tools/github/generated/code_coverage.tar.gz

  debug_build:
    name: "Debug build"
    runs-on: [self-hosted, Linux, X64, Debian10]
    timeout-minutes: 60
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
@@ -157,6 +144,25 @@
        run: |
          ./tests/drivers/run.sh

      - name: Run integration tests
        run: |
          cd tests/integration
          for name in *; do
            if [ ! -d $name ]; then continue; fi
            pushd $name >/dev/null
            echo "Running: $name"
            if [ -x prepare.sh ]; then
              ./prepare.sh
            fi
            if [ -x runner.py ]; then
              ./runner.py
            elif [ -x runner.sh ]; then
              ./runner.sh
            fi
            echo
            popd >/dev/null
          done

      - name: Run cppcheck and clang-format
        run: |
          # Activate toolchain.
@@ -167,49 +173,23 @@
          ./cppcheck_and_clang_format diff

      - name: Save cppcheck and clang-format errors
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        with:
          name: "Code coverage(Debug build)-${{ env.OS }}"
          name: "Code coverage"
          path: tools/github/cppcheck_and_clang_format.txt

  debug_integration_test:
    name: "Debug integration tests"
    runs-on: [self-hosted, Linux, X64, Debian10]
    timeout-minutes: 60

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build debug binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Initialize dependencies.
          ./init

          # Build debug binaries.
          cd build
          cmake ..
          make -j$THREADS

      - name: Run integration tests
        run: |
          tests/integration/run.sh

  release_build:
    name: "Release build"
    runs-on: [self-hosted, Linux, X64, Debian10]
    timeout-minutes: 60
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
    timeout-minutes: 960

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
@@ -225,7 +205,7 @@

          # Build release binaries.
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
          cmake -DCMAKE_BUILD_TYPE=release ..
          make -j$THREADS

      - name: Create enterprise DEB package
@@ -244,60 +224,11 @@
          cpack -G DEB --config ../CPackConfig.cmake

      - name: Save enterprise DEB package
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        with:
          name: "Enterprise DEB package-${{ env.OS}}"
          name: "Enterprise DEB package"
          path: build/output/memgraph*.deb

      - name: Run GQL Behave tests
        run: |
          cd tests
          ./setup.sh /opt/toolchain-v4/activate
          cd gql_behave
          ./continuous_integration

      - name: Save quality assurance status
        uses: actions/upload-artifact@v4
        with:
          name: "GQL Behave Status-${{ env.OS }}"
          path: |
            tests/gql_behave/gql_behave_status.csv
            tests/gql_behave/gql_behave_status.html

      - name: Run unit tests
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Run unit tests.
          cd build
          ctest -R memgraph__unit --output-on-failure

  release_benchmark_tests:
    name: "Release Benchmark Tests"
    runs-on: [self-hosted, Linux, X64, Debian10]
    timeout-minutes: 60

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build release binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate
          # Initialize dependencies.
          ./init

          # Build release binaries
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
          make -j$THREADS

      - name: Run micro benchmark tests
        run: |
          # Activate toolchain.
@@ -324,79 +255,36 @@
            --num-database-workers 9 --num-clients-workers 30 \
            --no-strict

  release_e2e_test:
    name: "Release End-to-end Test"
    runs-on: [self-hosted, Linux, X64, Debian10]
    timeout-minutes: 60
      - name: Run GQL Behave tests
        run: |
          cd tests/gql_behave
          ./continuous_integration

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
      - name: Save quality assurance status
        uses: actions/upload-artifact@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0
          name: "GQL Behave Status"
          path: |
            tests/gql_behave/gql_behave_status.csv
            tests/gql_behave/gql_behave_status.html

      - name: Build release binaries
      - name: Run unit tests
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate
          # Initialize dependencies.
          ./init

          # Build release binaries
          # Run unit tests.
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
          make -j$THREADS

      - name: Ensure Kafka and Pulsar are up
        run: |
          cd tests/e2e/streams/kafka
          docker-compose up -d
          cd ../pulsar
          docker-compose up -d
          ctest -R memgraph__unit --output-on-failure

      - name: Run e2e tests
        run: |
          # TODO(gitbuda): Setup mgclient and pymgclient properly.
          cd tests
          ./setup.sh /opt/toolchain-v4/activate
          source ve3/bin/activate_e2e
          ./setup.sh
          source ve3/bin/activate
          cd e2e
          ./run.sh

      - name: Ensure Kafka and Pulsar are down
        if: always()
        run: |
          cd tests/e2e/streams/kafka
          docker-compose down
          cd ../pulsar
          docker-compose down

  release_durability_stress_tests:
    name: "Release durability and stress tests"
    runs-on: [self-hosted, Linux, X64, Debian10]
    timeout-minutes: 60

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build release binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Initialize dependencies.
          ./init

          # Build release binaries.
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
          make -j$THREADS
          LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory .

      - name: Run stress test (plain)
        run: |
@@ -408,6 +296,11 @@
          cd tests/stress
          ./continuous_integration --use-ssl

      - name: Run stress test (large)
        run: |
          cd tests/stress
          ./continuous_integration --large-dataset

      - name: Run durability test (plain)
        run: |
          cd tests/stress
@@ -423,11 +316,15 @@
  release_jepsen_test:
    name: "Release Jepsen Test"
    runs-on: [self-hosted, Linux, X64, Debian10, JepsenControl]
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
    timeout-minutes: 60

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
@@ -437,27 +334,23 @@
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Initialize dependencies.
          ./init

          # Build only memgraph release binary.
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
          cmake -DCMAKE_BUILD_TYPE=release ..
          make -j$THREADS memgraph

      - name: Refresh Jepsen Cluster
        run: |
          cd tests/jepsen
          ./run.sh cluster-refresh

      - name: Run Jepsen tests
        run: |
          cd tests/jepsen
          ./run.sh test-all-individually --binary ../../build/memgraph --ignore-run-stdout-logs --ignore-run-stderr-logs
          ./run.sh test --binary ../../build/memgraph --run-args "test-all --node-configs resources/node-config.edn" --ignore-run-stdout-logs --ignore-run-stderr-logs

      - name: Save Jepsen report
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        if: ${{ always() }}
        with:
          name: "Jepsen Report-${{ env.OS }}"
          name: "Jepsen Report"
          path: tests/jepsen/Jepsen.tar.gz
12  .github/workflows/release_docker.yaml  vendored
@@ -19,20 +19,20 @@ jobs:
      DOCKER_REPOSITORY_NAME: memgraph
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        uses: actions/checkout@v3

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2
        uses: docker/setup-qemu-action@v1

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2
        uses: docker/setup-buildx-action@v1

      - name: Log in to Docker Hub
        uses: docker/login-action@v2
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Download memgraph binary
        run: |
63  .github/workflows/release_mgbench_client.yaml  vendored
@@ -1,63 +0,0 @@
name: "Mgbench Bolt Client Publish Docker Image"

on:
  workflow_dispatch:
    inputs:
      version:
        description: "Mgbench bolt client version to publish on Dockerhub."
        required: true
      force_release:
        type: boolean
        required: false
        default: false

jobs:
  mgbench_docker_publish:
    runs-on: ubuntu-latest
    env:
      DOCKER_ORGANIZATION_NAME: memgraph
      DOCKER_REPOSITORY_NAME: mgbench-client

    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@v2

      - name: Log in to Docker Hub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Check if specified version is already pushed
        run: |
          EXISTS=$(docker manifest inspect $DOCKER_ORGANIZATION_NAME/$DOCKER_REPOSITORY_NAME:${{ github.event.inputs.version }} > /dev/null; echo $?)
          echo $EXISTS
          if [[ ${EXISTS} -eq 0 ]]; then
            echo 'The specified version has been already released to DockerHub.'
            if [[ ${{ github.event.inputs.force_release }} = true ]]; then
              echo 'Forcing the release!'
            else
              echo 'Stopping the release!'
              exit 1
            fi
          else
            echo 'All good the specified version has not been release to DockerHub.'
          fi

      - name: Build & push docker images
        run: |
          cd tests/mgbench
          docker buildx build \
            --build-arg TOOLCHAIN_VERSION=toolchain-v4 \
            --platform linux/amd64,linux/arm64 \
            --tag $DOCKER_ORGANIZATION_NAME/$DOCKER_REPOSITORY_NAME:${{ github.event.inputs.version }} \
            --tag $DOCKER_ORGANIZATION_NAME/$DOCKER_REPOSITORY_NAME:latest \
            --file Dockerfile.mgbench_client \
            --push .
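The version guard in the workflow above leans on docker manifest inspect, which exits 0 only when the requested tag already exists in the registry, so the job can refuse to overwrite a published version unless force_release is set. A standalone sketch of that check as a step (the image tag here is a hypothetical example):

      - name: Fail if the tag is already published
        run: |
          # manifest inspect exits 0 when the tag exists, non-zero otherwise.
          if docker manifest inspect memgraph/mgbench-client:9.9.9 > /dev/null 2>&1; then
            echo "Tag already on DockerHub, stopping."
            exit 1
          fi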
249  .github/workflows/release_ubuntu2004.yaml  vendored
@@ -1,38 +1,23 @@
name: Release Ubuntu 20.04

on:
  workflow_call:
    inputs:
      build_type:
        type: string
        description: "Memgraph Build type. Default value is Release."
        default: 'Release'
  workflow_dispatch:
    inputs:
      build_type:
        type: choice
        description: "Memgraph Build type. Default value is Release."
        default: 'Release'
        options:
          - Release
          - RelWithDebInfo

env:
  OS: "Ubuntu 20.04"
  THREADS: 24
  MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
  MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
  BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}
  schedule:
    - cron: "0 1 * * *"

jobs:
  community_build:
    name: "Community build"
    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
    timeout-minutes: 60
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
    timeout-minutes: 960

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
@@ -48,7 +33,7 @@ jobs:

          # Build community binaries.
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DMG_ENTERPRISE=OFF ..
          cmake -DCMAKE_BUILD_TYPE=release -DMG_ENTERPRISE=OFF ..
          make -j$THREADS

      - name: Run unit tests
@@ -63,11 +48,14 @@
  coverage_build:
    name: "Coverage build"
    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
    timeout-minutes: 60
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
@@ -109,19 +97,22 @@ jobs:
          tar -czf code_coverage.tar.gz coverage.json html report.json summary.rmu

      - name: Save code coverage
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        with:
          name: "Code coverage(Coverage build)-${{ env.OS }}"
          name: "Code coverage"
          path: tools/github/generated/code_coverage.tar.gz

  debug_build:
    name: "Debug build"
    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
    timeout-minutes: 60
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
@@ -153,6 +144,25 @@
        run: |
          ./tests/drivers/run.sh

      - name: Run integration tests
        run: |
          cd tests/integration
          for name in *; do
            if [ ! -d $name ]; then continue; fi
            pushd $name >/dev/null
            echo "Running: $name"
            if [ -x prepare.sh ]; then
              ./prepare.sh
            fi
            if [ -x runner.py ]; then
              ./runner.py
            elif [ -x runner.sh ]; then
              ./runner.sh
            fi
            echo
            popd >/dev/null
          done

      - name: Run cppcheck and clang-format
        run: |
          # Activate toolchain.
@@ -163,49 +173,23 @@
          ./cppcheck_and_clang_format diff

      - name: Save cppcheck and clang-format errors
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        with:
          name: "Code coverage(Debug build)-${{ env.OS }}"
          name: "Code coverage"
          path: tools/github/cppcheck_and_clang_format.txt

  debug_integration_test:
    name: "Debug integration tests"
    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
    timeout-minutes: 60

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build debug binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Initialize dependencies.
          ./init

          # Build debug binaries.
          cd build
          cmake ..
          make -j$THREADS

      - name: Run integration tests
        run: |
          tests/integration/run.sh

  release_build:
    name: "Release build"
    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
    timeout-minutes: 60
    env:
      THREADS: 24
      MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
      MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
    timeout-minutes: 960

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        uses: actions/checkout@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
@@ -221,7 +205,7 @@

          # Build release binaries.
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
          cmake -DCMAKE_BUILD_TYPE=release ..
          make -j$THREADS

      - name: Create enterprise DEB package
@@ -240,60 +224,11 @@
          cpack -G DEB --config ../CPackConfig.cmake

      - name: Save enterprise DEB package
        uses: actions/upload-artifact@v4
        uses: actions/upload-artifact@v3
        with:
          name: "Enterprise DEB package-${{ env.OS }}"
          name: "Enterprise DEB package"
          path: build/output/memgraph*.deb

      - name: Run GQL Behave tests
        run: |
          cd tests
          ./setup.sh /opt/toolchain-v4/activate
          cd gql_behave
          ./continuous_integration

      - name: Save quality assurance status
        uses: actions/upload-artifact@v4
        with:
          name: "GQL Behave Status-${{ env.OS }}"
          path: |
            tests/gql_behave/gql_behave_status.csv
            tests/gql_behave/gql_behave_status.html

      - name: Run unit tests
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Run unit tests.
          cd build
          ctest -R memgraph__unit --output-on-failure

  release_benchmark_tests:
    name: "Release Benchmark Tests"
    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
    timeout-minutes: 60

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build release binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate
          # Initialize dependencies.
          ./init

          # Build release binaries
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
          make -j$THREADS

      - name: Run micro benchmark tests
        run: |
          # Activate toolchain.
@@ -320,79 +255,36 @@
            --num-database-workers 9 --num-clients-workers 30 \
            --no-strict

  release_e2e_test:
    name: "Release End-to-end Test"
    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
    timeout-minutes: 60
      - name: Run GQL Behave tests
        run: |
          cd tests/gql_behave
          ./continuous_integration

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
      - name: Save quality assurance status
        uses: actions/upload-artifact@v3
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0
          name: "GQL Behave Status"
          path: |
            tests/gql_behave/gql_behave_status.csv
            tests/gql_behave/gql_behave_status.html

      - name: Build release binaries
      - name: Run unit tests
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate
          # Initialize dependencies.
          ./init

          # Build release binaries
          # Run unit tests.
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
          make -j$THREADS

      - name: Ensure Kafka and Pulsar are up
        run: |
          cd tests/e2e/streams/kafka
          docker-compose up -d
          cd ../pulsar
          docker-compose up -d
          ctest -R memgraph__unit --output-on-failure

      - name: Run e2e tests
        run: |
          # TODO(gitbuda): Setup mgclient and pymgclient properly.
          cd tests
          ./setup.sh /opt/toolchain-v4/activate
          source ve3/bin/activate_e2e
          ./setup.sh
          source ve3/bin/activate
          cd e2e
          ./run.sh

      - name: Ensure Kafka and Pulsar are down
        if: always()
        run: |
          cd tests/e2e/streams/kafka
          docker-compose down
          cd ../pulsar
          docker-compose down

  release_durability_stress_tests:
    name: "Release durability and stress tests"
    runs-on: [self-hosted, Linux, X64, Ubuntu20.04]
    timeout-minutes: 60

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build release binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Initialize dependencies.
          ./init

          # Build release binaries.
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
          make -j$THREADS
          LD_LIBRARY_PATH=$LD_LIBRARY_PATH:../../libs/mgclient/lib python runner.py --workloads-root-directory .

      - name: Run stress test (plain)
        run: |
@@ -404,6 +296,11 @@
          cd tests/stress
          ./continuous_integration --use-ssl

      - name: Run stress test (large)
        run: |
          cd tests/stress
          ./continuous_integration --large-dataset

      - name: Run durability test (plain)
        run: |
          cd tests/stress
68  .github/workflows/stress_test_large.yaml  vendored
@@ -1,68 +0,0 @@
name: Stress test large
concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}
  cancel-in-progress: true

on:
  workflow_dispatch:
    inputs:
      build_type:
        type: choice
        description: "Memgraph Build type. Default value is Release."
        default: 'Release'
        options:
          - Release
          - RelWithDebInfo
  push:
    tags:
      - "v*.*.*-rc*"
      - "v*.*-rc*"
  schedule:
    - cron: "0 22 * * *"

env:
  THREADS: 24
  MEMGRAPH_ENTERPRISE_LICENSE: ${{ secrets.MEMGRAPH_ENTERPRISE_LICENSE }}
  MEMGRAPH_ORGANIZATION_NAME: ${{ secrets.MEMGRAPH_ORGANIZATION_NAME }}
  BUILD_TYPE: ${{ github.event.inputs.build_type || 'Release' }}

jobs:
  stress_test_large:
    name: "Stress test large"
    timeout-minutes: 720
    strategy:
      matrix:
        os: [Debian10, Ubuntu20.04]
        extra: [BigMemory, Gen8]
        exclude:
          - os: Debian10
            extra: Gen8
          - os: Ubuntu20.04
            extra: BigMemory
    runs-on: [self-hosted, Linux, X64, "${{ matrix.os }}", "${{ matrix.extra }}"]

    steps:
      - name: Set up repository
        uses: actions/checkout@v4
        with:
          # Number of commits to fetch. `0` indicates all history for all
          # branches and tags. (default: 1)
          fetch-depth: 0

      - name: Build release binaries
        run: |
          # Activate toolchain.
          source /opt/toolchain-v4/activate

          # Initialize dependencies.
          ./init

          # Build release binaries.
          cd build
          cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE ..
          make -j$THREADS

      - name: Run stress test (large)
        run: |
          cd tests/stress
          ./continuous_integration --large-dataset
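The exclude entries in the matrix above prune the 2x2 os/extra grid down to the two runner combinations that actually exist, so the job runs exactly twice. Spelled out, the effective matrix is equivalent to this include-style sketch (not syntax that appears in the file):

    strategy:
      matrix:
        include:
          - os: Debian10
            extra: BigMemory
          - os: Ubuntu20.04
            extra: Gen8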
32  .github/workflows/upload_to_s3.yaml  vendored
@@ -1,32 +0,0 @@
name: Upload Package All artifacts to S3

on:
  workflow_dispatch:
    inputs:
      memgraph_version:
        description: "Memgraph version to upload as. Format: 'X.Y.Z'"
        required: true
      run_number:
        description: "# of the package_all workflow run to upload artifacts from. Format: '#XYZ'"
        required: true

jobs:
  upload-to-s3:
    runs-on: ubuntu-latest
    steps:
      - name: Download artifacts
        uses: dawidd6/action-download-artifact@v4
        with:
          workflow: package_all.yaml
          workflow_conclusion: success
          run_number: "${{ github.event.inputs.run_number }}"
          path: build/output/release
      - name: Upload to S3
        uses: jakejarvis/s3-sync-action@v0.5.1
        env:
          AWS_S3_BUCKET: "download.memgraph.com"
          AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: "eu-west-1"
          SOURCE_DIR: "build/output/release"
          DEST_DIR: "memgraph/v${{ github.event.inputs.memgraph_version }}/"
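For orientation, jakejarvis/s3-sync-action is a thin wrapper around the AWS CLI's sync command, with the env block supplying bucket, credentials, region, and the source/destination mapping. The upload above corresponds roughly to the following invocation (the version segment is a placeholder for the memgraph_version input):

      - name: Upload to S3 (equivalent CLI sketch)
        run: |
          # SOURCE_DIR is synced into s3://AWS_S3_BUCKET/DEST_DIR.
          aws s3 sync build/output/release \
            "s3://download.memgraph.com/memgraph/vX.Y.Z/" \
            --region eu-west-1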
26  .gitignore  vendored
@@ -16,12 +16,15 @@
.ycm_extra_conf.pyc
.temp/
Testing/
/build*/
build
build/
release/examples/build
cmake-build-*
cmake/DownloadProject/
dist/
src/query/frontend/opencypher/generated/
src/query/v2/frontend/opencypher/generated/
src/parser/opencypher/generated
tags
ve/
ve3/
@@ -33,6 +36,9 @@ TAGS
*.fas
*.fasl

# LCP generated C++ files
*.lcp.cpp

src/database/distributed/serialization.hpp
src/database/single_node_ha/serialization.hpp
src/distributed/bfs_rpc_messages.hpp
@@ -46,11 +52,25 @@ src/distributed/pull_produce_rpc_messages.hpp
src/distributed/storage_gc_rpc_messages.hpp
src/distributed/token_sharing_rpc_messages.hpp
src/distributed/updates_rpc_messages.hpp
src/query/v2/frontend/ast/ast.hpp
src/query/frontend/ast/ast.hpp
src/storage/v3/bindings/ast/ast.hpp
src/query/distributed/frontend/ast/ast_serialization.hpp
src/query/v2/distributed/frontend/ast/ast_serialization.hpp
src/durability/distributed/state_delta.hpp
src/durability/single_node/state_delta.hpp
src/durability/single_node_ha/state_delta.hpp
src/query/frontend/semantic/symbol.hpp
src/query/v2/frontend/semantic/symbol.hpp
src/expr/semantic/symbol.hpp
src/query/distributed/frontend/semantic/symbol_serialization.hpp
src/query/v2/distributed/frontend/semantic/symbol_serialization.hpp
src/query/distributed/plan/ops.hpp
src/query/v2/distributed/plan/ops.hpp
src/query/plan/operator.hpp
src/query/v2/plan/operator.hpp
src/parser/opencypher/generated
src/expr/semantic/symbol.hpp
src/raft/log_entry.hpp
src/raft/raft_rpc_messages.hpp
src/raft/snapshot_metadata.hpp
@@ -58,7 +78,3 @@ src/raft/storage_info_rpc_messages.hpp
src/stats/stats_rpc_messages.hpp
src/storage/distributed/rpc/concurrent_id_mapper_rpc_messages.hpp
src/transactions/distributed/engine_rpc_messages.hpp
/tests/manual/js/transaction_timeout/package-lock.json
/tests/manual/js/transaction_timeout/node_modules/
.vscode/
src/query/frontend/opencypher/grammar/.antlr/*
@@ -3,7 +3,6 @@ repos:
    rev: v4.4.0
    hooks:
      - id: check-yaml
        args: [--allow-multiple-documents]
      - id: end-of-file-fixer
      - id: trailing-whitespace
  - repo: https://github.com/psf/black
@@ -20,16 +19,3 @@ repos:
    rev: v13.0.0
    hooks:
      - id: clang-format
# - repo: local
#   hooks:
#     - id: clang-tidy
#       name: clang-tidy
#       description: Runs clang-tidy and checks for errors
#       entry: python ./tools/pre-commit/clang-tidy.py
#       language: python
#       files: ^src/
#       types: [c++, text]
#       fail_fast: true
#       require_serial: true
#       args: [--compile_commands_path=build]
#       pass_filenames: false
@@ -1,22 +0,0 @@
# Path to sources
sonar.sources = .
# sonar.exclusions=
sonar.inclusions=src,include,query_modules

# Path to tests
sonar.tests = tests/
# sonar.test.exclusions=
# sonar.test.inclusions=

# Source encoding
# sonar.sourceEncoding=

# Exclusions for copy-paste detection
# sonar.cpd.exclusions=

# Python version (for python projects only)
# sonar.python.version=

# C++ standard version (for C++ projects only)
# If not specified, it defaults to the latest supported standard
# sonar.cfamily.reportingCppStandardOverride=c++98|c++11|c++14|c++17|c++20
@@ -1,32 +0,0 @@
# Tantivy ADR

**Author**
Marko Budiselic (github.com/gitbuda)

**Status**
APPROVED

**Date**
January 5, 2024

**Problem**

For some Memgraph workloads, text search is a required feature. We don't want
to build a new text search engine because that's not Memgraph's core value.

**Criteria**

- easy integration with our C++ codebase
- ability to operate in-memory and on-disk
- sufficient features (regex, full-text search, fuzzy search, aggregations over
  text data)
- production-ready

**Decision**

None of the known C++ libraries are production-ready. Recent Rust libraries,
in particular [Tantivy](https://github.com/quickwit-oss/tantivy), provide many
more features and are production-ready. We will integrate Tantivy into the
current Memgraph codebase via [cxx](https://github.com/dtolnay/cxx).
**We select Tantivy.**
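To make the integration approach concrete, here is a minimal sketch of what the C++ side of a cxx bridge could look like. Everything here is hypothetical: the `text_search` module name, the `create_index`/`search` functions, the `Hit` struct, and the generated header path are illustrative stand-ins; the actual bridge would be declared on the Rust side in a `#[cxx::bridge]` module that wraps Tantivy.

```cpp
// Hypothetical consumer of a cxx-generated bridge header. The Rust side
// would declare create_index/search inside a #[cxx::bridge] module and
// implement them with Tantivy; none of these names are Memgraph's actual API.
#include "text_search/src/lib.rs.h"  // header generated by cxx (assumed path)

#include <iostream>

int main() {
  // Opens (or creates) an on-disk Tantivy index behind the FFI boundary.
  auto index = text_search::create_index("/tmp/tantivy_index");
  // Runs a full-text query; hits come back as cxx shared structs.
  for (const auto &hit : text_search::search(*index, "graph AND database")) {
    std::cout << hit.doc_id << " score=" << hit.score << "\n";
  }
  return 0;
}
```

The appeal of cxx in this setup is that strings, vectors, and ownership cross the Rust/C++ boundary through generated, type-checked glue rather than hand-written FFI.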
@@ -1,34 +0,0 @@
# NuRaft ADR

**Author**
Marko Budiselic (github.com/gitbuda)

**Status**
PROPOSED

**Date**
January 10, 2024

**Problem**

To enhance Memgraph with the High Availability features our customers have
requested, we want reliable coordinators backed by the Raft consensus
algorithm. Implementing Raft correctly and performantly is a very challenging
task. Skillful Memgraph engineers have already tried three times and failed to
deliver in a reasonable timeframe each time (approximately 4 person-weeks of
engineering work per attempt).

**Criteria**

- easy integration with our C++ codebase
- heavily tested in production environments
- implementation of performance optimizations on top of the canonical Raft
  implementation

**Decision**

There are a few robust C++ implementations of Raft, but only as parts of other
projects or bigger libraries. **We select
[NuRaft](https://github.com/eBay/NuRaft)** because it focuses on delivering
Raft without bloatware, and it's used by
[ClickHouse](https://github.com/ClickHouse/ClickHouse) (a comparable peer to
Memgraph and a very well-established product).
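For context, this is roughly how a NuRaft server is brought up, following the launcher pattern from NuRaft's own examples. `MyStateMachine` and `MyStateManager` stand for application-provided subclasses of `nuraft::state_machine` and `nuraft::state_mgr` (not shown here), and the endpoint, port, and timeout values are arbitrary, so treat this as a sketch rather than Memgraph's actual coordinator code.

```cpp
// Sketch of NuRaft bring-up based on the upstream launcher example.
// MyStateMachine/MyStateManager are application-provided implementations
// of nuraft::state_machine and nuraft::state_mgr (omitted here).
#include <libnuraft/nuraft.hxx>

#include <string>

using namespace nuraft;

ptr<raft_server> start_raft_server(int server_id, const std::string &endpoint, int port) {
  ptr<state_machine> sm = cs_new<MyStateMachine>();
  ptr<state_mgr> smgr = cs_new<MyStateManager>(server_id, endpoint);

  asio_service::options asio_opts;  // network thread pool, SSL, etc.
  raft_params params;               // consensus tuning knobs
  params.election_timeout_lower_bound_ = 200;  // milliseconds
  params.election_timeout_upper_bound_ = 400;

  // The launcher owns the asio service and wires everything together.
  static raft_launcher launcher;
  return launcher.init(sm, smgr, nullptr /*logger*/, port, asio_opts, params);
}
```

Most of the hard consensus work (log replication, leader election, snapshotting) lives behind `raft_server`; the application only supplies the state machine and the cluster membership storage.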
@@ -1,38 +0,0 @@
# RocksDB ADR

**Author**
Marko Budiselic (github.com/gitbuda)

**Status**
ACCEPTED

**Date**
January 23, 2024

**Problem**

Interacting with data (reads and writes) on disk in a concurrent, safe, and
fast way is a challenging task. Implementing all the low-level primitives
needed to interact with various disk hardware efficiently consumes significant
engineering time. Whenever Memgraph has to store data on disk (or on any
storage system colder than RAM), the problem is how to do that in the least
amount of development time while satisfying all functional requirements (often
performance).

**Criteria**

- working efficiently in a highly concurrent environment
- easy integration with Memgraph's C++ codebase
- providing a low-level key-value API
- heavily tested in production environments
- providing abstractions for the storage hardware (even for cloud-based
  storages like S3)

**Decision**

There are a few robust key-value stores, but finding one that is
production-ready and compatible with Memgraph's C++ codebase is challenging.
**We select [RocksDB](https://github.com/facebook/rocksdb)** because it
delivers a robust API to manage data on disk; it's battle-tested in many
production environments (many database systems embed RocksDB); and it's the
most compatible one.
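To give a flavor of the API being adopted, here is a minimal, self-contained RocksDB usage sketch. The path and key/value contents are illustrative only and have nothing to do with Memgraph's actual on-disk layout.

```cpp
// Minimal RocksDB usage sketch: open a database, write one key, read it back.
#include <rocksdb/db.h>

#include <cassert>
#include <string>

int main() {
  rocksdb::Options options;
  options.create_if_missing = true;

  rocksdb::DB *db = nullptr;
  rocksdb::Status status = rocksdb::DB::Open(options, "/tmp/mg_rocksdb_example", &db);
  assert(status.ok());

  // All access goes through a low-level key-value API; higher-level
  // structure (e.g. vertices, edges) must be encoded into keys and values.
  status = db->Put(rocksdb::WriteOptions(), "vertex:1", "serialized-properties");
  assert(status.ok());

  std::string value;
  status = db->Get(rocksdb::ReadOptions(), "vertex:1", &value);
  assert(status.ok() && value == "serialized-properties");

  delete db;
  return 0;
}
```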
@@ -1,67 +0,0 @@
# Architecture Decision Records

Also known as ADRs. This practice has become widespread in many
high-performing engineering teams. It is a technique for communicating between
software engineers. ADRs provide a clear and documented history of
architectural choices, ensuring that everyone on the team is on the same page.
This improves communication and reduces misunderstandings. The act of
recording decisions encourages thoughtful consideration before making choices.
This can lead to more robust and better-informed architectural decisions.

Links must be created, pointing both to and from the GitHub Issues and/or the
Notion Program Management "Initiative" database.

ADRs are complementary to any tech specs that get written while designing a
solution. ADRs are very short and to the point, while tech specs will include
diagrams and can be quite verbose.

## HOWTO

Each ADR will be assigned a monotonically increasing unique numeric
identifier, which will be zero-padded to 3 digits. Each ADR will be in a
single markdown file containing no more than one page of text, and the
filename will start with that unique identifier, followed by a snake case
phrase summarizing the problem. For example:
`001_architecture_decision_records.md` or `002_big_integration_cap_theorem.md`.

We want to use an ADR when:
1. Significant Impact: This includes choices that affect scalability, performance, or fundamental design principles.
1. Long-Term Ramifications: When a decision is expected to have long-term ramifications or is difficult to reverse.
1. Architectural Principles: ADRs are suitable for documenting decisions related to architectural principles, frameworks, or patterns that shape the system's structure.
1. Controversial Choices: When a decision is likely to be controversial or may require justification in the future.

The most senior engineer on a project will evaluate and decide whether or not
an ADR is needed.

## Do

1. Keep them brief and concise.
1. Explain the trade-offs.
1. Each ADR should be about one AD, not multiple ADs.
1. Don't alter existing information in an ADR. Instead, amend the ADR by adding new information, or supersede the ADR by creating a new ADR.
1. Explain your organization's situation and business priorities.
1. Include rationale and considerations based on the social and skills makeup of your teams.
1. Include pros and cons that are relevant, and describe them in terms that align with your needs and goals.
1. Explain what follows from making the decision. This can include the effects, outcomes, outputs, follow-ups, and more.

## Don't

1. Try to guess what the executive leader wants, and then attempt to please them. Be objective.
1. Try to solve everything all at once. A pretty good solution now is MUCH BETTER than a perfect solution later. Carpe diem!
1. Hide any doubts or unanswered questions.
1. Make it a sales pitch. Everything has upsides and downsides - be authentic and honest about them.
1. Perform merely a superficial investigation. If an ADR doesn't call for some deep thinking, then it probably shouldn't exist.
1. Ignore the long-term costs such as performance, tech debt, or hardware and maintenance.
1. Get tunnel vision where creative or surprising approaches are not explored.

# Template - use the format below for each new ADR

1. **Author** - who has written the ADR
1. **Status** - one of: PROPOSED, ACCEPTED, REJECTED, SUPERSEDED-BY or DEPRECATED
1. **Date** - when the status was most recently updated
1. **Problem** - a concise paragraph explaining the context
1. **Criteria** - a list of the two or three metrics by which the solution was evaluated, and their relative weights (importance)
1. **Decision** - what was chosen as the way forward, and what the consequences are of the decision
@@ -1,7 +1,6 @@
# MemGraph CMake configuration

cmake_minimum_required(VERSION 3.12)
cmake_policy(SET CMP0076 NEW)
cmake_minimum_required(VERSION 3.8)

# !! IMPORTANT !! run ./project_root/init.sh before cmake command
# to download dependencies
@@ -19,12 +18,10 @@ set_directory_properties(PROPERTIES CLEAN_NO_CUSTOM TRUE)
# during the code coverage process
find_program(CCACHE_FOUND ccache)
option(USE_CCACHE "ccache:" ON)
message(STATUS "CCache: ${USE_CCACHE}")
if(CCACHE_FOUND AND USE_CCACHE)
  set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
  set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
  message(STATUS "CCache: Used")
else ()
  message(STATUS "CCache: Not used")
endif(CCACHE_FOUND AND USE_CCACHE)

# choose a compiler
@@ -40,14 +37,7 @@ endif()

# -----------------------------------------------------------------------------

project(memgraph LANGUAGES C CXX)

#TODO: upgrade to cmake 3.24 + CheckIPOSupported
#cmake_policy(SET CMP0138 NEW)
#include(CheckIPOSupported)
#check_ipo_supported()
#set(CMAKE_INTERPROCEDURAL_OPTIMIZATION_Release TRUE)
#set(CMAKE_INTERPROCEDURAL_OPTIMIZATION_RelWithDebInfo TRUE)
project(memgraph)

# Install licenses.
install(DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/licenses/
@@ -153,9 +143,7 @@ endif()
# files used can be seen here:
# https://git-scm.com/book/en/v2/Git-Internals-Git-References
set(git_directory "${CMAKE_SOURCE_DIR}/.git")
# Check for directory because if the repo is cloned as a git submodule, .git is
# a file and below code doesn't work.
if (IS_DIRECTORY "${git_directory}")
if (EXISTS "${git_directory}")
  set_property(DIRECTORY APPEND PROPERTY
               CMAKE_CONFIGURE_DEPENDS "${git_directory}/HEAD")
  file(STRINGS "${git_directory}/HEAD" git_head_data)
@@ -170,7 +158,7 @@ endif()

# setup CMake module path, defines path for include() and find_package()
# https://cmake.org/cmake/help/latest/variable/CMAKE_MODULE_PATH.html
list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/cmake")
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ${PROJECT_SOURCE_DIR}/cmake)
# custom function definitions
include(functions)
# -----------------------------------------------------------------------------
@@ -194,9 +182,10 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON)
# c99-designator is disabled because of required mixture of designated and
# non-designated initializers in Python Query Module code (`py_module.cpp`).
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall \
  -Werror=switch -Werror=switch-bool -Werror=return-type \
  -Werror=switch -Werror=switch-bool -Werror=implicit-fallthrough \
  -Werror=return-type \
  -Werror=return-stack-address \
  -Wno-c99-designator -Wmissing-field-initializers \
  -Wno-c99-designator \
  -DBOOST_ASIO_USE_TS_EXECUTOR_AS_DEFAULT")

# Don't omit frame pointer in RelWithDebInfo, for additional callchain debug.
@@ -211,13 +200,8 @@ set(CMAKE_CXX_FLAGS_RELWITHDEBINFO
# ** Static linking is allowed only for executables! **
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libgcc -static-libstdc++")

# Use lld linker to speedup build and use less memory.
add_link_options(-fuse-ld=lld)
# NOTE: Moving to latest Clang (probably starting from 15), lld stopped to work
# without explicit link_directories call.
string(REPLACE ":" " " LD_LIBS $ENV{LD_LIBRARY_PATH})
separate_arguments(LD_LIBS)
link_directories(${LD_LIBS})
# Use gold linker to speedup build
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -fuse-ld=gold")

# release flags
set(CMAKE_CXX_FLAGS_RELEASE "-O2 -DNDEBUG")
@@ -240,6 +224,7 @@ else()
endif()

# -----------------------------------------------------------------------------

# default build type is debug
if (NOT CMAKE_BUILD_TYPE)
  set(CMAKE_BUILD_TYPE "Debug")
@@ -247,17 +232,7 @@ endif()
message(STATUS "CMake build type: ${CMAKE_BUILD_TYPE}")
# -----------------------------------------------------------------------------

add_definitions( -DCMAKE_BUILD_TYPE_NAME="${CMAKE_BUILD_TYPE}")

if (NOT MG_ARCH)
  set(MG_ARCH_DESCR "Host architecture to build Memgraph on. Supported values are x86_64, ARM64.")
  if (${CMAKE_HOST_SYSTEM_PROCESSOR} MATCHES "aarch64")
    set(MG_ARCH "ARM64" CACHE STRING ${MG_ARCH_DESCR})
  else()
    set(MG_ARCH "x86_64" CACHE STRING ${MG_ARCH_DESCR})
  endif()
endif()
message(STATUS "MG_ARCH: ${MG_ARCH}")
set(MG_ARCH "x86_64" CACHE STRING "Host architecture to build Memgraph on. Supported values are x86_64 (default), ARM64.")

# setup external dependencies -------------------------------------------------

@@ -276,6 +251,7 @@ endif()
set(libs_dir ${CMAKE_SOURCE_DIR}/libs)
add_subdirectory(libs EXCLUDE_FROM_ALL)

# Optional subproject configuration -------------------------------------------
option(TEST_COVERAGE "Generate coverage reports from running memgraph" OFF)
option(TOOLS "Build tools binaries" ON)
option(QUERY_MODULES "Build query modules containing custom procedures" ON)
@@ -283,8 +259,6 @@ option(ASAN "Build with Address Sanitizer. To get a reasonable performance optio
option(TSAN "Build with Thread Sanitizer. To get a reasonable performance option should be used only in Release or RelWithDebInfo build " OFF)
option(UBSAN "Build with Undefined Behaviour Sanitizer" OFF)

# Build feature flags

if (TEST_COVERAGE)
  string(TOLOWER ${CMAKE_BUILD_TYPE} lower_build_type)
  if (NOT lower_build_type STREQUAL "debug")
@@ -298,25 +272,12 @@ if (MG_ENTERPRISE)
  add_definitions(-DMG_ENTERPRISE)
endif()

option(ENABLE_JEMALLOC "Use jemalloc" ON)

option(MG_MEMORY_PROFILE "If build should be setup for memory profiling" OFF)
if (MG_MEMORY_PROFILE AND ENABLE_JEMALLOC)
  message(STATUS "Jemalloc has been disabled because MG_MEMORY_PROFILE is enabled")
  set(ENABLE_JEMALLOC OFF)
endif ()
if (MG_MEMORY_PROFILE AND ASAN)
  message(STATUS "ASAN has been disabled because MG_MEMORY_PROFILE is enabled")
  set(ASAN OFF)
endif ()
if (MG_MEMORY_PROFILE)
  add_compile_definitions(MG_MEMORY_PROFILE)
endif ()
set(ENABLE_JEMALLOC ON)

if (ASAN)
  message(WARNING "Disabling jemalloc as it doesn't work well with ASAN")
  set(ENABLE_JEMALLOC OFF)
  # Enable Address sanitizer and get nicer stack traces in error messages.
  # Enable Addres sanitizer and get nicer stack traces in error messages.
  # NOTE: AddressSanitizer uses llvm-symbolizer binary from the Clang
  # distribution to symbolize the stack traces (note that ideally the
  # llvm-symbolizer version must match the version of ASan runtime library).
@@ -337,8 +298,6 @@ if (ASAN)
endif()

if (TSAN)
  message(WARNING "Disabling jemalloc as it doesn't work well with ASAN")
  set(ENABLE_JEMALLOC OFF)
  # ThreadSanitizer generally requires all code to be compiled with -fsanitize=thread.
  # If some code (e.g. dynamic libraries) is not compiled with the flag, it can
  # lead to false positive race reports, false negative race reports and/or
@@ -354,7 +313,7 @@ if (TSAN)
  # By default ThreadSanitizer uses addr2line utility to symbolize reports.
  # llvm-symbolizer is faster, consumes less memory and produces much better
  # reports. To use it set runtime flag:
  # TSAN_OPTIONS="extern-symbolizer-path=~/llvm-symbolizer"
  # TSAN_OPTIONS="extern-symbolizer-path=~/llvm-symbolizer"
  # For more runtime flags see: https://github.com/google/sanitizers/wiki/ThreadSanitizerFlags
endif()
111 README.md
@@ -1,9 +1,13 @@
<p align="center">
<img src="https://public-assets.memgraph.com/github-readme-images/github-memgraph-repo-banner.png">
<img width="400px" src="https://uploads-ssl.webflow.com/5e7ceb09657a69bdab054b3a/5e7ceb09657a6937ab054bba_Black_Original%20_Logo.png">
</p>

---

<p align="center">
Build modern, graph-based applications on top of your streaming data in minutes.
</p>

<p align="center">
<a href="https://github.com/memgraph/memgraph/blob/master/licenses/APL.txt">
<img src="https://img.shields.io/badge/license-APL-green" alt="license" title="license"/>
@@ -18,7 +22,7 @@

<p align="center">
<a href="https://github.com/memgraph/memgraph">
<img src="https://img.shields.io/github/actions/workflow/status/memgraph/memgraph/release_debian10.yaml?branch=master&label=build%20and%20test&logo=github"/>
<img src="https://img.shields.io/github/workflow/status/memgraph/memgraph/Release%20Ubuntu%2020.04/master" alt="build" title="build"/>
</a>
<a href="https://memgraph.com/docs/" alt="Documentation">
<img src="https://img.shields.io/badge/documentation-Memgraph-orange" />
@@ -33,10 +37,9 @@

## :clipboard: Description

Memgraph is an open source graph database built for real-time streaming and
compatible with Neo4j. Whether you're a developer or a data scientist with
interconnected data, Memgraph will get you the immediate actionable insights
fast.
Memgraph is a streaming graph application platform that helps you wrangle your
streaming data, build sophisticated models that you can query in real-time, and
develop graph applications.

Memgraph directly connects to your streaming infrastructure. You can ingest data
from sources like Kafka, SQL, or plain CSV files. Memgraph provides a standard
@@ -48,20 +51,8 @@ natural and effective way to model many real-world problems without relying on
complex SQL schemas.

Memgraph is implemented in C/C++ and leverages an in-memory first architecture
to ensure that you’re getting the [best possible
performance](http://memgraph.com/benchgraph) consistently and without surprises.
It’s also ACID-compliant and highly available.

## :zap: Features

- Run Python, Rust, and C/C++ code natively, check out the
  [MAGE](https://github.com/memgraph/mage) graph algorithm library
- Native support for machine learning
- Streaming support
- Replication
- Authentication and authorization
- ACID compliance

to ensure that you’re getting the best possible performance consistently and
without surprises. It’s also ACID-compliant and highly available.

## :video_game: Memgraph Playground

@@ -85,49 +76,28 @@ your browser.
### macOS

[![macOS](https://img.shields.io/badge/macOS-Docker-000000?style=for-the-badge&logo=macos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-on-macos-docker)
[![macOS](https://img.shields.io/badge/lima-AACF41?style=for-the-badge&logo=macos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-on-ubuntu)

### Linux

[![Linux](https://img.shields.io/badge/Linux-Docker-FCC624?style=for-the-badge&logo=linux&logoColor=black)](https://memgraph.com/docs/memgraph/install-memgraph-on-linux-docker)
[![Debian](https://img.shields.io/badge/Debian-D70A53?style=for-the-badge&logo=debian&logoColor=white)](https://memgraph.com/docs/memgraph/install-memgraph-on-debian)
[![Ubuntu](https://img.shields.io/badge/Ubuntu-E95420?style=for-the-badge&logo=ubuntu&logoColor=white)](https://memgraph.com/docs/memgraph/install-memgraph-on-ubuntu)
[![Cent OS](https://img.shields.io/badge/cent%20os-002260?style=for-the-badge&logo=centos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)
[![Fedora](https://img.shields.io/badge/fedora-0B57A4?style=for-the-badge&logo=fedora&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)
[![RedHat](https://img.shields.io/badge/redhat-EE0000?style=for-the-badge&logo=redhat&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)
[![Cent
OS](https://img.shields.io/badge/cent%20os-002260?style=for-the-badge&logo=centos&logoColor=F0F0F0)](https://memgraph.com/docs/memgraph/install-memgraph-from-rpm)

You can find the binaries and Docker images on the [Download
Hub](https://memgraph.com/download) and the installation instructions in the
[official documentation](https://memgraph.com/docs/memgraph/installation).

## :zap: Features

## :cloud: Memgraph Cloud

Check out [Memgraph Cloud](https://memgraph.com/docs/memgraph-cloud) - a cloud service fully managed on AWS and available in 6 geographic regions around the world. Memgraph Cloud allows you to create projects with Enterprise instances of MemgraphDB from your browser.

<p align="left">
<a href="https://memgraph.com/docs/memgraph-cloud">
<img width="450px" alt="Memgraph Cloud" src="https://public-assets.memgraph.com/memgraph-gifs%2Fcloud.gif">
</a>
</p>

## :link: Connect to Memgraph

[Connect to the database](https://memgraph.com/docs/memgraph/connect-to-memgraph) using Memgraph Lab, mgconsole, various drivers (Python, C/C++ and others) and WebSocket.

### :microscope: Memgraph Lab

Visualize graphs and play with queries to understand your data. [Memgraph Lab](https://memgraph.com/docs/memgraph-lab) is a user interface that helps you explore and manipulate the data stored in Memgraph. Visualize graphs, execute ad hoc queries, and optimize their performance.

<p align="left">
<a href="https://memgraph.com/docs/memgraph-lab">
<img width="450px" alt="Memgraph Cloud" src="https://public-assets.memgraph.com/memgraph-gifs%2Flab.gif">
</a>
</p>

## :file_folder: Import data

[Import data](https://memgraph.com/docs/memgraph/import-data) into Memgraph using Kafka, RedPanda or Pulsar streams, CSV and JSON files, or Cypher commands.
- Run Python, Rust, and C/C++ code natively, check out the
  [MAGE](https://github.com/memgraph/mage) graph algorithm library
- Native support for machine learning
- Streaming support
- Replication
- Authentication and authorization
- ACID compliance

## :bookmark_tabs: Documentation

@@ -141,20 +111,29 @@ guide](https://memgraph.com/docs/memgraph/reference-guide/configuration).

## :trophy: Contributing

Welcome to the heart of Memgraph development! We're on a mission to supercharge Memgraph, making it faster, more user-friendly, and even more powerful. We owe a big thanks to our fantastic community of contributors who help us fix bugs and bring incredible improvements to life. If you're passionate about databases and open source, here's your chance to make a difference!

### Explore Memgraph Internals

Interested in the nuts and bolts of Memgraph? Our [internals documentation](https://memgraph.notion.site/Memgraph-Internals-12b69132d67a417898972927d6870bd2) is where you can uncover the inner workings of Memgraph's architecture, learn how to build the project from scratch, and discover the secrets of effective contributions. Dive deep into the database!

### Dive into the Contributing Guide
Ready to jump into the action? Explore our [contributing guide](CONTRIBUTING.md) to get the inside scoop on how we develop Memgraph. It's your roadmap for suggesting bug fixes and enhancements. Contribute your skills and ideas!
The main purpose of this repository is to continue evolving Memgraph, making it
faster and easier to use. Development of Memgraph happens in the open on GitHub,
and we are grateful to the community for contributing bug fixes and
improvements. Read below to learn how you can take part in improving Memgraph.

### Code of Conduct

Our commitment to a respectful and professional community is unwavering. Every participant in Memgraph is expected to adhere to a stringent Code of Conduct. Please carefully review [the complete text](CODE_OF_CONDUCT.md) to gain a comprehensive understanding of the behaviors that are both expected and explicitly prohibited.
Memgraph has adopted a Code of Conduct that we expect project participants to
adhere to. Please read [the full text](CODE_OF_CONDUCT.md) so that you can
understand what actions will and will not be tolerated.

We maintain a zero-tolerance policy towards any violations. Our shared commitment to this Code of Conduct ensures that Memgraph remains a place where integrity and excellence are paramount.
### Contributing Guide

Read our [contributing guide](CONTRIBUTING.md) to learn about our development
process and how to propose bug fixes and improvements.

### Internals

Read our
[internal](https://memgraph.notion.site/Memgraph-Internals-12b69132d67a417898972927d6870bd2)
docs to learn more about Memgraph's architecture, how to build the project from
source and how to start contributing. All information related to the database,
can be found in the aforementioned docs.

### :scroll: License

@@ -162,16 +141,8 @@ Memgraph Community is available under the [BSL
license](./licenses/BSL.txt).</br> Memgraph Enterprise is available under the
[MEL license](./licenses/MEL.txt).

## :busts_in_silhouette: Community

- :purple_heart: [**Discord**](https://discord.gg/memgraph)
- :ocean: [**Stack Overflow**](https://stackoverflow.com/questions/tagged/memgraphdb)
- :bird: [**Twitter**](https://twitter.com/memgraphdb)
- :movie_camera:
  [**YouTube**](https://www.youtube.com/channel/UCZ3HOJvHGxtQ_JHxOselBYg)

<p align="center">
<a href="#">
<img src="https://img.shields.io/badge/⬆️ back_to_top_⬆️-white" alt="Back to top" title="Back to top"/>
<img src="https://img.shields.io/badge/⬆️back_to_top_⬆️-white" alt="Back to top" title="Back to top"/>
</a>
</p>
55 cmake/FindJemalloc.cmake Normal file
@@ -0,0 +1,55 @@
# Try to find jemalloc library
#
# Use this module as:
#     find_package(Jemalloc)
#
# or:
#     find_package(Jemalloc REQUIRED)
#
# This will define the following variables:
#
#   Jemalloc_FOUND         True if the system has the jemalloc library.
#   Jemalloc_INCLUDE_DIRS  Include directories needed to use jemalloc.
#   Jemalloc_LIBRARIES     Libraries needed to link to jemalloc.
#
# The following cache variables may also be set:
#
#   Jemalloc_INCLUDE_DIR   The directory containing jemalloc/jemalloc.h.
#   Jemalloc_LIBRARY       The path to the jemalloc static library.

find_path(Jemalloc_INCLUDE_DIR NAMES jemalloc/jemalloc.h PATH_SUFFIXES include)

find_library(Jemalloc_LIBRARY NAMES libjemalloc.a PATH_SUFFIXES lib)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(Jemalloc
  FOUND_VAR Jemalloc_FOUND
  REQUIRED_VARS
    Jemalloc_LIBRARY
    Jemalloc_INCLUDE_DIR
)

if(Jemalloc_FOUND)
  set(Jemalloc_LIBRARIES ${Jemalloc_LIBRARY})
  set(Jemalloc_INCLUDE_DIRS ${Jemalloc_INCLUDE_DIR})
else()
  if(Jemalloc_FIND_REQUIRED)
    message(FATAL_ERROR "Cannot find jemalloc!")
  else()
    message(WARNING "jemalloc is not found!")
  endif()
endif()

if(Jemalloc_FOUND AND NOT TARGET Jemalloc::Jemalloc)
  add_library(Jemalloc::Jemalloc UNKNOWN IMPORTED)
  set_target_properties(Jemalloc::Jemalloc
    PROPERTIES
      IMPORTED_LOCATION "${Jemalloc_LIBRARY}"
      INTERFACE_INCLUDE_DIRECTORIES "${Jemalloc_INCLUDE_DIR}"
  )
endif()

mark_as_advanced(
  Jemalloc_INCLUDE_DIR
  Jemalloc_LIBRARY
)
@@ -1,67 +0,0 @@
# Try to find jemalloc library
#
# Use this module as:
#     find_package(jemalloc)
#
# or:
#     find_package(jemalloc REQUIRED)
#
# This will define the following variables:
#
#   JEMALLOC_FOUND         True if the system has the jemalloc library.
#   Jemalloc_INCLUDE_DIRS  Include directories needed to use jemalloc.
#   Jemalloc_LIBRARIES     Libraries needed to link to jemalloc.
#
# The following cache variables may also be set:
#
#   Jemalloc_INCLUDE_DIR   The directory containing jemalloc/jemalloc.h.
#   Jemalloc_LIBRARY       The path to the jemalloc static library.



include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(jemalloc
  FOUND_VAR JEMALLOC_FOUND
  REQUIRED_VARS
    JEMALLOC_LIBRARY
    JEMALLOC_INCLUDE_DIR
)

if(JEMALLOC_INCLUDE_DIR)
  message(STATUS "Found jemalloc include dir: ${JEMALLOC_INCLUDE_DIR}")
else()
  message(WARNING "jemalloc not found!")
endif()

if(JEMALLOC_LIBRARY)
  message(STATUS "Found jemalloc library: ${JEMALLOC_LIBRARY}")
else()
  message(WARNING "jemalloc library not found!")
endif()

if(JEMALLOC_FOUND)
  set(Jemalloc_LIBRARIES ${JEMALLOC_LIBRARY})
  set(Jemalloc_INCLUDE_DIRS ${JEMALLOC_INCLUDE_DIR})
else()
  if(Jemalloc_FIND_REQUIRED)
    message(FATAL_ERROR "Cannot find jemalloc!")
  else()
    message(WARNING "jemalloc is not found!")
  endif()
endif()

if(JEMALLOC_FOUND AND NOT TARGET Jemalloc::Jemalloc)
  message(STATUS "JEMALLOC NOT TARGET")

  add_library(Jemalloc::Jemalloc UNKNOWN IMPORTED)
  set_target_properties(Jemalloc::Jemalloc
    PROPERTIES
      IMPORTED_LOCATION "${JEMALLOC_LIBRARY}"
      INTERFACE_INCLUDE_DIRECTORIES "${JEMALLOC_INCLUDE_DIR}"
  )
endif()

mark_as_advanced(
  JEMALLOC_INCLUDE_DIR
  JEMALLOC_LIBRARY
)
@@ -83,13 +83,9 @@ modifications:
    value: "true"
    override: true

  - name: "query_modules_directory"
    value: "/usr/lib/memgraph/query_modules"
    override: true

  - name: "auth_module_executable"
    value: "/usr/lib/memgraph/auth_module/example.py"
    override: false
  # - name: "query_modules_directory"
  #   value: "/usr/lib/memgraph/query_modules"
  #   override: true

  - name: "memory_limit"
    value: "0"
@@ -99,30 +95,10 @@ modifications:
    value: "SNAPSHOT_ISOLATION"
    override: true

  - name: "storage_mode"
    value: "IN_MEMORY_TRANSACTIONAL"
    override: true

  - name: "allow_load_csv"
    value: "true"
    override: false

  - name: "storage_parallel_index_recovery"
    value: "false"
    override: true

  - name: "storage_parallel_schema_recovery"
    value: "false"
    override: true

  - name: "storage_enable_schema_metadata"
    value: "false"
    override: true

  - name: "query_callable_mappings_path"
    value: "/etc/memgraph/apoc_compatibility_mappings.json"
    override: true

undocumented:
  - "flag_file"
  - "also_log_to_stderr"
@@ -5,10 +5,12 @@ import os
import subprocess
import sys
import textwrap

import xml.etree.ElementTree as ET

import yaml


SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
CONFIG_FILE = os.path.join(SCRIPT_DIR, "flags.yaml")
WIDTH = 80
@@ -16,21 +18,14 @@ WIDTH = 80

def wrap_text(s, initial_indent="# "):
    return "\n#\n".join(
        map(lambda x: textwrap.fill(x, WIDTH, initial_indent=initial_indent, subsequent_indent="# "), s.split("\n"))
    )
    map(lambda x: textwrap.fill(x, WIDTH, initial_indent=initial_indent,
                                subsequent_indent="# "), s.split("\n")))


def extract_flags(binary_path):
    ret = {}
    data = subprocess.run([binary_path, "--help-xml"], stdout=subprocess.PIPE).stdout.decode("utf-8")
    # If something is printed out before the help output, it will break the the
    # XML parsing -> filter out if something is not XML line because something
    # can be logged before gflags output (e.g. during the global objects init).
    # This gets called during memgraph build phase to generate default config
    # file later installed under /etc/memgraph/memgraph.conf
    # NOTE: Don't use \n in the gflags description strings.
    # NOTE: Check here if gflags version changes because of the XML format.
    data = "\n".join([line for line in data.split("\n") if line.startswith("<")])
    data = subprocess.run([binary_path, "--help-xml"],
                          stdout=subprocess.PIPE).stdout.decode("utf-8")
    root = ET.fromstring(data)
    for child in root:
        if child.tag == "usage" and child.text.lower().count("warning"):
@@ -51,7 +46,8 @@ def apply_config_to_flags(config, flags):
    for modification in config["modifications"]:
        name = modification["name"]
        if name not in flags:
            print("WARNING: Flag '" + name + "' missing from binary!", file=sys.stderr)
            print("WARNING: Flag '" + name + "' missing from binary!",
                  file=sys.stderr)
            continue
        flags[name]["default"] = modification["value"]
        flags[name]["override"] = modification["override"]
@@ -79,9 +75,8 @@ def extract_sections(flags):
        else:
            sections.append((current_section, current_flags))
    sections.append(("other", other))
    assert set(sum(map(lambda x: x[1], sections), [])) == set(
        flags.keys()
    ), "The section extraction algorithm lost some flags!"
    assert set(sum(map(lambda x: x[1], sections), [])) == set(flags.keys()), \
        "The section extraction algorithm lost some flags!"
    return sections


@@ -94,7 +89,8 @@ def generate_config_file(sections, flags):
            helpstr = flag["meaning"] + " [" + flag["type"] + "]"
            ret += wrap_text(helpstr) + "\n"
            prefix = "# " if not flag["override"] else ""
            ret += prefix + "--" + flag["name"].replace("_", "-") + "=" + flag["default"] + "\n\n"
            ret += prefix + "--" + flag["name"].replace("_", "-") + \
                "=" + flag["default"] + "\n\n"
        ret += "\n"
    ret += wrap_text(config["footer"])
    return ret.strip() + "\n"
@@ -102,9 +98,13 @@ def generate_config_file(sections, flags):

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("memgraph_binary", help="path to Memgraph binary")
    parser.add_argument("output_file", help="path where to store the generated Memgraph " "configuration file")
    parser.add_argument("--config-file", default=CONFIG_FILE, help="path to generator configuration file")
    parser.add_argument("memgraph_binary",
                        help="path to Memgraph binary")
    parser.add_argument("output_file",
                        help="path where to store the generated Memgraph "
                             "configuration file")
    parser.add_argument("--config-file", default=CONFIG_FILE,
                        help="path to generator configuration file")

    args = parser.parse_args()
    flags = extract_flags(args.memgraph_binary)
@@ -1,26 +0,0 @@
{
  "dbms.components": "mgps.components",
  "apoc.util.validate": "mgps.validate",
  "db.schema.nodeTypeProperties": "schema.NodeTypeOroperties",
  "db.schema.relTypeProperties": "schema.RelTypeProperties",
  "apoc.coll.contains": "collections.contains",
  "apoc.coll.partition": "collections.partition",
  "apoc.coll.toSet": "collections.to_set",
  "apoc.coll.unionAll": "collections.unionAll",
  "apoc.coll.removeAll": "collections.remove_all",
  "apoc.coll.union": "collections.union",
  "apoc.coll.sum": "collections.sum",
  "apoc.coll.pairs": "collections.pairs",
  "apoc.map.fromLists": "map.from_lists",
  "apoc.map.removeKeys": "map.remove_keys",
  "apoc.map.merge": "map.merge",
  "apoc.create.nodes": "create.nodes",
  "apoc.create.removeProperties": "create.remove_properties",
  "apoc.create.node": "create.node",
  "apoc.create.removeLabel": "create.remove_label",
  "apoc.refactor.invert": "refactor.invert",
  "apoc.refactor.cloneNode": "refactor.clone_node",
  "apoc.refactor.cloneSubgraph": "refactor.clone_subgraph",
  "apoc.refactor.cloneSubgraphFromPath": "refactor.clone_subgraph_from_path",
  "apoc.label.exists": "label.exists"
}
@@ -1,230 +0,0 @@
# CSV Import Tool Documentation

CSV is a universal and very versatile data format used to store large quantities
of data. Each Memgraph database instance has a CSV import tool installed called
`mg_import_csv`. The CSV import tool should be used for initial bulk ingestion
of data into the database. Upon ingestion, the CSV importer creates a snapshot
that will be used by the database to recover its state on its next startup.

If you are already familiar with the Neo4j bulk import tool, then using the
`mg_import_csv` tool should be easy. The CSV import tool is fully compatible
with the [Neo4j CSV
format](https://neo4j.com/docs/operations-manual/current/tools/import/). If you
already have a pipeline set up for Neo4j, you should only replace `neo4j-admin
import` with `mg_import_csv`.

## CSV File Format

Each row of a CSV file represents a single entry that should be imported into
the database. Both nodes and relationships can be imported into the database
using CSV files.

Each set of CSV files must have a header that describes the data that is stored
in the CSV files. Each field in the CSV header is in the format
`<name>[:<type>]` which identifies the name that should be used for that column
and the type that should be used for that column. The type is optional and
defaults to `string` (see the following chapter).

Each CSV field must be divided using the delimiter, and each CSV field can
either be quoted or unquoted. When the field is quoted, the first and last
character in the field *must* be the quote character. If the field isn't
quoted, and a quote character appears in it, it is treated as a regular
character. If a quote character appears inside a quoted string then the quote
character must be doubled in order to escape it. Line feeds and carriage
returns are ignored in the CSV file; also, the file can't contain a NULL
character.

## Properties

Both nodes and relationships can have properties added to them. When importing
properties, the CSV importer uses the name specified in the header of the
corresponding CSV column for the name of the property. A property is designated
by specifying one of the following types in the header:
- `integer`, `int`, `long`, `byte`, `short`: creates an integer property
- `float`, `double`: creates a float property
- `boolean`, `bool`: creates a boolean property
- `string`, `char`: creates a string property

When importing a boolean value, the CSV field should contain exactly the text
`true` to import a `True` boolean value. All other text values are treated as a
boolean value `False`.

If you want to import an array of values, you can do so by appending `[]` to any
of the above types. The values of the array are then determined by splitting
the raw CSV value using the array delimiter character.

Assuming that the array delimiter is `;`, the following example:
```plaintext
first_name,last_name:string,number:integer,aliases:string[]
John,Doe,1,Johnny;Jo;J-man
Melissa,Doe,2,Mel
```

will yield these results:
```plaintext
CREATE ({first_name: "John", last_name: "Doe", number: 1, aliases: ["Johnny", "Jo", "J-man"]});
CREATE ({first_name: "Melissa", last_name: "Doe", number: 2, aliases: ["Mel"]});
```

### Nodes

When importing nodes, several more types can be specified in the header of the
CSV file (along with all property types):
- `ID`: id of the node that should be used as the node ID when importing
  relationships
- `LABEL`: designates that the field contains additional labels for the node
- `IGNORE`: designates that the field should be ignored

The `ID` field type sets the internal ID that will be used for the node when
creating relationships. It is optional, and nodes that don't have an ID value
specified will be imported but can't be connected to any relationships. If you
want to save the ID value as a property in the database, just specify a name for
the ID (`user_id:ID`). If you just want to use the ID during the import, leave
out the name of the field (`:ID`). The `ID` field also supports creating
separate ID spaces. The ID space is specified with the ID space name appended
to the `ID` type in parentheses (`ID(user)`). That allows you to have the same
IDs (by value) for multiple different node files (for example, numbers from 1 to
N). The IDs in each ID space will be treated as an independent set of IDs that
don't interfere with IDs in another ID space.

The `LABEL` field type adds additional labels to the node. The value is treated
as an array type so that multiple additional labels can be specified for each
node. The value is split using the array delimiter (`--array-delimiter` flag).
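For illustration, a node file combining a stored ID property, an ID space, and
extra labels could look like this (the `user_id` column name, the `user` ID
space, and the label values are made up for the example):

```plaintext
user_id:ID(user),name:string,:LABEL
1,Alice,Person;Employee
2,Bob,Person
```

Here `user_id` is both the node's ID in the `user` ID space and a stored
property, while the `:LABEL` column adds extra labels split on the array
delimiter.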
### Relationships

In order to be able to import relationships, you must import the nodes in the
same invocation of `mg_import_csv` that is used to import the relationships.

When importing relationships, several more types can be specified in the header
of the CSV file (along with all property types):
- `START_ID`: id of the start node that should be connected with the
  relationship
- `END_ID`: id of the end node that should be connected with the relationship
- `TYPE`: designates the type of the relationship
- `IGNORE`: designates that the field should be ignored

The `START_ID` field type sets the start node that should be connected with the
relationship to the end node. The field *must* be specified and the node ID
must be one of the node IDs that were specified in the node CSV files. The name
of this field is ignored. If the node ID is in an ID space, you can specify the
ID space for it in the same way as for the node ID (`START_ID(user)`).

The `END_ID` field type sets the end node that should be connected with the
relationship to the start node. The field *must* be specified and the node ID
must be one of the node IDs that were specified in the node CSV files. The name
of this field is ignored. If the node ID is in an ID space, you can specify the
ID space for it in the same way as for the node ID (`END_ID(user)`).

The `TYPE` field type sets the type of the relationship. Each relationship
*must* have a relationship type, but it doesn't necessarily need to be
specified in the CSV file; it can also be set externally for the whole CSV
file. The name of this field is ignored.

## CSV Importer Flags

The importer has many command line options that allow you to customize the way
the importer loads your data.

The two main flags that are used to specify the input CSV files are `--nodes`
and `--relationships`. A basic description of these flags is provided in the
table, and a more detailed explanation can be found further below.

| Flag | Description |
|-----------------------|--------------|
|`--nodes` | Used to specify CSV files that contain the nodes to the importer. |
|`--relationships` | Used to specify CSV files that contain the relationships to the importer.|
|`--delimiter` | Sets the delimiter that should be used when splitting the CSV fields (default `,`)|
|`--quote` | Sets the quote character that should be used to quote a CSV field (default `"`)|
|`--array-delimiter` | Sets the delimiter that should be used when splitting array values (default `;`)|
|`--id-type` | Specifies which data type should be used to store the supplied <br /> node IDs when storing them as properties (if the field name is supplied). <br /> The supported values are either `STRING` or `INTEGER`. (default `STRING`)|
|`--ignore-empty-strings` | Instructs the importer to treat all empty strings as `Null` values <br /> instead of an empty string value (default `false`)|
|`--ignore-extra-columns` | Instructs the importer to ignore all columns (instead of raising an error) <br /> that aren't specified after the last specified column in the CSV header. (default `false`) |
|`--skip-bad-relationships`| Instructs the importer to ignore all relationships (instead of raising an error) <br /> that refer to nodes that don't exist in the node files. (default `false`) |
|`--skip-duplicate-nodes` | Instructs the importer to ignore all duplicate nodes (instead of raising an error). <br /> Duplicate nodes are nodes that have an ID that is the same as another node that was already imported. (default `false`) |
|`--trim-strings`| Instructs the importer to trim all of the loaded CSV field values before processing them further. <br /> Trimming the fields removes all leading and trailing whitespace from them. (default `false`) |

The `--nodes` and `--relationships` flags are used to specify CSV files that
contain the nodes and relationships to the importer. Multiple files can be
specified in each supplied `--nodes` or `--relationships` flag. Files that are
supplied in one `--nodes` or `--relationships` flag are treated by the CSV
parser as one big CSV file. Only the first line of the first file is parsed for
the CSV header; all other files (and rows) are treated as data. This is useful
when you have a very large CSV file and don't want to edit its first line just
to add a CSV header. Instead, you can specify the header in a separate file
(e.g. `users_header.csv` or `friendships_header.csv`) and have the data intact
in the large file (e.g. `users.csv` or `friendships.csv`). Also, you can supply
additional labels for each set of node files.

The format of the `--nodes` flag is:
`[<label>[:<label>]...=]<file>[,<file>][,<file>]...`. Take note that only the
first `<file>` part is mandatory; all other parts of the flag value are
optional. Multiple `--nodes` flags can be supplied to describe multiple sets of
different node files. For the importer to work, at least one `--nodes` flag
*must* be supplied.

The format of the `--relationships` flag is:
`[<type>=]<file>[,<file>][,<file>]...`. Take note that only the first `<file>`
part is mandatory; all other parts of the flag value are optional. Multiple
`--relationships` flags can be supplied to describe multiple sets of different
relationship files. The `--relationships` flag isn't mandatory. A complete
invocation combining both flags is shown below.
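As a concrete example, an import reusing the header/data file naming from
above might be invoked like this (the `User` label and `FRIENDS` relationship
type are illustrative):

```plaintext
mg_import_csv \
  --nodes=User=users_header.csv,users.csv \
  --relationships=FRIENDS=friendships_header.csv,friendships.csv
```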
## CSV Parser Logic

The CSV parser uses the same logic as the standard Python CSV parser. The data
is parsed in the same way as the following snippet:

```python
import csv
for row in csv.reader(stream, strict=True):
    # process 'row'
```

Python uses 'excel' as the default dialect when parsing CSV files and the
default settings for the CSV parser are:
- delimiter: `','`
- doublequote: `True`
- escapechar: `None`
- lineterminator: `'\r\n'`
- quotechar: `'"'`
- skipinitialspace: `False`

The above snippet can be expanded to:

```python
import csv
for row in csv.reader(stream, delimiter=',', doublequote=True,
                      escapechar=None, lineterminator='\r\n',
                      quotechar='"', skipinitialspace=False,
                      strict=True):
    # process 'row'
```

For more information about the meaning of the above values, see:
https://docs.python.org/3/library/csv.html#csv.Dialect

## Errors

1. [Skipping duplicate node with ID '{}'. For more details, visit:
   memgr.ph/csv-import-tool.](#error-1)
2. [Skipping bad relationship with START_ID '{}'. For more details, visit:
   memgr.ph/csv-import-tool.](#error-2)
3. [Skipping bad relationship with END_ID '{}'. For more details, visit:
   memgr.ph/csv-import-tool.](#error-3)

## Skipping duplicate node with ID {} {#error-1}

Duplicate nodes are nodes that have an ID that is the same as another node that
was already imported. You can instruct the importer to ignore all duplicate
nodes (instead of raising an error) by using the `--skip-duplicate-nodes` flag.

## Skipping bad relationship with START_ID {} {#error-2}

A node with the id `START_ID` doesn't exist. You can instruct the importer to
ignore all bad relationships (instead of raising an error) that refer to nodes
that don't exist in the node files by using the `--skip-bad-relationships` flag.

## Skipping bad relationship with END_ID {} {#error-3}

A node with the id `END_ID` doesn't exist. You can instruct the importer to
ignore all bad relationships (instead of raising an error) that refer to nodes
that don't exist in the node files by using the `--skip-bad-relationships` flag.
2 environment/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
archives
build
@@ -1,15 +0,0 @@
# Memgraph Operating Environments

## Issues related to the build toolchain

* GCC 11.2 (toolchain-v4) doesn't compile on Fedora 38 (multiple definitions of enum issue)
* spdlog 1.10/11 doesn't work with fmt 10.0.0

## os

Under the `os` directory, you can find scripts to install all required system
dependencies on operating systems where Memgraph natively builds. The testing
script helps to see how to install all packages (in the case of a new package)
or to make any adjustments in the overall system setup. Also, the testing
script helps check if Memgraph runs on a freshly installed operating system
(with no packages installed).
3 environment/os/.gitignore vendored
@@ -1,6 +1,3 @@
*.deb
*.deb.*
*.rpm
*.rpm.*
*.tar.gz
*.tar.gz.*
@@ -1,190 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

check_operating_system "amzn-2"
check_architecture "x86_64"

TOOLCHAIN_BUILD_DEPS=(
    git gcc gcc-c++ make # generic build tools
    wget # used for archive download
    gnupg2 # used for archive signature verification
    tar gzip bzip2 xz unzip # used for archive unpacking
    zlib-devel # zlib library used for all builds
    expat-devel xz-devel python3-devel texinfo
    curl libcurl-devel # for cmake
    readline-devel # for cmake and llvm
    libffi-devel libxml2-devel # for llvm
    libedit-devel pcre-devel pcre2-devel automake bison # for swig
    file
    openssl-devel
    gmp-devel
    gperf
    diffutils
    patch
    libipt libipt-devel # intel
    perl # for openssl
)

TOOLCHAIN_RUN_DEPS=(
    make # generic build tools
    tar gzip bzip2 xz # used for archive unpacking
    zlib # zlib library used for all builds
    expat xz-libs python3 # for gdb
    readline # for cmake and llvm
    libffi libxml2 # for llvm
    openssl-devel
)

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make cmake # build system
    wget # for downloading libs
    libuuid-devel java-11-openjdk # required by antlr
    readline-devel # for memgraph console
    python3-devel # for query modules
    openssl-devel
    openssl
    libseccomp-devel
    python3 python3-pip nmap-ncat # for tests
    #
    # IMPORTANT: python3-yaml does NOT exist on CentOS
    # Install it using `pip3 install PyYAML`
    #
    PyYAML # Package name here does not correspond to the yum package!
    libcurl-devel # mg-requests
    rpm-build rpmlint # for RPM package building
    doxygen graphviz # source documentation generators
    which nodejs golang custom-golang1.18.9 zip unzip java-11-openjdk-devel jdk-17 custom-maven3.9.3 # for driver tests
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    cyrus-sasl-devel
)

MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
    echo "$1"
}

check() {
    local missing=""
    # On Fedora, yum/dnf and python3.10 use a newer glibc which is not
    # compatible with ours, so we need to momentarily disable the env override.
    local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-""}
    LD_LIBRARY_PATH=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == "PyYAML" ]; then
            if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
    LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
}

install() {
    cd "$DIR"
    if [ "$EUID" -ne 0 ]; then
        echo "Please run as root."
        exit 1
    fi
    # If the GitHub Actions runner is installed, append LANG to its environment.
    # Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
        echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi

    yum update -y
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == jdk-17 ]; then
            if ! yum list installed jdk-17 >/dev/null 2>/dev/null; then
                wget --no-check-certificate -c --header "Cookie: oraclelicense=accept-securebackup-cookie" https://download.oracle.com/java/17/latest/jdk-17_linux-x64_bin.rpm
                rpm -Uvh jdk-17_linux-x64_bin.rpm
                # NOTE: Set Java 11 as default.
                update-alternatives --set java java-11-openjdk.x86_64
                update-alternatives --set javac java-11-openjdk.x86_64
            fi
            continue
        fi
        if [ "$pkg" == libipt ]; then
            if ! yum list installed libipt >/dev/null 2>/dev/null; then
                yum install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-1.6.1-8.el8.x86_64.rpm
            fi
            continue
        fi
        if [ "$pkg" == libipt-devel ]; then
            if ! yum list installed libipt-devel >/dev/null 2>/dev/null; then
                yum install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-devel-1.6.1-8.el8.x86_64.rpm
            fi
            continue
        fi
        if [ "$pkg" == nodejs ]; then
            if ! yum list installed nodejs >/dev/null 2>/dev/null; then
                yum install https://rpm.nodesource.com/pub_16.x/nodistro/repo/nodesource-release-nodistro-1.noarch.rpm -y
                yum install nodejs -y --setopt=nodesource-nodejs.module_hotfixes=1
            fi
            continue
        fi
        if [ "$pkg" == PyYAML ]; then
            if [ -z ${SUDO_USER+x} ]; then # Running as root (e.g. Docker).
                pip3 install --user PyYAML
            else # Running using sudo.
                sudo -H -u "$SUDO_USER" bash -c "pip3 install --user PyYAML"
            fi
            continue
        fi
        if [ "$pkg" == java-11-openjdk ]; then
            amazon-linux-extras install -y java-openjdk11
            continue
        fi
        if [ "$pkg" == java-11-openjdk-devel ]; then
            amazon-linux-extras install -y java-openjdk11
            yum install -y java-11-openjdk-devel
            continue
        fi
        yum install -y "$pkg"
    done
}

deps=$2"[*]"
"$1" "${!deps}"
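The two-line dispatch at the bottom of the script above uses bash indirect expansion: the second CLI argument is turned into an `ARRAY[*]` reference, expanded, and passed to the function named by the first argument. A minimal standalone sketch of the same pattern:

```bash
#!/bin/bash
# Minimal sketch of the dispatch pattern used by these scripts;
# save as sketch.sh and run e.g.  ./sketch.sh list NEW_DEPS
NEW_DEPS=(wget curl tar gzip)
list() {
    echo "$1"
}
deps=$2"[*]"     # turns the argument "NEW_DEPS" into the string "NEW_DEPS[*]"
"$1" "${!deps}"  # ${!deps} indirectly expands to "wget curl tar gzip"
```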
@@ -1,5 +1,7 @@
#!/bin/bash

set -Eeuo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

@@ -18,7 +20,7 @@ TOOLCHAIN_BUILD_DEPS=(
    curl # snappy
    readline-devel # cmake and llvm
    libffi-devel libxml2-devel perl-Digest-MD5 # llvm
    libedit-devel pcre-devel pcre2-devel automake bison # swig
    libedit-devel pcre-devel automake bison # swig
    file
    openssl-devel
    gmp-devel
@@ -37,13 +39,12 @@ TOOLCHAIN_RUN_DEPS=(
)

MEMGRAPH_BUILD_DEPS=(
    make cmake pkgconfig # build system
    make pkgconfig # build system
    curl wget # for downloading libs
    libuuid-devel java-11-openjdk # required by antlr
    readline-devel # for memgraph console
    python3-devel # for query modules
    openssl-devel
    openssl
    libseccomp-devel
    python3 python-virtualenv python3-pip nmap-ncat # for qa, macro_benchmark and stress tests
    #
@@ -55,21 +56,9 @@ MEMGRAPH_BUILD_DEPS=(
    sbcl # for custom Lisp C++ preprocessing
    rpm-build rpmlint # for RPM package building
    doxygen graphviz # source documentation generators
    which mono-complete dotnet-sdk-3.1 golang custom-golang1.18.9 # for driver tests
    nodejs zip unzip java-11-openjdk-devel jdk-17 custom-maven3.9.3 # for driver tests
    which mono-complete dotnet-sdk-3.1 golang nodejs zip unzip java-11-openjdk-devel # for driver tests
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    cyrus-sasl-devel
)

MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
@@ -79,18 +68,6 @@ list() {
check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == git ]; then
            if ! which "git" >/dev/null; then
                missing="git $missing"
@@ -133,25 +110,7 @@ install() {
    yum update -y
    yum install -y wget python3 python3-pip
    yum install -y git

    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == jdk-17 ]; then
            if ! yum list installed jdk-17 >/dev/null 2>/dev/null; then
                wget https://download.oracle.com/java/17/latest/jdk-17_linux-x64_bin.rpm
                rpm -ivh jdk-17_linux-x64_bin.rpm
                update-alternatives --set java java-11-openjdk.x86_64
                update-alternatives --set javac java-11-openjdk.x86_64
            fi
            continue
        fi
        if [ "$pkg" == libipt ]; then
            if ! yum list installed libipt >/dev/null 2>/dev/null; then
                yum install -y http://repo.okay.com.mx/centos/8/x86_64/release/libipt-1.6.1-8.el8.x86_64.rpm
@@ -1,5 +1,7 @@
#!/bin/bash

set -Eeuo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

@@ -7,17 +9,15 @@ check_operating_system "centos-9"
check_architecture "x86_64"

TOOLCHAIN_BUILD_DEPS=(
    wget # used for archive download
    coreutils-common gcc gcc-c++ make # generic build tools
    # NOTE: Pure libcurl conflicts with libcurl-minimal
    libcurl-devel # cmake build requires it
    wget # used for archive download
    gnupg2 # used for archive signature verification
    tar gzip bzip2 xz unzip # used for archive unpacking
    zlib-devel # zlib library used for all builds
    expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
    readline-devel # for cmake and llvm
    libffi-devel libxml2-devel # for llvm
    libedit-devel pcre-devel pcre2-devel automake bison # for swig
    libedit-devel pcre-devel automake bison # for swig
    file
    openssl-devel
    gmp-devel
@@ -40,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make cmake pkgconf-pkg-config # build system
    make pkgconf-pkg-config # build system
    wget # for downloading libs
    libuuid-devel java-11-openjdk # required by antlr
    readline-devel # for memgraph console
@@ -56,22 +56,10 @@ MEMGRAPH_BUILD_DEPS=(
    libcurl-devel # mg-requests
    rpm-build rpmlint # for RPM package building
    doxygen graphviz # source documentation generators
    which nodejs golang custom-golang1.18.9 # for driver tests
    zip unzip java-11-openjdk-devel java-17-openjdk java-17-openjdk-devel custom-maven3.9.3 # for driver tests
    which nodejs golang zip unzip java-11-openjdk-devel # for driver tests
    sbcl # for custom Lisp C++ preprocessing
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    cyrus-sasl-devel
)

MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
@@ -81,18 +69,6 @@ list() {
check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == "PyYAML" ]; then
            if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
                missing="$pkg $missing"
@@ -125,20 +101,9 @@ install() {
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi
    # --nobest is used because we install custom libipt versions,
    # since libipt-devel is not available on CentOS 9 Stream
    yum update -y --nobest
    yum update -y
    yum install -y wget git python3 python3-pip

    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        # Since there is no support for libipt-devel on CentOS 9, we install
        # Fedora's version of the same libs; they are the same version, just
        # released for a different OS
@@ -1,10 +1,10 @@
#!/bin/bash

set -Eeuo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

# IMPORTANT: Deprecated since memgraph v2.12.0.

check_operating_system "debian-10"
check_architecture "x86_64"

@@ -24,7 +24,7 @@ TOOLCHAIN_BUILD_DEPS=(
    libgmp-dev # for gdb
    gperf # for proxygen
    git # for fbthrift
    libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
    libedit-dev libpcre3-dev automake bison # for swig
)

TOOLCHAIN_RUN_DEPS=(
@@ -40,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make cmake pkg-config # build system
    make pkg-config # build system
    curl wget # for downloading libs
    uuid-dev default-jre-headless # required by antlr
    libreadline-dev # for memgraph console
@@ -53,19 +53,10 @@ MEMGRAPH_BUILD_DEPS=(
    libcurl4-openssl-dev # mg-requests
    sbcl # for custom Lisp C++ preprocessing
    doxygen graphviz # source documentation generators
    mono-runtime mono-mcs zip unzip default-jdk-headless oracle-java17-installer custom-maven3.9.3 # for driver tests
    dotnet-sdk-3.1 golang custom-golang1.18.9 nodejs npm # for driver tests
    mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
    dotnet-sdk-3.1 golang nodejs npm
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    libsasl2-dev
)

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
@@ -73,28 +64,7 @@ list() {
}

check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
    check_all_dpkg "$1"
}

install() {
@@ -105,15 +75,8 @@ deb http://deb.debian.org/debian/ buster-updates main contrib non-free
deb-src http://deb.debian.org/debian/ buster-updates main contrib non-free
deb http://security.debian.org/debian-security buster/updates main contrib non-free
deb-src http://security.debian.org/debian-security buster/updates main contrib non-free
EOF
    apt --allow-releaseinfo-change update
    cat >/etc/apt/sources.list.d/java.list << EOF
deb http://ppa.launchpad.net/linuxuprising/java/ubuntu bionic main
deb-src http://ppa.launchpad.net/linuxuprising/java/ubuntu bionic main
EOF
    cd "$DIR"
    apt install -y gnupg
    apt-key adv --keyserver keyserver.ubuntu.com --recv-keys EA8CACC073C3DB2A
    apt --allow-releaseinfo-change update
    # If the GitHub Actions runner is installed, append LANG to its environment.
    # Python related tests don't work without the LANG export.
@@ -122,26 +85,8 @@ EOF
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi

    apt install -y wget
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == oracle-java17-installer ]; then
            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                echo oracle-java17-installer shared/accepted-oracle-license-v1-3 select true | /usr/bin/debconf-set-selections
                echo oracle-java17-installer shared/accepted-oracle-license-v1-3 seen true | /usr/bin/debconf-set-selections
                apt install -y "$pkg"
                update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
                update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
            fi
            continue
        fi
        if [ "$pkg" == dotnet-sdk-3.1 ]; then
            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                wget -nv https://packages.microsoft.com/config/debian/10/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
@@ -1,12 +1,12 @@
#!/bin/bash

set -Eeuo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

# IMPORTANT: Deprecated since memgraph v2.12.0.

check_operating_system "debian-11"
check_architecture "arm64" "aarch64"
check_architecture "arm64"

TOOLCHAIN_BUILD_DEPS=(
    coreutils gcc g++ build-essential make # generic build tools
@@ -18,7 +18,7 @@ TOOLCHAIN_BUILD_DEPS=(
    libcurl4-openssl-dev # for cmake
    libreadline-dev # for cmake and llvm
    libffi-dev libxml2-dev # for llvm
    libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
    libedit-dev libpcre3-dev automake bison # for swig
    curl # snappy
    file # for libunwind
    libssl-dev # for libevent
@@ -54,19 +54,10 @@ MEMGRAPH_BUILD_DEPS=(
    libcurl4-openssl-dev # mg-requests
    sbcl # for custom Lisp C++ preprocessing
    doxygen graphviz # source documentation generators
    mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk custom-maven3.9.3 # for driver tests
    golang custom-golang1.18.9 nodejs npm
    mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
    golang nodejs npm
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    libsasl2-dev
)

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
@@ -74,28 +65,7 @@ list() {
}

check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
    check_all_dpkg "$1"
}

install() {
@@ -119,25 +89,7 @@ EOF
        echo "NOTE: export LANG=en_US.utf8"
    fi
    apt install -y wget

    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == openjdk-17-jdk ]; then
            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                apt install -y "$pkg"
                # The default Java version should be Java 11
                update-alternatives --set java /usr/lib/jvm/java-11-openjdk-arm64/bin/java
                update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-arm64/bin/javac
            fi
            continue
        fi
        apt install -y "$pkg"
    done
}
@@ -1,5 +1,7 @@
#!/bin/bash

set -Eeuo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

@@ -16,7 +18,7 @@ TOOLCHAIN_BUILD_DEPS=(
    libcurl4-openssl-dev # for cmake
    libreadline-dev # for cmake and llvm
    libffi-dev libxml2-dev # for llvm
    libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
    libedit-dev libpcre3-dev automake bison # for swig
    curl # snappy
    file # for libunwind
    libssl-dev # for libevent
@@ -39,7 +41,7 @@ TOOLCHAIN_RUN_DEPS=(

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make cmake pkg-config # build system
    make pkg-config # build system
    curl wget # for downloading libs
    uuid-dev default-jre-headless # required by antlr
    libreadline-dev # for memgraph console
@@ -52,21 +54,10 @@ MEMGRAPH_BUILD_DEPS=(
    libcurl4-openssl-dev # mg-requests
    sbcl # for custom Lisp C++ preprocessing
    doxygen graphviz # source documentation generators
    mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk custom-maven3.9.3 # for driver tests
    dotnet-sdk-3.1 golang custom-golang1.18.9 nodejs npm
    mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
    dotnet-sdk-3.1 golang nodejs npm
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    libsasl2-dev
)

MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
@@ -74,28 +65,7 @@ list() {
}

check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
    check_all_dpkg "$1"
}

install() {
@@ -119,25 +89,7 @@ EOF
        echo "NOTE: export LANG=en_US.utf8"
    fi
    apt install -y wget

    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == openjdk-17-jdk ]; then
            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                apt install -y "$pkg"
                # The default Java version should be Java 11
                update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
                update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
            fi
            continue
        fi
        if [ "$pkg" == dotnet-sdk-3.1 ]; then
            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                wget -nv https://packages.microsoft.com/config/debian/10/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
@@ -1,134 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

check_operating_system "debian-12"
check_architecture "arm64" "aarch64"

TOOLCHAIN_BUILD_DEPS=(
    coreutils gcc g++ build-essential make # generic build tools
    wget # used for archive download
    gnupg # used for archive signature verification
    tar gzip bzip2 xz-utils unzip # used for archive unpacking
    zlib1g-dev # zlib library used for all builds
    libexpat1-dev liblzma-dev python3-dev texinfo # for gdb
    libcurl4-openssl-dev # for cmake
    libreadline-dev # for cmake and llvm
    libffi-dev libxml2-dev # for llvm
    libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
    curl # snappy
    file # for libunwind
    libssl-dev # for libevent
    libgmp-dev
    gperf # for proxygen
    git # for fbthrift
)

TOOLCHAIN_RUN_DEPS=(
    make # generic build tools
    tar gzip bzip2 xz-utils # used for archive unpacking
    zlib1g # zlib library used for all builds
    libexpat1 liblzma5 python3 # for gdb
    libcurl4 # for cmake
    file # for CPack
    libreadline8 # for cmake and llvm
    libffi8 libxml2 # for llvm
    libssl-dev # for libevent
)

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make pkg-config # build system
    curl wget # for downloading libs
    uuid-dev default-jre-headless # required by antlr
    libreadline-dev # for memgraph console
    libpython3-dev python3-dev # for query modules
    libssl-dev
    libseccomp-dev
    netcat # tests are using nc to wait for memgraph
    python3 virtualenv python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
    python3-yaml # for the configuration generator
    libcurl4-openssl-dev # mg-requests
    sbcl # for custom Lisp C++ preprocessing
    doxygen graphviz # source documentation generators
    mono-runtime mono-mcs zip unzip default-jdk-headless custom-maven3.9.3 # for driver tests
    dotnet-sdk-7.0 golang custom-golang1.18.9 nodejs npm
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    libsasl2-dev
)

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
    echo "$1"
}

check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
}

install() {
    cd "$DIR"
    apt update
    # If the GitHub Actions runner is installed, append LANG to its environment.
    # Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
        echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi
    apt install -y wget

    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == dotnet-sdk-7.0 ]; then
            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                wget -nv https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
                dpkg -i packages-microsoft-prod.deb
                apt-get update
                apt-get install -y apt-transport-https dotnet-sdk-7.0
            fi
            continue
        fi
        apt install -y "$pkg"
    done
}

deps=$2"[*]"
"$1" "${!deps}"
@@ -1,136 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

check_operating_system "debian-12"
check_architecture "x86_64"

TOOLCHAIN_BUILD_DEPS=(
    coreutils gcc g++ build-essential make # generic build tools
    wget # used for archive download
    gnupg # used for archive signature verification
    tar gzip bzip2 xz-utils unzip # used for archive unpacking
    zlib1g-dev # zlib library used for all builds
    libexpat1-dev libipt-dev libbabeltrace-dev liblzma-dev python3-dev texinfo # for gdb
    libcurl4-openssl-dev # for cmake
    libreadline-dev # for cmake and llvm
    libffi-dev libxml2-dev # for llvm
    libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
    curl # snappy
    file # for libunwind
    libssl-dev # for libevent
    libgmp-dev
    gperf # for proxygen
    git # for fbthrift
)

TOOLCHAIN_RUN_DEPS=(
    make # generic build tools
    tar gzip bzip2 xz-utils # used for archive unpacking
    zlib1g # zlib library used for all builds
    libexpat1 libipt2 libbabeltrace1 liblzma5 python3 # for gdb
    libcurl4 # for cmake
    file # for CPack
    libreadline8 # for cmake and llvm
    libffi8 libxml2 # for llvm
    libssl-dev # for libevent
)

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make cmake pkg-config # build system
    curl wget # for downloading libs
    uuid-dev default-jre-headless # required by antlr
    libreadline-dev # for memgraph console
    libpython3-dev python3-dev # for query modules
    libssl-dev
    libseccomp-dev
    netcat-traditional # tests are using nc to wait for memgraph
    python3 virtualenv python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
    python3-yaml # for the configuration generator
    libcurl4-openssl-dev # mg-requests
    sbcl # for custom Lisp C++ preprocessing
    doxygen graphviz # source documentation generators
    mono-runtime mono-mcs zip unzip default-jdk-headless custom-maven3.9.3 # for driver tests
    dotnet-sdk-7.0 golang custom-golang1.18.9 nodejs npm
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    libsasl2-dev
)

MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
    echo "$1"
}

check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
}

install() {
    cd "$DIR"
    apt update
    # If the GitHub Actions runner is installed, append LANG to its environment.
    # Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
        echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi
    apt install -y wget

    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == dotnet-sdk-7.0 ]; then
            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                wget -nv https://packages.microsoft.com/config/debian/12/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
                dpkg -i packages-microsoft-prod.deb
                apt-get update
                apt-get install -y apt-transport-https dotnet-sdk-7.0
            fi
            continue
        fi
        apt install -y "$pkg"
    done
}

deps=$2"[*]"
"$1" "${!deps}"
@@ -1,150 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

# IMPORTANT: Deprecated since memgraph v2.12.0.

check_operating_system "fedora-36"
check_architecture "x86_64"

TOOLCHAIN_BUILD_DEPS=(
    coreutils-common gcc gcc-c++ make # generic build tools
    wget # used for archive download
    gnupg2 # used for archive signature verification
    tar gzip bzip2 xz unzip # used for archive unpacking
    zlib-devel # zlib library used for all builds
    expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
    curl libcurl-devel # for cmake
    readline-devel # for cmake and llvm
    libffi-devel libxml2-devel # for llvm
    libedit-devel pcre-devel pcre2-devel automake bison # for swig
    file
    openssl-devel
    gmp-devel
    gperf
    diffutils
    libipt libipt-devel # intel
    patch
    perl # for openssl
    git
)

TOOLCHAIN_RUN_DEPS=(
    make # generic build tools
    tar gzip bzip2 xz # used for archive unpacking
    zlib # zlib library used for all builds
    expat xz-libs python3 # for gdb
    readline # for cmake and llvm
    libffi libxml2 # for llvm
    openssl-devel
)

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make cmake pkgconf-pkg-config # build system
    wget # for downloading libs
    libuuid-devel java-11-openjdk # required by antlr
    readline-devel # for memgraph console
    python3-devel # for query modules
    openssl-devel
    libseccomp-devel
    python3 python3-pip python3-virtualenv python3-virtualenvwrapper python3-pyyaml nmap-ncat # for tests
    libcurl-devel # mg-requests
    rpm-build rpmlint # for RPM package building
    doxygen graphviz # source documentation generators
    java-11-openjdk-devel java-17-openjdk-devel custom-maven3.9.3 # for driver tests
    which zip unzip
    nodejs golang custom-golang1.18.9 # for driver tests
    sbcl # for custom Lisp C++ preprocessing
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    cyrus-sasl-devel
)

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
    echo "$1"
}

check() {
    local missing=""
    if [ -v LD_LIBRARY_PATH ]; then
        # On Fedora, yum/dnf and python3.10 use a newer glibc which is not
        # compatible with ours, so we need to momentarily disable the env override.
        local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
        LD_LIBRARY_PATH=""
    fi
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
    if [ -v OLD_LD_LIBRARY_PATH ]; then
        echo "Restoring LD_LIBRARY_PATH..."
        LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
    fi
}

install() {
    cd "$DIR"
    if [ "$EUID" -ne 0 ]; then
        echo "Please run as root."
        exit 1
    fi
    # If the GitHub Actions runner is installed, append LANG to its environment.
    # Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
        echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi
    dnf update -y

    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == java-17-openjdk-devel ]; then
            if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
                dnf install -y "$pkg"
                # The default Java version should be Java 11
                update-alternatives --set java java-11-openjdk.x86_64
                update-alternatives --set javac java-11-openjdk.x86_64
            fi
            continue
        fi
        dnf install -y "$pkg"
    done
}

deps=$2"[*]"
"$1" "${!deps}"
@@ -1,117 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

check_operating_system "fedora-38"
check_architecture "x86_64"

TOOLCHAIN_BUILD_DEPS=(
    coreutils-common gcc gcc-c++ make # generic build tools
    wget # used for archive download
    gnupg2 # used for archive signature verification
    tar gzip bzip2 xz unzip # used for archive unpacking
    zlib-devel # zlib library used for all builds
    expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
    curl libcurl-devel # for cmake
    readline-devel # for cmake and llvm
    libffi-devel libxml2-devel # for llvm
    libedit-devel pcre-devel pcre2-devel automake bison # for swig
    file
    openssl-devel
    gmp-devel
    gperf
    diffutils
    libipt libipt-devel # intel
    patch
    perl # for openssl
    git
)

TOOLCHAIN_RUN_DEPS=(
    make # generic build tools
    tar gzip bzip2 xz # used for archive unpacking
    zlib # zlib library used for all builds
    expat xz-libs python3 # for gdb
    readline # for cmake and llvm
    libffi libxml2 # for llvm
    openssl-devel
)

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make pkgconf-pkg-config # build system
    wget # for downloading libs
    libuuid-devel java-11-openjdk # required by antlr
    readline-devel # for memgraph console
    python3-devel # for query modules
    openssl-devel
    libseccomp-devel
    python3 python3-pip python3-virtualenv python3-virtualenvwrapper python3-pyyaml nmap-ncat # for tests
    libcurl-devel # mg-requests
    rpm-build rpmlint # for RPM package building
    doxygen graphviz # source documentation generators
    which nodejs golang zip unzip java-11-openjdk-devel # for driver tests
    sbcl # for custom Lisp C++ preprocessing
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
)

MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
    echo "$1"
}

check() {
    if [ -v LD_LIBRARY_PATH ]; then
        # On Fedora 38, yum/dnf and python3.11 use a newer glibc which is not
        # compatible with ours, so we need to momentarily disable the env override.
        local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
        LD_LIBRARY_PATH=""
    fi
    local missing=""
    for pkg in $1; do
        if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
    if [ -v OLD_LD_LIBRARY_PATH ]; then
        echo "Restoring LD_LIBRARY_PATH..."
        LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
    fi
}

install() {
    cd "$DIR"
    if [ "$EUID" -ne 0 ]; then
        echo "Please run as root."
        exit 1
    fi
    # If the GitHub Actions runner is installed, append LANG to its environment.
    # Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
        echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi
    dnf update -y
    for pkg in $1; do
        dnf install -y "$pkg"
    done
}

deps=$2"[*]"
"$1" "${!deps}"
@@ -1,117 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

check_operating_system "fedora-39"
check_architecture "x86_64"

TOOLCHAIN_BUILD_DEPS=(
    coreutils-common gcc gcc-c++ make # generic build tools
    wget # used for archive download
    gnupg2 # used for archive signature verification
    tar gzip bzip2 xz unzip # used for archive unpacking
    zlib-devel # zlib library used for all builds
    expat-devel xz-devel python3-devel texinfo libbabeltrace-devel # for gdb
    curl libcurl-devel # for cmake
    readline-devel # for cmake and llvm
    libffi-devel libxml2-devel # for llvm
    libedit-devel pcre-devel pcre2-devel automake bison # for swig
    file
    openssl-devel
    gmp-devel
    gperf
    diffutils
    libipt libipt-devel # intel
    patch
    perl # for openssl
    git
)

TOOLCHAIN_RUN_DEPS=(
    make # generic build tools
    tar gzip bzip2 xz # used for archive unpacking
    zlib # zlib library used for all builds
    expat xz-libs python3 # for gdb
    readline # for cmake and llvm
    libffi libxml2 # for llvm
    openssl-devel
)

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make pkgconf-pkg-config # build system
    wget # for downloading libs
    libuuid-devel java-11-openjdk # required by antlr
    readline-devel # for memgraph console
    python3-devel # for query modules
    openssl-devel
    libseccomp-devel
    python3 python3-pip python3-virtualenv python3-virtualenvwrapper python3-pyyaml nmap-ncat # for tests
    libcurl-devel # mg-requests
    rpm-build rpmlint # for RPM package building
    doxygen graphviz # source documentation generators
    which nodejs golang zip unzip java-11-openjdk-devel # for driver tests
    sbcl # for custom Lisp C++ preprocessing
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
)

MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
    echo "$1"
}

check() {
    if [ -v LD_LIBRARY_PATH ]; then
        # On Fedora, yum/dnf and python3.11 use a newer glibc which is not
        # compatible with ours, so we need to momentarily disable the env override.
        local OLD_LD_LIBRARY_PATH=${LD_LIBRARY_PATH}
        LD_LIBRARY_PATH=""
    fi
    local missing=""
    for pkg in $1; do
        if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
    if [ -v OLD_LD_LIBRARY_PATH ]; then
        echo "Restoring LD_LIBRARY_PATH..."
        LD_LIBRARY_PATH=${OLD_LD_LIBRARY_PATH}
    fi
}

install() {
    cd "$DIR"
    if [ "$EUID" -ne 0 ]; then
        echo "Please run as root."
        exit 1
    fi
    # If the GitHub Actions runner is installed, append LANG to its environment.
    # Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
        echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi
    dnf update -y
    for pkg in $1; do
        dnf install -y "$pkg"
    done
}

deps=$2"[*]"
"$1" "${!deps}"
@@ -1,212 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

# TODO(gitbuda): Rocky gets automatic updates -> figure out how to handle it.
check_operating_system "rocky-9.3"
check_architecture "x86_64"

TOOLCHAIN_BUILD_DEPS=(
    wget # used for archive download
    coreutils-common gcc gcc-c++ make # generic build tools
    # NOTE: Pure libcurl conflicts with libcurl-minimal
    libcurl-devel # cmake build requires it
    gnupg2 # used for archive signature verification
    tar gzip bzip2 xz unzip # used for archive unpacking
    zlib-devel # zlib library used for all builds
    expat-devel xz-devel python3-devel perl-Unicode-EastAsianWidth texinfo libbabeltrace-devel # for gdb
    readline-devel # for cmake and llvm
    libffi-devel libxml2-devel # for llvm
    libedit-devel pcre-devel pcre2-devel automake bison # for swig
    file
    openssl-devel
    gmp-devel
    gperf
    diffutils
    libipt libipt-devel # intel
    patch
)

TOOLCHAIN_RUN_DEPS=(
    make # generic build tools
    tar gzip bzip2 xz # used for archive unpacking
    zlib # zlib library used for all builds
    expat xz-libs python3 # for gdb
    readline # for cmake and llvm
    libffi libxml2 # for llvm
    openssl-devel
    perl # for openssl
)

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make cmake pkgconf-pkg-config # build system
    wget # for downloading libs
    libuuid-devel java-11-openjdk # required by antlr
    readline-devel # for memgraph console
    python3-devel # for query modules
    openssl-devel
    libseccomp-devel
    python3 python3-pip python3-virtualenv nmap-ncat # for qa, macro_benchmark and stress tests
    #
    # IMPORTANT: python3-yaml does NOT exist on CentOS
    # Install it manually using `pip3 install PyYAML`
    #
    PyYAML # Package name here does not correspond to the yum package!
    libcurl-devel # mg-requests
    rpm-build rpmlint # for RPM package building
    doxygen graphviz # source documentation generators
    which nodejs golang custom-golang1.18.9 # for driver tests
    zip unzip java-11-openjdk-devel java-17-openjdk java-17-openjdk-devel custom-maven3.9.3 # for driver tests
    cl-asdf common-lisp-controller sbcl # for custom Lisp C++ preprocessing
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    cyrus-sasl-devel
)

MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
    echo "$1"
}

check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == "PyYAML" ]; then
            if ! python3 -c "import yaml" >/dev/null 2>/dev/null; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == "python3-virtualenv" ]; then
            continue
        fi
        if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
}

install() {
    cd "$DIR"
    if [ "$EUID" -ne 0 ]; then
        echo "Please run as root."
        exit 1
    fi
    # If the GitHub Actions runner is installed, append LANG to its environment.
    # Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
        echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi
    yum update -y
    yum install -y wget git python3 python3-pip

    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == perl-Unicode-EastAsianWidth ]; then
            if ! dnf list installed perl-Unicode-EastAsianWidth >/dev/null 2>/dev/null; then
                dnf install -y https://dl.rockylinux.org/pub/rocky/9/CRB/x86_64/os/Packages/p/perl-Unicode-EastAsianWidth-12.0-7.el9.noarch.rpm
            fi
            continue
        fi
        if [ "$pkg" == texinfo ]; then
            if ! dnf list installed texinfo >/dev/null 2>/dev/null; then
                dnf install -y https://dl.rockylinux.org/pub/rocky/9/CRB/x86_64/os/Packages/t/texinfo-6.7-15.el9.x86_64.rpm
            fi
            continue
        fi
        if [ "$pkg" == libbabeltrace-devel ]; then
            if ! dnf list installed libbabeltrace-devel >/dev/null 2>/dev/null; then
                dnf install -y https://dl.rockylinux.org/pub/rocky/9/devel/x86_64/os/Packages/l/libbabeltrace-devel-1.5.8-10.el9.x86_64.rpm
            fi
            continue
        fi
        if [ "$pkg" == libipt-devel ]; then
            if ! dnf list installed libipt-devel >/dev/null 2>/dev/null; then
                dnf install -y https://dl.rockylinux.org/pub/rocky/9/devel/x86_64/os/Packages/l/libipt-devel-2.0.4-5.el9.x86_64.rpm
            fi
            continue
        fi
        if [ "$pkg" == doxygen ]; then
            if ! dnf list installed doxygen >/dev/null 2>/dev/null; then
                dnf install -y https://dl.rockylinux.org/pub/rocky/9/CRB/x86_64/os/Packages/d/doxygen-1.9.1-11.el9.x86_64.rpm
            fi
            continue
        fi
        if [ "$pkg" == cl-asdf ]; then
            if ! dnf list installed cl-asdf >/dev/null 2>/dev/null; then
                dnf install -y https://pkgs.sysadmins.ws/el8/base/x86_64/cl-asdf-20101028-18.el8.noarch.rpm
            fi
            continue
        fi
        if [ "$pkg" == common-lisp-controller ]; then
            if ! dnf list installed common-lisp-controller >/dev/null 2>/dev/null; then
                dnf install -y https://pkgs.sysadmins.ws/el8/base/x86_64/common-lisp-controller-7.4-20.el8.noarch.rpm
            fi
            continue
        fi
        if [ "$pkg" == sbcl ]; then
            if ! dnf list installed sbcl >/dev/null 2>/dev/null; then
                dnf install -y https://pkgs.sysadmins.ws/el8/base/x86_64/sbcl-2.0.1-4.el8.x86_64.rpm
            fi
            continue
        fi
        if [ "$pkg" == PyYAML ]; then
            if [ -z ${SUDO_USER+x} ]; then # Running as root (e.g. Docker).
                pip3 install --user PyYAML
            else # Running using sudo.
                sudo -H -u "$SUDO_USER" bash -c "pip3 install --user PyYAML"
            fi
            continue
        fi
        if [ "$pkg" == python3-virtualenv ]; then
            if [ -z ${SUDO_USER+x} ]; then # Running as root (e.g. Docker).
                pip3 install virtualenv
                pip3 install virtualenvwrapper
            else # Running using sudo.
                sudo -H -u "$SUDO_USER" bash -c "pip3 install virtualenv"
                sudo -H -u "$SUDO_USER" bash -c "pip3 install virtualenvwrapper"
            fi
            continue
        fi
        yum install -y "$pkg"
    done
}

deps=$2"[*]"
"$1" "${!deps}"
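A pattern that repeats throughout the `install()` above is "install from a direct RPM URL only if the package is missing". A refactoring sketch (not code from the repo) that would collapse those near-identical branches:

```bash
# Sketch only: a helper that generalizes the repeated
# "dnf list installed X || dnf install -y <url>" branches above.
ensure_rpm_from_url() {
    local name="$1" url="$2"
    if ! dnf list installed "$name" >/dev/null 2>&1; then
        dnf install -y "$url"
    fi
}
# Usage with any of the URLs listed in install() above, e.g.:
#   ensure_rpm_from_url texinfo "$TEXINFO_RPM_URL"   # the URL variable is hypothetical
```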
@ -1,158 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -Eeuo pipefail
|
||||
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||
IFS=' '
|
||||
# NOTE: docker_image_name could be local image build based on release/package images.
|
||||
# NOTE: each line has to be under quotes, docker_container_type, script_name and docker_image_name separate with a space.
|
||||
# "docker_container_type script_name docker_image_name"
|
||||
# docker_container_type OPTIONS:
|
||||
# * mgrun -> running plain/empty operating system for the purposes of testing native memgraph package
|
||||
# * mgbuild -> running the builder container to build memgraph inside it -> it's possible create builder images using release/package/run.sh
|
||||
OPERATING_SYSTEMS=(
|
||||
# "mgrun amzn-2 amazonlinux:2"
|
||||
# "mgrun centos-7 centos:7"
|
||||
# "mgrun centos-9 dokken/centos-stream-9"
|
||||
# "mgrun debian-10 debian:10"
|
||||
# "mgrun debian-11 debian:11"
|
||||
# "mgrun fedora-36 fedora:36"
|
||||
# "mgrun ubuntu-18.04 ubuntu:18.04"
|
||||
# "mgrun ubuntu-20.04 ubuntu:20.04"
|
||||
# "mgrun ubuntu-22.04 ubuntu:22.04"
|
||||
# "mgbuild debian-12 memgraph/memgraph-builder:v5_debian-12"
|
||||
)
|
||||
|
||||
if [ ! "$(docker info)" ]; then
|
||||
echo "ERROR: Docker is required"
|
||||
exit 1
|
||||
fi
|
||||
print_help () {
|
||||
echo -e "$0 all\t\t\t\t => start + init all containers in the background"
|
||||
echo -e "$0 check\t\t\t\t => check all containers"
|
||||
echo -e "$0 delete\t\t\t\t => stop + remove all containers"
|
||||
echo -e "$0 copy src_container dst_container => copy build package from src to dst container"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# NOTE: This is an idempotent operation!
|
||||
# TODO(gitbuda): Consider making docker_run always delete + start a new container or add a new function.
|
||||
docker_run () {
|
||||
cnt_type="$1"
|
||||
if [[ "$cnt_type" != "mgbuild" && "$cnt_type" != "mgrun" ]]; then
|
||||
echo "ERROR: Wrong docker_container_type -> valid options are mgbuild, mgrun"
|
||||
exit 1
|
||||
fi
|
||||
cnt_name="$2"
|
||||
cnt_image="$3"
|
||||
if [ ! "$(docker ps -q -f name=$cnt_name)" ]; then
|
||||
if [ "$(docker ps -aq -f status=exited -f name=$cnt_name)" ]; then
|
||||
echo "Cleanup of the old exited container..."
|
||||
docker rm $cnt_name
|
||||
fi
|
||||
if [[ "$cnt_type" == "mgbuild" ]]; then
|
||||
docker run -d --volume "$SCRIPT_DIR/../../:/memgraph" --network host --name "$cnt_name" "$cnt_image"
|
||||
fi
|
||||
if [[ "$cnt_type" == "mgrun" ]]; then
|
||||
docker run -d --volume "$SCRIPT_DIR/../../:/memgraph" --network host --name "$cnt_name" "$cnt_image" sleep infinity
|
||||
fi
|
||||
fi
|
||||
echo "The $cnt_image container is active under $cnt_name name!"
|
||||
}
|
||||
|
||||
docker_exec () {
|
||||
cnt_name="$1"
|
||||
cnt_cmd="$2"
|
||||
docker exec -it "$cnt_name" bash -c "$cnt_cmd"
|
||||
}
|
||||
|
||||
docker_stop_and_rm () {
|
||||
cnt_name="$1"
|
||||
if [ "$(docker ps -q -f name=$cnt_name)" ]; then
|
||||
docker stop "$1"
|
||||
fi
|
||||
if [ "$(docker ps -aq -f status=exited -f name=$cnt_name)" ]; then
|
||||
docker rm "$1"
|
||||
fi
|
||||
}
|
||||
|
||||
# TODO(gitbuda): Make the call to `install NEW_DEPS` configurable, the question what else is useful?
|
||||
start_all () {
|
||||
for script_docker_pair in "${OPERATING_SYSTEMS[@]}"; do
|
||||
read -a script_docker <<< "$script_docker_pair"
|
||||
docker_container_type="${script_docker[0]}"
|
||||
script_name="${script_docker[1]}"
|
||||
docker_image="${script_docker[2]}"
|
||||
docker_name="${docker_container_type}_$script_name"
|
||||
echo ""
|
||||
echo "~~~~ OPERATING ON $docker_image as $docker_name..."
|
||||
docker_run "$docker_container_type" "$docker_name" "$docker_image"
|
||||
docker_exec "$docker_name" "/memgraph/environment/os/$script_name.sh install NEW_DEPS"
|
||||
echo "---- DONE EVERYHING FOR $docker_image as $docker_name..."
|
||||
echo ""
|
||||
done
|
||||
}

check_all () {
    for script_docker_pair in "${OPERATING_SYSTEMS[@]}"; do
        read -r -a script_docker <<< "$script_docker_pair"
        docker_container_type="${script_docker[0]}"
        script_name="${script_docker[1]}"
        docker_image="${script_docker[2]}"
        docker_name="${docker_container_type}_$script_name"
        echo ""
        echo "~~~~ OPERATING ON $docker_image as $docker_name..."
        docker_exec "$docker_name" "/memgraph/environment/os/$script_name.sh check NEW_DEPS"
        echo "---- DONE EVERYTHING FOR $docker_image as $docker_name..."
        echo ""
    done
}

delete_all () {
    for script_docker_pair in "${OPERATING_SYSTEMS[@]}"; do
        read -r -a script_docker <<< "$script_docker_pair"
        docker_container_type="${script_docker[0]}"
        script_name="${script_docker[1]}"
        docker_image="${script_docker[2]}"
        docker_name="${docker_container_type}_$script_name"
        docker_stop_and_rm "$docker_name"
        echo "~~~~ $docker_image as $docker_name DELETED"
    done
}

# TODO(gitbuda): Copying a file between containers is a useful util; also add delete, and consider copying a whole folder.
# TODO(gitbuda): Add args: src_cnt dst_cnt abs_path; both file and recursive folder, always delete + copy.
copy_build_package () {
    src_container="$1"
    dst_container="$2"
    src="$src_container:/memgraph/build/output"
    tmp_dst="$SCRIPT_DIR/../../build"
    mkdir -p "$tmp_dst"
    rm -rf "$tmp_dst/output"
    dst="$dst_container:/"
    docker cp "$src" "$tmp_dst"
    docker cp "$tmp_dst/output" "$dst"
}
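
# Note: `docker cp` only copies between a container and the host filesystem,
# never container-to-container directly, which is why the function above
# stages /memgraph/build/output through the host build directory. A typical
# call (container names assume the start_all naming scheme):
#   copy_build_package mgbuild_debian-12 mgrun_ubuntu-22.04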

if [ "$#" -eq 0 ]; then
    print_help
else
    case $1 in
        all)
            start_all
        ;;
        check)
            check_all
        ;;
        delete)
            delete_all
        ;;
        copy) # src_container dst_container
            if [ "$#" -ne 3 ]; then
                print_help
            fi
            copy_build_package "$2" "$3"
        ;;
        *)
            print_help
        ;;
    esac
fi
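
# For reference, a typical end-to-end session with this helper; a sketch only,
# assuming the file is saved as run.sh (the real file name may differ):
#   ./run.sh all                                         # start + init all containers
#   ./run.sh check                                       # verify NEW_DEPS in every container
#   ./run.sh copy mgbuild_debian-12 mgrun_ubuntu-22.04   # move the build package
#   ./run.sh delete                                      # stop + remove all containers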
@@ -1,10 +1,11 @@
#!/bin/bash

set -Eeuo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

check_operating_system "todo-os-name"
check_architecture "todo-arch-name"

TOOLCHAIN_BUILD_DEPS=(
    pkg
@@ -18,20 +19,6 @@ MEMGRAPH_BUILD_DEPS=(
    pkg
)

MEMGRAPH_TEST_DEPS=(
    pkg
)

MEMGRAPH_RUN_DEPS=(
    pkg
)

# NEW_DEPS is useful when you want to test the installation of a new package.
# During the test you can put packages like wget, curl, tar, or gzip here.
NEW_DEPS=(
    pkg
)

list() {
    echo "$1"
}
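
# The NEW_DEPS workflow for this template is therefore to list the candidate
# packages in the array above and drive the script directly; a sketch, keeping
# the placeholder file name:
#   ./todo-os-name.sh list NEW_DEPS      # print the packages under test
#   ./todo-os-name.sh install NEW_DEPS   # try to install them
#   ./todo-os-name.sh check NEW_DEPS     # verify they are present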
@@ -1,10 +1,10 @@
#!/bin/bash

set -Eeuo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

# IMPORTANT: Deprecated since memgraph v2.12.0.

check_operating_system "ubuntu-18.04"
check_architecture "x86_64"

@@ -25,7 +25,7 @@ TOOLCHAIN_BUILD_DEPS=(
    libgmp-dev # for gdb
    gperf # for proxygen
    libssl-dev
    libedit-dev libpcre2-dev libpcre3-dev automake bison # swig
    libedit-dev libpcre3-dev automake bison # swig
)

TOOLCHAIN_RUN_DEPS=(
@@ -41,7 +41,7 @@ TOOLCHAIN_RUN_DEPS=(

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make cmake pkg-config # build system
    make pkg-config # build system
    curl wget # downloading libs
    uuid-dev default-jre-headless # required by antlr
    libreadline-dev # memgraph console
@@ -53,19 +53,9 @@ MEMGRAPH_BUILD_DEPS=(
    libcurl4-openssl-dev # mg-requests
    sbcl # custom Lisp C++ preprocessing
    doxygen graphviz # source documentation generators
    mono-runtime mono-mcs nodejs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # driver tests
    custom-golang1.18.9 # for driver tests
    mono-runtime mono-mcs nodejs zip unzip default-jdk-headless # driver tests
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    libsasl2-dev
)

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp2
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
@@ -73,53 +63,11 @@ list() {
}

check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
    check_all_dpkg "$1"
}

install() {
    apt update -y

    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == openjdk-17-jdk-headless ]; then
            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                apt install -y "$pkg"
                # The default Java version should be Java 11
                update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
                update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
            fi
            continue
        fi
        apt install -y "$pkg"
    done
    apt install -y $1
}

deps=$2"[*]"
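
# The inline per-package loop in check() above gets replaced by a shared
# check_all_dpkg helper from util.sh, which is not shown in this diff. A
# minimal sketch of what such a helper plausibly looks like, mirroring the
# removed loop (illustration only, not the actual util.sh code):
#
# check_all_dpkg () {
#     local missing=""
#     for pkg in $1; do
#         # dpkg -s exits non-zero when the package is not installed
#         if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
#             missing="$pkg $missing"
#         fi
#     done
#     if [ "$missing" != "" ]; then
#         echo "MISSING PACKAGES: $missing"
#         exit 1
#     fi
# }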
@@ -1,5 +1,7 @@
#!/bin/bash

set -Eeuo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

@@ -22,7 +24,7 @@ TOOLCHAIN_BUILD_DEPS=(
    libgmp-dev # for gdb
    gperf # for proxygen
    libssl-dev
    libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
    libedit-dev libpcre3-dev automake bison # for swig
)

TOOLCHAIN_RUN_DEPS=(
@@ -38,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make cmake pkg-config # build system
    make pkg-config # build system
    curl wget # for downloading libs
    uuid-dev default-jre-headless # required by antlr
    libreadline-dev # for memgraph console
@@ -51,21 +53,10 @@ MEMGRAPH_BUILD_DEPS=(
    libcurl4-openssl-dev # mg-requests
    sbcl # for custom Lisp C++ preprocessing
    doxygen graphviz # source documentation generators
    mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # for driver tests
    dotnet-sdk-3.1 golang custom-golang1.18.9 nodejs npm # for driver tests
    mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
    dotnet-sdk-3.1 golang nodejs npm
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    libsasl2-dev
)

MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp2
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
@@ -73,35 +64,12 @@ list() {
}

check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
    check_all_dpkg "$1"
}

install() {
    cd "$DIR"
    export DEBIAN_FRONTEND=noninteractive
    apt update -y
    apt install -y wget
    apt update
    # If GitHub Actions runner is installed, append LANG to the environment.
    # Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
@@ -109,16 +77,8 @@ install() {
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi

    apt install -y wget
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == dotnet-sdk-3.1 ]; then
            if ! dpkg -s dotnet-sdk-3.1 2>/dev/null >/dev/null; then
                wget -nv https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
@@ -128,15 +88,6 @@ install() {
            fi
            continue
        fi
        if [ "$pkg" == openjdk-17-jdk-headless ]; then
            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                apt install -y "$pkg"
                # The default Java version should be Java 11
                update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
                update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
            fi
            continue
        fi
        apt install -y "$pkg"
    done
}
@@ -1,144 +0,0 @@
#!/bin/bash
set -Eeuo pipefail
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

check_operating_system "ubuntu-22.04"
check_architecture "arm64" "aarch64"

TOOLCHAIN_BUILD_DEPS=(
    coreutils gcc g++ build-essential make # generic build tools
    wget # used for archive download
    gnupg # used for archive signature verification
    tar gzip bzip2 xz-utils unzip # used for archive unpacking
    zlib1g-dev # zlib library used for all builds
    libexpat1-dev libbabeltrace-dev liblzma-dev python3-dev texinfo # for gdb
    libcurl4-openssl-dev # for cmake
    libreadline-dev # for cmake and llvm
    libffi-dev libxml2-dev # for llvm
    curl # snappy
    file
    git # for thrift
    libgmp-dev # for gdb
    gperf # for proxygen
    libssl-dev
    libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
)

TOOLCHAIN_RUN_DEPS=(
    make # generic build tools
    tar gzip bzip2 xz-utils # used for archive unpacking
    zlib1g # zlib library used for all builds
    libexpat1 libbabeltrace1 liblzma5 python3 # for gdb
    libcurl4 # for cmake
    libreadline8 # for cmake and llvm
    libffi7 libxml2 # for llvm
    libssl-dev # for libevent
)

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make cmake pkg-config # build system
    curl wget # for downloading libs
    uuid-dev default-jre-headless # required by antlr
    libreadline-dev # for memgraph console
    libpython3-dev python3-dev # for query modules
    libssl-dev
    libseccomp-dev
    netcat # tests are using nc to wait for memgraph
    python3 python3-virtualenv python3-pip # for qa, macro_benchmark and stress tests
    python3-yaml # for the configuration generator
    libcurl4-openssl-dev # mg-requests
    sbcl # for custom Lisp C++ preprocessing
    doxygen graphviz # source documentation generators
    mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # for driver tests
    dotnet-sdk-6.0 golang custom-golang1.18.9 nodejs npm
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    libsasl2-dev
)

MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp2
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
    echo "$1"
}

check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
}

install() {
    cd "$DIR"
    apt update
    # If GitHub Actions runner is installed, append LANG to the environment.
    # Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
        echo "LANG=en_US.utf8" >> /home/gh/actions-runner/.env
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi
    apt install -y wget

    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == dotnet-sdk-6.0 ]; then
            if ! dpkg -s dotnet-sdk-6.0 2>/dev/null >/dev/null; then
                wget -nv https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
                dpkg -i packages-microsoft-prod.deb
                apt-get update
                apt-get install -y apt-transport-https dotnet-sdk-6.0
            fi
            continue
        fi
        if [ "$pkg" == openjdk-17-jdk-headless ]; then
            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                apt install -y "$pkg"
                # The default Java version should be Java 11
                update-alternatives --set java /usr/lib/jvm/java-11-openjdk-arm64/bin/java
                update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-arm64/bin/javac
            fi
            continue
        fi
        apt install -y "$pkg"
    done
}

deps=$2"[*]"
"$1" "${!deps}"
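
# The two-line dispatcher that closes every OS script relies on bash indirect
# expansion: $1 names the function to call (list, check, install) and $2 names
# a dependency array, so deps holds a string like "NEW_DEPS[*]" and ${!deps}
# expands the array that string names. A self-contained sketch of the pattern:
#
# #!/bin/bash
# # invoke as: ./sketch.sh list NEW_DEPS
# NEW_DEPS=(wget curl tar gzip)
# list() { echo "$1"; }
# deps=$2"[*]"     # e.g. "NEW_DEPS[*]"
# "$1" "${!deps}"  # e.g. list "wget curl tar gzip"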
@@ -1,5 +1,7 @@
#!/bin/bash

set -Eeuo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
source "$DIR/../util.sh"

@@ -22,7 +24,7 @@ TOOLCHAIN_BUILD_DEPS=(
    libgmp-dev # for gdb
    gperf # for proxygen
    libssl-dev
    libedit-dev libpcre2-dev libpcre3-dev automake bison # for swig
    libedit-dev libpcre3-dev automake bison # for swig
)

TOOLCHAIN_RUN_DEPS=(
@@ -38,7 +40,7 @@ TOOLCHAIN_RUN_DEPS=(

MEMGRAPH_BUILD_DEPS=(
    git # source code control
    make cmake pkg-config # build system
    make pkg-config # build system
    curl wget # for downloading libs
    uuid-dev default-jre-headless # required by antlr
    libreadline-dev # for memgraph console
@@ -51,21 +53,10 @@ MEMGRAPH_BUILD_DEPS=(
    libcurl4-openssl-dev # mg-requests
    sbcl # for custom Lisp C++ preprocessing
    doxygen graphviz # source documentation generators
    mono-runtime mono-mcs zip unzip default-jdk-headless openjdk-17-jdk-headless custom-maven3.9.3 # for driver tests
    dotnet-sdk-6.0 golang custom-golang1.18.9 nodejs npm # for driver tests
    mono-runtime mono-mcs zip unzip default-jdk-headless # for driver tests
    dotnet-sdk-6.0 golang nodejs npm
    autoconf # for jemalloc code generation
    libtool # for protobuf code generation
    libsasl2-dev
)

MEMGRAPH_TEST_DEPS="${MEMGRAPH_BUILD_DEPS[*]}"

MEMGRAPH_RUN_DEPS=(
    logrotate openssl python3 libseccomp2
)

NEW_DEPS=(
    wget curl tar gzip
)

list() {
@@ -73,34 +64,12 @@ list() {
}

check() {
    local missing=""
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            if [ ! -f "/opt/apache-maven-3.9.3/bin/mvn" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            if [ ! -f "/opt/go1.18.9/go/bin/go" ]; then
                missing="$pkg $missing"
            fi
            continue
        fi
        if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
            missing="$pkg $missing"
        fi
    done
    if [ "$missing" != "" ]; then
        echo "MISSING PACKAGES: $missing"
        exit 1
    fi
    check_all_dpkg "$1"
}

install() {
    cd "$DIR"
    apt update -y
    apt install -y wget
    apt update
    # If GitHub Actions runner is installed, append LANG to the environment.
    # Python related tests don't work without the LANG export.
    if [ -d "/home/gh/actions-runner" ]; then
@@ -108,16 +77,8 @@ install() {
    else
        echo "NOTE: export LANG=en_US.utf8"
    fi

    apt install -y wget
    for pkg in $1; do
        if [ "$pkg" == custom-maven3.9.3 ]; then
            install_custom_maven "3.9.3"
            continue
        fi
        if [ "$pkg" == custom-golang1.18.9 ]; then
            install_custom_golang "1.18.9"
            continue
        fi
        if [ "$pkg" == dotnet-sdk-6.0 ]; then
            if ! dpkg -s dotnet-sdk-6.0 2>/dev/null >/dev/null; then
                wget -nv https://packages.microsoft.com/config/ubuntu/22.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
@@ -127,15 +88,6 @@ install() {
            fi
            continue
        fi
        if [ "$pkg" == openjdk-17-jdk-headless ]; then
            if ! dpkg -s "$pkg" 2>/dev/null >/dev/null; then
                apt install -y "$pkg"
                # The default Java version should be Java 11
                update-alternatives --set java /usr/lib/jvm/java-11-openjdk-amd64/bin/java
                update-alternatives --set javac /usr/lib/jvm/java-11-openjdk-amd64/bin/javac
            fi
            continue
        fi
        apt install -y "$pkg"
    done
}

environment/toolchain/.gitignore (vendored)
@@ -1,5 +0,0 @@
archives
build
output
*.tar.gz
tmp_build.sh
@@ -4,7 +4,7 @@ diff -ur a/CMakeLists.txt b/CMakeLists.txt
@@ -52,9 +52,9 @@
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHs-c-")
add_definitions(-D_HAS_EXCEPTIONS=0)


- # Disable RTTI.
- string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
@@ -17,7 +17,7 @@ diff -ur a/CMakeLists.txt b/CMakeLists.txt
@@ -77,9 +77,9 @@
string(REGEX REPLACE "-fexceptions" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")


- # Disable RTTI.
- string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
@@ -25,5 +25,5 @@ diff -ur a/CMakeLists.txt b/CMakeLists.txt
+ # string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+ # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")


# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make
@@ -1,48 +0,0 @@
#!/bin/bash -e

# NOTE: Copy this under memgraph/environment/toolchain/vN/tmp_build.sh, edit and test.

pushd () { command pushd "$@" > /dev/null; }
popd () { command popd "$@" > /dev/null; }
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR"
source "$DIR/../../util.sh"
DISTRO="$(operating_system)"
TOOLCHAIN_VERSION=5
NAME=toolchain-v$TOOLCHAIN_VERSION
PREFIX=/opt/$NAME
function log_tool_name () {
    echo ""
    echo ""
    echo "#### $1 ####"
    echo ""
    echo ""
}

# HERE: Remove/clear dependencies from a given toolchain.

mkdir -p archives && pushd archives
# HERE: Download dependencies here.
popd

mkdir -p build
pushd build
source $PREFIX/activate
export CC=$PREFIX/bin/clang
export CXX=$PREFIX/bin/clang++
export CFLAGS="$CFLAGS -fPIC"
export PATH=$PREFIX/bin:$PATH
export LD_LIBRARY_PATH=$PREFIX/lib64
COMMON_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$PREFIX
                    -DCMAKE_PREFIX_PATH=$PREFIX
                    -DCMAKE_BUILD_TYPE=Release
                    -DCMAKE_C_COMPILER=$CC
                    -DCMAKE_CXX_COMPILER=$CXX
                    -DBUILD_SHARED_LIBS=OFF
                    -DCMAKE_CXX_STANDARD=20
                    -DBUILD_TESTING=OFF
                    -DCMAKE_REQUIRED_INCLUDES=$PREFIX/include
                    -DCMAKE_POSITION_INDEPENDENT_CODE=ON"

# HERE: Add dependencies to test below.
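
# Purely as an illustration of how the HERE: placeholders above are meant to
# be filled in -- the library name, URL, and version are hypothetical:
#
# under "mkdir -p archives && pushd archives":
#     wget https://example.com/somelib-1.0.tar.gz
# under "HERE: Add dependencies to test below.":
#     tar -xzf ../archives/somelib-1.0.tar.gz
#     pushd somelib-1.0
#     mkdir -p build && pushd build
#     cmake .. $COMMON_CMAKE_FLAGS
#     make -j$CPUS install
#     popd && popd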
@@ -7,7 +7,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR"

source "$DIR/../../util.sh"
source "$DIR/../util.sh"
DISTRO="$(operating_system)"

# toolchain version
@@ -30,10 +30,10 @@ LLVM_VERSION=11.0.0
SWIG_VERSION=4.0.2 # used only for LLVM compilation

# Check for the dependencies.
echo "ALL BUILD PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS
echo "ALL BUILD PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS

# check installation directory
NAME=toolchain-v$TOOLCHAIN_VERSION
@@ -442,7 +442,7 @@ In order to be able to run all of these tools you should install the following
packages:

\`\`\`
$($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
$($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
\`\`\`

## Usage
@@ -7,7 +7,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR"

source "$DIR/../../util.sh"
source "$DIR/../util.sh"
DISTRO="$(operating_system)"

# toolchain version
@@ -31,10 +31,10 @@ LLVM_VERSION_LONG=12.0.1-rc4
SWIG_VERSION=4.0.2 # used only for LLVM compilation

# Check for the dependencies.
echo "ALL BUILD PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS
echo "ALL BUILD PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS

# check installation directory
NAME=toolchain-v$TOOLCHAIN_VERSION
@@ -452,7 +452,7 @@ In order to be able to run all of these tools you should install the following
packages:

\`\`\`
$($DIR/../../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
$($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
\`\`\`

## Usage
@@ -7,17 +7,9 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
CPUS=$( grep -c processor < /proc/cpuinfo )
cd "$DIR"

source "$DIR/../../util.sh"
source "$DIR/../util.sh"
DISTRO="$(operating_system)"

function log_tool_name () {
    echo ""
    echo ""
    echo "#### $1 ####"
    echo ""
    echo ""
}

for_arm=false
if [[ "$#" -eq 1 ]]; then
    if [[ "$1" == "--for-arm" ]]; then
@@ -28,11 +20,9 @@ if [[ "$#" -eq 1 ]]; then
    fi
fi

TOOLCHAIN_STDCXX="${TOOLCHAIN_STDCXX:-libstdc++}"
if [[ "$TOOLCHAIN_STDCXX" != "libstdc++" && "$TOOLCHAIN_STDCXX" != "libc++" ]]; then
    echo "Only GCC (libstdc++) or LLVM (libc++) C++ standard library implementations are supported."
    exit 1
fi
os="$1"

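# The standard-library flavor is selected purely through the environment;
# assuming the build script is invoked as ./build.sh, the two supported
# configurations are:
#   ./build.sh                           # default: GCC's libstdc++
#   TOOLCHAIN_STDCXX=libc++ ./build.sh   # LLVM's libc++ (appends -libc++ to the archive name)
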
# toolchain version
TOOLCHAIN_VERSION=4

# package versions used
@@ -51,15 +41,11 @@ CPPCHECK_VERSION=2.6
LLVM_VERSION=13.0.0
SWIG_VERSION=4.0.2 # used only for LLVM compilation

# Set the right operating system setup script.
ENV_SCRIPT="$DIR/../../os/$DISTRO.sh"
if [[ "$for_arm" = true ]]; then
    ENV_SCRIPT="$DIR/../../os/$DISTRO-arm.sh"
fi
echo "ALL BUILD PACKAGES: $(${ENV_SCRIPT} list TOOLCHAIN_BUILD_DEPS)"
${ENV_SCRIPT} check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $(${ENV_SCRIPT} list TOOLCHAIN_RUN_DEPS)"
${ENV_SCRIPT} check TOOLCHAIN_RUN_DEPS
# Check for the dependencies.
echo "ALL BUILD PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_BUILD_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_BUILD_DEPS
echo "ALL RUN PACKAGES: $($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)"
$DIR/../os/$DISTRO.sh check TOOLCHAIN_RUN_DEPS

# check installation directory
NAME=toolchain-v$TOOLCHAIN_VERSION
@@ -113,8 +99,6 @@ if [ ! -f llvm-$LLVM_VERSION.src.tar.xz ]; then
    wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-tools-extra-$LLVM_VERSION.src.tar.xz
    wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/compiler-rt-$LLVM_VERSION.src.tar.xz
    wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libunwind-$LLVM_VERSION.src.tar.xz
    wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxx-$LLVM_VERSION.src.tar.xz
    wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxxabi-$LLVM_VERSION.src.tar.xz
fi
if [ ! -f pahole-gdb-master.zip ]; then
    wget https://github.com/PhilArmstrong/pahole-gdb/archive/master.zip -O pahole-gdb-master.zip
@@ -172,8 +156,6 @@ if [ ! -f llvm-$LLVM_VERSION.src.tar.xz.sig ]; then
    wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/clang-tools-extra-$LLVM_VERSION.src.tar.xz.sig
    wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/compiler-rt-$LLVM_VERSION.src.tar.xz.sig
    wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libunwind-$LLVM_VERSION.src.tar.xz.sig
    wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxx-$LLVM_VERSION.src.tar.xz.sig
    wget https://github.com/llvm/llvm-project/releases/download/llvmorg-$LLVM_VERSION/libcxxabi-$LLVM_VERSION.src.tar.xz.sig
fi
# list of valid llvm gnupg keys: https://releases.llvm.org/download.html
$GPG --keyserver $KEYSERVER --recv-keys 0x474E22316ABF4785A88C6E8EA2C794A986419D8A
@@ -183,8 +165,6 @@ $GPG --verify lld-$LLVM_VERSION.src.tar.xz.sig lld-$LLVM_VERSION.src.tar.xz
$GPG --verify clang-tools-extra-$LLVM_VERSION.src.tar.xz.sig clang-tools-extra-$LLVM_VERSION.src.tar.xz
$GPG --verify compiler-rt-$LLVM_VERSION.src.tar.xz.sig compiler-rt-$LLVM_VERSION.src.tar.xz
$GPG --verify libunwind-$LLVM_VERSION.src.tar.xz.sig libunwind-$LLVM_VERSION.src.tar.xz
$GPG --verify libcxx-$LLVM_VERSION.src.tar.xz.sig libcxx-$LLVM_VERSION.src.tar.xz
$GPG --verify libcxxabi-$LLVM_VERSION.src.tar.xz.sig libcxxabi-$LLVM_VERSION.src.tar.xz

popd

@@ -192,7 +172,7 @@ popd
mkdir -p build
pushd build

log_tool_name "GCC $GCC_VERSION"
# compile gcc
if [ ! -f $PREFIX/bin/gcc ]; then
    if [ -d gcc-$GCC_VERSION ]; then
        rm -rf gcc-$GCC_VERSION
@@ -283,7 +263,7 @@ fi
export PATH=$PREFIX/bin:$PATH
export LD_LIBRARY_PATH=$PREFIX/lib64

log_tool_name "binutils $BINUTILS_VERSION"
# compile binutils
if [ ! -f $PREFIX/bin/ld.gold ]; then
    if [ -d binutils-$BINUTILS_VERSION ]; then
        rm -rf binutils-$BINUTILS_VERSION
@@ -347,7 +327,7 @@ if [ ! -f $PREFIX/bin/ld.gold ]; then
    popd && popd
fi

log_tool_name "GDB $GDB_VERSION"
# compile gdb
if [ ! -f $PREFIX/bin/gdb ]; then
    if [ -d gdb-$GDB_VERSION ]; then
        rm -rf gdb-$GDB_VERSION
@@ -383,62 +363,6 @@ if [ ! -f $PREFIX/bin/gdb ]; then
        --without-babeltrace \
        --enable-tui \
        --with-python=python3
elif [[ "${DISTRO}" == fedora* ]]; then
    # Remove readline, gdb does not compile
    env \
        CC=gcc \
        CXX=g++ \
        CFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
        CXXFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
        CPPFLAGS="-Wdate-time -D_FORTIFY_SOURCE=2 -fPIC" \
        LDFLAGS="-Wl,-z,relro" \
        PYTHON="" \
        ../configure \
        --build=x86_64-linux-gnu \
        --host=x86_64-linux-gnu \
        --prefix=$PREFIX \
        --disable-maintainer-mode \
        --disable-dependency-tracking \
        --disable-silent-rules \
        --disable-gdbtk \
        --disable-shared \
        --without-guile \
        --with-system-gdbinit=$PREFIX/etc/gdb/gdbinit \
        --with-expat \
        --with-system-zlib \
        --with-lzma \
        --with-babeltrace \
        --with-intel-pt \
        --enable-tui \
        --with-python=python3
elif [[ "${DISTRO}" == "amzn-2" ]]; then
    # Remove readline, gdb does not compile
    env \
        CC=gcc \
        CXX=g++ \
        CFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
        CXXFLAGS="-g -O2 -fstack-protector-strong -Wformat -Werror=format-security" \
        CPPFLAGS="-Wdate-time -D_FORTIFY_SOURCE=2 -fPIC" \
        LDFLAGS="-Wl,-z,relro" \
        PYTHON="" \
        ../configure \
        --build=x86_64-linux-gnu \
        --host=x86_64-linux-gnu \
        --prefix=$PREFIX \
        --disable-maintainer-mode \
        --disable-dependency-tracking \
        --disable-silent-rules \
        --disable-gdbtk \
        --disable-shared \
        --without-guile \
        --with-system-gdbinit=$PREFIX/etc/gdb/gdbinit \
        --with-expat \
        --with-system-zlib \
        --with-lzma \
        --with-babeltrace \
        --with-intel-pt \
        --enable-tui \
        --with-python=python3
else
    # https://buildd.debian.org/status/fetch.php?pkg=gdb&arch=amd64&ver=8.2.1-2&stamp=1550831554&raw=0
    env \
@@ -474,13 +398,13 @@ if [ ! -f $PREFIX/bin/gdb ]; then
    popd && popd
fi

log_tool_name "install pahole"
# install pahole
if [ ! -d $PREFIX/share/pahole-gdb ]; then
    unzip ../archives/pahole-gdb-master.zip
    mv pahole-gdb-master $PREFIX/share/pahole-gdb
fi

log_tool_name "setup system gdbinit"
# setup system gdbinit
if [ ! -f $PREFIX/etc/gdb/gdbinit ]; then
    mkdir -p $PREFIX/etc/gdb
    cat >$PREFIX/etc/gdb/gdbinit <<EOF
@@ -506,7 +430,7 @@ end
EOF
fi

log_tool_name "cmake $CMAKE_VERSION"
# compile cmake
if [ ! -f $PREFIX/bin/cmake ]; then
    if [ -d cmake-$CMAKE_VERSION ]; then
        rm -rf cmake-$CMAKE_VERSION
@@ -532,7 +456,7 @@ if [ ! -f $PREFIX/bin/cmake ]; then
    popd && popd
fi

log_tool_name "cppcheck $CPPCHECK_VERSION"
# compile cppcheck
if [ ! -f $PREFIX/bin/cppcheck ]; then
    if [ -d cppcheck-$CPPCHECK_VERSION ]; then
        rm -rf cppcheck-$CPPCHECK_VERSION
@@ -556,7 +480,7 @@ if [ ! -f $PREFIX/bin/cppcheck ]; then
    popd
fi

log_tool_name "swig $SWIG_VERSION"
# compile swig
if [ ! -d swig-$SWIG_VERSION/install ]; then
    if [ -d swig-$SWIG_VERSION ]; then
        rm -rf swig-$SWIG_VERSION
@@ -572,7 +496,7 @@ if [ ! -d swig-$SWIG_VERSION/install ]; then
    popd && popd
fi

log_tool_name "LLVM $LLVM_VERSION"
# compile llvm
if [ ! -f $PREFIX/bin/clang ]; then
    if [ -d llvm-$LLVM_VERSION ]; then
        rm -rf llvm-$LLVM_VERSION
@@ -589,19 +513,8 @@ if [ ! -f $PREFIX/bin/clang ]; then
    mv compiler-rt-$LLVM_VERSION.src/ llvm-$LLVM_VERSION/projects/compiler-rt
    tar -xvf ../archives/libunwind-$LLVM_VERSION.src.tar.xz
    mv libunwind-$LLVM_VERSION.src/include/mach-o llvm-$LLVM_VERSION/tools/lld/include

    # The following is required because of libc++
    tar -xvf ../archives/libcxx-$LLVM_VERSION.src.tar.xz
    mv libcxx-$LLVM_VERSION.src llvm-$LLVM_VERSION/projects/libcxx
    tar -xvf ../archives/libcxxabi-$LLVM_VERSION.src.tar.xz
    mv libcxxabi-$LLVM_VERSION.src llvm-$LLVM_VERSION/projects/libcxxabi
    # NOTE: We moved part of the libunwind in one of the previous steps.
    rm -r libunwind-$LLVM_VERSION.src
    tar -xvf ../archives/libunwind-$LLVM_VERSION.src.tar.xz
    mv libunwind-$LLVM_VERSION.src llvm-$LLVM_VERSION/projects/libunwind

    pushd llvm-$LLVM_VERSION
    mkdir -p build && pushd build
    mkdir build && pushd build
    # activate swig
    export PATH=$DIR/build/swig-$SWIG_VERSION/install/bin:$PATH
    # influenced by: https://buildd.debian.org/status/fetch.php?pkg=llvm-toolchain-7&arch=amd64&ver=1%3A7.0.1%7E%2Brc2-1%7Eexp1&stamp=1541506173&raw=0
@@ -654,7 +567,7 @@ In order to be able to run all of these tools you should install the following
packages:

\`\`\`
$($DIR/../../os/$ENV_SCRIPT.sh list TOOLCHAIN_RUN_DEPS)
$($DIR/../os/$DISTRO.sh list TOOLCHAIN_RUN_DEPS)
\`\`\`

## Usage
@@ -711,7 +624,6 @@ export PS1="($NAME) \$PS1"
export LD_LIBRARY_PATH=$PREFIX/lib:$PREFIX/lib64
export CXXFLAGS=-isystem\ $PREFIX/include\ \$CXXFLAGS
export CFLAGS=-isystem\ $PREFIX/include\ \$CFLAGS
export VENV=$PREFIX

# disable root
function su () {
@@ -763,7 +675,7 @@ PROXYGEN_SHA256=5360a8ccdfb2f5a6c7b3eed331ec7ab0e2c792d579c6fff499c85c516c11fe14
SNAPPY_SHA256=75c1fbb3d618dd3a0483bff0e26d0a92b495bbe5059c8b4f1c962b478b6e06e7
SNAPPY_VERSION=1.1.9
XZ_VERSION=5.2.5 # for LZMA
ZLIB_VERSION=1.2.13
ZLIB_VERSION=1.2.12
ZSTD_VERSION=1.5.0
WANGLE_SHA256=1002e9c32b6f4837f6a760016e3b3e22f3509880ef3eaad191c80dc92655f23f

@@ -908,11 +820,7 @@ source $PREFIX/activate
export CC=$PREFIX/bin/clang
export CXX=$PREFIX/bin/clang++
export CFLAGS="$CFLAGS -fPIC"
if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then
    export CXXFLAGS="$CXXFLAGS -fPIC"
else
    export CXXFLAGS="$CXXFLAGS -fPIC -stdlib=libc++"
fi
export CXXFLAGS="$CXXFLAGS -fPIC"
COMMON_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$PREFIX
                    -DCMAKE_PREFIX_PATH=$PREFIX
                    -DCMAKE_BUILD_TYPE=Release
@@ -926,7 +834,7 @@ COMMON_CMAKE_FLAGS="-DCMAKE_INSTALL_PREFIX=$PREFIX
COMMON_CONFIGURE_FLAGS="--enable-shared=no --prefix=$PREFIX"
COMMON_MAKE_INSTALL_FLAGS="-j$CPUS BUILD_SHARED=no PREFIX=$PREFIX install"

log_tool_name "bzip2 $BZIP2_VERSION"
# install bzip2
if [ ! -f $PREFIX/include/bzlib.h ]; then
    if [ -d bzip2-$BZIP2_VERSION ]; then
        rm -rf bzip2-$BZIP2_VERSION
@@ -937,7 +845,7 @@ if [ ! -f $PREFIX/include/bzlib.h ]; then
    popd
fi

log_tool_name "fmt $FMT_VERSION"
# install fmt
if [ ! -d $PREFIX/include/fmt ]; then
    if [ -d fmt-$FMT_VERSION ]; then
        rm -rf fmt-$FMT_VERSION
@@ -950,7 +858,7 @@ if [ ! -d $PREFIX/include/fmt ]; then
    popd && popd
fi

log_tool_name "lz4 $LZ4_VERSION"
# install lz4
if [ ! -f $PREFIX/include/lz4.h ]; then
    if [ -d lz4-$LZ4_VERSION ]; then
        rm -rf lz4-$LZ4_VERSION
@@ -961,7 +869,7 @@ if [ ! -f $PREFIX/include/lz4.h ]; then
    popd
fi

log_tool_name "xz $XZ_VERSION"
# install xz
if [ ! -f $PREFIX/include/lzma.h ]; then
    if [ -d xz-$XZ_VERSION ]; then
        rm -rf xz-$XZ_VERSION
@@ -973,7 +881,7 @@ if [ ! -f $PREFIX/include/lzma.h ]; then
    popd
fi

log_tool_name "zlib $ZLIB_VERSION"
# install zlib
if [ ! -f $PREFIX/include/zlib.h ]; then
    if [ -d zlib-$ZLIB_VERSION ]; then
        rm -rf zlib-$ZLIB_VERSION
@@ -987,7 +895,7 @@ if [ ! -f $PREFIX/include/zlib.h ]; then
    popd && popd
fi

log_tool_name "zstd $ZSTD_VERSION"
# install zstd
if [ ! -f $PREFIX/include/zstd.h ]; then
    if [ -d zstd-$ZSTD_VERSION ]; then
        rm -rf zstd-$ZSTD_VERSION
@@ -1002,8 +910,7 @@ if [ ! -f $PREFIX/include/zstd.h ]; then
    popd && popd
fi

# TODO(gitbuda): Freeze jemalloc version.
log_tool_name "jemalloc"
# install jemalloc
if [ ! -d $PREFIX/include/jemalloc ]; then
    if [ -d jemalloc ]; then
        rm -rf jemalloc
@@ -1020,7 +927,7 @@ if [ ! -d $PREFIX/include/jemalloc ]; then
    popd
fi

log_tool_name "BOOST $BOOST_VERSION"
# install boost
if [ ! -d $PREFIX/include/boost ]; then
    if [ -d boost_$BOOST_VERSION_UNDERSCORES ]; then
        rm -rf boost_$BOOST_VERSION_UNDERSCORES
@@ -1028,24 +935,15 @@ if [ ! -d $PREFIX/include/boost ]; then
    tar -xzf ../archives/boost_$BOOST_VERSION_UNDERSCORES.tar.gz
    pushd boost_$BOOST_VERSION_UNDERSCORES
    ./bootstrap.sh --prefix=$PREFIX --with-toolset=clang --with-python=python3 --without-icu
    if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then
        ./b2 toolset=clang -j$CPUS install variant=release link=static cxxstd=20 --disable-icu \
            -sZLIB_SOURCE="$PREFIX" -sZLIB_INCLUDE="$PREFIX/include" -sZLIB_LIBPATH="$PREFIX/lib" \
            -sBZIP2_SOURCE="$PREFIX" -sBZIP2_INCLUDE="$PREFIX/include" -sBZIP2_LIBPATH="$PREFIX/lib" \
            -sLZMA_SOURCE="$PREFIX" -sLZMA_INCLUDE="$PREFIX/include" -sLZMA_LIBPATH="$PREFIX/lib" \
            -sZSTD_SOURCE="$PREFIX" -sZSTD_INCLUDE="$PREFIX/include" -sZSTD_LIBPATH="$PREFIX/lib"
    else
        ./b2 toolset=clang -j$CPUS install variant=release link=static cxxstd=20 --disable-icu \
            cxxflags="-stdlib=libc++" linkflags="-stdlib=libc++" \
            -sZLIB_SOURCE="$PREFIX" -sZLIB_INCLUDE="$PREFIX/include" -sZLIB_LIBPATH="$PREFIX/lib" \
            -sBZIP2_SOURCE="$PREFIX" -sBZIP2_INCLUDE="$PREFIX/include" -sBZIP2_LIBPATH="$PREFIX/lib" \
            -sLZMA_SOURCE="$PREFIX" -sLZMA_INCLUDE="$PREFIX/include" -sLZMA_LIBPATH="$PREFIX/lib" \
            -sZSTD_SOURCE="$PREFIX" -sZSTD_INCLUDE="$PREFIX/include" -sZSTD_LIBPATH="$PREFIX/lib"
    fi
    ./b2 toolset=clang -j$CPUS install variant=release link=static cxxstd=20 --disable-icu \
        -sZLIB_SOURCE="$PREFIX" -sZLIB_INCLUDE="$PREFIX/include" -sZLIB_LIBPATH="$PREFIX/lib" \
        -sBZIP2_SOURCE="$PREFIX" -sBZIP2_INCLUDE="$PREFIX/include" -sBZIP2_LIBPATH="$PREFIX/lib" \
        -sLZMA_SOURCE="$PREFIX" -sLZMA_INCLUDE="$PREFIX/include" -sLZMA_LIBPATH="$PREFIX/lib" \
        -sZSTD_SOURCE="$PREFIX" -sZSTD_INCLUDE="$PREFIX/include" -sZSTD_LIBPATH="$PREFIX/lib"
    popd
fi

log_tool_name "double-conversion $DOUBLE_CONVERSION_VERSION"
# install double-conversion
if [ ! -d $PREFIX/include/double-conversion ]; then
    if [ -d double-conversion-$DOUBLE_CONVERSION_VERSION ]; then
        rm -rf double-conversion-$DOUBLE_CONVERSION_VERSION
@@ -1060,8 +958,7 @@ if [ ! -d $PREFIX/include/double-conversion ]; then
    popd && popd
fi

# TODO(gitbuda): Freeze gflags version.
log_tool_name "gflags"
# install gflags
if [ ! -d $PREFIX/include/gflags ]; then
    if [ -d gflags ]; then
        rm -rf gflags
@@ -1080,7 +977,7 @@ if [ ! -d $PREFIX/include/gflags ]; then
    popd && popd
fi

log_tool_name "libunwind $LIBUNWIND_VERSION"
# install libunwind
if [ ! -f $PREFIX/include/libunwind.h ]; then
    if [ -d libunwind-$LIBUNWIND_VERSION ]; then
        rm -rf libunwind-$LIBUNWIND_VERSION
@@ -1093,7 +990,7 @@ if [ ! -f $PREFIX/include/libunwind.h ]; then
    popd
fi

log_tool_name "glog $GLOG_VERSION"
# install glog
if [ ! -d $PREFIX/include/glog ]; then
    if [ -d glog-$GLOG_VERSION ]; then
        rm -rf glog-$GLOG_VERSION
@@ -1107,7 +1004,7 @@ if [ ! -d $PREFIX/include/glog ]; then
    popd && popd
fi

log_tool_name "libevent $LIBEVENT_VERSION"
# install libevent
if [ ! -d $PREFIX/include/event2 ]; then
    if [ -d libevent-$LIBEVENT_VERSION ]; then
        rm -rf libevent-$LIBEVENT_VERSION
@@ -1126,7 +1023,7 @@ if [ ! -d $PREFIX/include/event2 ]; then
    popd && popd
fi

log_tool_name "snappy $SNAPPY_VERSION"
# install snappy
if [ ! -f $PREFIX/include/snappy.h ]; then
    if [ -d snappy-$SNAPPY_VERSION ]; then
        rm -rf snappy-$SNAPPY_VERSION
@@ -1144,7 +1041,7 @@ if [ ! -f $PREFIX/include/snappy.h ]; then
    popd && popd
fi

log_tool_name "libsodium $LIBSODIUM_VERSION"
# install libsodium
if [ ! -f $PREFIX/include/sodium.h ]; then
    if [ -d libsodium-$LIBSODIUM_VERSION ]; then
        rm -rf libsodium-$LIBSODIUM_VERSION
@@ -1156,7 +1053,7 @@ if [ ! -f $PREFIX/include/sodium.h ]; then
    popd
fi

log_tool_name "libaio $LIBAIO_VERSION"
# install libaio
if [ ! -f $PREFIX/include/libaio.h ]; then
    if [ -d libaio-$LIBAIO_VERSION ]; then
        rm -rf libaio-$LIBAIO_VERSION
@@ -1167,121 +1064,114 @@ if [ ! -f $PREFIX/include/libaio.h ]; then
    popd
fi

if [[ "${DISTRO}" != "amzn-2" ]]; then
log_tool_name "folly $FBLIBS_VERSION"
if [ ! -d $PREFIX/include/folly ]; then
    if [ -d folly-$FBLIBS_VERSION ]; then
        rm -rf folly-$FBLIBS_VERSION
    fi
    mkdir folly-$FBLIBS_VERSION
    tar -xzf ../archives/folly-$FBLIBS_VERSION.tar.gz -C folly-$FBLIBS_VERSION
    pushd folly-$FBLIBS_VERSION
    patch -p1 < ../../folly.patch
    # build is used by facebook builder
    mkdir _build
    pushd _build
    cmake .. $COMMON_CMAKE_FLAGS \
        -DBOOST_LINK_STATIC=ON \
        -DBUILD_TESTS=OFF \
        -DGFLAGS_NOTHREADS=OFF \
        -DCXX_STD="c++20"
    make -j$CPUS install
    popd && popd
# install folly
if [ ! -d $PREFIX/include/folly ]; then
    if [ -d folly-$FBLIBS_VERSION ]; then
        rm -rf folly-$FBLIBS_VERSION
    fi
    mkdir folly-$FBLIBS_VERSION
    tar -xzf ../archives/folly-$FBLIBS_VERSION.tar.gz -C folly-$FBLIBS_VERSION
    pushd folly-$FBLIBS_VERSION
    patch -p1 < ../../folly.patch
    # build is used by facebook builder
    mkdir _build
    pushd _build
    cmake .. $COMMON_CMAKE_FLAGS \
        -DBOOST_LINK_STATIC=ON \
        -DBUILD_TESTS=OFF \
        -DGFLAGS_NOTHREADS=OFF \
        -DCXX_STD="c++20"
    make -j$CPUS install
    popd && popd
fi

log_tool_name "fizz $FBLIBS_VERSION"
if [ ! -d $PREFIX/include/fizz ]; then
    if [ -d fizz-$FBLIBS_VERSION ]; then
        rm -rf fizz-$FBLIBS_VERSION
    fi
    mkdir fizz-$FBLIBS_VERSION
    tar -xzf ../archives/fizz-$FBLIBS_VERSION.tar.gz -C fizz-$FBLIBS_VERSION
    pushd fizz-$FBLIBS_VERSION
    # build is used by facebook builder
    mkdir _build
    pushd _build
    cmake ../fizz $COMMON_CMAKE_FLAGS \
        -DBUILD_TESTS=OFF \
        -DBUILD_EXAMPLES=OFF \
        -DGFLAGS_NOTHREADS=OFF
    make -j$CPUS install
    popd && popd
# install fizz
if [ ! -d $PREFIX/include/fizz ]; then
    if [ -d fizz-$FBLIBS_VERSION ]; then
        rm -rf fizz-$FBLIBS_VERSION
    fi
    mkdir fizz-$FBLIBS_VERSION
    tar -xzf ../archives/fizz-$FBLIBS_VERSION.tar.gz -C fizz-$FBLIBS_VERSION
    pushd fizz-$FBLIBS_VERSION
    # build is used by facebook builder
    mkdir _build
    pushd _build
    cmake ../fizz $COMMON_CMAKE_FLAGS \
        -DBUILD_TESTS=OFF \
        -DBUILD_EXAMPLES=OFF \
        -DGFLAGS_NOTHREADS=OFF
    make -j$CPUS install
    popd && popd
fi

log_tool_name "wangle $FBLIBS_VERSION"
if [ ! -d $PREFIX/include/wangle ]; then
    if [ -d wangle-$FBLIBS_VERSION ]; then
        rm -rf wangle-$FBLIBS_VERSION
    fi
    mkdir wangle-$FBLIBS_VERSION
    tar -xzf ../archives/wangle-$FBLIBS_VERSION.tar.gz -C wangle-$FBLIBS_VERSION
    pushd wangle-$FBLIBS_VERSION
    # build is used by facebook builder
    mkdir _build
    pushd _build
    cmake ../wangle $COMMON_CMAKE_FLAGS \
        -DBUILD_TESTS=OFF \
        -DBUILD_EXAMPLES=OFF \
        -DGFLAGS_NOTHREADS=OFF
    make -j$CPUS install
    popd && popd
# install wangle
if [ ! -d $PREFIX/include/wangle ]; then
    if [ -d wangle-$FBLIBS_VERSION ]; then
        rm -rf wangle-$FBLIBS_VERSION
    fi
    mkdir wangle-$FBLIBS_VERSION
    tar -xzf ../archives/wangle-$FBLIBS_VERSION.tar.gz -C wangle-$FBLIBS_VERSION
    pushd wangle-$FBLIBS_VERSION
    # build is used by facebook builder
    mkdir _build
    pushd _build
    cmake ../wangle $COMMON_CMAKE_FLAGS \
        -DBUILD_TESTS=OFF \
        -DBUILD_EXAMPLES=OFF \
        -DGFLAGS_NOTHREADS=OFF
    make -j$CPUS install
    popd && popd
fi

log_tool_name "proxygen $FBLIBS_VERSION"
if [ ! -d $PREFIX/include/proxygen ]; then
    if [ -d proxygen-$FBLIBS_VERSION ]; then
        rm -rf proxygen-$FBLIBS_VERSION
    fi
    mkdir proxygen-$FBLIBS_VERSION
    tar -xzf ../archives/proxygen-$FBLIBS_VERSION.tar.gz -C proxygen-$FBLIBS_VERSION
    pushd proxygen-$FBLIBS_VERSION
    patch -p1 < ../../proxygen.patch
    # build is used by facebook builder
    mkdir _build
    pushd _build
    cmake .. $COMMON_CMAKE_FLAGS \
        -DBUILD_TESTS=OFF \
        -DBUILD_SAMPLES=OFF \
        -DGFLAGS_NOTHREADS=OFF \
        -DBUILD_QUIC=OFF
    make -j$CPUS install
    popd && popd
# install proxygen
if [ ! -d $PREFIX/include/proxygen ]; then
    if [ -d proxygen-$FBLIBS_VERSION ]; then
        rm -rf proxygen-$FBLIBS_VERSION
    fi
    mkdir proxygen-$FBLIBS_VERSION
    tar -xzf ../archives/proxygen-$FBLIBS_VERSION.tar.gz -C proxygen-$FBLIBS_VERSION
    pushd proxygen-$FBLIBS_VERSION
    patch -p1 < ../../proxygen.patch
    # build is used by facebook builder
    mkdir _build
    pushd _build
    cmake .. $COMMON_CMAKE_FLAGS \
        -DBUILD_TESTS=OFF \
        -DBUILD_SAMPLES=OFF \
        -DGFLAGS_NOTHREADS=OFF \
        -DBUILD_QUIC=OFF
    make -j$CPUS install
    popd && popd
fi

log_tool_name "flex $FLEX_VERSION"
if [ ! -f $PREFIX/include/FlexLexer.h ]; then
    if [ -d flex-$FLEX_VERSION ]; then
        rm -rf flex-$FLEX_VERSION
    fi
    tar -xzf ../archives/flex-$FLEX_VERSION.tar.gz
    pushd flex-$FLEX_VERSION
    ./configure $COMMON_CONFIGURE_FLAGS
    make -j$CPUS install
    popd
# install flex
if [ ! -f $PREFIX/include/FlexLexer.h ]; then
    if [ -d flex-$FLEX_VERSION ]; then
        rm -rf flex-$FLEX_VERSION
    fi
    tar -xzf ../archives/flex-$FLEX_VERSION.tar.gz
    pushd flex-$FLEX_VERSION
    ./configure $COMMON_CONFIGURE_FLAGS
    make -j$CPUS install
    popd
fi

log_tool_name "fbthrift $FBLIBS_VERSION"
if [ ! -d $PREFIX/include/thrift ]; then
    if [ -d fbthrift-$FBLIBS_VERSION ]; then
        rm -rf fbthrift-$FBLIBS_VERSION
    fi
    git clone --depth 1 --branch v$FBLIBS_VERSION https://github.com/facebook/fbthrift.git fbthrift-$FBLIBS_VERSION
    pushd fbthrift-$FBLIBS_VERSION
    # build is used by facebook builder
    mkdir _build
    pushd _build
    if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then
        CMAKE_CXX_FLAGS="-fsized-deallocation"
    else
        CMAKE_CXX_FLAGS="-fsized-deallocation -stdlib=libc++"
    fi
    cmake .. $COMMON_CMAKE_FLAGS \
        -Denable_tests=OFF \
        -DGFLAGS_NOTHREADS=OFF \
        -DCMAKE_CXX_FLAGS="$CMAKE_CXX_FLAGS"
    make -j$CPUS install
    popd
# install fbthrift
if [ ! -d $PREFIX/include/thrift ]; then
    if [ -d fbthrift-$FBLIBS_VERSION ]; then
        rm -rf fbthrift-$FBLIBS_VERSION
    fi
    git clone --depth 1 --branch v$FBLIBS_VERSION https://github.com/facebook/fbthrift.git fbthrift-$FBLIBS_VERSION
    pushd fbthrift-$FBLIBS_VERSION
    # build is used by facebook builder
    mkdir _build
    pushd _build
    cmake .. $COMMON_CMAKE_FLAGS \
        -Denable_tests=OFF \
        -DGFLAGS_NOTHREADS=OFF \
        -DCMAKE_CXX_FLAGS=-fsized-deallocation
    make -j$CPUS install
    popd
fi

popd
@@ -1289,7 +1179,7 @@ popd
# create toolchain archive
if [ ! -f $NAME-binaries-$DISTRO.tar.gz ]; then
    DISTRO_FULL_NAME=${DISTRO}
    if [[ "${DISTRO}" == centos* ]] || [[ "${DISTRO}" == fedora* ]]; then
    if [[ "${DISTRO}" == centos* ]]; then
        if [[ "$for_arm" = "true" ]]; then
            DISTRO_FULL_NAME="$DISTRO_FULL_NAME-aarch64"
        else
@@ -1302,12 +1192,7 @@ if [ ! -f $NAME-binaries-$DISTRO.tar.gz ]; then
            DISTRO_FULL_NAME="$DISTRO_FULL_NAME-amd64"
        fi
    fi
    if [ "$TOOLCHAIN_STDCXX" = "libstdc++" ]; then
        # Pass, because infra scripts assume there is no C++ standard lib in the name.
        echo "NOTE: Not adding anything to the archive name because the GCC C++ standard lib is used."
    else
        DISTRO_FULL_NAME="$DISTRO_FULL_NAME-libc++"
    fi

    tar --owner=root --group=root -cpvzf $NAME-binaries-$DISTRO_FULL_NAME.tar.gz -C /opt $NAME
fi
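
# The resulting archive is meant to be unpacked under /opt and activated per
# shell; a typical consumer session (the archive name here is only an example
# following the DISTRO_FULL_NAME scheme above):
#   tar -xzf toolchain-v4-binaries-ubuntu-22.04-amd64.tar.gz -C /opt
#   source /opt/toolchain-v4/activate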

File diff suppressed because it is too large
@@ -1,42 +0,0 @@
#!/bin/bash -ex
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
PREFIX=/opt/toolchain-v5

# NOTE: Oftentimes, when versions in the build script are changed, something
# doesn't work. To avoid rebuilding the whole toolchain when only a specific
# lib has to be rebuilt from scratch, just uncomment the specific line in this
# script and run it. Don't forget to comment it back to avoid unnecessary
# deletes the next time you run this script.

# rm -rf "$DIR/build"
# rm -rf "$DIR/output"

# rm -rf "$PREFIX/bin/gcc"
# rm -rf "$PREFIX/bin/ld.gold"
# rm -rf "$PREFIX/bin/gdb"
# rm -rf "$PREFIX/bin/cmake"
# rm -rf "$PREFIX/bin/clang"
# rm -rf "$PREFIX/include/bzlib.h"
# rm -rf "$PREFIX/include/fmt"
# rm -rf "$PREFIX/include/lz4.h"
# rm -rf "$PREFIX/include/lzma.h"
# rm -rf "$PREFIX/include/zlib.h"
# rm -rf "$PREFIX/include/zstd.h"
# rm -rf "$PREFIX/include/jemalloc"
# rm -rf "$PREFIX/include/boost"
# rm -rf "$PREFIX/include/double-conversion"
# rm -rf "$PREFIX/include/gflags"
# rm -rf "$PREFIX/include/libunwind.h"
# rm -rf "$PREFIX/include/glog"
# rm -rf "$PREFIX/include/event2"
# rm -rf "$PREFIX/include/sodium.h"
# rm -rf "$PREFIX/include/libaio.h"
# rm -rf "$PREFIX/include/FlexLexer.h"
# rm -rf "$PREFIX/include/snappy.h"
# rm -rf "$PREFIX/include/fizz"
# rm -rf "$PREFIX/include/folly"
# rm -rf "$PREFIX/include/proxygen"
# rm -rf "$PREFIX/include/wangle"
# rm -rf "$PREFIX/include/thrift"

# rm -rf "$PREFIX"
@@ -1,41 +0,0 @@
diff -ur a/folly/CMakeLists.txt b/folly/CMakeLists.txt
--- a/folly/CMakeLists.txt 2021-12-12 23:10:42.000000000 +0100
+++ b/folly/CMakeLists.txt 2022-02-03 15:19:41.349693134 +0100
@@ -28,7 +28,6 @@
)

add_subdirectory(experimental/exception_tracer)
-add_subdirectory(logging/example)

if (PYTHON_EXTENSIONS)
# Create tree of symbolic links in structure required for successful
diff -ur a/folly/experimental/exception_tracer/ExceptionTracerLib.cpp b/folly/experimental/exception_tracer/ExceptionTracerLib.cpp
--- a/folly/experimental/exception_tracer/ExceptionTracerLib.cpp 2021-12-12 23:10:42.000000000 +0100
+++ b/folly/experimental/exception_tracer/ExceptionTracerLib.cpp 2022-02-03 15:19:11.003368891 +0100
@@ -96,6 +96,7 @@
#define __builtin_unreachable()
#endif

+#if 0
namespace __cxxabiv1 {

void __cxa_throw(
@@ -154,5 +155,5 @@
}

} // namespace std
-
+#endif
#endif // defined(__GLIBCXX__)
diff -ur a/folly/Portability.h b/folly/Portability.h
--- a/folly/Portability.h 2021-12-12 23:10:42.000000000 +0100
+++ b/folly/Portability.h 2022-02-03 15:19:11.003368891 +0100
@@ -566,7 +566,7 @@
#define FOLLY_HAS_COROUTINES 0
#elif (__cpp_coroutines >= 201703L || __cpp_impl_coroutine >= 201902L) && \
(__has_include(<coroutine>) || __has_include(<experimental/coroutine>))
-#define FOLLY_HAS_COROUTINES 1
+#define FOLLY_HAS_COROUTINES 0
// This is mainly to workaround bugs triggered by LTO, when stack allocated
// variables in await_suspend end up on a coroutine frame.
#define FOLLY_CORO_AWAIT_SUSPEND_NONTRIVIAL_ATTRIBUTES FOLLY_NOINLINE
@@ -1,26 +0,0 @@
diff --git a/folly/CMakeLists.txt b/folly/CMakeLists.txt
index e0e16df..471131e 100644
--- a/folly/CMakeLists.txt
+++ b/folly/CMakeLists.txt
@@ -28,7 +28,7 @@ install(
)

add_subdirectory(experimental/exception_tracer)
-add_subdirectory(logging/example)
+# add_subdirectory(logging/example)

if (PYTHON_EXTENSIONS)
# Create tree of symbolic links in structure required for successful
diff --git a/folly/Portability.h b/folly/Portability.h
index 365ef1b..42d24b8 100644
--- a/folly/Portability.h
+++ b/folly/Portability.h
@@ -560,7 +560,7 @@ constexpr auto kCpplibVer = 0;
(defined(__cpp_coroutines) && __cpp_coroutines >= 201703L) || \
(defined(__cpp_impl_coroutine) && __cpp_impl_coroutine >= 201902L)) && \
(__has_include(<coroutine>) || __has_include(<experimental/coroutine>))
-#define FOLLY_HAS_COROUTINES 1
+#define FOLLY_HAS_COROUTINES 0
// This is mainly to workaround bugs triggered by LTO, when stack allocated
// variables in await_suspend end up on a coroutine frame.
#define FOLLY_CORO_AWAIT_SUSPEND_NONTRIVIAL_ATTRIBUTES FOLLY_NOINLINE
@@ -1,29 +0,0 @@
diff -ur a/CMakeLists.txt b/CMakeLists.txt
--- a/CMakeLists.txt 2021-05-05 00:53:34.000000000 +0200
+++ b/CMakeLists.txt 2022-01-27 17:18:34.758302398 +0100
@@ -52,9 +52,9 @@
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHs-c-")
add_definitions(-D_HAS_EXCEPTIONS=0)

- # Disable RTTI.
- string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
+ # # Disable RTTI.
+ # string(REGEX REPLACE "/GR" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+ # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /GR-")
else(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")
# Use -Wall for clang and gcc.
if(NOT CMAKE_CXX_FLAGS MATCHES "-Wall")
@@ -77,9 +77,9 @@
string(REGEX REPLACE "-fexceptions" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-exceptions")

- # Disable RTTI.
- string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
+ # # Disable RTTI.
+ # string(REGEX REPLACE "-frtti" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}")
+ # set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fno-rtti")
endif(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC")

# BUILD_SHARED_LIBS is a standard CMake variable, but we declare it here to make
@@ -1,75 +0,0 @@
-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBEzEOZIBEACxg/IuXERlDB48JBWmF4NxNUuuup1IhJAJyFGFSKh3OGAO2Ard
sNuRLjANsFXA7m7P5eTFcG+BoHHuAVYmKnI3PPZtHVLnUt4pGItPczQZ2BE1WpcI
ayjGTBJeKItX3Npqg9D/odO9WWS1i3FQPVdrLn0YH37/BA66jeMQCRo7g7GLpaNf
IrvYGsqTbxCwsmA37rpE7oyU4Yrf74HT091WBsRIoq/MelhbxTDMR8eu/dUGZQVc
Kj3lN55RepwWwUUKyqarY0zMt4HkFJ7v7yRL+Cvzy92Ouv4Wf2FlhNtEs5LE4Tax
W0PO5AEmUoKjX87SezQK0f652018b4u6Ex52cY7p+n5TII/UyoowH6+tY8UHo9yb
fStrqgNE/mY2bhA6+AwCaOUGsFzVVPTbjtxL3HacUP/jlA1h78V8VTvTs5d55iG7
jSqR9o05wje8rwNiXXK0xtiJahyNzL97Kn/DgPSqPIi45G+8nxWSPFM5eunBKRl9
vAnsvwrdPRsR6YR3uMHTuVhQX9/CY891MHkaZJ6wydWtKt3yQwJLYqwo5d4DwnUX
CduUwSKv+6RmtWI5ZmTQYOcBRcZyGKml9X9Q8iSbm6cnpFXmLrNQwCJN+D3SiYGc
MtbltZo0ysPMa6Xj5xFaYqWk/BI4iLb2Gs+ByGo/+a0Eq4XYBMOpitNniQARAQAB
tCdMYXNzZSBDb2xsaW4gPGxhc3NlLmNvbGxpbkB0dWthYW5pLm9yZz6JAlEEEwEK
ADsCGwMCHgECF4AECwkIBwMVCggFFgIDAQAWIQQ2kMJAzlG0Zw0wrRw47nV9aRhG
IAUCYEt9dQUJFxeR4wAKCRA47nV9aRhGIBNDEACxD6vJ+enZwe3IgkJh5JtLsC9b
MWCQRlPW1EVMsg96Cb5Rtron1eN1pp1TlzENJu1/C7C/VEsr9WwOPg26Men7fNf/
O21QM9IBWd/uB0Pu333WqKh92ESS5x9ST9DrG39nVGSPkQQBMuia72VrA+crPnwT
/h/u1IN6/sff5VDIU24rUiqW2Npy733dANruj7Ny0scRXVPltnVdhqwPHt6qNjC1
t+/cCnwHgW1BR1RYXBPpB42z/m29dL9rPrG0YPGWs2Bc+EATUICfEE6eIvwfciue
IJTjKT9Y9DrogJC2AYFhjC7N04OKdCB2hFs4BjexJwr4X0GJO7LhFl03c951AsIE
GHwrucRPB5bo2vmvQ8IvZn7CmtdUJzXv9JlyU6p+MIK1pz7TK6GgSOSffQIXZn6e
nUPtm9mEwuncOfmW8/ODYPs1gCWYgyiFJx8h7eEu+M4MxHSFBs7MwXf/Ae2fSp+M
P/p198qB8fC5oVBnF95qb0Qi0uc1D+Gb+gpBF+ymMb+s/VBOR3QWiym7AzBrJ62g
UnbC9jMLGnSRI+7p7raUfMTgXr5/oQoBw7ExJVltSSRrim2YH/t4CV47mO6dR9J3
1RtsTFIRNhz+07XPsETcuCV/dgqeC8fOFLt9MY17Sufhb1DcGy4urZBOIhXcpTV7
vHVj5IYH5nYOT49NRYkCOAQTAQIAIgUCTMQ5kgIbAwYLCQgHAwIGFQgCCQoLBBYC
AwECHgECF4AACgkQOO51fWkYRiAg4A/7BXKwoRaXrMbMPOW7vuVF7c2IKB2Yqzn1
vLBCwuEHkqY237lDcXY4/5LR+1gcZ3Duw1n/BRSm0FBdvyX/JTWiWNSDUkKAO/0l
T2Tg44YLrDT3bzwu8dbU9xQt6kH+SCOHvv5Oe4k79l5mro6fF3H1M0bN63x/YoFY
ojy09D7/JptY82oR4f/VdKnfZLJcCViCb0wp8SD2NkDAudKg+K+7PD8HlTWklQQg
TZdRXxVZKIJeU42aJDqnRbAhJd64YHyClhqut9F5LUmiP5qfLfNhkKDhNOwk2Blr
BGBJkSd7wPyzcX4Mun/L6YspHjbeVMt9TD7HQlo+OOd2OjAHCx6pqwkXnzeLPEaE
cPdQ1SHgrBViAxX3DNPubLP0Knw8XwFu96EuhHZgexE1W7bB4LFsJyXAc5k1PqPD
CLsAauxmvI2OfI7opG/8wyxDvNgoPjG8fZNAgY0REqPC0JnTXChH31IxUmhNotH8
tD3DDTZOHw05n5MwwUrEE9xiETVDfFQcMLfxZ9KLz+BC2g1t5LYublRgnCMNJzFg
sNUMM02CphABzl/LCLnumr0eyQQ/weV4twEhLwSDmqLYHL0EdYW0Y3CnnU9vmYxQ
cXKbstS71sEJJYBBmSBbf9GxkOY8BRNtwVwY0kPgxv1WqdVBiAFvfB+pyAsrax9B
3UeB7ZSwRD6JAhwEEAEKAAYFAlS25GwACgkQlbYYGy0z6ew92Q//ZA9/6piQtoW4
PwP/1DtWGyKU8hwR+9FG669iPk/dAG+yoEJtFMOUpg/FUFmCX8Bc4oEHsCVyLxKt
DcCVUIRcYNSFi5hTZaBEbwsOlDT37gtlfIIu34hhHRccKaLnN/N9gNMNw8wGh9xg
Q/KtxZwcbk/bZIlDkKTJkFBRAekdEGAFDWb/AZOy+LQxS8ZAh1eWkfV0i8opmK9k
gPXtLE0WSsqtYyGs58z+BFE9NH3tEUwK6jSvtuLwQl4UrICNbKthcpb8WwH6UXzb
q3QNSYVOpf/cqRdBJA6bvb/ku/xyKVL08lGmxD9v1b137R7mafDAFPTsvH2Mt/0V
YuhtWav3r1Bl9QksDxt2DTS8wiWDUBetGqOVdcw7vBrXPEWDNBmxeJXsiJ7zJlR+
9wrJOm6RV2+l1IPxu96EaPS+kTNBijKrhxb67bww8BTEWTd0wcdJmgWRkM8SIstp
IKqd0L2TFYph2/NtrBhRg+DIEPJPpSTGsUMcCEXCZPQ+cIdlQKsWpk0tZ62DlvEl
r7E+wgUSQolRfx5KrpZifiS2zQlhzdXv28CJhsVbLyw5fUAWUKIH/dCo5NKsNLk2
Lc5DH9VWnFgxAAtW290FqeK/4ulMq7Vs1dQSwyHM2Ni3QqqeaiOrh8gbSY5CMLFN
Y3HYRwuTYPa3AobsozCzBj0Zdf/6AFe5Ag0ETMQ5kgEQAL/FwKdjxgPxtSpgq1SM
zgZtTTyLqhgGD3NZfadHWHYRIL38NDV3JeTA79Y2zj2dj7KQPDT+0aqeizTV2E3j
P3iCQ53VOT4consBaQAgKexpptnS+T1DobtICFJ0GGzf0HRj6KO2zSOuOitWPWlU
wbvX7M0LLI2+hqlx0jTPqbJFZ/Za6KTtbS6xdCPVUpUqYZQpokEZcwQmUp8Q+lGo
JD2sNYCZyap63X/aAOgCGr2RXYddOH5e8vGzGW+mwtCv+WQ9Ay35mGqI5MqkbZd1
Qbuv2b1647E/QEEucfRHVbJVKGGPpFMUJtcItyyIt5jo+r9CCL4Cs47dF/9/RNwu
NvpvHXUyqMBQdWNZRMx4k/NGD/WviPi9m6mIMui6rOQsSOaqYdcUX4Nq2Orr3Oaz
2JPQdUfeI23iot1vK8hxvUCQTV3HfJghizN6spVl0yQOKBiE8miJRgrjHilH3hTb
xoo42xDkNAq+CQo3QAm1ibDxKCDq0RcWPjcCRAN/Q5MmpcodpdKkzV0yGIS4g7s5
frVrgV/kox2r4/Yxsr8K909+4H82AjTKGX/BmsQFCTAqBk6p7I0zxjIqJ/w33TZB
Q0Pn4r3WIlUPafzY6a9/LAvN1fHRxf9SpCByJsszD03Qu5f5TB8gthsdnVmTo7jj
iordEKMtw2aEMLzdWWTQ/TNVABEBAAGJAjwEGAEKACYCGwwWIQQ2kMJAzlG0Zw0w
rRw47nV9aRhGIAUCYEt9YAUJFxeRzgAKCRA47nV9aRhGIMLtD/9HuKM4pngImcuz
YwzQmdv4j26YYyh4jVsKEmVWTiRcehEgUIlrWkCu3qzd5NK+RetS7kJ8MPnzEUfj
YbpdC6yrF6n1mSrZZ4VJMkV2ev37bIgXM+Wp1mCAGbjNxQnjn9RabT/gjIqmGuRn
AP7RsSeOSuO/gO9h2Pteciz23ussTilB+8cTooQEQQZe6Kv/zukvL+ccSehLHsZ7
qVfRUAmtt8nFkXXE+s8jfLfhqstaI2/RJu5witaPcXM8Mnz2E95aASAbZy0eQot9
0Pvf07n9yuC3tueTvzvlXx3h5U3yT44tIOmzANIQjay1TGdm+RBJ2ZYyhyLawlZ2
NVUXXSp4QZZXPA0UWbF+pb7Q9cdKDNFVuvGBljuea0Yd0T2o+ibDq43HziX9ll+l
SXk9mqvW1UcDOaxWrSsm1Gc1O9g3wqH5xHAhtY8GPh/7VgAawskPkmnlkMW6pYPy
zibbeISJL1gd1jIT63y6aoVrtNoo+wYJm280ROflh4+5QOo6QJ+jm70fkXSG/qJ5
a8/qCPTHkJc/rpkL6/TDQAJURi9RhDAC0gb40HtusbN1LZEA+i0cWTmYXap+DB4Y
R4pApilpaG87M+VUokR4xpnx7vTb2MPa7Mdenvi9FEGnKXadmT8038vlfzz5GGUT
MlVin9BQPTpdA+PpRiJvKJgVDeAFOg==
=asTC
-----END PGP PUBLIC KEY BLOCK-----
@@ -1,18 +1,11 @@
 #!/bin/bash

-function operating_system() {
-  if [[ "$OSTYPE" == "linux-gnu"* ]]; then
-    grep -E '^(VERSION_)?ID=' /etc/os-release | \
-      sort | cut -d '=' -f 2- | sed 's/"//g' | paste -s -d '-'
-  elif [[ "$OSTYPE" == "darwin"* ]]; then
-    echo "$(sw_vers -productName)-$(sw_vers -productVersion | cut -d '.' -f 1)"
-  else
-    echo "operating_system called on an unknown OS"
-    exit 1
-  fi
+operating_system() {
+  grep -E '^(VERSION_)?ID=' /etc/os-release | \
+    sort | cut -d '=' -f 2- | sed 's/"//g' | paste -s -d '-'
 }

-function check_operating_system() {
+check_operating_system() {
   if [ "$(operating_system)" != "$1" ]; then
     echo "Not the right operating system!"
     exit 1
@@ -21,25 +14,20 @@ function check_operating_system() {
   fi
 }

-function architecture() {
+architecture() {
   uname -m
 }

 check_architecture() {
-  if [ "$(architecture)" != "$1" ]; then
-    echo "Not the right architecture!"
-    exit 1
-  else
-    echo "The right architecture."
-  fi
+  local ARCH=$(architecture)
+  for arch in "$@"; do
+    if [ "${ARCH}" = "$arch" ]; then
+      echo "The right architecture!"
+      return 0
+    fi
+  done
+  echo "Not the right architecture!"
+  echo "Expected: $@"
+  echo "Actual: ${ARCH}"
+  exit 1
 }

-function check_all_yum() {
+check_all_yum() {
   local missing=""
   for pkg in $1; do
     if ! yum list installed "$pkg" >/dev/null 2>/dev/null; then
@@ -52,7 +40,7 @@ function check_all_yum() {
   fi
 }

-function check_all_dpkg() {
+check_all_dpkg() {
   local missing=""
   for pkg in $1; do
     if ! dpkg -s "$pkg" >/dev/null 2>/dev/null; then
@@ -65,7 +53,7 @@ function check_all_dpkg() {
   fi
 }

-function check_all_dnf() {
+check_all_dnf() {
   local missing=""
   for pkg in $1; do
     if ! dnf list installed "$pkg" >/dev/null 2>/dev/null; then
@@ -77,34 +65,8 @@ function check_all_dnf() {
     exit 1
   fi
 }

-function install_all_apt() {
+install_all_apt() {
   for pkg in $1; do
     apt install -y "$pkg"
   done
 }

-function install_custom_golang() {
-  # NOTE: The official https://go.dev/doc/manage-install doesn't seem to be working.
-  GOVERSION="$1"
-  GOINSTALLDIR="/opt/go$GOVERSION"
-  GOROOT="$GOINSTALLDIR/go" # GOPATH=$HOME/go
-  if [ ! -f "$GOROOT/bin/go" ]; then
-    curl -LO https://go.dev/dl/go$GOVERSION.linux-amd64.tar.gz
-    mkdir -p "$GOINSTALLDIR"
-    tar -C "$GOINSTALLDIR" -xzf go$GOVERSION.linux-amd64.tar.gz
-  fi
-  echo "go $GOVERSION installed under $GOROOT"
-}
-
-function install_custom_maven() {
-  MVNVERSION="$1"
-  MVNINSTALLDIR="/opt/apache-maven-$MVNVERSION"
-  MVNURL="https://s3.eu-west-1.amazonaws.com/deps.memgraph.io/maven/apache-maven-$MVNVERSION-bin.tar.gz"
-  if [ ! -f "$MVNINSTALLDIR/bin/mvn" ]; then
-    echo "Downloading maven from $MVNURL"
-    curl -LO "$MVNURL"
-    tar -C "/opt" -xzf "apache-maven-$MVNVERSION-bin.tar.gz"
-  fi
-  echo "maven $MVNVERSION installed under $MVNINSTALLDIR"
-}
@@ -1,26 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"

print_help() {
  echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 memgraph_logs_file_path cypherl_output_path"
  exit 1
}

if [ "$#" -ne 2 ]; then
  print_help
fi
INPUT="$1"
OUTPUT="$2"
if [ ! -f "$INPUT" ]; then
  echo -e "${COLOR_RED}ERROR:${COLOR_NULL} memgraph_logs_file_path is not a file!"
  print_help
fi

awk -v RS="Run] '" 'NR>1 { print $0 }' < "$INPUT" | sed -e "/^\[/d;" -e "s/'\([^']*\)$/;/g" > "$OUTPUT"

echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl file under $OUTPUT"
echo ""
echo "Import can be done by executing => \`cat $OUTPUT | mgconsole\`"
@@ -1,39 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"

print_help() {
  echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 input_file_path output_file_path"
  exit 1
}

if [ "$#" -ne 2 ]; then
  print_help
fi
INPUT="$1"
OUTPUT="$2"
if [ ! -f "$INPUT" ]; then
  echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
  print_help
fi

echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} BEGIN and COMMIT are required because variables share the same name (e.g. row)"
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} CONSTRAINTS are just skipped -> ${COLOR_RED}please create constraints manually if needed${COLOR_NULL}"

echo 'CREATE INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' > "$OUTPUT"

sed -e 's/^:begin/BEGIN/g; s/^BEGIN$/BEGIN;/g;' \
    -e 's/^:commit/COMMIT/g; s/^COMMIT$/COMMIT;/g;' \
    -e '/^CALL/d; /^SCHEMA AWAIT/d;' \
    -e 's/CREATE RANGE INDEX FOR (n:/CREATE INDEX ON :/g;' \
    -e 's/) ON (n./(/g;' \
    -e '/^CREATE CONSTRAINT/d; /^DROP CONSTRAINT/d;' "$INPUT" >> "$OUTPUT"

echo 'DROP INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' >> "$OUTPUT"

echo ""
echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl|.cypher file under $OUTPUT"
echo ""
echo "Please import data by executing => \`cat $OUTPUT | mgconsole\`"
@@ -1,61 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"

print_help() {
  echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 input_file_schema_path input_file_nodes_path input_file_relationships_path input_file_cleanup_path output_file_path"
  exit 1
}

if [ "$#" -ne 5 ]; then
  print_help
fi
INPUT_SCHEMA="$1"
INPUT_NODES="$2"
INPUT_RELATIONSHIPS="$3"
INPUT_CLEANUP="$4"
OUTPUT="$5"

if [ ! -f "$INPUT_SCHEMA" ]; then
  echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
  print_help
fi

if [ ! -f "$INPUT_NODES" ]; then
  echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
  print_help
fi

if [ ! -f "$INPUT_RELATIONSHIPS" ]; then
  echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
  print_help
fi

if [ ! -f "$INPUT_CLEANUP" ]; then
  echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
  print_help
fi

echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} BEGIN and COMMIT are required because variables share the same name (e.g. row)"
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} CONSTRAINTS are just skipped -> ${COLOR_RED}please create constraints manually if needed${COLOR_NULL}"


echo 'CREATE INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' > "$OUTPUT"

sed -e 's/CREATE RANGE INDEX FOR (n:/CREATE INDEX ON :/g;' \
    -e 's/) ON (n./(/g;' \
    -e '/^CREATE CONSTRAINT/d' $INPUT_SCHEMA >> "$OUTPUT"

cat "$INPUT_NODES" >> "$OUTPUT"
cat "$INPUT_RELATIONSHIPS" >> "$OUTPUT"

sed -e '/^DROP CONSTRAINT/d' "$INPUT_CLEANUP" >> "$OUTPUT"

echo 'DROP INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' >> "$OUTPUT"

echo ""
echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl|.cypher file under $OUTPUT"
echo ""
echo "Please import data by executing => \`cat $OUTPUT | mgconsole\`"
@@ -1,64 +0,0 @@
#!/bin/bash -e
COLOR_ORANGE="\e[38;5;208m"
COLOR_GREEN="\e[38;5;35m"
COLOR_RED="\e[0;31m"
COLOR_NULL="\e[0m"

print_help() {
  echo -e "${COLOR_ORANGE}HOW TO RUN:${COLOR_NULL} $0 input_file_schema_path input_file_nodes_path input_file_relationships_path input_file_cleanup_path output_file_schema_path output_file_nodes_path output_file_relationships_path output_file_cleanup_path"
  exit 1
}

if [ "$#" -ne 8 ]; then
  print_help
fi
INPUT_SCHEMA="$1"
INPUT_NODES="$2"
INPUT_RELATIONSHIPS="$3"
INPUT_CLEANUP="$4"
OUTPUT_SCHEMA="$5"
OUTPUT_NODES="$6"
OUTPUT_RELATIONSHIPS="$7"
OUTPUT_CLEANUP="$8"

if [ ! -f "$INPUT_SCHEMA" ]; then
  echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
  print_help
fi

if [ ! -f "$INPUT_NODES" ]; then
  echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
  print_help
fi

if [ ! -f "$INPUT_RELATIONSHIPS" ]; then
  echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
  print_help
fi

if [ ! -f "$INPUT_CLEANUP" ]; then
  echo -e "${COLOR_RED}ERROR:${COLOR_NULL} input_file_path is not a file!"
  print_help
fi

echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} BEGIN and COMMIT are required because variables share the same name (e.g. row)"
echo -e "${COLOR_ORANGE}NOTE:${COLOR_NULL} CONSTRAINTS are just skipped -> ${COLOR_RED}please create constraints manually if needed${COLOR_NULL}"


echo 'CREATE INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' > "$OUTPUT_SCHEMA"

sed -e 's/CREATE RANGE INDEX FOR (n:/CREATE INDEX ON :/g;' \
    -e 's/) ON (n./(/g;' \
    -e '/^CREATE CONSTRAINT/d' $INPUT_SCHEMA >> "$OUTPUT_SCHEMA"

cat "$INPUT_NODES" > "$OUTPUT_NODES"
cat "$INPUT_RELATIONSHIPS" > "$OUTPUT_RELATIONSHIPS"

sed -e '/^DROP CONSTRAINT/d' "$INPUT_CLEANUP" >> "$OUTPUT_CLEANUP"

echo 'DROP INDEX ON :`UNIQUE IMPORT LABEL`(`UNIQUE IMPORT ID`);' >> "$OUTPUT_CLEANUP"

echo ""
echo -e "${COLOR_GREEN}DONE!${COLOR_NULL} Please find Memgraph compatible cypherl|.cypher files under $OUTPUT_SCHEMA, $OUTPUT_NODES, $OUTPUT_RELATIONSHIPS and $OUTPUT_CLEANUP"
echo ""
echo "Please import data by executing => \`cat $OUTPUT_SCHEMA | mgconsole\`, \`cat $OUTPUT_NODES | mgconsole\`, \`cat $OUTPUT_RELATIONSHIPS | mgconsole\` and \`cat $OUTPUT_CLEANUP | mgconsole\`"
include/_mgp.hpp
@@ -1,869 +0,0 @@
// Copyright 2024 Memgraph Ltd.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source
// License, and you may not use this file except in compliance with the Business Source License.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.

/// @file _mgp.hpp
///
/// The file contains methods that connect mg procedures and the outside code
/// Methods like mapping a graph into memory or assigning new mg results or
/// their properties are implemented.
#pragma once

#include "mg_exceptions.hpp"
#include "mg_procedure.h"

namespace mgp {

namespace {
inline void MgExceptionHandle(mgp_error result_code) {
  switch (result_code) {
    case mgp_error::MGP_ERROR_UNKNOWN_ERROR:
      throw mg_exception::UnknownException();
    case mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE:
      throw mg_exception::AllocationException();
    case mgp_error::MGP_ERROR_INSUFFICIENT_BUFFER:
      throw mg_exception::InsufficientBufferException();
    case mgp_error::MGP_ERROR_OUT_OF_RANGE:
      throw mg_exception::OutOfRangeException();
    case mgp_error::MGP_ERROR_LOGIC_ERROR:
      throw mg_exception::LogicException();
    case mgp_error::MGP_ERROR_DELETED_OBJECT:
      throw mg_exception::DeletedObjectException();
    case mgp_error::MGP_ERROR_INVALID_ARGUMENT:
      throw mg_exception::InvalidArgumentException();
    case mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS:
      throw mg_exception::KeyAlreadyExistsException();
    case mgp_error::MGP_ERROR_IMMUTABLE_OBJECT:
      throw mg_exception::ImmutableObjectException();
    case mgp_error::MGP_ERROR_VALUE_CONVERSION:
      throw mg_exception::ValueConversionException();
    case mgp_error::MGP_ERROR_SERIALIZATION_ERROR:
      throw mg_exception::SerializationException();
    default:
      return;
  }
}

template <typename TResult, typename TFunc, typename... TArgs>
TResult MgInvoke(TFunc func, TArgs... args) {
  TResult result{};

  auto result_code = func(args..., &result);
  MgExceptionHandle(result_code);

  return result;
}

template <typename TFunc, typename... TArgs>
inline void MgInvokeVoid(TFunc func, TArgs... args) {
  auto result_code = func(args...);
  MgExceptionHandle(result_code);
}
} // namespace
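The two templates above are the core of the header: every C call in mg_procedure.h returns an mgp_error and writes its real result through a trailing out-parameter, and MgInvoke/MgInvokeVoid turn that convention into plain return values plus exceptions. A minimal illustrative sketch of the pattern (not part of the original file; the wrapper name is made up):

// Illustrative sketch only: wrapping a C call of the form
// mgp_error mgp_value_make_int(int64_t, mgp_memory *, mgp_value **).
inline mgp_value *example_make_int(int64_t val, mgp_memory *memory) {
  // MgInvoke<mgp_value *> appends the out-parameter (&result), routes the
  // returned mgp_error through MgExceptionHandle, and throws on failure.
  return MgInvoke<mgp_value *>(mgp_value_make_int, val, memory);
}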
// mgp_value

// Make value

inline mgp_value *value_make_null(mgp_memory *memory) { return MgInvoke<mgp_value *>(mgp_value_make_null, memory); }

inline mgp_value *value_make_bool(int val, mgp_memory *memory) {
  return MgInvoke<mgp_value *>(mgp_value_make_bool, val, memory);
}

inline mgp_value *value_make_int(int64_t val, mgp_memory *memory) {
  return MgInvoke<mgp_value *>(mgp_value_make_int, val, memory);
}

inline mgp_value *value_make_double(double val, mgp_memory *memory) {
  return MgInvoke<mgp_value *>(mgp_value_make_double, val, memory);
}

inline mgp_value *value_make_string(const char *val, mgp_memory *memory) {
  return MgInvoke<mgp_value *>(mgp_value_make_string, val, memory);
}

inline mgp_value *value_make_list(mgp_list *val) { return MgInvoke<mgp_value *>(mgp_value_make_list, val); }

inline mgp_value *value_make_map(mgp_map *val) { return MgInvoke<mgp_value *>(mgp_value_make_map, val); }

inline mgp_value *value_make_vertex(mgp_vertex *val) { return MgInvoke<mgp_value *>(mgp_value_make_vertex, val); }

inline mgp_value *value_make_edge(mgp_edge *val) { return MgInvoke<mgp_value *>(mgp_value_make_edge, val); }

inline mgp_value *value_make_path(mgp_path *val) { return MgInvoke<mgp_value *>(mgp_value_make_path, val); }

inline mgp_value *value_make_date(mgp_date *val) { return MgInvoke<mgp_value *>(mgp_value_make_date, val); }

inline mgp_value *value_make_local_time(mgp_local_time *val) {
  return MgInvoke<mgp_value *>(mgp_value_make_local_time, val);
}

inline mgp_value *value_make_local_date_time(mgp_local_date_time *val) {
  return MgInvoke<mgp_value *>(mgp_value_make_local_date_time, val);
}

inline mgp_value *value_make_duration(mgp_duration *val) { return MgInvoke<mgp_value *>(mgp_value_make_duration, val); }

// Copy value

// TODO: implement within MGP API
// with primitive types ({bool, int, double, string}), create a new identical value
// otherwise call mgp_##TYPE_copy and convert type
inline mgp_value *value_copy(mgp_value *val, mgp_memory *memory) {
  return MgInvoke<mgp_value *>(mgp_value_copy, val, memory);
}

// Destroy value

inline void value_destroy(mgp_value *val) { mgp_value_destroy(val); }

// Get value of type

inline mgp_value_type value_get_type(mgp_value *val) { return MgInvoke<mgp_value_type>(mgp_value_get_type, val); }

inline bool value_get_bool(mgp_value *val) { return MgInvoke<int>(mgp_value_get_bool, val); }

inline int64_t value_get_int(mgp_value *val) { return MgInvoke<int64_t>(mgp_value_get_int, val); }

inline double value_get_double(mgp_value *val) { return MgInvoke<double>(mgp_value_get_double, val); }

inline double value_get_numeric(mgp_value *val) {
  if (MgInvoke<int>(mgp_value_is_int, val)) {
    return static_cast<double>(value_get_int(val));
  }
  return value_get_double(val);
}

inline const char *value_get_string(mgp_value *val) { return MgInvoke<const char *>(mgp_value_get_string, val); }

inline mgp_list *value_get_list(mgp_value *val) { return MgInvoke<mgp_list *>(mgp_value_get_list, val); }

inline mgp_map *value_get_map(mgp_value *val) { return MgInvoke<mgp_map *>(mgp_value_get_map, val); }

inline mgp_vertex *value_get_vertex(mgp_value *val) { return MgInvoke<mgp_vertex *>(mgp_value_get_vertex, val); }

inline mgp_edge *value_get_edge(mgp_value *val) { return MgInvoke<mgp_edge *>(mgp_value_get_edge, val); }

inline mgp_path *value_get_path(mgp_value *val) { return MgInvoke<mgp_path *>(mgp_value_get_path, val); }

inline mgp_date *value_get_date(mgp_value *val) { return MgInvoke<mgp_date *>(mgp_value_get_date, val); }

inline mgp_local_time *value_get_local_time(mgp_value *val) {
  return MgInvoke<mgp_local_time *>(mgp_value_get_local_time, val);
}

inline mgp_local_date_time *value_get_local_date_time(mgp_value *val) {
  return MgInvoke<mgp_local_date_time *>(mgp_value_get_local_date_time, val);
}

inline mgp_duration *value_get_duration(mgp_value *val) {
  return MgInvoke<mgp_duration *>(mgp_value_get_duration, val);
}

// Check type of value

inline bool value_is_null(mgp_value *val) { return MgInvoke<int>(mgp_value_is_null, val); }

inline bool value_is_bool(mgp_value *val) { return MgInvoke<int>(mgp_value_is_bool, val); }

inline bool value_is_int(mgp_value *val) { return MgInvoke<int>(mgp_value_is_int, val); }

inline bool value_is_double(mgp_value *val) { return MgInvoke<int>(mgp_value_is_double, val); }

inline bool value_is_numeric(mgp_value *val) { return value_is_int(val) || value_is_double(val); }

inline bool value_is_string(mgp_value *val) { return MgInvoke<int>(mgp_value_is_string, val); }

inline bool value_is_list(mgp_value *val) { return MgInvoke<int>(mgp_value_is_list, val); }

inline bool value_is_map(mgp_value *val) { return MgInvoke<int>(mgp_value_is_map, val); }

inline bool value_is_vertex(mgp_value *val) { return MgInvoke<int>(mgp_value_is_vertex, val); }

inline bool value_is_edge(mgp_value *val) { return MgInvoke<int>(mgp_value_is_edge, val); }

inline bool value_is_path(mgp_value *val) { return MgInvoke<int>(mgp_value_is_path, val); }

inline bool value_is_date(mgp_value *val) { return MgInvoke<int>(mgp_value_is_date, val); }

inline bool value_is_local_time(mgp_value *val) { return MgInvoke<int>(mgp_value_is_local_time, val); }

inline bool value_is_local_date_time(mgp_value *val) { return MgInvoke<int>(mgp_value_is_local_date_time, val); }

inline bool value_is_duration(mgp_value *val) { return MgInvoke<int>(mgp_value_is_duration, val); }

// Get type

inline mgp_type *type_any() { return MgInvoke<mgp_type *>(mgp_type_any); }

inline mgp_type *type_bool() { return MgInvoke<mgp_type *>(mgp_type_bool); }

inline mgp_type *type_string() { return MgInvoke<mgp_type *>(mgp_type_string); }

inline mgp_type *type_int() { return MgInvoke<mgp_type *>(mgp_type_int); }

inline mgp_type *type_float() { return MgInvoke<mgp_type *>(mgp_type_float); }

inline mgp_type *type_number() { return MgInvoke<mgp_type *>(mgp_type_number); }

inline mgp_type *type_list(mgp_type *element_type) { return MgInvoke<mgp_type *>(mgp_type_list, element_type); }

inline mgp_type *type_map() { return MgInvoke<mgp_type *>(mgp_type_map); }

inline mgp_type *type_node() { return MgInvoke<mgp_type *>(mgp_type_node); }

inline mgp_type *type_relationship() { return MgInvoke<mgp_type *>(mgp_type_relationship); }

inline mgp_type *type_path() { return MgInvoke<mgp_type *>(mgp_type_path); }

inline mgp_type *type_date() { return MgInvoke<mgp_type *>(mgp_type_date); }

inline mgp_type *type_local_time() { return MgInvoke<mgp_type *>(mgp_type_local_time); }

inline mgp_type *type_local_date_time() { return MgInvoke<mgp_type *>(mgp_type_local_date_time); }

inline mgp_type *type_duration() { return MgInvoke<mgp_type *>(mgp_type_duration); }

inline mgp_type *type_nullable(mgp_type *type) { return MgInvoke<mgp_type *>(mgp_type_nullable, type); }

inline bool create_label_index(mgp_graph *graph, const char *label) {
  return MgInvoke<int>(mgp_create_label_index, graph, label);
}

inline bool drop_label_index(mgp_graph *graph, const char *label) {
  return MgInvoke<int>(mgp_drop_label_index, graph, label);
}

inline mgp_list *list_all_label_indices(mgp_graph *graph, mgp_memory *memory) {
  return MgInvoke<mgp_list *>(mgp_list_all_label_indices, graph, memory);
}

inline bool create_label_property_index(mgp_graph *graph, const char *label, const char *property) {
  return MgInvoke<int>(mgp_create_label_property_index, graph, label, property);
}

inline bool drop_label_property_index(mgp_graph *graph, const char *label, const char *property) {
  return MgInvoke<int>(mgp_drop_label_property_index, graph, label, property);
}

inline mgp_list *list_all_label_property_indices(mgp_graph *graph, mgp_memory *memory) {
  return MgInvoke<mgp_list *>(mgp_list_all_label_property_indices, graph, memory);
}

inline bool create_existence_constraint(mgp_graph *graph, const char *label, const char *property) {
  return MgInvoke<int>(mgp_create_existence_constraint, graph, label, property);
}

inline bool drop_existence_constraint(mgp_graph *graph, const char *label, const char *property) {
  return MgInvoke<int>(mgp_drop_existence_constraint, graph, label, property);
}

inline mgp_list *list_all_existence_constraints(mgp_graph *graph, mgp_memory *memory) {
  return MgInvoke<mgp_list *>(mgp_list_all_existence_constraints, graph, memory);
}

inline bool create_unique_constraint(mgp_graph *memgraph_graph, const char *label, mgp_value *properties) {
  return MgInvoke<int>(mgp_create_unique_constraint, memgraph_graph, label, properties);
}

inline bool drop_unique_constraint(mgp_graph *memgraph_graph, const char *label, mgp_value *properties) {
  return MgInvoke<int>(mgp_drop_unique_constraint, memgraph_graph, label, properties);
}

inline mgp_list *list_all_unique_constraints(mgp_graph *graph, mgp_memory *memory) {
  return MgInvoke<mgp_list *>(mgp_list_all_unique_constraints, graph, memory);
}
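Taken together, the constructors, accessors, and predicates above give a full round trip over mgp_value. A hypothetical usage sketch (assuming a valid mgp_memory *memory obtained from a procedure callback):

// Hypothetical sketch, not from the diff: build a value, inspect it, free it.
mgp_value *v = mgp::value_make_int(42, memory);  // may throw AllocationException
if (mgp::value_is_int(v)) {
  int64_t x = mgp::value_get_int(v);  // x == 42
}
mgp::value_destroy(v);  // destruction is a plain void call and never throws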
// mgp_graph

inline bool graph_is_transactional(mgp_graph *graph) { return MgInvoke<int>(mgp_graph_is_transactional, graph); }

inline bool graph_is_mutable(mgp_graph *graph) { return MgInvoke<int>(mgp_graph_is_mutable, graph); }

inline mgp_vertex *graph_create_vertex(mgp_graph *graph, mgp_memory *memory) {
  return MgInvoke<mgp_vertex *>(mgp_graph_create_vertex, graph, memory);
}

inline void graph_delete_vertex(mgp_graph *graph, mgp_vertex *vertex) {
  MgInvokeVoid(mgp_graph_delete_vertex, graph, vertex);
}

inline void graph_detach_delete_vertex(mgp_graph *graph, mgp_vertex *vertex) {
  MgInvokeVoid(mgp_graph_detach_delete_vertex, graph, vertex);
}

inline mgp_edge *graph_create_edge(mgp_graph *graph, mgp_vertex *from, mgp_vertex *to, mgp_edge_type type,
                                   mgp_memory *memory) {
  return MgInvoke<mgp_edge *>(mgp_graph_create_edge, graph, from, to, type, memory);
}

inline mgp_edge *graph_edge_set_from(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_vertex *new_from,
                                     mgp_memory *memory) {
  return MgInvoke<mgp_edge *>(mgp_graph_edge_set_from, graph, e, new_from, memory);
}

inline mgp_edge *graph_edge_set_to(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_vertex *new_to,
                                   mgp_memory *memory) {
  return MgInvoke<mgp_edge *>(mgp_graph_edge_set_to, graph, e, new_to, memory);
}

inline mgp_edge *graph_edge_change_type(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_edge_type new_type,
                                        mgp_memory *memory) {
  return MgInvoke<mgp_edge *>(mgp_graph_edge_change_type, graph, e, new_type, memory);
}

inline void graph_delete_edge(mgp_graph *graph, mgp_edge *edge) { MgInvokeVoid(mgp_graph_delete_edge, graph, edge); }

inline mgp_vertex *graph_get_vertex_by_id(mgp_graph *g, mgp_vertex_id id, mgp_memory *memory) {
  return MgInvoke<mgp_vertex *>(mgp_graph_get_vertex_by_id, g, id, memory);
}

inline bool graph_has_text_index(mgp_graph *graph, const char *index_name) {
  return MgInvoke<int>(mgp_graph_has_text_index, graph, index_name);
}

inline mgp_map *graph_search_text_index(mgp_graph *graph, const char *index_name, const char *search_query,
                                        text_search_mode search_mode, mgp_memory *memory) {
  return MgInvoke<mgp_map *>(mgp_graph_search_text_index, graph, index_name, search_query, search_mode, memory);
}

inline mgp_map *graph_aggregate_over_text_index(mgp_graph *graph, const char *index_name, const char *search_query,
                                                const char *aggregation_query, mgp_memory *memory) {
  return MgInvoke<mgp_map *>(mgp_graph_aggregate_over_text_index, graph, index_name, search_query, aggregation_query,
                             memory);
}

inline mgp_vertices_iterator *graph_iter_vertices(mgp_graph *g, mgp_memory *memory) {
  return MgInvoke<mgp_vertices_iterator *>(mgp_graph_iter_vertices, g, memory);
}

// mgp_vertices_iterator

inline void vertices_iterator_destroy(mgp_vertices_iterator *it) { mgp_vertices_iterator_destroy(it); }

inline mgp_vertex *vertices_iterator_get(mgp_vertices_iterator *it) {
  return MgInvoke<mgp_vertex *>(mgp_vertices_iterator_get, it);
}

inline mgp_vertex *vertices_iterator_next(mgp_vertices_iterator *it) {
  return MgInvoke<mgp_vertex *>(mgp_vertices_iterator_next, it);
}

// mgp_edges_iterator

inline void edges_iterator_destroy(mgp_edges_iterator *it) { mgp_edges_iterator_destroy(it); }

inline mgp_edge *edges_iterator_get(mgp_edges_iterator *it) { return MgInvoke<mgp_edge *>(mgp_edges_iterator_get, it); }

inline mgp_edge *edges_iterator_next(mgp_edges_iterator *it) {
  return MgInvoke<mgp_edge *>(mgp_edges_iterator_next, it);
}

// mgp_properties_iterator

inline void properties_iterator_destroy(mgp_properties_iterator *it) { mgp_properties_iterator_destroy(it); }

inline mgp_property *properties_iterator_get(mgp_properties_iterator *it) {
  return MgInvoke<mgp_property *>(mgp_properties_iterator_get, it);
}

inline mgp_property *properties_iterator_next(mgp_properties_iterator *it) {
  return MgInvoke<mgp_property *>(mgp_properties_iterator_next, it);
}
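All three iterator families follow the same get/next/destroy protocol: *_iterator_get returns the current element (or null once exhausted) and *_iterator_next advances. A hypothetical sketch for vertices, assuming graph and memory come from a procedure callback:

// Hypothetical sketch: visit every vertex in the graph.
mgp_vertices_iterator *it = mgp::graph_iter_vertices(graph, memory);
for (mgp_vertex *v = mgp::vertices_iterator_get(it); v != nullptr;
     v = mgp::vertices_iterator_next(it)) {
  // ... inspect v, e.g. via mgp::vertex_get_id(v) ...
}
mgp::vertices_iterator_destroy(it);  // iterators must be destroyed explicitly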
// Container {mgp_list, mgp_map} methods

// mgp_list

inline mgp_list *list_make_empty(size_t capacity, mgp_memory *memory) {
  return MgInvoke<mgp_list *>(mgp_list_make_empty, capacity, memory);
}

inline mgp_list *list_copy(mgp_list *list, mgp_memory *memory) {
  return MgInvoke<mgp_list *>(mgp_list_copy, list, memory);
}

inline void list_destroy(mgp_list *list) { mgp_list_destroy(list); }

inline bool list_contains_deleted(mgp_list *list) { return MgInvoke<int>(mgp_list_contains_deleted, list); }

inline void list_append(mgp_list *list, mgp_value *val) { MgInvokeVoid(mgp_list_append, list, val); }

inline void list_append_extend(mgp_list *list, mgp_value *val) { MgInvokeVoid(mgp_list_append_extend, list, val); }

inline size_t list_size(mgp_list *list) { return MgInvoke<size_t>(mgp_list_size, list); }

inline size_t list_capacity(mgp_list *list) { return MgInvoke<size_t>(mgp_list_capacity, list); }

inline mgp_value *list_at(mgp_list *list, size_t index) { return MgInvoke<mgp_value *>(mgp_list_at, list, index); }

// mgp_map

inline mgp_map *map_make_empty(mgp_memory *memory) { return MgInvoke<mgp_map *>(mgp_map_make_empty, memory); }

inline mgp_map *map_copy(mgp_map *map, mgp_memory *memory) { return MgInvoke<mgp_map *>(mgp_map_copy, map, memory); }

inline void map_destroy(mgp_map *map) { mgp_map_destroy(map); }

inline bool map_contains_deleted(mgp_map *map) { return MgInvoke<int>(mgp_map_contains_deleted, map); }

inline void map_insert(mgp_map *map, const char *key, mgp_value *value) {
  MgInvokeVoid(mgp_map_insert, map, key, value);
}

inline void map_update(mgp_map *map, const char *key, mgp_value *value) {
  MgInvokeVoid(mgp_map_update, map, key, value);
}

inline void map_erase(mgp_map *map, const char *key) { MgInvokeVoid(mgp_map_erase, map, key); }

inline size_t map_size(mgp_map *map) { return MgInvoke<size_t>(mgp_map_size, map); }

inline mgp_value *map_at(mgp_map *map, const char *key) { return MgInvoke<mgp_value *>(mgp_map_at, map, key); }

inline bool key_exists(mgp_map *map, const char *key) { return MgInvoke<int>(mgp_key_exists, map, key); }

inline const char *map_item_key(mgp_map_item *item) { return MgInvoke<const char *>(mgp_map_item_key, item); }

inline mgp_value *map_item_value(mgp_map_item *item) { return MgInvoke<mgp_value *>(mgp_map_item_value, item); }

inline mgp_map_items_iterator *map_iter_items(mgp_map *map, mgp_memory *memory) {
  return MgInvoke<mgp_map_items_iterator *>(mgp_map_iter_items, map, memory);
}

inline void map_items_iterator_destroy(mgp_map_items_iterator *it) { mgp_map_items_iterator_destroy(it); }

inline mgp_map_item *map_items_iterator_get(mgp_map_items_iterator *it) {
  return MgInvoke<mgp_map_item *>(mgp_map_items_iterator_get, it);
}

inline mgp_map_item *map_items_iterator_next(mgp_map_items_iterator *it) {
  return MgInvoke<mgp_map_item *>(mgp_map_items_iterator_next, it);
}
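A hypothetical container round trip with these wrappers (again assuming a valid memory; whether map_insert rejects duplicate keys is governed by the underlying C API, not this sketch):

// Hypothetical sketch: insert and look up a key in an mgp_map.
mgp_map *m = mgp::map_make_empty(memory);
mgp_value *val = mgp::value_make_string("hello", memory);
mgp::map_insert(m, "greeting", val);  // throws on allocation failure etc.
if (mgp::key_exists(m, "greeting")) {
  const char *s = mgp::value_get_string(mgp::map_at(m, "greeting"));
}
mgp::value_destroy(val);
mgp::map_destroy(m);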
// mgp_vertex

inline mgp_vertex_id vertex_get_id(mgp_vertex *v) { return MgInvoke<mgp_vertex_id>(mgp_vertex_get_id, v); }

inline size_t vertex_get_in_degree(mgp_vertex *v) { return MgInvoke<size_t>(mgp_vertex_get_in_degree, v); }

inline size_t vertex_get_out_degree(mgp_vertex *v) { return MgInvoke<size_t>(mgp_vertex_get_out_degree, v); }

inline mgp_vertex *vertex_copy(mgp_vertex *v, mgp_memory *memory) {
  return MgInvoke<mgp_vertex *>(mgp_vertex_copy, v, memory);
}

inline void vertex_destroy(mgp_vertex *v) { mgp_vertex_destroy(v); }

inline bool vertex_is_deleted(mgp_vertex *v) { return MgInvoke<int>(mgp_vertex_is_deleted, v); }

inline bool vertex_equal(mgp_vertex *v1, mgp_vertex *v2) { return MgInvoke<int>(mgp_vertex_equal, v1, v2); }

inline size_t vertex_labels_count(mgp_vertex *v) { return MgInvoke<size_t>(mgp_vertex_labels_count, v); }

inline mgp_label vertex_label_at(mgp_vertex *v, size_t index) {
  return MgInvoke<mgp_label>(mgp_vertex_label_at, v, index);
}

inline bool vertex_has_label(mgp_vertex *v, mgp_label label) { return MgInvoke<int>(mgp_vertex_has_label, v, label); }

inline bool vertex_has_label_named(mgp_vertex *v, const char *label_name) {
  return MgInvoke<int>(mgp_vertex_has_label_named, v, label_name);
}

inline void vertex_add_label(mgp_vertex *vertex, mgp_label label) { MgInvokeVoid(mgp_vertex_add_label, vertex, label); }

inline void vertex_remove_label(mgp_vertex *vertex, mgp_label label) {
  MgInvokeVoid(mgp_vertex_remove_label, vertex, label);
}

inline mgp_value *vertex_get_property(mgp_vertex *v, const char *property_name, mgp_memory *memory) {
  return MgInvoke<mgp_value *>(mgp_vertex_get_property, v, property_name, memory);
}

inline void vertex_set_property(mgp_vertex *v, const char *property_name, mgp_value *property_value) {
  MgInvokeVoid(mgp_vertex_set_property, v, property_name, property_value);
}

inline void vertex_set_properties(mgp_vertex *v, struct mgp_map *properties) {
  MgInvokeVoid(mgp_vertex_set_properties, v, properties);
}

inline mgp_properties_iterator *vertex_iter_properties(mgp_vertex *v, mgp_memory *memory) {
  return MgInvoke<mgp_properties_iterator *>(mgp_vertex_iter_properties, v, memory);
}

inline mgp_edges_iterator *vertex_iter_in_edges(mgp_vertex *v, mgp_memory *memory) {
  return MgInvoke<mgp_edges_iterator *>(mgp_vertex_iter_in_edges, v, memory);
}

inline mgp_edges_iterator *vertex_iter_out_edges(mgp_vertex *v, mgp_memory *memory) {
  return MgInvoke<mgp_edges_iterator *>(mgp_vertex_iter_out_edges, v, memory);
}
// mgp_edge

inline mgp_edge_id edge_get_id(mgp_edge *e) { return MgInvoke<mgp_edge_id>(mgp_edge_get_id, e); }

inline mgp_edge *edge_copy(mgp_edge *e, mgp_memory *memory) { return MgInvoke<mgp_edge *>(mgp_edge_copy, e, memory); }

inline void edge_destroy(mgp_edge *e) { mgp_edge_destroy(e); }

inline bool edge_is_deleted(mgp_edge *e) { return MgInvoke<int>(mgp_edge_is_deleted, e); }

inline bool edge_equal(mgp_edge *e1, mgp_edge *e2) { return MgInvoke<int>(mgp_edge_equal, e1, e2); }

inline mgp_edge_type edge_get_type(mgp_edge *e) { return MgInvoke<mgp_edge_type>(mgp_edge_get_type, e); }

inline mgp_vertex *edge_get_from(mgp_edge *e) { return MgInvoke<mgp_vertex *>(mgp_edge_get_from, e); }

inline mgp_vertex *edge_get_to(mgp_edge *e) { return MgInvoke<mgp_vertex *>(mgp_edge_get_to, e); }

inline mgp_value *edge_get_property(mgp_edge *e, const char *property_name, mgp_memory *memory) {
  return MgInvoke<mgp_value *>(mgp_edge_get_property, e, property_name, memory);
}

inline void edge_set_property(mgp_edge *e, const char *property_name, mgp_value *property_value) {
  MgInvokeVoid(mgp_edge_set_property, e, property_name, property_value);
}

inline void edge_set_properties(mgp_edge *e, struct mgp_map *properties) {
  MgInvokeVoid(mgp_edge_set_properties, e, properties);
}

inline mgp_properties_iterator *edge_iter_properties(mgp_edge *e, mgp_memory *memory) {
  return MgInvoke<mgp_properties_iterator *>(mgp_edge_iter_properties, e, memory);
}
// mgp_path

inline mgp_path *path_make_with_start(mgp_vertex *vertex, mgp_memory *memory) {
  return MgInvoke<mgp_path *>(mgp_path_make_with_start, vertex, memory);
}

inline mgp_path *path_copy(mgp_path *path, mgp_memory *memory) {
  return MgInvoke<mgp_path *>(mgp_path_copy, path, memory);
}

inline void path_destroy(mgp_path *path) { mgp_path_destroy(path); }

inline bool path_contains_deleted(mgp_path *path) { return MgInvoke<int>(mgp_path_contains_deleted, path); }

inline void path_expand(mgp_path *path, mgp_edge *edge) { MgInvokeVoid(mgp_path_expand, path, edge); }

inline void path_pop(mgp_path *path) { MgInvokeVoid(mgp_path_pop, path); }

inline size_t path_size(mgp_path *path) { return MgInvoke<size_t>(mgp_path_size, path); }

inline mgp_vertex *path_vertex_at(mgp_path *path, size_t index) {
  return MgInvoke<mgp_vertex *>(mgp_path_vertex_at, path, index);
}

inline mgp_edge *path_edge_at(mgp_path *path, size_t index) {
  return MgInvoke<mgp_edge *>(mgp_path_edge_at, path, index);
}

inline bool path_equal(mgp_path *p1, mgp_path *p2) { return MgInvoke<int>(mgp_path_equal, p1, p2); }
// Temporal type {mgp_date, mgp_local_time, mgp_local_date_time, mgp_duration} methods

// mgp_date

inline mgp_date *date_from_string(const char *string, mgp_memory *memory) {
  return MgInvoke<mgp_date *>(mgp_date_from_string, string, memory);
}

inline mgp_date *date_from_parameters(mgp_date_parameters *parameters, mgp_memory *memory) {
  return MgInvoke<mgp_date *>(mgp_date_from_parameters, parameters, memory);
}

inline mgp_date *date_copy(mgp_date *date, mgp_memory *memory) {
  return MgInvoke<mgp_date *>(mgp_date_copy, date, memory);
}

inline void date_destroy(mgp_date *date) { mgp_date_destroy(date); }

inline bool date_equal(mgp_date *first, mgp_date *second) { return MgInvoke<int>(mgp_date_equal, first, second); }

inline int date_get_year(mgp_date *date) { return MgInvoke<int>(mgp_date_get_year, date); }

inline int date_get_month(mgp_date *date) { return MgInvoke<int>(mgp_date_get_month, date); }

inline int date_get_day(mgp_date *date) { return MgInvoke<int>(mgp_date_get_day, date); }

inline int64_t date_timestamp(mgp_date *date) { return MgInvoke<int64_t>(mgp_date_timestamp, date); }

inline mgp_date *date_now(mgp_memory *memory) { return MgInvoke<mgp_date *>(mgp_date_now, memory); }

inline mgp_date *date_add_duration(mgp_date *date, mgp_duration *dur, mgp_memory *memory) {
  return MgInvoke<mgp_date *>(mgp_date_add_duration, date, dur, memory);
}

inline mgp_date *date_sub_duration(mgp_date *date, mgp_duration *dur, mgp_memory *memory) {
  return MgInvoke<mgp_date *>(mgp_date_sub_duration, date, dur, memory);
}

inline mgp_duration *date_diff(mgp_date *first, mgp_date *second, mgp_memory *memory) {
  return MgInvoke<mgp_duration *>(mgp_date_diff, first, second, memory);
}

// mgp_local_time

inline mgp_local_time *local_time_from_string(const char *string, mgp_memory *memory) {
  return MgInvoke<mgp_local_time *>(mgp_local_time_from_string, string, memory);
}

inline mgp_local_time *local_time_from_parameters(mgp_local_time_parameters *parameters, mgp_memory *memory) {
  return MgInvoke<mgp_local_time *>(mgp_local_time_from_parameters, parameters, memory);
}

inline mgp_local_time *local_time_copy(mgp_local_time *local_time, mgp_memory *memory) {
  return MgInvoke<mgp_local_time *>(mgp_local_time_copy, local_time, memory);
}

inline void local_time_destroy(mgp_local_time *local_time) { mgp_local_time_destroy(local_time); }

inline bool local_time_equal(mgp_local_time *first, mgp_local_time *second) {
  return MgInvoke<int>(mgp_local_time_equal, first, second);
}

inline int local_time_get_hour(mgp_local_time *local_time) {
  return MgInvoke<int>(mgp_local_time_get_hour, local_time);
}

inline int local_time_get_minute(mgp_local_time *local_time) {
  return MgInvoke<int>(mgp_local_time_get_minute, local_time);
}

inline int local_time_get_second(mgp_local_time *local_time) {
  return MgInvoke<int>(mgp_local_time_get_second, local_time);
}

inline int local_time_get_millisecond(mgp_local_time *local_time) {
  return MgInvoke<int>(mgp_local_time_get_millisecond, local_time);
}

inline int local_time_get_microsecond(mgp_local_time *local_time) {
  return MgInvoke<int>(mgp_local_time_get_microsecond, local_time);
}

inline int64_t local_time_timestamp(mgp_local_time *local_time) {
  return MgInvoke<int64_t>(mgp_local_time_timestamp, local_time);
}

inline mgp_local_time *local_time_now(mgp_memory *memory) {
  return MgInvoke<mgp_local_time *>(mgp_local_time_now, memory);
}

inline mgp_local_time *local_time_add_duration(mgp_local_time *local_time, mgp_duration *dur, mgp_memory *memory) {
  return MgInvoke<mgp_local_time *>(mgp_local_time_add_duration, local_time, dur, memory);
}

inline mgp_local_time *local_time_sub_duration(mgp_local_time *local_time, mgp_duration *dur, mgp_memory *memory) {
  return MgInvoke<mgp_local_time *>(mgp_local_time_sub_duration, local_time, dur, memory);
}

inline mgp_duration *local_time_diff(mgp_local_time *first, mgp_local_time *second, mgp_memory *memory) {
  return MgInvoke<mgp_duration *>(mgp_local_time_diff, first, second, memory);
}

// mgp_local_date_time

inline mgp_local_date_time *local_date_time_from_string(const char *string, mgp_memory *memory) {
  return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_from_string, string, memory);
}

inline mgp_local_date_time *local_date_time_from_parameters(mgp_local_date_time_parameters *parameters,
                                                            mgp_memory *memory) {
  return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_from_parameters, parameters, memory);
}

inline mgp_local_date_time *local_date_time_copy(mgp_local_date_time *local_date_time, mgp_memory *memory) {
  return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_copy, local_date_time, memory);
}

inline void local_date_time_destroy(mgp_local_date_time *local_date_time) {
  mgp_local_date_time_destroy(local_date_time);
}

inline bool local_date_time_equal(mgp_local_date_time *first, mgp_local_date_time *second) {
  return MgInvoke<int>(mgp_local_date_time_equal, first, second);
}

inline int local_date_time_get_year(mgp_local_date_time *local_date_time) {
  return MgInvoke<int>(mgp_local_date_time_get_year, local_date_time);
}

inline int local_date_time_get_month(mgp_local_date_time *local_date_time) {
  return MgInvoke<int>(mgp_local_date_time_get_month, local_date_time);
}

inline int local_date_time_get_day(mgp_local_date_time *local_date_time) {
  return MgInvoke<int>(mgp_local_date_time_get_day, local_date_time);
}

inline int local_date_time_get_hour(mgp_local_date_time *local_date_time) {
  return MgInvoke<int>(mgp_local_date_time_get_hour, local_date_time);
}

inline int local_date_time_get_minute(mgp_local_date_time *local_date_time) {
  return MgInvoke<int>(mgp_local_date_time_get_minute, local_date_time);
}

inline int local_date_time_get_second(mgp_local_date_time *local_date_time) {
  return MgInvoke<int>(mgp_local_date_time_get_second, local_date_time);
}

inline int local_date_time_get_millisecond(mgp_local_date_time *local_date_time) {
  return MgInvoke<int>(mgp_local_date_time_get_millisecond, local_date_time);
}

inline int local_date_time_get_microsecond(mgp_local_date_time *local_date_time) {
  return MgInvoke<int>(mgp_local_date_time_get_microsecond, local_date_time);
}

inline int64_t local_date_time_timestamp(mgp_local_date_time *local_date_time) {
  return MgInvoke<int64_t>(mgp_local_date_time_timestamp, local_date_time);
}

inline mgp_local_date_time *local_date_time_now(mgp_memory *memory) {
  return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_now, memory);
}

inline mgp_local_date_time *local_date_time_add_duration(mgp_local_date_time *local_date_time, mgp_duration *dur,
                                                         mgp_memory *memory) {
  return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_add_duration, local_date_time, dur, memory);
}

inline mgp_local_date_time *local_date_time_sub_duration(mgp_local_date_time *local_date_time, mgp_duration *dur,
                                                         mgp_memory *memory) {
  return MgInvoke<mgp_local_date_time *>(mgp_local_date_time_sub_duration, local_date_time, dur, memory);
}

inline mgp_duration *local_date_time_diff(mgp_local_date_time *first, mgp_local_date_time *second, mgp_memory *memory) {
  return MgInvoke<mgp_duration *>(mgp_local_date_time_diff, first, second, memory);
}

// mgp_duration

inline mgp_duration *duration_from_string(const char *string, mgp_memory *memory) {
  return MgInvoke<mgp_duration *>(mgp_duration_from_string, string, memory);
}

inline mgp_duration *duration_from_parameters(mgp_duration_parameters *parameters, mgp_memory *memory) {
  return MgInvoke<mgp_duration *>(mgp_duration_from_parameters, parameters, memory);
}

inline mgp_duration *duration_from_microseconds(int64_t microseconds, mgp_memory *memory) {
  return MgInvoke<mgp_duration *>(mgp_duration_from_microseconds, microseconds, memory);
}

inline mgp_duration *duration_copy(mgp_duration *duration, mgp_memory *memory) {
  return MgInvoke<mgp_duration *>(mgp_duration_copy, duration, memory);
}

inline void duration_destroy(mgp_duration *duration) { mgp_duration_destroy(duration); }

inline int64_t duration_get_microseconds(mgp_duration *duration) {
  return MgInvoke<int64_t>(mgp_duration_get_microseconds, duration);
}

inline bool duration_equal(mgp_duration *first, mgp_duration *second) {
  return MgInvoke<int>(mgp_duration_equal, first, second);
}

inline mgp_duration *duration_neg(mgp_duration *duration, mgp_memory *memory) {
  return MgInvoke<mgp_duration *>(mgp_duration_neg, duration, memory);
}

inline mgp_duration *duration_add(mgp_duration *first, mgp_duration *second, mgp_memory *memory) {
  return MgInvoke<mgp_duration *>(mgp_duration_add, first, second, memory);
}

inline mgp_duration *duration_sub(mgp_duration *first, mgp_duration *second, mgp_memory *memory) {
  return MgInvoke<mgp_duration *>(mgp_duration_sub, first, second, memory);
}
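A hypothetical sketch of temporal arithmetic with the wrappers above (durations are microsecond-based, per duration_from_microseconds/duration_get_microseconds):

// Hypothetical sketch: compute "one week from today".
mgp_date *today = mgp::date_now(memory);
mgp_duration *week = mgp::duration_from_microseconds(7LL * 24 * 3600 * 1000000, memory);
mgp_date *next_week = mgp::date_add_duration(today, week, memory);
// ... use next_week ...
mgp::date_destroy(next_week);
mgp::duration_destroy(week);
mgp::date_destroy(today);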
// Procedure

inline mgp_proc *module_add_read_procedure(mgp_module *module, const char *name, mgp_proc_cb cb) {
  return MgInvoke<mgp_proc *>(mgp_module_add_read_procedure, module, name, cb);
}

inline mgp_proc *module_add_write_procedure(mgp_module *module, const char *name, mgp_proc_cb cb) {
  return MgInvoke<mgp_proc *>(mgp_module_add_write_procedure, module, name, cb);
}

inline mgp_proc *module_add_batch_read_procedure(mgp_module *module, const char *name, mgp_proc_cb cb,
                                                 mgp_proc_initializer initializer, mgp_proc_cleanup cleanup) {
  return MgInvoke<mgp_proc *>(mgp_module_add_batch_read_procedure, module, name, cb, initializer, cleanup);
}

inline mgp_proc *module_add_batch_write_procedure(mgp_module *module, const char *name, mgp_proc_cb cb,
                                                  mgp_proc_initializer initializer, mgp_proc_cleanup cleanup) {
  return MgInvoke<mgp_proc *>(mgp_module_add_batch_write_procedure, module, name, cb, initializer, cleanup);
}

inline void proc_add_arg(mgp_proc *proc, const char *name, mgp_type *type) {
  MgInvokeVoid(mgp_proc_add_arg, proc, name, type);
}

inline void proc_add_opt_arg(mgp_proc *proc, const char *name, mgp_type *type, mgp_value *default_value) {
  MgInvokeVoid(mgp_proc_add_opt_arg, proc, name, type, default_value);
}

inline void proc_add_result(mgp_proc *proc, const char *name, mgp_type *type) {
  MgInvokeVoid(mgp_proc_add_result, proc, name, type);
}

inline void proc_add_deprecated_result(mgp_proc *proc, const char *name, mgp_type *type) {
  MgInvokeVoid(mgp_proc_add_deprecated_result, proc, name, type);
}

inline int must_abort(mgp_graph *graph) { return mgp_must_abort(graph); }

// mgp_result

inline void result_set_error_msg(mgp_result *res, const char *error_msg) {
  MgInvokeVoid(mgp_result_set_error_msg, res, error_msg);
}

inline mgp_result_record *result_new_record(mgp_result *res) {
  return MgInvoke<mgp_result_record *>(mgp_result_new_record, res);
}

inline void result_record_insert(mgp_result_record *record, const char *field_name, mgp_value *val) {
  MgInvokeVoid(mgp_result_record_insert, record, field_name, val);
}
// Function

inline mgp_func *module_add_function(mgp_module *module, const char *name, mgp_func_cb cb) {
  return MgInvoke<mgp_func *>(mgp_module_add_function, module, name, cb);
}

inline void func_add_arg(mgp_func *func, const char *name, mgp_type *type) {
  MgInvokeVoid(mgp_func_add_arg, func, name, type);
}

inline void func_add_opt_arg(mgp_func *func, const char *name, mgp_type *type, mgp_value *default_value) {
  MgInvokeVoid(mgp_func_add_opt_arg, func, name, type, default_value);
}

inline void func_result_set_error_msg(mgp_func_result *res, const char *msg, mgp_memory *memory) {
  MgInvokeVoid(mgp_func_result_set_error_msg, res, msg, memory);
}

inline void func_result_set_value(mgp_func_result *res, mgp_value *value, mgp_memory *memory) {
  MgInvokeVoid(mgp_func_result_set_value, res, value, memory);
}

} // namespace mgp
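The registration wrappers are what a query module's entry point is built from. A hypothetical sketch, assuming the standard mgp_init_module entry point that query modules export; the callback and procedure names are made up:

// Hypothetical usage sketch, not part of the header.
void ExampleCallback(mgp_list *args, mgp_graph *graph, mgp_result *result, mgp_memory *memory);

extern "C" int mgp_init_module(struct mgp_module *module, struct mgp_memory * /*memory*/) {
  try {
    mgp_proc *proc = mgp::module_add_read_procedure(module, "example", ExampleCallback);
    mgp::proc_add_arg(proc, "node", mgp::type_node());
    mgp::proc_add_result(proc, "degree", mgp::type_int());
  } catch (...) {
    return 1;  // a non-zero return signals that module initialization failed
  }
  return 0;
}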
@@ -1,350 +0,0 @@
import typing
from enum import Enum

import networkx as nx

NX_LABEL_ATTR = "labels"
NX_TYPE_ATTR = "type"

SOURCE_TYPE_KAFKA = "SOURCE_TYPE_KAFKA"
SOURCE_TYPE_PULSAR = "SOURCE_TYPE_PULSAR"

"""
This module provides helpers for the mock Python API, much like _mgp.py does for mgp.py.
"""


class InvalidArgumentError(Exception):
    """
    Signals that some of the arguments have invalid values.
    """

    pass


class ImmutableObjectError(Exception):
    pass


class LogicErrorError(Exception):
    pass


class DeletedObjectError(Exception):
    pass


class EdgeConstants(Enum):
    I_START = 0
    I_END = 1
    I_KEY = 2


class Graph:
    """Wrapper around a NetworkX MultiDiGraph instance."""

    __slots__ = ("nx", "_highest_vertex_id", "_highest_edge_id", "_valid")

    def __init__(self, graph: nx.MultiDiGraph) -> None:
        if not isinstance(graph, nx.MultiDiGraph):
            raise TypeError(f"Expected 'networkx.classes.multidigraph.MultiDiGraph', got '{type(graph)}'")

        self.nx = graph
        self._highest_vertex_id = None
        self._highest_edge_id = None
        self._valid = True

    @property
    def vertex_ids(self):
        return self.nx.nodes

    def vertex_is_isolate(self, vertex_id: int) -> bool:
        return nx.is_isolate(self.nx, vertex_id)

    @property
    def vertices(self):
        return (Vertex(node_id, self) for node_id in self.nx.nodes)

    def has_node(self, node_id):
        return self.nx.has_node(node_id)

    @property
    def edges(self):
        return self.nx.edges

    def is_valid(self) -> bool:
        return self._valid

    def get_vertex_by_id(self, vertex_id: int) -> "Vertex":
        return Vertex(vertex_id, self)

    def invalidate(self):
        self._valid = False

    def is_immutable(self) -> bool:
        return nx.is_frozen(self.nx)

    def make_immutable(self):
        self.nx = nx.freeze(self.nx)

    def _new_vertex_id(self):
        if self._highest_vertex_id is None:
            self._highest_vertex_id = max(vertex_id for vertex_id in self.nx.nodes)

        return self._highest_vertex_id + 1

    def _new_edge_id(self):
        if self._highest_edge_id is None:
            self._highest_edge_id = max(edge[EdgeConstants.I_KEY.value] for edge in self.nx.edges(keys=True))

        return self._highest_edge_id + 1

    def create_vertex(self) -> "Vertex":
        vertex_id = self._new_vertex_id()

        self.nx.add_node(vertex_id)
        self._highest_vertex_id = vertex_id

        return Vertex(vertex_id, self)

    def create_edge(self, from_vertex: "Vertex", to_vertex: "Vertex", edge_type: str) -> "Edge":
        if from_vertex.is_deleted() or to_vertex.is_deleted():
            raise DeletedObjectError("Accessing deleted object.")

        edge_id = self._new_edge_id()

        from_id = from_vertex.id
        to_id = to_vertex.id

        self.nx.add_edge(from_id, to_id, key=edge_id, type=edge_type)
        self._highest_edge_id = edge_id

        return Edge((from_id, to_id, edge_id), self)

    def delete_vertex(self, vertex_id: int):
        self.nx.remove_node(vertex_id)

    def delete_edge(self, from_vertex_id: int, to_vertex_id: int, edge_id: int):
        self.nx.remove_edge(from_vertex_id, to_vertex_id, edge_id)
|
||||
|
||||
@property
|
||||
def highest_vertex_id(self) -> int:
|
||||
if self._highest_vertex_id is None:
|
||||
self._highest_vertex_id = max(vertex_id for vertex_id in self.nx.nodes) + 1
|
||||
|
||||
return self._highest_vertex_id
|
||||
|
||||
@property
|
||||
def highest_edge_id(self) -> int:
|
||||
if self._highest_edge_id is None:
|
||||
self._highest_edge_id = max(edge[EdgeConstants.I_KEY.value] for edge in self.nx.edges(keys=True))
|
||||
|
||||
return self._highest_edge_id + 1
|
||||
|
||||
|
||||
class Vertex:
|
||||
"""Represents a graph vertex."""
|
||||
|
||||
__slots__ = ("_id", "_graph")
|
||||
|
||||
def __init__(self, id: int, graph: Graph) -> None:
|
||||
if not isinstance(id, int):
|
||||
raise TypeError(f"Expected 'int', got '{type(id)}'")
|
||||
|
||||
if not isinstance(graph, Graph):
|
||||
raise TypeError(f"Expected '_mgp_mock.Graph', got '{type(graph)}'")
|
||||
|
||||
if not graph.nx.has_node(id):
|
||||
raise IndexError(f"Unable to find vertex with ID {id}.")
|
||||
|
||||
self._id = id
|
||||
self._graph = graph
|
||||
|
||||
def is_valid(self) -> bool:
|
||||
return self._graph.is_valid()
|
||||
|
||||
def is_deleted(self) -> bool:
|
||||
return not self._graph.nx.has_node(self._id) and self._id <= self._graph.highest_vertex_id
|
||||
|
||||
@property
|
||||
def underlying_graph(self) -> Graph:
|
||||
return self._graph
|
||||
|
||||
def underlying_graph_is_mutable(self) -> bool:
|
||||
return not nx.is_frozen(self._graph.nx)
|
||||
|
||||
@property
|
||||
def labels(self) -> typing.List[int]:
|
||||
return self._graph.nx.nodes[self._id][NX_LABEL_ATTR].split(":")
|
||||
|
||||
def add_label(self, label: str) -> None:
|
||||
if nx.is_frozen(self._graph.nx):
|
||||
raise ImmutableObjectError("Cannot modify immutable object.")
|
||||
|
||||
self._graph.nx.nodes[self._id][NX_LABEL_ATTR] += f":{label}"
|
||||
|
||||
def remove_label(self, label: str) -> None:
|
||||
if nx.is_frozen(self._graph.nx):
|
||||
raise ImmutableObjectError("Cannot modify immutable object.")
|
||||
|
||||
labels = self._graph.nx.nodes[self._id][NX_LABEL_ATTR]
|
||||
if labels.startswith(f"{label}:"):
|
||||
labels = "\n" + labels # pseudo-string starter
|
||||
self._graph.nx.nodes[self._id][NX_LABEL_ATTR] = labels.replace(f"\n{label}:", "")
|
||||
elif labels.endswith(f":{label}"):
|
||||
labels += "\n" # pseudo-string terminator
|
||||
self._graph.nx.nodes[self._id][NX_LABEL_ATTR] = labels.replace(f":{label}\n", "")
|
||||
else:
|
||||
self._graph.nx.nodes[self._id][NX_LABEL_ATTR] = labels.replace(f":{label}:", ":")
|
||||
|
||||
@property
|
||||
def id(self) -> int:
|
||||
return self._id
|
||||
|
||||
@property
|
||||
def properties(self):
|
||||
return (
|
||||
(key, value)
|
||||
for key, value in self._graph.nx.nodes[self._id].items()
|
||||
if key not in (NX_LABEL_ATTR, NX_TYPE_ATTR)
|
||||
)
|
||||
|
||||
def get_property(self, property_name: str):
|
||||
return self._graph.nx.nodes[self._id][property_name]
|
||||
|
||||
def set_property(self, property_name: str, value: object):
|
||||
self._graph.nx.nodes[self._id][property_name] = value
|
||||
|
||||
@property
|
||||
def in_edges(self) -> typing.Iterable["Edge"]:
|
||||
return [Edge(edge, self._graph) for edge in self._graph.nx.in_edges(self._id, keys=True)]
|
||||
|
||||
@property
|
||||
def out_edges(self) -> typing.Iterable["Edge"]:
|
||||
return [Edge(edge, self._graph) for edge in self._graph.nx.out_edges(self._id, keys=True)]
|
||||
|
||||
|
||||
class Edge:
|
||||
"""Represents a graph edge."""
|
||||
|
||||
__slots__ = ("_edge", "_graph")
|
||||
|
||||
def __init__(self, edge: typing.Tuple[int, int, int], graph: Graph) -> None:
|
||||
if not isinstance(edge, typing.Tuple):
|
||||
raise TypeError(f"Expected 'Tuple', got '{type(edge)}'")
|
||||
|
||||
if not isinstance(graph, Graph):
|
||||
raise TypeError(f"Expected '_mgp_mock.Graph', got '{type(graph)}'")
|
||||
|
||||
if not graph.nx.has_edge(*edge):
|
||||
raise IndexError(f"Unable to find edge with ID {edge[EdgeConstants.I_KEY.value]}.")
|
||||
|
||||
self._edge = edge
|
||||
self._graph = graph
|
||||
|
||||
def is_valid(self) -> bool:
|
||||
return self._graph.is_valid()
|
||||
|
||||
def is_deleted(self) -> bool:
|
||||
return (
|
||||
not self._graph.nx.has_edge(*self._edge)
|
||||
and self._edge[EdgeConstants.I_KEY.value] <= self._graph.highest_edge_id
|
||||
)
|
||||
|
||||
def underlying_graph_is_mutable(self) -> bool:
|
||||
return not nx.is_frozen(self._graph.nx)
|
||||
|
||||
@property
|
||||
def id(self) -> int:
|
||||
return self._edge[EdgeConstants.I_KEY.value]
|
||||
|
||||
@property
|
||||
def edge(self) -> typing.Tuple[int, int, int]:
|
||||
return self._edge
|
||||
|
||||
@property
|
||||
def start_id(self) -> int:
|
||||
return self._edge[EdgeConstants.I_START.value]
|
||||
|
||||
@property
|
||||
def end_id(self) -> int:
|
||||
return self._edge[EdgeConstants.I_END.value]
|
||||
|
||||
def get_type_name(self):
|
||||
return self._graph.nx.get_edge_data(*self._edge)[NX_TYPE_ATTR]
|
||||
|
||||
def from_vertex(self) -> Vertex:
|
||||
return Vertex(self.start_id, self._graph)
|
||||
|
||||
def to_vertex(self) -> Vertex:
|
||||
return Vertex(self.end_id, self._graph)
|
||||
|
||||
@property
|
||||
def properties(self):
|
||||
return (
|
||||
(key, value)
|
||||
for key, value in self._graph.nx.edges[self._edge].items()
|
||||
if key not in (NX_LABEL_ATTR, NX_TYPE_ATTR)
|
||||
)
|
||||
|
||||
def get_property(self, property_name: str):
|
||||
return self._graph.nx.edges[self._edge][property_name]
|
||||
|
||||
def set_property(self, property_name: str, value: object):
|
||||
self._graph.nx.edges[self._edge][property_name] = value
|
||||
|
||||
|
||||
class Path:
|
||||
"""Represents a path comprised of `Vertex` and `Edge` instances."""
|
||||
|
||||
__slots__ = ("_vertices", "_edges", "_graph")
|
||||
__create_key = object()
|
||||
|
||||
def __init__(self, create_key, vertex_id: int, graph: Graph) -> None:
|
||||
assert create_key == Path.__create_key, "Path objects must be created using Path.make_with_start"
|
||||
|
||||
self._vertices = [vertex_id]
|
||||
self._edges = []
|
||||
self._graph = graph
|
||||
|
||||
@classmethod
|
||||
def make_with_start(cls, vertex: Vertex) -> "Path":
|
||||
if not isinstance(vertex, Vertex):
|
||||
raise TypeError(f"Expected 'Vertex', got '{type(vertex)}'")
|
||||
|
||||
if not isinstance(vertex.underlying_graph, Graph):
|
||||
raise TypeError(f"Expected '_mgp_mock.Graph', got '{type(vertex.underlying_graph)}'")
|
||||
|
||||
if not vertex.underlying_graph.nx.has_node(vertex._id):
|
||||
raise IndexError(f"Unable to find vertex with ID {vertex._id}.")
|
||||
|
||||
return Path(cls.__create_key, vertex._id, vertex.underlying_graph)
|
||||
|
||||
def is_valid(self) -> bool:
|
||||
return self._graph.is_valid()
|
||||
|
||||
def underlying_graph_is_mutable(self) -> bool:
|
||||
return not nx.is_frozen(self._graph.nx)
|
||||
|
||||
def expand(self, edge: Edge):
|
||||
if edge.start_id != self._vertices[-1]:
|
||||
raise LogicErrorError("Logic error.")
|
||||
|
||||
self._vertices.append(edge.end_id)
|
||||
self._edges.append((edge.start_id, edge.end_id, edge.id))
|
||||
|
||||
def pop(self):
|
||||
if not self._edges:
|
||||
raise IndexError("Path contains no relationships.")
|
||||
|
||||
self._vertices.pop()
|
||||
self._edges.pop()
|
||||
|
||||
def vertex_at(self, index: int) -> Vertex:
|
||||
return Vertex(self._vertices[index], self._graph)
|
||||
|
||||
def edge_at(self, index: int) -> Edge:
|
||||
return Edge(self._edges[index], self._graph)
|
||||
|
||||
def size(self) -> int:
|
||||
return len(self._edges)
|
@@ -1,108 +0,0 @@
// Copyright 2024 Memgraph Ltd.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source
// License, and you may not use this file except in compliance with the Business Source License.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.

#pragma once

#include <cstdint>
#include <exception>
#include <iostream>
#include <sstream>
#include <string>

namespace mg_exception {

// Instead of writing this utility function, we could have used `fmt::format`, but that's not an ideal option here
// because it would introduce a dependency that would be propagated to the client code (if the exceptions here were
// used). Since the functionality here is not complex and the code is not on a critical path, we opted for a pure C++
// solution.
template <typename FirstArg, typename... Args>
std::string StringSerialize(FirstArg &&firstArg, Args &&...args) {
  std::stringstream stream;
  stream << std::forward<FirstArg>(firstArg);
  ((stream << " " << args), ...);
  return stream.str();
}
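As an illustration (not from the original header), the fold expression above streams the first argument as-is and prefixes every following argument with one space, so a call such as the following yields the single string "Invalid ID = 42":

std::string message = mg_exception::StringSerialize("Invalid ID =", 42);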
struct UnknownException : public std::exception {
  const char *what() const noexcept override { return "Unknown exception!"; }
};

struct NotEnoughMemoryException : public std::exception {
  NotEnoughMemoryException()
      : message_{
            StringSerialize("Not enough memory! For more details please visit", "https://memgr.ph/memory-control")} {}
  const char *what() const noexcept override { return message_.c_str(); }

 private:
  std::string message_;
};

struct AllocationException : public std::exception {
  AllocationException()
      : message_{StringSerialize("Could not allocate memory. For more details please visit",
                                 "https://memgr.ph/memory-control")} {}
  const char *what() const noexcept override { return message_.c_str(); }

 private:
  std::string message_;
};

struct InsufficientBufferException : public std::exception {
  const char *what() const noexcept override { return "Buffer is not sufficient to process procedure!"; }
};

struct OutOfRangeException : public std::exception {
  const char *what() const noexcept override { return "Index out of range!"; }
};

struct LogicException : public std::exception {
  const char *what() const noexcept override { return "Logic exception, check the procedure signature!"; }
};

struct DeletedObjectException : public std::exception {
  const char *what() const noexcept override { return "Object is deleted!"; }
};

struct InvalidArgumentException : public std::exception {
  const char *what() const noexcept override { return "Invalid argument!"; }
};

struct InvalidIDException : public std::exception {
  InvalidIDException() : message_{"Invalid ID!"} {}
  explicit InvalidIDException(std::uint64_t identifier) : message_{StringSerialize("Invalid ID =", identifier)} {}
  const char *what() const noexcept override { return message_.c_str(); }

 private:
  std::string message_;
};

struct KeyAlreadyExistsException : public std::exception {
  KeyAlreadyExistsException() : message_{"Key you are trying to set already exists!"} {}
  explicit KeyAlreadyExistsException(const std::string &key)
      : message_{StringSerialize("Key you are trying to set already exists! KEY = ", key)} {}
  const char *what() const noexcept override { return message_.c_str(); }

 private:
  std::string message_;
};

struct ImmutableObjectException : public std::exception {
  const char *what() const noexcept override { return "Object you are trying to change is immutable!"; }
};

struct ValueConversionException : public std::exception {
  const char *what() const noexcept override { return "Error in value conversion!"; }
};

struct SerializationException : public std::exception {
  const char *what() const noexcept override { return "Error in serialization!"; }
};

}  // namespace mg_exception
@@ -1,4 +1,4 @@
// Copyright 2024 Memgraph Ltd.
// Copyright 2022 Memgraph Ltd.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source
@@ -37,19 +37,12 @@ extern "C" {
/// All functions return an error code that can be used to figure out whether the API call was successful or not. In
/// case of failure, the specific error code can be used to identify the reason for the failure.
MGP_ENUM_CLASS MGP_NODISCARD mgp_error{
    MGP_ERROR_NO_ERROR,
    MGP_ERROR_UNKNOWN_ERROR,
    MGP_ERROR_UNABLE_TO_ALLOCATE,
    MGP_ERROR_INSUFFICIENT_BUFFER,
    MGP_ERROR_OUT_OF_RANGE,
    MGP_ERROR_LOGIC_ERROR,
    MGP_ERROR_DELETED_OBJECT,
    MGP_ERROR_INVALID_ARGUMENT,
    MGP_ERROR_KEY_ALREADY_EXISTS,
    MGP_ERROR_IMMUTABLE_OBJECT,
    MGP_ERROR_VALUE_CONVERSION,
    MGP_ERROR_SERIALIZATION_ERROR,
    MGP_ERROR_AUTHORIZATION_ERROR,
    MGP_ERROR_NO_ERROR, MGP_ERROR_UNKNOWN_ERROR,
    MGP_ERROR_UNABLE_TO_ALLOCATE, MGP_ERROR_INSUFFICIENT_BUFFER,
    MGP_ERROR_OUT_OF_RANGE, MGP_ERROR_LOGIC_ERROR,
    MGP_ERROR_DELETED_OBJECT, MGP_ERROR_INVALID_ARGUMENT,
    MGP_ERROR_KEY_ALREADY_EXISTS, MGP_ERROR_IMMUTABLE_OBJECT,
    MGP_ERROR_VALUE_CONVERSION, MGP_ERROR_SERIALIZATION_ERROR,
};
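Every call in this C API follows the same convention; a minimal usage sketch (written with the C++ spelling of the enum; `memory` is assumed to come from the surrounding procedure callback):

struct mgp_list *list = NULL;
// Check the returned mgp_error before touching the output parameter.
if (mgp_list_make_empty(8, memory, &list) != mgp_error::MGP_ERROR_NO_ERROR) {
  return;  // in a real procedure, report via mgp_result_set_error_msg
}
mgp_list_destroy(list);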
///@}

@@ -111,22 +104,6 @@ enum mgp_error mgp_global_aligned_alloc(size_t size_in_bytes, size_t alignment,
/// The behavior is undefined if `ptr` is not a value returned from a prior
/// mgp_global_alloc() or mgp_global_aligned_alloc().
void mgp_global_free(void *p);

/// State of the graph database.
struct mgp_graph;

/// Allocations are tracked only for the master thread. If new threads are spawned
/// inside a procedure, calling the following function starts tracking allocations
/// for the current thread too. This is important if you need the query memory limit
/// or a per-procedure memory limit to work for the given procedure.
enum mgp_error mgp_track_current_thread_allocations(struct mgp_graph *graph);

/// Once allocations are tracked for the current thread, you need to stop tracking
/// them before the thread finishes executing or is detached. Otherwise the system
/// might slow down due to unnecessary allocation tracking.
enum mgp_error mgp_untrack_current_thread_allocations(struct mgp_graph *graph);
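A hedged sketch of the pairing these two calls require when a procedure spawns its own worker thread (the thread body is illustrative; <thread> is assumed to be included):

std::thread worker([graph] {
  mgp_track_current_thread_allocations(graph);
  // ... allocation-heavy work on this thread ...
  mgp_untrack_current_thread_allocations(graph);  // must happen before the thread ends
});
worker.join();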
///@}

/// @name Operations on mgp_value
@@ -187,8 +164,6 @@ enum mgp_value_type {
  MGP_VALUE_TYPE_DURATION,
};

enum mgp_error mgp_value_copy(struct mgp_value *val, struct mgp_memory *memory, struct mgp_value **result);

/// Free the memory used by the given mgp_value instance.
void mgp_value_destroy(struct mgp_value *val);

@@ -424,14 +399,9 @@ enum mgp_error mgp_value_get_duration(struct mgp_value *val, struct mgp_duration
/// mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE is returned if unable to allocate a mgp_list.
enum mgp_error mgp_list_make_empty(size_t capacity, struct mgp_memory *memory, struct mgp_list **result);

enum mgp_error mgp_list_copy(struct mgp_list *list, struct mgp_memory *memory, struct mgp_list **result);

/// Free the memory used by the given mgp_list and contained elements.
void mgp_list_destroy(struct mgp_list *list);

/// Return whether the given mgp_list contains any deleted values.
enum mgp_error mgp_list_contains_deleted(struct mgp_list *list, int *result);

/// Append a copy of mgp_value to mgp_list if capacity allows.
/// The list copies the given value and therefore does not take ownership of the
/// original value. You still need to call mgp_value_destroy to free the
@@ -467,14 +437,9 @@ enum mgp_error mgp_list_at(struct mgp_list *list, size_t index, struct mgp_value
/// mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE is returned if unable to allocate a mgp_map.
enum mgp_error mgp_map_make_empty(struct mgp_memory *memory, struct mgp_map **result);

enum mgp_error mgp_map_copy(struct mgp_map *map, struct mgp_memory *memory, struct mgp_map **result);

/// Free the memory used by the given mgp_map and contained items.
void mgp_map_destroy(struct mgp_map *map);

/// Return whether the given mgp_map contains any deleted values.
enum mgp_error mgp_map_contains_deleted(struct mgp_map *map, int *result);

/// Insert a new mapping from a NULL terminated character string to a value.
/// If a mapping with the same key already exists, it is *not* replaced.
/// In case of insertion, both the string and the value are copied into the map.
@@ -484,18 +449,6 @@ enum mgp_error mgp_map_contains_deleted(struct mgp_map *map, int *result);
/// Return mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS if a previous mapping already exists.
enum mgp_error mgp_map_insert(struct mgp_map *map, const char *key, struct mgp_value *value);
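A short sketch of the insert contract just described; `map` and `value` are assumed to come from earlier API calls:

// The map stores copies, so the caller still owns (and must destroy) `value`.
if (mgp_map_insert(map, "count", value) == mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS) {
  // use mgp_map_update (below) to overwrite the existing mapping instead
}
mgp_value_destroy(value);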
/// Insert a mapping from a NULL terminated character string to a value.
/// If a mapping with the same key already exists, it is replaced.
/// In case of update, both the string and the value are copied into the map.
/// Therefore, the map does not take ownership of the original key nor value, so
/// you still need to free their memory explicitly.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate for insertion.
enum mgp_error mgp_map_update(struct mgp_map *map, const char *key, struct mgp_value *value);

/// Erase a mapping by key.
/// If the key doesn't exist in the map, nothing happens.
enum mgp_error mgp_map_erase(struct mgp_map *map, const char *key);

/// Get the number of items stored in mgp_map.
/// Current implementation always returns without errors.
enum mgp_error mgp_map_size(struct mgp_map *map, size_t *result);
@@ -504,9 +457,6 @@ enum mgp_error mgp_map_size(struct mgp_map *map, size_t *result);
/// Result is NULL if no mapping exists.
enum mgp_error mgp_map_at(struct mgp_map *map, const char *key, struct mgp_value **result);

/// Result is non-zero if the key is in the map.
enum mgp_error mgp_key_exists(struct mgp_map *map, const char *key, int *result);

/// An item in the mgp_map.
struct mgp_map_item;

@@ -558,9 +508,6 @@ enum mgp_error mgp_path_copy(struct mgp_path *path, struct mgp_memory *memory, s
/// Free the memory used by the given mgp_path and contained vertices and edges.
void mgp_path_destroy(struct mgp_path *path);

/// Return whether the given mgp_path contains any deleted values.
enum mgp_error mgp_path_contains_deleted(struct mgp_path *path, int *result);

/// Append an edge continuing from the last vertex on the path.
/// The edge is copied into the path. Therefore, the path does not take
/// ownership of the original edge, so you still need to free the edge memory
@@ -571,10 +518,6 @@ enum mgp_error mgp_path_contains_deleted(struct mgp_path *path, int *result);
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for path extension.
enum mgp_error mgp_path_expand(struct mgp_path *path, struct mgp_edge *edge);

/// Remove the last node and the last relationship from the path.
/// Return mgp_error::MGP_ERROR_OUT_OF_RANGE if the path contains no relationships.
enum mgp_error mgp_path_pop(struct mgp_path *path);
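A minimal sketch of growing and shrinking a path under the contracts above; `path` and `edge` are assumed to exist already:

if (mgp_path_expand(path, edge) != mgp_error::MGP_ERROR_NO_ERROR) {
  // e.g. an allocation failure, or the edge does not continue from the last vertex
}
mgp_path_pop(path);  // yields MGP_ERROR_OUT_OF_RANGE once no relationships remain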
/// Get the number of edges in a mgp_path.
/// Current implementation always returns without errors.
enum mgp_error mgp_path_size(struct mgp_path *path, size_t *result);
@@ -679,12 +622,6 @@ struct mgp_vertex_id {
/// Get the ID of the given vertex.
enum mgp_error mgp_vertex_get_id(struct mgp_vertex *v, struct mgp_vertex_id *result);

/// Get the in degree of the given vertex.
enum mgp_error mgp_vertex_get_in_degree(struct mgp_vertex *v, size_t *result);

/// Get the out degree of the given vertex.
enum mgp_error mgp_vertex_get_out_degree(struct mgp_vertex *v, size_t *result);

/// Result is non-zero if the vertex can be modified.
/// The mutability of the vertex is the same as that of the graph it is part of. If a vertex is immutable, then edges
/// cannot be created or deleted, properties and labels cannot be set or removed, and all of the returned edges will be
@@ -702,15 +639,6 @@ enum mgp_error mgp_vertex_underlying_graph_is_mutable(struct mgp_vertex *v, int
enum mgp_error mgp_vertex_set_property(struct mgp_vertex *v, const char *property_name,
                                       struct mgp_value *property_value);

/// Set the value of properties on a vertex.
/// When the value is `null`, the property is removed from the vertex.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for storing the property.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `v` is immutable.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `v` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `v` has been modified by another transaction.
/// Return mgp_error::MGP_ERROR_VALUE_CONVERSION if `property_value` is a vertex, edge or path.
enum mgp_error mgp_vertex_set_properties(struct mgp_vertex *v, struct mgp_map *properties);
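A hedged sketch of batch-setting properties; `props` is an mgp_map assumed to have been built with mgp_map_insert, and a `null` value removes that property per the comment above:

if (mgp_vertex_set_properties(v, props) != mgp_error::MGP_ERROR_NO_ERROR) {
  // immutable graph, deleted vertex, serialization conflict or allocation failure
}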
/// Add the label to the vertex.
/// If the vertex already has the label, this function does nothing.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for storing the label.
@@ -734,9 +662,6 @@ enum mgp_error mgp_vertex_copy(struct mgp_vertex *v, struct mgp_memory *memory,
/// Free the memory used by a mgp_vertex.
void mgp_vertex_destroy(struct mgp_vertex *v);

/// Return whether the given mgp_vertex is deleted.
enum mgp_error mgp_vertex_is_deleted(struct mgp_vertex *v, int *result);

/// Result is non-zero if the given vertices are equal, otherwise 0.
enum mgp_error mgp_vertex_equal(struct mgp_vertex *v1, struct mgp_vertex *v2, int *result);

@@ -831,9 +756,6 @@ enum mgp_error mgp_edge_copy(struct mgp_edge *e, struct mgp_memory *memory, stru
/// Free the memory used by a mgp_edge.
void mgp_edge_destroy(struct mgp_edge *e);

/// Return whether the given mgp_edge is deleted.
enum mgp_error mgp_edge_is_deleted(struct mgp_edge *e, int *result);

/// Result is non-zero if the given edges are equal, otherwise 0.
enum mgp_error mgp_edge_equal(struct mgp_edge *e1, struct mgp_edge *e2, int *result);

@@ -867,15 +789,6 @@ enum mgp_error mgp_edge_get_property(struct mgp_edge *e, const char *property_na
/// Return mgp_error::MGP_ERROR_VALUE_CONVERSION if `property_value` is a vertex, edge or path.
enum mgp_error mgp_edge_set_property(struct mgp_edge *e, const char *property_name, struct mgp_value *property_value);

/// Set the value of properties on an edge.
/// When the value is `null`, the property is removed from the edge.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for storing the property.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `e` is immutable.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `e` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `e` has been modified by another transaction.
/// Return mgp_error::MGP_ERROR_VALUE_CONVERSION if `property_value` is a vertex, edge or path.
enum mgp_error mgp_edge_set_properties(struct mgp_edge *e, struct mgp_map *properties);

/// Start iterating over properties stored in the given edge.
/// The properties of the edge are copied when the iterator is created, so later changes won't affect them.
/// The resulting mgp_properties_iterator needs to be deallocated with
@@ -885,113 +798,21 @@ enum mgp_error mgp_edge_set_properties(struct mgp_edge *e, struct mgp_map *prope
enum mgp_error mgp_edge_iter_properties(struct mgp_edge *e, struct mgp_memory *memory,
                                        struct mgp_properties_iterator **result);

/// State of the graph database.
struct mgp_graph;

/// Get the vertex corresponding to the given ID, or NULL if no such vertex exists.
/// The resulting vertex must be freed using mgp_vertex_destroy.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate the vertex.
enum mgp_error mgp_graph_get_vertex_by_id(struct mgp_graph *g, struct mgp_vertex_id id, struct mgp_memory *memory,
                                          struct mgp_vertex **result);

/// Result is non-zero if the index with the given name exists.
/// The current implementation always returns without errors.
enum mgp_error mgp_graph_has_text_index(struct mgp_graph *graph, const char *index_name, int *result);

/// Available modes of searching text indices.
MGP_ENUM_CLASS text_search_mode{
    SPECIFIED_PROPERTIES,
    REGEX,
    ALL_PROPERTIES,
};

/// Search the named text index for the given query. The result is a map with the "search_results" and "error_msg" keys.
/// The "search_results" key contains the vertices whose text-indexed properties match the given query.
/// In case of a Tantivy error, the "search_results" key is absent, and "error_msg" contains the error message.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if there's an allocation error while constructing the results map.
/// Return mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS if the same key is being created in the results map more than once.
enum mgp_error mgp_graph_search_text_index(struct mgp_graph *graph, const char *index_name, const char *search_query,
                                           enum text_search_mode search_mode, struct mgp_memory *memory,
                                           struct mgp_map **result);
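An illustrative call shape for the search entry point above; the index name and query string are made up, and the "search_results"/"error_msg" keys come from the comment:

struct mgp_map *out = NULL;
if (mgp_graph_search_text_index(graph, "documentIndex", "query", text_search_mode::ALL_PROPERTIES,
                                memory, &out) == mgp_error::MGP_ERROR_NO_ERROR) {
  struct mgp_value *hits = NULL;
  mgp_map_at(out, "search_results", &hits);
  // A NULL result here signals a Tantivy error; the message is under "error_msg".
}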
/// Aggregate over the results of a search over the named text index. The result is a map with the
/// "aggregation_results" and "error_msg" keys.
/// The "aggregation_results" key contains the vertices whose text-indexed properties match the given query.
/// In case of a Tantivy error, the "aggregation_results" key is absent, and "error_msg" contains the error message.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if there's an allocation error while constructing the results map.
/// Return mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS if the same key is being created in the results map more than once.
enum mgp_error mgp_graph_aggregate_over_text_index(struct mgp_graph *graph, const char *index_name,
                                                   const char *search_query, const char *aggregation_query,
                                                   struct mgp_memory *memory, struct mgp_map **result);

/// Creates a label index for the given label.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If the label index already exists, the result will be 0, otherwise 1.
enum mgp_error mgp_create_label_index(struct mgp_graph *graph, const char *label, int *result);
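A sketch of the result convention used by the index and constraint helpers that follow: the call itself always reports MGP_ERROR_NO_ERROR, and `*result` says whether the operation took effect (the label name is illustrative):

int created = 0;
mgp_create_label_index(graph, "Person", &created);
if (created == 0) {
  // an index on :Person already existed
}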
/// Drop a label index.
enum mgp_error mgp_drop_label_index(struct mgp_graph *graph, const char *label, int *result);

/// List all label indices.
enum mgp_error mgp_list_all_label_indices(struct mgp_graph *graph, struct mgp_memory *memory, struct mgp_list **result);

/// Creates a label-property index for the given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If the label-property index already exists, the result will be 0, otherwise 1.
enum mgp_error mgp_create_label_property_index(struct mgp_graph *graph, const char *label, const char *property,
                                               int *result);

/// Drops a label-property index for the given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If dropping the label-property index failed, the result will be 0, otherwise 1.
enum mgp_error mgp_drop_label_property_index(struct mgp_graph *graph, const char *label, const char *property,
                                             int *result);

/// List all label+property indices.
enum mgp_error mgp_list_all_label_property_indices(struct mgp_graph *graph, struct mgp_memory *memory,
                                                   struct mgp_list **result);

/// Creates an existence constraint for the given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If creating the existence constraint failed, the result will be 0, otherwise 1.
enum mgp_error mgp_create_existence_constraint(struct mgp_graph *graph, const char *label, const char *property,
                                               int *result);

/// Drops an existence constraint for the given label and property.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If dropping the existence constraint failed, the result will be 0, otherwise 1.
enum mgp_error mgp_drop_existence_constraint(struct mgp_graph *graph, const char *label, const char *property,
                                             int *result);

/// List all existence constraints.
enum mgp_error mgp_list_all_existence_constraints(struct mgp_graph *graph, struct mgp_memory *memory,
                                                  struct mgp_list **result);

/// Creates a unique constraint for the given label and properties.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If creating the unique constraint failed, the result will be 0, otherwise 1.
enum mgp_error mgp_create_unique_constraint(struct mgp_graph *graph, const char *label, struct mgp_value *properties,
                                            int *result);

/// Drops a unique constraint for the given label and properties.
/// mgp_error::MGP_ERROR_NO_ERROR is always returned.
/// If dropping the unique constraint failed, the result will be 0, otherwise 1.
enum mgp_error mgp_drop_unique_constraint(struct mgp_graph *graph, const char *label, struct mgp_value *properties,
                                          int *result);

/// List all unique constraints.
enum mgp_error mgp_list_all_unique_constraints(struct mgp_graph *graph, struct mgp_memory *memory,
                                               struct mgp_list **result);

/// Result is non-zero if the graph can be modified.
/// If a graph is immutable, then vertices cannot be created or deleted, and all of the returned vertices will be
/// immutable as well. The same applies to edges.
/// Current implementation always returns without errors.
enum mgp_error mgp_graph_is_mutable(struct mgp_graph *graph, int *result);

/// Result is non-zero if the graph is in transactional storage mode.
/// If a graph is not in transactional mode (i.e. it is in analytical mode), then vertices and edges can be missing
/// because changes from other transactions are visible.
/// Current implementation always returns without errors.
enum mgp_error mgp_graph_is_transactional(struct mgp_graph *graph, int *result);
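A hedged sketch of the guard a write procedure might place before any modification, using the check above:

int is_mutable = 0;
mgp_graph_is_mutable(graph, &is_mutable);
if (is_mutable == 0) {
  mgp_result_set_error_msg(result, "Graph is immutable.");
  return;
}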
/// Add a new vertex to the graph.
/// The resulting vertex must be freed using mgp_vertex_destroy.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
@@ -1018,29 +839,6 @@ enum mgp_error mgp_graph_detach_delete_vertex(struct mgp_graph *graph, struct mg
enum mgp_error mgp_graph_create_edge(struct mgp_graph *graph, struct mgp_vertex *from, struct mgp_vertex *to,
                                     struct mgp_edge_type type, struct mgp_memory *memory, struct mgp_edge **result);

/// Change an edge's source ("from") vertex.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate a mgp_edge.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `from` or `to` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `from` or `to` has been modified by another transaction.
enum mgp_error mgp_graph_edge_set_from(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_vertex *new_from,
                                       struct mgp_memory *memory, struct mgp_edge **result);

/// Change an edge's destination ("to") vertex.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate a mgp_edge.
/// Return mgp_error::MGP_ERROR_DELETED_OBJECT if `from` or `to` has been deleted.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `from` or `to` has been modified by another transaction.
enum mgp_error mgp_graph_edge_set_to(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_vertex *new_to,
                                     struct mgp_memory *memory, struct mgp_edge **result);

/// Change an edge's type.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `edge`, its source or its destination vertex has been modified
/// by another transaction.
enum mgp_error mgp_graph_edge_change_type(struct mgp_graph *graph, struct mgp_edge *e, struct mgp_edge_type new_type,
                                          struct mgp_memory *memory, struct mgp_edge **result);

/// Delete an edge from the graph.
/// Return mgp_error::MGP_ERROR_IMMUTABLE_OBJECT if `graph` is immutable.
/// Return mgp_error::MGP_ERROR_SERIALIZATION_ERROR if `edge`, its source or its destination vertex has been modified
/// by another transaction.
@@ -1494,12 +1292,6 @@ struct mgp_proc;
/// Describes a Memgraph magic function.
struct mgp_func;

/// All available log levels that can be used in the mgp_log function.
MGP_ENUM_CLASS mgp_log_level{
    MGP_LOG_LEVEL_TRACE, MGP_LOG_LEVEL_DEBUG, MGP_LOG_LEVEL_INFO,
    MGP_LOG_LEVEL_WARN,  MGP_LOG_LEVEL_ERROR, MGP_LOG_LEVEL_CRITICAL,
};

/// Entry point for a query module read procedure, invoked through openCypher.
///
/// Passed-in arguments will not live longer than the callback's execution.
@@ -1507,13 +1299,6 @@ MGP_ENUM_CLASS mgp_log_level{
/// to allocate global resources.
typedef void (*mgp_proc_cb)(struct mgp_list *, struct mgp_graph *, struct mgp_result *, struct mgp_memory *);

/// Cleanup for a query module batched read procedure. Can't be invoked through openCypher. Cleans up the batched
/// stream.
typedef void (*mgp_proc_cleanup)();

/// Initializer for a query module batched read procedure. Can't be invoked through openCypher. Initializes the
/// batched stream.
typedef void (*mgp_proc_initializer)(struct mgp_list *, struct mgp_graph *, struct mgp_memory *);

/// Register a read-only procedure with a module.
///
/// The `name` must be a sequence of digits, underscores, lowercase and
@@ -1538,30 +1323,6 @@ enum mgp_error mgp_module_add_read_procedure(struct mgp_module *module, const ch
enum mgp_error mgp_module_add_write_procedure(struct mgp_module *module, const char *name, mgp_proc_cb cb,
                                              struct mgp_proc **result);

/// Register a batched read procedure with a module.
///
/// The `name` must be a valid identifier, following the same rules as the
/// procedure `name` in mgp_module_add_read_procedure.
///
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for mgp_proc.
/// Return mgp_error::MGP_ERROR_INVALID_ARGUMENT if `name` is not a valid procedure name.
/// Return mgp_error::MGP_ERROR_LOGIC_ERROR if a procedure with the same name was already registered.
enum mgp_error mgp_module_add_batch_read_procedure(struct mgp_module *module, const char *name, mgp_proc_cb cb,
                                                   mgp_proc_initializer initializer, mgp_proc_cleanup cleanup,
                                                   struct mgp_proc **result);
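An illustrative registration of a batched read procedure; `batch_cb`, `init_cb` and `cleanup_cb` stand in for callbacks matching the typedefs above:

struct mgp_proc *proc = NULL;
if (mgp_module_add_batch_read_procedure(module, "stream_rows", batch_cb, init_cb, cleanup_cb, &proc) !=
    mgp_error::MGP_ERROR_NO_ERROR) {
  return 1;  // e.g. an invalid name or a duplicate registration
}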
/// Register a batched write procedure with a module.
///
/// The `name` must be a valid identifier, following the same rules as the
/// procedure `name` in mgp_module_add_read_procedure.
///
/// Return mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE if unable to allocate memory for mgp_proc.
/// Return mgp_error::MGP_ERROR_INVALID_ARGUMENT if `name` is not a valid procedure name.
/// Return mgp_error::MGP_ERROR_LOGIC_ERROR if a procedure with the same name was already registered.
enum mgp_error mgp_module_add_batch_write_procedure(struct mgp_module *module, const char *name, mgp_proc_cb cb,
                                                    mgp_proc_initializer initializer, mgp_proc_cleanup cleanup,
                                                    struct mgp_proc **result);

/// Add a required argument to a procedure.
///
/// The order of adding arguments will correspond to the order the procedure
@@ -1625,9 +1386,6 @@ enum mgp_error mgp_proc_add_result(struct mgp_proc *proc, const char *name, stru
/// Return mgp_error::MGP_ERROR_INVALID_ARGUMENT if `name` is not a valid result name.
/// Return mgp_error::MGP_ERROR_LOGIC_ERROR if a result field with the same name was already added.
enum mgp_error mgp_proc_add_deprecated_result(struct mgp_proc *proc, const char *name, struct mgp_type *type);

/// Log a message at a certain level.
enum mgp_error mgp_log(enum mgp_log_level log_level, const char *output);
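An illustrative one-liner for the logging entry point (written with the C++ spelling of the enum):

mgp_log(mgp_log_level::MGP_LOG_LEVEL_INFO, "my_module: batch processed");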
///@}

/// @name Execution
@@ -1637,10 +1395,7 @@ enum mgp_error mgp_log(enum mgp_log_level log_level, const char *output);
/// @{

/// Return non-zero if the currently executing procedure should abort as soon as
/// possible. If non-zero, the reasons are:
///   (1) the transaction was requested to be terminated,
///   (2) the server is gracefully shutting down, or
///   (3) the transaction has hit its timeout threshold.
/// possible.
///
/// Procedures which perform heavyweight processing run the risk of running too
/// long and going over the query execution time limit. To prevent this, such
@@ -1757,10 +1512,6 @@ enum mgp_error mgp_module_add_transformation(struct mgp_module *module, const ch
///
///@{

/// State of the database that is exposed to magic functions. Currently it is unused, but it enables extending the
/// functionality of magic functions in the future without breaking the API.
struct mgp_func_context;

/// Add a required argument to a function.
///
/// The order of the added arguments corresponds to the signature of the openCypher function.
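A hedged sketch of registering a magic function with the pieces declared in this header; `my_func` and `number_type` are illustrative stand-ins:

struct mgp_func *func = NULL;
mgp_module_add_function(module, "multiply", my_func, &func);
// Each mgp_func_add_arg call appends one argument; their order defines the
// openCypher signature, as the comment above notes.
mgp_func_add_arg(func, "a", number_type);
mgp_func_add_arg(func, "b", number_type);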
include/mgp.hpp (4591 changed lines): file diff suppressed because it is too large.
include/mgp.py (633 changed lines): file diff suppressed because it is too large.
include/mgp_mock.py (1674 changed lines): file diff suppressed because it is too large.
init (108 changed lines):
@@ -5,16 +5,13 @@ cd "$DIR"

source "$DIR/environment/util.sh"

DISTRO=$(operating_system)
ARCHITECTURE=$(architecture)

function print_help () {
    echo "Usage: $0 [OPTION]"
    echo -e "Check for missing packages and setup the project.\n"
    echo "Optional arguments:"
    echo -e "  -h\tdisplay this help and exit"
    echo -e "  --without-libs-setup\tskip the step for setting up libs"
    echo -e "  --ci\tscript is being run inside ci"
    echo -e "  --wsl-quicklisp-proxy \"host:port\"\tquicklisp HTTP proxy (this flag + HTTP proxy are required on WSL)"
}

function setup_virtualenv () {
@@ -35,22 +32,28 @@ function setup_virtualenv () {
    popd > /dev/null
}

wsl_quicklisp_proxy=""
setup_libs=true
ci=false
if [[ $# -eq 1 && "$1" == "-h" ]]; then
    print_help
    exit 0
else
    while(($#)); do
        case "$1" in
            --wsl-quicklisp-proxy)
                shift
                if [[ $# -eq 0 ]]; then
                    echo "Missing proxy URL"
                    print_help
                    exit 1
                fi
                wsl_quicklisp_proxy=":proxy \"http://$1/\""
                shift
            ;;
            --without-libs-setup)
                shift
                setup_libs=false
            ;;
            --ci)
                shift
                ci=true
            ;;
            *)
                # unknown option
                echo "Invalid argument provided: $1"
@@ -61,6 +64,8 @@ else
    done
fi

DISTRO=$(operating_system)
ARCHITECTURE=$(architecture)
if [ "${ARCHITECTURE}" = "arm64" ] || [ "${ARCHITECTURE}" = "aarch64" ]; then
    OS_SCRIPT=$DIR/environment/os/$DISTRO-arm.sh
else
@@ -73,22 +78,37 @@ echo "All packages are in-place..."
# create a default build directory
mkdir -p ./build

if [[ "$setup_libs" == "true" ]]; then
    # Setup libs (download).
    cd libs
    ./cleanup.sh
    ./setup.sh
    cd ..
# quicklisp package manager for Common Lisp
quicklisp_install_dir="$HOME/quicklisp"
if [[ -v QUICKLISP_HOME ]]; then
    quicklisp_install_dir="${QUICKLISP_HOME}"
fi

# Fix for centos 7 during release
if [[ "$ci" == "false" ]]; then
    if [ "${DISTRO}" = "centos-7" ] || [ "${DISTRO}" = "debian-11" ] || [ "${DISTRO}" = "amzn-2" ]; then
        if python3 -m pip show virtualenv >/dev/null 2>/dev/null; then
            python3 -m pip uninstall -y virtualenv
        fi
        python3 -m pip install virtualenv
    fi
if [[ ! -f "${quicklisp_install_dir}/setup.lisp" ]]; then
    wget -nv https://beta.quicklisp.org/quicklisp.lisp -O quicklisp.lisp || exit 1
    echo \
        "
        (load \"${DIR}/quicklisp.lisp\")
        (quicklisp-quickstart:install $wsl_quicklisp_proxy :path \"${quicklisp_install_dir}\")
        " | sbcl --script || exit 1
    rm -rf quicklisp.lisp || exit 1
fi
ln -Tfs "$DIR/src/lisp" "${quicklisp_install_dir}/local-projects/lcp"
# Install LCP dependencies
# TODO: We should at some point cache or have a mirror of packages we use.
# TODO: move the installation of LCP's dependencies into ./setup.sh
echo \
    "
    (load \"${quicklisp_install_dir}/setup.lisp\")
    (ql:quickload '(:lcp :lcp/test) :silent t)
    " | sbcl --script

if [[ "$setup_libs" == "true" ]]; then
    # Setup libs (download).
    cd libs
    ./cleanup.sh
    ./setup.sh
    cd ..
fi

# setup gql_behave dependencies
@@ -101,10 +121,6 @@ setup_virtualenv tests/stress
setup_virtualenv tests/integration/ldap

# Setup tests dependencies.
# NOTE: This is commented out because of the build order (at the time of
# execution mgclient is not built yet), which makes this setup fail. mgclient
# is built during the make phase. The tests/setup.sh is called under GHA CI
# jobs.
# cd tests
# ./setup.sh
# cd ..
@@ -114,30 +130,22 @@ setup_virtualenv tests/integration/ldap

echo "Done installing dependencies for Memgraph"

echo "Linking git hooks OR skip if .git folder is not there"
if [ -d "$DIR/.git" ]; then
    for hook in $(find $DIR/.githooks -type f -printf "%f\n"); do
        ln -s -f "$DIR/.githooks/$hook" "$DIR/.git/hooks/$hook"
        echo "Added $hook hook"
    done;
else
    echo "WARNING: .git folder not present, skip adding hooks"
fi
echo "Linking git hooks"
for hook in $(find $DIR/.githooks -type f -printf "%f\n"); do
    ln -s -f "$DIR/.githooks/$hook" "$DIR/.git/hooks/$hook"
    echo "Added $hook hook"
done;

# Install precommit hook
python3 -m pip install pre-commit
python3 -m pre_commit install

# Install py format tools
echo "Install black formatter"
python3 -m pip install black==22.10.*
echo "Install isort"
python3 -m pip install isort==5.10.*

# Install precommit hook except on old operating systems because we don't
# develop on them -> pre-commit hook not required -> we can use latest
# packages.
if [[ "$ci" == "false" ]]; then
    if [ "${DISTRO}" != "centos-7" ] && [ "$DISTRO" != "debian-10" ] && [ "${DISTRO}" != "ubuntu-18.04" ] && [ "${DISTRO}" != "amzn-2" ]; then
        python3 -m pip install pre-commit
        python3 -m pre_commit install
        # Install py format tools for usage during the development.
        echo "Install black formatter"
        python3 -m pip install black==23.1.*
        echo "Install isort"
        python3 -m pip install isort==5.12.*
    fi
fi

# Link `include/mgp.py` with `release/mgp/mgp.py`
ln -v -f include/mgp.py release/mgp/mgp.py

libs/.gitignore (2 changes, vendored):
@@ -6,5 +6,3 @@
!__main.cpp
!pulsar.patch
!antlr4.10.1.patch
!rocksdb8.1.1.patch
!nuraft2.1.0.patch
@ -4,8 +4,7 @@ include(GNUInstallDirs)
|
||||
|
||||
include(ProcessorCount)
|
||||
ProcessorCount(NPROC)
|
||||
|
||||
if(NPROC EQUAL 0)
|
||||
if (NPROC EQUAL 0)
|
||||
set(NPROC 1)
|
||||
endif()
|
||||
|
||||
@ -13,10 +12,9 @@ find_package(Boost 1.78 REQUIRED)
|
||||
find_package(BZip2 1.0.6 REQUIRED)
|
||||
find_package(Threads REQUIRED)
|
||||
set(GFLAGS_NOTHREADS OFF)
|
||||
|
||||
# NOTE: config/generate.py depends on the gflags help XML format.
|
||||
find_package(gflags REQUIRED)
|
||||
find_package(fmt 8.0.1 REQUIRED)
|
||||
find_package(fmt 8.0.1)
|
||||
find_package(Jemalloc REQUIRED)
|
||||
find_package(ZLIB 1.2.11 REQUIRED)
|
||||
|
||||
set(LIB_DIR ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
@ -25,27 +23,23 @@ set(LIB_DIR ${CMAKE_CURRENT_SOURCE_DIR})
|
||||
function(import_header_library name include_dir)
|
||||
add_library(${name} INTERFACE IMPORTED GLOBAL)
|
||||
set_property(TARGET ${name} PROPERTY
|
||||
INTERFACE_INCLUDE_DIRECTORIES ${include_dir})
|
||||
INTERFACE_INCLUDE_DIRECTORIES ${include_dir})
|
||||
string(TOUPPER ${name} _upper_name)
|
||||
set(${_upper_name}_INCLUDE_DIR ${include_dir} CACHE FILEPATH
|
||||
"Path to ${name} include directory" FORCE)
|
||||
"Path to ${name} include directory" FORCE)
|
||||
mark_as_advanced(${_upper_name}_INCLUDE_DIR)
|
||||
add_library(lib::${name} ALIAS ${name})
|
||||
endfunction(import_header_library)
|
||||
|
||||
function(import_library name type location include_dir)
|
||||
add_library(${name} ${type} IMPORTED GLOBAL)
|
||||
|
||||
if(${ARGN})
|
||||
if (${ARGN})
|
||||
# Optional argument is the name of the external project that we need to
|
||||
# depend on.
|
||||
add_dependencies(${name} ${ARGN0})
|
||||
else()
|
||||
add_dependencies(${name} ${name}-proj)
|
||||
endif()
|
||||
|
||||
set_property(TARGET ${name} PROPERTY IMPORTED_LOCATION ${location})
|
||||
|
||||
# We need to create the include directory first in order to be able to add it
|
||||
# as an include directory. The header files in the include directory will be
|
||||
# generated later during the build process.
|
||||
@ -65,59 +59,43 @@ function(add_external_project name)
|
||||
set(options NO_C_COMPILER)
|
||||
set(one_value_kwargs SOURCE_DIR BUILD_IN_SOURCE)
|
||||
set(multi_value_kwargs CMAKE_ARGS DEPENDS INSTALL_COMMAND BUILD_COMMAND
|
||||
CONFIGURE_COMMAND)
|
||||
CONFIGURE_COMMAND)
|
||||
cmake_parse_arguments(KW "${options}" "${one_value_kwargs}" "${multi_value_kwargs}" ${ARGN})
|
||||
set(source_dir ${CMAKE_CURRENT_SOURCE_DIR}/${name})
|
||||
|
||||
if(KW_SOURCE_DIR)
|
||||
if (KW_SOURCE_DIR)
|
||||
set(source_dir ${KW_SOURCE_DIR})
|
||||
endif()
|
||||
|
||||
set(build_in_source 0)
|
||||
|
||||
if(KW_BUILD_IN_SOURCE)
|
||||
if (KW_BUILD_IN_SOURCE)
|
||||
set(build_in_source ${KW_BUILD_IN_SOURCE})
|
||||
endif()
|
||||
|
||||
if(NOT KW_NO_C_COMPILER)
|
||||
if (NOT KW_NO_C_COMPILER)
|
||||
set(KW_CMAKE_ARGS -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER} ${KW_CMAKE_ARGS})
|
||||
endif()
|
||||
|
||||
ExternalProject_Add(${name}-proj DEPENDS ${KW_DEPENDS}
|
||||
PREFIX ${source_dir} SOURCE_DIR ${source_dir}
|
||||
BUILD_IN_SOURCE ${build_in_source}
|
||||
CONFIGURE_COMMAND ${KW_CONFIGURE_COMMAND}
|
||||
CMAKE_ARGS -DCMAKE_BUILD_TYPE=Release
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_INSTALL_PREFIX=${source_dir}
|
||||
${KW_CMAKE_ARGS}
|
||||
INSTALL_COMMAND ${KW_INSTALL_COMMAND}
|
||||
BUILD_COMMAND ${KW_BUILD_COMMAND})
|
||||
PREFIX ${source_dir} SOURCE_DIR ${source_dir}
|
||||
BUILD_IN_SOURCE ${build_in_source}
|
||||
CONFIGURE_COMMAND ${KW_CONFIGURE_COMMAND}
|
||||
CMAKE_ARGS -DCMAKE_BUILD_TYPE=Release
|
||||
-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
|
||||
-DCMAKE_INSTALL_PREFIX=${source_dir}
|
||||
${KW_CMAKE_ARGS}
|
||||
INSTALL_COMMAND ${KW_INSTALL_COMMAND}
|
||||
BUILD_COMMAND ${KW_BUILD_COMMAND})
|
||||
endfunction(add_external_project)
|
||||
|
||||
# Calls `add_external_project`, sets NAME_LIBRARY, NAME_INCLUDE_DIR variables
|
||||
# and adds the library via `import_library`.
|
||||
macro(import_external_library name type library_location include_dir)
|
||||
add_external_project(${name} ${ARGN})
|
||||
string(TOUPPER ${name} _upper_name)
|
||||
set(${_upper_name}_LIBRARY ${library_location} CACHE FILEPATH
|
||||
"Path to ${name} library" FORCE)
|
||||
set(${_upper_name}_INCLUDE_DIR ${include_dir} CACHE FILEPATH
|
||||
"Path to ${name} include directory" FORCE)
|
||||
mark_as_advanced(${_upper_name}_LIBRARY ${_upper_name}_INCLUDE_DIR)
|
||||
import_library(${name} ${type} ${${_upper_name}_LIBRARY} ${${_upper_name}_INCLUDE_DIR})
|
||||
endmacro(import_external_library)
|
||||
|
||||
|
||||
macro(set_path_external_library name type library_location include_dir)
|
||||
string(TOUPPER ${name} _upper_name)
|
||||
set(${_upper_name}_LIBRARY ${library_location} CACHE FILEPATH
|
||||
"Path to ${name} library" FORCE)
|
||||
set(${_upper_name}_INCLUDE_DIR ${include_dir} CACHE FILEPATH
|
||||
"Path to ${name} include directory" FORCE)
|
||||
mark_as_advanced(${name}_LIBRARY ${name}_INCLUDE_DIR)
|
||||
endmacro(set_path_external_library)
|
||||
|
||||
mark_as_advanced(${_upper_name}_LIBRARY ${_upper_name}_INCLUDE_DIR)
|
||||
import_library(${name} ${type} ${${_upper_name}_LIBRARY} ${${_upper_name}_INCLUDE_DIR})
|
||||
endmacro(import_external_library)
|
||||
|
||||
# setup antlr
|
||||
import_external_library(antlr4 STATIC
|
||||
@ -125,10 +103,10 @@ import_external_library(antlr4 STATIC
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/antlr4/runtime/Cpp/include/antlr4-runtime
|
||||
SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/antlr4/runtime/Cpp
|
||||
CMAKE_ARGS # http://stackoverflow.com/questions/37096062/get-a-basic-c-program-to-compile-using-clang-on-ubuntu-16/38385967#38385967
|
||||
-DWITH_LIBCXX=OFF # because of debian bug
|
||||
-DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=true
|
||||
-DCMAKE_CXX_STANDARD=20
|
||||
-DANTLR_BUILD_CPP_TESTS=OFF
|
||||
-DWITH_LIBCXX=OFF # because of debian bug
|
||||
-DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=true
|
||||
-DCMAKE_CXX_STANDARD=20
|
||||
-DANTLR_BUILD_CPP_TESTS=OFF
|
||||
BUILD_COMMAND $(MAKE) antlr4_static
|
||||
INSTALL_COMMAND $(MAKE) install)
|
||||
|
||||
@ -136,7 +114,6 @@ import_external_library(antlr4 STATIC
|
||||
import_external_library(benchmark STATIC
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/benchmark/${CMAKE_INSTALL_LIBDIR}/libbenchmark.a
|
||||
${CMAKE_CURRENT_SOURCE_DIR}/benchmark/include
|
||||
|
||||
# Skip testing. The tests don't compile with Clang 8.
|
||||
CMAKE_ARGS -DBENCHMARK_ENABLE_TESTING=OFF)
|
||||
|
||||
@@ -152,15 +129,15 @@ add_subdirectory(rapidcheck EXCLUDE_FROM_ALL)
# setup google test
add_external_project(gtest SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/googletest)
set(GTEST_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/googletest/include
    CACHE PATH "Path to gtest and gmock include directory" FORCE)
set(GMOCK_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgmock.a
    CACHE FILEPATH "Path to gmock library" FORCE)
set(GMOCK_MAIN_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgmock_main.a
    CACHE FILEPATH "Path to gmock_main library" FORCE)
set(GTEST_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgtest.a
    CACHE FILEPATH "Path to gtest library" FORCE)
set(GTEST_MAIN_LIBRARY ${CMAKE_CURRENT_SOURCE_DIR}/googletest/lib/libgtest_main.a
    CACHE FILEPATH "Path to gtest_main library" FORCE)
mark_as_advanced(GTEST_INCLUDE_DIR GMOCK_LIBRARY GMOCK_MAIN_LIBRARY GTEST_LIBRARY GTEST_MAIN_LIBRARY)
import_library(gtest STATIC ${GTEST_LIBRARY} ${GTEST_INCLUDE_DIR} gtest-proj)
import_library(gtest_main STATIC ${GTEST_MAIN_LIBRARY} ${GTEST_INCLUDE_DIR} gtest-proj)
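# Illustrative consumer of the imported targets above ("my_test" is a
# hypothetical target, not defined in this file):
#
#   add_executable(my_test my_test.cpp)
#   target_link_libraries(my_test gtest gtest_main)
#   add_test(NAME my_test COMMAND my_test)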
@@ -178,10 +155,10 @@ import_external_library(rocksdb STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/rocksdb/lib/librocksdb.a
  ${CMAKE_CURRENT_SOURCE_DIR}/rocksdb/include
  CMAKE_ARGS -DUSE_RTTI=ON
    -DWITH_TESTS=OFF
    -DGFLAGS_NOTHREADS=OFF
    -DCMAKE_INSTALL_LIBDIR=lib
    -DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=true
  BUILD_COMMAND $(MAKE) rocksdb)
# Setup libbcrypt
@@ -190,8 +167,8 @@ import_external_library(libbcrypt STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/libbcrypt
  CONFIGURE_COMMAND sed s/-Wcast-align// -i ${CMAKE_CURRENT_SOURCE_DIR}/libbcrypt/crypt_blowfish/Makefile
  BUILD_COMMAND make -C ${CMAKE_CURRENT_SOURCE_DIR}/libbcrypt
    CC=${CMAKE_C_COMPILER}
    CXX=${CMAKE_CXX_COMPILER}
  INSTALL_COMMAND true)
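# `true` acts as a deliberate no-op install step: the library is consumed
# straight from its build tree, so there is nothing to install.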
# Setup mgclient
@@ -199,16 +176,16 @@ import_external_library(mgclient STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/mgclient/lib/libmgclient.a
  ${CMAKE_CURRENT_SOURCE_DIR}/mgclient/include
  CMAKE_ARGS -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
    -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
    -DBUILD_TESTING=OFF
    -DBUILD_CPP_BINDINGS=ON)
find_package(OpenSSL REQUIRED)
target_link_libraries(mgclient INTERFACE ${OPENSSL_LIBRARIES})

add_external_project(mgconsole
  SOURCE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/mgconsole
  CMAKE_ARGS
    -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_BINARY_DIR}
  BUILD_COMMAND $(MAKE) mgconsole)

add_custom_target(mgconsole DEPENDS mgconsole-proj)
@@ -225,15 +202,14 @@ import_external_library(librdkafka STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/librdkafka/lib/librdkafka.a
  ${CMAKE_CURRENT_SOURCE_DIR}/librdkafka/include
  CMAKE_ARGS -DRDKAFKA_BUILD_STATIC=ON
    -DRDKAFKA_BUILD_EXAMPLES=OFF
    -DRDKAFKA_BUILD_TESTS=OFF
    -DWITH_ZSTD=OFF
    -DENABLE_LZ4_EXT=OFF
    -DCMAKE_INSTALL_LIBDIR=lib
    -DWITH_SSL=ON
    # If we want SASL, we need to install it on build machines
    -DWITH_SASL=OFF)
target_link_libraries(librdkafka INTERFACE ${OPENSSL_LIBRARIES} ZLIB::ZLIB)

import_library(librdkafka++ STATIC
@@ -254,24 +230,24 @@ import_external_library(pulsar STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/pulsar/install/include
  BUILD_IN_SOURCE 1
  CONFIGURE_COMMAND cmake pulsar-client-cpp
    -DCMAKE_INSTALL_PREFIX=${CMAKE_CURRENT_SOURCE_DIR}/pulsar/install
    -DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}
    -DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}
    -DBUILD_DYNAMIC_LIB=OFF
    -DBUILD_STATIC_LIB=ON
    -DBUILD_TESTS=OFF
    -DLINK_STATIC=ON
    -DPROTOC_PATH=${PROTOBUF_ROOT}/bin/protoc
    -DBOOST_ROOT=${BOOST_ROOT}
    -DCMAKE_PREFIX_PATH=${PROTOBUF_ROOT}
    -DProtobuf_INCLUDE_DIRS=${PROTOBUF_ROOT}/include
    -DBUILD_PYTHON_WRAPPER=OFF
    -DBUILD_PERF_TOOLS=OFF
    -DUSE_LOG4CXX=OFF
  BUILD_COMMAND $(MAKE) pulsarStaticWithDeps)
add_dependencies(pulsar-proj protobuf)
-if(${MG_ARCH} STREQUAL "ARM64")
+if (${MG_ARCH} STREQUAL "ARM64")
  set(MG_LIBRDTSC_CMAKE_ARGS -DLIBRDTSC_ARCH_x86=OFF -DLIBRDTSC_ARCH_ARM64=ON)
endif()

@@ -280,52 +256,3 @@ import_external_library(librdtsc STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/librdtsc/include
  CMAKE_ARGS ${MG_LIBRDTSC_CMAKE_ARGS}
  BUILD_COMMAND $(MAKE) rdtsc)

# setup ctre
import_header_library(ctre ${CMAKE_CURRENT_SOURCE_DIR})

# setup absl (cmake sub_directory tolerant)
set(ABSL_PROPAGATE_CXX_STD ON)
add_subdirectory(absl EXCLUDE_FROM_ALL)

# set Jemalloc
set_path_external_library(jemalloc STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/jemalloc/lib/libjemalloc.a
  ${CMAKE_CURRENT_SOURCE_DIR}/jemalloc/include/)

import_header_library(rangev3 ${CMAKE_CURRENT_SOURCE_DIR}/rangev3/include)

ExternalProject_Add(mgcxx-proj
  PREFIX mgcxx-proj
  GIT_REPOSITORY https://github.com/memgraph/mgcxx
  GIT_TAG "v0.0.4"
  CMAKE_ARGS
    "-DCMAKE_INSTALL_PREFIX=<INSTALL_DIR>"
    "-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}"
    "-DENABLE_TESTS=OFF"
    "-DCMAKE_C_COMPILER=${CMAKE_C_COMPILER}"
    "-DCMAKE_CXX_COMPILER=${CMAKE_CXX_COMPILER}"
  INSTALL_DIR "${PROJECT_BINARY_DIR}/mgcxx"
)
ExternalProject_Get_Property(mgcxx-proj install_dir)
set(MGCXX_ROOT ${install_dir})

add_library(tantivy_text_search STATIC IMPORTED GLOBAL)
add_dependencies(tantivy_text_search mgcxx-proj)
set_property(TARGET tantivy_text_search PROPERTY IMPORTED_LOCATION ${MGCXX_ROOT}/lib/libtantivy_text_search.a)

add_library(mgcxx_text_search STATIC IMPORTED GLOBAL)
add_dependencies(mgcxx_text_search mgcxx-proj)
set_property(TARGET mgcxx_text_search PROPERTY IMPORTED_LOCATION ${MGCXX_ROOT}/lib/libmgcxx_text_search.a)
# We need to create the include directory first in order to be able to add it
# as an include directory. The header files in the include directory will be
# generated later during the build process.
file(MAKE_DIRECTORY ${MGCXX_ROOT}/include)
set_property(TARGET mgcxx_text_search PROPERTY INTERFACE_INCLUDE_DIRECTORIES ${MGCXX_ROOT}/include)
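# Illustrative consumer (hypothetical target): because the include directory
# is pre-created above, a dependent can reference the interface include dir at
# configure time even though the headers only appear once mgcxx-proj has built:
#
#   add_executable(text_search_demo demo.cpp)
#   target_link_libraries(text_search_demo mgcxx_text_search tantivy_text_search)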

# Setup NuRaft
import_external_library(nuraft STATIC
  ${CMAKE_CURRENT_SOURCE_DIR}/nuraft/lib/libnuraft.a
  ${CMAKE_CURRENT_SOURCE_DIR}/nuraft/include/)
find_package(OpenSSL REQUIRED)
target_link_libraries(nuraft INTERFACE ${OPENSSL_LIBRARIES})
@@ -5,7 +5,7 @@ index ee9b58c..31359a9 100644
 @@ -48,7 +48,7 @@ option(LIBRDTSC_USE_PMU "Enables PMU usage on ARM platforms" OFF)
 # | Library Build and Install Properties |
 # +--------------------------------------------------------+

-add_library(rdtsc SHARED
+add_library(rdtsc
 src/cycles.c
@@ -14,7 +14,7 @@ index ee9b58c..31359a9 100644
 @@ -72,15 +72,6 @@ target_include_directories(rdtsc
 PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include
 )

-# Install directory changes depending on build mode
-if (CMAKE_BUILD_TYPE MATCHES "^[Dd]ebug")
-  # During debug, the library will be installed into a local directory
@@ -27,15 +27,3 @@ index ee9b58c..31359a9 100644
 # Specifying what to export when installing (GNUInstallDirs required)
 install(TARGETS rdtsc
 EXPORT librstsc-config
 diff --git a/include/librdtsc/common_timer.h b/include/librdtsc/common_timer.h
 index a6922d8..080dc77 100644
 --- a/include/librdtsc/common_timer.h
 +++ b/include/librdtsc/common_timer.h
 @@ -2,6 +2,7 @@
 #define LIBRDTSC_COMMON_TIMER_H

 #include <librdtsc/common.h>
+#include <librdtsc/cycles.h>

 extern uint64_t rdtsc_get_tsc_freq_arch();
 extern uint64_t rdtsc_get_tsc_freq();
@@ -1,24 +0,0 @@
diff --git a/include/libnuraft/asio_service_options.hxx b/include/libnuraft/asio_service_options.hxx
index 8fe1ec9..9497355 100644
--- a/include/libnuraft/asio_service_options.hxx
+++ b/include/libnuraft/asio_service_options.hxx
@@ -17,6 +17,7 @@ limitations under the License.

 #pragma once

+#include <cstdint>
 #include <functional>
 #include <string>
 #include <system_error>
diff --git a/include/libnuraft/callback.hxx b/include/libnuraft/callback.hxx
index 7b71624..d48c1e2 100644
--- a/include/libnuraft/callback.hxx
+++ b/include/libnuraft/callback.hxx
@@ -18,6 +18,7 @@ limitations under the License.
 #ifndef _CALLBACK_H_
 #define _CALLBACK_H_

+#include <cstdint>
 #include <functional>
 #include <string>
21 libs/rocksdb.patch Normal file
@@ -0,0 +1,21 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 6761929..6a369af 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -220,6 +220,7 @@ else()
       set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -momit-leaf-frame-pointer")
     endif()
   endif()
+  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-deprecated-copy -Wno-unused-but-set-variable")
 endif()

 include(CheckCCompilerFlag)
@@ -997,7 +998,7 @@ if(NOT WIN32 OR ROCKSDB_INSTALL_ON_WINDOWS)

 if(ROCKSDB_BUILD_SHARED)
   install(
-    TARGETS ${ROCKSDB_SHARED_LIB}
+    TARGETS ${ROCKSDB_SHARED_LIB} OPTIONAL
     EXPORT RocksDBTargets
     COMPONENT runtime
     ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
@@ -1,13 +0,0 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 598c728..816c705 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1242,7 +1242,7 @@ if(NOT WIN32 OR ROCKSDB_INSTALL_ON_WINDOWS)

 if(ROCKSDB_BUILD_SHARED)
   install(
-    TARGETS ${ROCKSDB_SHARED_LIB}
+    TARGETS ${ROCKSDB_SHARED_LIB} OPTIONAL
     EXPORT RocksDBTargets
     COMPONENT runtime
     ARCHIVE DESTINATION "${CMAKE_INSTALL_LIBDIR}"
@@ -71,8 +71,8 @@ file_get_try_double () {
  if [ -z "$primary_url" ]; then echo "Primary should not be empty." && exit 1; fi
  if [ -z "$secondary_url" ]; then echo "Secondary should not be empty." && exit 1; fi
  filename="$(basename "$secondary_url")"
-  # Redirect primary/cache to /dev/null to make it less confusing for a new contributor because only CI has access to the cache.
-  wget -nv "$primary_url" -O "$filename" >/dev/null 2>&1 || wget -nv "$secondary_url" -O "$filename" || exit 1
+  wget -nv "$primary_url" -O "$filename" || wget -nv "$secondary_url" -O "$filename" || exit 1
  echo ""
}

repo_clone_try_double () {
@@ -86,8 +86,8 @@ repo_clone_try_double () {
  if [ -z "$secondary_url" ]; then echo "Secondary should not be empty." && exit 1; fi
  if [ -z "$folder_name" ]; then echo "Clone folder should not be empty." && exit 1; fi
  if [ -z "$ref" ]; then echo "Git clone ref should not be empty." && exit 1; fi
-  # Redirect primary/cache to /dev/null to make it less confusing for a new contributor because only CI has access to the cache.
-  clone "$primary_url" "$folder_name" "$ref" "$shallow" >/dev/null 2>&1 || clone "$secondary_url" "$folder_name" "$ref" "$shallow" || exit 1
+  clone "$primary_url" "$folder_name" "$ref" "$shallow" || clone "$secondary_url" "$folder_name" "$ref" "$shallow" || exit 1
  echo ""
}
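# Illustrative call shapes, matching the invocations further down this script:
#   file_get_try_double "${primary_urls[name]}" "${secondary_urls[name]}"
#   repo_clone_try_double "${primary_urls[name]}" "${secondary_urls[name]}" "<folder>" "<git-ref>" [<shallow>]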

# List all dependencies.
@@ -116,18 +116,12 @@ declare -A primary_urls=(
  ["pymgclient"]="http://$local_cache_host/git/pymgclient.git"
  ["mgconsole"]="http://$local_cache_host/git/mgconsole.git"
  ["spdlog"]="http://$local_cache_host/git/spdlog"
-  ["nlohmann"]="http://$local_cache_host/file/nlohmann/json/4f8fba14066156b73f1189a2b8bd568bde5284c5/single_include/nlohmann/json.hpp"
-  ["neo4j"]="http://$local_cache_host/file/neo4j-community-5.6.0-unix.tar.gz"
+  ["nlohmann"]="http://$local_cache_host/file/nlohmann/json/9d69186291aca4f0137b69c1dee313b391ff564c/single_include/nlohmann/json.hpp"
+  ["neo4j"]="http://$local_cache_host/file/neo4j-community-3.2.3-unix.tar.gz"
  ["librdkafka"]="http://$local_cache_host/git/librdkafka.git"
  ["protobuf"]="http://$local_cache_host/git/protobuf.git"
  ["pulsar"]="http://$local_cache_host/git/pulsar.git"
  ["librdtsc"]="http://$local_cache_host/git/librdtsc.git"
  ["ctre"]="http://$local_cache_host/file/hanickadot/compile-time-regular-expressions/v3.7.2/single-header/ctre.hpp"
  ["absl"]="http://$local_cache_host/git/abseil-cpp.git"
  ["jemalloc"]="http://$local_cache_host/git/jemalloc.git"
  ["range-v3"]="http://$local_cache_host/git/range-v3.git"
  ["nuraft"]="http://$local_cache_host/git/NuRaft.git"
  ["asio"]="http://$local_cache_host/git/asio.git"
)

# The goal of secondary urls is to have links to the "source of truth" of
@@ -145,20 +139,14 @@ declare -A secondary_urls=(
  ["rocksdb"]="https://github.com/facebook/rocksdb.git"
  ["mgclient"]="https://github.com/memgraph/mgclient.git"
  ["pymgclient"]="https://github.com/memgraph/pymgclient.git"
-  ["mgconsole"]="https://github.com/memgraph/mgconsole.git"
+  ["mgconsole"]="http://github.com/memgraph/mgconsole.git"
  ["spdlog"]="https://github.com/gabime/spdlog"
-  ["nlohmann"]="https://raw.githubusercontent.com/nlohmann/json/4f8fba14066156b73f1189a2b8bd568bde5284c5/single_include/nlohmann/json.hpp"
-  ["neo4j"]="https://dist.neo4j.org/neo4j-community-5.6.0-unix.tar.gz"
+  ["nlohmann"]="https://raw.githubusercontent.com/nlohmann/json/9d69186291aca4f0137b69c1dee313b391ff564c/single_include/nlohmann/json.hpp"
+  ["neo4j"]="https://s3-eu-west-1.amazonaws.com/deps.memgraph.io/neo4j-community-3.2.3-unix.tar.gz"
  ["librdkafka"]="https://github.com/edenhill/librdkafka.git"
  ["protobuf"]="https://github.com/protocolbuffers/protobuf.git"
  ["pulsar"]="https://github.com/apache/pulsar.git"
  ["librdtsc"]="https://github.com/gabrieleara/librdtsc.git"
  ["ctre"]="https://raw.githubusercontent.com/hanickadot/compile-time-regular-expressions/v3.7.2/single-header/ctre.hpp"
  ["absl"]="https://github.com/abseil/abseil-cpp.git"
  ["jemalloc"]="https://github.com/jemalloc/jemalloc.git"
  ["range-v3"]="https://github.com/ericniebler/range-v3.git"
  ["nuraft"]="https://github.com/eBay/NuRaft.git"
  ["asio"]="https://github.com/chriskohlhoff/asio.git"
)
# antlr
@@ -170,11 +158,12 @@ pushd antlr4
git apply ../antlr4.10.1.patch
popd

-cppitertools_ref="v2.1" # 2021-01-15
+# cppitertools v2.0 2019-12-23
+cppitertools_ref="cb3635456bdb531121b82b4d2e3afc7ae1f56d47"
repo_clone_try_double "${primary_urls[cppitertools]}" "${secondary_urls[cppitertools]}" "cppitertools" "$cppitertools_ref"

# rapidcheck
-rapidcheck_tag="1c91f40e64d87869250cfb610376c629307bf77d" # (2023-08-15)
+rapidcheck_tag="7bc7d302191a4f3d0bf005692677126136e02f60" # (2020-05-04)
repo_clone_try_double "${primary_urls[rapidcheck]}" "${secondary_urls[rapidcheck]}" "rapidcheck" "$rapidcheck_tag"

# google benchmark
@@ -182,7 +171,7 @@ benchmark_tag="v1.6.0"
repo_clone_try_double "${primary_urls[gbenchmark]}" "${secondary_urls[gbenchmark]}" "benchmark" "$benchmark_tag" true

# google test
-googletest_tag="v1.14.0"
+googletest_tag="release-1.12.1"
repo_clone_try_double "${primary_urls[gtest]}" "${secondary_urls[gtest]}" "googletest" "$googletest_tag" true

# libbcrypt
@@ -191,9 +180,9 @@ repo_clone_try_double "${primary_urls[libbcrypt]}" "${secondary_urls[libbcrypt]}

# neo4j
file_get_try_double "${primary_urls[neo4j]}" "${secondary_urls[neo4j]}"
-tar -xzf neo4j-community-5.6.0-unix.tar.gz
-mv neo4j-community-5.6.0 neo4j
-rm neo4j-community-5.6.0-unix.tar.gz
+tar -xzf neo4j-community-3.2.3-unix.tar.gz
+mv neo4j-community-3.2.3 neo4j
+rm neo4j-community-3.2.3-unix.tar.gz

# nlohmann json
# We wget header instead of cloning repo since repo is huge (lots of test data).
@@ -203,10 +192,10 @@ cd json
file_get_try_double "${primary_urls[nlohmann]}" "${secondary_urls[nlohmann]}"
cd ..

-rocksdb_tag="v8.1.1" # (2023-04-21)
+rocksdb_tag="v6.14.6" # (2020-10-14)
repo_clone_try_double "${primary_urls[rocksdb]}" "${secondary_urls[rocksdb]}" "rocksdb" "$rocksdb_tag" true
pushd rocksdb
-git apply ../rocksdb8.1.1.patch
+git apply ../rocksdb.patch
popd
# mgclient
@@ -219,10 +208,10 @@ pymgclient_tag="4f85c179e56302d46a1e3e2cf43509db65f062b3" # (2021-01-15)
repo_clone_try_double "${primary_urls[pymgclient]}" "${secondary_urls[pymgclient]}" "pymgclient" "$pymgclient_tag"

# mgconsole
-mgconsole_tag="v1.4.0" # (2023-05-21)
+mgconsole_tag="v1.1.0" # (2021-10-07)
repo_clone_try_double "${primary_urls[mgconsole]}" "${secondary_urls[mgconsole]}" "mgconsole" "$mgconsole_tag" true

-spdlog_tag="v1.12.0" # (2022-11-02)
+spdlog_tag="v1.9.2" # (2021-08-12)
repo_clone_try_double "${primary_urls[spdlog]}" "${secondary_urls[spdlog]}" "spdlog" "$spdlog_tag" true

# librdkafka
@@ -249,46 +238,3 @@ repo_clone_try_double "${primary_urls[librdtsc]}" "${secondary_urls[librdtsc]}
pushd librdtsc
git apply ../librdtsc.patch
popd

# ctre
mkdir -p ctre
cd ctre
file_get_try_double "${primary_urls[ctre]}" "${secondary_urls[ctre]}"
cd ..

# abseil 20230125.3
absl_ref="20230125.3"
repo_clone_try_double "${primary_urls[absl]}" "${secondary_urls[absl]}" "absl" "$absl_ref"

# jemalloc ea6b3e973b477b8061e0076bb257dbd7f3faa756
JEMALLOC_COMMIT_VERSION="5.2.1"
repo_clone_try_double "${primary_urls[jemalloc]}" "${secondary_urls[jemalloc]}" "jemalloc" "$JEMALLOC_COMMIT_VERSION"

# This is a hack for the cmake setup in libs to set the path, and for FindJemalloc to use Jemalloc_INCLUDE_DIR.
pushd jemalloc

./autogen.sh
MALLOC_CONF="background_thread:true,retain:false,percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:5000,dirty_decay_ms:5000" \
./configure \
  --disable-cxx \
  --with-lg-page=12 \
  --with-lg-hugepage=21 \
  --enable-shared=no --prefix=$working_dir \
  --with-malloc-conf="background_thread:true,retain:false,percpu_arena:percpu,oversize_threshold:0,muzzy_decay_ms:5000,dirty_decay_ms:5000"

make -j$CPUS install
popd
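# Note: jemalloc also honors MALLOC_CONF from the environment at run time, so
# the defaults baked in above can still be overridden per process, e.g.
# (hypothetical invocation):
#   MALLOC_CONF="dirty_decay_ms:0" ./memgraph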

# range-v3 release-0.12.0
range_v3_ref="release-0.12.0"
repo_clone_try_double "${primary_urls[range-v3]}" "${secondary_urls[range-v3]}" "rangev3" "$range_v3_ref"

# NuRaft
nuraft_tag="v2.1.0"
repo_clone_try_double "${primary_urls[nuraft]}" "${secondary_urls[nuraft]}" "nuraft" "$nuraft_tag" true
pushd nuraft
git apply ../nuraft2.1.0.patch
asio_tag="asio-1-29-0"
repo_clone_try_double "${primary_urls[asio]}" "${secondary_urls[asio]}" "asio" "$asio_tag" true
./prepare.sh
popd
@@ -36,7 +36,7 @@ ADDITIONAL USE GRANT: You may use the Licensed Work in accordance with the
                      3. using the Licensed Work to create a work or solution
                         which competes (or might reasonably be expected to
                         compete) with the Licensed Work.
-CHANGE DATE:          2028-21-01
+CHANGE DATE:          2026-27-04
CHANGE LICENSE:       Apache License, Version 2.0

For information about alternative licensing arrangements, please visit: https://memgraph.com/legal.
@@ -2,8 +2,8 @@ MEMGRAPH
ENTERPRISE LICENCE AGREEMENT

-Memgraph Limited is registered in England under registration 10195084 and has its registered office at 90a High Street,
-Hertfordshire, Berkhamsted, HP4 2BL United Kingdom ("Memgraph").
+Memgraph Limited is registered in England under registration 10195084 and has its registered office at Suite 4,
+Ironstone House, Ironstone Way, Brixworth, Northampton, NN6 9UD (“Memgraph”).

Memgraph agrees to license and/or grant you (the “Customer”) access to the Software (as defined below) and provide
202 licenses/third-party/abseil-cpp/LICENSE vendored
@@ -1,202 +0,0 @@
[... standard Apache License, Version 2.0 boilerplate text (202 lines, deleted in full) ...]
@@ -1,218 +0,0 @@
[... standard Apache License, Version 2.0 boilerplate text plus the LLVM Exceptions rider (218 lines, deleted in full) ...]
202
licenses/third-party/ldbc/LICENSE
vendored
202
licenses/third-party/ldbc/LICENSE
vendored
@ -1,202 +0,0 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
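
As the appendix above notes, the notice is meant to be wrapped in the comment syntax of the file it heads. A minimal sketch of what that looks like for a C++ source file, such as the query modules in this diff (the file name, year, and owner below are placeholder values, not taken from this repository):

    // my_module.cpp -- illustrative file header only; name and purpose line
    // are the "printed page" identification the appendix recommends.
    //
    // Copyright 2024 Example Owner
    //
    // Licensed under the Apache License, Version 2.0 (the "License");
    // you may not use this file except in compliance with the License.
    // You may obtain a copy of the License at
    //
    //     http://www.apache.org/licenses/LICENSE-2.0
    //
    // Unless required by applicable law or agreed to in writing, software
    // distributed under the License is distributed on an "AS IS" BASIS,
    // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    // See the License for the specific language governing permissions and
    // limitations under the License.
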
@@ -6,81 +6,32 @@ project(memgraph_query_modules)

disallow_in_source_build()

find_package(fmt REQUIRED)

# Everything that is installed here, should be under the "query_modules" component.
set(CMAKE_INSTALL_DEFAULT_COMPONENT_NAME "query_modules")
string(TOLOWER ${CMAKE_BUILD_TYPE} lower_build_type)

add_library(example_c SHARED example.c)
target_include_directories(example_c PRIVATE ${CMAKE_SOURCE_DIR}/include)
target_compile_options(example_c PRIVATE -Wall)
target_link_libraries(example_c PRIVATE -static-libgcc -static-libstdc++)
# Strip C example in release build.
add_library(example SHARED example.c)
target_include_directories(example PRIVATE ${CMAKE_SOURCE_DIR}/include)
target_compile_options(example PRIVATE -Wall)

# Strip the library in release build.
string(TOLOWER ${CMAKE_BUILD_TYPE} lower_build_type)
if (lower_build_type STREQUAL "release")
  add_custom_command(TARGET example_c POST_BUILD
                     COMMAND strip -s $<TARGET_FILE:example_c>
                     COMMENT "Stripping symbols and sections from the C example module")
  add_custom_command(TARGET example POST_BUILD
                     COMMAND strip -s $<TARGET_FILE:example>
                     COMMENT "Stripping symbols and sections from example module")
endif()
install(PROGRAMS $<TARGET_FILE:example_c>

install(PROGRAMS $<TARGET_FILE:example>
        DESTINATION lib/memgraph/query_modules
        RENAME example_c.so)
        RENAME example.so)
# Also install the source of the example, so user can read it.
install(FILES example.c DESTINATION lib/memgraph/query_modules/src)

add_library(example_cpp SHARED example.cpp)
target_include_directories(example_cpp PRIVATE ${CMAKE_SOURCE_DIR}/include)
target_compile_options(example_cpp PRIVATE -Wall)
target_link_libraries(example_cpp PRIVATE -static-libgcc -static-libstdc++)
# Strip C++ example in release build.
if (lower_build_type STREQUAL "release")
  add_custom_command(TARGET example_cpp POST_BUILD
                     COMMAND strip -s $<TARGET_FILE:example_cpp>
                     COMMENT "Stripping symbols and sections from the C++ example module")
endif()
install(PROGRAMS $<TARGET_FILE:example_cpp>
        DESTINATION lib/memgraph/query_modules
        RENAME example_cpp.so)
# Also install the source of the example, so user can read it.
install(FILES example.cpp DESTINATION lib/memgraph/query_modules/src)

add_library(schema SHARED schema.cpp)
target_include_directories(schema PRIVATE ${CMAKE_SOURCE_DIR}/include)
target_compile_options(schema PRIVATE -Wall)
target_link_libraries(schema PRIVATE -static-libgcc -static-libstdc++)
# Strip C++ example in release build.
if (lower_build_type STREQUAL "release")
  add_custom_command(TARGET schema POST_BUILD
                     COMMAND strip -s $<TARGET_FILE:schema>
                     COMMENT "Stripping symbols and sections from the C++ schema module")
endif()
install(PROGRAMS $<TARGET_FILE:schema>
        DESTINATION lib/memgraph/query_modules
        RENAME schema.so)
# Also install the source of the example, so user can read it.
install(FILES schema.cpp DESTINATION lib/memgraph/query_modules/src)

add_library(text SHARED text_search_module.cpp)
target_include_directories(text PRIVATE ${CMAKE_SOURCE_DIR}/include)
target_compile_options(text PRIVATE -Wall)
target_link_libraries(text PRIVATE -static-libgcc -static-libstdc++ fmt::fmt)
# Strip C++ example in release build.
if (lower_build_type STREQUAL "release")
  add_custom_command(TARGET text POST_BUILD
                     COMMAND strip -s $<TARGET_FILE:text>
                     COMMENT "Stripping symbols and sections from the C++ text_search module")
endif()
install(PROGRAMS $<TARGET_FILE:text>
        DESTINATION lib/memgraph/query_modules
        RENAME text.so)
# Also install the source of the example, so user can read it.
install(FILES text_search_module.cpp DESTINATION lib/memgraph/query_modules/src)

# Install the Python example and modules
# Install the Python example
install(FILES example.py DESTINATION lib/memgraph/query_modules RENAME py_example.py)

# Install the Python modules
install(FILES graph_analyzer.py DESTINATION lib/memgraph/query_modules)
install(FILES mgp_networkx.py DESTINATION lib/memgraph/query_modules)
install(FILES nxalg.py DESTINATION lib/memgraph/query_modules)
install(FILES wcc.py DESTINATION lib/memgraph/query_modules)
install(FILES mgps.py DESTINATION lib/memgraph/query_modules)
install(FILES convert.py DESTINATION lib/memgraph/query_modules)

@@ -1,10 +0,0 @@
from json import loads

import mgp


@mgp.function
def str2object(string: str) -> mgp.Any:
    if string:
        return loads(string)
    return None
@@ -1,127 +0,0 @@
// Copyright 2023 Memgraph Ltd.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt; by using this file, you agree to be bound by the terms of the Business Source
// License, and you may not use this file except in compliance with the Business Source License.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.

#include <mgp.hpp>

void ProcImpl(std::vector<mgp::Value> arguments, mgp::Graph graph, mgp::RecordFactory record_factory) {
  auto record = record_factory.NewRecord();
  record.Insert("out", true);
}

void SampleReadProc(mgp_list *args, mgp_graph *memgraph_graph, mgp_result *result, mgp_memory *memory) {
  try {
    // The outcommented way of assigning the memory pointer is still
    // working, but it is deprecated because of certain concurrency
    // issues. Please use the guard instead.
    // mgp::memory = memory;
    mgp::MemoryDispatcherGuard guard(memory);

    std::vector<mgp::Value> arguments;
    for (size_t i = 0; i < mgp::list_size(args); i++) {
      auto arg = mgp::Value(mgp::list_at(args, i));
      arguments.push_back(arg);
    }

    ProcImpl(arguments, mgp::Graph(memgraph_graph), mgp::RecordFactory(result));
  } catch (const std::exception &e) {
    mgp::result_set_error_msg(result, e.what());
    return;
  }
}

void AddXNodes(mgp_list *args, mgp_graph *memgraph_graph, mgp_result *result, mgp_memory *memory) {
  // The outcommented way of assigning the memory pointer is still
  // working, but it is deprecated because of certain concurrency
  // issues. Please use the guard instead.
  // mgp::memory = memory;
  mgp::MemoryDispatcherGuard guard(memory);
  auto graph = mgp::Graph(memgraph_graph);

  std::vector<mgp::Value> arguments;
  for (size_t i = 0; i < mgp::list_size(args); i++) {
    auto arg = mgp::Value(mgp::list_at(args, i));
    arguments.push_back(arg);
  }

  for (int i = 0; i < arguments[0].ValueInt(); i++) {
    graph.CreateNode();
  }
}

void Multiply(mgp_list *args, mgp_func_context *ctx, mgp_func_result *res, mgp_memory *memory) {
  // The outcommented way of assigning the memory pointer is still
  // working, but it is deprecated because of certain concurrency
  // issues. Please use the guard instead.
  // mgp::memory = memory;
  mgp::MemoryDispatcherGuard guard(memory);

  std::vector<mgp::Value> arguments;
  for (size_t i = 0; i < mgp::list_size(args); i++) {
    auto arg = mgp::Value(mgp::list_at(args, i));
    arguments.push_back(arg);
  }

  auto result = mgp::Result(res);

  auto first = arguments[0].ValueInt();
  auto second = arguments[1].ValueInt();

  result.SetValue(first * second);
}

extern "C" int mgp_init_module(struct mgp_module *module, struct mgp_memory *memory) {
  try {
    // The outcommented way of assigning the memory pointer is still
    // working, but it is deprecated because of certain concurrency
    // issues. Please use the guard instead.
    // mgp::memory = memory;
    mgp::MemoryDispatcherGuard guard(memory);

    AddProcedure(SampleReadProc, "return_true", mgp::ProcedureType::Read,
                 {mgp::Parameter("param_1", mgp::Type::Int), mgp::Parameter("param_2", mgp::Type::Double, 2.3)},
                 {mgp::Return("out", mgp::Type::Bool)}, module, memory);
  } catch (const std::exception &e) {
    return 1;
  }

  try {
    // The outcommented way of assigning the memory pointer is still
    // working, but it is deprecated because of certain concurrency
    // issues. Please use the guard instead.
    // mgp::memory = memory;
    mgp::MemoryDispatcherGuard guard(memory);

    mgp::AddProcedure(AddXNodes, "add_x_nodes", mgp::ProcedureType::Write, {mgp::Parameter("param_1", mgp::Type::Int)},
                      {}, module, memory);

  } catch (const std::exception &e) {
    return 1;
  }

  try {
    // The outcommented way of assigning the memory pointer is still
    // working, but it is deprecated because of certain concurrency
    // issues. Please use the guard instead.
    // mgp::memory = memory;
    mgp::MemoryDispatcherGuard guard(memory);

    mgp::AddFunction(Multiply, "multiply",
                     {mgp::Parameter("int", mgp::Type::Int), mgp::Parameter("int", mgp::Type::Int, (int64_t)3)}, module,
                     memory);

  } catch (const std::exception &e) {
    return 1;
  }

  return 0;
}

extern "C" int mgp_shutdown_module() { return 0; }
@@ -1,8 +0,0 @@
import mgp


@mgp.read_proc
def components(
    context: mgp.ProcCtx,
) -> mgp.Record(versions=list, edition=str, name=str):
    return mgp.Record(versions=["5.9.0"], edition="community", name="Memgraph")
Some files were not shown because too many files have changed in this diff.