Compare commits
761 commits
v3.21.20-b
...
develop
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e995fc1c5f | ||
|
|
81d10bed0a | ||
|
|
d9071ad6d6 | ||
|
|
14ad0f7a19 | ||
|
|
afab2c6247 | ||
|
|
fa1eaf26f4 | ||
|
|
c2106ca487 | ||
|
|
507d693c83 | ||
|
|
87a935e63d | ||
|
|
105531b8ce | ||
|
|
8500dc66f9 | ||
|
|
c8fe0c6bf2 | ||
|
|
ccf6496958 | ||
|
|
c5b483da06 | ||
|
|
d103bc54fa | ||
|
|
be16229ec3 | ||
|
|
861115284f | ||
|
|
108f4f259f | ||
|
|
e21d6087fd | ||
|
|
0357060e63 | ||
|
|
5e370dece9 | ||
|
|
fdc10d7dc8 | ||
|
|
91db41c758 | ||
|
|
1d51dc9cd6 | ||
|
|
5bb37c36c3 | ||
|
|
ea8f4b03a6 | ||
|
|
d963256e66 | ||
|
|
ef02052ab5 | ||
|
|
6804a0afb1 | ||
|
|
f7dde80801 | ||
|
|
53780ad9b1 | ||
|
|
ddeb0d2e90 | ||
|
|
3d9666428f | ||
|
|
180a3554df | ||
|
|
178b479d73 | ||
|
|
5be73f5e20 | ||
|
|
7354cbd604 | ||
|
|
af244ab6fb | ||
|
|
fd6611dadc | ||
|
|
02d7124f24 | ||
|
|
d315f7e9f7 | ||
|
|
4d954d46f2 | ||
|
|
ed5b7919ab | ||
|
|
9e93225f8f | ||
|
|
625c91b9d3 | ||
|
|
3821ae520a | ||
|
|
53498e96c3 | ||
|
|
b0b7c786f2 | ||
|
|
dc7aa5c07c | ||
|
|
3621f2569f | ||
|
|
5fa054215f | ||
|
|
230c058993 | ||
|
|
7a303663ae | ||
|
|
727b7ae951 | ||
|
|
79ddbaf42c | ||
|
|
7e945976ff | ||
|
|
985bcfd2bb | ||
|
|
cf2b8e29cb | ||
|
|
13bddce444 | ||
|
|
927ebbd172 | ||
|
|
1b699934b7 | ||
|
|
c9f71055b6 | ||
|
|
b49b7bdd14 | ||
|
|
e62b6b9ef5 | ||
|
|
5d4a2fe715 | ||
|
|
52aa260371 | ||
|
|
88334da193 | ||
|
|
cbdceb450d | ||
|
|
a2fa230eae | ||
|
|
2496f74270 | ||
|
|
0ca9e5b90d | ||
|
|
43c3bd5ff6 | ||
|
|
ad1c57bc26 | ||
|
|
4239fbef9a | ||
|
|
0a79df9333 | ||
|
|
702e3d1186 | ||
|
|
40bec54eee | ||
|
|
a95aadcc7b | ||
|
|
c5fc95a86c | ||
|
|
3f18babc81 | ||
|
|
3458600254 | ||
|
|
48209f6035 | ||
|
|
73a23412ed | ||
|
|
db2cef397b | ||
|
|
3dbe04c500 | ||
|
|
9b46ca939a | ||
|
|
35853a7342 | ||
|
|
40cfe5f4fd | ||
|
|
cb4b792897 | ||
|
|
f74af24b37 | ||
|
|
9326d284a3 | ||
|
|
c2903613a0 | ||
|
|
0f600aac5f | ||
|
|
2568090c1e | ||
|
|
4895ebdd46 | ||
|
|
25bef13b58 | ||
|
|
fa6f52db73 | ||
|
|
4fcb65a8bf | ||
|
|
e36629dd74 | ||
|
|
f4161d3cf5 | ||
|
|
c2d9198cb9 | ||
|
|
524cc210b8 | ||
|
|
f6bdfb3c6a | ||
|
|
10857e1678 | ||
|
|
1ecd1eca10 | ||
|
|
da967142d6 | ||
|
|
1457bc5998 | ||
|
|
d8f197aaa9 | ||
|
|
14ef545d0c | ||
|
|
d281f59e20 | ||
|
|
14140db4b9 | ||
|
|
904eea8400 | ||
|
|
18be5b8e46 | ||
|
|
bec189419a | ||
|
|
a500ebb736 | ||
|
|
7ec84c4d78 | ||
|
|
e78c2f4cf7 | ||
|
|
5f0cf66b8d | ||
|
|
40f8d33777 | ||
|
|
5e1e6aa33e | ||
|
|
15deefc56d | ||
|
|
d164f08962 | ||
|
|
b362fc5983 | ||
|
|
73e5bd60d9 | ||
|
|
637515a108 | ||
|
|
09cdb05285 | ||
|
|
e4289884e6 | ||
|
|
2d3f239010 | ||
|
|
60bea3c847 | ||
|
|
4bf70a9888 | ||
|
|
f0ac50ba3e | ||
|
|
656c846813 | ||
|
|
dc6362df74 | ||
|
|
8f6a820f0b | ||
|
|
987e9b8fea | ||
|
|
a113f500bf | ||
|
|
794297efdb | ||
|
|
a6016523d8 | ||
|
|
d7ace8f9ea | ||
|
|
a4eae42594 | ||
|
|
81537637f7 | ||
|
|
bfce5c559f | ||
|
|
b457ce5e4c | ||
|
|
9e712a4a69 | ||
|
|
9d5f91f954 | ||
|
|
7ef4cdd831 | ||
|
|
6b2840df9a | ||
|
|
39d5edfc56 | ||
|
|
0b5c0ed889 | ||
|
|
383730e240 | ||
|
|
bd6566ba4e | ||
|
|
dec64398ba | ||
|
|
40f1254d71 | ||
|
|
ecb8988797 | ||
|
|
825c7c1697 | ||
|
|
114e9acf74 | ||
|
|
88037958d0 | ||
|
|
af9e359d58 | ||
|
|
269ffeddc2 | ||
|
|
650685f6ac | ||
|
|
c11d8aa414 | ||
|
|
696409afab | ||
|
|
98b5cd7469 | ||
|
|
f4804d2db9 | ||
|
|
74058a06cc | ||
|
|
f736af6313 | ||
|
|
96093ef381 | ||
|
|
f4627faf89 | ||
|
|
39589176de | ||
|
|
70c1c9febd | ||
|
|
c41a3c1ce4 | ||
|
|
b6932fae44 | ||
|
|
0a7925b4ad | ||
|
|
41dcae0af9 | ||
|
|
4d943ea2bd | ||
|
|
89aff818eb | ||
|
|
00d673991d | ||
|
|
e5620809d6 | ||
|
|
1c50a5ac0e | ||
|
|
064cec751d | ||
|
|
357694fa06 | ||
|
|
c95279b931 | ||
|
|
cfb901dae0 | ||
|
|
f17c2a9713 | ||
|
|
c37f659c49 | ||
|
|
13bfc34e5b | ||
|
|
1f5219b556 | ||
|
|
1a69e61eb6 | ||
|
|
1e0fdea3c3 | ||
|
|
a038d7249d | ||
|
|
2a2a7c27de | ||
|
|
8c49905aed | ||
|
|
fc4bda417a | ||
|
|
b620535dcc | ||
|
|
796f842dec | ||
|
|
511261b1ce | ||
|
|
102ce4c239 | ||
|
|
c31a5ec49f | ||
|
|
d03964332f | ||
|
|
5d42a01522 | ||
|
|
10a37eb8ce | ||
|
|
9b1d44cc8f | ||
|
|
5e131dd9c5 | ||
|
|
8b9117b0ae | ||
|
|
2d41c6fb2c | ||
|
|
99eb38ec8b | ||
|
|
72ae316af0 | ||
|
|
48ec3b7b78 | ||
|
|
65694e8525 | ||
|
|
5f43d0d2e6 | ||
|
|
da494229f1 | ||
|
|
318c1251f5 | ||
|
|
345232bf69 | ||
|
|
802953f405 | ||
|
|
a268ee7192 | ||
|
|
3186851d71 | ||
|
|
fabccee363 | ||
|
|
34a29d24bc | ||
|
|
4d5581e53f | ||
|
|
8668132655 | ||
|
|
764885d2fe | ||
|
|
778f891f61 | ||
|
|
b2dadb2363 | ||
|
|
146d91f919 | ||
|
|
cbde3d50c1 | ||
|
|
28aab7e663 | ||
|
|
009e179b98 | ||
|
|
ecdb07bfe7 | ||
|
|
e38329f484 | ||
|
|
77e4991346 | ||
|
|
7d03c5b5d5 | ||
|
|
417b20e6ee | ||
|
|
d06ddc0740 | ||
|
|
0e639d7ccc | ||
|
|
2b71169732 | ||
|
|
cc8c063aee | ||
|
|
2e6caf2960 | ||
|
|
7c782cb6f5 | ||
|
|
f13bd4d7aa | ||
|
|
dccaf3221f | ||
|
|
50afddea64 | ||
|
|
bb93c91152 | ||
|
|
7f9f6015c9 | ||
|
|
4d5d986038 | ||
|
|
36551c742a | ||
|
|
610dbc3c05 | ||
|
|
f7a2d36bba | ||
|
|
44c898679b | ||
|
|
640288c04e | ||
|
|
ad2678fef0 | ||
|
|
bbf1121478 | ||
|
|
edc17cbafd | ||
|
|
ac4a13ebd9 | ||
|
|
afb70f260c | ||
|
|
451ed039cd | ||
|
|
9336cbc08b | ||
|
|
980190e94c | ||
|
|
8c59f5b560 | ||
|
|
ce9f724de6 | ||
|
|
ae0e197adf | ||
|
|
b7af237500 | ||
|
|
f7d87f4131 | ||
|
|
8c77672a6c | ||
|
|
04202be837 | ||
|
|
f488d5e4f1 | ||
|
|
f3b9b3870e | ||
|
|
a4151fab31 | ||
|
|
75b1562da6 | ||
|
|
4196b30cb3 | ||
|
|
fb609b0ed5 | ||
|
|
4b61037603 | ||
|
|
3ddb59174c | ||
|
|
8db4ee0ca7 | ||
|
|
6fb7077eba | ||
|
|
b81ed81f93 | ||
|
|
6041a17265 | ||
|
|
50ab9fae32 | ||
|
|
5c09a1b16e | ||
|
|
c89455520c | ||
|
|
921714d8a9 | ||
|
|
a2eebf8595 | ||
|
|
58581e3a9b | ||
|
|
6be8c18887 | ||
|
|
c48a380d89 | ||
|
|
45150bcd81 | ||
|
|
9451002ba4 | ||
|
|
03b2d83732 | ||
|
|
add8a4e317 | ||
|
|
56b84982bc | ||
|
|
3899d9ba40 | ||
|
|
27e78409d7 | ||
|
|
3e78e1e319 | ||
|
|
d78f6ca853 | ||
|
|
9521b8e9d7 | ||
|
|
4ba1a066eb | ||
|
|
adda4e7f2e | ||
|
|
7c5718d873 | ||
|
|
f929911d54 | ||
|
|
dbe1d5ef91 | ||
|
|
4b1d4d466c | ||
|
|
717a629310 | ||
|
|
f531b80f0d | ||
|
|
7357042472 | ||
|
|
16846f1397 | ||
|
|
b4e98cd176 | ||
|
|
08a10b7e72 | ||
|
|
df311eeb03 | ||
|
|
ef9e2e37ae | ||
|
|
1b75d0ada8 | ||
|
|
d0261393dc | ||
|
|
2a813eb7a4 | ||
|
|
d0b49b962c | ||
|
|
72aba149df | ||
|
|
b0bcb2880c | ||
|
|
f3f9efaa8b | ||
|
|
dcc1406e8a | ||
|
|
23a2577d4d | ||
|
|
e370d9b6ae | ||
|
|
ddc47ef8df | ||
|
|
d815b7f9a6 | ||
|
|
033f9efd55 | ||
|
|
797a3778d0 | ||
|
|
3a05564099 | ||
|
|
7f7fbaae29 | ||
|
|
1ceb999d73 | ||
|
|
39bf3db655 | ||
|
|
267d6e7201 | ||
|
|
741e5df1d3 | ||
|
|
1eacba8ddc | ||
|
|
2828c12767 | ||
|
|
4fd8eb80e7 | ||
|
|
88ff720c37 | ||
|
|
3eb94c263f | ||
|
|
ceb00ec33b | ||
|
|
68df3ba0a9 | ||
|
|
cedfbbdeaa | ||
|
|
681e80ac01 | ||
|
|
75e59f56ce | ||
|
|
aa84035333 | ||
|
|
74fbc77f49 | ||
|
|
1b63833526 | ||
|
|
776cb46ad6 | ||
|
|
db952bc7f7 | ||
|
|
c9e5cb8562 | ||
|
|
27464b9221 | ||
|
|
61a8867c5c | ||
|
|
cdf86568e1 | ||
|
|
c23c0bfadc | ||
|
|
4d799b250b | ||
|
|
fc461798a4 | ||
|
|
6f289bd5a0 | ||
|
|
c707b2b635 | ||
|
|
9d47b638ae | ||
|
|
ace6ec9795 | ||
|
|
750380f781 | ||
|
|
ad1e671a5c | ||
|
|
42357386e9 | ||
|
|
ae45c1ebe7 | ||
|
|
b18a0eb6d8 | ||
|
|
e433551599 | ||
|
|
350129652e | ||
|
|
7c78b4c292 | ||
|
|
0659e7cdd3 | ||
|
|
3a9b9882e6 | ||
|
|
acdf6b8abb | ||
|
|
2804e39264 | ||
|
|
1e89efb4c6 | ||
|
|
777b149c42 | ||
|
|
492384aff6 | ||
|
|
4f51346960 | ||
|
|
dded9a4c9e | ||
|
|
35b85316ae | ||
|
|
d4ba0aaaae | ||
|
|
1638f15167 | ||
|
|
2907fb991d | ||
|
|
1c49e1aa3c | ||
|
|
2a1059774c | ||
|
|
460a4da2a6 | ||
|
|
44fc122d4c | ||
|
|
c8a322bf44 | ||
|
|
02ef742bbf | ||
|
|
adbefd9135 | ||
|
|
f72e32fabe | ||
|
|
1c728eb50d | ||
|
|
5a4507fba9 | ||
|
|
064b98aeb5 | ||
|
|
044d2aa648 | ||
|
|
e13ecb1f3e | ||
|
|
9995d6a505 | ||
|
|
0eb5b36b30 | ||
|
|
66cecf98fd | ||
|
|
c1db9379fd | ||
|
|
8deb03170b | ||
|
|
7e91607eac | ||
|
|
a921bef17a | ||
|
|
dbba507bdc | ||
|
|
3f5a5107d4 | ||
|
|
103c6ca268 | ||
|
|
d9aff720e4 | ||
|
|
627d1c8a0b | ||
|
|
820e213746 | ||
|
|
7c332a9491 | ||
|
|
9bb2c0d5ab | ||
|
|
c63d6ff002 | ||
|
|
1a2a2d643b | ||
|
|
c7a340b5bb | ||
|
|
897dbaf442 | ||
|
|
066a729325 | ||
|
|
862c06d2e0 | ||
|
|
d85af30d97 | ||
|
|
bb0608ce1b | ||
|
|
0669b799de | ||
|
|
393ff106ef | ||
|
|
762736d431 | ||
|
|
fd5ab07a3b | ||
|
|
f03f261059 | ||
|
|
89b4f44024 | ||
|
|
409696f798 | ||
|
|
aa2e9d959f | ||
|
|
433a0fbf88 | ||
|
|
9a855f4722 | ||
|
|
acc3a3b8af | ||
|
|
46af29a542 | ||
|
|
812d4e92a0 | ||
|
|
d2d76cab19 | ||
|
|
91f56fab5d | ||
|
|
9f3dc356c8 | ||
|
|
357573db60 | ||
|
|
e7fba25846 | ||
|
|
4ac81242dc | ||
|
|
f0126f8345 | ||
|
|
588f3f35d5 | ||
|
|
66cd31f8a7 | ||
|
|
a89ea366d3 | ||
|
|
d51e86dfe0 | ||
|
|
cf9afd12d8 | ||
|
|
82f5b6c651 | ||
|
|
6cb2aa0a3a | ||
|
|
e1b474bdc4 | ||
|
|
51e329123a | ||
|
|
c6c3d03c1e | ||
|
|
c5fc4e2b2a | ||
|
|
235a3e39d3 | ||
|
|
886af71905 | ||
|
|
dadc2c0756 | ||
|
|
44a82740b3 | ||
|
|
724952835e | ||
|
|
e162f0e444 | ||
|
|
c6d9cb4aac | ||
|
|
9c9d98bcf4 | ||
|
|
de0638b993 | ||
|
|
ed2dbe4938 | ||
|
|
e9d6282c40 | ||
|
|
e678669ea6 | ||
|
|
f1e04ce5bc | ||
|
|
273f420cd3 | ||
|
|
a7cc192fce | ||
|
|
0d90a329f7 | ||
|
|
6bbdd76ebd | ||
|
|
ae89645fb7 | ||
|
|
b5ab988397 | ||
|
|
101650a0ba | ||
|
|
d743c33933 | ||
|
|
24c5a08d33 | ||
|
|
f5c0cf35a7 | ||
|
|
231f44c066 | ||
|
|
88e069c8d4 | ||
|
|
e48a40a081 | ||
|
|
6a0472641b | ||
|
|
7f0fb2ff04 | ||
|
|
5a4e30495d | ||
|
|
6daf862997 | ||
|
|
bee5513f0b | ||
|
|
6056a3e420 | ||
|
|
f954edcb86 | ||
|
|
4fa379e0f4 | ||
|
|
7d3c1895b5 | ||
|
|
14774cdb70 | ||
|
|
03b3d05ef2 | ||
|
|
1087ec76a1 | ||
|
|
eeb7b3b7a6 | ||
|
|
b6556fe4da | ||
|
|
6027e5eb03 | ||
|
|
6c2367f23d | ||
|
|
163ae40ba6 | ||
|
|
05f1293cca | ||
|
|
12862f3010 | ||
|
|
c8f32b522e | ||
|
|
b08730bf4d | ||
|
|
2f8236cb8e | ||
|
|
cf1a58b2b1 | ||
|
|
718dfeed37 | ||
|
|
d04b7f22b0 | ||
|
|
6bce6ed4e7 | ||
|
|
ca2514e284 | ||
|
|
204983dee5 | ||
|
|
a3ca14eb97 | ||
|
|
ca13f81147 | ||
|
|
5185951b21 | ||
|
|
c9998f02c2 | ||
|
|
450bf4a12c | ||
|
|
48b0194de8 | ||
|
|
c255f95a6e | ||
|
|
efbe3556e7 | ||
|
|
56fd2ff5e3 | ||
|
|
9c160bc04a | ||
|
|
32b0f7785a | ||
|
|
98f21440e3 | ||
|
|
107a185a8c | ||
|
|
d278946373 | ||
|
|
7f58263378 | ||
|
|
8870c472be | ||
|
|
46626980dc | ||
|
|
c2e1564533 | ||
|
|
373082c880 | ||
|
|
ebcd5936f5 | ||
|
|
33caa93fd6 | ||
|
|
b6b23c8c40 | ||
|
|
6d272cc63e | ||
|
|
ccffc0fabc | ||
|
|
03e074ad48 | ||
|
|
c9a95dfaa6 | ||
|
|
08f22b68d0 | ||
|
|
6bb03104dc | ||
|
|
7d4709efe7 | ||
|
|
662a4f3ae0 | ||
|
|
0e4e23879a | ||
|
|
97b2ee83ed | ||
|
|
c491dadfd8 | ||
|
|
d364827ba7 | ||
|
|
4acc87eb36 | ||
|
|
7540c7dfc5 | ||
|
|
84913a658f | ||
|
|
117e2ef27c | ||
|
|
9d92b4b848 | ||
|
|
1f7e2e9d45 | ||
|
|
738ecf1576 | ||
|
|
9cf86c862e | ||
|
|
e25f1ae6f5 | ||
|
|
dcf1e4c16d | ||
|
|
52b15814fe | ||
|
|
04d1c25f0b | ||
|
|
9c2ac21206 | ||
|
|
9569be1412 | ||
|
|
7f95723f43 | ||
|
|
762c42fccb | ||
|
|
df07b3216b | ||
|
|
56b50c8ab5 | ||
|
|
08aca1e21e | ||
|
|
7539a385b6 | ||
|
|
a945731e6a | ||
|
|
60f69b79a2 | ||
|
|
1df259e816 | ||
|
|
a01089680e | ||
|
|
b2b8de1a3d | ||
|
|
e4dafda7a8 | ||
|
|
87ab1b12da | ||
|
|
760b2ae895 | ||
|
|
10b334b5a4 | ||
|
|
bf36a125db | ||
|
|
2dbe792741 | ||
|
|
b8115a8ef1 | ||
|
|
b165ead455 | ||
|
|
618ac7399a | ||
|
|
0752bb3fbf | ||
|
|
664828e8c3 | ||
|
|
f836095a54 | ||
|
|
1568fe1331 | ||
|
|
0d6a364fe5 | ||
|
|
91364a2460 | ||
|
|
ea2327066a | ||
|
|
b2305bf9aa | ||
|
|
f9dc6e3afb | ||
|
|
b2cc63f6a8 | ||
|
|
ffb4e2ecbd | ||
|
|
c8168fa0e1 | ||
|
|
8427b2ab95 | ||
|
|
1025196829 | ||
|
|
6f93abf529 | ||
|
|
402b9d1d20 | ||
|
|
e3436d75a1 | ||
|
|
1476e102a5 | ||
|
|
8440a41b4b | ||
|
|
59f50898ff | ||
|
|
13793ff148 | ||
|
|
692a816aeb | ||
|
|
1c84f10dc0 | ||
|
|
d553e47fc2 | ||
|
|
81f13c41ff | ||
|
|
a5f53b9493 | ||
|
|
ca04352ba6 | ||
|
|
6b9b902137 | ||
|
|
8790c674ae | ||
|
|
b44cac88b4 | ||
|
|
0a47a27d7a | ||
|
|
b179e92bb5 | ||
|
|
f976ab01e9 | ||
|
|
ae7e7431b8 | ||
|
|
9f3ed98f2b | ||
|
|
d34d0c10fe | ||
|
|
bcba8d6467 | ||
|
|
d4aea0f621 | ||
|
|
e91ff22a3b | ||
|
|
bc4e5fe48a | ||
|
|
b7b8a54c41 | ||
|
|
1e6688415b | ||
|
|
aa8fe75998 | ||
|
|
a4bc5b92e9 | ||
|
|
1ac3451102 | ||
|
|
6b490d019e | ||
|
|
7a9099c173 | ||
|
|
5f319aa71c | ||
|
|
29460a2d9a | ||
|
|
f6357fede7 | ||
|
|
61cc77bbcb | ||
|
|
12d9d89e4b | ||
|
|
10e54ae276 | ||
|
|
a9f8a15b98 | ||
|
|
bdcfba912a | ||
|
|
a1d586e1bf | ||
|
|
e1d9544c05 | ||
|
|
e22c30a679 | ||
|
|
228c80bbd5 | ||
|
|
08552c750d | ||
|
|
6f82a769e6 | ||
|
|
c7dac6cfd1 | ||
|
|
d0c27ae6e3 | ||
|
|
870923aea0 | ||
|
|
555bc93da1 | ||
|
|
5221dc268b | ||
|
|
2ddac4d119 | ||
|
|
97c8302376 | ||
|
|
ca87b15366 | ||
|
|
7d4eed342c | ||
|
|
52d256ec5e | ||
|
|
69e979987e | ||
|
|
b15bafe1c2 | ||
|
|
c6c5270f73 | ||
|
|
2c5b7d2415 | ||
|
|
5c70d0a838 | ||
|
|
60fa3a4d7d | ||
|
|
7843a7b4fd | ||
|
|
549683d090 | ||
|
|
af38c008c7 | ||
|
|
0f4e89f06e | ||
|
|
ed099dd70b | ||
|
|
bc336a1fd4 | ||
|
|
4146961a04 | ||
|
|
18aa06288d | ||
|
|
4cbabf8cae | ||
|
|
c32c562a6e | ||
|
|
84e2abd8f6 | ||
|
|
16b9d92146 | ||
|
|
2204623574 | ||
|
|
a49692e0a7 | ||
|
|
77abba5e68 | ||
|
|
3f51421330 | ||
|
|
d1414fd2da | ||
|
|
dfb7669283 | ||
|
|
95b043a382 | ||
|
|
c9fe192c44 | ||
|
|
a6b3f486e3 | ||
|
|
d6cf5365e6 | ||
|
|
ca85271ccb | ||
|
|
7789284ef8 | ||
|
|
6e216dc444 | ||
|
|
756cf88a2f | ||
|
|
3891f1952c | ||
|
|
d3b190f712 | ||
|
|
6bf74ffb23 | ||
|
|
cea8e70028 | ||
|
|
e6ef9b6c5e | ||
|
|
cdcd05330b | ||
|
|
567fbbd4af | ||
|
|
dc22bafac5 | ||
|
|
0a699a74bd | ||
|
|
741f00bf7c | ||
|
|
d648d14f79 | ||
|
|
12d0760816 | ||
|
|
538516016b | ||
|
|
14c3071efa | ||
|
|
0b60e50f12 | ||
|
|
1da955cbda | ||
|
|
0edfc084cc | ||
|
|
f77f4e7432 | ||
|
|
d74107afcf | ||
|
|
708955146a | ||
|
|
e5c9ab8a81 | ||
|
|
9d3fcffebc | ||
|
|
afcbef5dc8 | ||
|
|
45395aeb2f | ||
|
|
0c9dead723 | ||
|
|
70dbe32b32 | ||
|
|
9f9473a4d5 | ||
|
|
515bfb216d | ||
|
|
ac103dcd22 | ||
|
|
17169cb3ac | ||
|
|
6aa68ed9e3 | ||
|
|
22030ed16e | ||
|
|
c850549e41 | ||
|
|
be07d87824 | ||
|
|
84c65b6bcd | ||
|
|
78b26487cc | ||
|
|
2f42d6427c | ||
|
|
44434dcf50 | ||
|
|
c46cb7d8b9 | ||
|
|
c5962e50fb | ||
|
|
978dbf51d2 | ||
|
|
11a4e1ec2f | ||
|
|
a07e21fa21 | ||
|
|
82929db3d3 | ||
|
|
855ab19f4a | ||
|
|
8d8201bc32 | ||
|
|
b283fe9781 | ||
|
|
b7ff0a9fd6 | ||
|
|
057168b95b | ||
|
|
67548b68f0 | ||
|
|
3bb580198e | ||
|
|
a7195b588e | ||
|
|
d36ed961c3 | ||
|
|
1246f23fc6 | ||
|
|
6e2eb08afc | ||
|
|
14b3318b34 | ||
|
|
cddd5ea8d4 | ||
|
|
77af1217be | ||
|
|
c22ae7f495 | ||
|
|
3b2a2dddae | ||
|
|
895451a6c5 | ||
|
|
751eaaa56b | ||
|
|
a4ff317901 | ||
|
|
8e808e09d6 | ||
|
|
a07a867748 | ||
|
|
ebd18a8757 | ||
|
|
2c4c659217 | ||
|
|
795762b10c | ||
|
|
ac0a288891 | ||
|
|
79e872226d | ||
|
|
accd1ed04b | ||
|
|
8126623dd5 | ||
|
|
86f0288442 | ||
|
|
3c797da2c8 | ||
|
|
1eed85af9e | ||
|
|
572310af62 | ||
|
|
43c6735dd5 | ||
|
|
ba3912ce53 | ||
|
|
b420cfd498 | ||
|
|
8a7dc03e8a | ||
|
|
e4228489b7 | ||
|
|
91d9da714e | ||
|
|
ef7a535362 | ||
|
|
62ef4461c3 | ||
|
|
cf03b5ef51 | ||
|
|
c290082868 | ||
|
|
23315c57dd | ||
|
|
7e76d54688 | ||
|
|
b0a9b4f258 | ||
|
|
5739a7fee5 | ||
|
|
8425cc2045 | ||
|
|
2e86a9e83c | ||
|
|
a25a320b9d | ||
|
|
122474ada3 |
203
.github/workflows/cloud-frontend-cf-pages-prod.yml
vendored
Normal file
|
|
@ -0,0 +1,203 @@
|
|||
name: Deploy to Cloudflare Pages prod (Cloud Frontend)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch:
|
||||
description: 'Git branch to deploy (must start with "lts-", e.g., lts-3.6)'
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: ✅ Check user authorization
|
||||
run: |
|
||||
allowed_user1=${{ secrets.ALLOWED_USER1_USERNAME }}
|
||||
allowed_user2=${{ secrets.ALLOWED_USER2_USERNAME }}
|
||||
allowed_user3=${{ secrets.ALLOWED_USER3_USERNAME }}
|
||||
|
||||
if [[ "${{ github.actor }}" != "$allowed_user1" && \
|
||||
"${{ github.actor }}" != "$allowed_user2" && \
|
||||
"${{ github.actor }}" != "$allowed_user3" ]]; then
|
||||
echo "❌ User '${{ github.actor }}' is not authorized to trigger this workflow."
|
||||
exit 1
|
||||
else
|
||||
echo "✅ User '${{ github.actor }}' is authorized."
|
||||
fi
|
||||
|
||||
- name: 📥 Manual Git checkout with submodules
|
||||
run: |
|
||||
set -e
|
||||
|
||||
BRANCH="${{ github.event.inputs.branch }}"
|
||||
REPO="https://x-access-token:${{ secrets.CUSTOM_GITHUB_TOKEN }}@github.com/${{ github.repository }}"
|
||||
|
||||
git config --global url."https://x-access-token:${{ secrets.CUSTOM_GITHUB_TOKEN }}@github.com/".insteadOf "https://github.com/"
|
||||
git config --global http.version HTTP/1.1
|
||||
git config --global http.postBuffer 524288000
|
||||
|
||||
echo "👉 Cloning $REPO (branch: $BRANCH)"
|
||||
git clone --recurse-submodules --depth=1 --branch "$BRANCH" "$REPO" repo
|
||||
cd repo
|
||||
|
||||
echo "🔎 Main repo: verifying checkout"
|
||||
MAIN_CURRENT=$(git rev-parse --abbrev-ref HEAD)
|
||||
echo "✅ Main repo: successfully checked out branch $MAIN_CURRENT"
|
||||
echo "📍 Main repo: current commit $(git rev-parse --short HEAD): $(git log -1 --pretty=%s)"
|
||||
|
||||
echo "🔁 Updating submodules"
|
||||
git submodule update --init --recursive
|
||||
|
||||
echo "🔀 Attempting to checkout '$BRANCH' in each submodule and validating"
|
||||
|
||||
BRANCH="$BRANCH" git submodule foreach --recursive bash -c '
|
||||
name="$sm_path"
|
||||
echo ""
|
||||
echo "Entering '\''$name'\''"
|
||||
echo "↪ $name: trying to checkout branch '\''$BRANCH'\''"
|
||||
|
||||
if git ls-remote --exit-code --heads origin "$BRANCH" >/dev/null; then
|
||||
git fetch origin "$BRANCH:$BRANCH" || {
|
||||
echo "❌ $name: fetch failed for $BRANCH"
|
||||
exit 1
|
||||
}
|
||||
|
||||
PREV=$(git rev-parse --short HEAD || echo "unknown")
|
||||
git checkout "$BRANCH" || {
|
||||
echo "❌ $name: checkout failed for $BRANCH"
|
||||
exit 1
|
||||
}
|
||||
|
||||
echo "Previous HEAD position was $PREV: $(git log -1 --pretty=%s || echo 'unknown')"
|
||||
echo "✅ $name: checked out branch $BRANCH"
|
||||
else
|
||||
echo "⚠️ $name: branch '$BRANCH' not found on origin. Falling back to 'lts-3.16'"
|
||||
PREV=$(git rev-parse --short HEAD || echo "unknown")
|
||||
git fetch origin lts-3.16:lts-3.16 || {
|
||||
echo "❌ $name: fetch failed for lts-3.16"
|
||||
exit 1
|
||||
}
|
||||
git checkout lts-3.16 || {
|
||||
echo "❌ $name: fallback to lts-3.16 failed"
|
||||
exit 1
|
||||
}
|
||||
echo "Previous HEAD position was $PREV: $(git log -1 --pretty=%s || echo 'unknown')"
|
||||
echo "✅ $name: now on branch lts-3.16"
|
||||
fi
|
||||
|
||||
CURRENT=$(git rev-parse --abbrev-ref HEAD)
|
||||
echo "🔎 $name: current branch = $CURRENT"
|
||||
if [ "$CURRENT" != "$BRANCH" ] && [ "$CURRENT" != "lts-3.16" ]; then
|
||||
echo "❌ $name: unexpected branch state — wanted '$BRANCH' or fallback 'lts-3.16', got '$CURRENT'"
|
||||
exit 1
|
||||
fi
|
||||
'
|
||||
|
||||
- name: 🧰 Setup Node.js
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 22.15.1
|
||||
|
||||
- name: 📦 Install dependencies
|
||||
run: npm install
|
||||
working-directory: repo
|
||||
|
||||
- name: 🛠️ Build the project
|
||||
run: npm run build:plugins:prod && npm run build:frontend:cloud
|
||||
working-directory: repo
|
||||
env:
|
||||
GOOGLE_MAPS_API_KEY: ${{ secrets.CLOUD_PROD_CLOUD_GOOGLE_MAPS_API_KEY }}
|
||||
NODE_ENV: ${{ secrets.CLOUD_PROD_CLOUD_NODE_ENV }}
|
||||
NODE_OPTIONS: ${{ secrets.CLOUD_PROD_CLOUD_NODE_OPTIONS }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.CLOUD_PROD_CLOUD_SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.CLOUD_PROD_CLOUD_SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.CLOUD_PROD_CLOUD_SENTRY_PROJECT }}
|
||||
SERVE_CLIENT: ${{ secrets.CLOUD_PROD_CLOUD_SERVE_CLIENT }}
|
||||
SERVER_IP: ${{ secrets.CLOUD_PROD_CLOUD_SERVER_IP }}
|
||||
TJDB_SQL_MODE_DISABLE: ${{ secrets.CLOUD_PROD_CLOUD_TJDB_SQL_MODE_DISABLE }}
|
||||
TOOLJET_SERVER_URL: ${{ secrets.CLOUD_PROD_CLOUD_TOOLJET_SERVER_URL }}
|
||||
WEBSITE_SIGNUP_URL: https://www.tooljet.com/create-account
|
||||
TOOLJET_EDITION: cloud
|
||||
|
||||
- name: 📝 Add SPA routing redirect rule
|
||||
run: echo "/* /index.html 200" > repo/frontend/build/_redirects
|
||||
|
||||
- name: 🔧 Set CF Pages production branch to input branch
|
||||
run: |
|
||||
echo "🔄 Updating CF Pages production branch to: ${{ github.event.inputs.branch }}"
|
||||
response=$(curl -s -w "\n%{http_code}" -X PATCH \
|
||||
"https://api.cloudflare.com/client/v4/accounts/${{ secrets.CF_PAGES_ACCOUNT_ID }}/pages/projects/${{ secrets.CF_PAGES_PROJECT_NAME_PROD }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.CF_PAGES_API_TOKEN }}" \
|
||||
-H "Content-Type: application/json" \
|
||||
--data '{"production_branch": "${{ github.event.inputs.branch }}"}')
|
||||
|
||||
http_code=$(echo "$response" | tail -n1)
|
||||
if [ "$http_code" = "200" ]; then
|
||||
echo "✅ Production branch updated to: ${{ github.event.inputs.branch }}"
|
||||
else
|
||||
echo "❌ Failed to update production branch (HTTP $http_code)"
|
||||
echo "$response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: 🚀 Deploy to Cloudflare Pages
|
||||
run: |
|
||||
echo "📦 Built from source branch: ${{ github.event.inputs.branch }}"
|
||||
echo "🎯 Targeting CF Pages production slot (branch alias: ${{ github.event.inputs.branch }})"
|
||||
npx wrangler pages deploy frontend/build \
|
||||
--project-name=${{ secrets.CF_PAGES_PROJECT_NAME_PROD }} \
|
||||
--branch=${{ github.event.inputs.branch }} \
|
||||
--commit-dirty=true
|
||||
working-directory: repo
|
||||
env:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CF_PAGES_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CF_PAGES_ACCOUNT_ID }}
|
||||
|
||||
purge_cache:
|
||||
needs: deploy
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: ✅ Check user authorization
|
||||
run: |
|
||||
allowed_user1=${{ secrets.ALLOWED_USER1_USERNAME }}
|
||||
allowed_user2=${{ secrets.ALLOWED_USER2_USERNAME }}
|
||||
allowed_user3=${{ secrets.ALLOWED_USER3_USERNAME }}
|
||||
|
||||
if [[ "${{ github.actor }}" != "$allowed_user1" && \
|
||||
"${{ github.actor }}" != "$allowed_user2" && \
|
||||
"${{ github.actor }}" != "$allowed_user3" ]]; then
|
||||
echo "❌ User '${{ github.actor }}' is not authorized to trigger this workflow."
|
||||
exit 1
|
||||
else
|
||||
echo "✅ User '${{ github.actor }}' is authorized."
|
||||
fi
|
||||
|
||||
- name: 🧹 Purge Cloudflare Cache
|
||||
continue-on-error: true
|
||||
run: |
|
||||
echo "🔄 Purging Cloudflare cache for specific URLs..."
|
||||
response=$(curl -s -w "\n%{http_code}" -X POST \
|
||||
"https://api.cloudflare.com/client/v4/zones/${{ secrets.CLOUDFLARE_ZONE_ID_PROD }}/purge_cache" \
|
||||
-H "Authorization: Bearer ${{ secrets.CLOUDFLARE_API_TOKEN_PROD }}" \
|
||||
-H "Content-Type: application/json" \
|
||||
--data '{
|
||||
"files": [
|
||||
"${{ secrets.CLOUDFLARE_CONFIG_URL_PROD }}",
|
||||
"${{ secrets.CLOUDFLARE_METADATA_URL_PROD }}"
|
||||
]
|
||||
}')
|
||||
|
||||
http_code=$(echo "$response" | tail -n1)
|
||||
body=$(echo "$response" | sed '$d')
|
||||
|
||||
if [ "$http_code" = "200" ]; then
|
||||
echo "✅ Cloudflare cache purged successfully for specified URLs"
|
||||
echo "$body"
|
||||
else
|
||||
echo "⚠️ Cloudflare cache purge failed with status code: $http_code"
|
||||
echo "$body"
|
||||
exit 1
|
||||
fi
|
||||
203
.github/workflows/cloud-frontend-cf-pages-stage.yml
vendored
Normal file
|
|
@ -0,0 +1,203 @@
|
|||
name: Deploy to Cloudflare Pages stage (Cloud Frontend)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch:
|
||||
description: 'Git branch to deploy (must start with "lts-", e.g., lts-3.6)'
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: ✅ Check user authorization
|
||||
run: |
|
||||
allowed_user1=${{ secrets.ALLOWED_USER1_USERNAME }}
|
||||
allowed_user2=${{ secrets.ALLOWED_USER2_USERNAME }}
|
||||
allowed_user3=${{ secrets.ALLOWED_USER3_USERNAME }}
|
||||
|
||||
if [[ "${{ github.actor }}" != "$allowed_user1" && \
|
||||
"${{ github.actor }}" != "$allowed_user2" && \
|
||||
"${{ github.actor }}" != "$allowed_user3" ]]; then
|
||||
echo "❌ User '${{ github.actor }}' is not authorized to trigger this workflow."
|
||||
exit 1
|
||||
else
|
||||
echo "✅ User '${{ github.actor }}' is authorized."
|
||||
fi
|
||||
|
||||
- name: 📥 Manual Git checkout with submodules
|
||||
run: |
|
||||
set -e
|
||||
|
||||
BRANCH="${{ github.event.inputs.branch }}"
|
||||
REPO="https://x-access-token:${{ secrets.CUSTOM_GITHUB_TOKEN }}@github.com/${{ github.repository }}"
|
||||
|
||||
git config --global url."https://x-access-token:${{ secrets.CUSTOM_GITHUB_TOKEN }}@github.com/".insteadOf "https://github.com/"
|
||||
git config --global http.version HTTP/1.1
|
||||
git config --global http.postBuffer 524288000
|
||||
|
||||
echo "👉 Cloning $REPO (branch: $BRANCH)"
|
||||
git clone --recurse-submodules --depth=1 --branch "$BRANCH" "$REPO" repo
|
||||
cd repo
|
||||
|
||||
echo "🔎 Main repo: verifying checkout"
|
||||
MAIN_CURRENT=$(git rev-parse --abbrev-ref HEAD)
|
||||
echo "✅ Main repo: successfully checked out branch $MAIN_CURRENT"
|
||||
echo "📍 Main repo: current commit $(git rev-parse --short HEAD): $(git log -1 --pretty=%s)"
|
||||
|
||||
echo "🔁 Updating submodules"
|
||||
git submodule update --init --recursive
|
||||
|
||||
echo "🔀 Attempting to checkout '$BRANCH' in each submodule and validating"
|
||||
|
||||
BRANCH="$BRANCH" git submodule foreach --recursive bash -c '
|
||||
name="$sm_path"
|
||||
echo ""
|
||||
echo "Entering '\''$name'\''"
|
||||
echo "↪ $name: trying to checkout branch '\''$BRANCH'\''"
|
||||
|
||||
if git ls-remote --exit-code --heads origin "$BRANCH" >/dev/null; then
|
||||
git fetch origin "$BRANCH:$BRANCH" || {
|
||||
echo "❌ $name: fetch failed for $BRANCH"
|
||||
exit 1
|
||||
}
|
||||
|
||||
PREV=$(git rev-parse --short HEAD || echo "unknown")
|
||||
git checkout "$BRANCH" || {
|
||||
echo "❌ $name: checkout failed for $BRANCH"
|
||||
exit 1
|
||||
}
|
||||
|
||||
echo "Previous HEAD position was $PREV: $(git log -1 --pretty=%s || echo 'unknown')"
|
||||
echo "✅ $name: checked out branch $BRANCH"
|
||||
else
|
||||
echo "⚠️ $name: branch '$BRANCH' not found on origin. Falling back to 'lts-3.16'"
|
||||
PREV=$(git rev-parse --short HEAD || echo "unknown")
|
||||
git fetch origin lts-3.16:lts-3.16 || {
|
||||
echo "❌ $name: fetch failed for lts-3.16"
|
||||
exit 1
|
||||
}
|
||||
git checkout lts-3.16 || {
|
||||
echo "❌ $name: fallback to lts-3.16 failed"
|
||||
exit 1
|
||||
}
|
||||
echo "Previous HEAD position was $PREV: $(git log -1 --pretty=%s || echo 'unknown')"
|
||||
echo "✅ $name: now on branch lts-3.16"
|
||||
fi
|
||||
|
||||
CURRENT=$(git rev-parse --abbrev-ref HEAD)
|
||||
echo "🔎 $name: current branch = $CURRENT"
|
||||
if [ "$CURRENT" != "$BRANCH" ] && [ "$CURRENT" != "lts-3.16" ]; then
|
||||
echo "❌ $name: unexpected branch state — wanted '$BRANCH' or fallback 'lts-3.16', got '$CURRENT'"
|
||||
exit 1
|
||||
fi
|
||||
'
|
||||
|
||||
- name: 🧰 Setup Node.js
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 22.15.1
|
||||
|
||||
- name: 📦 Install dependencies
|
||||
run: npm install
|
||||
working-directory: repo
|
||||
|
||||
- name: 🛠️ Build the project
|
||||
run: npm run build:plugins:prod && npm run build:frontend:cloud
|
||||
working-directory: repo
|
||||
env:
|
||||
GOOGLE_MAPS_API_KEY: ${{ secrets.CLOUD_GOOGLE_MAPS_API_KEY }}
|
||||
NODE_ENV: ${{ secrets.CLOUD_NODE_ENV }}
|
||||
NODE_OPTIONS: ${{ secrets.CLOUD_NODE_OPTIONS }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.CLOUD_SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.CLOUD_SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.CLOUD_SENTRY_PROJECT }}
|
||||
SERVE_CLIENT: ${{ secrets.CLOUD_SERVE_CLIENT }}
|
||||
SERVER_IP: ${{ secrets.CLOUD_SERVER_IP }}
|
||||
TJDB_SQL_MODE_DISABLE: ${{ secrets.CLOUD_TJDB_SQL_MODE_DISABLE }}
|
||||
TOOLJET_SERVER_URL: ${{ secrets.CLOUD_TOOLJET_SERVER_URL }}
|
||||
TOOLJET_EDITION: cloud
|
||||
WEBSITE_SIGNUP_URL: https://website-stage.tooljet.ai/signup
|
||||
|
||||
- name: 📝 Add SPA routing redirect rule
|
||||
run: echo "/* /index.html 200" > repo/frontend/build/_redirects
|
||||
|
||||
- name: 🔧 Set CF Pages production branch to input branch
|
||||
run: |
|
||||
echo "🔄 Updating CF Pages production branch to: ${{ github.event.inputs.branch }}"
|
||||
response=$(curl -s -w "\n%{http_code}" -X PATCH \
|
||||
"https://api.cloudflare.com/client/v4/accounts/${{ secrets.CF_PAGES_ACCOUNT_ID }}/pages/projects/${{ secrets.CF_PAGES_PROJECT_NAME }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.CF_PAGES_API_TOKEN }}" \
|
||||
-H "Content-Type: application/json" \
|
||||
--data '{"production_branch": "${{ github.event.inputs.branch }}"}')
|
||||
|
||||
http_code=$(echo "$response" | tail -n1)
|
||||
if [ "$http_code" = "200" ]; then
|
||||
echo "✅ Production branch updated to: ${{ github.event.inputs.branch }}"
|
||||
else
|
||||
echo "❌ Failed to update production branch (HTTP $http_code)"
|
||||
echo "$response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: 🚀 Deploy to Cloudflare Pages
|
||||
run: |
|
||||
echo "📦 Built from source branch: ${{ github.event.inputs.branch }}"
|
||||
echo "🎯 Targeting CF Pages production slot (branch alias: ${{ github.event.inputs.branch }})"
|
||||
npx wrangler pages deploy frontend/build \
|
||||
--project-name=${{ secrets.CF_PAGES_PROJECT_NAME }} \
|
||||
--branch=${{ github.event.inputs.branch }} \
|
||||
--commit-dirty=true
|
||||
working-directory: repo
|
||||
env:
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CF_PAGES_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CF_PAGES_ACCOUNT_ID }}
|
||||
|
||||
purge_cache:
|
||||
needs: deploy
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: ✅ Check user authorization
|
||||
run: |
|
||||
allowed_user1=${{ secrets.ALLOWED_USER1_USERNAME }}
|
||||
allowed_user2=${{ secrets.ALLOWED_USER2_USERNAME }}
|
||||
allowed_user3=${{ secrets.ALLOWED_USER3_USERNAME }}
|
||||
|
||||
if [[ "${{ github.actor }}" != "$allowed_user1" && \
|
||||
"${{ github.actor }}" != "$allowed_user2" && \
|
||||
"${{ github.actor }}" != "$allowed_user3" ]]; then
|
||||
echo "❌ User '${{ github.actor }}' is not authorized to trigger this workflow."
|
||||
exit 1
|
||||
else
|
||||
echo "✅ User '${{ github.actor }}' is authorized."
|
||||
fi
|
||||
|
||||
- name: 🧹 Purge Cloudflare Cache
|
||||
continue-on-error: true
|
||||
run: |
|
||||
echo "🔄 Purging Cloudflare cache for specific URLs..."
|
||||
response=$(curl -s -w "\n%{http_code}" -X POST \
|
||||
"https://api.cloudflare.com/client/v4/zones/${{ secrets.CLOUDFLARE_ZONE_ID_PROD }}/purge_cache" \
|
||||
-H "Authorization: Bearer ${{ secrets.CLOUDFLARE_API_TOKEN_PROD }}" \
|
||||
-H "Content-Type: application/json" \
|
||||
--data '{
|
||||
"files": [
|
||||
"${{ secrets.CLOUDFLARE_CONFIG_URL_STAGE }}",
|
||||
"${{ secrets.CLOUDFLARE_METADATA_URL_STAGE }}"
|
||||
]
|
||||
}')
|
||||
|
||||
http_code=$(echo "$response" | tail -n1)
|
||||
body=$(echo "$response" | sed '$d')
|
||||
|
||||
if [ "$http_code" = "200" ]; then
|
||||
echo "✅ Cloudflare cache purged successfully for specified URLs"
|
||||
echo "$body"
|
||||
else
|
||||
echo "⚠️ Cloudflare cache purge failed with status code: $http_code"
|
||||
echo "$body"
|
||||
exit 1
|
||||
fi
|
||||
133
.github/workflows/cloud-frontend-gcp.yml
vendored
|
|
@ -1,133 +0,0 @@
|
|||
name: Deploy to cloud frontend stage
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch:
|
||||
description: 'Git branch to deploy (must start with "lts-", e.g., lts-3.6)'
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: ✅ Check user authorization
|
||||
run: |
|
||||
allowed_user1=${{ secrets.ALLOWED_USER1_USERNAME }}
|
||||
allowed_user2=${{ secrets.ALLOWED_USER2_USERNAME }}
|
||||
allowed_user3=${{ secrets.ALLOWED_USER3_USERNAME }}
|
||||
|
||||
if [[ "${{ github.actor }}" != "$allowed_user1" && \
|
||||
"${{ github.actor }}" != "$allowed_user2" && \
|
||||
"${{ github.actor }}" != "$allowed_user3" ]]; then
|
||||
echo "❌ User '${{ github.actor }}' is not authorized to trigger this workflow."
|
||||
exit 1
|
||||
else
|
||||
echo "✅ User '${{ github.actor }}' is authorized."
|
||||
fi
|
||||
|
||||
- name: 📥 Manual Git checkout with submodules
|
||||
run: |
|
||||
set -e
|
||||
|
||||
BRANCH="${{ github.event.inputs.branch }}"
|
||||
REPO="https://x-access-token:${{ secrets.CUSTOM_GITHUB_TOKEN }}@github.com/${{ github.repository }}"
|
||||
|
||||
git config --global url."https://x-access-token:${{ secrets.CUSTOM_GITHUB_TOKEN }}@github.com/".insteadOf "https://github.com/"
|
||||
git config --global http.version HTTP/1.1
|
||||
git config --global http.postBuffer 524288000
|
||||
|
||||
echo "👉 Cloning $REPO (branch: $BRANCH)"
|
||||
git clone --recurse-submodules --depth=1 --branch "$BRANCH" "$REPO" repo
|
||||
cd repo
|
||||
|
||||
echo "🔁 Updating submodules"
|
||||
git submodule update --init --recursive
|
||||
|
||||
echo "🔀 Attempting to checkout '$BRANCH' in each submodule and validating"
|
||||
|
||||
BRANCH="$BRANCH" git submodule foreach --recursive bash -c '
|
||||
name="$sm_path"
|
||||
echo ""
|
||||
echo "Entering '\''$name'\''"
|
||||
echo "↪ $name: trying to checkout branch '\''$BRANCH'\''"
|
||||
|
||||
if git ls-remote --exit-code --heads origin "$BRANCH" >/dev/null; then
|
||||
git fetch origin "$BRANCH:$BRANCH" || {
|
||||
echo "❌ $name: fetch failed for $BRANCH"
|
||||
exit 1
|
||||
}
|
||||
|
||||
PREV=$(git rev-parse --short HEAD || echo "unknown")
|
||||
git checkout "$BRANCH" || {
|
||||
echo "❌ $name: checkout failed for $BRANCH"
|
||||
exit 1
|
||||
}
|
||||
|
||||
echo "Previous HEAD position was $PREV: $(git log -1 --pretty=%s || echo 'unknown')"
|
||||
echo "✅ $name: checked out branch $BRANCH"
|
||||
else
|
||||
echo "⚠️ $name: branch '$BRANCH' not found on origin. Falling back to 'main'"
|
||||
PREV=$(git rev-parse --short HEAD || echo "unknown")
|
||||
git checkout main && git pull origin main || {
|
||||
echo "❌ $name: fallback to main failed"
|
||||
exit 1
|
||||
}
|
||||
echo "Previous HEAD position was $PREV: $(git log -1 --pretty=%s || echo 'unknown')"
|
||||
echo "✅ $name: now on branch main"
|
||||
fi
|
||||
|
||||
CURRENT=$(git rev-parse --abbrev-ref HEAD)
|
||||
echo "🔎 $name: current branch = $CURRENT"
|
||||
if [ "$CURRENT" != "$BRANCH" ] && [ "$CURRENT" != "main" ]; then
|
||||
echo "❌ $name: unexpected branch state — wanted '$BRANCH' or fallback 'main', got '$CURRENT'"
|
||||
exit 1
|
||||
fi
|
||||
'
|
||||
|
||||
- name: 🧰 Setup Node.js
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 22.15.1
|
||||
|
||||
- name: 📦 Install dependencies
|
||||
run: npm install
|
||||
working-directory: repo
|
||||
|
||||
- name: 🛠️ Build the project
|
||||
run: npm run build:plugins:prod && npm run build:frontend
|
||||
working-directory: repo
|
||||
env:
|
||||
GOOGLE_MAPS_API_KEY: ${{ secrets.CLOUD_GOOGLE_MAPS_API_KEY }}
|
||||
NODE_ENV: ${{ secrets.CLOUD_NODE_ENV }}
|
||||
NODE_OPTIONS: ${{ secrets.CLOUD_NODE_OPTIONS }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.CLOUD_SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.CLOUD_SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.CLOUD_SENTRY_PROJECT }}
|
||||
SERVE_CLIENT: ${{ secrets.CLOUD_SERVE_CLIENT }}
|
||||
SERVER_IP: ${{ secrets.CLOUD_SERVER_IP }}
|
||||
TJDB_SQL_MODE_DISABLE: ${{ secrets.CLOUD_TJDB_SQL_MODE_DISABLE }}
|
||||
TOOLJET_SERVER_URL: ${{ secrets.CLOUD_TOOLJET_SERVER_URL }}
|
||||
TOOLJET_EDITION: cloud
|
||||
WEBSITE_SIGNUP_URL: https://website-stage.tooljet.ai/ai-create-account
|
||||
|
||||
- name: 🚀 Deploy to Netlify
|
||||
run: |
|
||||
npm install -g netlify-cli
|
||||
netlify deploy --prod --dir=frontend/build --auth=$NETLIFY_AUTH_TOKEN --site=${{ secrets.CLOUD_NETLIFY_SITE_ID }}
|
||||
working-directory: repo
|
||||
env:
|
||||
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
|
||||
GOOGLE_MAPS_API_KEY: ${{ secrets.CLOUD_GOOGLE_MAPS_API_KEY }}
|
||||
NODE_ENV: ${{ secrets.CLOUD_NODE_ENV }}
|
||||
NODE_OPTIONS: ${{ secrets.CLOUD_NODE_OPTIONS }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.CLOUD_SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.CLOUD_SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.CLOUD_SENTRY_PROJECT }}
|
||||
SERVE_CLIENT: ${{ secrets.CLOUD_SERVE_CLIENT }}
|
||||
SERVER_IP: ${{ secrets.CLOUD_SERVER_IP }}
|
||||
TJDB_SQL_MODE_DISABLE: ${{ secrets.CLOUD_TJDB_SQL_MODE_DISABLE }}
|
||||
TOOLJET_SERVER_URL: ${{ secrets.CLOUD_TOOLJET_SERVER_URL }}
|
||||
WEBSITE_SIGNUP_URL: https://website-stage.tooljet.ai/ai-create-account
|
||||
TOOLJET_EDITION: cloud
|
||||
72
.github/workflows/cloud-frontend.yml
vendored
|
|
@ -42,6 +42,11 @@ jobs:
|
|||
git clone --recurse-submodules --depth=1 --branch "$BRANCH" "$REPO" repo
|
||||
cd repo
|
||||
|
||||
echo "🔎 Main repo: verifying checkout"
|
||||
MAIN_CURRENT=$(git rev-parse --abbrev-ref HEAD)
|
||||
echo "✅ Main repo: successfully checked out branch $MAIN_CURRENT"
|
||||
echo "📍 Main repo: current commit $(git rev-parse --short HEAD): $(git log -1 --pretty=%s)"
|
||||
|
||||
echo "🔁 Updating submodules"
|
||||
git submodule update --init --recursive
|
||||
|
||||
|
|
@ -68,20 +73,24 @@ jobs:
|
|||
echo "Previous HEAD position was $PREV: $(git log -1 --pretty=%s || echo 'unknown')"
|
||||
echo "✅ $name: checked out branch $BRANCH"
|
||||
else
|
||||
echo "⚠️ $name: branch '$BRANCH' not found on origin. Falling back to 'main'"
|
||||
echo "⚠️ $name: branch '$BRANCH' not found on origin. Falling back to 'lts-3.16'"
|
||||
PREV=$(git rev-parse --short HEAD || echo "unknown")
|
||||
git checkout main && git pull origin main || {
|
||||
echo "❌ $name: fallback to main failed"
|
||||
git fetch origin lts-3.16:lts-3.16 || {
|
||||
echo "❌ $name: fetch failed for lts-3.16"
|
||||
exit 1
|
||||
}
|
||||
git checkout lts-3.16 || {
|
||||
echo "❌ $name: fallback to lts-3.16 failed"
|
||||
exit 1
|
||||
}
|
||||
echo "Previous HEAD position was $PREV: $(git log -1 --pretty=%s || echo 'unknown')"
|
||||
echo "✅ $name: now on branch main"
|
||||
echo "✅ $name: now on branch lts-3.16"
|
||||
fi
|
||||
|
||||
CURRENT=$(git rev-parse --abbrev-ref HEAD)
|
||||
echo "🔎 $name: current branch = $CURRENT"
|
||||
if [ "$CURRENT" != "$BRANCH" ] && [ "$CURRENT" != "main" ]; then
|
||||
echo "❌ $name: unexpected branch state — wanted '$BRANCH' or fallback 'main', got '$CURRENT'"
|
||||
if [ "$CURRENT" != "$BRANCH" ] && [ "$CURRENT" != "lts-3.16" ]; then
|
||||
echo "❌ $name: unexpected branch state — wanted '$BRANCH' or fallback 'lts-3.16', got '$CURRENT'"
|
||||
exit 1
|
||||
fi
|
||||
'
|
||||
|
|
@ -109,7 +118,7 @@ jobs:
|
|||
SERVER_IP: ${{ secrets.CLOUD_PROD_CLOUD_SERVER_IP }}
|
||||
TJDB_SQL_MODE_DISABLE: ${{ secrets.CLOUD_TJDB_SQL_MODE_DISABLE }}
|
||||
TOOLJET_SERVER_URL: ${{ secrets.CLOUD_TOOLJET_SERVER_URL }}
|
||||
WEBSITE_SIGNUP_URL: https://tooljet.ai/ai-create-account
|
||||
WEBSITE_SIGNUP_URL: https://www.tooljet.com/create-account
|
||||
TOOLJET_EDITION: cloud
|
||||
|
||||
- name: 🚀 Deploy to Netlify
|
||||
|
|
@ -129,5 +138,52 @@ jobs:
|
|||
SERVER_IP: ${{ secrets.CLOUD_PROD_CLOUD_SERVER_IP }}
|
||||
TJDB_SQL_MODE_DISABLE: ${{ secrets.CLOUD_PROD_TJDB_SQL_MODE_DISABLE }}
|
||||
TOOLJET_SERVER_URL: ${{ secrets.CLOUD_PROD_TOOLJET_SERVER_URL }}
|
||||
WEBSITE_SIGNUP_URL: https://tooljet.ai/ai-create-account
|
||||
WEBSITE_SIGNUP_URL: https://www.tooljet.com/create-account
|
||||
TOOLJET_EDITION: cloud
|
||||
|
||||
Purge_Cloudflare_Cache:
|
||||
needs: deploy
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: ✅ Check user authorization
|
||||
run: |
|
||||
allowed_user1=${{ secrets.ALLOWED_USER1_USERNAME }}
|
||||
allowed_user2=${{ secrets.ALLOWED_USER2_USERNAME }}
|
||||
allowed_user3=${{ secrets.ALLOWED_USER3_USERNAME }}
|
||||
|
||||
if [[ "${{ github.actor }}" != "$allowed_user1" && \
|
||||
"${{ github.actor }}" != "$allowed_user2" && \
|
||||
"${{ github.actor }}" != "$allowed_user3" ]]; then
|
||||
echo "❌ User '${{ github.actor }}' is not authorized to trigger this workflow."
|
||||
exit 1
|
||||
else
|
||||
echo "✅ User '${{ github.actor }}' is authorized."
|
||||
fi
|
||||
|
||||
- name: 🧹 Purge Cloudflare Cache
|
||||
continue-on-error: true
|
||||
run: |
|
||||
echo "🔄 Purging Cloudflare cache for specific URLs..."
|
||||
response=$(curl -s -w "\n%{http_code}" -X POST \
|
||||
"https://api.cloudflare.com/client/v4/zones/${{ secrets.CLOUDFLARE_ZONE_ID_PROD }}/purge_cache" \
|
||||
-H "Authorization: Bearer ${{ secrets.CLOUDFLARE_API_TOKEN_PROD }}" \
|
||||
-H "Content-Type: application/json" \
|
||||
--data '{
|
||||
"files": [
|
||||
"${{ secrets.CLOUDFLARE_CONFIG_URL_PROD }}",
|
||||
"${{ secrets.CLOUDFLARE_METADATA_URL_PROD }}"
|
||||
]
|
||||
}')
|
||||
|
||||
http_code=$(echo "$response" | tail -n1)
|
||||
body=$(echo "$response" | sed '$d')
|
||||
|
||||
if [ "$http_code" = "200" ]; then
|
||||
echo "✅ Cloudflare cache purged successfully for specified URLs"
|
||||
echo "$body"
|
||||
else
|
||||
echo "⚠️ Cloudflare cache purge failed with status code: $http_code"
|
||||
echo "$body"
|
||||
exit 1
|
||||
fi
|
||||
|
|
|
|||
53
.github/workflows/code-coverage.yml
vendored
|
|
@ -1,13 +1,13 @@
|
|||
name: Cypress Code-Coverage
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [labeled, unlabeled, closed]
|
||||
pull_request:
|
||||
types: [labeled, unlabeled, synchronize, closed]
|
||||
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
PR_NUMBER: ${{ github.event.number }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
|
||||
|
||||
jobs:
|
||||
|
|
@ -15,26 +15,34 @@ jobs:
|
|||
name: Code coverage
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
if: ${{ github.event.action == 'labeled' && (github.event.label.name == 'check-coverage') }}
|
||||
if: >-
|
||||
(
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'check-coverage')
|
||||
|| (github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'check-coverage'))
|
||||
)
|
||||
&& github.event.pull_request.head.repo.full_name == github.repository
|
||||
|
||||
steps:
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v2
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 18.18.2
|
||||
|
||||
- name: Set up Docker
|
||||
uses: docker-practice/actions-setup-docker@master
|
||||
|
||||
- name: Run PosgtreSQL Database Docker Container
|
||||
- name: Run PostgreSQL Database Docker Container
|
||||
run: |
|
||||
sudo docker network create tooljet
|
||||
sudo docker run -d --name postgres --network tooljet -p 5432:5432 -e POSTGRES_PASSWORD=postgres -e POSTGRES_USER=postgres -e POSTGRES_PORT=5432 -d postgres:13
|
||||
sudo docker run -d --name postgres --network tooljet \
|
||||
-p 5432:5432 \
|
||||
-e POSTGRES_PASSWORD=postgres \
|
||||
-e POSTGRES_USER=postgres \
|
||||
-e POSTGRES_PORT=5432 \
|
||||
postgres:13
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install and build dependencies
|
||||
run: |
|
||||
|
|
@ -61,7 +69,7 @@ jobs:
|
|||
echo "TOOLJET_DB_PASS=postgres" >> .env
|
||||
echo "PGRST_JWT_SECRET=r9iMKoe5CRMgvJBBtp4HrqN7QiPpUToj" >> .env
|
||||
echo "PGRST_HOST=localhost:3001" >> .env
|
||||
echo "NODE_ENV=developement" >> .env
|
||||
echo "NODE_ENV=development" >> .env
|
||||
|
||||
- name: Set up database
|
||||
run: |
|
||||
|
|
@ -69,14 +77,16 @@ jobs:
|
|||
npm run --prefix server db:reset
|
||||
npm run --prefix server db:seed
|
||||
|
||||
- name: sleep 5
|
||||
- name: Wait for database seed to settle
|
||||
run: sleep 5
|
||||
|
||||
- name: Run PostgREST Docker Container
|
||||
run: |
|
||||
sudo docker run -d --name postgrest --network tooljet -p 3001:3000 \
|
||||
-e PGRST_DB_URI="postgres://postgres:postgres@postgres:5432/tooljet" -e PGRST_DB_ANON_ROLE="postgres" -e PGRST_JWT_SECRET="r9iMKoe5CRMgvJBBtp4HrqN7QiPpUToj" \
|
||||
postgrest/postgrest:v10.1.1.20221215
|
||||
-e PGRST_DB_URI="postgres://postgres:postgres@postgres:5432/tooljet" \
|
||||
-e PGRST_DB_ANON_ROLE="postgres" \
|
||||
-e PGRST_JWT_SECRET="r9iMKoe5CRMgvJBBtp4HrqN7QiPpUToj" \
|
||||
postgrest/postgrest:v10.1.1.20221215
|
||||
|
||||
- name: Run plugins compilation in watch mode
|
||||
run: cd plugins && npm start &
|
||||
|
|
@ -94,7 +104,7 @@ jobs:
|
|||
sleep 5
|
||||
done'
|
||||
|
||||
- name: docker logs
|
||||
- name: PostgREST logs
|
||||
run: sudo docker logs postgrest
|
||||
|
||||
- name: Create Cypress environment file
|
||||
|
|
@ -107,25 +117,22 @@ jobs:
|
|||
|
||||
- name: Install Cypress
|
||||
working-directory: ./cypress-tests
|
||||
run: |
|
||||
npm install
|
||||
|
||||
run: npm install
|
||||
|
||||
- name: Run Cypress tests
|
||||
working-directory: ./cypress-tests
|
||||
run: |
|
||||
npm run cy:run
|
||||
run: npm run cy:run
|
||||
|
||||
- name: Debugging
|
||||
if: always()
|
||||
run: |
|
||||
ls -R cypress-tests
|
||||
ls -R /home/runner/work/ToolJet/ToolJet/cypress-tests
|
||||
cat /home/runner/work/ToolJet/ToolJet/cypress-tests/.nyc_output/out.json
|
||||
|
||||
|
||||
- name: Upload Coverage Report
|
||||
uses: actions/upload-artifact@v3
|
||||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: coverage
|
||||
path: cypress-tests/coverage
|
||||
path: cypress-tests/coverage
|
||||
|
|
|
|||
374
.github/workflows/cypress-marketplace.yml
vendored
|
|
@ -6,51 +6,65 @@ on:
|
|||
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
|
||||
env:
|
||||
PR_NUMBER: ${{ github.event.number }}
|
||||
BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
|
||||
TIMESTAMP: ${{ github.run_number }}-${{ github.run_attempt }}
|
||||
|
||||
jobs:
|
||||
Cypress-Marketplace:
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
if: contains(github.event.pull_request.labels.*.name, 'run-cypress') ||
|
||||
contains(github.event.pull_request.labels.*.name, 'run-cypress-marketplace-ce') ||
|
||||
contains(github.event.pull_request.labels.*.name, 'run-cypress-marketplace-ee') ||
|
||||
contains(github.event.pull_request.labels.*.name, 'run-cypress-ce')
|
||||
if: contains(github.event.pull_request.labels.*.name, 'run-cypress-marketplace-ee')
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
edition: >-
|
||||
${{
|
||||
contains(github.event.pull_request.labels.*.name, 'run-cypress') && fromJson('["ce", "ee"]') ||
|
||||
contains(github.event.pull_request.labels.*.name, 'run-cypress-ce') && fromJson('["ce"]') ||
|
||||
contains(github.event.pull_request.labels.*.name, 'run-cypress-marketplace-ce') && fromJson('["ce"]') ||
|
||||
contains(github.event.pull_request.labels.*.name, 'run-cypress-marketplace-ee') && fromJson('["ee"]') ||
|
||||
fromJson('[]')
|
||||
}}
|
||||
edition:
|
||||
- ee
|
||||
|
||||
steps:
|
||||
- name: Debug labels and matrix edition
|
||||
run: |
|
||||
echo "Labels: ${{ toJSON(github.event.pull_request.labels.*.name) }}"
|
||||
echo "Matrix edition: ${{ matrix.edition }}"
|
||||
|
||||
- name: Free up disk space
|
||||
run: |
|
||||
echo "Available disk space before cleanup:"
|
||||
df -h
|
||||
|
||||
# Remove unnecessary packages
|
||||
sudo apt-get remove -y '^aspnetcore-.*' '^dotnet-.*' '^llvm-.*' '^php.*' '^mongodb-.*' '^mysql-.*' azure-cli google-cloud-sdk hhvm firefox powershell mono-devel || true
|
||||
sudo apt-get autoremove -y
|
||||
sudo apt-get clean
|
||||
|
||||
# Remove large directories
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /usr/local/lib/android
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo rm -rf /usr/local/share/boost
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
|
||||
# Clean Docker
|
||||
docker system prune -af --volumes
|
||||
|
||||
echo "Available disk space after cleanup:"
|
||||
df -h
|
||||
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
|
||||
# Create Docker Buildx builder with platform configuration
|
||||
- name: Set up Docker Buildx
|
||||
run: |
|
||||
mkdir -p ~/.docker/cli-plugins
|
||||
curl -SL https://github.com/docker/buildx/releases/download/v0.11.0/buildx-v0.11.0.linux-amd64 -o ~/.docker/cli-plugins/docker-buildx
|
||||
chmod a+x ~/.docker/cli-plugins/docker-buildx
|
||||
docker buildx create --name mybuilder --platform linux/arm64,linux/amd64
|
||||
docker buildx use mybuilder
|
||||
|
||||
- name: Set DOCKER_CLI_EXPERIMENTAL
|
||||
run: echo "DOCKER_CLI_EXPERIMENTAL=enabled" >> $GITHUB_ENV
|
||||
|
||||
- name: use mybuilder buildx
|
||||
run: docker buildx use mybuilder
|
||||
|
||||
- name: Docker Login
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
|
|
@ -60,34 +74,37 @@ jobs:
|
|||
- name: Set SAFE_BRANCH_NAME
|
||||
run: echo "SAFE_BRANCH_NAME=$(echo ${{ env.BRANCH_NAME }} | tr '/' '-')" >> $GITHUB_ENV
|
||||
|
||||
- name: Build CE Docker image
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
file: docker/ce-production.Dockerfile
|
||||
push: true
|
||||
tags: tooljet/tj-osv:${{ env.SAFE_BRANCH_NAME }}-ce
|
||||
platforms: linux/amd64
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Log selected matrix
|
||||
run: |
|
||||
echo "Selected edition: ${{ matrix.edition }}"
|
||||
echo "Matrix: ${{ toJSON(matrix) }}"
|
||||
|
||||
- name: Build EE Docker image
|
||||
if: matrix.edition == 'ee'
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
file: docker/ee/ee-production.Dockerfile
|
||||
push: true
|
||||
tags: tooljet/tj-osv:${{ env.SAFE_BRANCH_NAME }}-ee
|
||||
platforms: linux/amd64
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
run: |
|
||||
echo "Building EE Docker image..."
|
||||
docker buildx build \
|
||||
--platform=linux/amd64 \
|
||||
-f cypress-tests/cypress-lts.Dockerfile \
|
||||
--build-arg CUSTOM_GITHUB_TOKEN=${{ secrets.CUSTOM_GITHUB_TOKEN }} \
|
||||
--build-arg BRANCH_NAME=${{ github.event.pull_request.head.ref }} \
|
||||
-t tooljet/tj-osv:${{ env.SAFE_BRANCH_NAME }}-ee \
|
||||
--no-cache \
|
||||
--load \
|
||||
.
|
||||
|
||||
echo "Pushing EE Docker image..."
|
||||
docker push tooljet/tj-osv:${{ env.SAFE_BRANCH_NAME }}-ee
|
||||
|
||||
echo "Cleaning up build cache..."
|
||||
docker builder prune -af
|
||||
|
||||
echo "Disk space after build:"
|
||||
df -h
|
||||
|
||||
- name: Set up environment variables
|
||||
run: |
|
||||
echo "TOOLJET_EDITION=${{ matrix.edition == 'ee' && 'EE' || 'CE' }}" >> .env
|
||||
echo "TOOLJET_EDITION=${{ matrix.edition }}" >> .env
|
||||
echo "TOOLJET_HOST=http://localhost:3000" >> .env
|
||||
echo "LOCKBOX_MASTER_KEY=cd97331a419c09387bef49787f7da8d2a81d30733f0de6bed23ad8356d2068b2" >> .env
|
||||
echo "SECRET_KEY_BASE=7073b9a35a15dd20914ae17e36a693093f25b74b96517a5fec461fc901c51e011cd142c731bee48c5081ec8bac321c1f259ef097ef2a16f25df17a3798c03426" >> .env
|
||||
|
|
@ -97,37 +114,61 @@ jobs:
|
|||
echo "PG_PASS=postgres" >> .env
|
||||
echo "PG_PORT=5432" >> .env
|
||||
echo "ENABLE_TOOLJET_DB=true" >> .env
|
||||
echo "PGRST_DB_PRE_CONFIG=postgrest.pre_config" >> .env
|
||||
echo "TOOLJET_DB=tooljet_db" >> .env
|
||||
echo "TOOLJET_DB_USER=postgres" >> .env
|
||||
echo "TOOLJET_DB_HOST=postgres" >> .env
|
||||
echo "TOOLJET_DB_PASS=postgres" >> .env
|
||||
echo "TOOLJET_DB_STATEMENT_TIMEOUT=60000" >> .env
|
||||
echo "TOOLJET_DB_RECONFIG=true" >> .env
|
||||
echo "PGRST_JWT_SECRET=r9iMKoe5CRMgvJBBtp4HrqN7QiPpUToj" >> .env
|
||||
echo "PGRST_HOST=postgrest" >> .env
|
||||
echo "PGRST_HOST=localhost:3001" >> .env
|
||||
echo "PGRST_DB_PRE_CONFIG=postgrest.pre_config" >> .env
|
||||
echo "PGRST_DB_URI=postgres://postgres:postgres@postgres/tooljet_db" >> .env
|
||||
echo "SSO_GIT_OAUTH2_CLIENT_ID=dummy" >> .env
|
||||
echo "SSO_GIT_OAUTH2_CLIENT_SECRET=dummy" >> .env
|
||||
echo "SSO_GIT_OAUTH2_HOST=dummy" >> .env
|
||||
echo "SSO_GOOGLE_OAUTH2_CLIENT_ID=dummy" >> .env
|
||||
echo "PGRST_SERVER_PORT=3001" >> .env
|
||||
echo "ENABLE_MARKETPLACE_FEATURE=true" >> .env
|
||||
echo "ENABLE_MARKETPLACE_DEV_MODE=true" >> .env
|
||||
echo "ENABLE_PRIVATE_APP_EMBED=true" >> .env
|
||||
echo "SSO_GOOGLE_OAUTH2_CLIENT_ID=123456789.apps.googleusercontent.com" >> .env
|
||||
echo "SSO_GOOGLE_OAUTH2_CLIENT_SECRET=ABCGFDNF-FHSDVFY-bskfh6234" >> .env
|
||||
echo "SSO_GIT_OAUTH2_CLIENT_ID=1234567890" >> .env
|
||||
echo "SSO_GIT_OAUTH2_CLIENT_SECRET=3346shfvkdjjsfkvxce32854e026a4531ed" >> .env
|
||||
echo "SSO_OPENID_NAME=tj-oidc-simulator" >> .env
|
||||
echo "SSO_OPENID_CLIENT_ID=${{ secrets.SSO_OPENID_CLIENT_ID }}" >> .env
|
||||
echo "SSO_OPENID_CLIENT_SECRET=${{ secrets.SSO_OPENID_CLIENT_SECRET }}" >> .env
|
||||
echo "ENABLE_EXTERNAL_API=true" >> .env
|
||||
echo "EXTERNAL_API_ACCESS_TOKEN=d980eb3af24d783991cee51a2d84dce9f9bd41d4b46f441cc691ccebbecd3cbc" >> .env
|
||||
echo "TOOLJET_GLOBAL_CONSTANTS__development='{\"envConstant\":\"globalUI\",\"headerKey\":\"customHeader\",\"ui_url\":\"http://20.29.40.108:4000/development\",\"headerValue\":\"key=value\"}'" >> .env
|
||||
echo "TOOLJET_SECRET_CONSTANTS__development='{\"envSecret\":\"secret\",\"headerKey\":\"customHeader\",\"ui_url\":\"http://20.29.40.108:4000/development\",\"headerValue\":\"key=value\"}'" >> .env
|
||||
echo "TOOLJET_GLOBAL_CONSTANTS__staging='{\"envConstant\":\"globalUI\",\"headerKey\":\"customHeader\",\"ui_url\":\"http://20.29.40.108:4000/staging\",\"headerValue\":\"key=value\"}'" >> .env
|
||||
echo "TOOLJET_SECRET_CONSTANTS__staging='{\"envSecret\":\"secret\",\"headerKey\":\"customHeader\",\"ui_url\":\"http://20.29.40.108:4000/staging\",\"headerValue\":\"key=value\"}'" >> .env
|
||||
echo "TOOLJET_GLOBAL_CONSTANTS__production='{\"envConstant\":\"globalUI\",\"headerKey\":\"customHeader\",\"ui_url\":\"http://20.29.40.108:4000/production\",\"headerValue\":\"key=value\"}'" >> .env
|
||||
echo "TOOLJET_SECRET_CONSTANTS__production='{\"envSecret\":\"secret\",\"headerKey\":\"customHeader\",\"ui_url\":\"http://20.29.40.108:4000/production\",\"headerValue\":\"key=value\"}'" >> .env
|
||||
echo "SAML_SET_ENTITY_ID_REDIRECT_URL=true" >> .env
|
||||
|
||||
- name: clean up old docker containers
|
||||
run: |
|
||||
docker system prune -af --volumes
|
||||
echo "Disk space after Docker cleanup:"
|
||||
df -h
|
||||
|
||||
- name: Pulling the docker-compose file
|
||||
run: curl -LO https://tooljet-test.s3.us-west-1.amazonaws.com/docker-compose.yaml && mkdir postgres_data
|
||||
|
||||
- name: Update docker-compose file for CE
|
||||
- name: Update docker-compose file
|
||||
run: |
|
||||
# Update docker-compose.yaml with the new image
|
||||
sed -i '/^[[:space:]]*tooljet:/,/^$/ s|^\([[:space:]]*image:[[:space:]]*\).*|\1tooljet/tj-osv:${{ env.SAFE_BRANCH_NAME }}-ce|' docker-compose.yaml
|
||||
# Update docker-compose.yaml with the appropriate image based on edition
|
||||
if [ "${{ matrix.edition }}" = "ce" ]; then
|
||||
sed -i '/^[[:space:]]*tooljet:/,/^$/ s|^\([[:space:]]*image:[[:space:]]*\).*|\1tooljet/tj-osv:${{ env.SAFE_BRANCH_NAME }}-ce|' docker-compose.yaml
|
||||
elif [ "${{ matrix.edition }}" = "ee" ]; then
|
||||
sed -i '/^[[:space:]]*tooljet:/,/^$/ s|^\([[:space:]]*image:[[:space:]]*\).*|\1tooljet/tj-osv:${{ env.SAFE_BRANCH_NAME }}-ee|' docker-compose.yaml
|
||||
fi
|
||||
|
||||
- name: Update docker-compose file for CE
|
||||
if: matrix.edition == 'ee'
|
||||
run: |
|
||||
# Update docker-compose.yaml with the new image
|
||||
sed -i '/^[[:space:]]*tooljet:/,/^$/ s|^\([[:space:]]*image:[[:space:]]*\).*|\1tooljet/tj-osv:${{ env.SAFE_BRANCH_NAME }}-ee|' docker-compose.yaml
|
||||
- name: view docker-compose file
|
||||
run: cat docker-compose.yaml
|
||||
|
||||
- name: Install Docker Compose
|
||||
run: |
|
||||
curl -L "https://github.com/docker/compose/releases/download/v2.10.2/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
|
||||
curl -L "https://github.com/docker/compose/releases/download/v2.27.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
|
||||
chmod +x /usr/local/bin/docker-compose
|
||||
|
||||
- name: Run docker-compose file
|
||||
|
|
@ -136,31 +177,216 @@ jobs:
|
|||
- name: Checking containers
|
||||
run: docker ps -a
|
||||
|
||||
- name: Checking containers
|
||||
run: docker ps -a
|
||||
- name: sleep
|
||||
run: sleep 80
|
||||
|
||||
- name: docker logs
|
||||
run: sudo docker logs Tooljet-app
|
||||
run: docker-compose logs tooljet
|
||||
|
||||
- name: Wait for the server to be ready
|
||||
run: |
|
||||
timeout 1500 bash -c '
|
||||
until curl --silent --fail http://localhost:3000; do
|
||||
sleep 5
|
||||
done'
|
||||
echo "Waiting for ToolJet to start (timeout: 700 seconds)..."
|
||||
SUCCESS_FOUND=false
|
||||
TIMEOUT=700
|
||||
ELAPSED=0
|
||||
|
||||
while [ $ELAPSED -lt $TIMEOUT ]; do
|
||||
# Check for success message in logs
|
||||
if docker-compose logs tooljet 2>/dev/null | grep -qE "TOOLJET APPLICATION STARTED SUCCESSFULLY|Ready to use at http://localhost:82|Ready to use at http://localhost:80"; then
|
||||
echo "Found success message in logs!"
|
||||
SUCCESS_FOUND=true
|
||||
break
|
||||
fi
|
||||
|
||||
echo "Still waiting... (${ELAPSED}s elapsed)"
|
||||
sleep 10
|
||||
ELAPSED=$((ELAPSED + 10))
|
||||
done
|
||||
|
||||
if [ "$SUCCESS_FOUND" = false ]; then
|
||||
echo "Timeout reached without finding success logs"
|
||||
echo "Showing current logs for troubleshooting..."
|
||||
docker-compose logs --tail=100 tooljet
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Server is ready!"
|
||||
|
||||
- name: Test database connection
|
||||
run: |
|
||||
# Wait for database to be ready
|
||||
echo "Testing database connection..."
|
||||
docker-compose exec -T postgres psql -U postgres -d tooljet_development -c "SELECT current_database();"
|
||||
|
||||
- name: Create delete_user procedure
|
||||
run: |
|
||||
echo "Creating delete_users stored procedure..."
|
||||
docker-compose exec -T postgres psql -U postgres -d tooljet_development -c "
|
||||
CREATE OR REPLACE PROCEDURE delete_users(p_emails TEXT[])
|
||||
LANGUAGE plpgsql
|
||||
AS \$\$
|
||||
DECLARE
|
||||
v_email TEXT;
|
||||
v_user_id UUID;
|
||||
v_organization_ids UUID[] := ARRAY[]::UUID[];
|
||||
v_organizations_to_delete UUID[] := ARRAY[]::UUID[];
|
||||
v_log_message TEXT;
|
||||
BEGIN
|
||||
IF COALESCE(array_length(p_emails, 1), 0) = 0 THEN
|
||||
RAISE NOTICE 'delete_users: no emails provided';
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
FOREACH v_email IN ARRAY p_emails LOOP
|
||||
BEGIN
|
||||
RAISE NOTICE '========================================';
|
||||
RAISE NOTICE 'Starting user deletion for email: %', v_email;
|
||||
|
||||
-- Fetch user id
|
||||
SELECT id INTO v_user_id
|
||||
FROM users
|
||||
WHERE email = v_email;
|
||||
|
||||
IF v_user_id IS NULL THEN
|
||||
RAISE NOTICE 'User with email % not found. Skipping.', v_email;
|
||||
CONTINUE;
|
||||
END IF;
|
||||
|
||||
RAISE NOTICE 'User found with id: %', v_user_id;
|
||||
|
||||
-- Collect organization memberships
|
||||
SELECT COALESCE(ARRAY_AGG(organization_id), ARRAY[]::UUID[])
|
||||
INTO v_organization_ids
|
||||
FROM organization_users
|
||||
WHERE user_id = v_user_id;
|
||||
|
||||
RAISE NOTICE 'Found % organizations for user',
|
||||
COALESCE(array_length(v_organization_ids, 1), 0);
|
||||
|
||||
-- Find organizations with that single user
|
||||
IF array_length(v_organization_ids, 1) > 0 THEN
|
||||
SELECT COALESCE(ARRAY_AGG(organization_id), ARRAY[]::UUID[])
|
||||
INTO v_organizations_to_delete
|
||||
FROM (
|
||||
SELECT organization_id
|
||||
FROM organization_users
|
||||
WHERE organization_id = ANY(v_organization_ids)
|
||||
GROUP BY organization_id
|
||||
HAVING COUNT(*) = 1
|
||||
) subquery;
|
||||
ELSE
|
||||
v_organizations_to_delete := ARRAY[]::UUID[];
|
||||
END IF;
|
||||
|
||||
RAISE NOTICE 'Found % organizations to delete',
|
||||
COALESCE(array_length(v_organizations_to_delete, 1), 0);
|
||||
|
||||
-- Cascade delete records for orgs slated for removal
|
||||
IF array_length(v_organizations_to_delete, 1) > 0 THEN
|
||||
WITH deleted_apps AS (
|
||||
DELETE FROM apps
|
||||
WHERE organization_id = ANY(v_organizations_to_delete)
|
||||
RETURNING id
|
||||
)
|
||||
SELECT 'Deleted ' || COUNT(*) || ' apps'
|
||||
INTO v_log_message FROM deleted_apps;
|
||||
RAISE NOTICE '%', v_log_message;
|
||||
|
||||
WITH deleted_data_sources AS (
|
||||
DELETE FROM data_sources
|
||||
WHERE organization_id = ANY(v_organizations_to_delete)
|
||||
RETURNING id
|
||||
)
|
||||
SELECT 'Deleted ' || COUNT(*) || ' data sources'
|
||||
INTO v_log_message FROM deleted_data_sources;
|
||||
RAISE NOTICE '%', v_log_message;
|
||||
|
||||
WITH deleted_organizations AS (
|
||||
DELETE FROM organizations
|
||||
WHERE id = ANY(v_organizations_to_delete)
|
||||
RETURNING id
|
||||
)
|
||||
SELECT 'Deleted ' || COUNT(*) || ' organizations'
|
||||
INTO v_log_message FROM deleted_organizations;
|
||||
RAISE NOTICE '%', v_log_message;
|
||||
ELSE
|
||||
RAISE NOTICE 'No organizations removed for user %', v_email;
|
||||
END IF;
|
||||
|
||||
-- Delete audit logs for orgs (if any) and user
|
||||
WITH deleted_audit_logs AS (
|
||||
DELETE FROM audit_logs
|
||||
WHERE user_id = v_user_id
|
||||
OR organization_id = ANY(v_organizations_to_delete)
|
||||
RETURNING id
|
||||
)
|
||||
SELECT 'Deleted ' || COUNT(*) || ' audit logs'
|
||||
INTO v_log_message FROM deleted_audit_logs;
|
||||
RAISE NOTICE '%', v_log_message;
|
||||
|
||||
-- Delete organization membership records
|
||||
DELETE FROM organization_users
|
||||
WHERE user_id = v_user_id;
|
||||
|
||||
-- Delete the user
|
||||
DELETE FROM users
|
||||
WHERE id = v_user_id;
|
||||
|
||||
RAISE NOTICE 'Deleted user with id: %', v_user_id;
|
||||
RAISE NOTICE 'User deletion completed for email: %', v_email;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
RAISE NOTICE 'Error deleting user %: %', v_email, SQLERRM;
|
||||
-- continue with next email
|
||||
END;
|
||||
END LOOP;
|
||||
|
||||
RAISE NOTICE '========================================';
|
||||
RAISE NOTICE 'delete_users procedure finished.';
|
||||
END;
|
||||
\$\$;"
|
||||
echo "delete_users procedure created successfully"
|
||||
|
||||
- name: Seeding (Setup Super Admin)
|
||||
run: |
|
||||
curl 'http://localhost:3000/api/onboarding/setup-super-admin' \
|
||||
curl --fail-with-body 'http://localhost:3000/api/onboarding/setup-super-admin' \
|
||||
-H 'Content-Type: application/json' \
|
||||
--data-raw '{
|
||||
"companyName": "ToolJet",
|
||||
"name": "The Developer",
|
||||
"workspaceName": "Tooljet'\''s workspace",
|
||||
"workspaceName": "My workspace",
|
||||
"email": "dev@tooljet.io",
|
||||
"password": "password"
|
||||
}'
|
||||
|
||||
- name: Seeding (Authenticate)
|
||||
run: |
|
||||
AUTH_RESPONSE=$(curl --fail-with-body \
|
||||
-c /tmp/tj_cookies.txt \
|
||||
'http://localhost:3000/api/authenticate' \
|
||||
-H 'Content-Type: application/json' \
|
||||
--data-raw '{
|
||||
"email": "dev@tooljet.io",
|
||||
"password": "password"
|
||||
}')
|
||||
echo "$AUTH_RESPONSE"
|
||||
# Extract org ID and export for the next step via GITHUB_ENV
|
||||
ORG_ID=$(echo "$AUTH_RESPONSE" | jq -r '.current_organization_id')
|
||||
echo "TJ_ORG_ID=$ORG_ID" >> $GITHUB_ENV
|
||||
|
||||
- name: Seeding (Complete Onboarding)
|
||||
run: |
|
||||
# Sets onboarding_status = onboarding_completed so the frontend does not
|
||||
# redirect every page visit to /setup, which would break all UI tests.
|
||||
# tj-workspace-id header is required by JwtStrategy to resolve the user.
|
||||
curl --fail-with-body \
|
||||
-b /tmp/tj_cookies.txt \
|
||||
-X POST \
|
||||
'http://localhost:3000/api/onboarding/finish' \
|
||||
-H 'Content-Type: application/json' \
|
||||
-H "tj-workspace-id: $TJ_ORG_ID" \
|
||||
--data-raw '{"region": "us"}'
|
||||
|
||||
- name: Create Cypress environment file
|
||||
id: create-json
|
||||
uses: jsdaniell/create-json@1.1.2
|
||||
|
|
@ -170,8 +396,9 @@ jobs:
|
|||
dir: "./cypress-tests"
|
||||
|
||||
- name: Marketplace
|
||||
uses: cypress-io/github-action@v5
|
||||
uses: cypress-io/github-action@v6
|
||||
with:
|
||||
browser: chrome
|
||||
working-directory: ./cypress-tests
|
||||
config: "baseUrl=http://localhost:3000"
|
||||
config-file: cypress-marketplace.config.js
|
||||
|
|
@ -180,7 +407,7 @@ jobs:
|
|||
uses: actions/upload-artifact@v4
|
||||
if: always()
|
||||
with:
|
||||
name: screenshots
|
||||
name: screenshots-${{ matrix.edition }}
|
||||
path: cypress-tests/cypress/screenshots
|
||||
|
||||
Cypress-Marketplace-Subpath:
|
||||
|
|
@ -238,6 +465,7 @@ jobs:
|
|||
echo "SUB_PATH=/apps/tooljet/" >> .env
|
||||
echo "NODE_ENV=production" >> .env
|
||||
echo "SERVE_CLIENT=true" >> .env
|
||||
echo "LICENSE_KEY=${{ secrets.RENDER_LICENSE_KEY }}" >> .env
|
||||
|
||||
- name: Pulling the docker-compose file
|
||||
run: curl -LO https://tooljet-test.s3.us-west-1.amazonaws.com/docker-compose.yaml && mkdir postgres_data
|
||||
|
|
@ -281,4 +509,4 @@ jobs:
|
|||
if: always()
|
||||
with:
|
||||
name: screenshots
|
||||
path: cypress-tests/cypress/screenshots
|
||||
path: cypress-tests/cypress/screenshots
|
||||
1543
.github/workflows/cypress-platform.yml
vendored
125
.github/workflows/deploy-to-stage.yml
vendored
|
|
@ -4,23 +4,23 @@ on:
|
|||
workflow_dispatch:
|
||||
inputs:
|
||||
branch_name:
|
||||
description: 'Git branch to build from'
|
||||
description: "Git branch to build from"
|
||||
required: true
|
||||
default: 'main'
|
||||
default: "lts-3.16"
|
||||
dockerfile_path:
|
||||
description: 'Path to Dockerfile'
|
||||
description: "Path to Dockerfile"
|
||||
required: true
|
||||
default: './docker/LTS/cloud/cloud-server.Dockerfile'
|
||||
default: "./docker/LTS/cloud/cloud-server.Dockerfile"
|
||||
type: choice
|
||||
options:
|
||||
- ./docker/LTS/cloud/cloud-server.Dockerfile
|
||||
docker_tag:
|
||||
description: 'Docker tag suffix (e.g., cloud-staging-v14)'
|
||||
description: "Docker tag suffix (e.g., cloud-staging-v14)"
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
full-deploy:
|
||||
name: Build Image, Deploy to AKS & Netlify
|
||||
name: Build Image, Deploy to AKS & cloudflare
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
|
|
@ -39,6 +39,16 @@ jobs:
|
|||
echo "✅ User '${{ github.actor }}' is authorized."
|
||||
fi
|
||||
|
||||
- name: Free up disk space
|
||||
run: |
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo rm -rf /usr/local/share/boost
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
sudo docker system prune -af
|
||||
sudo apt-get clean
|
||||
df -h
|
||||
|
||||
- name: Checkout Repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
|
|
@ -82,7 +92,7 @@ jobs:
|
|||
BRANCH_NAME=${{ github.event.inputs.branch_name }}
|
||||
|
||||
- name: Show the full Docker tag
|
||||
run: |
|
||||
run: |
|
||||
echo "✅ Docker image tagged as: $IMAGE_TAG"
|
||||
|
||||
# Deploy to AKS
|
||||
|
|
@ -121,7 +131,7 @@ jobs:
|
|||
exit 1
|
||||
|
||||
deploy-frontend:
|
||||
name: Deploy Frontend to Netlify
|
||||
name: Deploy Frontend to cloudflare
|
||||
runs-on: ubuntu-latest
|
||||
needs: full-deploy
|
||||
|
||||
|
|
@ -144,6 +154,7 @@ jobs:
|
|||
- name: 📥 Manual Git checkout with submodules
|
||||
run: |
|
||||
set -e
|
||||
|
||||
BRANCH="${{ github.event.inputs.branch_name }}"
|
||||
REPO="https://x-access-token:${{ secrets.CUSTOM_GITHUB_TOKEN }}@github.com/${{ github.repository }}"
|
||||
|
||||
|
|
@ -155,6 +166,11 @@ jobs:
|
|||
git clone --recurse-submodules --depth=1 --branch "$BRANCH" "$REPO" repo
|
||||
cd repo
|
||||
|
||||
echo "🔎 Main repo: verifying checkout"
|
||||
MAIN_CURRENT=$(git rev-parse --abbrev-ref HEAD)
|
||||
echo "✅ Main repo: successfully checked out branch $MAIN_CURRENT"
|
||||
echo "📍 Main repo: current commit $(git rev-parse --short HEAD): $(git log -1 --pretty=%s)"
|
||||
|
||||
echo "🔁 Updating submodules"
|
||||
git submodule update --init --recursive
|
||||
|
||||
|
|
@ -162,13 +178,44 @@ jobs:
|
|||
|
||||
BRANCH="$BRANCH" git submodule foreach --recursive bash -c '
|
||||
name="$sm_path"
|
||||
echo "↪ $name: checking out branch $BRANCH"
|
||||
echo ""
|
||||
echo "Entering '\''$name'\''"
|
||||
echo "↪ $name: trying to checkout branch '\''$BRANCH'\''"
|
||||
|
||||
if git ls-remote --exit-code --heads origin "$BRANCH" >/dev/null; then
|
||||
git fetch origin "$BRANCH:$BRANCH"
|
||||
git checkout "$BRANCH"
|
||||
git fetch origin "$BRANCH:$BRANCH" || {
|
||||
echo "❌ $name: fetch failed for $BRANCH"
|
||||
exit 1
|
||||
}
|
||||
|
||||
PREV=$(git rev-parse --short HEAD || echo "unknown")
|
||||
git checkout "$BRANCH" || {
|
||||
echo "❌ $name: checkout failed for $BRANCH"
|
||||
exit 1
|
||||
}
|
||||
|
||||
echo "Previous HEAD position was $PREV: $(git log -1 --pretty=%s || echo 'unknown')"
|
||||
echo "✅ $name: checked out branch $BRANCH"
|
||||
else
|
||||
echo "⚠️ Branch not found, falling back to main"
|
||||
git checkout main && git pull origin main
|
||||
echo "⚠️ $name: branch '$BRANCH' not found on origin. Falling back to 'lts-3.16'"
|
||||
PREV=$(git rev-parse --short HEAD || echo "unknown")
|
||||
git fetch origin lts-3.16:lts-3.16 || {
|
||||
echo "❌ $name: fetch failed for lts-3.16"
|
||||
exit 1
|
||||
}
|
||||
git checkout lts-3.16 || {
|
||||
echo "❌ $name: fallback to lts-3.16 failed"
|
||||
exit 1
|
||||
}
|
||||
echo "Previous HEAD position was $PREV: $(git log -1 --pretty=%s || echo 'unknown')"
|
||||
echo "✅ $name: now on branch lts-3.16"
|
||||
fi
|
||||
|
||||
CURRENT=$(git rev-parse --abbrev-ref HEAD)
|
||||
echo "🔎 $name: current branch = $CURRENT"
|
||||
if [ "$CURRENT" != "$BRANCH" ] && [ "$CURRENT" != "lts-3.16" ]; then
|
||||
echo "❌ $name: unexpected branch state — wanted '$BRANCH' or fallback 'lts-3.16', got '$CURRENT'"
|
||||
exit 1
|
||||
fi
|
||||
'
|
||||
|
||||
|
|
@ -181,8 +228,8 @@ jobs:
|
|||
run: npm install
|
||||
working-directory: repo
|
||||
|
||||
- name: 🛠️ Build project
|
||||
run: npm run build:plugins:prod && npm run build:frontend
|
||||
- name: 🛠️ Build the project
|
||||
run: npm run build:plugins:prod && npm run build:frontend:cloud
|
||||
working-directory: repo
|
||||
env:
|
||||
GOOGLE_MAPS_API_KEY: ${{ secrets.CLOUD_GOOGLE_MAPS_API_KEY }}
|
||||
|
|
@ -196,24 +243,38 @@ jobs:
|
|||
TJDB_SQL_MODE_DISABLE: ${{ secrets.CLOUD_TJDB_SQL_MODE_DISABLE }}
|
||||
TOOLJET_SERVER_URL: ${{ secrets.CLOUD_TOOLJET_SERVER_URL }}
|
||||
TOOLJET_EDITION: cloud
|
||||
WEBSITE_SIGNUP_URL: https://website-stage.tooljet.ai/ai-create-account
|
||||
WEBSITE_SIGNUP_URL: https://website-stage.tooljet.ai/signup
|
||||
|
||||
- name: 🚀 Deploy to Netlify
|
||||
- name: 📝 Add SPA routing redirect rule
|
||||
run: echo "/* /index.html 200" > repo/frontend/build/_redirects
|
||||
|
||||
- name: 🔧 Set CF Pages production branch to input branch
|
||||
run: |
|
||||
npm install -g netlify-cli
|
||||
netlify deploy --prod --dir=frontend/build --auth=$NETLIFY_AUTH_TOKEN --site=${{ secrets.CLOUD_NETLIFY_SITE_ID }}
|
||||
echo "🔄 Updating CF Pages production branch to: ${{ github.event.inputs.branch_name }}"
|
||||
response=$(curl -s -w "\n%{http_code}" -X PATCH \
|
||||
"https://api.cloudflare.com/client/v4/accounts/${{ secrets.CF_PAGES_ACCOUNT_ID }}/pages/projects/${{ secrets.CF_PAGES_PROJECT_NAME }}" \
|
||||
-H "Authorization: Bearer ${{ secrets.CF_PAGES_API_TOKEN }}" \
|
||||
-H "Content-Type: application/json" \
|
||||
--data '{"production_branch": "${{ github.event.inputs.branch_name }}"}')
|
||||
|
||||
http_code=$(echo "$response" | tail -n1)
|
||||
if [ "$http_code" = "200" ]; then
|
||||
echo "✅ Production branch updated to: ${{ github.event.inputs.branch_name }}"
|
||||
else
|
||||
echo "❌ Failed to update production branch (HTTP $http_code)"
|
||||
echo "$response"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: 🚀 Deploy to Cloudflare Pages
|
||||
run: |
|
||||
echo "📦 Built from source branch: ${{ github.event.inputs.branch_name }}"
|
||||
echo "🎯 Targeting CF Pages production slot (branch alias: ${{ github.event.inputs.branch_name }})"
|
||||
npx wrangler pages deploy frontend/build \
|
||||
--project-name=${{ secrets.CF_PAGES_PROJECT_NAME }} \
|
||||
--branch=${{ github.event.inputs.branch_name }} \
|
||||
--commit-dirty=true
|
||||
working-directory: repo
|
||||
env:
|
||||
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
|
||||
GOOGLE_MAPS_API_KEY: ${{ secrets.CLOUD_GOOGLE_MAPS_API_KEY }}
|
||||
NODE_ENV: ${{ secrets.CLOUD_NODE_ENV }}
|
||||
NODE_OPTIONS: ${{ secrets.CLOUD_NODE_OPTIONS }}
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.CLOUD_SENTRY_AUTH_TOKEN }}
|
||||
SENTRY_ORG: ${{ secrets.CLOUD_SENTRY_ORG }}
|
||||
SENTRY_PROJECT: ${{ secrets.CLOUD_SENTRY_PROJECT }}
|
||||
SERVE_CLIENT: ${{ secrets.CLOUD_SERVE_CLIENT }}
|
||||
SERVER_IP: ${{ secrets.CLOUD_SERVER_IP }}
|
||||
TJDB_SQL_MODE_DISABLE: ${{ secrets.CLOUD_TJDB_SQL_MODE_DISABLE }}
|
||||
TOOLJET_SERVER_URL: ${{ secrets.CLOUD_TOOLJET_SERVER_URL }}
|
||||
WEBSITE_SIGNUP_URL: https://website-stage.tooljet.ai/ai-create-account
|
||||
TOOLJET_EDITION: cloud
|
||||
CLOUDFLARE_API_TOKEN: ${{ secrets.CF_PAGES_API_TOKEN }}
|
||||
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CF_PAGES_ACCOUNT_ID }}
|
||||
|
|
|
|||
26
.github/workflows/docs-netlify.yml
vendored
|
|
@ -4,34 +4,40 @@ on:
|
|||
workflow_dispatch:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
- documentation
|
||||
paths:
|
||||
- docs/**
|
||||
- docs/**
|
||||
schedule:
|
||||
- cron: '30 3 * * *' # 9:00 AM IST
|
||||
- cron: '30 16 * * *' # 10:00 PM IST
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
- name: Checkout documentation branch
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: documentation
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v2
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 18.18.2
|
||||
|
||||
- name: Install dependencies
|
||||
run: npm install
|
||||
working-directory: docs
|
||||
run: npm install
|
||||
|
||||
- name: Build the project
|
||||
run: GTM=${{ secrets.GTM }} ALGOLIA_API_KEY=${{ secrets.ALGOLIA_API_KEY }} npm run build
|
||||
working-directory: docs
|
||||
run: |
|
||||
GTM=${{ secrets.GTM }} ALGOLIA_API_KEY=${{ secrets.ALGOLIA_API_KEY }} npm run build
|
||||
|
||||
- name: Deploy to Netlify
|
||||
run: |
|
||||
npm install -g netlify-cli
|
||||
netlify deploy --prod --dir=docs/build --auth=$NETLIFY_AUTH_TOKEN --site=${{ secrets.NETLIFY_SITE_ID }}
|
||||
env:
|
||||
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
|
||||
run: |
|
||||
npm install -g netlify-cli
|
||||
netlify deploy --prod --dir=docs/build --site=${{ secrets.NETLIFY_SITE_ID }}
|
||||
|
|
|
|||
194
.github/workflows/grype-slack-notify.yml
vendored
Normal file
|
|
@ -0,0 +1,194 @@
|
|||
name: Grype - Docker Image Vulnerability Scan
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: "30 6 * * 1"
|
||||
|
||||
jobs:
|
||||
PeriodicVulnerability-CheckOn-docker-image-lts:
|
||||
if: github.event_name == 'schedule' || github.event_name == 'workflow_dispatch'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Free up disk space
|
||||
run: |
|
||||
echo "=== Disk space before cleanup ==="
|
||||
df -h
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo rm -rf /usr/local/share/boost
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
sudo docker system prune -af
|
||||
sudo apt-get clean
|
||||
echo "=== Disk space after cleanup ==="
|
||||
df -h
|
||||
|
||||
- name: Pull ToolJet LTS Docker image
|
||||
run: docker pull tooljet/tooljet:ee-lts-latest
|
||||
|
||||
- name: Grype Scan - Table Output (visible in logs)
|
||||
uses: anchore/scan-action@v7
|
||||
with:
|
||||
image: 'tooljet/tooljet:ee-lts-latest'
|
||||
fail-build: false
|
||||
severity-cutoff: high
|
||||
output-format: table
|
||||
only-fixed: true
|
||||
|
||||
- name: Grype Scan - JSON Output (for report)
|
||||
uses: anchore/scan-action@v7
|
||||
with:
|
||||
image: 'tooljet/tooljet:ee-lts-latest'
|
||||
fail-build: false
|
||||
severity-cutoff: high
|
||||
output-format: json
|
||||
output-file: grype-lts-results.json
|
||||
only-fixed: true
|
||||
|
||||
- name: Parse Results
|
||||
id: parse-grype
|
||||
run: |
|
||||
if [ -f grype-lts-results.json ]; then
|
||||
critical=$(jq '[.matches[]? | select(.vulnerability.severity=="Critical")] | length' grype-lts-results.json)
|
||||
high=$(jq '[.matches[]? | select(.vulnerability.severity=="High")] | length' grype-lts-results.json)
|
||||
else
|
||||
critical=0
|
||||
high=0
|
||||
fi
|
||||
total=$((critical + high))
|
||||
echo "critical=$critical" >> $GITHUB_OUTPUT
|
||||
echo "high=$high" >> $GITHUB_OUTPUT
|
||||
echo "total=$total" >> $GITHUB_OUTPUT
|
||||
echo "=== Vulnerability Summary ==="
|
||||
echo "Critical: $critical"
|
||||
echo "High: $high"
|
||||
echo "Total: $total"
|
||||
|
||||
- name: Upload JSON Report as Artifact
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: grype-lts-scan-report
|
||||
path: grype-lts-results.json
|
||||
retention-days: 7
|
||||
if-no-files-found: warn
|
||||
|
||||
- name: Determine notification color
|
||||
id: determine-color
|
||||
run: |
|
||||
critical=${{ steps.parse-grype.outputs.critical }}
|
||||
high=${{ steps.parse-grype.outputs.high }}
|
||||
|
||||
if [ "$critical" -gt 0 ]; then
|
||||
echo "color=#FF0000" >> $GITHUB_OUTPUT
|
||||
elif [ "$high" -gt 0 ]; then
|
||||
echo "color=#FFA500" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "color=#36A64F" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Send Slack Notification
|
||||
run: |
|
||||
payload=$(cat <<EOF
|
||||
{
|
||||
"attachments": [
|
||||
{
|
||||
"color": "${{ steps.determine-color.outputs.color }}",
|
||||
"blocks": [
|
||||
{
|
||||
"type": "header",
|
||||
"text": {
|
||||
"type": "plain_text",
|
||||
"text": "🐳 Docker Image Vulnerability Scan Report",
|
||||
"emoji": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "section",
|
||||
"fields": [
|
||||
{
|
||||
"type": "mrkdwn",
|
||||
"text": "*Repository:*\n${{ github.repository }}"
|
||||
},
|
||||
{
|
||||
"type": "mrkdwn",
|
||||
"text": "*Image:*\ntooljet/tooljet:ee-lts-latest"
|
||||
},
|
||||
{
|
||||
"type": "mrkdwn",
|
||||
"text": "*Scanner:*\nGrype"
|
||||
},
|
||||
{
|
||||
"type": "mrkdwn",
|
||||
"text": "*Scan Time:*\n$(date -u +"%Y-%m-%d %H:%M UTC")"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "divider"
|
||||
},
|
||||
{
|
||||
"type": "section",
|
||||
"text": {
|
||||
"type": "mrkdwn",
|
||||
"text": "*Docker Image Vulnerabilities (fixable only):*"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "section",
|
||||
"fields": [
|
||||
{
|
||||
"type": "mrkdwn",
|
||||
"text": "🔴 *Critical:*\n${{ steps.parse-grype.outputs.critical }}"
|
||||
},
|
||||
{
|
||||
"type": "mrkdwn",
|
||||
"text": "🟠 *High:*\n${{ steps.parse-grype.outputs.high }}"
|
||||
},
|
||||
{
|
||||
"type": "mrkdwn",
|
||||
"text": "📊 *Total:*\n${{ steps.parse-grype.outputs.total }}"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"type": "divider"
|
||||
},
|
||||
{
|
||||
"type": "actions",
|
||||
"elements": [
|
||||
{
|
||||
"type": "button",
|
||||
"text": {
|
||||
"type": "plain_text",
|
||||
"text": "📥 Download Full Report",
|
||||
"emoji": true
|
||||
},
|
||||
"url": "https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}",
|
||||
"style": "primary"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
EOF
|
||||
)
|
||||
|
||||
response=$(curl -s -w "%{http_code}" -X POST \
|
||||
-H 'Content-type: application/json' \
|
||||
--data "$payload" \
|
||||
"${{ secrets.SLACK_WEBHOOK_URL_VUR }}")
|
||||
|
||||
http_code="${response: -3}"
|
||||
if [ "$http_code" != "200" ]; then
|
||||
echo "Slack notification failed with HTTP $http_code"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Slack notification sent successfully"
|
||||
195
.github/workflows/license-compliance.yml
vendored
Normal file
|
|
@ -0,0 +1,195 @@
|
|||
name: License Compliance Check
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
jobs:
|
||||
license-check:
|
||||
name: Check New Package Licenses
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: read
|
||||
|
||||
steps:
|
||||
- name: Check licenses of new packages
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const https = require('https');
|
||||
|
||||
// ── Fetch license from npm registry ──────────────────────────────
|
||||
|
||||
function fetchLicense(packageName) {
|
||||
return new Promise((resolve) => {
|
||||
const encoded = packageName.replace('/', '%2F');
|
||||
const url = `https://registry.npmjs.org/${encoded}/latest`;
|
||||
https.get(url, { headers: { 'User-Agent': 'tooljet-license-checker' } }, (res) => {
|
||||
let data = '';
|
||||
res.on('data', chunk => data += chunk);
|
||||
res.on('end', () => {
|
||||
try {
|
||||
const json = JSON.parse(data);
|
||||
resolve(json.license || 'UNKNOWN');
|
||||
} catch {
|
||||
resolve('UNKNOWN');
|
||||
}
|
||||
});
|
||||
}).on('error', () => resolve('UNKNOWN'));
|
||||
});
|
||||
}
|
||||
|
||||
// ── License check — ONLY exact MIT or Apache-2.0 ─────────────────
|
||||
// Dual licenses like "(MIT OR GPL-3.0-or-later)" are NOT permitted.
|
||||
|
||||
function isPermitted(license) {
|
||||
if (!license || license === 'UNKNOWN') return false;
|
||||
const l = license.trim();
|
||||
return l === 'MIT' || l === 'Apache-2.0';
|
||||
}
|
||||
|
||||
// ── Get PR diff files from GitHub API ─────────────────────────────
|
||||
|
||||
const prFiles = await github.rest.pulls.listFiles({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number,
|
||||
per_page: 100,
|
||||
});
|
||||
|
||||
const pkgFiles = prFiles.data.filter(f =>
|
||||
f.filename.endsWith('package.json') &&
|
||||
!f.filename.includes('node_modules')
|
||||
);
|
||||
|
||||
if (pkgFiles.length === 0) {
|
||||
console.log('No package.json files changed in this PR. Skipping.');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`package.json files changed: ${pkgFiles.map(f => f.filename).join(', ')}`);
|
||||
|
||||
// ── Extract newly added packages from diff patch ──────────────────
|
||||
|
||||
function extractAddedPackages(patch) {
|
||||
if (!patch) return [];
|
||||
const packages = [];
|
||||
for (const line of patch.split('\n')) {
|
||||
if (!line.startsWith('+') || line.startsWith('+++')) continue;
|
||||
const match = line.match(/^\+\s*"(@?[a-zA-Z0-9][\w\-\.\/]*)"\s*:\s*"\^?[\d~*]/);
|
||||
if (match) {
|
||||
packages.push(match[1]);
|
||||
}
|
||||
}
|
||||
return packages;
|
||||
}
|
||||
|
||||
// ── Main scan ─────────────────────────────────────────────────────
|
||||
|
||||
const violations = [];
|
||||
const permitted = [];
|
||||
|
||||
for (const file of pkgFiles) {
|
||||
console.log(`\n── Scanning: ${file.filename}`);
|
||||
|
||||
const addedPackages = extractAddedPackages(file.patch);
|
||||
|
||||
if (addedPackages.length === 0) {
|
||||
console.log(' No new packages added.');
|
||||
continue;
|
||||
}
|
||||
|
||||
console.log(` New packages found: ${addedPackages.join(', ')}`);
|
||||
|
||||
for (const pkg of addedPackages) {
|
||||
const license = await fetchLicense(pkg);
|
||||
const ok = isPermitted(license);
|
||||
|
||||
if (ok) {
|
||||
console.log(` [OK] ${pkg} — ${license}`);
|
||||
permitted.push({ pkg, license, file: file.filename });
|
||||
} else {
|
||||
console.log(` [FAIL] ${pkg} — ${license}`);
|
||||
violations.push({ pkg, license, file: file.filename });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`\n── Summary`);
|
||||
console.log(` Permitted : ${permitted.length}`);
|
||||
console.log(` Violations: ${violations.length}`);
|
||||
|
||||
// ── Delete previous bot comment if any ────────────────────────────
|
||||
|
||||
const comments = await github.rest.issues.listComments({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
});
|
||||
for (const comment of comments.data) {
|
||||
if (comment.body.includes('<!-- license-compliance-bot -->')) {
|
||||
await github.rest.issues.deleteComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
comment_id: comment.id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// ── Skip comment if nothing new was added ─────────────────────────
|
||||
|
||||
if (permitted.length === 0 && violations.length === 0) {
|
||||
console.log('No new packages detected in diff. Skipping comment.');
|
||||
return;
|
||||
}
|
||||
|
||||
// ── Build and post comment ────────────────────────────────────────
|
||||
|
||||
let body = `<!-- license-compliance-bot -->\n`;
|
||||
|
||||
if (violations.length === 0) {
|
||||
body += `## ✅ License Compliance Check Passed\n\n`;
|
||||
body += `All new packages added in this PR use permitted licenses (MIT or Apache-2.0).\n\n`;
|
||||
body += `| Package | License | File |\n|---|---|---|\n`;
|
||||
body += permitted.map(p =>
|
||||
`| \`${p.pkg}\` | \`${p.license}\` | \`${p.file}\` |`
|
||||
).join('\n');
|
||||
body += '\n';
|
||||
} else {
|
||||
body += `## ❌ License Compliance Check Failed\n\n`;
|
||||
body += `This PR adds package(s) with licenses that are **not permitted**.\n`;
|
||||
body += `Only \`MIT\` and \`Apache-2.0\` licenses are allowed.\n\n`;
|
||||
body += `### 🚫 Not Permitted\n\n`;
|
||||
body += `| Package | License | File |\n|---|---|---|\n`;
|
||||
body += violations.map(v =>
|
||||
`| \`${v.pkg}\` | \`${v.license}\` | \`${v.file}\` |`
|
||||
).join('\n');
|
||||
body += `\n\n`;
|
||||
body += `> ❌ The package(s) above are not permitted. Please replace them with an equivalent that uses an MIT or Apache-2.0 license.\n`;
|
||||
body += `> If this package genuinely needs to be exempted, a maintainer can bypass this check using the bypass rules option on this PR.\n\n`;
|
||||
if (permitted.length > 0) {
|
||||
body += `### ✅ Permitted Packages\n\n`;
|
||||
body += `| Package | License | File |\n|---|---|---|\n`;
|
||||
body += permitted.map(p =>
|
||||
`| \`${p.pkg}\` | \`${p.license}\` | \`${p.file}\` |`
|
||||
).join('\n');
|
||||
body += '\n';
|
||||
}
|
||||
}
|
||||
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: context.issue.number,
|
||||
body,
|
||||
});
|
||||
|
||||
if (violations.length > 0) {
|
||||
core.setFailed(
|
||||
`License check failed: ${violations.length} package(s) with non-permitted licenses. See PR comment for details.`
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -38,7 +38,7 @@ jobs:
|
|||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 18.18.2
|
||||
node-version: 22.15.1
|
||||
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
|
|
@ -63,12 +63,11 @@ jobs:
|
|||
npm run build --workspace=plugins/$plugin || exit 1
|
||||
done
|
||||
|
||||
|
||||
- name: Build marketplace plugins and capture summary
|
||||
run: |
|
||||
cd marketplace
|
||||
echo "🚀 Uploading to S3"
|
||||
AWS_BUCKET=tooljet-plugins-stage node scripts/upload-to-s3.js | tee upload_summary.log
|
||||
AWS_BUCKET=tooljet-plugins-stage bash scripts/upload-to-s3.sh | tee upload_summary.log
|
||||
|
||||
- name: Extract upload summary
|
||||
id: upload_summary
|
||||
|
|
@ -133,4 +132,4 @@ jobs:
|
|||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
body: `❌ Marketplace Plugin deployment failed.\n\n🔍 [View Deployment Logs & Summary](${runUrl})`
|
||||
});
|
||||
});
|
||||
|
|
|
|||
27
.github/workflows/manual-docker-build.yml
vendored
|
|
@ -4,14 +4,19 @@ on:
|
|||
workflow_dispatch:
|
||||
inputs:
|
||||
branch_name:
|
||||
description: 'Git branch to build from'
|
||||
description: "Git branch to build from"
|
||||
required: true
|
||||
default: 'main'
|
||||
default: "main"
|
||||
dockerfile_path:
|
||||
description: 'Path to Dockerfile'
|
||||
description: "Path to Dockerfile"
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- ./docker/LTS/ee/ee-production.Dockerfile
|
||||
- ./docker/pre-release/ee/ee-production.Dockerfile
|
||||
- ./docker/LTS/cloud/cloud-server.Dockerfile
|
||||
docker_tag:
|
||||
description: 'Docker tag suffix (e.g., pre-release-14)'
|
||||
description: "Docker tag suffix (e.g., pre-release-14)"
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
|
|
@ -19,6 +24,16 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Free up disk space
|
||||
run: |
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo rm -rf /usr/local/share/boost
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
sudo docker system prune -af
|
||||
sudo apt-get clean
|
||||
df -h
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
|
|
@ -32,7 +47,7 @@ jobs:
|
|||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
password: ${{ secrets.DOCKER_PAT }}
|
||||
|
||||
- name: Generate full Docker tag
|
||||
id: taggen
|
||||
|
|
@ -52,6 +67,8 @@ jobs:
|
|||
push: true
|
||||
tags: ${{ steps.taggen.outputs.tag }}
|
||||
platforms: linux/amd64
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
build-args: |
|
||||
CUSTOM_GITHUB_TOKEN=${{ secrets.CUSTOM_GITHUB_TOKEN }}
|
||||
BRANCH_NAME=${{ github.event.inputs.branch_name }}
|
||||
|
|
|
|||
180
.github/workflows/marketplace-plugins-deploy-production.yml
vendored
Normal file
|
|
@ -0,0 +1,180 @@
|
|||
name: Marketplace Plugin Production Deploy
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [labeled, unlabeled, closed]
|
||||
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch:
|
||||
description: "Branch to deploy from"
|
||||
required: true
|
||||
default: "lts-3.16"
|
||||
plugin_name:
|
||||
description: "Plugin name to deploy individually (leave empty to deploy all)"
|
||||
required: false
|
||||
default: ""
|
||||
|
||||
env:
|
||||
PR_NUMBER: ${{ github.event.number }}
|
||||
BRANCH_NAME: ${{ github.event.inputs.branch || github.head_ref || github.ref_name }}
|
||||
|
||||
jobs:
|
||||
deploy-marketplace-plugin-production:
|
||||
if: |
|
||||
github.event_name == 'workflow_dispatch' ||
|
||||
(github.event.action == 'labeled' && github.event.label.name == 'deploy-marketplace-plugin-prod')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: ✅ Check user authorization
|
||||
run: |
|
||||
allowed_user1="${{ secrets.ALLOWED_USER1_USERNAME }}"
|
||||
allowed_user2="${{ secrets.ALLOWED_USER2_USERNAME }}"
|
||||
allowed_user3="${{ secrets.ALLOWED_USER3_USERNAME }}"
|
||||
|
||||
if [[ "${{ github.actor }}" != "$allowed_user1" && \
|
||||
"${{ github.actor }}" != "$allowed_user2" && \
|
||||
"${{ github.actor }}" != "$allowed_user3" ]]; then
|
||||
echo "❌ User '${{ github.actor }}' is not authorized to trigger this workflow."
|
||||
echo "Only the following users can deploy to production:"
|
||||
echo " - $allowed_user1"
|
||||
echo " - $allowed_user2"
|
||||
echo " - $allowed_user3"
|
||||
exit 1
|
||||
else
|
||||
echo "✅ User '${{ github.actor }}' is authorized to deploy to production."
|
||||
fi
|
||||
|
||||
- name: Sync repo
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Check if PR is from the same repo
|
||||
if: github.event_name == 'pull_request_target'
|
||||
id: check_repo
|
||||
run: echo "::set-output name=is_fork::$(if [[ '${{ github.event.pull_request.head.repo.full_name }}' != '${{ github.event.pull_request.base.repo.full_name }}' ]]; then echo true; else echo false; fi)"
|
||||
|
||||
- name: Fetch the remote branch if it's a forked PR
|
||||
if: github.event_name == 'pull_request_target' && steps.check_repo.outputs.is_fork == 'true'
|
||||
run: |
|
||||
git fetch origin pull/${{ github.event.number }}/head:${{ env.BRANCH_NAME }}
|
||||
git checkout ${{ env.BRANCH_NAME }}
|
||||
|
||||
- name: Checkout PR branch
|
||||
if: github.event_name == 'pull_request_target' && steps.check_repo.outputs.is_fork == 'false'
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref }}
|
||||
|
||||
- name: Checkout specified branch
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ github.event.inputs.branch }}
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 22.15.1
|
||||
|
||||
- name: Configure AWS Credentials
|
||||
uses: aws-actions/configure-aws-credentials@v4
|
||||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_PROD_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_PROD_ACCESS_KEY }}
|
||||
aws-region: us-east-2
|
||||
|
||||
- name: Install and build dependencies in order
|
||||
run: |
|
||||
cd marketplace
|
||||
echo "🔧 Installing all workspace dependencies"
|
||||
npm install
|
||||
|
||||
echo "🏗️ Building 'common' plugin first"
|
||||
npm run build --workspace=plugins/common || exit 1
|
||||
|
||||
echo "🔁 Building all remaining plugins"
|
||||
PLUGINS=$(ls plugins | grep -v '^common$')
|
||||
for plugin in $PLUGINS; do
|
||||
echo "🔨 Building plugin: $plugin"
|
||||
npm run build --workspace=plugins/$plugin || exit 1
|
||||
done
|
||||
|
||||
- name: Build marketplace plugins and capture summary
|
||||
run: |
|
||||
cd marketplace
|
||||
echo "🚀 Uploading to S3 Production"
|
||||
AWS_BUCKET=${{ secrets.S3_BUCKET_PRODUCTION }} bash scripts/upload-to-s3.sh ${{ github.event.inputs.plugin_name }} | tee upload_summary.log
|
||||
|
||||
- name: Extract upload summary
|
||||
id: upload_summary
|
||||
run: |
|
||||
SUMMARY=$(awk '/UPLOAD SUMMARY/,0' marketplace/upload_summary.log)
|
||||
echo "UPLOAD_SUMMARY<<EOF" >> $GITHUB_ENV
|
||||
echo "$SUMMARY" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
|
||||
- name: Output summary to logs (manual trigger)
|
||||
if: success() && github.event_name == 'workflow_dispatch'
|
||||
run: |
|
||||
echo "========================================="
|
||||
echo "PRODUCTION DEPLOYMENT SUMMARY"
|
||||
echo "========================================="
|
||||
echo "${{ env.UPLOAD_SUMMARY }}"
|
||||
echo "========================================="
|
||||
|
||||
- name: Comment on success
|
||||
if: success() && github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v5
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const runId = process.env.GITHUB_RUN_ID;
|
||||
const runUrl = `https://github.com/${{ github.repository }}/actions/runs/${runId}`;
|
||||
const summary = process.env.UPLOAD_SUMMARY;
|
||||
const body = `✅ Marketplace Plugin deployed to **PRODUCTION** bucket\n\n🔍 [View Deployment Logs & Summary](${runUrl})\n\n\`\`\`\n${summary}\n\`\`\``;
|
||||
|
||||
github.rest.issues.createComment({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
body
|
||||
});
|
||||
|
||||
- name: Label update on success
|
||||
if: success() && github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
script: |
|
||||
try {
|
||||
await github.rest.issues.removeLabel({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'deploy-marketplace-plugin-prod'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
}
|
||||
|
||||
await github.rest.issues.addLabels({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
labels: ['plugin-deployed-production']
|
||||
})
|
||||
|
||||
- name: Comment on failure
|
||||
if: failure() && github.event_name == 'pull_request_target'
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
script: |
|
||||
const runId = process.env.GITHUB_RUN_ID;
|
||||
const runUrl = `https://github.com/${{ github.repository }}/actions/runs/${runId}`;
|
||||
|
||||
await github.rest.issues.createComment({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
body: `❌ Marketplace Plugin deployment to **PRODUCTION** failed.\n\n🔍 [View Deployment Logs & Summary](${runUrl})`
|
||||
});
|
||||
72
.github/workflows/packer-build.yml
vendored
|
|
@ -7,20 +7,55 @@ on:
|
|||
# Allows you to run this workflow manually from the Actions tab
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch:
|
||||
description: "Branch to build from (e.g. lts-3.16)"
|
||||
required: true
|
||||
default: "lts-3.16"
|
||||
version:
|
||||
description: "RELEASE_VERSION"
|
||||
required: true
|
||||
region:
|
||||
description: "AWS region to build AMI in (default: us-west-1)"
|
||||
required: false
|
||||
default: "us-west-1"
|
||||
|
||||
jobs:
|
||||
check-version:
|
||||
runs-on: ubuntu-latest
|
||||
name: check-version
|
||||
outputs:
|
||||
should_build: ${{ steps.check.outputs.should_build }}
|
||||
steps:
|
||||
- name: Check if version is AMI-eligible (multiple of 10)
|
||||
id: check
|
||||
run: |
|
||||
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
|
||||
echo "Manual dispatch — always build"
|
||||
echo "should_build=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
TAG="${GITHUB_REF#refs/*/}"
|
||||
# Extract patch number: v3.20.100-lts → 100
|
||||
PATCH=$(echo "$TAG" | sed 's/^v//' | cut -d'.' -f3 | cut -d'-' -f1)
|
||||
if (( PATCH % 10 == 0 )); then
|
||||
echo "Version $TAG is AMI-eligible (patch $PATCH is multiple of 10)"
|
||||
echo "should_build=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "Skipping AMI build — $TAG patch $PATCH is not a multiple of 10"
|
||||
echo "should_build=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
fi
|
||||
|
||||
packer-ee:
|
||||
needs: check-version
|
||||
if: needs.check-version.outputs.should_build == 'true'
|
||||
runs-on: ubuntu-latest
|
||||
name: packer-ee
|
||||
|
||||
steps:
|
||||
- name: Checkout code to lts-3.16 branch
|
||||
if: contains(github.event.release.tag_name, '-lts')
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
ref: refs/heads/lts-3.16
|
||||
ref: refs/heads/${{ github.event.inputs.branch || 'lts-3.16' }}
|
||||
|
||||
- name: Setting tag
|
||||
if: "${{ github.event.inputs.version != '' }}"
|
||||
|
|
@ -35,7 +70,7 @@ jobs:
|
|||
with:
|
||||
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
|
||||
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
|
||||
aws-region: us-west-1
|
||||
aws-region: ${{ github.event.inputs.region || 'us-west-1' }}
|
||||
|
||||
# Initialize Packer templates
|
||||
- name: Initialize Packer Template
|
||||
|
|
@ -61,26 +96,43 @@ jobs:
|
|||
# Dynamically update setup_machine.sh with PAT
|
||||
- name: Validate PAT
|
||||
run: |
|
||||
sed -i "s|git config --global url."https://x-access-token:CUSTOM_GITHUB_TOKEN@github.com/".insteadOf "https://github.com/"|git config --global url."https://x-access-token:${ secrets.CUSTOM_GITHUB_TOKEN }@github.com/".insteadOf "https://github.com/"|g" ./deploy/ec2/ee/setup_machine.sh
|
||||
sed -i "s|CUSTOM_GITHUB_TOKEN|${{ secrets.CUSTOM_GITHUB_TOKEN }}|g" ./deploy/ec2/ee/setup_machine.sh
|
||||
|
||||
# build artifact
|
||||
- name: Build Artifact
|
||||
id: packer-build
|
||||
uses: hashicorp/packer-github-actions@master
|
||||
with:
|
||||
command: build
|
||||
#The the below argument is specific for building EE AMI image
|
||||
arguments: -color=false -on-error=abort -var ami_name=tooljet_${{ env.RELEASE_VERSION }}.ubuntu_jammy
|
||||
arguments: -color=false -on-error=abort -var ami_name=tooljet_${{ env.RELEASE_VERSION }}.ubuntu_jammy -var ami_region=${{ github.event.inputs.region || 'us-west-1' }}
|
||||
target: .
|
||||
working_directory: deploy/ec2/ee
|
||||
env:
|
||||
PACKER_LOG: 1
|
||||
|
||||
- name: Send Slack Notification
|
||||
- name: Cleanup EC2 instances
|
||||
if: always()
|
||||
run: |
|
||||
if [[ "${{ job.status }}" == "success" ]]; then
|
||||
message="ToolJet enterprise AWS AMI published:\\n\`tooljet_${{ env.RELEASE_VERSION }}.ubuntu-jammy\`"
|
||||
echo "Listing all EC2 instances..."
|
||||
|
||||
INSTANCE_IDS=$(aws ec2 describe-instances \
|
||||
--region ${{ github.event.inputs.region || 'us-west-1' }} \
|
||||
--query 'Reservations[*].Instances[*].InstanceId' \
|
||||
--output text)
|
||||
|
||||
if [ -n "$INSTANCE_IDS" ] && [ "$INSTANCE_IDS" != "None" ]; then
|
||||
echo "Found instances: $INSTANCE_IDS"
|
||||
aws ec2 terminate-instances --region ${{ github.event.inputs.region || 'us-west-1' }} --instance-ids $INSTANCE_IDS
|
||||
echo "Terminated instances: $INSTANCE_IDS"
|
||||
else
|
||||
message="ToolJet enterprise AWS AMI release failed! \\n\`tooljet_${{ env.RELEASE_VERSION }}.ubuntu-jammy\`"
|
||||
echo "No instances found to cleanup"
|
||||
fi
|
||||
|
||||
- name: Send Slack Notification
|
||||
if: success()
|
||||
run: |
|
||||
ami_name="tooljet_${{ env.RELEASE_VERSION }}.ubuntu_jammy"
|
||||
message="✅ *ToolJet Enterprise AWS AMI Published*\nVersion: \`${{ env.RELEASE_VERSION }}\`\nType: 🔒 LTS Release\nBranch: \`${{ github.event.inputs.branch || 'lts-3.16' }}\`\nRegion: \`${{ github.event.inputs.region || 'us-west-1' }}\`\nAMI Name: \`${ami_name}\`"
|
||||
|
||||
curl -X POST -H 'Content-type: application/json' --data "{\"text\":\"$message\"}" ${{ secrets.SLACK_WEBHOOK_URL }}
|
||||
|
|
|
|||
1449
.github/workflows/render-preview-deploy-v2.yml
vendored
Normal file
588
.github/workflows/render-preview-deploy.yml
vendored
|
|
@ -5,8 +5,8 @@ on:
|
|||
issue_comment:
|
||||
types: [created, edited, deleted]
|
||||
env:
|
||||
PR_NUMBER: ${{ github.event.number }}
|
||||
BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number || github.event.issue.number }}
|
||||
BRANCH_NAME: ${{ github.event.pull_request.head.ref || github.head_ref || github.ref_name }}
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
|
@ -15,11 +15,26 @@ permissions:
|
|||
jobs:
|
||||
|
||||
# Community Edition CE
|
||||
create-ce-review-app:
|
||||
if: ${{ github.event.action == 'labeled' && (github.event.label.name == 'create-ce-review-app' || github.event.label.name == 'review-app') }}
|
||||
create-ce-review-app-old:
|
||||
if: ${{ github.event.action == 'labeled' && (github.event.label.name == 'create-ce-review-app-old' || github.event.label.name == 'review-app') }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Get PR details for issue_comment events
|
||||
if: github.event_name == 'issue_comment'
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
script: |
|
||||
const pr = await github.rest.pulls.get({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number
|
||||
});
|
||||
core.exportVariable('PR_NUMBER', pr.data.number);
|
||||
core.exportVariable('BRANCH_NAME', pr.data.head.ref);
|
||||
console.log(`✅ PR Number: ${pr.data.number}`);
|
||||
console.log(`✅ Branch Name: ${pr.data.head.ref}`);
|
||||
|
||||
- name: Sync repo
|
||||
uses: actions/checkout@v3
|
||||
|
||||
|
|
@ -218,7 +233,7 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'create-ce-review-app'
|
||||
name: 'create-ce-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
|
|
@ -228,11 +243,11 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
labels: ['active-ce-review-app']
|
||||
labels: ['active-ce-review-app-old']
|
||||
})
|
||||
|
||||
destroy-ce-review-app:
|
||||
if: ${{ (github.event.action == 'labeled' && github.event.label.name == 'destroy-ce-review-app') || github.event.action == 'closed' }}
|
||||
destroy-ce-review-app-old:
|
||||
if: ${{ (github.event.action == 'labeled' && github.event.label.name == 'destroy-ce-review-app-old') || github.event.action == 'closed' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
|
|
@ -257,7 +272,7 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'destroy-ce-review-app'
|
||||
name: 'destroy-ce-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
|
|
@ -268,7 +283,7 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'suspend-ce-review-app'
|
||||
name: 'suspend-ce-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
|
|
@ -279,44 +294,15 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'active-ce-review-app'
|
||||
name: 'active-ce-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
}
|
||||
|
||||
# - name: Install PostgreSQL client
|
||||
# run: |
|
||||
# sudo apt update
|
||||
# sudo apt install postgresql-client -y
|
||||
|
||||
# - name: Wait after installing PostgreSQL
|
||||
# run: sleep 25
|
||||
|
||||
# - name: Drop PostgreSQL PR databases
|
||||
# env:
|
||||
# PGHOST: ${{ secrets.RENDER_DS_PG_HOST }}
|
||||
# PGPORT: 5432
|
||||
# PGUSER: ${{ secrets.RENDER_DS_PG_USER }}
|
||||
# PGDATABASE: ${{ env.PR_NUMBER }}-ce
|
||||
# PGTJBDATABASE: ${{ env.PR_NUMBER }}-ce-tjdb
|
||||
# run: |
|
||||
# if PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -lqt | cut -d \| -f 1 | grep -qw $PGDATABASE; then
|
||||
# echo "Database $PGDATABASE exists, deleting..."
|
||||
# PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -d postgres -c "drop database \"$PGDATABASE\" ;"
|
||||
# else
|
||||
# echo "Database $PGDATABASE does not exist."
|
||||
# fi
|
||||
|
||||
# if PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -lqt | cut -d \| -f 1 | grep -qw $PGTJBDATABASE; then
|
||||
# echo "Database $PGTJBDATABASE exists, deleting..."
|
||||
# PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -d postgres -c "drop database \"$PGTJBDATABASE\" ;"
|
||||
# else
|
||||
# echo "Database $PGTJBDATABASE does not exist."
|
||||
# fi
|
||||
|
||||
suspend-ce-review-app:
|
||||
if: ${{ github.event.action == 'labeled' && github.event.label.name == 'suspend-ce-review-app' }}
|
||||
|
||||
suspend-ce-review-app-old:
|
||||
if: ${{ github.event.action == 'labeled' && github.event.label.name == 'suspend-ce-review-app-old' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
|
|
@ -341,14 +327,14 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'active-ce-review-app'
|
||||
name: 'active-ce-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
}
|
||||
|
||||
resume-ce-review-app:
|
||||
if: ${{ github.event.action == 'unlabeled' && github.event.label.name == 'suspend-ce-review-app' }}
|
||||
resume-ce-review-app-old:
|
||||
if: ${{ github.event.action == 'unlabeled' && github.event.label.name == 'suspend-ce-review-app-old' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
|
|
@ -372,7 +358,7 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
labels: ['active-ce-review-app']
|
||||
labels: ['active-ce-review-app-old']
|
||||
})
|
||||
|
||||
try {
|
||||
|
|
@ -380,7 +366,7 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'suspend-ce-review-app'
|
||||
name: 'suspend-ce-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
|
|
@ -390,11 +376,28 @@ jobs:
|
|||
|
||||
# Enterprise Edition
|
||||
|
||||
create-ee-review-app:
|
||||
if: ${{ github.event.action == 'labeled' && (github.event.label.name == 'create-ee-review-app' || github.event.label.name == 'review-app') }}
|
||||
create-ee-review-app-old:
|
||||
if: |
|
||||
(github.event.action == 'labeled' && (github.event.label.name == 'create-ee-review-app-old' || github.event.label.name == 'create-ee-lts-review-app-old' || github.event.label.name == 'review-app-old')) ||
|
||||
(github.event.action == 'created' && (contains(github.event.comment.body, '/deploy-ee') || contains(github.event.comment.body, '/deploy-ee-lts')))
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Get PR details for issue_comment events
|
||||
if: github.event_name == 'issue_comment'
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
script: |
|
||||
const pr = await github.rest.pulls.get({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: context.issue.number
|
||||
});
|
||||
core.exportVariable('PR_NUMBER', pr.data.number);
|
||||
core.exportVariable('BRANCH_NAME', pr.data.head.ref);
|
||||
console.log(`✅ PR Number: ${pr.data.number}`);
|
||||
console.log(`✅ Branch Name: ${pr.data.head.ref}`);
|
||||
|
||||
- name: Sync repo
|
||||
uses: actions/checkout@v3
|
||||
|
||||
|
|
@ -426,25 +429,26 @@ jobs:
|
|||
if: env.is_fork == 'false'
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Detect base branch and set Dockerfile path
|
||||
- name: Determine Dockerfile path
|
||||
run: |
|
||||
BASE_BRANCH="${{ github.event.pull_request.base.ref }}"
|
||||
echo "Base branch: $BASE_BRANCH"
|
||||
|
||||
if [[ "$BASE_BRANCH" == "main" || "$BASE_BRANCH" == release/* ]]; then
|
||||
DOCKERFILE="./docker/pre-release/ee/ee-preview.Dockerfile"
|
||||
echo "Using pre-release track"
|
||||
elif [[ "$BASE_BRANCH" == "lts-3.16" || "$BASE_BRANCH" == release-lts/* ]]; then
|
||||
# Check if LTS deployment is requested via comment or label
|
||||
if [[ "${{ github.event.action }}" == "labeled" && "${{ github.event.label.name }}" == "create-ee-lts-review-app-old" ]]; then
|
||||
DOCKERFILE="./docker/LTS/ee/ee-preview.Dockerfile"
|
||||
echo "Using LTS track"
|
||||
EDITION_TYPE="LTS"
|
||||
echo "Using LTS EE Dockerfile (triggered by label)"
|
||||
elif [[ "${{ github.event.action }}" == "created" && "${{ github.event.comment.body }}" == *"/deploy-ee-lts"* ]]; then
|
||||
DOCKERFILE="./docker/LTS/ee/ee-preview.Dockerfile"
|
||||
EDITION_TYPE="LTS"
|
||||
echo "Using LTS EE Dockerfile (triggered by comment)"
|
||||
else
|
||||
echo "Error: Unsupported base branch '$BASE_BRANCH'"
|
||||
echo "Supported branches: main, release/*, lts-3.16, release-lts/*"
|
||||
exit 1
|
||||
DOCKERFILE="./docker/pre-release/ee/ee-preview.Dockerfile"
|
||||
EDITION_TYPE="pre-release"
|
||||
echo "Using pre-release EE Dockerfile"
|
||||
fi
|
||||
|
||||
echo "Edition Type: $EDITION_TYPE"
|
||||
echo "Selected Dockerfile: $DOCKERFILE"
|
||||
echo "DOCKERFILE=$DOCKERFILE" >> $GITHUB_ENV
|
||||
echo "EDITION_TYPE=$EDITION_TYPE" >> $GITHUB_ENV
|
||||
|
||||
- name: Creating deployment for Enterprise Edition
|
||||
id: create-ee-deployment
|
||||
|
|
@ -561,23 +565,27 @@ jobs:
|
|||
},
|
||||
{
|
||||
"key": "REDIS_HOST",
|
||||
"value": "${{ secrets.RENDER_REDIS_HOST }}"
|
||||
"value": "localhost"
|
||||
},
|
||||
{
|
||||
"key": "REDIS_PORT",
|
||||
"value": "${{ secrets.RENDER_REDIS_PORT }}"
|
||||
"value": "6379"
|
||||
},
|
||||
{
|
||||
"key": "TEMPORAL_SERVER_ADDRESS",
|
||||
"value": "https://auto-setup-1-25-1.onrender.com"
|
||||
"key": "REDIS_DB",
|
||||
"value": "0"
|
||||
},
|
||||
{
|
||||
"key": "TEMPORAL_TASK_QUEUE_NAME_FOR_WORKFLOWS",
|
||||
"value": "tooljet-ee-pr-${{ env.PR_NUMBER }}"
|
||||
"key": "REDIS_TLS_ENABLED",
|
||||
"value": "false"
|
||||
},
|
||||
{
|
||||
"key": "TOOLJET_WORKFLOWS_TEMPORAL_NAMESPACE",
|
||||
"value": "default"
|
||||
"key": "REDIS_PASSWORD",
|
||||
"value": ""
|
||||
},
|
||||
{
|
||||
"key": "WORKER",
|
||||
"value": "true"
|
||||
},
|
||||
{
|
||||
"key": "TOOLJET_MARKETPLACE_URL",
|
||||
|
|
@ -626,11 +634,12 @@ jobs:
|
|||
with:
|
||||
github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
script: |
|
||||
const editionType = '${{ env.EDITION_TYPE }}' === 'LTS' ? '(LTS)' : '';
|
||||
github.rest.issues.createComment({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
body: 'Enterpise Edition: \n Deployment: https://tooljet-ee-pr-${{ env.PR_NUMBER }}.onrender.com \n Dashboard: https://dashboard.render.com/web/${{ env.SERVICE_ID }}'
|
||||
body: `Enterprise Edition ${editionType}: \n Deployment: https://tooljet-ee-pr-${{ env.PR_NUMBER }}.onrender.com \n Dashboard: https://dashboard.render.com/web/${{ env.SERVICE_ID }}`
|
||||
})
|
||||
|
||||
- uses: actions/github-script@v6
|
||||
|
|
@ -641,7 +650,18 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'create-ee-review-app'
|
||||
name: 'create-ee-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
}
|
||||
|
||||
try {
|
||||
await github.rest.issues.removeLabel({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'create-ee-lts-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
|
|
@ -651,11 +671,11 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
labels: ['active-ee-review-app']
|
||||
labels: ['active-ee-review-app-old']
|
||||
})
|
||||
|
||||
destroy-ee-review-app:
|
||||
if: ${{ (github.event.action == 'labeled' && github.event.label.name == 'destroy-ee-review-app') || github.event.action == 'closed' }}
|
||||
destroy-ee-review-app-old:
|
||||
if: ${{ (github.event.action == 'labeled' && github.event.label.name == 'destroy-ee-review-app-old') || github.event.action == 'closed' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
|
|
@ -680,7 +700,7 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'destroy-ee-review-app'
|
||||
name: 'destroy-ee-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
|
|
@ -691,7 +711,7 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'suspend-ee-review-app'
|
||||
name: 'suspend-ee-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
|
|
@ -702,44 +722,16 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'active-ee-review-app'
|
||||
name: 'active-ee-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
}
|
||||
|
||||
# - name: Install PostgreSQL client
|
||||
# run: |
|
||||
# sudo apt update
|
||||
# sudo apt install postgresql-client -y
|
||||
|
||||
# - name: Wait after installing PostgreSQL
|
||||
# run: sleep 25
|
||||
|
||||
# - name: Drop PostgreSQL PR databases
|
||||
# env:
|
||||
# PGHOST: ${{ secrets.RENDER_DS_PG_HOST }}
|
||||
# PGPORT: 5432
|
||||
# PGUSER: ${{ secrets.RENDER_DS_PG_USER }}
|
||||
# PGDATABASE: ${{ env.PR_NUMBER }}-ee
|
||||
# PGTJBDATABASE: ${{ env.PR_NUMBER }}-ee-tjdb
|
||||
# run: |
|
||||
# if PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -lqt | cut -d \| -f 1 | grep -qw $PGDATABASE; then
|
||||
# echo "Database $PGDATABASE exists, deleting..."
|
||||
# PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -d postgres -c "drop database \"$PGDATABASE\" ;"
|
||||
# else
|
||||
# echo "Database $PGDATABASE does not exist."
|
||||
# fi
|
||||
|
||||
# if PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -lqt | cut -d \| -f 1 | grep -qw $PGTJBDATABASE; then
|
||||
# echo "Database $PGTJBDATABASE exists, deleting..."
|
||||
# PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -d postgres -c "drop database \"$PGTJBDATABASE\" ;"
|
||||
# else
|
||||
# echo "Database $PGTJBDATABASE does not exist."
|
||||
# fi
|
||||
|
||||
suspend-ee-review-app:
|
||||
if: ${{ github.event.action == 'labeled' && github.event.label.name == 'suspend-ee-review-app' }}
|
||||
suspend-ee-review-app-old:
|
||||
if: ${{ github.event.action == 'labeled' && github.event.label.name == 'suspend-ee-review-app-old' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
|
|
@ -764,14 +756,14 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'active-ee-review-app'
|
||||
name: 'active-ee-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
}
|
||||
|
||||
resume-ee-review-app:
|
||||
if: ${{ github.event.action == 'unlabeled' && github.event.label.name == 'suspend-ee-review-app' }}
|
||||
resume-ee-review-app-old:
|
||||
if: ${{ github.event.action == 'unlabeled' && github.event.label.name == 'suspend-ee-review-app-old' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
|
|
@ -795,7 +787,7 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
labels: ['active-ee-review-app']
|
||||
labels: ['active-ee-review-app-old']
|
||||
})
|
||||
|
||||
try {
|
||||
|
|
@ -803,7 +795,7 @@ jobs:
|
|||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: 'suspend-ee-review-app'
|
||||
name: 'suspend-ee-review-app-old'
|
||||
})
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
|
|
@ -811,369 +803,6 @@ jobs:
|
|||
|
||||
|
||||
|
||||
|
||||
# Cloud Edition
|
||||
|
||||
# create-cloud-review-app:
|
||||
# if: ${{ github.event.action == 'labeled' && (github.event.label.name == 'create-cloud-review-app' || github.event.label.name == 'review-app') }}
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# steps:
|
||||
# - name: Creating deployment for Cloud Edition
|
||||
# id: create-cloud-deployment
|
||||
# run: |
|
||||
# export RESPONSE=$(curl --request POST \
|
||||
# --url https://api.render.com/v1/services \
|
||||
# --header 'accept: application/json' \
|
||||
# --header 'content-type: application/json' \
|
||||
# --header 'Authorization: Bearer ${{ secrets.RENDER_API_KEY }}' \
|
||||
# --data '
|
||||
# {
|
||||
# "autoDeploy": "yes",
|
||||
# "branch": "${{ env.BRANCH_NAME }}",
|
||||
# "name": "ToolJet Cloud PR #${{ env.PR_NUMBER }}",
|
||||
# "notifyOnFail": "default",
|
||||
# "ownerId": "tea-caeo4bj19n072h3dddc0",
|
||||
# "repo": "https://github.com/ToolJet/ToolJet",
|
||||
# "slug": "tooljet-cloud-pr-${{ env.PR_NUMBER }}",
|
||||
# "suspended": "not_suspended",
|
||||
# "suspenders": [],
|
||||
# "type": "web_service",
|
||||
# "envVars": [
|
||||
# {
|
||||
# "key": "PG_HOST",
|
||||
# "value": "${{ secrets.RENDER_PG_HOST }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "PG_PORT",
|
||||
# "value": "5432"
|
||||
# },
|
||||
# {
|
||||
# "key": "PG_USER",
|
||||
# "value": "${{ secrets.RENDER_PG_USER }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "PG_PASS",
|
||||
# "value": "${{ secrets.RENDER_PG_PASS }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "PG_DB",
|
||||
# "value": "${{ env.PR_NUMBER }}-cloud"
|
||||
# },
|
||||
# {
|
||||
# "key": "TOOLJET_DB",
|
||||
# "value": "${{ env.PR_NUMBER }}-cloud-tjdb"
|
||||
# },
|
||||
# {
|
||||
# "key": "TOOLJET_DB_HOST",
|
||||
# "value": "${{ secrets.RENDER_PG_HOST }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "TOOLJET_DB_USER",
|
||||
# "value": "${{ secrets.RENDER_PG_USER }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "TOOLJET_DB_PASS",
|
||||
# "value": "${{ secrets.RENDER_PG_PASS }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "TOOLJET_DB_PORT",
|
||||
# "value": "5432"
|
||||
# },
|
||||
# {
|
||||
# "key": "PGRST_DB_PRE_CONFIG",
|
||||
# "value": "postgrest.pre_config"
|
||||
# },
|
||||
# {
|
||||
# "key": "PGRST_DB_URI",
|
||||
# "value": "postgres://${{ secrets.RENDER_PG_USER }}:${{ secrets.RENDER_PG_PASS }}@${{ secrets.RENDER_PG_HOST }}/${{ env.PR_NUMBER }}-cloud-tjdb"
|
||||
# },
|
||||
# {
|
||||
# "key": "PGRST_HOST",
|
||||
# "value": "127.0.0.1:3000"
|
||||
# },
|
||||
# {
|
||||
# "key": "PGRST_JWT_SECRET",
|
||||
# "value": "r9iMKoe5CRMgvJBBtp4HrqN7QiPpUToj"
|
||||
# },
|
||||
# {
|
||||
# "key": "PGRST_LOG_LEVEL",
|
||||
# "value": "info"
|
||||
# },
|
||||
# {
|
||||
# "key": "PORT",
|
||||
# "value": "80"
|
||||
# },
|
||||
# {
|
||||
# "key": "TOOLJET_HOST",
|
||||
# "value": "https://tooljet-cloud-pr-${{ env.PR_NUMBER }}.onrender.com"
|
||||
# },
|
||||
# {
|
||||
# "key": "DISABLE_TOOLJET_TELEMETRY",
|
||||
# "value": "true"
|
||||
# },
|
||||
# {
|
||||
# "key": "SMTP_ADDRESS",
|
||||
# "value": "smtp.mailtrap.io"
|
||||
# },
|
||||
# {
|
||||
# "key": "SMTP_DOMAIN",
|
||||
# "value": "smtp.mailtrap.io"
|
||||
# },
|
||||
# {
|
||||
# "key": "SMTP_PORT",
|
||||
# "value": "2525"
|
||||
# },
|
||||
# {
|
||||
# "key": "SMTP_USERNAME",
|
||||
# "value": "${{ secrets.RENDER_SMTP_USERNAME }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "SMTP_PASSWORD",
|
||||
# "value": "${{ secrets.RENDER_SMTP_PASSWORD }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "REDIS_HOST",
|
||||
# "value": "${{ secrets.RENDER_REDIS_HOST }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "REDIS_PORT",
|
||||
# "value": "${{ secrets.RENDER_REDIS_PORT }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "TEMPORAL_SERVER_ADDRESS",
|
||||
# "value": "https://auto-setup-1-25-1.onrender.com"
|
||||
# },
|
||||
# {
|
||||
# "key": "TEMPORAL_TASK_QUEUE_NAME_FOR_WORKFLOWS",
|
||||
# "value": "tooljet-cloud-pr-${{ env.PR_NUMBER }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "TOOLJET_WORKFLOWS_TEMPORAL_NAMESPACE",
|
||||
# "value": "default"
|
||||
# },
|
||||
# {
|
||||
# "key": "TOOLJET_MARKETPLACE_URL",
|
||||
# "value": "${{ secrets.MARKETPLACE_BUCKET }}"
|
||||
# },
|
||||
# {
|
||||
# "key": "CUSTOM_GITHUB_TOKEN",
|
||||
# "value": "${{ secrets.CUSTOM_GITHUB_TOKEN }}"
|
||||
# }
|
||||
# ],
|
||||
# "serviceDetails": {
|
||||
# "disk": null,
|
||||
# "env": "docker",
|
||||
# "envSpecificDetails": {
|
||||
# "dockerCommand": "",
|
||||
# "dockerContext": "./",
|
||||
# "dockerfilePath": "./docker/cloud/cloud-preview.Dockerfile"
|
||||
# },
|
||||
# "healthCheckPath": "/api/health",
|
||||
# "numInstances": 1,
|
||||
# "openPorts": [{
|
||||
# "port": 80,
|
||||
# "protocol": "TCP"
|
||||
# }],
|
||||
# "plan": "starter",
|
||||
# "pullRequestPreviewsEnabled": "no",
|
||||
# "region": "oregon",
|
||||
# "url": "https://tooljet-cloud-pr-${{ env.PR_NUMBER }}.onrender.com"
|
||||
# }
|
||||
# }')
|
||||
|
||||
# echo "response: $RESPONSE"
|
||||
# export SERVICE_ID=$(echo $RESPONSE | jq -r '.service.id')
|
||||
# echo "SERVICE_ID=$SERVICE_ID" >> $GITHUB_ENV
|
||||
|
||||
# - name: Comment deployment URL
|
||||
# uses: actions/github-script@v5
|
||||
# with:
|
||||
# github-token: ${{secrets.GITHUB_TOKEN}}
|
||||
# script: |
|
||||
# github.rest.issues.createComment({
|
||||
# issue_number: context.issue.number,
|
||||
# owner: context.repo.owner,
|
||||
# repo: context.repo.repo,
|
||||
# body: 'Cloud Edition: \n Deployment: https://tooljet-cloud-pr-${{ env.PR_NUMBER }}.onrender.com \n Dashboard: https://dashboard.render.com/web/${{ env.SERVICE_ID }}'
|
||||
# })
|
||||
|
||||
# - uses: actions/github-script@v6
|
||||
# with:
|
||||
# script: |
|
||||
# try {
|
||||
# await github.rest.issues.removeLabel({
|
||||
# issue_number: context.issue.number,
|
||||
# owner: context.repo.owner,
|
||||
# repo: context.repo.repo,
|
||||
# name: 'create-cloud-review-app'
|
||||
# })
|
||||
# } catch (e) {
|
||||
# console.log(e)
|
||||
# }
|
||||
|
||||
# await github.rest.issues.addLabels({
|
||||
# issue_number: context.issue.number,
|
||||
# owner: context.repo.owner,
|
||||
# repo: context.repo.repo,
|
||||
# labels: ['active-cloud-review-app']
|
||||
# })
|
||||
|
||||
# destroy-cloud-review-app:
|
||||
# if: ${{ (github.event.action == 'labeled' && github.event.label.name == 'destroy-cloud-review-app') || github.event.action == 'closed' }}
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# steps:
|
||||
# - name: Delete service
|
||||
# run: |
|
||||
# export SERVICE_ID=$(curl --request GET \
|
||||
# --url 'https://api.render.com/v1/services?name=ToolJet%20PR%20%23${{ env.PR_NUMBER }}&limit=1' \
|
||||
# --header 'accept: application/json' \
|
||||
# --header 'authorization: Bearer ${{ secrets.RENDER_API_KEY }}' | \
|
||||
# jq -r '.[0].service.id')
|
||||
|
||||
# curl --request DELETE \
|
||||
# --url https://api.render.com/v1/services/$SERVICE_ID \
|
||||
# --header 'accept: application/json' \
|
||||
# --header 'authorization: Bearer ${{ secrets.RENDER_API_KEY }}'
|
||||
|
||||
# - uses: actions/github-script@v6
|
||||
# with:
|
||||
# script: |
|
||||
# try {
|
||||
# await github.rest.issues.removeLabel({
|
||||
# issue_number: context.issue.number,
|
||||
# owner: context.repo.owner,
|
||||
# repo: context.repo.repo,
|
||||
# name: 'destroy-cloud-review-app'
|
||||
# })
|
||||
# } catch (e) {
|
||||
# console.log(e)
|
||||
# }
|
||||
|
||||
# try {
|
||||
# await github.rest.issues.removeLabel({
|
||||
# issue_number: context.issue.number,
|
||||
# owner: context.repo.owner,
|
||||
# repo: context.repo.repo,
|
||||
# name: 'suspend-cloud-review-app'
|
||||
# })
|
||||
# } catch (e) {
|
||||
# console.log(e)
|
||||
# }
|
||||
|
||||
# try {
|
||||
# await github.rest.issues.removeLabel({
|
||||
# issue_number: context.issue.number,
|
||||
# owner: context.repo.owner,
|
||||
# repo: context.repo.repo,
|
||||
# name: 'active-cloud-review-app'
|
||||
# })
|
||||
# } catch (e) {
|
||||
# console.log(e)
|
||||
# }
|
||||
|
||||
# - name: Install PostgreSQL client
|
||||
# run: |
|
||||
# sudo apt update
|
||||
# sudo apt install postgresql-client -y
|
||||
|
||||
# - name: Wait after installing PostgreSQL
|
||||
# run: sleep 25
|
||||
|
||||
# - name: Drop PostgreSQL PR databases
|
||||
# env:
|
||||
# PGHOST: ${{ secrets.RENDER_DS_PG_HOST }}
|
||||
# PGPORT: 5432
|
||||
# PGUSER: ${{ secrets.RENDER_DS_PG_USER }}
|
||||
# PGDATABASE: ${{ env.PR_NUMBER }}-cloud
|
||||
# PGTJBDATABASE: ${{ env.PR_NUMBER }}-cloud-tjdb
|
||||
# run: |
|
||||
# if PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -lqt | cut -d \| -f 1 | grep -qw $PGDATABASE; then
|
||||
# echo "Database $PGDATABASE exists, deleting..."
|
||||
# PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -d postgres -c "drop database \"$PGDATABASE\" ;"
|
||||
# else
|
||||
# echo "Database $PGDATABASE does not exist."
|
||||
# fi
|
||||
|
||||
# if PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -lqt | cut -d \| -f 1 | grep -qw $PGTJBDATABASE; then
|
||||
# echo "Database $PGTJBDATABASE exists, deleting..."
|
||||
# PGPASSWORD=${{ secrets.RENDER_DS_PG_PASS }} psql -h $PGHOST -p $PGPORT -U $PGUSER -d postgres -c "drop database \"$PGTJBDATABASE\" ;"
|
||||
# else
|
||||
# echo "Database $PGTJBDATABASE does not exist."
|
||||
# fi
|
||||
|
||||
# suspend-cloud-review-app:
|
||||
# if: ${{ github.event.action == 'labeled' && github.event.label.name == 'suspend-cloud-review-app' }}
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# steps:
|
||||
# - name: Suspend service
|
||||
# run: |
|
||||
# export SERVICE_ID=$(curl --request GET \
|
||||
# --url 'https://api.render.com/v1/services?name=ToolJet%20PR%20%23${{ env.PR_NUMBER }}&limit=1' \
|
||||
# --header 'accept: application/json' \
|
||||
# --header 'authorization: Bearer ${{ secrets.RENDER_API_KEY }}' | \
|
||||
# jq -r '.[0].service.id')
|
||||
|
||||
# curl --request POST \
|
||||
# --url https://api.render.com/v1/services/$SERVICE_ID/suspend \
|
||||
# --header 'accept: application/json' \
|
||||
# --header 'authorization: Bearer ${{ secrets.RENDER_API_KEY }}'
|
||||
|
||||
# - uses: actions/github-script@v6
|
||||
# with:
|
||||
# script: |
|
||||
# try {
|
||||
# await github.rest.issues.removeLabel({
|
||||
# issue_number: context.issue.number,
|
||||
# owner: context.repo.owner,
|
||||
# repo: context.repo.repo,
|
||||
# name: 'active-cloud-review-app'
|
||||
# })
|
||||
# } catch (e) {
|
||||
# console.log(e)
|
||||
# }
|
||||
|
||||
# resume-cloud-review-app:
|
||||
# if: ${{ github.event.action == 'unlabeled' && github.event.label.name == 'suspend-cloud-review-app' }}
|
||||
# runs-on: ubuntu-latest
|
||||
|
||||
# steps:
|
||||
# - name: Resume service
|
||||
# run: |
|
||||
# export SERVICE_ID=$(curl --request GET \
|
||||
# --url 'https://api.render.com/v1/services?name=ToolJet%20PR%20%23${{ env.PR_NUMBER }}&limit=1' \
|
||||
# --header 'accept: application/json' \
|
||||
# --header 'authorization: Bearer ${{ secrets.RENDER_API_KEY }}' | \
|
||||
# jq -r '.[0].service.id')
|
||||
|
||||
# curl --request POST \
|
||||
# --url https://api.render.com/v1/services/$SERVICE_ID/resume \
|
||||
# --header 'accept: application/json' \
|
||||
# --header 'authorization: Bearer ${{ secrets.RENDER_API_KEY }}'
|
||||
|
||||
# - uses: actions/github-script@v6
|
||||
# with:
|
||||
# script: |
|
||||
# await github.rest.issues.addLabels({
|
||||
# issue_number: context.issue.number,
|
||||
# owner: context.repo.owner,
|
||||
# repo: context.repo.repo,
|
||||
# labels: ['active-cloud-review-app']
|
||||
# })
|
||||
|
||||
# try {
|
||||
# await github.rest.issues.removeLabel({
|
||||
# issue_number: context.issue.number,
|
||||
# owner: context.repo.owner,
|
||||
# repo: context.repo.repo,
|
||||
# name: 'suspend-cloud-review-app'
|
||||
# })
|
||||
# } catch (e) {
|
||||
# console.log(e)
|
||||
# }
|
||||
|
||||
redeploy-review-app:
|
||||
if: ${{ github.event.action == 'synchronize' || github.event.action == 'opened' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
|
@ -1191,7 +820,7 @@ jobs:
|
|||
return labels.data.map(l => l.name);
|
||||
|
||||
- name: Redeploy CE review app if active
|
||||
if: contains(steps.get_labels.outputs.result, 'active-ce-review-app')
|
||||
if: contains(steps.get_labels.outputs.result, 'active-ce-review-app-old')
|
||||
id: redeploy_ce
|
||||
env:
|
||||
RENDER_API_KEY: ${{ secrets.RENDER_API_KEY }}
|
||||
|
|
@ -1213,7 +842,7 @@ jobs:
|
|||
|
||||
|
||||
- name: Redeploy EE review app if active
|
||||
if: contains(steps.get_labels.outputs.result, 'active-ee-review-app')
|
||||
if: contains(steps.get_labels.outputs.result, 'active-ee-review-app-old')
|
||||
id: redeploy_ee
|
||||
env:
|
||||
RENDER_API_KEY: ${{ secrets.RENDER_API_KEY }}
|
||||
|
|
@ -1404,3 +1033,4 @@ jobs:
|
|||
} catch (e) {
|
||||
console.log(e)
|
||||
}
|
||||
|
||||
74
.github/workflows/render-suspend-labeler.yml
vendored
|
|
@ -2,7 +2,7 @@ name: Label for stale render deploys
|
|||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 0 * * *'
|
||||
- cron: '30 15 * * *'
|
||||
|
||||
permissions:
|
||||
issues: write
|
||||
|
|
@ -79,3 +79,75 @@ jobs:
|
|||
labels: ['suspend-ee-review-app']
|
||||
})
|
||||
}
|
||||
|
||||
label-stale-ee-lts-deploys:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: akshaysasidrn/stale-label-fetch@v1.1
|
||||
id: stale-label
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
stale-label: 'active-ee-lts-review-app'
|
||||
stale-time: '86400'
|
||||
type: 'pull_request'
|
||||
- name: Get stale numbers
|
||||
run: echo "Matched PR numbers - ${{ steps.stale-label.outputs.stale-numbers }}"
|
||||
- name: Add suspend label
|
||||
uses: actions/github-script@v6
|
||||
env:
|
||||
STALE_NUMBERS: ${{ steps.stale-label.outputs.stale-numbers }}
|
||||
with:
|
||||
github-token: ${{ secrets.TJ_BOT_PAT }}
|
||||
script: |
|
||||
if (!process.env.STALE_NUMBERS) return
|
||||
|
||||
const prNumbers = process.env.STALE_NUMBERS.split(",")
|
||||
|
||||
console.log(`Adding suspend labels for EE LTS: ${prNumbers}`)
|
||||
|
||||
for (const prNumber of prNumbers) {
|
||||
github.rest.issues.addLabels({
|
||||
issue_number: prNumber,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
labels: ['suspend-ee-lts-review-app']
|
||||
})
|
||||
}
|
||||
|
||||
label-stale-ee-pre-release-deploys:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: akshaysasidrn/stale-label-fetch@v1.1
|
||||
id: stale-label
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
stale-label: 'active-ee-pre-release-review-app'
|
||||
stale-time: '86400'
|
||||
type: 'pull_request'
|
||||
- name: Get stale numbers
|
||||
run: echo "Matched PR numbers - ${{ steps.stale-label.outputs.stale-numbers }}"
|
||||
- name: Add suspend label
|
||||
uses: actions/github-script@v6
|
||||
env:
|
||||
STALE_NUMBERS: ${{ steps.stale-label.outputs.stale-numbers }}
|
||||
with:
|
||||
github-token: ${{ secrets.TJ_BOT_PAT }}
|
||||
script: |
|
||||
if (!process.env.STALE_NUMBERS) return
|
||||
|
||||
const prNumbers = process.env.STALE_NUMBERS.split(",")
|
||||
|
||||
console.log(`Adding suspend labels for EE Pre-release: ${prNumbers}`)
|
||||
|
||||
for (const prNumber of prNumbers) {
|
||||
github.rest.issues.addLabels({
|
||||
issue_number: prNumber,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
labels: ['suspend-ee-pre-release-review-app']
|
||||
})
|
||||
}
|
||||
|
|
|
|||
53
.github/workflows/storybook-netlify-deploy.yml
vendored
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
name: Deploy Storybook to Netlify
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [closed]
|
||||
branches:
|
||||
- lts-3.16
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch:
|
||||
description: "Branch to deploy"
|
||||
required: true
|
||||
default: "lts-3.16"
|
||||
|
||||
jobs:
|
||||
deploy-storybook:
|
||||
if: github.event_name == 'workflow_dispatch' || github.event.pull_request.merged == true
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event_name == 'workflow_dispatch' && inputs.branch || 'lts-3.16' }}
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22.15.1"
|
||||
cache: "npm"
|
||||
cache-dependency-path: frontend/package.json
|
||||
|
||||
- name: Install dependencies
|
||||
working-directory: frontend
|
||||
run: npm install
|
||||
|
||||
- name: Build Storybook
|
||||
working-directory: frontend
|
||||
run: npx storybook build
|
||||
|
||||
- name: Deploy to Netlify
|
||||
uses: nwtgck/actions-netlify@v3
|
||||
with:
|
||||
publish-dir: frontend/storybook-static
|
||||
production-branch: lts-3.16
|
||||
production-deploy: ${{ github.event_name == 'pull_request' || inputs.branch == 'lts-3.16' }}
|
||||
deploy-message: |
|
||||
Storybook deploy from ${{ github.event_name == 'workflow_dispatch' && inputs.branch || github.ref_name }} @ ${{ github.sha }}
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
enable-commit-comment: false
|
||||
env:
|
||||
NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
|
||||
NETLIFY_SITE_ID: ${{ secrets.NETLIFY_STORYBOOK_SITE_ID }}
|
||||
459
.github/workflows/update-test-system.yml
vendored
Normal file
|
|
@ -0,0 +1,459 @@
|
|||
name: Update test system (LTS and pre-release)
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
branch_name:
|
||||
description: "Git branch to build from (required for deploy operations)"
|
||||
required: false
|
||||
default: "main"
|
||||
dockerfile_path:
|
||||
description: "Select Dockerfile (required for deploy operations)"
|
||||
required: false
|
||||
type: choice
|
||||
options:
|
||||
- ./docker/LTS/ee/ee-production.Dockerfile
|
||||
- ./docker/pre-release/ee/ee-production.Dockerfile
|
||||
docker_tag:
|
||||
description: "Docker tag suffix (e.g., pre-release-14). Leave blank if only managing env vars."
|
||||
required: false
|
||||
default: ""
|
||||
env_changes:
|
||||
description: "Environment changes (Format: ADD KEY=value, EDIT KEY=value, REMOVE KEY) - one per line. Leave blank if only deploying."
|
||||
required: false
|
||||
default: ""
|
||||
test_system:
|
||||
description: "Select test system"
|
||||
required: true
|
||||
type: choice
|
||||
options:
|
||||
- app-builder-3.16-lts
|
||||
- app-builder-pre-release
|
||||
- platform-3.16-lts
|
||||
- platform-pre-release
|
||||
- marketplace-3.16-lts
|
||||
- marketplace-pre-release
|
||||
- ai-3.16-lts
|
||||
- ai-pre-release
|
||||
|
||||
jobs:
|
||||
manage-environment:
|
||||
if: ${{ github.event.inputs.env_changes != '' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
- name: ✅ Check user authorization
|
||||
run: |
|
||||
allowed_users=(
|
||||
"${{ secrets.ALLOWED_USER1_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER2_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER3_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER4_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER5_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER6_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER7_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER8_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER9_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER10_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER11_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER12_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER13_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER14_TEST_SYSTEM }}"
|
||||
)
|
||||
current_user="${{ github.actor }}"
|
||||
authorized=false
|
||||
for user in "${allowed_users[@]}"; do
|
||||
if [[ "$current_user" == "$user" ]]; then
|
||||
authorized=true
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [[ "$authorized" == "false" ]]; then
|
||||
echo "❌ User '$current_user' is not authorized to trigger this workflow."
|
||||
exit 1
|
||||
else
|
||||
echo "✅ User '$current_user' is authorized."
|
||||
fi
|
||||
|
||||
- name: Install SSH and JQ
|
||||
run: sudo apt-get update && sudo apt-get install -y jq openssh-client
|
||||
|
||||
- name: Determine target host
|
||||
id: vmhost
|
||||
run: |
|
||||
test_system="${{ github.event.inputs.test_system }}"
|
||||
vm_host=$(echo '${{ secrets.VM_HOST_MAP_JSON }}' | jq -r --arg sys "$test_system" '.[$sys]')
|
||||
if [[ -z "$vm_host" || "$vm_host" == "null" ]]; then
|
||||
echo "VM mapping not found for $test_system"
|
||||
exit 1
|
||||
fi
|
||||
echo "host=$vm_host" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Update environment variables
|
||||
run: |
|
||||
echo "$SSH_KEY" > key.pem
|
||||
chmod 600 key.pem
|
||||
TARGET_SYSTEM="${{ github.event.inputs.test_system }}"
|
||||
ENV_CHANGES="${{ github.event.inputs.env_changes }}"
|
||||
ssh -o StrictHostKeyChecking=no -o LogLevel=ERROR -i key.pem $SSH_USER@${{ steps.vmhost.outputs.host }} << EOF
|
||||
set -e
|
||||
TARGET_SYSTEM="$TARGET_SYSTEM"
|
||||
ENV_CHANGES="$ENV_CHANGES"
|
||||
cd ~
|
||||
echo "📁 Finding correct deployment directory"
|
||||
if [[ "\$TARGET_SYSTEM" == *-3.16-lts ]]; then
|
||||
echo "Detected LTS system: \$TARGET_SYSTEM"
|
||||
echo "🔍 Searching for LTS directories..."
|
||||
LTS_DIRS=\$(ls -1d ./*-lts 2>/dev/null | grep -E '[0-9]+\.[0-9]+' | sed 's|^\./||' | sort -V; \\
|
||||
ls -1d ./*-lts 2>/dev/null | grep -Ev '[0-9]+\.[0-9]+' | sed 's|^\./||' | sort)
|
||||
if [[ -z "\$LTS_DIRS" ]]; then
|
||||
echo "❌ No LTS directories found!"
|
||||
echo "Available directories:"
|
||||
ls -la | grep "^d"
|
||||
exit 1
|
||||
fi
|
||||
echo "Available LTS directories:"
|
||||
echo "\$LTS_DIRS"
|
||||
SELECTED_LTS_DIR=\$(echo "\$LTS_DIRS" | head -n 1)
|
||||
echo "📂 Selected LTS directory: \$SELECTED_LTS_DIR"
|
||||
cd "\$SELECTED_LTS_DIR"
|
||||
echo "✅ Now in directory: \$(pwd)"
|
||||
else
|
||||
echo "Detected pre-release system: \$TARGET_SYSTEM"
|
||||
echo "📂 Working in home directory: \$(pwd)"
|
||||
fi
|
||||
echo ""
|
||||
echo "🔧 PROCESSING ENVIRONMENT VARIABLE CHANGES"
|
||||
BACKUP_FILE=".env.backup.\$(date +%s)"
|
||||
sudo cp .env "\$BACKUP_FILE"
|
||||
echo "✅ Backup created: \$BACKUP_FILE"
|
||||
PROTECTED_VARS="TOOLJET_HOST|LOCKBOX_MASTER_KEY|SECRET_KEY_BASE|ORM_LOGGING|PG_DB|PG_USER|PG_HOST|PG_PASS|TOOLJET_DB|TOOLJET_DB_USER|TOOLJET_DB_HOST|TOOLJET_DB_PASS|PGRST_DB_URI|PGRST_HOST|PGRST_JWT_SECRET|PGRST_SERVER_PORT|REDIS_HOST|REDIS_PORT|REDIS_USER|REDIS_PASSWORD|OLD_IMAGE|TOOLJET_IMAGE"
|
||||
ADD_SUCCESS=0
|
||||
ADD_FAIL=0
|
||||
EDIT_SUCCESS=0
|
||||
EDIT_FAIL=0
|
||||
REMOVE_SUCCESS=0
|
||||
REMOVE_FAIL=0
|
||||
while IFS= read -r line; do
|
||||
line=\$(echo "\$line" | xargs)
|
||||
[[ -z "\$line" || "\$line" =~ ^# ]] && continue
|
||||
if [[ "\$line" =~ ^ADD[[:space:]]+([^=]+)=(.*)$ ]]; then
|
||||
KEY="\${BASH_REMATCH[1]}"
|
||||
VALUE="\${BASH_REMATCH[2]}"
|
||||
if echo "\$KEY" | grep -qE "\$PROTECTED_VARS"; then
|
||||
echo "❌ FAILED: Cannot add protected variable '\$KEY'"
|
||||
ADD_FAIL=\$((ADD_FAIL + 1))
|
||||
continue
|
||||
fi
|
||||
if grep -q "^\${KEY}=" .env; then
|
||||
echo "⚠️ SKIPPED: Variable '\$KEY' already exists (use EDIT)"
|
||||
continue
|
||||
else
|
||||
if echo "\${KEY}=\${VALUE}" | sudo tee -a .env > /dev/null; then
|
||||
echo "✅ SUCCESS: Added '\$KEY'"
|
||||
ADD_SUCCESS=\$((ADD_SUCCESS + 1))
|
||||
else
|
||||
echo "❌ FAILED: Could not add '\$KEY'"
|
||||
ADD_FAIL=\$((ADD_FAIL + 1))
|
||||
fi
|
||||
fi
|
||||
elif [[ "\$line" =~ ^EDIT[[:space:]]+([^=]+)=(.*)$ ]]; then
|
||||
KEY="\${BASH_REMATCH[1]}"
|
||||
VALUE="\${BASH_REMATCH[2]}"
|
||||
if echo "\$KEY" | grep -qE "\$PROTECTED_VARS"; then
|
||||
echo "❌ FAILED: Cannot edit protected variable '\$KEY'"
|
||||
EDIT_FAIL=\$((EDIT_FAIL + 1))
|
||||
continue
|
||||
fi
|
||||
if grep -q "^\${KEY}=" .env; then
|
||||
if sudo sed -i "s|^\${KEY}=.*|\${KEY}=\${VALUE}|" .env; then
|
||||
echo "✅ SUCCESS: Edited '\$KEY'"
|
||||
EDIT_SUCCESS=\$((EDIT_SUCCESS + 1))
|
||||
else
|
||||
echo "❌ FAILED: Could not edit '\$KEY'"
|
||||
EDIT_FAIL=\$((EDIT_FAIL + 1))
|
||||
fi
|
||||
else
|
||||
echo "⚠️ SKIPPED: Variable '\$KEY' not found (use ADD)"
|
||||
continue
|
||||
fi
|
||||
elif [[ "\$line" =~ ^REMOVE[[:space:]]+([^=[:space:]]+)$ ]]; then
|
||||
KEY="\${BASH_REMATCH[1]}"
|
||||
if echo "\$KEY" | grep -qE "\$PROTECTED_VARS"; then
|
||||
echo "❌ FAILED: Cannot remove protected variable '\$KEY'"
|
||||
REMOVE_FAIL=\$((REMOVE_FAIL + 1))
|
||||
continue
|
||||
fi
|
||||
if grep -q "^\${KEY}=" .env; then
|
||||
if sudo sed -i "/^\${KEY}=/d" .env; then
|
||||
echo "✅ SUCCESS: Removed '\$KEY'"
|
||||
REMOVE_SUCCESS=\$((REMOVE_SUCCESS + 1))
|
||||
else
|
||||
echo "❌ FAILED: Could not remove '\$KEY'"
|
||||
REMOVE_FAIL=\$((REMOVE_FAIL + 1))
|
||||
fi
|
||||
else
|
||||
echo "⚠️ SKIPPED: Variable '\$KEY' not found"
|
||||
continue
|
||||
fi
|
||||
else
|
||||
echo "⚠️ INVALID FORMAT: \$line"
|
||||
fi
|
||||
done <<< "\$ENV_CHANGES"
|
||||
echo ""
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "📊 SUMMARY"
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "ADD: ✅ \$ADD_SUCCESS succeeded | ❌ \$ADD_FAIL failed"
|
||||
echo "EDIT: ✅ \$EDIT_SUCCESS succeeded | ❌ \$EDIT_FAIL failed"
|
||||
echo "REMOVE: ✅ \$REMOVE_SUCCESS succeeded | ❌ \$REMOVE_FAIL failed"
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
TOTAL_SUCCESS=\$((ADD_SUCCESS + EDIT_SUCCESS + REMOVE_SUCCESS))
|
||||
TOTAL_FAIL=\$((ADD_FAIL + EDIT_FAIL + REMOVE_FAIL))
|
||||
if [ \$TOTAL_SUCCESS -eq 0 ]; then
|
||||
echo "⚠️ No changes were applied"
|
||||
sudo cp "\$BACKUP_FILE" .env
|
||||
exit 1
|
||||
fi
|
||||
echo ""
|
||||
echo "🔄 Restarting containers..."
|
||||
sudo docker-compose down
|
||||
sudo docker-compose up -d
|
||||
echo "⏳ Waiting for containers (timeout: 120s)..."
|
||||
SUCCESS_FOUND=false
|
||||
TIMEOUT=120
|
||||
ELAPSED=0
|
||||
while [ \$ELAPSED -lt \$TIMEOUT ]; do
|
||||
if sudo docker-compose logs 2>/dev/null | grep -qE "🚀 TOOLJET APPLICATION STARTED SUCCESSFULLY|Ready to use at http://localhost:82 🚀|Ready to use at http://localhost:80"; then
|
||||
SUCCESS_FOUND=true
|
||||
break
|
||||
fi
|
||||
sleep 10
|
||||
ELAPSED=\$((ELAPSED + 10))
|
||||
done
|
||||
if [ "\$SUCCESS_FOUND" = false ]; then
|
||||
echo "❌ Container startup failed"
|
||||
echo "🔄 Rolling back..."
|
||||
sudo cp "\$BACKUP_FILE" .env
|
||||
sudo docker-compose down
|
||||
sudo docker-compose up -d
|
||||
echo "✅ Rollback completed"
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Environment variables updated successfully!"
|
||||
echo "🧹 Cleaning up old backups..."
|
||||
ls -t .env.backup.* 2>/dev/null | tail -n +2 | xargs -r sudo rm -f
|
||||
EOF
|
||||
env:
|
||||
SSH_USER: ${{ secrets.AZURE_VM_USER }}
|
||||
SSH_KEY: ${{ secrets.AZURE_VM_KEY }}
|
||||
|
||||
build-and-deploy:
|
||||
if: ${{ !cancelled() && github.event.inputs.docker_tag != '' }}
|
||||
needs: manage-environment
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Validate required inputs
|
||||
run: |
|
||||
if [[ -z "${{ github.event.inputs.branch_name }}" ]]; then
|
||||
echo "❌ Error: branch_name is required"
|
||||
exit 1
|
||||
fi
|
||||
if [[ -z "${{ github.event.inputs.dockerfile_path }}" ]]; then
|
||||
echo "❌ Error: dockerfile_path is required"
|
||||
exit 1
|
||||
fi
|
||||
- name: Free up disk space
|
||||
run: |
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo rm -rf /usr/local/share/boost
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
sudo docker system prune -af
|
||||
sudo apt-get clean
|
||||
df -h
|
||||
- name: ✅ Check user authorization
|
||||
run: |
|
||||
allowed_users=(
|
||||
"${{ secrets.ALLOWED_USER1_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER2_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER3_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER4_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER5_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER6_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER7_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER8_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER9_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER10_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER11_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER12_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER13_TEST_SYSTEM }}"
|
||||
"${{ secrets.ALLOWED_USER14_TEST_SYSTEM }}"
|
||||
)
|
||||
current_user="${{ github.actor }}"
|
||||
authorized=false
|
||||
for user in "${allowed_users[@]}"; do
|
||||
if [[ "$current_user" == "$user" ]]; then
|
||||
authorized=true
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [[ "$authorized" == "false" ]]; then
|
||||
echo "❌ User '$current_user' is not authorized to trigger this workflow."
|
||||
exit 1
|
||||
else
|
||||
echo "✅ User '$current_user' is authorized."
|
||||
fi
|
||||
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: ${{ github.event.inputs.branch_name }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PAT }}
|
||||
|
||||
- name: Generate full Docker tag
|
||||
id: taggen
|
||||
run: |
|
||||
input_tag="${{ github.event.inputs.docker_tag }}"
|
||||
if [[ "$input_tag" == *"/"* ]]; then
|
||||
echo "tag=$input_tag" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "tag=tooljet/tj-osv:$input_tag" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Build and Push Docker image
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
file: ${{ github.event.inputs.dockerfile_path }}
|
||||
push: true
|
||||
tags: ${{ steps.taggen.outputs.tag }}
|
||||
platforms: linux/amd64
|
||||
build-args: |
|
||||
CUSTOM_GITHUB_TOKEN=${{ secrets.CUSTOM_GITHUB_TOKEN }}
|
||||
BRANCH_NAME=${{ github.event.inputs.branch_name }}
|
||||
|
||||
- name: Show the full Docker tag
|
||||
run: echo "✅ Docker image built and pushed:${{ steps.taggen.outputs.tag }}"
|
||||
|
||||
- name: Install SSH and JQ
|
||||
run: sudo apt-get update && sudo apt-get install -y jq openssh-client
|
||||
|
||||
- name: Determine target host
|
||||
id: vmhost
|
||||
run: |
|
||||
test_system="${{ github.event.inputs.test_system }}"
|
||||
vm_host=$(echo '${{ secrets.VM_HOST_MAP_JSON }}' | jq -r --arg sys "$test_system" '.[$sys]')
|
||||
if [[ -z "$vm_host" || "$vm_host" == "null" ]]; then
|
||||
echo "VM mapping not found for $test_system"
|
||||
exit 1
|
||||
fi
|
||||
echo "host=$vm_host" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Deploy to target environment
|
||||
run: |
|
||||
echo "$SSH_KEY" > key.pem
|
||||
chmod 600 key.pem
|
||||
IMAGE_TAG="${{ steps.taggen.outputs.tag }}"
|
||||
TARGET_SYSTEM="${{ github.event.inputs.test_system }}"
|
||||
ssh -o StrictHostKeyChecking=no -o LogLevel=ERROR -i key.pem $SSH_USER@${{ steps.vmhost.outputs.host }} << EOF
|
||||
set -e
|
||||
IMAGE_TAG="$IMAGE_TAG"
|
||||
TARGET_SYSTEM="$TARGET_SYSTEM"
|
||||
cd ~
|
||||
echo "📁 Finding correct deployment directory"
|
||||
if [[ "\$TARGET_SYSTEM" == *-3.16-lts ]]; then
|
||||
echo "Detected LTS system: \$TARGET_SYSTEM"
|
||||
echo "🔍 Searching for LTS directories..."
|
||||
LTS_DIRS=\$(ls -1d ./*-lts 2>/dev/null | grep -E '[0-9]+\.[0-9]+' | sed 's|^\./||' | sort -V; \\
|
||||
ls -1d ./*-lts 2>/dev/null | grep -Ev '[0-9]+\.[0-9]+' | sed 's|^\./||' | sort)
|
||||
if [[ -z "\$LTS_DIRS" ]]; then
|
||||
echo "❌ No LTS directories found!"
|
||||
echo "Available directories:"
|
||||
ls -la | grep "^d"
|
||||
exit 1
|
||||
fi
|
||||
echo "Available LTS directories:"
|
||||
echo "\$LTS_DIRS"
|
||||
SELECTED_LTS_DIR=\$(echo "\$LTS_DIRS" | head -n 1)
|
||||
echo "📂 Selected LTS directory: \$SELECTED_LTS_DIR"
|
||||
cd "\$SELECTED_LTS_DIR"
|
||||
echo "✅ Now in directory: \$(pwd)"
|
||||
else
|
||||
echo "Detected pre-release system: \$TARGET_SYSTEM"
|
||||
echo "📂 Moving to target directory: \$TARGET_SYSTEM"
|
||||
cd ~
|
||||
echo "✅ Now in directory: \$(pwd)"
|
||||
fi
|
||||
echo "🔐 Docker login"
|
||||
echo "${{ secrets.DOCKER_PAT }}" | sudo docker login --username "${{ secrets.DOCKER_USERNAME }}" --password-stdin
|
||||
echo "current image"
|
||||
cat .env | grep TOOLJET_IMAGE
|
||||
echo "📦 Reading current TOOLJET_IMAGE from .env"
|
||||
CURRENT_IMAGE=\$(grep '^TOOLJET_IMAGE=' .env | cut -d '=' -f2- | tr -d '"' | tr -d "'")
|
||||
echo "Found CURRENT_IMAGE: \$CURRENT_IMAGE"
|
||||
echo "🛑 Stopping containers"
|
||||
sudo docker-compose down
|
||||
echo "📝 Updating .env with new image"
|
||||
sudo sed -i "s|^TOOLJET_IMAGE=.*|TOOLJET_IMAGE=\$IMAGE_TAG|" .env
|
||||
echo "📥 Pulling new image: \$IMAGE_TAG"
|
||||
if [ -z "\$IMAGE_TAG" ]; then
|
||||
echo "❌ IMAGE_TAG is empty!"
|
||||
exit 1
|
||||
fi
|
||||
sudo docker pull "\$IMAGE_TAG"
|
||||
echo "🚀 Starting container in background"
|
||||
sudo docker-compose up -d
|
||||
echo "⏳ Waiting for ToolJet to start (timeout: 300 seconds)..."
|
||||
SUCCESS_FOUND=false
|
||||
TIMEOUT=300
|
||||
ELAPSED=0
|
||||
while [ \$ELAPSED -lt \$TIMEOUT ]; do
|
||||
if sudo docker-compose logs 2>/dev/null | grep -qE "🚀 TOOLJET APPLICATION STARTED SUCCESSFULLY|Ready to use at http://localhost:82 🚀|Ready to use at http://localhost:80"; then
|
||||
echo "✅ Found success message in logs!"
|
||||
SUCCESS_FOUND=true
|
||||
break
|
||||
fi
|
||||
echo "⏳ Still waiting... (\${ELAPSED}s elapsed)"
|
||||
sleep 10
|
||||
ELAPSED=\$((ELAPSED + 10))
|
||||
done
|
||||
if [ "\$SUCCESS_FOUND" = false ]; then
|
||||
echo "❌ Timeout reached without finding success logs"
|
||||
echo "📄 Showing current logs for troubleshooting..."
|
||||
sudo docker-compose logs --tail=50
|
||||
echo ""
|
||||
echo "=== CONTAINER STATUS ==="
|
||||
sudo docker-compose ps
|
||||
echo ""
|
||||
echo "🛑 Starting rollback process..."
|
||||
sudo docker-compose down
|
||||
echo "🔄 Reverting to previous image: \$CURRENT_IMAGE"
|
||||
sudo sed -i "s|^TOOLJET_IMAGE=.*|TOOLJET_IMAGE=\$CURRENT_IMAGE|" .env
|
||||
echo "🔄 Starting previous image..."
|
||||
sudo docker-compose up -d
|
||||
echo "✅ Rollback completed!"
|
||||
exit 1
|
||||
fi
|
||||
echo "✅ Deployment successful!"
|
||||
echo "📌 Storing successful deployment info in .env"
|
||||
sudo sed -i "/^OLD_IMAGE=/d" .env
|
||||
echo "OLD_IMAGE=\$CURRENT_IMAGE" | sudo tee -a .env
|
||||
echo "📄 Final application logs:"
|
||||
sudo docker-compose logs --tail=50
|
||||
echo "🧹 Pruning old Docker images"
|
||||
sudo docker image prune -a -f
|
||||
EOF
|
||||
env:
|
||||
SSH_USER: ${{ secrets.AZURE_VM_USER }}
|
||||
SSH_KEY: ${{ secrets.AZURE_VM_KEY }}
|
||||
|
||||
19
.github/workflows/updating-dockertag.yml
vendored
|
|
@ -3,7 +3,7 @@ name: Update LTS Table
|
|||
on:
|
||||
workflow_dispatch: # manually triggered
|
||||
schedule:
|
||||
- cron: '30 5 * * 1,4'
|
||||
- cron: '0 9 * * 1,3,5' # 9am UTC — Monday, Wednesday, Friday
|
||||
|
||||
jobs:
|
||||
update-lts:
|
||||
|
|
@ -14,7 +14,7 @@ jobs:
|
|||
- name: ⬇️ Checkout repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
ref: develop
|
||||
ref: documentation
|
||||
|
||||
- name: 🛠 Setup Git
|
||||
run: |
|
||||
|
|
@ -29,7 +29,19 @@ jobs:
|
|||
- name: 🧠 Run regenerate_lts_table.sh
|
||||
run: bash ./docs/regenerate_lts_table.sh
|
||||
|
||||
- name: 🔍 Check for changes
|
||||
id: changes
|
||||
run: |
|
||||
if git diff --quiet; then
|
||||
echo "changed=false" >> $GITHUB_OUTPUT
|
||||
echo "ℹ️ No new patch found — skipping PR."
|
||||
else
|
||||
echo "changed=true" >> $GITHUB_OUTPUT
|
||||
echo "✅ New patch detected — creating PR."
|
||||
fi
|
||||
|
||||
- name: 📦 Create Pull Request
|
||||
if: steps.changes.outputs.changed == 'true'
|
||||
id: cpr
|
||||
uses: peter-evans/create-pull-request@v6
|
||||
with:
|
||||
|
|
@ -37,9 +49,10 @@ jobs:
|
|||
branch: auto/update-lts-${{ github.run_id }}
|
||||
title: "docs: update LTS version table"
|
||||
body: "Automated update of the LTS version table from DockerHub."
|
||||
base: develop
|
||||
base: documentation
|
||||
|
||||
- name: 🤖 Auto-merge the PR (fallback if needed)
|
||||
if: steps.changes.outputs.changed == 'true'
|
||||
run: |
|
||||
echo "ℹ️ Attempting auto-merge of PR #${PR_NUMBER}..."
|
||||
gh pr merge --squash --auto "$PR_NUMBER" --repo ToolJet/ToolJet || {
|
||||
|
|
|
|||
1456
.github/workflows/vulnerability-ci.yml
vendored
100
README.md
|
|
@ -1,8 +1,8 @@
|
|||
ToolJet is an **open-source low-code framework** to build and deploy internal tools with minimal engineering effort. ToolJet's drag-and-drop frontend builder allows you to create complex, responsive frontends within minutes. Additionally, you can integrate various data sources, including databases like PostgreSQL, MongoDB, and Elasticsearch; API endpoints with OpenAPI spec and OAuth2 support; SaaS tools such as Stripe, Slack, Google Sheets, Airtable, and Notion; as well as object storage services like S3, GCS, and Minio, to fetch and write data.
|
||||
ToolJet is the open-source foundation of ToolJet AI - the AI-native platform for building and deploying internal tools, workflows and AI agents. The community edition provides a powerful visual builder, drag-and-drop UI, and integrations with databases, APIs, SaaS apps, and object storage. For AI-powered UI generation, query building, debugging, and enterprise features, see ToolJet AI.
|
||||
|
||||
:star: If you find ToolJet useful, please consider giving us a star on GitHub! Your support helps us continue to innovate and deliver exciting features.
|
||||
|
||||

|
||||

|
||||

|
||||
[](https://github.com/ToolJet/ToolJet/issues)
|
||||
[](https://github.com/ToolJet/ToolJet/stargazers)
|
||||
|
|
@ -14,39 +14,45 @@ ToolJet is an **open-source low-code framework** to build and deploy internal to
|
|||
[](https://twitter.com/ToolJet)
|
||||
|
||||
<p align="center">
|
||||
<img src="https://user-images.githubusercontent.com/7828962/211444352-4d6d2e4a-13c9-4980-9e16-4aed4af9811b.png" alt="Tooljet dashboard showing inventory and orders"/>
|
||||
<img src="docs/static/img/readme/banner.png" alt="Tooljet dashboard showing inventory and orders"/>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<img src="https://github.com/ToolJet/ToolJet/assets/25361949/0e711f3a-edb7-496b-8833-107de3826933"/>
|
||||
<img src="docs/static/img/readme/flowchart.png"/>
|
||||
</p>
|
||||
|
||||
## All features
|
||||
## Features
|
||||
|
||||
- **Visual App Builder:** 45+ built-in responsive components, including Tables, Charts, Lists, Forms, and Progress Bars.
|
||||
- **ToolJet Database:** Built-in no-code database.
|
||||
- **Multi-Page:** Build an application with multiple pages.
|
||||
- **Multiplayer editing:** Allows simultaneous app building by multiple developers.
|
||||
- **50+ data sources:** Integrate with external databases, cloud storage, and APIs.
|
||||
- **Desktop & mobile:** Customize layout widths to fit various screen sizes.
|
||||
- **Self-host:** Supports Docker, Kubernetes, AWS EC2, Google Cloud Run, and more.
|
||||
- **Collaborate:** Add comments anywhere on the canvas and tag your team members.
|
||||
- **Extend with plugins:** Use our [command-line tool](https://www.npmjs.com/package/@tooljet/cli) to easily bootstrap new connectors.
|
||||
- **Version control:** Manage multiple application versions with a structured release cycle.
|
||||
- **Run JS & Python code:** Execute custom JavaScript and Python snippets.
|
||||
- **Granular access control:** Set permissions at both group and app levels.
|
||||
- **Low-code:** Use JS code almost anywhere within the builder, such as setting text color based on status with
|
||||
`status === 'success' ? 'green' : 'red'`.
|
||||
- **No-code query editors:** Query Editors are available for all supported data sources.
|
||||
- **Join and transform data:** Transform query results using JavaScript or Python code.
|
||||
- **Secure:** All the credentials are securely encrypted using `aes-256-gcm`.
|
||||
- **Data Privacy:** ToolJet serves solely as a proxy and does not store data.
|
||||
- **SSO:** Supports multiple Single Sign-On providers.
|
||||
### Community Edition (CE)
|
||||
- **Visual App Builder:** 60+ responsive components (Tables, Charts, Forms, Lists, Progress Bars, and more).
|
||||
- **ToolJet Database:** Built-in no-code database.
|
||||
- **Multi-page Apps & Multiplayer Editing:** Build complex apps collaboratively.
|
||||
- **80+ Data Sources:** Connect to databases, APIs, cloud storage, and SaaS tools.
|
||||
- **Flexible Deployment:** Self-host with Docker, Kubernetes, AWS, GCP, Azure, and more.
|
||||
- **Collaboration Tools:** Inline comments, mentions, and granular access control.
|
||||
- **Extensibility:** Create plugins and connectors with the [ToolJet CLI](https://www.npmjs.com/package/@tooljet/cli).
|
||||
- **Code Anywhere:** Run JavaScript and Python inside your apps.
|
||||
- **Secure by Design:** AES-256-GCM encryption, proxy-only data flow, SSO support.
|
||||
|
||||
### ToolJet AI (Enterprise)
|
||||
Everything in CE, plus:
|
||||
- **AI App Generation:** Create apps instantly from natural language prompts.
|
||||
- **AI Query Builder:** Generate and transform queries with AI assistance.
|
||||
- **AI Debugging:** Identify and fix issues with one click.
|
||||
- **Agent Builder:** Create intelligent agents to automate workflows and orchestrate processes.
|
||||
- **Enterprise-grade Security & Compliance:** SOC 2 and GDPR readiness, audit logs, and advanced access control.
|
||||
- **User Management:** Role-based access (RBAC), custom groups, and granular app/data permissions.
|
||||
- **Multi-environment Management:** Seamless dev/stage/prod environments.
|
||||
- **GitSync & CI/CD:** Integrate with GitHub/GitLab for version control and streamlined deployments.
|
||||
- **Branding & Customization:** White-labeling, and custom theming for organizational branding.
|
||||
- **Fine-Grained Access Control:** Secure data and actions at the row, component, page, and query levels.
|
||||
- **Embedded Apps:** Embed ToolJet apps securely within other applications or portals.
|
||||
- **Enterprise Support:** SLAs, priority bug fixes, and onboarding assistance.
|
||||
|
||||
<hr>
|
||||
|
||||
## Quickstart
|
||||
The easiest way to get started with ToolJet is by creating a [ToolJet Cloud](https://tooljet.ai) account. ToolJet Cloud offers a hosted solution of ToolJet. If you want to self-host ToolJet, kindly proceed to [deployment documentation](https://docs.tooljet.ai/docs/setup/).
|
||||
The easiest way to get started with ToolJet is by creating a [ToolJet Cloud](https://tooljet.com) account. ToolJet Cloud offers a hosted solution of ToolJet. If you want to self-host ToolJet, kindly proceed to [deployment documentation](https://docs.tooljet.com/docs/setup/).
|
||||
|
||||
### Try using Docker
|
||||
Want to give ToolJet a quick spin on your local machine? You can run the following command from your terminal to have ToolJet up and running right away.
|
||||
|
|
@ -66,35 +72,35 @@ docker run \
|
|||
|
||||
## Tutorials and examples
|
||||
|
||||
[Time Tracker Application](https://docs.tooljet.ai/docs/#quickstart-guide)<br>
|
||||
[Build your own CMS using low-code](https://blog.tooljet.ai/build-cms-using-lowcode-and-mongodb/)<br>
|
||||
[AWS S3 Browser](https://blog.tooljet.ai/build-an-aws-s3-broswer-with-tooljet/)<br>
|
||||
[Time Tracker Application](https://docs.tooljet.com/docs/#quickstart-guide)<br>
|
||||
[Build your own CMS using low-code](https://blog.tooljet.com/build-cms-using-lowcode-and-mongodb/)<br>
|
||||
[AWS S3 Browser](https://blog.tooljet.com/build-an-aws-s3-broswer-with-tooljet/)<br>
|
||||
|
||||
## Documentation
|
||||
Documentation is available at https://docs.tooljet.ai.
|
||||
Documentation is available at https://docs.tooljet.com.
|
||||
|
||||
- [Getting Started](https://docs.tooljet.ai)<br>
|
||||
- [Data source Reference](https://docs.tooljet.ai/docs/data-sources/airtable/)<br>
|
||||
- [Component Reference](https://docs.tooljet.ai/docs/widgets/button)
|
||||
- [Getting Started](https://docs.tooljet.com)<br>
|
||||
- [Data source Reference](https://docs.tooljet.com/docs/data-sources/airtable/)<br>
|
||||
- [Component Reference](https://docs.tooljet.com/docs/widgets/button)
|
||||
|
||||
## Self-hosted
|
||||
You can use ToolJet Cloud for a fully managed solution. If you want to self-host ToolJet, we have guides on deploying ToolJet on Kubernetes, AWS EC2, Docker, and more.
|
||||
|
||||
| Provider | Documentation |
|
||||
| :------------- | :------------- |
|
||||
| Digital Ocean | [Link](https://docs.tooljet.ai/docs/setup/digitalocean) |
|
||||
| Docker | [Link](https://docs.tooljet.ai/docs/setup/docker) |
|
||||
| AWS EC2 | [Link](https://docs.tooljet.ai/docs/setup/ec2) |
|
||||
| AWS ECS | [Link](https://docs.tooljet.ai/docs/setup/ecs) |
|
||||
| OpenShift | [Link](https://docs.tooljet.ai/docs/setup/openshift) |
|
||||
| Helm | [Link](https://docs.tooljet.ai/docs/setup/helm) |
|
||||
| AWS EKS (Kubernetes) | [Link](https://docs.tooljet.ai/docs/setup/kubernetes) |
|
||||
| GCP GKE (Kubernetes) | [Link](https://docs.tooljet.ai/docs/setup/kubernetes-gke) |
|
||||
| Azure AKS (Kubernetes) | [Link](https://docs.tooljet.ai/docs/setup/kubernetes-aks) |
|
||||
| Azure Container | [Link](https://docs.tooljet.ai/docs/setup/azure-container) |
|
||||
| Google Cloud Run | [Link](https://docs.tooljet.ai/docs/setup/google-cloud-run) |
|
||||
| Deploying ToolJet client | [Link](https://docs.tooljet.ai/docs/setup/client) |
|
||||
| Deploying ToolJet on a Subpath | [Link](https://docs.tooljet.ai/docs/setup/tooljet-subpath/) |
|
||||
| Digital Ocean | [Link](https://docs.tooljet.com/docs/setup/digitalocean) |
|
||||
| Docker | [Link](https://docs.tooljet.com/docs/setup/docker) |
|
||||
| AWS EC2 | [Link](https://docs.tooljet.com/docs/setup/ec2) |
|
||||
| AWS ECS | [Link](https://docs.tooljet.com/docs/setup/ecs) |
|
||||
| OpenShift | [Link](https://docs.tooljet.com/docs/setup/openshift) |
|
||||
| Helm | [Link](https://docs.tooljet.com/docs/setup/helm) |
|
||||
| AWS EKS (Kubernetes) | [Link](https://docs.tooljet.com/docs/setup/kubernetes) |
|
||||
| GCP GKE (Kubernetes) | [Link](https://docs.tooljet.com/docs/setup/kubernetes-gke) |
|
||||
| Azure AKS (Kubernetes) | [Link](https://docs.tooljet.com/docs/setup/kubernetes-aks) |
|
||||
| Azure Container | [Link](https://docs.tooljet.com/docs/setup/azure-container) |
|
||||
| Google Cloud Run | [Link](https://docs.tooljet.com/docs/setup/google-cloud-run) |
|
||||
| Deploying ToolJet client | [Link](https://docs.tooljet.com/docs/setup/client) |
|
||||
| Deploying ToolJet on a Subpath | [Link](https://docs.tooljet.com/docs/setup/tooljet-subpath/) |
|
||||
|
||||
## Marketplace
|
||||
ToolJet can now be found on both AWS and Azure Marketplaces, making it simpler than ever to access and deploy our app-building platform.
|
||||
|
|
@ -102,9 +108,9 @@ ToolJet can now be found on both AWS and Azure Marketplaces, making it simpler t
|
|||
Find ToolJet on AWS Marketplace [here](https://aws.amazon.com/marketplace/pp/prodview-fxjto27jkpqfg?sr=0-1&ref_=beagle&applicationId=AWSMPContessa) and explore seamless integration on Azure Marketplace [here](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/tooljetsolutioninc1679496832216.tooljet?tab=Overview).
|
||||
|
||||
## Community support
|
||||
For general help using ToolJet, please refer to the official [documentation](https://docs.tooljet.ai/docs/). For additional help, you can use one of these channels to ask a question:
|
||||
For general help using ToolJet, please refer to the official [documentation](https://docs.tooljet.com/docs/). For additional help, you can use one of these channels to ask a question:
|
||||
|
||||
- [Slack](https://tooljet.ai/slack) - Discussions with the community and the team.
|
||||
- [Slack](https://tooljet.com/slack) - Discussions with the community and the team.
|
||||
- [GitHub](https://github.com/ToolJet/ToolJet/issues) - For bug reports and feature requests.
|
||||
- [𝕏 (Twitter)](https://twitter.com/ToolJet) - Get the product updates quickly.
|
||||
|
||||
|
|
|
|||
4
cypress-tests/.gitignore
vendored
|
|
@ -5,4 +5,6 @@
|
|||
/cypress/downloads
|
||||
/cypress/videos
|
||||
/coverage
|
||||
/.nyc_output
|
||||
/.nyc_output
|
||||
/.claude
|
||||
/.webpack_cache
|
||||
49
cypress-tests/cypress-gitsync.config.js
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
const { defineConfig } = require("cypress");
|
||||
|
||||
module.exports = defineConfig({
|
||||
execTimeout: 1800000,
|
||||
defaultCommandTimeout: 30000,
|
||||
requestTimeout: 30000,
|
||||
pageLoadTimeout: 30000,
|
||||
responseTimeout: 30000,
|
||||
viewportWidth: 1440,
|
||||
viewportHeight: 960,
|
||||
chromeWebSecurity: false,
|
||||
projectId: "sk3oji",
|
||||
|
||||
e2e: {
|
||||
setupNodeEvents(on, config) {
|
||||
require("./cypress/config/tasks")(on);
|
||||
require("./cypress/config/browserConfig")(on);
|
||||
|
||||
return require("./cypress/plugins/index.js")(on, config);
|
||||
},
|
||||
|
||||
baseUrl: "http://localhost:3000", // Default for local development (GitHub workflow overrides this)
|
||||
specPattern: [
|
||||
"cypress/e2e/happyPath/platform/firstUser/firstUserOnboarding.cy.js",
|
||||
"cypress/e2e/happyPath/platform/eeTestcases/licensing/updateLicense.cy.js",
|
||||
"cypress/e2e/happyPath/platform/eeTestcases/gitSync/**/*.cy.js",
|
||||
],
|
||||
|
||||
testIsolation: true,
|
||||
redirectionLimit: 10,
|
||||
|
||||
numTestsKeptInMemory: 0,
|
||||
experimentalMemoryManagement: true,
|
||||
|
||||
experimentalRunAllSpecs: true,
|
||||
experimentalModifyObstructiveThirdPartyCode: true,
|
||||
experimentalOriginDependencies: true,
|
||||
|
||||
downloadsFolder: "cypress/downloads",
|
||||
trashAssetsBeforeRuns: true,
|
||||
video: false,
|
||||
videoUploadOnPasses: false,
|
||||
screenshotOnRunFailure: true,
|
||||
screenshotsFolder: "cypress/screenshots",
|
||||
|
||||
coverage: false,
|
||||
codeCoverageTasksRegistered: false,
|
||||
},
|
||||
});
|
||||
|
|
@ -1,75 +0,0 @@
|
|||
# Create .env from this example file and replace values for the environment.
|
||||
# The application expects a separate .env.test for test environment configuration
|
||||
# Get detailed information about each variable here: https://docs.tooljet.com/docs/setup/env-vars
|
||||
|
||||
TOOLJET_HOST=http://localhost:8082
|
||||
LOCKBOX_MASTER_KEY= # replace_with_lockbox_master_key
|
||||
SECRET_KEY_BASE= # replace_with_secret_key_base
|
||||
|
||||
# DATABASE CONFIG
|
||||
ORM_LOGGING=all
|
||||
PG_DB=tooljet_production
|
||||
PG_USER=postgres
|
||||
PG_HOST=postgresql
|
||||
PG_PASS= # postgres database password
|
||||
|
||||
# The above postgres values is set to its default state. If necessary, kindly modify it according to your personal preference.
|
||||
|
||||
# TOOLJET DATABASE
|
||||
TOOLJET_DB=tooljet_db
|
||||
TOOLJET_DB_USER=postgres
|
||||
TOOLJET_DB_HOST=postgresql
|
||||
TOOLJET_DB_PASS=
|
||||
|
||||
PGRST_DB_URI= # postgres://<postgres_username>:<postgres_password><@postgres_hostname>/<database_name>
|
||||
PGRST_HOST=postgrest
|
||||
PGRST_JWT_SECRET= # If you have openssl installed, you can run the following command openssl rand -hex 32 to generate the value for PGRST_JWT_SECRET.
|
||||
|
||||
# Checks every 24 hours to see if a new version of ToolJet is available
|
||||
# (Enabled by default. Set false to disable)
|
||||
CHECK_FOR_UPDATES=true
|
||||
|
||||
# Checks every 24 hours to update app telemetry data to ToolJet hub.
|
||||
# (Telemetry is enabled by default. Set value to true to disable.)
|
||||
# DISABLE_TOOLJET_TELEMETRY=false
|
||||
|
||||
GOOGLE_CLIENT_ID=
|
||||
GOOGLE_CLIENT_SECRET=
|
||||
|
||||
# EMAIL CONFIGURATION
|
||||
DEFAULT_FROM_EMAIL=hello@tooljet.io
|
||||
SMTP_USERNAME=
|
||||
SMTP_PASSWORD=
|
||||
SMTP_DOMAIN=
|
||||
SMTP_PORT=
|
||||
|
||||
# DISABLE USER SIGNUPS (true or false). only applicable if Multi-Workspace feature is enabled
|
||||
DISABLE_SIGNUPS=
|
||||
|
||||
|
||||
# OBSERVABILITY
|
||||
APM_VENDOR=
|
||||
SENTRY_DNS=
|
||||
SENTRY_DEBUG=
|
||||
|
||||
# FEATURE TOGGLE
|
||||
COMMENT_FEATURE_ENABLE=
|
||||
ENABLE_MULTIPLAYER_EDITING=true
|
||||
|
||||
|
||||
# SSO (Applicable only for Multi-Workspace)
|
||||
SSO_GOOGLE_OAUTH2_CLIENT_ID=
|
||||
SSO_GIT_OAUTH2_CLIENT_ID=
|
||||
SSO_GIT_OAUTH2_CLIENT_SECRET=
|
||||
SSO_GIT_OAUTH2_HOST=
|
||||
SSO_ACCEPTED_DOMAINS=
|
||||
SSO_DISABLE_SIGNUPS=
|
||||
|
||||
#ONBOARDING
|
||||
ENABLE_ONBOARDING_QUESTIONS_FOR_ALL_SIGN_UPS=
|
||||
|
||||
#session expiry in minutes
|
||||
USER_SESSION_EXPIRY=2880
|
||||
|
||||
#TELEMETRY
|
||||
DEPLOYMENT_PLATFORM=docker
|
||||
|
|
@ -1,51 +0,0 @@
|
|||
version: "3"
|
||||
|
||||
services:
|
||||
tooljet:
|
||||
tty: true
|
||||
stdin_open: true
|
||||
container_name: Tooljet-app
|
||||
image: tooljet/tooljet-ce:latest
|
||||
restart: always
|
||||
env_file: .env
|
||||
ports:
|
||||
- 80:80
|
||||
depends_on:
|
||||
- postgres
|
||||
environment:
|
||||
SERVE_CLIENT: "true"
|
||||
PORT: "80"
|
||||
command: npm run start:prod
|
||||
|
||||
postgres:
|
||||
container_name: ${PG_HOST}
|
||||
image: postgres:13
|
||||
restart: always
|
||||
volumes:
|
||||
- postgres:/var/lib/postgresql/data
|
||||
env_file: .env
|
||||
environment:
|
||||
- POSTGRES_USER=${PG_USER}
|
||||
- POSTGRES_PASSWORD=${PG_PASS}
|
||||
|
||||
postgrest:
|
||||
container_name: postgrest
|
||||
image: postgrest/postgrest:v12.0.2
|
||||
restart: always
|
||||
depends_on:
|
||||
- postgres
|
||||
env_file: .env
|
||||
environment:
|
||||
- PGRST_SERVER_PORT=80
|
||||
- PGRST_DB_PRE_CONFIG=postgrest.pre_config
|
||||
|
||||
volumes:
|
||||
postgres:
|
||||
driver: local
|
||||
driver_opts:
|
||||
o: bind
|
||||
type: none
|
||||
device: ${PWD}/postgres_data
|
||||
certs:
|
||||
logs:
|
||||
fallbackcerts:
|
||||
|
|
@ -0,0 +1,77 @@
|
|||
# Create .env from this example file and replace values for the environment.
|
||||
# The application expects a separate .env.test for test environment configuration
|
||||
# Get detailed information about each variable here: https://docs.tooljet.com/docs/setup/env-vars
|
||||
|
||||
TOOLJET_HOST=http://localhost:80
|
||||
LOCKBOX_MASTER_KEY= # replace_with_lockbox_master_key
|
||||
SECRET_KEY_BASE= # replace_with_secret_key_base
|
||||
|
||||
# DATABASE CONFIG
|
||||
ORM_LOGGING=all
|
||||
PG_DB=tooljet_production
|
||||
PG_USER=postgres
|
||||
PG_HOST=postgresql
|
||||
PG_PASS= # postgres database password
|
||||
|
||||
# The above postgres values is set to its default state. If necessary, kindly modify it according to your personal preference.
|
||||
|
||||
# TOOLJET DATABASE
|
||||
TOOLJET_DB=tooljet_db
|
||||
TOOLJET_DB_USER=postgres
|
||||
TOOLJET_DB_HOST=postgresql
|
||||
TOOLJET_DB_PASS=
|
||||
|
||||
PGRST_DB_URI= # postgres://<postgres_username>:<postgres_password><@postgres_hostname>/<database_name>
|
||||
PGRST_HOST=localhost:3001
|
||||
PGRST_JWT_SECRET= # If you have openssl installed, you can run the following command openssl rand -hex 32 to generate the value for PGRST_JWT_SECRET.
|
||||
PGRST_SERVER_PORT=3001
|
||||
PGRST_DB_PRE_CONFIG=postgrest.pre_config
|
||||
|
||||
# Checks every 24 hours to see if a new version of ToolJet is available
|
||||
# (Enabled by default. Set false to disable)
|
||||
CHECK_FOR_UPDATES=true
|
||||
|
||||
# Checks every 24 hours to update app telemetry data to ToolJet hub.
|
||||
# (Telemetry is enabled by default. Set value to true to disable.)
|
||||
# DISABLE_TOOLJET_TELEMETRY=false
|
||||
|
||||
GOOGLE_CLIENT_ID=
|
||||
GOOGLE_CLIENT_SECRET=
|
||||
|
||||
# EMAIL CONFIGURATION
|
||||
DEFAULT_FROM_EMAIL=hello@tooljet.io
|
||||
SMTP_USERNAME=
|
||||
SMTP_PASSWORD=
|
||||
SMTP_DOMAIN=
|
||||
SMTP_PORT=
|
||||
|
||||
# DISABLE USER SIGNUPS (true or false). only applicable if Multi-Workspace feature is enabled
|
||||
DISABLE_SIGNUPS=
|
||||
|
||||
|
||||
# OBSERVABILITY
|
||||
APM_VENDOR=
|
||||
SENTRY_DNS=
|
||||
SENTRY_DEBUG=
|
||||
|
||||
# FEATURE TOGGLE
|
||||
COMMENT_FEATURE_ENABLE=
|
||||
ENABLE_MULTIPLAYER_EDITING=true
|
||||
|
||||
|
||||
# SSO (Applicable only for Multi-Workspace)
|
||||
SSO_GOOGLE_OAUTH2_CLIENT_ID=
|
||||
SSO_GIT_OAUTH2_CLIENT_ID=
|
||||
SSO_GIT_OAUTH2_CLIENT_SECRET=
|
||||
SSO_GIT_OAUTH2_HOST=
|
||||
SSO_ACCEPTED_DOMAINS=
|
||||
SSO_DISABLE_SIGNUPS=
|
||||
|
||||
#ONBOARDING
|
||||
ENABLE_ONBOARDING_QUESTIONS_FOR_ALL_SIGN_UPS=
|
||||
|
||||
#session expiry in minutes
|
||||
USER_SESSION_EXPIRY=2880
|
||||
|
||||
#TELEMETRY
|
||||
DEPLOYMENT_PLATFORM=docker
|
||||
|
|
@ -0,0 +1,41 @@
|
|||
version: "3"
|
||||
|
||||
services:
|
||||
tooljet:
|
||||
tty: true
|
||||
stdin_open: true
|
||||
container_name: Tooljet-app
|
||||
image: tooljet/tooljet-ce:ce-lts-latest
|
||||
platform: linux/amd64
|
||||
restart: always
|
||||
env_file: .env
|
||||
ports:
|
||||
- 80:80
|
||||
depends_on:
|
||||
- postgres
|
||||
environment:
|
||||
SERVE_CLIENT: "true"
|
||||
PORT: "80"
|
||||
command: npm run start:prod
|
||||
|
||||
postgres:
|
||||
container_name: ${PG_HOST}
|
||||
image: postgres:16
|
||||
restart: always
|
||||
volumes:
|
||||
- postgres:/var/lib/postgresql/data
|
||||
env_file: .env
|
||||
environment:
|
||||
- POSTGRES_USER=${PG_USER}
|
||||
- POSTGRES_PASSWORD=${PG_PASS}
|
||||
|
||||
volumes:
|
||||
postgres:
|
||||
driver: local
|
||||
driver_opts:
|
||||
o: bind
|
||||
type: none
|
||||
device: ${PWD}/postgres_data
|
||||
certs:
|
||||
logs:
|
||||
fallbackcerts:
|
||||
|
|
@ -22,10 +22,11 @@ TOOLJET_DB_USER= # Postgres database username
|
|||
TOOLJET_DB_HOST= # Postgres database host
|
||||
TOOLJET_DB_PASS= # Postgres database password
|
||||
|
||||
PGRST_HOST=postgrest
|
||||
PGRST_HOST=localhost:3001
|
||||
PGRST_DB_URI=
|
||||
PGRST_JWT_SECRET= # If you have openssl installed, you can run the following command openssl rand -hex 32 to generate the value for PGRST_JWT_SECRET.
|
||||
|
||||
PGRST_SERVER_PORT=3001
|
||||
PGRST_DB_PRE_CONFIG=postgrest.pre_config
|
||||
|
||||
# Checks every 24 hours to see if a new version of ToolJet is available
|
||||
# (Enabled by default. Set false to disable)
|
||||
|
|
@ -0,0 +1,17 @@
|
|||
version: "3"
|
||||
|
||||
services:
|
||||
tooljet:
|
||||
tty: true
|
||||
stdin_open: true
|
||||
container_name: Tooljet-app
|
||||
image: tooljet/tooljet-ce:ce-lts-latest
|
||||
platform: linux/amd64
|
||||
restart: always
|
||||
env_file: .env
|
||||
ports:
|
||||
- 80:80
|
||||
environment:
|
||||
SERVE_CLIENT: "true"
|
||||
PORT: "80"
|
||||
command: npm run start:prod
|
||||
|
|
@ -1,24 +0,0 @@
|
|||
version: "3"
|
||||
|
||||
services:
|
||||
tooljet:
|
||||
tty: true
|
||||
stdin_open: true
|
||||
container_name: Tooljet-app
|
||||
image: tooljet/tooljet-ce:latest
|
||||
restart: always
|
||||
env_file: .env
|
||||
ports:
|
||||
- 80:80
|
||||
environment:
|
||||
SERVE_CLIENT: "true"
|
||||
PORT: "80"
|
||||
command: npm run start:prod
|
||||
# Uncomment if ENABLE_TOOLJET_DB=true
|
||||
postgrest:
|
||||
image: postgrest/postgrest:v12.0.2
|
||||
restart: always
|
||||
env_file: .env
|
||||
environment:
|
||||
- PGRST_SERVER_PORT=80
|
||||
- PGRST_DB_PRE_CONFIG=postgrest.pre_config
|
||||
|
|
@ -11,7 +11,6 @@ source "amazon-ebs" "ubuntu" {
|
|||
ami_name = "${var.ami_name}"
|
||||
instance_type = "${var.instance_type}"
|
||||
region = "${var.ami_region}"
|
||||
ami_regions = "${var.ami_regions}"
|
||||
ami_groups = "${var.ami_groups}"
|
||||
|
||||
source_ami_filter {
|
||||
|
|
@ -30,7 +29,7 @@ source "amazon-ebs" "ubuntu" {
|
|||
|
||||
launch_block_device_mappings {
|
||||
device_name = "/dev/sda1"
|
||||
volume_size = 30
|
||||
volume_size = 15
|
||||
delete_on_termination = true
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -9,7 +9,7 @@ variable "instance_type" {
|
|||
|
||||
variable "ami_region" {
|
||||
type = string
|
||||
default = "us-west-2"
|
||||
default = "us-east-1"
|
||||
}
|
||||
|
||||
variable "ami_groups" {
|
||||
|
|
@ -17,11 +17,6 @@ variable "ami_groups" {
|
|||
default = ["all"]
|
||||
}
|
||||
|
||||
variable "ami_regions" {
|
||||
type = list(string)
|
||||
default = ["us-west-1","us-east-1", "us-east-2", "eu-central-1", "ap-northeast-1", "ca-central-1"]
|
||||
}
|
||||
|
||||
variable "PACKER_BUILDER_TYPE" {
|
||||
type = string
|
||||
default = "amazon-ebs"
|
||||
|
|
|
|||
|
|
@ -40,6 +40,8 @@ services:
|
|||
platform: linux/x86_64
|
||||
depends_on:
|
||||
- postgres
|
||||
env_file:
|
||||
- .env
|
||||
volumes:
|
||||
- ./server:/app/server:delegated
|
||||
- ./plugins:/app/plugins
|
||||
|
|
@ -57,7 +59,7 @@ services:
|
|||
container_name: postgrest
|
||||
image: postgrest/postgrest:v12.0.2
|
||||
ports:
|
||||
- "3001:3000"
|
||||
- "3002:3002"
|
||||
env_file:
|
||||
- .env
|
||||
depends_on:
|
||||
|
|
@ -76,5 +78,22 @@ services:
|
|||
- POSTGRES_USER=${PG_USER}
|
||||
- POSTGRES_PASSWORD=${PG_PASS}
|
||||
|
||||
redis:
|
||||
container_name: redis
|
||||
image: redis:6.2
|
||||
restart: always
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: '0.5'
|
||||
memory: 1G
|
||||
env_file:
|
||||
- .env
|
||||
environment:
|
||||
- MASTER=redis
|
||||
- REDIS_USER=${REDIS_USER}
|
||||
- REDIS_PASSWORD=${REDIS_PASSWORD}
|
||||
|
||||
volumes:
|
||||
postgres:
|
||||
redis:
|
||||
|
|
|
|||
82
docker/.env.internal.example
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
# Create .env from this example file and replace values for the environment.
|
||||
# The application expects a separate .env.test for test environment configuration
|
||||
# Get detailed information about each variable here: https://docs.tooljet.com/docs/setup/env-vars
|
||||
|
||||
TOOLJET_HOST=http://localhost:8082
|
||||
LOCKBOX_MASTER_KEY= # replace_with_lockbox_master_key
|
||||
SECRET_KEY_BASE= # replace_with_secret_key_base
|
||||
|
||||
# DATABASE CONFIG
|
||||
ORM_LOGGING=all
|
||||
PG_DB=tooljet_production
|
||||
PG_USER=postgres
|
||||
PG_HOST=postgresql
|
||||
PG_PASS= # postgres database password
|
||||
|
||||
# The above postgres values is set to its default state. If necessary, kindly modify it according to your personal preference.
|
||||
|
||||
# TOOLJET DATABASE
|
||||
TOOLJET_DB=tooljet_db
|
||||
TOOLJET_DB_USER=postgres
|
||||
TOOLJET_DB_HOST=postgresql
|
||||
TOOLJET_DB_PASS=
|
||||
|
||||
PGRST_DB_URI= # postgres://<postgres_username>:<postgres_password><@postgres_hostname>/<database_name>
|
||||
PGRST_HOST=postgrest:3002
|
||||
PGRST_JWT_SECRET= # If you have openssl installed, you can run the following command openssl rand -hex 32 to generate the value for PGRST_JWT_SECRET.
|
||||
PGRST_SERVER_PORT=3002
|
||||
PGRST_DB_PRE_CONFIG=postgrest.pre_config
|
||||
|
||||
# Redis configuration
|
||||
REDIS_HOST=redis
|
||||
REDIS_PORT=6379
|
||||
REDIS_USER=default
|
||||
REDIS_PASSWORD=
|
||||
# Checks every 24 hours to see if a new version of ToolJet is available
|
||||
# (Enabled by default. Set false to disable)
|
||||
CHECK_FOR_UPDATES=true
|
||||
|
||||
# Checks every 24 hours to update app telemetry data to ToolJet hub.
|
||||
# (Telemetry is enabled by default. Set value to true to disable.)
|
||||
# DISABLE_TOOLJET_TELEMETRY=false
|
||||
|
||||
GOOGLE_CLIENT_ID=
|
||||
GOOGLE_CLIENT_SECRET=
|
||||
|
||||
# EMAIL CONFIGURATION
|
||||
DEFAULT_FROM_EMAIL=hello@tooljet.io
|
||||
SMTP_USERNAME=
|
||||
SMTP_PASSWORD=
|
||||
SMTP_DOMAIN=
|
||||
SMTP_PORT=
|
||||
|
||||
# DISABLE USER SIGNUPS (true or false). only applicable if Multi-Workspace feature is enabled
|
||||
DISABLE_SIGNUPS=
|
||||
|
||||
|
||||
# OBSERVABILITY
|
||||
APM_VENDOR=
|
||||
SENTRY_DNS=
|
||||
SENTRY_DEBUG=
|
||||
|
||||
# FEATURE TOGGLE
|
||||
COMMENT_FEATURE_ENABLE=
|
||||
ENABLE_MULTIPLAYER_EDITING=true
|
||||
|
||||
|
||||
# SSO (Applicable only for Multi-Workspace)
|
||||
SSO_GOOGLE_OAUTH2_CLIENT_ID=
|
||||
SSO_GIT_OAUTH2_CLIENT_ID=
|
||||
SSO_GIT_OAUTH2_CLIENT_SECRET=
|
||||
SSO_GIT_OAUTH2_HOST=
|
||||
SSO_ACCEPTED_DOMAINS=
|
||||
SSO_DISABLE_SIGNUPS=
|
||||
|
||||
#ONBOARDING
|
||||
ENABLE_ONBOARDING_QUESTIONS_FOR_ALL_SIGN_UPS=
|
||||
|
||||
#session expiry in minutes
|
||||
USER_SESSION_EXPIRY=2880
|
||||
|
||||
#TELEMETRY
|
||||
DEPLOYMENT_PLATFORM=docker
|
||||
|
|
@ -5,6 +5,24 @@ if [ -f "./.env" ]; then
|
|||
export $(grep -v '^#' ./.env | xargs -d '\n') || true
|
||||
fi
|
||||
|
||||
# Check if PGRST_HOST starts with "localhost"
|
||||
if [[ "$PGRST_HOST" == localhost:* ]]; then
|
||||
echo "Starting PostgREST server locally..."
|
||||
|
||||
# Generate PostgREST configuration in a writable directory
|
||||
POSTGREST_CONFIG_PATH="/tmp/postgrest.conf"
|
||||
|
||||
echo "db-uri = \"${PGRST_DB_URI}\"" > "$POSTGREST_CONFIG_PATH"
|
||||
echo "db-pre-config = \"postgrest.pre_config\"" >> "$POSTGREST_CONFIG_PATH"
|
||||
echo "server-port = \"${PGRST_SERVER_PORT}\"" >> "$POSTGREST_CONFIG_PATH"
|
||||
|
||||
# Starting PostgREST
|
||||
echo "Starting PostgREST..."
|
||||
postgrest "$POSTGREST_CONFIG_PATH" &
|
||||
else
|
||||
echo "Using external PostgREST at $PGRST_HOST."
|
||||
fi
|
||||
|
||||
if [ -d "./server/dist" ]; then
|
||||
SETUP_CMD='npm run db:setup:prod'
|
||||
else
|
||||
|
|
|
|||
|
|
@ -35,6 +35,19 @@ RUN npm install -g @nestjs/cli
|
|||
RUN npm install -g copyfiles
|
||||
RUN npm --prefix server run build
|
||||
|
||||
# Install dependencies for PostgREST, curl, tar, etc.
|
||||
RUN apt-get update && apt-get install -y \
|
||||
curl ca-certificates tar \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
ENV POSTGREST_VERSION=v12.2.0
|
||||
|
||||
RUN curl -Lo postgrest.tar.xz https://github.com/PostgREST/postgrest/releases/download/${POSTGREST_VERSION}/postgrest-v12.2.0-linux-static-x64.tar.xz && \
|
||||
tar -xf postgrest.tar.xz && \
|
||||
mv postgrest /postgrest && \
|
||||
rm postgrest.tar.xz && \
|
||||
chmod +x /postgrest
|
||||
|
||||
FROM debian:12
|
||||
|
||||
RUN apt-get update -yq \
|
||||
|
|
@ -103,6 +116,13 @@ RUN useradd --create-home --home-dir /home/appuser appuser \
|
|||
&& chmod u+x /app \
|
||||
&& chmod -R g=u /app
|
||||
|
||||
# Use the PostgREST binary from the builder stage
|
||||
COPY --from=builder --chown=appuser:0 /postgrest /usr/local/bin/postgrest
|
||||
|
||||
RUN mv /usr/local/bin/postgrest /usr/local/bin/postgrest-original && \
|
||||
echo '#!/bin/bash\nexec /usr/local/bin/postgrest-original "$@" 2>&1 | sed "s/^/[PostgREST] /"' > /usr/local/bin/postgrest && \
|
||||
chmod +x /usr/local/bin/postgrest
|
||||
|
||||
# Set npm cache directory
|
||||
ENV npm_config_cache /home/appuser/.npm
|
||||
|
||||
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
# pull official base image
|
||||
FROM node:18.18.2-buster
|
||||
FROM node:22.15.1-bullseye
|
||||
|
||||
ENV NODE_ENV=development
|
||||
|
||||
RUN npm i -g npm@9.8.1
|
||||
RUN npm i -g npm@10.9.2
|
||||
|
||||
# set working directory
|
||||
WORKDIR /app
|
||||
|
|
|
|||
101
docker/internal.sh
Executable file
|
|
@ -0,0 +1,101 @@
|
|||
#!/bin/bash
|
||||
|
||||
# Load the .env file
|
||||
source .env
|
||||
|
||||
# Check if LOCKBOX_MASTER_KEY is present or empty
|
||||
if [[ -z "$LOCKBOX_MASTER_KEY" ]]; then
|
||||
# Generate LOCKBOX_MASTER_KEY
|
||||
LOCKBOX_MASTER_KEY=$(openssl rand -hex 32)
|
||||
|
||||
# Update .env file
|
||||
awk -v key="$LOCKBOX_MASTER_KEY" '
|
||||
BEGIN { FS=OFS="=" }
|
||||
/^LOCKBOX_MASTER_KEY=/ { $2=key; found=1 }
|
||||
1
|
||||
END { if (!found) print "LOCKBOX_MASTER_KEY="key }
|
||||
' .env > temp.env && mv temp.env .env
|
||||
|
||||
echo "Generated a secure master key for the lockbox"
|
||||
else
|
||||
echo "The lockbox master key already exists."
|
||||
fi
|
||||
|
||||
# Check if SECRET_KEY_BASE is present or empty
|
||||
if [[ -z "$SECRET_KEY_BASE" ]]; then
|
||||
# Generate SECRET_KEY_BASE
|
||||
SECRET_KEY_BASE=$(openssl rand -hex 64)
|
||||
|
||||
# Update .env file
|
||||
awk -v key="$SECRET_KEY_BASE" '
|
||||
BEGIN { FS=OFS="=" }
|
||||
/^SECRET_KEY_BASE=/ { $2=key; found=1 }
|
||||
1
|
||||
END { if (!found) print "SECRET_KEY_BASE="key }
|
||||
' .env > temp.env && mv temp.env .env
|
||||
|
||||
echo "Created a secret key for secure operations."
|
||||
else
|
||||
echo "The secret key base is already in place."
|
||||
fi
|
||||
|
||||
# Check if PGRST_JWT_SECRET is present or empty
|
||||
if [[ -z "$PGRST_JWT_SECRET" ]]; then
|
||||
# Generate PGRST_JWT_SECRET
|
||||
PGRST_JWT_SECRET=$(openssl rand -hex 32)
|
||||
|
||||
# Update .env file
|
||||
awk -v key="$PGRST_JWT_SECRET" '
|
||||
BEGIN { FS=OFS="=" }
|
||||
/^PGRST_JWT_SECRET=/ { $2=key; found=1 }
|
||||
1
|
||||
END { if (!found) print "PGRST_JWT_SECRET="key }
|
||||
' .env > temp.env && mv temp.env .env
|
||||
|
||||
echo "Generated a unique secret for PGRST authentication."
|
||||
else
|
||||
echo "The PGRST JWT secret is already generated and in place."
|
||||
fi
|
||||
|
||||
# Function to generate a random password
|
||||
generate_password() {
|
||||
openssl rand -base64 12 | tr -d '/+' | cut -c1-16
|
||||
}
|
||||
|
||||
# Check if PG_PASS and TOOLJET_DB_PASS are present or empty
|
||||
if [[ -z "$PG_PASS" ]] && [[ -z "$TOOLJET_DB_PASS" ]]; then
|
||||
# Generate random passwords
|
||||
PASSWORD=$(generate_password)
|
||||
|
||||
# Update .env file
|
||||
awk -v pass="$PASSWORD" '
|
||||
BEGIN { FS=OFS="=" }
|
||||
/^(PG_PASS|TOOLJET_DB_PASS)=/ { $2=pass; found=1 }
|
||||
1
|
||||
END { if (!found) print "PG_PASS="pass ORS "TOOLJET_DB_PASS="pass }
|
||||
' .env > temp.env && mv temp.env .env
|
||||
|
||||
echo "Successfully generated a secure password for the PostgreSQL database."
|
||||
else
|
||||
echo "Postgres password already exist"
|
||||
fi
|
||||
|
||||
# Check if PGRST_DB_URI is present or empty
|
||||
if [[ -z "$PGRST_DB_URI" ]]; then
|
||||
# Construct PGRST_DB_URI with PG_PASS
|
||||
PGRST_DB_URI="postgres://postgres:$PASSWORD@postgresql/tooljet_db"
|
||||
|
||||
# Update .env file for PGRST_DB_URI
|
||||
awk -v uri="$PGRST_DB_URI" '
|
||||
BEGIN { FS=OFS="=" }
|
||||
/^PGRST_DB_URI=/ { $2=uri; found=1 }
|
||||
1
|
||||
END { if (!found) print "PGRST_DB_URI="uri }
|
||||
' .env > temp.env && mv temp.env .env
|
||||
|
||||
echo "Successfully updated PGRST database URI"
|
||||
else
|
||||
echo "The PGRST DB URI is already configured and in use."
|
||||
fi
|
||||
|
||||
exec "$@"
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
# pull official base image
|
||||
FROM node:18.18.2-buster
|
||||
FROM node:22.15.1-bullseye
|
||||
|
||||
RUN npm i -g npm@9.8.1
|
||||
RUN npm i -g npm@10.9.2
|
||||
|
||||
# set working directory
|
||||
WORKDIR /app
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
# pull official base image
|
||||
FROM node:18.18.2-buster
|
||||
FROM node:22.15.1-bullseye
|
||||
RUN apt-get update && apt-get install -y postgresql-client freetds-dev libaio1 wget
|
||||
|
||||
# Install Instantclient Basic Light Oracle and Dependencies
|
||||
|
|
@ -19,7 +19,7 @@ WORKDIR /
|
|||
ENV NODE_ENV=development
|
||||
ENV NODE_OPTIONS="--max-old-space-size=4096"
|
||||
|
||||
RUN npm i -g npm@9.8.1
|
||||
RUN npm i -g npm@10.9.2
|
||||
RUN mkdir -p /app
|
||||
WORKDIR /app
|
||||
|
||||
|
|
@ -30,4 +30,4 @@ COPY ./server/package.json ./server/package-lock.json ./server/
|
|||
RUN npm --prefix server install
|
||||
COPY ./server/ ./server/
|
||||
|
||||
ENTRYPOINT ["./server/entrypoint.sh"]
|
||||
ENTRYPOINT ["./server/local-ce-entrypoint.sh"]
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@
|
|||
<h1 align="center">ToolJet Documentation</h1>
|
||||
</p>
|
||||
|
||||
The directory "ToolJet/docs/" holds the code and markdown source files for the ToolJet documentation website, which is accessible at [docs.tooljet.ai](docs.tooljet.ai)
|
||||
The directory "ToolJet/docs/" holds the code and markdown source files for the ToolJet documentation website, which is accessible at [docs.tooljet.com](docs.tooljet.com)
|
||||
|
||||
## Index
|
||||
- [Feedback](#feedback)
|
||||
|
|
@ -26,7 +26,7 @@ In case you encounter any issues with the ToolJet product, please select the rel
|
|||
To contribute to ToolJet documentation, you need to fork this repository and submit a pull request for the Markdown and/or image changes that you're proposing.
|
||||
|
||||
### Repository organization
|
||||
The content in this directory follows the organization of documentation at https://docs.tooljet.ai
|
||||
The content in this directory follows the organization of documentation at https://docs.tooljet.com
|
||||
|
||||
This directory contains the following folders:
|
||||
|
||||
|
|
@ -41,15 +41,15 @@ This directory contains the following folders:
|
|||
├── versioned_docs
|
||||
│ ├── version-x.x.x # Current/latest version (set it on docusauras.config.js)
|
||||
│ │ ├── Enterprise
|
||||
│ │ │ └── multi-environment.md # https://docs.tooljet.ai/docs/Enterprise/multi-environment
|
||||
│ │ └── tooljet-database.md. # https://docs.tooljet.ai/docs/tooljet-database
|
||||
│ │ │ └── multi-environment.md # https://docs.tooljet.com/docs/Enterprise/multi-environment
|
||||
│ │ └── tooljet-database.md. # https://docs.tooljet.com/docs/tooljet-database
|
||||
│ └── version-2.0.0
|
||||
│ │ ├── Enterprise
|
||||
│ │ │ └── multi-environment.md # https://docs.tooljet.ai/docs/2.0.0/Enterprise/multi-environment
|
||||
│ │ │ └── multi-environment.md # https://docs.tooljet.com/docs/2.0.0/Enterprise/multi-environment
|
||||
│ │ └── tooljet-database.md
|
||||
│ └── version-1.0.0
|
||||
│ ├── Enterprise
|
||||
│ │ └── multi-environment.md # https://docs.tooljet.ai/docs/1.0.0/Enterprise/multi-environment
|
||||
│ │ └── multi-environment.md # https://docs.tooljet.com/docs/1.0.0/Enterprise/multi-environment
|
||||
│ └── tooljet-database.md
|
||||
├── versioned_sidebars # includes sidebar for the specific versions
|
||||
│ ├── version-x.x.x-sidebars.json
|
||||
|
|
|
|||
|
|
@ -39,16 +39,16 @@ Make sure to run it within the WSL2 terminal.
|
|||
git clone https://github.com/<your-username>/ToolJet.git
|
||||
```
|
||||
|
||||
3. Create a `.env` file by copying `.env.example`. More information on the variables that can be set is given in the **[environment variables reference](/docs/setup/env-vars)**.
|
||||
3. Create a `.env` file by copying `.env.internal.example`. More information on the variables that can be set is given in the **[environment variables reference](/docs/setup/env-vars)**.
|
||||
|
||||
```bash
|
||||
cp ./deploy/docker/.env.internal.example .env
|
||||
cp ./docker/.env.internal.example .env
|
||||
```
|
||||
|
||||
4. Populate the keys in the `.env` using the below the command:
|
||||
|
||||
```bash
|
||||
chmod +x ./deploy/docker/internal.sh && ./deploy/docker/internal.sh
|
||||
chmod +x ./docker/internal.sh && ./docker/internal.sh
|
||||
```
|
||||
|
||||
:::warning
|
||||
|
|
|
|||
|
|
@ -67,7 +67,7 @@ To create a query for sending an email, follow these steps:
|
|||
- **CC mail to** : Email address of the recipients that will receive a copy of the email, and their email addresses will be visible to other recipients.
|
||||
- **BCC mail to** : Email address of the recipients that will receive a copy of the email, but their email addresses will be hidden from other recipients.
|
||||
- **Attachments** : You can add attachments to an SMTP query by referencing the file from the File Picker component in the attachments field.
|
||||
- For instance, you can set the `Attachments` field value to `{{ components.filepicker1.file }}` or pass an object `{{ name: 'filename.jpg', dataURL: '......' }}` to include attachments.
|
||||
- For instance, you can set the `Attachments` field value to `{{ components.filepicker1.file }}` or pass an object `{{[{ name: "filename.jpg", dataURL: " " }]}}` to include attachments.
|
||||
|
||||
<img className="screenshot-full" src="/img/datasource-reference/smtp/querysmtp-v2.png" alt="smtp connect" />
|
||||
|
||||
|
|
|
|||
|
|
@ -55,6 +55,8 @@ To remove a plugin, follow these steps:
|
|||
- On the `Installed` page, click on the `Remove` button of the related plugin that you wish to remove.
|
||||
|
||||
## Available Plugins
|
||||
|
||||
- **[Aftership](/docs/marketplace/plugins/marketplace-plugin-aftership)**
|
||||
- **[Anthropic](/docs/marketplace/plugins/marketplace-plugin-anthropic)**
|
||||
- **[AWS Redshift](/docs/marketplace/plugins/marketplace-plugin-awsredshift)**
|
||||
- **[AWS Textract](/docs/marketplace/plugins/marketplace-plugin-textract)**
|
||||
|
|
@ -67,6 +69,7 @@ To remove a plugin, follow these steps:
|
|||
- **[HarperDB](/docs/marketplace/plugins/marketplace-plugin-harperdb)**
|
||||
- **[Hugging Face](/docs/marketplace/plugins/marketplace-plugin-hugging_face)**
|
||||
- **[Jira](/docs/marketplace/plugins/marketplace-plugin-jira)**
|
||||
- **[Microsoft Graph](/docs/marketplace/plugins/marketplace-plugin-microsoft_graph)**
|
||||
- **[Mistral AI](/docs/marketplace/plugins/marketplace-plugin-mistral_ai)**
|
||||
- **[OpenAI](/docs/marketplace/plugins/marketplace-plugin-openai)**
|
||||
- **[Pinecone](/docs/marketplace/plugins/marketplace-plugin-pinecone)**
|
||||
|
|
@ -78,6 +81,7 @@ To remove a plugin, follow these steps:
|
|||
- **[Salesforce](/docs/marketplace/plugins/marketplace-plugin-salesforce)**
|
||||
- **[Sharepoint](/docs/marketplace/plugins/marketplace-plugin-sharepoint)**
|
||||
- **[Supabase](/docs/marketplace/plugins/marketplace-plugin-supabase)**
|
||||
- **[UPS](/docs/marketplace/plugins/marketplace-plugin-ups)**
|
||||
- **[Weaviate](/docs/marketplace/plugins/marketplace-plugin-weaviate)**
|
||||
|
||||
:::info For Plugin Developers
|
||||
|
|
|
|||
185
docs/docs/marketplace/plugins/aftership.md
Normal file
|
|
@ -0,0 +1,185 @@
|
|||
---
|
||||
id: marketplace-plugin-aftership
|
||||
title: Aftership
|
||||
---
|
||||
|
||||
Integrating AfterShip with ToolJet enables teams to build custom internal tools for tracking and managing shipments in real time. With this integration, you can fetch delivery statuses, monitor carrier updates, and centralize logistics data within your ToolJet application, streamlining operations and improving customer support efficiency.
|
||||
|
||||
## Connection
|
||||
|
||||
To connect AfterShip with ToolJet you will need the API Key, which you can generate from [Aftership Tracking API](https://www.aftership.com/tracking-api).
|
||||
|
||||
<img className="screenshot-full img-full" src="/img/marketplace/plugins/aftership/connection.png" alt="Aftership Configuration" />
|
||||
|
||||
## Supported Operations
|
||||
|
||||
### Tracking
|
||||
|
||||
#### Basic Tracking Operations
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ------------ | --------------------------- |
|
||||
| GET | `/trackings` | Retrieve list of trackings. |
|
||||
| POST | `/trackings` | Create a new tracking. |
|
||||
| GET | `/couriers` | Get supported courier list. |
|
||||
|
||||
#### ID
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ----------------------------------- | ---------------------------- |
|
||||
| GET | `/trackings/{id}` | Get tracking by ID. |
|
||||
| PUT | `/trackings/{id}` | Update tracking by ID. |
|
||||
| DELETE | `/trackings/{id}` | Delete tracking by ID. |
|
||||
| POST | `/trackings/{id}/retrack` | Retrack an expired tracking. |
|
||||
| POST | `/trackings/{id}/mark-as-completed` | Mark tracking as completed. |
|
||||
|
||||
#### Detect
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ------------------ | ---------------------------------- |
|
||||
| POST | `/couriers/detect` | Detect courier by tracking number. |
|
||||
|
||||
#### All
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ------------------ | ---------------------------------- |
|
||||
| GET | `/couriers/all` | Get all available couriers. |
|
||||
|
||||
#### Predict Batch
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ---------------------------------------- | ------------------------------------- |
|
||||
| POST | `/estimated-delivery-date/predict-batch` | Predict estimated delivery for batch. |
|
||||
|
||||
### Shipping
|
||||
|
||||
#### Labels
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | -------------- | ----------------- |
|
||||
| GET | `/labels` | Get labels |
|
||||
| POST | `/labels` | Create a label |
|
||||
| GET | `/labels/{id}` | Get a label by ID |
|
||||
|
||||
#### Cancel Labels
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | --------------------- | --------------------------- |
|
||||
| GET | `/cancel-labels` | Get the cancelled labels |
|
||||
| POST | `/cancel-labels` | Cancel a label |
|
||||
| GET | `/cancel-labels/{id}` | Get a cancelled label by ID |
|
||||
|
||||
#### Rates
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ------------- | ---------------- |
|
||||
| GET | `/rates` | Get rates |
|
||||
| POST | `/rates` | Calculate rates |
|
||||
| GET | `/rates/{id}` | Get a rate by ID |
|
||||
|
||||
#### Manifests
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ----------------- | -------------------- |
|
||||
| GET | `/manifests` | Get manifests |
|
||||
| POST | `/manifests` | Create a manifest |
|
||||
| GET | `/manifests/{id}` | Get a manifest by ID |
|
||||
|
||||
#### Couriers
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ----------- | ---------------- |
|
||||
| GET | `/couriers` | Get all couriers |
|
||||
|
||||
#### Address Validations
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ---------------------- | ---------------------------- |
|
||||
| POST | `/address-validations` | Create an address validation |
|
||||
|
||||
#### Location
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ------------ | ------------------------------------------- |
|
||||
| GET | `/locations` | Get carrier locations (requires production) |
|
||||
|
||||
#### Pickup
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | --------------- | ---------------------------------------------------- |
|
||||
| GET | `/pickups` | Get pickups |
|
||||
| POST | `/pickups` | Create a pickup (FedEx, UPS, DHL Express, Purolator) |
|
||||
| GET | `/pickups/{id}` | Get a pickup by ID |
|
||||
|
||||
#### Cancel Pickups
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ---------------------- | ---------------------------- |
|
||||
| GET | `/cancel-pickups` | Get the cancelled pickups |
|
||||
| POST | `/cancel-pickups` | Cancel a pickup |
|
||||
| GET | `/cancel-pickups/{id}` | Get a cancelled pickup by ID |
|
||||
|
||||
#### Shipper Accounts
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ------------------------------------ | ----------------------------------------- |
|
||||
| GET | `/shipper-accounts` | Get shipper accounts |
|
||||
| POST | `/shipper-accounts` | Create a shipper account |
|
||||
| GET | `/shipper-accounts/{id}` | Get a shipper account by ID |
|
||||
| DELETE | `/shipper-accounts/{id}` | Delete a shipper account |
|
||||
| PUT | `/shipper-accounts/{id}/info` | Update shipper account's information |
|
||||
| PUT | `/shipper-accounts/{id}/credentials` | Update shipper account's credentials |
|
||||
| PUT | `/shipper-accounts/{id}/settings` | Update shipper account's settings (FedEx) |
|
||||
|
||||
### Return
|
||||
|
||||
#### Returns Management
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | --------------------------- | -------------------------------------------- |
|
||||
| GET | `/returns` | Get returns with optional filtering |
|
||||
| POST | `/returns` | Create a new return (supports only "Refund") |
|
||||
| GET | `/returns/{return_id}` | Get return detail by return ID |
|
||||
| GET | `/returns/rma/{rma_number}` | Get return detail by RMA number |
|
||||
|
||||
#### Return Status Management
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ----------------------------------- | ---------------------------- |
|
||||
| POST | `/returns/{return_id}/approve` | Approve return by return ID |
|
||||
| POST | `/returns/rma/{rma_number}/approve` | Approve return by RMA number |
|
||||
| POST | `/returns/{return_id}/resolve` | Resolve return by return ID |
|
||||
| POST | `/returns/rma/{rma_number}/resolve` | Resolve return by RMA number |
|
||||
| POST | `/returns/{return_id}/reject` | Reject return by return ID |
|
||||
| POST | `/returns/rma/{rma_number}/reject` | Reject return by RMA number |
|
||||
|
||||
#### Item Management
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ------------------------------------------- | ---------------------------------------------- |
|
||||
| POST | `/returns/{return_id}/receive-items` | Record received items by return ID |
|
||||
| POST | `/returns/rma/{rma_number}/receive-items` | Record received items by RMA number |
|
||||
| PUT | `/returns/{return_id}/items/{item_id}` | Update return item (tags/images) by return ID |
|
||||
| PUT | `/returns/rma/{rma_number}/items/{item_id}` | Update return item (tags/images) by RMA number |
|
||||
| POST | `/returns/{return_id}/remove-items` | Remove items from return by return ID |
|
||||
| POST | `/returns/rma/{rma_number}/remove-items` | Remove items from return by RMA number |
|
||||
|
||||
#### Shipping Management
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | -------------------------------------------- | ---------------------------------- |
|
||||
| POST | `/returns/{return_id}/attach-shipments` | Upload shipment info by return ID |
|
||||
| POST | `/returns/rma/{rma_number}/attach-shipments` | Upload shipment info by RMA number |
|
||||
|
||||
#### Dropoff Management
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ------------------------------------------------------- | -------------------------------------- |
|
||||
| POST | `/returns/rma/{rma_number}/dropoffs/{dropoff_id}/drops` | Record dropped-off items (QR dropoffs) |
|
||||
|
||||
#### Utility Endpoints
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | --------------- | ---------------------------------------------------- |
|
||||
| POST | `/returns/link` | Generate returns page deep link with pre-filled info |
|
||||
| GET | `/item-tags` | Retrieve all available item tags |
|
||||
246
docs/docs/marketplace/plugins/couchbase.md
Normal file
|
|
@ -0,0 +1,246 @@
|
|||
---
|
||||
id: marketplace-plugin-couchbase
|
||||
title: Couchbase
|
||||
---
|
||||
|
||||
ToolJet integrates with Couchbase to utilize its NoSQL database capabilities and advanced vector search features. This integration enables ToolJet to perform document operations such as creating, reading, updating, and deleting documents, as well as executing SQL++ queries and Full-Text Search (FTS) operations in Couchbase databases. With Couchbase's vector store capabilities, ToolJet can leverage semantic search, hybrid search combining traditional and AI-powered queries, and build intelligent applications.
|
||||
|
||||
:::note
|
||||
Before following this guide, it is assumed that you have already completed the process of **[Using Marketplace plugins](/docs/marketplace/marketplace-overview#using-marketplace-plugins)**.
|
||||
:::
|
||||
|
||||
## Connection
|
||||
|
||||
For connecting to Couchbase, the following credentials are required:
|
||||
|
||||
- **Data API Endpoint**: Your Couchbase Data API endpoint URL
|
||||
- **Username**: Your Couchbase username
|
||||
- **Password**: Your Couchbase password
|
||||
|
||||
<img className="screenshot-full" src="/img/marketplace/plugins/couchbase/connection.png" alt="Configuring Couchbase in ToolJet" />
|
||||
|
||||
## Supported Operations
|
||||
|
||||
- **[Get Document](#get-document)**
|
||||
- **[Create Document](#create-document)**
|
||||
- **[Update Document](#update-document)**
|
||||
- **[Delete Document](#delete-document)**
|
||||
- **[Query](#query)**
|
||||
- **[FTS Search](#fts-search)**
|
||||
|
||||
### Get Document
|
||||
|
||||
This operation retrieves a specific document by its ID from a Couchbase collection.
|
||||
|
||||
#### Required Parameters
|
||||
|
||||
- **Bucket**: The name of the bucket containing the document
|
||||
- **Document ID**: The unique identifier of the document to retrieve
|
||||
- **Scope**: The scope name
|
||||
- **Collection**: The collection name
|
||||
|
||||
<img className="screenshot-full" src="/img/marketplace/plugins/couchbase/get-document.png" alt="Get Document Operation" />
|
||||
|
||||
<details id="tj-dropdown">
|
||||
<summary>**Example Response**</summary>
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "user::123",
|
||||
"name": "John Doe",
|
||||
"email": "john@example.com",
|
||||
"age": 30,
|
||||
"created_at": "2023-01-15T10:30:00Z"
|
||||
}
|
||||
```
|
||||
</details>
|
||||
|
||||
### Create Document
|
||||
|
||||
This operation creates a new document in a Couchbase collection.
|
||||
|
||||
#### Required Parameters
|
||||
|
||||
- **Bucket**: The name of the bucket to create the document in
|
||||
- **Scope**: The scope name
|
||||
- **Collection**: The collection name
|
||||
- **Document ID**: The unique identifier for the new document
|
||||
- **Document**: The document data as a JSON object
|
||||
|
||||
|
||||
<img className="screenshot-full" src="/img/marketplace/plugins/couchbase/create-document.png" alt="Create Document Operation" />
|
||||
|
||||
<details id="tj-dropdown">
|
||||
<summary>**Example Response**</summary>
|
||||
|
||||
```yaml
|
||||
Created successfully
|
||||
```
|
||||
</details>
|
||||
|
||||
### Update Document
|
||||
|
||||
This operation updates an existing document in a Couchbase collection.
|
||||
|
||||
#### Required Parameters
|
||||
|
||||
- **Bucket**: The name of the bucket containing the document
|
||||
- **Scope**: The scope name
|
||||
- **Collection**: The collection name
|
||||
- **Document ID**: The unique identifier of the document to update
|
||||
- **Document**: The updated document data as a JSON object
|
||||
|
||||
|
||||
<img className="screenshot-full" src="/img/marketplace/plugins/couchbase/update-document.png" alt="Update Document Operation" />
|
||||
|
||||
<details id="tj-dropdown">
|
||||
<summary>**Example Response**</summary>
|
||||
|
||||
```yaml
|
||||
Updated successfully
|
||||
```
|
||||
</details>
|
||||
|
||||
Note: The update operation fully replaces the original document with the new document value that is passed in; fields omitted from the new document are not preserved.
|
||||
|
||||
### Delete Document
|
||||
|
||||
This operation deletes a document from a Couchbase collection.
|
||||
|
||||
#### Required Parameters
|
||||
|
||||
- **Bucket**: The name of the bucket containing the document
|
||||
- **Scope**: The scope name
|
||||
- **Collection**: The collection name
|
||||
- **Document ID**: The unique identifier of the document to delete
|
||||
|
||||
<img className="screenshot-full" src="/img/marketplace/plugins/couchbase/delete-document.png" alt="Delete Document Operation" />
|
||||
|
||||
<details id="tj-dropdown">
|
||||
<summary>**Example Response**</summary>
|
||||
|
||||
```yaml
|
||||
Deleted successfully
|
||||
```
|
||||
</details>
|
||||
|
||||
### Query
|
||||
|
||||
This operation executes SQL++ queries against your Couchbase database.
|
||||
|
||||
#### Required Parameters
|
||||
|
||||
- **SQL++ Query**: The SQL++ statement to execute (use `$parameter` placeholders for named parameters)
|
||||
|
||||
#### Optional Parameters
|
||||
|
||||
- **Arguments (Key-Value)**: Key-value object for named parameters that replace `$parameter` placeholders in the query
|
||||
- **Query Options**: JSON object containing additional query options like `readonly`, `timeout`, etc.
|
||||
|
||||
<img className="screenshot-full" src="/img/marketplace/plugins/couchbase/query.png" alt="Query Operation" />
|
||||
|
||||
<details id="tj-dropdown">
|
||||
<summary>**Example Query**</summary>
|
||||
|
||||
```sql
|
||||
SELECT * FROM `travel-sample`.`inventory`.`airline` WHERE country = $country LIMIT 10
|
||||
```
|
||||
|
||||
**Arguments (Key-Value)**: `{ "$country": "France" }`
|
||||
|
||||
**Query Options**: `{ "readonly": true, "query_context": "travel-sample.inventory" }`
|
||||
|
||||
Refer to the [request parameters](https://docs.couchbase.com/server/current/n1ql-rest-query/index.html#Request) for supported query options.
|
||||
|
||||
</details>
|
||||
|
||||
<details id="tj-dropdown">
|
||||
<summary>**Example Response**</summary>
|
||||
|
||||
```json
|
||||
{
|
||||
"results": [
|
||||
{
|
||||
"airline": {
|
||||
"id": 137,
|
||||
"type": "airline",
|
||||
"name": "Air France",
|
||||
"iata": "AF",
|
||||
"icao": "AFR",
|
||||
"callsign": "AIRFRANS",
|
||||
"country": "France"
|
||||
}
|
||||
}
|
||||
],
|
||||
"status": "success",
|
||||
"metrics": {
|
||||
"elapsedTime": "15.2ms",
|
||||
"executionTime": "14.8ms",
|
||||
"resultCount": 1,
|
||||
"resultSize": 234
|
||||
}
|
||||
}
|
||||
```
|
||||
</details>
|
||||
|
||||
### FTS Search
|
||||
|
||||
This operation performs Full-Text Search queries against a Couchbase FTS index.
|
||||
|
||||
#### Required Parameters
|
||||
|
||||
- **Bucket**: The name of the bucket to search in
|
||||
- **Scope**: The scope name
|
||||
- **Index Name**: The name of the FTS index to search against
|
||||
- **Search Query**: The FTS search query as a JSON object
|
||||
|
||||
<img className="screenshot-full" src="/img/marketplace/plugins/couchbase/fts-search.png" alt="FTS Search Operation" />
|
||||
|
||||
<details id="tj-dropdown">
|
||||
<summary>**Example Search Query**</summary>
|
||||
|
||||
```json
|
||||
{
|
||||
"query": {
|
||||
"match": "hotel",
|
||||
"field": "name"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
<details id="tj-dropdown">
|
||||
<summary>**Example Response**</summary>
|
||||
|
||||
```json
|
||||
{
|
||||
"status": {
|
||||
"total": 1,
|
||||
"failed": 0,
|
||||
"successful": 1
|
||||
},
|
||||
"request": {
|
||||
"query": {
|
||||
"match": "hotel",
|
||||
"field": "name"
|
||||
}
|
||||
},
|
||||
"hits": [
|
||||
{
|
||||
"index": "hotel-index",
|
||||
"id": "hotel_123",
|
||||
"score": 0.8567,
|
||||
"fields": {
|
||||
"name": "Grand Hotel",
|
||||
"city": "Paris",
|
||||
"country": "France"
|
||||
}
|
||||
}
|
||||
],
|
||||
"total_hits": 1,
|
||||
"max_score": 0.8567,
|
||||
"took": 12
|
||||
}
|
||||
```
|
||||
</details>
|
||||
231
docs/docs/marketplace/plugins/microsoft-graph.md
Normal file
|
|
@ -0,0 +1,231 @@
|
|||
---
|
||||
id: marketplace-plugin-microsoft_graph
|
||||
title: Microsoft Graph
|
||||
---
|
||||
|
||||
By integrating Microsoft Graph with ToolJet, you can interact with Microsoft 365 services such as Outlook Mail, Calendar, Users, and OneDrive.
|
||||
|
||||
## Connection
|
||||
|
||||
To connect ToolJet with Microsoft Graph, you’ll need the following credentials:
|
||||
|
||||
- Tenant
|
||||
- Access token URL
|
||||
- Client ID
|
||||
- Client secret
|
||||
|
||||
Follow this [Microsoft guide](https://learn.microsoft.com/en-us/graph/auth-register-app-v2) to register an app and generate the required credentials.
|
||||
|
||||
You can enable the **Authentication required for all users** toggle in the configuration panel. When enabled, each user will be redirected to the OAuth consent screen the first time a query from this data source is triggered in your application. This ensures that every user connects with their own Microsoft account securely.
|
||||
|
||||
**Note**: After completing the OAuth flow, the query must be triggered again to fetch data from Microsoft Graph.
|
||||
|
||||
<img className="screenshot-full img-full" src="/img/marketplace/plugins/microsoft-graph/connection.png" alt="Microsoft Graph Configuration" />
|
||||
|
||||
## Supported Operations
|
||||
|
||||
### Outlook
|
||||
|
||||
#### Messages
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ------------------------------------------ | ------------------------------------ |
|
||||
| GET | `/me/messages` | List messages in the user's mailbox. |
|
||||
| POST | `/me/messages` | Create a new draft message. |
|
||||
| GET | `/me/messages/{message-id}` | Get a specific message by ID. |
|
||||
| PATCH | `/me/messages/{message-id}` | Update a message. |
|
||||
| DELETE | `/me/messages/{message-id}` | Delete a message. |
|
||||
| POST | `/me/messages/{message-id}/forward` | Forward an existing message. |
|
||||
| POST | `/me/messages/{message-id}/createForward` | Create a forward draft. |
|
||||
| POST | `/me/messages/{message-id}/reply` | Reply to a message. |
|
||||
| POST | `/me/messages/{message-id}/createReply` | Create a reply draft. |
|
||||
| POST | `/me/messages/{message-id}/replyAll` | Reply all to a message. |
|
||||
| POST | `/me/messages/{message-id}/createReplyAll` | Create a reply-all draft. |
|
||||
| POST | `/me/messages/{message-id}/send` | Send a draft message. |
|
||||
| POST | `/me/messages/{message-id}/move` | Move a message. |
|
||||
| POST | `/me/messages/{message-id}/copy` | Copy a message. |
|
||||
| POST | `/me/sendMail` | Send mail without creating a draft. |
|
||||
|
||||
#### Mail Folders
|
||||
|
||||
| Method | Endpoint | Description |
|
||||
| ------ | ------------------------------------------------ | ------------------------------------- |
|
||||
| GET | `/me/mailFolders` | List mail folders. |
|
||||
| POST | `/me/mailFolders` | Create a mail folder. |
|
||||
| GET | `/me/mailFolders/{mailFolder-id}` | Get specific mail folder. |
|
||||
| PATCH | `/me/mailFolders/{mailFolder-id}` | Update a mail folder. |
|
||||
| DELETE | `/me/mailFolders/{mailFolder-id}` | Delete a mail folder. |
|
||||
| GET | `/me/mailFolders/{mailFolder-id}/messages` | List messages inside a folder. |
|
||||
| GET | `/me/mailFolders/Inbox/messages/delta` | Track changes to inbox messages. |
|
||||
| GET | `/me/mailFolders/{mailFolder-id}/messages/delta` | Track changes to a folder's messages. |
|
||||
| GET | `/me/mailFolders/delta` | Track changes to all folders. |
|
||||
|
||||
#### Categories and Rooms
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | --------------------------------------------------- | ----------------------- |
|
||||
| GET | `/me/outlook/masterCategories` | List master categories |
|
||||
| POST | `/me/outlook/masterCategories` | Create a new category |
|
||||
| GET | `/me/outlook/masterCategories/{outlookCategory-id}` | Get a specific category |
|
||||
| PATCH | `/me/outlook/masterCategories/{outlookCategory-id}` | Update a category |
|
||||
| DELETE | `/me/outlook/masterCategories/{outlookCategory-id}` | Delete a category |
|
||||
| GET | `/me/findRooms` | List available rooms |
|
||||
| GET | `/me/findRooms(RoomList='{roomList-emailAddress}')` | Find rooms by room list |
|
||||
| GET | `/me/findRoomLists` | List room lists |
|
||||
|
||||
### Calendar
|
||||
|
||||
#### Default Calendar
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ------------------------------------------------- | ------------------------------------- |
|
||||
| GET | `/me/calendar` | Get default calendar |
|
||||
| PATCH | `/me/calendar` | Update default calendar |
|
||||
| GET | `/me/calendar/events` | List events from default calendar |
|
||||
| POST | `/me/calendar/events` | Create an event in default calendar |
|
||||
| GET | `/me/calendar/calendarPermissions` | List calendar permissions |
|
||||
| POST | `/me/calendar/calendarPermissions` | Grant permissions to default calendar |
|
||||
| GET | `/me/calendar/calendarPermissions/{permissionId}` | Get specific calendar permission |
|
||||
| PATCH | `/me/calendar/calendarPermissions/{permissionId}` | Update calendar permission |
|
||||
| DELETE | `/me/calendar/calendarPermissions/{permissionId}` | Delete calendar permission |
|
||||
| POST | `/me/calendar/getSchedule` | Get free/busy schedule info |
|
||||
|
||||
#### User Calendars and Groups
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ---------------------------------------- | --------------------------------------- |
|
||||
| GET | `/user/{userId}/calendar` | Get default calendar of a specific user |
|
||||
| GET | `/me/calendars` | List user calendars |
|
||||
| POST | `/me/calendars` | Create a new calendar |
|
||||
| GET | `/me/calendars/{calendarId}` | Get a specific calendar |
|
||||
| PATCH | `/me/calendars/{calendarId}` | Update a calendar |
|
||||
| DELETE | `/me/calendars/{calendarId}` | Delete a calendar |
|
||||
| GET | `/me/calendars/{calendarId}/events` | List events in a specific calendar |
|
||||
| POST | `/me/calendars/{calendarId}/events` | Create event in a specific calendar |
|
||||
| GET | `/me/calendarGroups` | List calendar groups |
|
||||
| POST | `/me/calendarGroups` | Create a calendar group |
|
||||
| GET | `/me/calendarGroups/{groupId}/calendars` | Get calendars in a group |
|
||||
| POST | `/me/calendarGroups/{groupId}/calendars` | Add calendar to a group |
|
||||
|
||||
#### Events
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ---------------------------------- | ----------------------------------- |
|
||||
| GET | `/me/events/{eventId}` | Get an event by ID |
|
||||
| PATCH | `/me/events/{eventId}` | Update an event |
|
||||
| DELETE | `/me/events/{eventId}` | Delete an event |
|
||||
| GET | `/me/events/{eventId}/instances` | List instances of a recurring event |
|
||||
| GET | `/me/events/{eventId}/attachments` | List attachments of an event |
|
||||
| POST | `/me/events/{eventId}/attachments` | Add attachments to an event |
|
||||
| GET | `/me/calendarView` | Get calendar view of events |
|
||||
| POST | `/me/findMeetingTimes` | Find meeting times |
|
||||
|
||||
### Users
|
||||
|
||||
#### User Management
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ------------------ | ---------------------- |
|
||||
| GET | `/users` | List all users |
|
||||
| POST | `/users` | Create a user |
|
||||
| GET | `/users/{user-id}` | Get a specific user |
|
||||
| PATCH | `/users/{user-id}` | Update a specific user |
|
||||
| DELETE | `/users/{user-id}` | Delete a specific user |
|
||||
|
||||
#### Profile
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ------------ | -------------------------------- |
|
||||
| GET | `/me` | Get profile of signed-in user |
|
||||
| PATCH | `/me` | Update profile of signed-in user |
|
||||
|
||||
### Teams
|
||||
|
||||
#### Teams and Chats
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ----------------- | ------------------------------ |
|
||||
| GET | `/teams` | List teams |
|
||||
| POST | `/teams` | Create a team |
|
||||
| GET | `/chats` | List chats |
|
||||
| POST | `/chats` | Create a chat |
|
||||
| GET | `/me/joinedTeams` | List teams the user has joined |
|
||||
|
||||
#### Chat Operations
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | -------------------------------------------------- | -------------------------------- |
|
||||
| GET | `/chats/{chat-id}` | Get a chat |
|
||||
| PATCH | `/chats/{chat-id}` | Update a chat |
|
||||
| DELETE | `/chats/{chat-id}` | Delete a chat |
|
||||
| GET | `/chats/{chat-id}/members` | List members in a chat |
|
||||
| POST | `/chats/{chat-id}/members` | Add members to a chat |
|
||||
| POST | `/chats/{chat-id}/members/add` | Add members (alternate endpoint) |
|
||||
| GET | `/chats/{chat-id}/members/{conversationMember-id}` | Get chat member details |
|
||||
| PATCH | `/chats/{chat-id}/members/{conversationMember-id}` | Update chat member |
|
||||
| DELETE | `/chats/{chat-id}/members/{conversationMember-id}` | Remove chat member |
|
||||
| GET | `/chats/{chat-id}/messages` | List messages in a chat |
|
||||
| POST | `/chats/{chat-id}/messages` | Send message in a chat |
|
||||
| GET | `/chats/{chat-id}/messages/{chatMessage-id}` | Get a specific chat message |
|
||||
| PATCH | `/chats/{chat-id}/messages/{chatMessage-id}` | Update a chat message |
|
||||
| DELETE | `/chats/{chat-id}/messages/{chatMessage-id}` | Delete a chat message |
|
||||
| GET | `/chats/getAllMessages` | Get all messages across chats |
|
||||
|
||||
#### Team Operation
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ------------------------------ | -------------------------------- |
|
||||
| GET | `/teams/{team-id}` | Get a team |
|
||||
| PATCH | `/teams/{team-id}` | Update a team |
|
||||
| DELETE | `/teams/{team-id}` | Delete a team |
|
||||
| POST | `/teams/{team-id}/archive` | Archive a team |
|
||||
| POST | `/teams/{team-id}/unarchive` | Unarchive a team |
|
||||
| GET | `/teams/{team-id}/members` | List team members |
|
||||
| POST | `/teams/{team-id}/members` | Add team members |
|
||||
| POST | `/teams/{team-id}/members/add` | Add members (alternate endpoint) |
|
||||
|
||||
#### Channels and Messages
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ------------------------------------------------------------------ | --------------------------------------- |
|
||||
| GET | `/teams/{team-id}/allChannels` | List all channels in a team |
|
||||
| GET | `/teams/{team-id}/channels` | List standard channels in a team |
|
||||
| POST | `/teams/{team-id}/channels` | Create a channel in a team |
|
||||
| GET | `/teams/{team-id}/channels/{channel-id}` | Get channel details |
|
||||
| PATCH | `/teams/{team-id}/channels/{channel-id}` | Update a channel |
|
||||
| DELETE | `/teams/{team-id}/channels/{channel-id}` | Delete a channel |
|
||||
| GET | `/teams/{team-id}/channels/{channel-id}/members` | List members in a channel |
|
||||
| POST | `/teams/{team-id}/channels/{channel-id}/members` | Add members to a channel |
|
||||
| GET | `/teams/{team-id}/channels/{channel-id}/messages` | List messages in a channel |
|
||||
| POST | `/teams/{team-id}/channels/{channel-id}/messages` | Send message in a channel |
|
||||
| GET | `/teams/{team-id}/channels/{channel-id}/messages/{chatMessage-id}` | Get a specific channel message |
|
||||
| PATCH | `/teams/{team-id}/channels/{channel-id}/messages/{chatMessage-id}` | Update a channel message |
|
||||
| DELETE | `/teams/{team-id}/channels/{channel-id}/messages/{chatMessage-id}` | Delete a channel message |
|
||||
| GET | `/teams/{team-id}/allChannels/{channel-id}` | Get specific channel under all channels |
|
||||
|
||||
### OneDrive
|
||||
|
||||
#### Root and Shared Content
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ------------------------------------------- | ----------------------------------- |
|
||||
| GET | `/me/drive/root/children` | List items in root folder |
|
||||
| POST | `/me/drive/root/children` | Create a new file or folder in root |
|
||||
| GET | `/me/drive/recent` | List recent files |
|
||||
| GET | `/me/drive/sharedWithMe` | List files shared with the user |
|
||||
| GET | `/me/drive/root/search(q='{search-query}')` | Search files by query |
|
||||
|
||||
#### Specific Drives and Items
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ------------------------------------------------ | ------------------------------------- |
|
||||
| GET | `/drives/{drive-id}/root/children` | List items in a specific drive's root |
|
||||
| GET | `/drives/{drive-id}/items/{item-id}/children` | List children of a folder |
|
||||
| POST | `/drives/{drive-id}/items/{item-id}/children` | Add item to folder |
|
||||
| GET | `/drives/{drive-id}/items/{item-id}` | Get metadata for an item |
|
||||
| PATCH | `/drives/{drive-id}/items/{item-id}` | Update metadata of an item |
|
||||
| DELETE | `/drives/{drive-id}/items/{item-id}` | Delete an item |
|
||||
| GET | `/drives/{drive-id}/items/{item-id}/content` | Download file content |
|
||||
| PUT | `/drives/{drive-id}/items/{item-id}/content` | Upload file content |
|
||||
| POST | `/drives/{drive-id}/items/{item-id}/createLink` | Create sharing link |
|
||||
| GET | `/drives/{drive-id}/items/{item-id}/permissions` | Get item permissions |
|
||||
98
docs/docs/marketplace/plugins/ups.md
Normal file
|
|
@ -0,0 +1,98 @@
|
|||
---
|
||||
id: marketplace-plugin-ups
|
||||
title: UPS
|
||||
---
|
||||
|
||||
By integrating UPS with ToolJet you can track packages, calculate shipping rates, validate addresses, and automate logistics processes, all within your ToolJet applications to enhance operational visibility and reduce manual overhead.
|
||||
|
||||
## Connection
|
||||
|
||||
To connect with UPS you need the following credentials:
|
||||
- Client ID
|
||||
- Client secret
|
||||
- Shipper number
|
||||
|
||||
You can follow the steps in the [Getting Started with UPS APIs](https://developer.ups.com/get-started) guide to generate these credentials.
|
||||
|
||||
<img className="screenshot-full img-full" src="/img/marketplace/plugins/ups/connection.png" alt="UPS Install" />
|
||||
|
||||
## Supported Operations
|
||||
|
||||
### Shipping
|
||||
|
||||
#### Version
|
||||
|
||||
| **Method** | **Endpoint** | **Description** |
|
||||
| ---------- | --------------| -----------------|
|
||||
| POST | `/shipments/{version}/ship` | Create a new shipment. |
|
||||
| DELETE | `/shipments/{version}/void/cancel/{shipmentIdentificationNumber}` | Cancel a shipment using its shipment ID. |
|
||||
| POST | `/labels/{version}/recovery` | Recover a label for a previously created shipment. |
|
||||
|
||||
#### Deprecated Version
|
||||
|
||||
| **Method** | **Endpoint** | **Description** |
|
||||
| ---------- | --------------| -----------------|
|
||||
| POST | `/shipments/{deprecatedversion}/ship` | Create shipment using an older API version. |
|
||||
| DELETE | `/shipments/{deprecatedversion}/void/cancel/{shipmentIdentificationNumber}` | Cancel shipment using an older API version. |
|
||||
|
||||
### Rating
|
||||
|
||||
#### Version
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ----------------------------------- | ------------------------------------------------ |
|
||||
| POST | `/rating/{version}/{requestoption}` | Retrieve or calculate shipping rate quotes (UPS) |
|
||||
|
||||
#### Deprecated Version
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | --------------| ------------ |
|
||||
| POST | `/rating/{deprecatedVersion}/{requestoption}` | Retrieve shipping rate quotes using a deprecated UPS API version |
|
||||
|
||||
### Tracking
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | -------------| -------------|
|
||||
| GET | `/track/v1/details/{inquiryNumber}` | Retrieve shipment tracking details using the tracking (inquiry) number |
|
||||
| GET | `/track/v1/reference/details/{referenceNumber}` | Retrieve tracking information using a shipment reference number |
|
||||
|
||||
### Address Validation
|
||||
|
||||
#### Version
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | -------------| -------------|
|
||||
| POST | `/addressvalidation/{version}/{requestoption}` | Validate and verify shipping addresses to ensure accuracy (UPS) |
|
||||
|
||||
#### Deprecated Version
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | -------------| -------------|
|
||||
| POST | `/addressvalidation/{deprecatedVersion}/{requestoption}` | Validate shipping addresses using a deprecated UPS API version |
|
||||
|
||||
### Time In Transit
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ------------ | ----------- |
|
||||
| POST | `/shipments/{version}/{transittimes}` | Retrieve estimated transit times for shipments using UPS API |
|
||||
|
||||
### Pickup
|
||||
|
||||
#### Versions
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | ------------ | ----------- |
|
||||
| POST | `/shipments/{version}/pickup/{pickuptype}` | Schedule a shipment pickup based on pickup type |
|
||||
| GET | `/shipments/{version}/pickup/{pickuptype}` | Retrieve pickup availability or details for a specific pickup type |
|
||||
| DELETE | `/shipments/{version}/pickup/{CancelBy}` | Cancel a scheduled pickup using a specified cancellation method |
|
||||
| POST | `/pickupcreation/{version}/pickup` | Create a new UPS pickup request |
|
||||
| GET | `/pickup/{version}/countries/{countrycode}` | Get pickup service availability for a specified country |
|
||||
| POST | `/pickup/{version}/servicecenterlocations` | Locate nearby UPS service centers for pickup services |
|
||||
|
||||
#### Deprecated Version
|
||||
|
||||
| Method | API Endpoint | Description |
|
||||
| ------ | -------------| ------------|
|
||||
| DELETE | `/shipments/{deprecatedVersion}/pickup/{CancelBy}` | Cancel a scheduled pickup using a deprecated UPS API version |
|
||||
| POST | `/pickupcreation/{deprecatedVersion}/pickup` | Create a new pickup request using a deprecated UPS API version |
|
||||
|
||||
|
|
@ -16,9 +16,10 @@ Please find the latest LTS version here: <br/>
|
|||
Starting from **`v3.5.0-ee-lts`** all releases are AI releases. Checkout the **[Build with AI](/docs/build-with-ai/overview)** section for more information. If you have any questions feel free to join our [Slack Community](https://join.slack.com/t/tooljet/shared_invite/zt-2rk4w42t0-ZV_KJcWU9VL1BBEjnSHLCA) or send us an email at hello@tooljet.com.
|
||||
:::
|
||||
|
||||
| Version | Release Date | Docker Pull Command |
|
||||
| ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------- | -------------------------------------------- |
|
||||
| Latest EE-LTS | N/A | `docker pull tooljet/tooljet:ee-lts-latest` |
|
||||
| Version | Release Date | Docker Pull Command |
|
||||
| ---------| ------------- | ----------------------|
|
||||
| Latest EE-LTS | N/A | `docker pull tooljet/tooljet:ee-lts-latest` |
|
||||
| [v3.16.0-lts](https://hub.docker.com/layers/tooljet/tooljet/v3.16.0-lts/images/sha256-626a6463504f74659e1468a69edbdacc264eded5867ae159a18358fc43d47b48) | August 4, 2025 | `docker pull tooljet/tooljet:v3.16.0-lts` |
|
||||
| [v3.5.0-ee-lts](https://hub.docker.com/layers/tooljet/tooljet/v3.5.0-ee-lts/images/sha256-9580d2377d17ce0c26fca0535eca51bce899015f26bfc81769d032b4b15a5da5) | February 12, 2025 | `docker pull tooljet/tooljet:v3.5.0-ee-lts` |
|
||||
| [v3.0.24-ee-lts](https://hub.docker.com/layers/tooljet/tooljet/v3.0.24-ee-lts/images/sha256-33494c8ee72c440ce0ded925cdeb15507cd87f2b7c3fe172dd1cbee790e3b96f?context=explore) | January 3, 2025 | `docker pull tooljet/tooljet:v3.0.24-ee-lts` |
|
||||
| [v3.0.23-ee-lts](https://hub.docker.com/layers/tooljet/tooljet/v3.0.23-ee-lts/images/sha256-1ca2bcb5dac66b1d3d089bd8300b7077c0dcd27bb2cfe6665bf388b680294467?context=explore) | January 2, 2025 | `docker pull tooljet/tooljet:v3.0.23-ee-lts` |
|
||||
|
|
|
|||
|
|
@ -129,7 +129,7 @@ To use ToolJet Database, you'd have to set up and deploy PostgREST server which
|
|||
|
||||
Deploying ToolJet Database is mandatory from ToolJet 3.0 or else the migration might break, checkout the following docs to know more about new major version, including breaking changes that require you to adjust your applications accordingly:
|
||||
|
||||
- [ToolJet 3.0 Migration Guide for Self-Hosted Versions](./upgrade-to-v3.md)
|
||||
- [ToolJet 3.0 Migration Guide for Self-Hosted Versions](/docs/setup/upgrade-to-v3/)
|
||||
- [Cloud](./cloud-v3-migration.md)
|
||||
|
||||
Follow the steps below to deploy PostgREST on a ECS cluster.
|
||||
|
|
|
|||
|
|
@ -1,368 +1,248 @@
|
|||
---
|
||||
id: env-vars
|
||||
title: Environment variables
|
||||
title: Environment Variables
|
||||
---
|
||||
|
||||
# Environment variables
|
||||
ToolJet requires several environment variables to function properly. Below is a simplified guide to setting them up.
|
||||
|
||||
Both the ToolJet server and client requires some environment variables to start running.
|
||||
## ToolJet Server
|
||||
|
||||
_If you have any questions feel free to join our [Slack Community](https://join.slack.com/t/tooljet/shared_invite/zt-2rk4w42t0-ZV_KJcWU9VL1BBEjnSHLCA) or send us an email at hello@tooljet.com._
|
||||
### Required Variables
|
||||
|
||||
## ToolJet server
|
||||
#### ToolJet Host
|
||||
|
||||
### ToolJet host ( required )
|
||||
- `TOOLJET_HOST`: Public URL of ToolJet (e.g., `https://app.tooljet.ai`)
|
||||
|
||||
| variable | description |
|
||||
| ------------ | ---------------------------------------------------------------- |
|
||||
| TOOLJET_HOST | the public URL of ToolJet client ( eg: https://app.tooljet.com ) |
|
||||
#### Lockbox Configuration
|
||||
- `LOCKBOX_MASTER_KEY`: 32-byte hex string for encrypting datasource credentials
|
||||
- Generate using: `openssl rand -hex 32`
|
||||
|
||||
### Lockbox configuration ( required )
|
||||
#### Application Secret
|
||||
- `SECRET_KEY_BASE`: 64-byte hex string for encrypting session cookies
|
||||
- Generate using: `openssl rand -hex 64`
|
||||
|
||||
ToolJet server uses lockbox to encrypt datasource credentials. You should set the environment variable `LOCKBOX_MASTER_KEY` with a 32 byte hexadecimal string.
|
||||
#### Database Configuration
|
||||
- `PG_HOST`: PostgreSQL database host
|
||||
- `PG_DB`: Database name
|
||||
- `PG_USER`: Username
|
||||
- `PG_PASS`: Password
|
||||
- `PG_PORT`: Port
|
||||
|
||||
**Docker Compose Setup:** If you are using a Docker Compose setup with an in-built PostgreSQL instance, set `PG_HOST` to `postgres`. This ensures that Docker's internal DNS resolves the hostname correctly, allowing the ToolJet server to connect to the database seamlessly.
|
||||
|
||||
### Application Secret ( required )
|
||||
**Database Connection URL:** If you intend to use the database connection URL and your database does not support SSL, use the following format when setting the `DATABASE_URL` variable:
|
||||
|
||||
ToolJet server uses a secure 64 byte hexadecimal string to encrypt session cookies. You should set the environment variable `SECRET_KEY_BASE`.
|
||||
```
|
||||
DATABASE_URL=postgres://PG_USER:PG_PASS@PG_HOST:5432/PG_DB?sslmode=disable
|
||||
```
|
||||
|
||||
:::tip
|
||||
If you have `openssl` installed, you can run the following commands to generate the value for `LOCKBOX_MASTER_KEY` and `SECRET_KEY_BASE`.
|
||||
Replace `username`, `password`, `hostname`, `port`, and `database_name` with your actual database details.
|
||||
|
||||
For `LOCKBOX_MASTER_KEY` use `openssl rand -hex 32`
|
||||
For `SECRET_KEY_BASE` use `openssl rand -hex 64`
|
||||
:::
|
||||
#### Disabling Automatic Database & Extension Creation (Optional)
|
||||
- `PG_DB_OWNER=false`: By default, ToolJet tries to create a database based on the `PG_DB` variable and may additionally try to create Postgres extensions. This requires the Postgres user to have `CREATEDB` permission. If this cannot be granted, you can disable this behaviour by setting `PG_DB_OWNER` to `false`, and you will have to run these steps manually.
|
||||
|
||||
### Database configuration ( required )
|
||||
#### ToolJet Database
|
||||
- `TOOLJET_DB`: Default database name (`tooljet_db`)
|
||||
- `TOOLJET_DB_HOST`: Database host
|
||||
- `TOOLJET_DB_USER`: Database username
|
||||
- `TOOLJET_DB_PASS`: Database password
|
||||
- `TOOLJET_DB_PORT`: Database port
|
||||
|
||||
ToolJet server uses PostgreSQL as the database.
|
||||
|
||||
| variable | description |
|
||||
| -------- | ---------------------- |
|
||||
| PG_HOST | postgres database host |
|
||||
| PG_DB | name of the database |
|
||||
| PG_USER | username |
|
||||
| PG_PASS | password |
|
||||
| PG_PORT | port |
|
||||
|
||||
:::tip
|
||||
If you are using docker-compose setup, you can set PG_HOST as `postgres` which will be DNS resolved by docker
|
||||
:::
|
||||
|
||||
:::info
|
||||
If you intend to use the DB connection URL and the connection does not support SSL, please use the below format with the variable DATABASE_URL.
|
||||
`postgres://username:password@hostname:port/database_name?sslmode=disable`
|
||||
:::
|
||||
|
||||
### Disable database and extension creation (optional)
|
||||
|
||||
By default, ToolJet tries to create a database based on the `PG_DB` variable and may additionally try to create Postgres extensions. This requires the Postgres user to have CREATEDB permission. If this cannot be granted, you can disable this behaviour by setting `PG_DB_OWNER` to `false`, and you will have to run these steps manually.
|
||||
|
||||
### Check for updates ( optional )
|
||||
|
||||
Self-hosted version of ToolJet pings our server to fetch the latest product updates every 24 hours. You can disable this by setting the value of `CHECK_FOR_UPDATES` environment variable to `0`. This feature is enabled by default.
|
||||
|
||||
### Comment feature enable ( optional )
|
||||
|
||||
Use this environment variable to enable/disable the feature that allows you to add comments on the canvas. To configure this environment variable, ensure that multiplayer editing is enabled in the Settings.
|
||||
|
||||
| variable | value |
|
||||
| ---------------------- | ----------------- |
|
||||
| COMMENT_FEATURE_ENABLE | `true` or `false` |
|
||||
|
||||
### Marketplace
|
||||
|
||||
#### Enable Marketplace plugin development mode ( optional )
|
||||
|
||||
Use this environment variable to enable/disable the development mode that allows developers to build the plugin.
|
||||
|
||||
| variable | value |
|
||||
| --------------------------- | ----------------- |
|
||||
| ENABLE_MARKETPLACE_DEV_MODE | `true` or `false` |
|
||||
|
||||
### User Session Expiry Time (Optional)
|
||||
|
||||
| variable | description |
|
||||
| ------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| USER_SESSION_EXPIRY | This variable controls the user session expiry time. By default, the session expires after **10** days. The variable expects the value in minutes. ex: USER_SESSION_EXPIRY = 120 which is 2 hours |
|
||||
|
||||
### Enable ToolJet Database (required)
|
||||
|
||||
| variable | description |
|
||||
| ------------------- | -------------------------------------------- |
|
||||
| TOOLJET_DB | Default value is `tooljet_db` |
|
||||
| TOOLJET_DB_HOST | database host |
|
||||
| TOOLJET_DB_USER | database username |
|
||||
| TOOLJET_DB_PASS | database password |
|
||||
| TOOLJET_DB_PORT | database port |
|
||||
| PGRST_JWT_SECRET | JWT token client provided for authentication |
|
||||
| PGRST_HOST | postgrest database host |
|
||||
| PGRST_DB_PRE_CONFIG | postgrest.pre_config |
|
||||
|
||||
:::tip
|
||||
The database name provided for `TOOLJET_DB` will be utilized to create a new database during server boot process in all of our production deploy setups.
|
||||
In case you want to trigger it manually, use the command `npm run db:create` on the ToolJet server.
|
||||
:::
|
||||
|
||||
### Why ToolJet Requires Two Databases
|
||||
#### Why ToolJet Requires Two Databases
|
||||
|
||||
ToolJet requires two separate databases for optimal functionality. **TOOLJET_DB** is used to store the platform's internal metadata, including tables created within ToolJet. On the other hand, **PG_DB** acts as the primary database for application data, handling end-user data managed by the apps built on ToolJet.
|
||||
|
||||
:::info
|
||||
If you intend to use the DB connection URL and the connection does not support SSL, please use the below format with the variable TOOLJET_DB_URL.
|
||||
`postgres://username:password@hostname:port/database_name?sslmode=disable`
|
||||
:::
|
||||
|
||||
### Server Host ( optional )
|
||||
**Automatic Database Creation:** The database name specified in `TOOLJET_DB` will be automatically created during the server boot process in all production deployment setups.
|
||||
|
||||
You can specify a different server for backend if it is hosted on another server.
|
||||
#### PostgREST
|
||||
ToolJet uses **PostgREST (v12.2.0)** for API access. The following environment variables are required for PostgREST:
|
||||
|
||||
| variable | value |
|
||||
| ----------- | ------------------------------------------------------------------------------------------------- |
|
||||
| SERVER_HOST | Configure a hostname for the server as a proxy pass. If no value is set, it defaults to `server`. |
|
||||
- `PGRST_JWT_SECRET`: JWT secret (Generate using `openssl rand -hex 32`). If this parameter is not specified, PostgREST will refuse authentication requests.
|
||||
- `PGRST_DB_URI`: Database connection string
|
||||
- `PGRST_LOG_LEVEL=info`
|
||||
|
||||
### Hide account setup link
|
||||
If you intend to make changes to the above configuration, please refer to the [PostgREST configuration docs](https://postgrest.org/en/stable/configuration.html#environment-variables).
|
||||
|
||||
If you want to hide the account setup link from the admin in the manage users page, set the environment variable `HIDE_ACCOUNT_SETUP_LINK` to `true`. Please make sure you have configured SMTP so that users can receive the welcome mail.
|
||||
#### Configuring PGRST_DB_URI
|
||||
|
||||
### Disabling signups ( optional )
|
||||
`PGRST_DB_URI` is required for PostgREST, which is responsible for exposing the database as a REST API. It must be explicitly set to ensure proper functionality.
|
||||
|
||||
If you want to restrict the signups and allow new users only by invitations, set the environment variable `DISABLE_SIGNUPS` to `true`.
|
||||
This follows the format:
|
||||
|
||||
:::tip
|
||||
You will still be able to see the signup page but won't be able to successfully submit the form.
|
||||
:::
|
||||
```
|
||||
PGRST_DB_URI=postgres://TOOLJET_DB_USER:TOOLJET_DB_PASS@TOOLJET_DB_HOST:5432/TOOLJET_DB
|
||||
```
|
||||
|
||||
### Serve client as a server end-point ( optional )
|
||||
Ensure that:
|
||||
|
||||
By default, the `SERVE_CLIENT` variable will be unset and the server will serve the client at its `/` end-point.
|
||||
You can set `SERVE_CLIENT` to `false` to disable this behaviour.
|
||||
- `username` and `password` match the credentials for the PostgREST database user.
|
||||
- `hostname` is correctly set (`postgres` if using Docker Compose setup with an in-built PostgreSQL).
|
||||
- `port` is the PostgreSQL port (default: `5432`).
|
||||
- `database_name` is the database used for PostgREST (`tooljet_db` in this example).
|
||||
|
||||
### Serve client at subpath
|
||||
#### Redis Configuration
|
||||
|
||||
If ToolJet is hosted on a domain subpath, you can set the environment variable `SUB_PATH` to support it.
|
||||
Please note the subpath is to be set with trailing `/` and is applicable only when the server is serving the frontend client.
|
||||
Include the following Redis environment variables within the ToolJet deployment only if you are connecting to an external **Redis instance (v6.2)** for a multi-service or multi-pod setup and have followed the necessary steps to create Redis.
|
||||
|
||||
### SMTP Configuration (Optional)
|
||||
```
|
||||
REDIS_HOST=
|
||||
REDIS_PORT=
|
||||
REDIS_USER=
|
||||
REDIS_PASSWORD=
|
||||
```
|
||||
|
||||
ToolJet uses SMTP services to send emails (e.g., invitation emails when you add new users to your workspace).
|
||||
### Optional Configurations
|
||||
|
||||
For Enterprise Edition, you must configure SMTP settings through the user interface (UI) in the ToolJet Settings. For more information, see [SMTP Configuration](/docs/org-setup/smtp-config).
|
||||
#### Comments Feature
|
||||
|
||||
- `COMMENT_FEATURE_ENABLE=true/false`: Use this environment variable to enable/disable the feature that allows you to add comments on the canvas. To configure this environment variable, ensure that multiplayer editing is enabled in the Settings.
|
||||
|
||||
#### User Session Expiry
|
||||
- `USER_SESSION_EXPIRY`: Controls session expiry time (in minutes). Default: **10 days**.
|
||||
|
||||
Note: The variable expects the value in minutes, e.g. `USER_SESSION_EXPIRY = 120`, which is 2 hours.
|
||||
|
||||
#### Password Retry Limit
|
||||
By default, an account is locked after 5 failed login attempts. You can control this with:
|
||||
|
||||
- `DISABLE_PASSWORD_RETRY_LIMIT=true`: Disables the retry limit.
|
||||
- `PASSWORD_RETRY_LIMIT=<number>`: Sets a custom retry limit (default is 5).
|
||||
|
||||
#### Hide Account Setup Link
|
||||
|
||||
- `HIDE_ACCOUNT_SETUP_LINK`: Set to `true` to hide the account setup link from the admin in the manage user page. Ensure SMTP is configured to send welcome emails.
|
||||
|
||||
#### Restrict Signups
|
||||
Set `DISABLE_SIGNUPS=true` to allow only invited users to sign up. The signup page will still be visible but unusable.
|
||||
|
||||
#### SMTP Configuration
|
||||
ToolJet sends emails via SMTP.
|
||||
|
||||
:::info
|
||||
If you have upgraded from a version prior to v2.62.0, the SMTP variables in your .env file will automatically be mapped to the UI.
|
||||
For versions v2.62.0 and later, SMTP configuration will no longer be picked up from the .env file for Enterprise Edition. You must configure SMTP through the UI. You can safely remove these variables from your .env file after ensuring that the configuration is properly set up in the UI.
|
||||
If you have upgraded from a version prior to v2.62.0, the SMTP variables in your .env file will automatically be mapped to the UI. For versions v2.62.0 and later, SMTP configuration will no longer be picked up from the .env file for Enterprise Edition. You must configure SMTP through the UI. You can safely remove these variables from your .env file after ensuring that the configuration is properly set up in the UI.
|
||||
:::
|
||||
|
||||
For Community Edition, you can configure SMTP via environment variables using the following:
|
||||
For **Enterprise Edition**, configure SMTP in the ToolJet Settings UI.
|
||||
|
||||
| Variable | Description |
|
||||
| ------------------ | ------------------------------------- |
|
||||
| DEFAULT_FROM_EMAIL | From email for emails sent by ToolJet |
|
||||
| SMTP_USERNAME | Username |
|
||||
| SMTP_PASSWORD | Password |
|
||||
| SMTP_DOMAIN | Domain or host |
|
||||
| SMTP_PORT | Port |
|
||||
For **Community Edition**, use these environment variables:
|
||||
|
||||
### Slack configuration ( optional )
|
||||
- `DEFAULT_FROM_EMAIL`: Sender email address
|
||||
- `SMTP_USERNAME`: SMTP username
|
||||
- `SMTP_PASSWORD`: SMTP password
|
||||
- `SMTP_DOMAIN`: SMTP host
|
||||
- `SMTP_PORT`: SMTP port
|
||||
|
||||
If your ToolJet installation requires Slack as a data source, you need to create a Slack app and set the following environment variables:
|
||||
#### Custom CA Certificate
|
||||
If ToolJet needs to connect to self-signed HTTPS endpoints, ensure the `NODE_EXTRA_CA_CERTS` environment variable is set to the absolute path of the CA certificate file.
|
||||
|
||||
| variable | description |
|
||||
| ------------------- | ------------------------------ |
|
||||
| SLACK_CLIENT_ID | client id of the slack app |
|
||||
| SLACK_CLIENT_SECRET | client secret of the slack app |
|
||||
- `NODE_EXTRA_CA_CERTS=/path/to/cert.pem`: Absolute path to the PEM file (can contain multiple certificates).
|
||||
|
||||
### Google OAuth ( optional )
|
||||
#### ToolJet API Import Application
|
||||
|
||||
If your ToolJet installation needs access to data sources such as Google sheets, you need to create OAuth credentials from Google Cloud Console.
|
||||
By default, the server accepts a maximum JSON size of 50 MB. To increase this limit, use the following environment variable:
|
||||
|
||||
| variable | description |
|
||||
| -------------------- | ------------- |
|
||||
| GOOGLE_CLIENT_ID | client id |
|
||||
| GOOGLE_CLIENT_SECRET | client secret |
|
||||
- `MAX_JSON_SIZE = "150mb"`
|
||||
|
||||
### Google maps configuration ( optional )
|
||||
### Third-Party Integrations
|
||||
|
||||
If your ToolJet installation requires `Maps` widget, you need to create an API key for Google Maps API.
|
||||
#### Slack
|
||||
To use Slack as a data source in ToolJet, create a Slack app and set:
|
||||
|
||||
| variable | description |
|
||||
| ------------------- | ------------------- |
|
||||
| GOOGLE_MAPS_API_KEY | Google maps API key |
|
||||
- `SLACK_CLIENT_ID`: Slack app client ID
|
||||
- `SLACK_CLIENT_SECRET`: Slack app client secret
|
||||
|
||||
### APM VENDOR ( optional )
|
||||
#### Google OAuth
|
||||
To connect ToolJet with Google services like Google Sheets, create OAuth credentials in Google Cloud Console.
|
||||
|
||||
Specify application monitoring vendor. Currently supported values - `sentry`.
|
||||
- `GOOGLE_CLIENT_ID`: Google OAuth client ID
|
||||
- `GOOGLE_CLIENT_SECRET`: Google OAuth client secret
|
||||
|
||||
| variable | description |
|
||||
| ---------- | ----------------------------------------- |
|
||||
| APM_VENDOR | Application performance monitoring vendor |
|
||||
#### Google Maps API
|
||||
To use the Maps widget in ToolJet, create a Google Maps API key and set:
|
||||
|
||||
### SENTRY DNS ( optional )
|
||||
- `GOOGLE_MAPS_API_KEY`: Google Maps API key
|
||||
|
||||
| variable | description |
|
||||
| ---------- | ------------------------------------------------------------------------------------------------- |
|
||||
| SENTRY_DNS | DSN tells a Sentry SDK where to send events so the events are associated with the correct project |
|
||||
#### Application Monitoring (APM)
|
||||
- `APM_VENDOR=sentry`: Set APM vendor.
|
||||
- `SENTRY_DNS`: Sentry project DSN.
|
||||
- `SENTRY_DEBUG=true/false`: Enable/disable Sentry debugging.
|
||||
|
||||
### SENTRY DEBUG ( optional )
|
||||
#### Security & Authentication
|
||||
By default, ToolJet sends user count updates every 24 hours. To disable this, use:
|
||||
|
||||
Prints logs for sentry.
|
||||
- `DISABLE_TOOLJET_TELEMETRY=true`: Disables telemetry. (Telemetry is enabled by default.)
|
||||
|
||||
| variable | description |
|
||||
| ------------ | ------------------------------------------- |
|
||||
| SENTRY_DEBUG | `true` or `false`. Default value is `false` |
|
||||
#### Single Sign-On (SSO)
|
||||
Enable Google or GitHub SSO with these environment variables:
|
||||
|
||||
### Server URL ( optional)
|
||||
**Google SSO:**
|
||||
- `SSO_GOOGLE_OAUTH2_CLIENT_ID`: Google OAuth client ID
|
||||
|
||||
This is used to set up CSP headers and to add trace info to be used with APM vendors.
|
||||
**GitHub SSO:**
|
||||
- `SSO_GIT_OAUTH2_CLIENT_ID`: GitHub OAuth client ID
|
||||
- `SSO_GIT_OAUTH2_CLIENT_SECRET`: GitHub OAuth client secret
|
||||
- `SSO_GIT_OAUTH2_HOST`: GitHub host if self-hosted
|
||||
|
||||
| variable | description |
|
||||
| ------------------ | -------------------------------------------------------------- |
|
||||
| TOOLJET_SERVER_URL | the URL of ToolJet server ( eg: `https://server.tooljet.com` ) |
|
||||
**General SSO Settings:**
|
||||
- `SSO_ACCEPTED_DOMAINS`: Comma-separated list of allowed email domains
|
||||
- `SSO_DISABLE_SIGNUPS=true`: Restricts signups to existing users
|
||||
|
||||
### RELEASE VERSION ( optional)
|
||||
#### REST API Cookie Forwarding
|
||||
By default, ToolJet does not forward cookies with REST API requests. To enable this (self-hosted only), set:
|
||||
|
||||
Once set, any APM provider that supports segregation by releases will track it.
|
||||
- `FORWARD_RESTAPI_COOKIES=true`: Allows forwarding cookies with REST API requests.
|
||||
|
||||
### NODE_EXTRA_CA_CERTS (optional)
|
||||
#### Asset Path
|
||||
|
||||
ToolJet needs to be configured for custom CA certificate to be able to trust and establish connection over https. This requires you to configure an additional env var `NODE_EXTRA_CA_CERTS` to have absolute path to your CA certificates. This file named `cert.pem` needs to be in PEM format and can have more than one certificates.
|
||||
This is required when the assets for the client are to be loaded from elsewhere (eg: CDN). This can be an absolute path, or relative to main HTML file.
|
||||
|
||||
| variable | description |
|
||||
| ------------------- | ------------------------------------------------------------------ |
|
||||
| NODE_EXTRA_CA_CERTS | absolute path to certificate PEM file ( eg: /ToolJet/ca/cert.pem ) |
|
||||
- `ASSET_PATH`: Path for loading frontend assets (e.g., `https://app.tooljet.ai/`)
|
||||
|
||||
### Disable telemetry ( optional )
|
||||
## Additional Configurations
|
||||
|
||||
Pings our server to update the total user count every 24 hours. You can disable this by setting the value of `DISABLE_TOOLJET_TELEMETRY` environment variable to `true`. This feature is enabled by default.
|
||||
#### Log File Path
|
||||
- `LOG_FILE_PATH`: Path to store audit logs (e.g., `tooljet/log/tooljet-audit.log`)
|
||||
|
||||
### Password Retry Limit (Optional)
|
||||
#### Embedding Private Apps
|
||||
By default, only public apps can be embedded. To allow embedding of private ToolJet apps, set:
|
||||
|
||||
The maximum number of login password retries for a user is set to 5 by default; the account will be locked after 5 unsuccessful login attempts. Use the variables mentioned below to control this behavior:
|
||||
- `ENABLE_PRIVATE_APP_EMBED=true/false`: Allows embedding of private ToolJet apps.
|
||||
|
||||
| variable | description |
|
||||
| ---------------------------- | ------------------------------------------------------------------------------------------------------ |
|
||||
| DISABLE_PASSWORD_RETRY_LIMIT | (true/false) To disable the password retry check, if value is `true` then no limits for password retry |
|
||||
| PASSWORD_RETRY_LIMIT | To change the default password retry limit (5) |
|
||||
**Note: Available in ToolJet Enterprise 2.8.0+ and Community/Cloud 2.10.0+.**
|
||||
|
||||
### SSO Configurations (Optional)
|
||||
#### Default Language
|
||||
Set the default language using the `LANGUAGE` variable. Supported options:
|
||||
|
||||
Configurations for instance level SSO.
|
||||
|
||||
| variable | description |
|
||||
| ---------------------------- | -------------------------------------------------------------- |
|
||||
| SSO_GOOGLE_OAUTH2_CLIENT_ID | Google OAuth client id |
|
||||
| SSO_GIT_OAUTH2_CLIENT_ID | GitHub OAuth client id |
|
||||
| SSO_GIT_OAUTH2_CLIENT_SECRET | GitHub OAuth client secret |
|
||||
| SSO_GIT_OAUTH2_HOST | GitHub OAuth host name if GitHub is self hosted |
|
||||
| SSO_ACCEPTED_DOMAINS | comma separated email domains that supports SSO authentication |
|
||||
| SSO_DISABLE_SIGNUPS | Disable user sign up if authenticated user does not exist |
|
||||
<div style={{ display: 'flex' }} >
|
||||
|
||||
### Enable Cookie Forwarding to REST API (Optional)
|
||||
<div style = {{ width:'40%' }} >
|
||||
|
||||
By default, the ToolJet server does not forward cookies along with the REST API requests. You can enable this functionality by setting the `FORWARD_RESTAPI_COOKIES` environment variable to `true`. This option is available only in the self-hosted version of ToolJet.
|
||||
| Language | Code | Native Name |
|
||||
|-------------|------|-------------------|
|
||||
| English | en | English |
|
||||
| French | fr | Français |
|
||||
| Spanish | es | Español |
|
||||
| Italian | it | Italiano |
|
||||
|
||||
| variable | description |
|
||||
| ----------------------- | ----------------- |
|
||||
| FORWARD_RESTAPI_COOKIES | `true` or `false` |
|
||||
</div>
|
||||
|
||||
## ToolJet client
|
||||
<div style = {{ width:'5%' }} > </div>
|
||||
|
||||
### Server URL ( optionally required )
|
||||
<div style = {{ width:'50%' }} >
|
||||
|
||||
This is required when the client is built separately.
|
||||
| Language | Code | Native Name |
|
||||
|-------------|------|-------------------|
|
||||
| Indonesian | id | Bahasa Indonesia |
|
||||
| Ukrainian | uk | Українська |
|
||||
| Russian | ru | Русский |
|
||||
| German | de | Deutsch |
|
||||
|
||||
| variable | description |
|
||||
| ------------------ | -------------------------------------------------------------- |
|
||||
| TOOLJET_SERVER_URL | the URL of ToolJet server ( eg: `https://server.tooljet.com` ) |
|
||||
</div>
|
||||
|
||||
### Server Port ( optional)
|
||||
</div>
|
||||
|
||||
This could be used to for local development, it will set the server url like so: `http://localhost:<TOOLJET_SERVER_PORT>`
|
||||
Example: `LANGUAGE=fr` (for French).
|
||||
|
||||
| variable | description |
|
||||
| ------------------- | --------------------------------------- |
|
||||
| TOOLJET_SERVER_PORT | the port of ToolJet server ( eg: 3000 ) |
|
||||
|
||||
### Asset path ( optionally required )
|
||||
|
||||
This is required when the assets for the client are to be loaded from elsewhere (eg: CDN).
|
||||
This can be an absolute path, or relative to main HTML file.
|
||||
|
||||
| variable | description |
|
||||
| ---------- | ------------------------------------------------------------- |
|
||||
| ASSET_PATH | the asset path for the website ( eg: https://app.tooljet.ai/) |
|
||||
|
||||
### Serve client as a server end-point ( optional )
|
||||
|
||||
By default the client build will be done to be served with ToolJet server.
|
||||
If you intend to use client separately then can set `SERVE_CLIENT` to `false`.
|
||||
|
||||
## PostgREST server (required)
|
||||
|
||||
| variable | description |
|
||||
| ---------------- | ----------------------------------------------- |
|
||||
| PGRST_JWT_SECRET | JWT token client provided for authentication |
|
||||
| PGRST_DB_URI | database connection string for tooljet database |
|
||||
| PGRST_LOG_LEVEL | `info` |
|
||||
|
||||
If you intent to make changes in the above configuration. Please refer [PostgREST configuration docs](https://postgrest.org/en/stable/configuration.html#environment-variables).
|
||||
|
||||
:::tip
|
||||
If you have openssl installed, you can run the
|
||||
command `openssl rand -hex 32` to generate the value for `PGRST_JWT_SECRET`.
|
||||
|
||||
If this parameter is not specified, PostgREST will refuse authentication requests.
|
||||
:::
|
||||
|
||||
:::info
|
||||
Please make sure that DB_URI is given in the format `postgrest://[USERNAME]:[PASSWORD]@[HOST]:[PORT]/[DATABASE]`
|
||||
:::
|
||||
|
||||
## Log file path ( Optional )
|
||||
|
||||
If a log file path is specified in environment variables, a log file containing all the data from audit logs will be created at the specified path. The file will be updated every time a new audit log is created.
|
||||
|
||||
| Variable | Description |
|
||||
| ------------- | -------------------------------------------------------------------------------- |
|
||||
| LOG_FILE_PATH | the path where the log file will be created ( eg: tooljet/log/tooljet-audit.log) |
|
||||
|
||||
## ToolJet Apps
|
||||
|
||||
### Enabling embedding of private apps
|
||||
|
||||
By default, only embedding of public apps is permitted. By setting this variable, users will be able to embed private ToolJet Apps.
|
||||
|
||||
| Variable | Description |
|
||||
| ------------------------ | ----------------- |
|
||||
| ENABLE_PRIVATE_APP_EMBED | `true` or `false` |
|
||||
|
||||
:::caution
|
||||
The option is only available starting from ToolJet Enterprise Edition `2.8.0` or higher, and `2.10.0` for the Community edition and cloud version.
|
||||
:::
|
||||
|
||||
## Configuring the Default Language
|
||||
|
||||
To change the default language, set the LANGUAGE variable to your desired language code.
|
||||
|
||||
| Variable | Description |
|
||||
| -------- | --------------- |
|
||||
| LANGUAGE | `LANGUAGE_CODE` |
|
||||
|
||||
Available Languages with their codes and native names:
|
||||
|
||||
| Language | Code | Native Name |
|
||||
| ---------- | ---- | ---------------- |
|
||||
| English | en | English |
|
||||
| French | fr | Français |
|
||||
| Spanish | es | Español |
|
||||
| Italian | it | Italiano |
|
||||
| Indonesian | id | Bahasa Indonesia |
|
||||
| Ukrainian | uk | Українська |
|
||||
| Russian | ru | Русский |
|
||||
| German | de | Deutsch |
|
||||
|
||||
For instance, to set the language to French, you can set the LANGUAGE variable to `fr`.
|
||||
|
||||
:::info
|
||||
The option to set a default language is not available on cloud version of ToolJet.
|
||||
:::
|
||||
**Note:** This setting is not available in ToolJet Cloud.
|
||||
46
docs/docs/setup/tooljet-domain-change.md
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
---
|
||||
id: tooljet-domain-change
|
||||
title: ToolJet Domain Change
|
||||
---
|
||||
|
||||
We're updating our domain from `tooljet.ai` to `tooljet.com`.
|
||||
|
||||
## When is This Happening?
|
||||
|
||||
The change will take place at the following times:
|
||||
|
||||
- **ET (Eastern Time):** Sunday, November 23, 2025 – 11:00 PM
|
||||
- **PT (Pacific Time):** Sunday, November 23, 2025 – 8:00 PM
|
||||
- **GMT (Greenwich Mean Time):** Monday, November 24, 2025 – 4:00 AM
|
||||
- **IST (Indian Standard Time):** Monday, November 24, 2025 – 9:30 AM
|
||||
|
||||
## What This Means For You
|
||||
|
||||
If your organization uses **Single Sign-On (SSO)** to access ToolJet, you’ll need to update your SSO redirect URLs to continue signing in after the domain change.
|
||||
|
||||
- This change is **only applicable to ToolJet Cloud** users.
|
||||
- If you do **not** update your SSO configuration, **SSO login will stop working** after the domain change.
|
||||
|
||||
## How to Fix it
|
||||
|
||||
You’ll need to regenerate and update the redirect URL for **each SSO provider** you have configured.
|
||||
|
||||
### Steps
|
||||
|
||||
1. Log in to ToolJet at **`https://app.tooljet.com`**.
|
||||
2. Go to: **Workspace settings → Workspace login**.
|
||||
3. Under SSO providers (Google, OIDC, SAML, etc.):
|
||||
- Click on each configured provider.
|
||||
- Copy the new redirect URL (it will now contain `tooljet.com` instead of `tooljet.ai`).
|
||||
4. Go to your SSO provider’s admin console (e.g., Google, Okta, Azure AD).
|
||||
5. Update the redirect/callback URL with the new **`tooljet.com`** URL.
|
||||
6. Save the changes.
|
||||
7. Test SSO login to ensure everything is working correctly.
|
||||
|
||||
For provider-specific configuration details, refer to your usual **[SSO](/docs/user-management/sso/overview)** setup guides.
|
||||
|
||||
## When to Update
|
||||
|
||||
To ensure uninterrupted access for your team, **please complete this update by Sunday, November 23, 2025, at 8:00 PM** (aligned with your relevant timezone in the schedule above).
|
||||
|
||||
If the redirect URLs are not updated before the domain change window, users relying on SSO will not be able to sign in until the configuration is updated.
|
||||
|
|
@ -34,11 +34,6 @@ ToolJet API allows you to interact with the ToolJet platform programmatically. Y
|
|||
- [Replace User Workspaces Relations](#replace-user-workspaces-relations)
|
||||
- [Export Application](#export-application)
|
||||
- [Import Application](#import-application)
|
||||
- [Add HTTPS Git Config for an Organization](#add-https-git-config-for-an-organization)
|
||||
- [Push an App Version to GitHub](#push-an-app-version-to-github)
|
||||
- [Create a New App from GitHub](#create-a-new-app-from-github)
|
||||
- [Sync and Pull Changes to Existing App from Git](#sync-and-pull-changes-to-existing-app-from-git)
|
||||
- [Auto Promote App](#auto-promote-app)
|
||||
|
||||
## Enabling ToolJet API
|
||||
|
||||
|
|
@ -942,6 +937,8 @@ From version **`v3.5.7-ee-lts`**, you can use ToolJet API to import application.
|
|||
:::info
|
||||
By default, server accepts maximum JSON size as 50 MB. To increase this limit, use the following environment variable:
|
||||
`MAX_JSON_SIZE`
|
||||
|
||||
**Example**: `MAX_JSON_SIZE = "250mb"`
|
||||
:::
|
||||
|
||||
<details id="tj-dropdown">
|
||||
|
|
|
|||
|
|
@ -12,14 +12,10 @@ To set up LDAP as Single Sign-On (SSO) for ToolJet, follow these steps:
|
|||
Role Required: **Admin** <br/>
|
||||
|
||||
1. Click on the settings icon (⚙️) on the bottom left of your dashboard.
|
||||
|
||||
2. Go to **Workspace settings > Workspace login**. <br/>
|
||||
(Example URL - `https://app.corp.com/nexus/workspace-settings/workspace-login`)
|
||||
|
||||
<img className="screenshot-full" src="/img/sso/ldap/url-v4.png" alt="SSO :LDAP"/>
|
||||
|
||||
<img style={{ marginBottom:'15px', marginTop: '15px' }} className="screenshot-full" src="/img/sso/ldap/url-v4.png" alt="SSO :LDAP"/>
|
||||
3. To **enable** LDAP, toggle the switch. Then, add the configuration:
|
||||
|
||||
- **Name**: Enter the name of the SSO.
|
||||
- **Hostname**: Provide the hostname or IP address of your LDAP server.
|
||||
- **Port**: Enter the Port number of LDAP server.
|
||||
|
|
@ -27,17 +23,12 @@ Role Required: **Admin** <br/>
|
|||
- **SSL**: Toggle this option to enable the SSL. After enabling you can select the type of SSL: **None** or **Certificates**. If you choose Certificates, you'll need to provide the **Client Key**, **Client Certificate**, and **Server Certificate**.
|
||||
<br/>
|
||||
<img className="screenshot-full img-l" src="/img/sso/ldap/fields-v2.png" alt="SSO :LDAP"/>
|
||||
|
||||
4. After making the necessary configurations, click the **Save Changes** button located at the bottom.
|
||||
|
||||
5. Next, proceed to the **Workspace login** and copy the **Login URL** provided.
|
||||
|
||||
6. The **Login URL** obtained can be utilized for accessing the workspace. Please note that ToolJet supports LDAP login at the workspace level and not at the instance level. Thus, users will be logged in specifically to the chosen workspace.
|
||||
<img className="screenshot-full" src="/img/sso/ldap/login-v2.png" alt="SSO :LDAP"/>
|
||||
|
||||
<img style={{ marginBottom:'15px', marginTop: '15px' }} className="screenshot-full" src="/img/sso/ldap/login-v2.png" alt="SSO :LDAP"/>
|
||||
7. Click on the **Sign in with `<LDAP Name>`** button, and provide your username and password to log in to the workspace. For signing in, ToolJet uses the **common name (cn)** associated with each LDAP server user as the **Username**. Upon the initial login, users will be redirected to the **Workspace Invite** page, while subsequent logins will lead them directly to the ToolJet dashboard.
|
||||
|
||||
:::info
|
||||
During the first login, ToolJet performs additional checks. It verifies the user groups in the LDAP server, and if the corresponding group exists in the ToolJet workspace, the user will be automatically added to that group. Additionally, ToolJet also looks for the user's profile picture in the LDAP server and updates the ToolJet account accordingly.
|
||||
:::
|
||||
|
||||
|
|
|
|||
|
|
@ -5,23 +5,26 @@ title: ToolJet Documentation Versions
|
|||
|
||||
## Current LTS Versions (Stable)
|
||||
|
||||
| Version | Documentation |
|
||||
|------------|-------------------------------------------------|
|
||||
| 3.5.0-LTS | [Documentation](https://docs.tooljet.ai/docs/) |
|
||||
| 3.0.0-LTS | [Documentation](https://docs.tooljet.ai/docs/3.0.0-LTS/) |
|
||||
| Version | Documentation |
|
||||
|------------|------------------------------------------------------------|
|
||||
| 3.16.0-LTS | [Documentation](https://docs.tooljet.ai/docs/) |
|
||||
| 3.5.0-LTS | [Documentation](https://docs.tooljet.ai/docs/3.5.0-LTS/) |
|
||||
| 3.0.0-LTS | [Documentation](https://docs.tooljet.ai/docs/3.0.0-LTS/) |
|
||||
| 2.50.0-LTS | [Documentation](https://docs.tooljet.ai/docs/2.50.0-LTS/) |
|
||||
|
||||
<!--
|
||||
## Beta Version (Pre-Release)
|
||||
|
||||
| Version | Documentation |
|
||||
|-------------|-------------------------------------------|
|
||||
| 3.11.0-Beta 🚧 | [Documentation](https://docs.tooljet.ai/docs/beta/) |
|
||||
| Version | Documentation |
|
||||
|------------|------------------------------------------------------|
|
||||
| beta 🚧 | [Documentation](https://docs.tooljet.ai/docs/beta/) |
|
||||
-->
|
||||
|
||||
## Past Versions (Not Maintained Anymore)
|
||||
|
||||
| Version | Documentation |
|
||||
|-------------|-------------------------------------------|
|
||||
| 2.65.0 | [Documentation](https://archived-docs.tooljet.com/docs/) |
|
||||
| Version | Documentation |
|
||||
|-------------|----------------------------------------------------------------|
|
||||
| 2.65.0 | [Documentation](https://archived-docs.tooljet.com/docs/) |
|
||||
| 2.62.0 | [Documentation](https://archived-docs.tooljet.com/docs/2.62.0) |
|
||||
| 2.61.0 | [Documentation](https://archived-docs.tooljet.com/docs/2.61.0) |
|
||||
| 2.43.0 | [Documentation](https://archived-docs.tooljet.com/docs/2.43.0) |
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ const isProd = process.env.NODE_ENV === 'production';
|
|||
module.exports = {
|
||||
title: 'ToolJet',
|
||||
tagline: 'Low-code framework to Build internal tools and business apps.',
|
||||
url: 'https://docs.tooljet.ai',
|
||||
url: 'https://docs.tooljet.com',
|
||||
baseUrl: '/',
|
||||
onBrokenLinks: 'ignore',
|
||||
onBrokenMarkdownLinks: 'warn',
|
||||
|
|
@ -60,21 +60,21 @@ module.exports = {
|
|||
position: 'right',
|
||||
},
|
||||
{
|
||||
href: 'https://www.tooljet.ai/',
|
||||
href: 'https://www.tooljet.com/',
|
||||
position: 'right',
|
||||
label: 'Website',
|
||||
className: 'navbar-signin',
|
||||
'aria-label': 'Visit ToolJet Website',
|
||||
},
|
||||
{
|
||||
href: 'https://www.tooljet.ai/login',
|
||||
href: 'https://www.tooljet.com/login',
|
||||
position: 'right',
|
||||
label: 'Sign in',
|
||||
className: 'navbar-signin',
|
||||
'aria-label': 'Signin to ToolJet',
|
||||
},
|
||||
{
|
||||
href: 'https://www.tooljet.ai/create-account',
|
||||
href: 'https://www.tooljet.com/create-account',
|
||||
position: 'right',
|
||||
label: 'Try for free',
|
||||
className: 'navbar-website',
|
||||
|
|
@ -83,7 +83,7 @@ module.exports = {
|
|||
],
|
||||
},
|
||||
footer: {
|
||||
style: 'light',
|
||||
style: 'light',
|
||||
logo: {
|
||||
alt: 'ToolJet Logo',
|
||||
src: '/img/docs_logo.svg',
|
||||
|
|
@ -93,35 +93,36 @@ module.exports = {
|
|||
{
|
||||
title: 'Platform',
|
||||
items: [
|
||||
{ label: 'App builder', to: 'https://www.tooljet.ai/visual-app-builder' },
|
||||
{ label: 'AI Agent builder', to: 'https://www.tooljet.ai/ai-agent-builder' },
|
||||
{ label: 'ToolJet Database', to: 'https://www.tooljet.ai/database' },
|
||||
{ label: 'App builder', to: 'https://www.tooljet.com/visual-app-builder' },
|
||||
{ label: 'AI Agent builder', to: 'https://www.tooljet.com/ai-agent-builder' },
|
||||
{ label: 'ToolJet Database', to: 'https://www.tooljet.com/database' },
|
||||
{ label: 'Trust Center', to: 'https://trust.tooljet.com/' },
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'Solutions',
|
||||
items: [
|
||||
{ label: 'Back office tools', to: 'https://www.tooljet.ai/building-back-office-apps' },
|
||||
{ label: 'Business applications', to: 'https://www.tooljet.ai/business-applications' },
|
||||
{ label: 'Back office tools', to: 'https://www.tooljet.com/building-back-office-apps' },
|
||||
{ label: 'Business applications', to: 'https://www.tooljet.com/business-applications' },
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'Developers',
|
||||
items: [
|
||||
{ label: 'Blogs', to: 'https://blog.tooljet.ai/' },
|
||||
{ label: 'Events', to: 'https://www.tooljet.ai/events' },
|
||||
{ label: 'Blogs', to: 'https://blog.tooljet.com/' },
|
||||
{ label: 'Events', to: 'https://www.tooljet.com/events' },
|
||||
{ label: 'GitHub', href: 'https://github.com/ToolJet/ToolJet' },
|
||||
{ label: 'Slack', href: 'https://tooljet.ai/slack' },
|
||||
{ label: 'Slack', href: 'https://tooljet.com/slack' },
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'Templates',
|
||||
items: [
|
||||
{ label: 'Lead management', to: 'https://www.tooljet.ai/templates/lead-management-system' },
|
||||
{ label: 'KPI management', to: 'https://www.tooljet.ai/templates/kpi-management-dashboard' },
|
||||
{ label: 'Inventory management', to: 'https://www.tooljet.ai/templates/inventory-management-system' },
|
||||
{ label: 'Leave management', to: 'https://www.tooljet.ai/templates/leave-management-portal' },
|
||||
{ label: 'Applicant tracking', to: 'https://www.tooljet.ai/templates/applicant-tracking-system' },
|
||||
{ label: 'Lead management', to: 'https://www.tooljet.com/templates/lead-management-system' },
|
||||
{ label: 'KPI management', to: 'https://www.tooljet.com/templates/kpi-management-dashboard' },
|
||||
{ label: 'Inventory management', to: 'https://www.tooljet.com/templates/inventory-management-system' },
|
||||
{ label: 'Leave management', to: 'https://www.tooljet.com/templates/leave-management-portal' },
|
||||
{ label: 'Applicant tracking', to: 'https://www.tooljet.com/templates/applicant-tracking-system' },
|
||||
],
|
||||
},
|
||||
{
|
||||
|
|
@ -242,13 +243,47 @@ module.exports = {
|
|||
window.buildUrlWithStoredParams = buildUrlWithStoredParams; // NEW: Build URLs with UTM params
|
||||
})();
|
||||
</script>
|
||||
<script>
|
||||
document.addEventListener('DOMContentLoaded', function () {
|
||||
console.log("Script for cookie called");
|
||||
var cookieName = "source_page";
|
||||
var domain = ".tooljet.ai";
|
||||
var maxAge = 7 * 24 * 60 * 60; // 7 days
|
||||
var currentHost = window.location.hostname;
|
||||
var fullUrl = window.location.href;
|
||||
// Helper: read cookie
|
||||
function getCookie(name) {
|
||||
var match = document.cookie.match(new RegExp('(^| )' + name + '=([^;]+)'));
|
||||
return match ? decodeURIComponent(match[2]) : null;
|
||||
}
|
||||
// Helper: set cookie
|
||||
function setCookie(name, value, maxAgeSeconds, domain) {
|
||||
document.cookie =
|
||||
name + "=" + encodeURIComponent(value) +
|
||||
"; path=/; domain=" + domain +
|
||||
"; max-age=" + maxAgeSeconds + ";";
|
||||
}
|
||||
// If user is on blog.tooljet.ai → always update cookie with latest blog URL
|
||||
// Else → do not overwrite, just keep existing one
|
||||
if (currentHost.includes("blog.tooljet.ai")) {
|
||||
setCookie(cookieName, fullUrl, maxAge, domain);
|
||||
console.log("Updated source_page cookie with latest blog URL: " + fullUrl);
|
||||
} else {
|
||||
console.log("Not on blog domain — keeping existing source_page: " + getCookie(cookieName));
|
||||
}
|
||||
});
|
||||
</script>
|
||||
<!-- Start of HubSpot Embed Code -->
|
||||
<script type="text/javascript" id="hs-script-loader" async defer src="//js.hs-scripts.com/39494431.js"></script>
|
||||
<!-- End of HubSpot Embed Code -->
|
||||
`,
|
||||
},
|
||||
},
|
||||
algolia: {
|
||||
appId: 'O8HQRLI0WA',
|
||||
apiKey: process.env.ALGOLIA_API_KEY || 'development', // Public API key: it is safe to commit it
|
||||
indexName: 'tooljet',
|
||||
contextualSearch: true,
|
||||
insights: true,
|
||||
externalUrlRegex: 'external\\.com|domain\\.com',
|
||||
},
|
||||
},
|
||||
|
|
@ -260,16 +295,18 @@ module.exports = {
|
|||
sidebarPath: require.resolve('./sidebars.js'),
|
||||
// Please change this to your repo.
|
||||
editUrl: 'https://github.com/ToolJet/Tooljet/blob/develop/docs/',
|
||||
includeCurrentVersion: true,
|
||||
includeCurrentVersion: false, // Set to true if you want to include the beta version in the sidebar
|
||||
lastVersion: '3.16.0-LTS',
|
||||
versions: {
|
||||
current: {
|
||||
label: 'beta 🚧',
|
||||
path: 'beta',
|
||||
banner: 'none',
|
||||
badge: false
|
||||
},
|
||||
// Uncomment the following line to include the beta version in the sidebar
|
||||
// current: {
|
||||
// label: 'beta 🚧',
|
||||
// path: 'beta',
|
||||
// banner: 'none',
|
||||
// badge: false,
|
||||
// },
|
||||
"2.50.0-LTS": {
|
||||
label: '2.50.0-LTS (Legacy)',
|
||||
banner: 'none',
|
||||
badge: false
|
||||
},
|
||||
|
|
@ -282,6 +319,7 @@ module.exports = {
|
|||
badge: false
|
||||
},
|
||||
"3.16.0-LTS": {
|
||||
label: '3.16 - 3.20 LTS',
|
||||
banner: 'none',
|
||||
badge: false
|
||||
}
|
||||
|
|
@ -296,6 +334,7 @@ module.exports = {
|
|||
ignorePatterns: ['/docs/1.x.x/**'],
|
||||
filename: 'sitemap.xml',
|
||||
},
|
||||
|
||||
googleTagManager: isProd
|
||||
? {
|
||||
containerId: process.env.GTM || 'development',
|
||||
|
|
@ -303,6 +342,18 @@ module.exports = {
|
|||
: undefined,
|
||||
},
|
||||
],
|
||||
[
|
||||
'redocusaurus',
|
||||
{
|
||||
openapi: {
|
||||
path: 'openapi', // scans all folders inside openapi/, e.g., scim, tj-api
|
||||
routeBasePath: '/api', // pages will be /api/scim, /api/tj-api
|
||||
},
|
||||
theme: {
|
||||
primaryColor: '#1890ff', // customize the color
|
||||
},
|
||||
},
|
||||
],
|
||||
],
|
||||
plugins: [
|
||||
devServerPlugin,
|
||||
|
|
@ -343,122 +394,6 @@ module.exports = {
|
|||
to: '/docs/user-management/authentication/self-hosted/instance-login',
|
||||
from: '/docs/enterprise/superadmin',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/sso/oidc/setup',
|
||||
from: '/docs/beta/category/openid-connect/',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/development-lifecycle/release/share-app/',
|
||||
from: '/docs/beta/dashboard',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/security/audit-logs',
|
||||
from: '/docs/beta/enterprise/audit_logs',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/role-based-access/super-admin',
|
||||
from: '/docs/beta/enterprise/superadmin',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/tj-setup/org-branding/white-labeling',
|
||||
from: '/docs/beta/enterprise/white-label',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/development-lifecycle/gitsync/overview',
|
||||
from: '/docs/beta/gitsync',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/tj-setup/licensing/self-hosted',
|
||||
from: '/docs/beta/org-management/licensing/self-hosted/',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/role-based-access/access-control',
|
||||
from: '/docs/beta/org-management/permissions',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/tj-setup/smtp-setup/configuration',
|
||||
from: '/docs/beta/org-management/smtp-configuration',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/security/constants/',
|
||||
from: '/docs/beta/org-management/workspaces/workspace_constants/',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/tj-setup/workspaces',
|
||||
from: '/docs/beta/org-management/workspaces/workspace_overview/',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/security/constants/variables',
|
||||
from: '/docs/beta/org-management/workspaces/workspace-variables-migration',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/development-lifecycle/gitsync/pull',
|
||||
from: '/docs/beta/release-management/gitsync/git-pull',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/development-lifecycle/gitsync/gitsync-config',
|
||||
from: '/docs/beta/release-management/gitsync/tj-config/',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/security/compliance',
|
||||
from: '/docs/beta/security',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/build-with-ai/overview',
|
||||
from: '/docs/beta/tooljet-copilot',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/role-based-access/custom-groups',
|
||||
from: '/docs/beta/tutorial/manage-users-groups',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/tooljet-api',
|
||||
from: '/docs/beta/tutorial/tooljet-api',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/authentication/self-hosted/overview',
|
||||
from: '/docs/beta/user-authentication/general-settings/',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/authentication/self-hosted/instance-login',
|
||||
from: '/docs/beta/user-authentication/password-login',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/authentication/self-hosted/instance-login',
|
||||
from: '/docs/beta/user-authentication/sso/auto-sso-login',
|
||||
},
|
||||
{
|
||||
to: '/docs/user-management/sso/github',
|
||||
from: '/docs/beta/user-authentication/sso/github',
|
||||
},
|
||||
{
|
||||
to: '/docs/user-management/sso/ldap',
|
||||
from: '/docs/beta/user-authentication/sso/ldap',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/sso/oidc/azuread',
|
||||
from: '/docs/beta/user-authentication/sso/openid/azuread/',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/sso/oidc/google',
|
||||
from: '/docs/beta/user-authentication/sso/openid/google-openid',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/sso/oidc/okta',
|
||||
from: '/docs/beta/user-authentication/sso/openid/okta',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/sso/saml/setup',
|
||||
from: '/docs/beta/user-authentication/sso/saml',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/onboard-users/overview',
|
||||
from: '/docs/beta/user-authentication/user-lifecycle/',
|
||||
},
|
||||
{
|
||||
to: '/docs/beta/user-management/authentication/self-hosted/workspace-login',
|
||||
from: '/docs/beta/user-authentication/workspace-login',
|
||||
},
|
||||
{
|
||||
to: '/docs/user-management/sso/oidc/setup',
|
||||
from: '/docs/category/openid-connect',
|
||||
|
|
@ -577,11 +512,15 @@ module.exports = {
|
|||
from: '/docs/widgets/table/table-properties',
|
||||
},
|
||||
{
|
||||
to: '/docs/workflows/how-to/trigger-workflow-from-app',
|
||||
from: '/docs/workflows/trigger-workflow-from-app',
|
||||
}
|
||||
to: '/docs/setup/upgrade-to-v3',
|
||||
from: '/docs/setup/cloud-v3-migration',
|
||||
},
|
||||
// {
|
||||
// to: '/docs/workflows/how-to/trigger-workflow-from-app',
|
||||
// from: '/docs/workflows/trigger-workflow-from-app',
|
||||
// }
|
||||
],
|
||||
},
|
||||
],
|
||||
],
|
||||
};
|
||||
};
|
||||
|
|
|
|||
419
docs/openapi/scim/index.openapi.yaml
Normal file
|
|
@ -0,0 +1,419 @@
|
|||
openapi: 3.0.3
|
||||
info:
|
||||
title: ToolJet SCIM API
|
||||
version: 1.0.0
|
||||
description: >
|
||||
ToolJet supports SCIM 2.0 for automated user and group provisioning.
|
||||
All standard SCIM endpoints are supported — including `/Schemas`, `/ResourceTypes`, `/Users`, and `/Groups`.
|
||||
|
||||
servers:
|
||||
- url: https://app.tooljet.com/api/scim/v2
|
||||
description: Production server
|
||||
- url: http://localhost:3000/api/scim/v2
|
||||
description: Local development server
|
||||
|
||||
paths:
|
||||
|
||||
/Users:
|
||||
get:
|
||||
summary: List Users
|
||||
responses:
|
||||
"200":
|
||||
description: List of users
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMListResponse'
|
||||
post:
|
||||
summary: Create User
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMUserRequest'
|
||||
responses:
|
||||
"201":
|
||||
description: User created
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMUserResponse'
|
||||
|
||||
/Users/{id}:
|
||||
get:
|
||||
summary: Get User by ID
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
description: User details
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMUserResponse'
|
||||
put:
|
||||
summary: Replace User
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMUserRequest'
|
||||
responses:
|
||||
"200":
|
||||
description: Updated user
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMUserResponse'
|
||||
patch:
|
||||
summary: Patch User
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMPatchRequest'
|
||||
responses:
|
||||
"200":
|
||||
description: User updated
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMUserResponse'
|
||||
delete:
|
||||
summary: Delete User
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"204":
|
||||
description: User deleted
|
||||
|
||||
/Groups:
|
||||
get:
|
||||
summary: List Groups
|
||||
responses:
|
||||
"200":
|
||||
description: List of groups
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMGroupListResponse'
|
||||
post:
|
||||
summary: Create Group
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMGroupRequest'
|
||||
responses:
|
||||
"201":
|
||||
description: Group created
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMGroupResponse'
|
||||
|
||||
/Groups/{id}:
|
||||
get:
|
||||
summary: Get Group by ID
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"200":
|
||||
description: Group details
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMGroupResponse'
|
||||
put:
|
||||
summary: Replace Group
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMGroupRequest'
|
||||
responses:
|
||||
"200":
|
||||
description: Group updated
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMGroupResponse'
|
||||
patch:
|
||||
summary: Patch Group
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMPatchRequest'
|
||||
responses:
|
||||
"200":
|
||||
description: Group updated
|
||||
content:
|
||||
application/scim+json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/SCIMGroupResponse'
|
||||
delete:
|
||||
summary: Delete Group
|
||||
parameters:
|
||||
- name: id
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
"204":
|
||||
description: Group deleted
|
||||
|
||||
components:
|
||||
schemas:
|
||||
|
||||
SCIMUserRequest:
|
||||
type: object
|
||||
required:
|
||||
- schemas
|
||||
- userName
|
||||
properties:
|
||||
schemas:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
example:
|
||||
- "urn:ietf:params:scim:schemas:core:2.0:User"
|
||||
- "urn:ietf:params:scim:schemas:extension:tooljet:User:2.0"
|
||||
userName:
|
||||
type: string
|
||||
name:
|
||||
type: object
|
||||
properties:
|
||||
givenName:
|
||||
type: string
|
||||
familyName:
|
||||
type: string
|
||||
active:
|
||||
type: boolean
|
||||
password:
|
||||
type: string
|
||||
description: User password for creation.
|
||||
emails:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
value:
|
||||
type: string
|
||||
primary:
|
||||
type: boolean
|
||||
type:
|
||||
type: string
|
||||
groups:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
value:
|
||||
type: string
|
||||
display:
|
||||
type: string
|
||||
meta:
|
||||
type: object
|
||||
properties:
|
||||
resourceType:
|
||||
type: string
|
||||
urn:ietf:params:scim:schemas:extension:tooljet:User:2.0:
|
||||
type: object
|
||||
description: ToolJet’s custom SCIM extension (only used in requests)
|
||||
properties:
|
||||
role:
|
||||
type: string
|
||||
|
||||
SCIMUserResponse:
|
||||
type: object
|
||||
properties:
|
||||
schemas:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
example:
|
||||
- "urn:ietf:params:scim:schemas:core:2.0:User"
|
||||
id:
|
||||
type: string
|
||||
format: uuid
|
||||
userName:
|
||||
type: string
|
||||
name:
|
||||
type: object
|
||||
properties:
|
||||
givenName:
|
||||
type: string
|
||||
familyName:
|
||||
type: string
|
||||
active:
|
||||
type: boolean
|
||||
emails:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
value:
|
||||
type: string
|
||||
primary:
|
||||
type: boolean
|
||||
type:
|
||||
type: string
|
||||
meta:
|
||||
type: object
|
||||
properties:
|
||||
resourceType:
|
||||
type: string
|
||||
example: "User"
|
||||
created:
|
||||
type: string
|
||||
format: date-time
|
||||
lastModified:
|
||||
type: string
|
||||
format: date-time
|
||||
|
||||
SCIMListResponse:
|
||||
type: object
|
||||
properties:
|
||||
totalResults:
|
||||
type: integer
|
||||
startIndex:
|
||||
type: integer
|
||||
itemsPerPage:
|
||||
type: integer
|
||||
Resources:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/SCIMUserResponse'
|
||||
|
||||
SCIMPatchRequest:
|
||||
type: object
|
||||
required:
|
||||
- schemas
|
||||
- Operations
|
||||
properties:
|
||||
schemas:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
example:
|
||||
- "urn:ietf:params:scim:api:messages:2.0:PatchOp"
|
||||
Operations:
|
||||
type: array
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
op:
|
||||
type: string
|
||||
enum: [add, remove, replace]
|
||||
path:
|
||||
type: string
|
||||
value: {}
|
||||
|
||||
SCIMGroupRequest:
|
||||
type: object
|
||||
required:
|
||||
- schemas
|
||||
- displayName
|
||||
properties:
|
||||
schemas:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
example:
|
||||
- "urn:ietf:params:scim:schemas:core:2.0:Group"
|
||||
displayName:
|
||||
type: string
|
||||
example: "Developers"
|
||||
members:
|
||||
type: array
|
||||
description: List of User members belonging to this group.
|
||||
items:
|
||||
type: object
|
||||
properties:
|
||||
value:
|
||||
type: string
|
||||
format: uuid
|
||||
description: User ID
|
||||
display:
|
||||
type: string
|
||||
description: User display name
|
||||
|
||||
|
||||
SCIMGroupResponse:
|
||||
allOf:
|
||||
- $ref: '#/components/schemas/SCIMGroupRequest'
|
||||
- type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
format: uuid
|
||||
meta:
|
||||
type: object
|
||||
properties:
|
||||
resourceType:
|
||||
type: string
|
||||
example: "Group"
|
||||
created:
|
||||
type: string
|
||||
format: date-time
|
||||
lastModified:
|
||||
type: string
|
||||
format: date-time
|
||||
|
||||
SCIMGroupListResponse:
|
||||
type: object
|
||||
properties:
|
||||
totalResults:
|
||||
type: integer
|
||||
startIndex:
|
||||
type: integer
|
||||
itemsPerPage:
|
||||
type: integer
|
||||
Resources:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/SCIMGroupResponse'
|
||||
1518
docs/package-lock.json
generated
|
|
@ -28,6 +28,7 @@
|
|||
"prism-react-renderer": "^2.1.0",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"redocusaurus": "^2.5.0",
|
||||
"tailwindcss": "^3.4.12"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
|
|
|||
|
|
@ -3,11 +3,16 @@ set -e
|
|||
|
||||
# Configuration
|
||||
DOCKER_REPO="tooljet/tooljet"
|
||||
MARKDOWN_FILE="docs/versioned_docs/version-3.5.0-LTS/setup/choose-your-tooljet.md"
|
||||
MARKDOWN_FILE="docs/versioned_docs/version-3.16.0-LTS/setup/overview/choose-your-tooljet.mdx"
|
||||
TABLE_HEADER="| Version | Release Date | Docker Pull Command |"
|
||||
TABLE_DIVIDER="|---------|--------------|----------------------|"
|
||||
DRY_RUN=false
|
||||
|
||||
# Current LTS line — update CURRENT_LTS_PREFIX when a new LTS series starts (e.g. v3.21)
|
||||
CURRENT_LTS_PREFIX="v3.20"
|
||||
CURRENT_LTS_TAG_PATTERN="^v3\\.20\\.[0-9]+-lts$"
|
||||
CURRENT_LTS_MAX=6
|
||||
|
||||
# Enhanced logging function
|
||||
log() {
|
||||
echo "[$(date '+%Y-%m-%d %H:%M:%S')] $1" >&2
|
||||
|
|
@ -81,34 +86,45 @@ get_lts_tags() {
|
|||
fi
|
||||
|
||||
local page_tags
|
||||
page_tags=$(echo "$resp" | jq -r '.results[]? | select(.name | test("^v.*ee-lts$")) | .name' 2>/dev/null)
|
||||
|
||||
page_tags=$(echo "$resp" | jq -r --arg pat "$CURRENT_LTS_TAG_PATTERN" '.results[]? | select(.name | test($pat)) | .name' 2>/dev/null)
|
||||
|
||||
if [[ -n "$page_tags" ]]; then
|
||||
while IFS= read -r tag; do
|
||||
[[ -n "$tag" ]] && tags+=("$tag")
|
||||
done <<< "$page_tags"
|
||||
fi
|
||||
|
||||
|
||||
url=$(echo "$resp" | jq -r '.next // empty' 2>/dev/null)
|
||||
[[ "$url" == "null" || -z "$url" ]] && break
|
||||
|
||||
|
||||
# Stop early once we have enough tags to avoid unnecessary pages
|
||||
if [[ ${#tags[@]} -ge $CURRENT_LTS_MAX ]]; then
|
||||
log "✅ Collected enough tags (${#tags[@]}), stopping pagination"
|
||||
break
|
||||
fi
|
||||
|
||||
# Safety check to prevent infinite loops
|
||||
if [[ $page_count -gt 10 ]]; then
|
||||
log "⚠️ Reached maximum page limit (10), stopping pagination"
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
|
||||
if [[ ${#tags[@]} -eq 0 ]]; then
|
||||
log_error "No LTS tags found"
|
||||
return 1
|
||||
fi
|
||||
|
||||
|
||||
log "✅ Found ${#tags[@]} LTS tags"
|
||||
|
||||
# Sort tags by version (reverse)
|
||||
|
||||
# Sort tags by version (reverse) and cap to CURRENT_LTS_MAX
|
||||
IFS=$'\n' tags=($(printf '%s\n' "${tags[@]}" | sort -Vr))
|
||||
|
||||
|
||||
if [[ ${#tags[@]} -gt $CURRENT_LTS_MAX ]]; then
|
||||
tags=("${tags[@]:0:$CURRENT_LTS_MAX}")
|
||||
log "✂️ Capped to ${CURRENT_LTS_MAX} most recent ${CURRENT_LTS_PREFIX} tags"
|
||||
fi
|
||||
|
||||
log "📋 LTS tags (sorted):"
|
||||
printf ' %s\n' "${tags[@]}"
|
||||
|
||||
|
|
@ -196,14 +212,7 @@ build_table_rows() {
|
|||
return 1
|
||||
fi
|
||||
|
||||
# Add latest EE-LTS row (always use ee-lts-latest tag)
|
||||
local latest="${tags[0]}"
|
||||
log "⭐ Latest EE-LTS is: $latest"
|
||||
local latest_row="| Latest EE-LTS | N/A | \`docker pull tooljet/tooljet:ee-lts-latest\` |"
|
||||
|
||||
log "📝 Generated ${#rows[@]} table rows plus Latest EE-LTS row"
|
||||
|
||||
echo "$latest_row"
|
||||
log "📝 Generated ${#rows[@]} table rows"
|
||||
printf "%s\n" "${rows[@]}"
|
||||
}
|
||||
|
||||
|
|
@ -239,38 +248,41 @@ replace_table_in_file() {
|
|||
|
||||
log "✍️ Writing to $MARKDOWN_FILE..."
|
||||
local tmp_md="${MARKDOWN_FILE}.tmp"
|
||||
local in_table=false
|
||||
local lines_written=0
|
||||
local table_lines=0
|
||||
|
||||
> "$tmp_md"
|
||||
|
||||
while IFS= read -r line || [[ -n "$line" ]]; do
|
||||
if [[ "$line" == "$TABLE_HEADER" ]]; then
|
||||
log "🔍 Found table header, replacing table content"
|
||||
echo "$TABLE_HEADER" >> "$tmp_md"
|
||||
echo "$TABLE_DIVIDER" >> "$tmp_md"
|
||||
echo "$table_body" >> "$tmp_md"
|
||||
in_table=true
|
||||
table_lines=$(echo "$table_body" | wc -l)
|
||||
lines_written=$((lines_written + 2 + table_lines))
|
||||
elif [[ "$in_table" == true && "$line" == :::* ]]; then
|
||||
echo "$line" >> "$tmp_md"
|
||||
in_table=false
|
||||
lines_written=$((lines_written + 1))
|
||||
log "✅ Table replacement complete"
|
||||
elif [[ "$in_table" == false ]]; then
|
||||
echo "$line" >> "$tmp_md"
|
||||
lines_written=$((lines_written + 1))
|
||||
fi
|
||||
done < "$MARKDOWN_FILE"
|
||||
|
||||
|
||||
# Extract preserved rows — older LTS lines that are NOT part of the current LTS series.
|
||||
# These rows sit below the current LTS entries and are never auto-updated.
|
||||
local preserved_rows
|
||||
preserved_rows=$(awk '
|
||||
/^### Latest Patch$/ { in_section=1; next }
|
||||
/^### Base Versions$/ { in_section=0 }
|
||||
in_section && /^\| \[/ { print }
|
||||
' "$MARKDOWN_FILE" | grep -v "${CURRENT_LTS_PREFIX}\.")
|
||||
|
||||
# Combine: new current-LTS rows on top, preserved older-LTS rows below
|
||||
local full_body
|
||||
if [[ -n "$preserved_rows" ]]; then
|
||||
full_body="${table_body}
|
||||
${preserved_rows}"
|
||||
else
|
||||
full_body="$table_body"
|
||||
fi
|
||||
|
||||
local new_table="${TABLE_HEADER}
|
||||
${TABLE_DIVIDER}
|
||||
${full_body}"
|
||||
|
||||
awk -v tbl="$new_table" '
|
||||
/^### Latest Patch$/ { print; print ""; print tbl; skip=1; next }
|
||||
/^### Base Versions$/ { skip=0 }
|
||||
!skip { print }
|
||||
' "$MARKDOWN_FILE" > "$tmp_md"
|
||||
|
||||
if ! mv "$tmp_md" "$MARKDOWN_FILE"; then
|
||||
log_error "Failed to move temporary file to $MARKDOWN_FILE"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log "✅ Markdown updated successfully ($lines_written lines written, $table_lines table rows)"
|
||||
|
||||
log "✅ Markdown updated successfully"
|
||||
}
|
||||
|
||||
main() {
|
||||
|
|
|
|||
|
|
@ -86,7 +86,7 @@ const sidebars = {
|
|||
'setup/upgrade-to-v3',
|
||||
'setup/cloud-v3-migration',
|
||||
'setup/upgrade-to-v3.16',
|
||||
|
||||
'setup/tooljet-domain-change'
|
||||
]
|
||||
}
|
||||
],
|
||||
|
|
@ -438,7 +438,10 @@ const sidebars = {
|
|||
'marketplace/plugins/marketplace-plugin-weaviate',
|
||||
'marketplace/plugins/marketplace-plugin-qdrant',
|
||||
'marketplace/plugins/marketplace-plugin-azurerepos',
|
||||
'marketplace/plugins/marketplace-plugin-googlecalendar'
|
||||
'marketplace/plugins/marketplace-plugin-googlecalendar',
|
||||
'marketplace/plugins/marketplace-plugin-ups',
|
||||
'marketplace/plugins/marketplace-plugin-aftership',
|
||||
'marketplace/plugins/marketplace-plugin-microsoft_graph'
|
||||
],
|
||||
},
|
||||
],
|
||||
|
|
|
|||
|
|
@ -1,54 +1,57 @@
|
|||
import React, { useEffect, useState } from 'react'
|
||||
import styles from './DocsCard.css'
|
||||
import React, { useEffect, useState } from 'react'
|
||||
import styles from './DocsCard.css'
|
||||
|
||||
export const DocsCard = ({ label, imgSrc, link, height = 40, width = 40, title }) => {
|
||||
const kubernetesSvg = '/img/setup/icons/kubernetes.svg'
|
||||
export const DocsCard = ({ label, imgSrc, link, height = 40, width = 40, title }) => {
|
||||
const kubernetesSvg = '/img/setup/icons/kubernetes.svg'
|
||||
|
||||
const imagePath = imgSrc && imgSrc.includes('kubernetes')
|
||||
? kubernetesSvg
|
||||
: imgSrc
|
||||
? `/img/setup/icons/${imgSrc}.svg`
|
||||
: '/img/setup/icons/default.svg';
|
||||
|
||||
const imagePath = imgSrc.includes('kubernetes') ? kubernetesSvg : `/img/setup/icons/${imgSrc}.svg`
|
||||
|
||||
const description = {
|
||||
"Try ToolJet": "Try out ToolJet with single docker command",
|
||||
"Choose Your ToolJet": "Important information on which version of ToolJet to use.",
|
||||
"System Requirements": "Learn about system requirements for running ToolJet",
|
||||
DigitalOcean: "Quickly deploy ToolJet using the Deploy to DigitalOcean button",
|
||||
Docker: "Deploy ToolJet on a server using docker-compose",
|
||||
Heroku: "Deploy ToolJet on Heroku using the one-click-deployment button",
|
||||
"AWS AMI": "Deploy ToolJet on AWS AMI instances",
|
||||
"AWS ECS": "Deploy ToolJet on AWS ECS instances",
|
||||
Openshift: "Deploy ToolJet on Openshift",
|
||||
Helm: "Deploy ToolJet with Helm Chart",
|
||||
Kubernetes: "Deploy ToolJet on a Kubernetes cluster",
|
||||
"Kubernetes (GKE)": "Deploy ToolJet on a GKE Kubernetes cluster",
|
||||
"Kubernetes (AKS)": "Deploy ToolJet on a AKS Kubernetes cluster",
|
||||
"Kubernetes (EKS)": "Deploy ToolJet on a EKS Kubernetes cluster",
|
||||
|
||||
const description = {
|
||||
"Try ToolJet": "Try out ToolJet with single docker command",
|
||||
"Choose Your ToolJet": "Important information on which version of ToolJet to use.",
|
||||
"System Requirements": "Learn about system requirements for running ToolJet",
|
||||
DigitalOcean: "Quickly deploy ToolJet using the Deploy to DigitalOcean button",
|
||||
Docker: "Deploy ToolJet on a server using docker-compose",
|
||||
Heroku: "Deploy ToolJet on Heroku using the one-click-deployment button",
|
||||
"AWS AMI": "Deploy ToolJet on AWS AMI instances",
|
||||
"AWS ECS": "Deploy ToolJet on AWS ECS instances",
|
||||
Openshift: "Deploy ToolJet on Openshift",
|
||||
Helm: "Deploy ToolJet with Helm Chart",
|
||||
Kubernetes: "Deploy ToolJet on a Kubernetes cluster",
|
||||
"Kubernetes (GKE)": "Deploy ToolJet on a GKE Kubernetes cluster",
|
||||
"Kubernetes (AKS)": "Deploy ToolJet on a AKS Kubernetes cluster",
|
||||
"Kubernetes (EKS)": "Deploy ToolJet on a EKS Kubernetes cluster",
|
||||
"Azure container apps": "Deploy ToolJet on a Azure Container Apps",
|
||||
"Google Cloud Run": "Deploy ToolJet on Cloud Run with GCloud CLI",
|
||||
"Deploying ToolJet client": "Deploy ToolJet Client on static website hosting services",
|
||||
"Environment variables": "Environment variables required by ToolJet Client and Server to start running",
|
||||
"Connecting via HTTP proxy": "Environment variables required by ToolJet to connect via HTTP proxy",
|
||||
"Deploying ToolJet on a subpath": "Steps to deploy ToolJet on a subpath rather than root of domain",
|
||||
"V2 migration guide": "Things to know before migrating to ToolJet V2",
|
||||
"Upgrading ToolJet to the LTS Version": "Guide to upgrade ToolJet to the latest LTS Version.",
|
||||
"ToolJet v3 (Beta) Migration Guide": "Breaking changes and migration guide for ToolJet v3",
|
||||
"ToolJet Cloud v3 Migration Guide": "Breaking changes and migration guide for ToolJet Cloud v3",
|
||||
|
||||
}
|
||||
|
||||
return (
|
||||
|
||||
<a href={link} className="card" style={{ textDecoration: "none", color: "inherit" }}>
|
||||
<div className="card-body">
|
||||
<div className="card-icon">
|
||||
<img className='img' src={imagePath} width="100%" />
|
||||
</div>
|
||||
<div className="card-info">
|
||||
<h3 style={{ margin: "0", paddingBottom: "0.5rem" }}>{label}</h3>
|
||||
<p>
|
||||
{description[label]}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
)
|
||||
}
|
||||
|
||||
"Google Cloud Run": "Deploy ToolJet on Cloud Run with GCloud CLI",
|
||||
"Deploying ToolJet client": "Deploy ToolJet Client on static website hosting services",
|
||||
"Environment variables": "Environment variables required by ToolJet Client and Server to start running",
|
||||
"Connecting via HTTP proxy": "Environment variables required by ToolJet to connect via HTTP proxy",
|
||||
"Deploying ToolJet on a subpath": "Steps to deploy ToolJet on a subpath rather than root of domain",
|
||||
"V2 migration guide": "Things to know before migrating to ToolJet V2",
|
||||
"Upgrading ToolJet to the LTS Version": "Guide to upgrade ToolJet to the latest LTS Version.",
|
||||
"ToolJet v3 (Beta) Migration Guide": "Breaking changes and migration guide for ToolJet v3",
|
||||
"ToolJet Cloud v3 Migration Guide": "Breaking changes and migration guide for ToolJet Cloud v3",
|
||||
}
|
||||
|
||||
return (
|
||||
|
||||
<a href={link} className="card" style={{ textDecoration: "none", color: "inherit" }}>
|
||||
<div className="card-body">
|
||||
<div className="card-icon">
|
||||
<img className='img' src={imagePath} width="100%" />
|
||||
</div>
|
||||
<div className="card-info">
|
||||
<h3 style={{ margin: "0", paddingBottom: "0.5rem" }}>{label}</h3>
|
||||
<p>
|
||||
{description[label]}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
)
|
||||
}
|
||||
|
|
@ -1,12 +1,24 @@
|
|||
import React from 'react'
|
||||
import { DocsCard } from './';
|
||||
import styles from './DocsCard.css'
|
||||
|
||||
export const DocsCardList = ({ list }) => {
|
||||
return (
|
||||
<div className='card-container-setup'>
|
||||
{list.map(item => <DocsCard key={item.docId} label={item.label} imgSrc={item.docId.split('/')[1]} link={item.href} />)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
import React from 'react'
|
||||
import { DocsCard } from './';
|
||||
import styles from './DocsCard.css'
|
||||
|
||||
export const DocsCardList = ({ list }) => {
|
||||
return (
|
||||
<div className='card-container-setup'>
|
||||
{list.map(item => {
|
||||
const docId = item?.docId || "";
|
||||
const parts = docId.split("/");
|
||||
const imgSrc = item.customProps?.icon || parts[parts.length - 1];
|
||||
|
||||
return (
|
||||
<DocsCard
|
||||
key={docId || item.label}
|
||||
label={item.label}
|
||||
imgSrc={imgSrc}
|
||||
link={item.href}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
|
@ -536,12 +536,16 @@ img {
|
|||
}
|
||||
|
||||
[data-theme='dark'] .navbar-signin,
|
||||
[data-theme='dark'] .navbar-signin:hover,
|
||||
[data-theme='dark'] .navbar-website,
|
||||
[data-theme='dark'] .navbar-website:hover {
|
||||
[data-theme='dark'] .navbar-website
|
||||
{
|
||||
color: white;
|
||||
}
|
||||
|
||||
[data-theme='dark'] .navbar-signin:hover,
|
||||
[data-theme='dark'] .navbar-website:hover {
|
||||
color: black;
|
||||
}
|
||||
|
||||
.navbar-website {
|
||||
box-shadow: 0px 0px 1px 0px rgba(48, 50, 51, 0.05),
|
||||
0px 1px 1px 0px rgba(48, 50, 51, 0.10);
|
||||
|
|
@ -890,6 +894,16 @@ button[title="Switch between dark and light mode (currently light mode)"] svg,
|
|||
background-image: url('../../static/img/sidebar-icons/resources.svg');
|
||||
}
|
||||
|
||||
/* Self-hosted only indicator for sidebar category */
|
||||
.self-hosted-icon {
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
margin-left: 6px;
|
||||
vertical-align: middle;
|
||||
display: inline-block;
|
||||
box-shadow: none !important;
|
||||
border-radius: 0 !important;
|
||||
}
|
||||
|
||||
/* Dropdownns */
|
||||
|
||||
|
|
@ -967,3 +981,50 @@ button[title="Switch between dark and light mode (currently light mode)"] svg,
|
|||
.footer {
|
||||
padding: 0px 70px !important;
|
||||
}
|
||||
|
||||
.sc-kSaXSp.dkiBl { /* This hides the redocly branding at the bottom on the API pages.*/
|
||||
visibility: hidden;
|
||||
}
|
||||
|
||||
.jcDBan {
|
||||
border: 0px !important;
|
||||
margin-left: 0px !important;
|
||||
}
|
||||
|
||||
.gHrCVQ {
|
||||
padding: 30px 0 !important;
|
||||
}
|
||||
|
||||
.hgujxv {
|
||||
margin: 0 !important;
|
||||
}
|
||||
|
||||
.sc-dTvVRJ {
|
||||
padding: 10px 0px !important;
|
||||
}
|
||||
|
||||
/* Tooltip for outdated version in navbar dropdown */
|
||||
.dropdown__menu {
|
||||
overflow: visible !important;
|
||||
}
|
||||
|
||||
.dropdown__link[href*="2.50.0-LTS"] {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.dropdown__link[href*="2.50.0-LTS"]:hover::after {
|
||||
content: "No longer maintained — upgrade to the latest LTS version.";
|
||||
position: absolute;
|
||||
bottom: calc(100% + 6px);
|
||||
left: 50%;
|
||||
transform: translateX(-50%);
|
||||
background: #1b1f24;
|
||||
color: #fff;
|
||||
font-size: 11px;
|
||||
padding: 5px 10px;
|
||||
border-radius: 6px;
|
||||
white-space: nowrap;
|
||||
pointer-events: none;
|
||||
z-index: 9999;
|
||||
box-shadow: 0 2px 8px rgba(0,0,0,0.25);
|
||||
}
|
||||
25
docs/src/theme/DocSidebarItem/Category/index.js
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
import React from 'react';
|
||||
import Category from '@theme-original/DocSidebarItem/Category';
|
||||
|
||||
export default function CategoryWrapper(props) {
|
||||
const isSelfHosted = props.item.customProps?.selfHosted === true;
|
||||
|
||||
if (isSelfHosted) {
|
||||
const modifiedItem = {
|
||||
...props.item,
|
||||
label: (
|
||||
<>
|
||||
{props.item.label}
|
||||
<img
|
||||
src="/img/badge-icons/premium.svg"
|
||||
alt="Self-hosted"
|
||||
className="self-hosted-icon"
|
||||
/>
|
||||
</>
|
||||
),
|
||||
};
|
||||
return <Category {...props} item={modifiedItem} />;
|
||||
}
|
||||
|
||||
return <Category {...props} />;
|
||||
}
|
||||
25
docs/src/theme/DocSidebarItem/Link/index.js
Normal file
|
|
@ -0,0 +1,25 @@
|
|||
import React from 'react';
|
||||
import Link from '@theme-original/DocSidebarItem/Link';
|
||||
|
||||
export default function LinkWrapper(props) {
|
||||
const isSelfHosted = props.item.customProps?.selfHosted === true;
|
||||
|
||||
if (isSelfHosted) {
|
||||
const modifiedItem = {
|
||||
...props.item,
|
||||
label: (
|
||||
<>
|
||||
{props.item.label}
|
||||
<img
|
||||
src="/img/badge-icons/premium.svg"
|
||||
alt="Self-hosted"
|
||||
className="self-hosted-icon"
|
||||
/>
|
||||
</>
|
||||
),
|
||||
};
|
||||
return <Link {...props} item={modifiedItem} />;
|
||||
}
|
||||
|
||||
return <Link {...props} />;
|
||||
}
|
||||
189
docs/src/theme/Root.js
Normal file
|
|
@ -0,0 +1,189 @@
|
|||
import React, { useCallback, useEffect } from "react";
|
||||
import { useLocation, useHistory } from "@docusaurus/router";
|
||||
|
||||
const GOOGLE_TRANSLATE_SCRIPT_ID = "tooljet-google-translate-script";
|
||||
const GOOGLE_TRANSLATE_CALLBACK = "tooljetGoogleTranslateInit";
|
||||
const GOOGLE_TRANSLATE_CONTAINER_ID = "tooljet-google-translate-runtime";
|
||||
const GOOGLE_TRANSLATE_SOURCE_LANGUAGE = "en";
|
||||
const GOOGLE_TRANSLATE_PARAM = "lang";
|
||||
const LANGUAGE_CODE_REGEX =
|
||||
/^[A-Za-z]{2,3}(?:-[A-Za-z]{4})?(?:-(?:[A-Za-z]{2}|\d{3}))?(?:-[A-Za-z0-9]{4,8})*$/;
|
||||
|
||||
function normalizeLanguageCode(value) {
|
||||
if (!value) return null;
|
||||
|
||||
const languageCode = value.trim();
|
||||
if (!LANGUAGE_CODE_REGEX.test(languageCode)) return null;
|
||||
|
||||
const parts = languageCode.split("-");
|
||||
const normalizedParts = [parts[0].toLowerCase()];
|
||||
let index = 1;
|
||||
|
||||
if (parts[index] && /^[A-Za-z]{4}$/.test(parts[index])) {
|
||||
const script = parts[index];
|
||||
normalizedParts.push(
|
||||
`${script.charAt(0).toUpperCase()}${script.slice(1).toLowerCase()}`
|
||||
);
|
||||
index += 1;
|
||||
}
|
||||
|
||||
if (
|
||||
parts[index] &&
|
||||
(/^[A-Za-z]{2}$/.test(parts[index]) || /^\d{3}$/.test(parts[index]))
|
||||
) {
|
||||
const region = parts[index];
|
||||
normalizedParts.push(/^\d{3}$/.test(region) ? region : region.toUpperCase());
|
||||
index += 1;
|
||||
}
|
||||
|
||||
while (index < parts.length) {
|
||||
normalizedParts.push(parts[index].toLowerCase());
|
||||
index += 1;
|
||||
}
|
||||
|
||||
return normalizedParts.join("-");
|
||||
}
|
||||
|
||||
function setGoogleTranslateCookie(targetLanguage) {
|
||||
const cookieValue = `/${GOOGLE_TRANSLATE_SOURCE_LANGUAGE}/${targetLanguage}`;
|
||||
const maxAge = 60 * 60 * 24 * 365;
|
||||
const secure = window.location.protocol === "https:" ? ";secure" : "";
|
||||
document.cookie = `googtrans=${cookieValue};path=/;max-age=${maxAge};SameSite=Lax${secure}`;
|
||||
}
|
||||
|
||||
function ensureTranslateRuntimeContainer() {
|
||||
let container = document.getElementById(GOOGLE_TRANSLATE_CONTAINER_ID);
|
||||
if (container) return container;
|
||||
|
||||
container = document.createElement("div");
|
||||
container.id = GOOGLE_TRANSLATE_CONTAINER_ID;
|
||||
container.setAttribute("aria-hidden", "true");
|
||||
container.style.position = "absolute";
|
||||
container.style.left = "-9999px";
|
||||
container.style.width = "1px";
|
||||
container.style.height = "1px";
|
||||
container.style.overflow = "hidden";
|
||||
container.dataset.tooljetGoogleTranslateRuntime = "true";
|
||||
document.body.appendChild(container);
|
||||
|
||||
return container;
|
||||
}
|
||||
|
||||
export default function Root({ children }) {
|
||||
const location = useLocation();
|
||||
const history = useHistory();
|
||||
|
||||
function getStoredUTMParams() {
|
||||
return JSON.parse(sessionStorage.getItem("utmParams") || "{}");
|
||||
}
|
||||
|
||||
const initializeTranslate = useCallback(() => {
|
||||
if (!window.google?.translate?.TranslateElement) return;
|
||||
if (window.__tooljetGoogleTranslateInitialized) return;
|
||||
|
||||
ensureTranslateRuntimeContainer();
|
||||
|
||||
new window.google.translate.TranslateElement(
|
||||
{
|
||||
pageLanguage: GOOGLE_TRANSLATE_SOURCE_LANGUAGE,
|
||||
autoDisplay: true,
|
||||
},
|
||||
GOOGLE_TRANSLATE_CONTAINER_ID
|
||||
);
|
||||
|
||||
window.__tooljetGoogleTranslateInitialized = true;
|
||||
}, []);
|
||||
|
||||
// Store UTMs on first page load
|
||||
useEffect(() => {
|
||||
const urlParams = new URLSearchParams(window.location.search);
|
||||
const storedParams = JSON.parse(
|
||||
sessionStorage.getItem("utmParams") || "{}"
|
||||
);
|
||||
let hasNewParams = false;
|
||||
|
||||
urlParams.forEach((value, key) => {
|
||||
if (key.startsWith("utm_")) {
|
||||
storedParams[key] = value;
|
||||
hasNewParams = true;
|
||||
}
|
||||
});
|
||||
|
||||
if (hasNewParams) {
|
||||
sessionStorage.setItem("utmParams", JSON.stringify(storedParams));
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Append UTMs on every route change
|
||||
useEffect(() => {
|
||||
const storedParams = getStoredUTMParams();
|
||||
if (Object.keys(storedParams).length === 0) return;
|
||||
|
||||
const url = new URL(window.location.href);
|
||||
|
||||
// Append UTMs only if they're not already present
|
||||
Object.entries(storedParams).forEach(([key, value]) => {
|
||||
if (!url.searchParams.has(key)) {
|
||||
url.searchParams.set(key, value);
|
||||
}
|
||||
});
|
||||
|
||||
const newUrl = url.pathname + url.search + url.hash;
|
||||
|
||||
if (newUrl !== location.pathname + location.search + location.hash) {
|
||||
history.replace(newUrl); // update URL without reloading
|
||||
}
|
||||
}, [location.pathname, location.search, location.hash, history]);
|
||||
|
||||
// Support ?lang=<code> links and sync through Google's cookie mechanism.
|
||||
useEffect(() => {
|
||||
const url = new URL(window.location.href);
|
||||
const requestedLanguage = normalizeLanguageCode(
|
||||
url.searchParams.get(GOOGLE_TRANSLATE_PARAM)
|
||||
);
|
||||
if (!requestedLanguage) return;
|
||||
|
||||
setGoogleTranslateCookie(requestedLanguage);
|
||||
|
||||
url.searchParams.delete(GOOGLE_TRANSLATE_PARAM);
|
||||
const updatedUrl = `${url.pathname}${url.search}${url.hash}`;
|
||||
const currentUrl = `${location.pathname}${location.search}${location.hash}`;
|
||||
if (updatedUrl !== currentUrl) {
|
||||
history.replace(updatedUrl);
|
||||
}
|
||||
}, [
|
||||
history,
|
||||
location.pathname,
|
||||
location.search,
|
||||
location.hash,
|
||||
]);
|
||||
|
||||
// Initialize Google Translate globally once.
|
||||
useEffect(() => {
|
||||
window[GOOGLE_TRANSLATE_CALLBACK] = initializeTranslate;
|
||||
|
||||
if (window.google?.translate?.TranslateElement) {
|
||||
initializeTranslate();
|
||||
} else if (!document.getElementById(GOOGLE_TRANSLATE_SCRIPT_ID)) {
|
||||
const script = document.createElement("script");
|
||||
script.id = GOOGLE_TRANSLATE_SCRIPT_ID;
|
||||
script.src = `https://translate.google.com/translate_a/element.js?cb=${GOOGLE_TRANSLATE_CALLBACK}`;
|
||||
script.async = true;
|
||||
document.body.appendChild(script);
|
||||
}
|
||||
|
||||
return () => {
|
||||
if (window[GOOGLE_TRANSLATE_CALLBACK] === initializeTranslate) {
|
||||
delete window[GOOGLE_TRANSLATE_CALLBACK];
|
||||
}
|
||||
delete window.__tooljetGoogleTranslateInitialized;
|
||||
|
||||
const container = document.getElementById(GOOGLE_TRANSLATE_CONTAINER_ID);
|
||||
if (container?.dataset.tooljetGoogleTranslateRuntime === "true") {
|
||||
container.remove();
|
||||
}
|
||||
};
|
||||
}, [initializeTranslate]);
|
||||
|
||||
return <>{children}</>;
|
||||
}
|
||||
BIN
docs/static/img/actions/reset-query/reset-query.png
vendored
Normal file
|
After Width: | Height: | Size: 47 KiB |
BIN
docs/static/img/app-builder/custom-theme/custom-theme-app-v1.png
vendored
Normal file
|
After Width: | Height: | Size: 541 KiB |
BIN
docs/static/img/app-builder/custom-theme/default-theme-app-v1.png
vendored
Normal file
|
After Width: | Height: | Size: 534 KiB |
BIN
docs/static/img/app-builder/embed-apps/auth-flow.png
vendored
Normal file
|
After Width: | Height: | Size: 390 KiB |
BIN
docs/static/img/app-builder/embed-apps/iframe.png
vendored
Normal file
|
After Width: | Height: | Size: 1.3 MiB |
BIN
docs/static/img/app-builder/embed-apps/public-app.png
vendored
Normal file
|
After Width: | Height: | Size: 1.3 MiB |
BIN
docs/static/img/app-builder/embed-apps/share.png
vendored
Normal file
|
After Width: | Height: | Size: 170 KiB |
|
Before Width: | Height: | Size: 34 KiB After Width: | Height: | Size: 37 KiB |
BIN
docs/static/img/app-builder/modules/dashboard.png
vendored
|
Before Width: | Height: | Size: 89 KiB After Width: | Height: | Size: 73 KiB |
|
Before Width: | Height: | Size: 41 KiB After Width: | Height: | Size: 42 KiB |
|
Before Width: | Height: | Size: 84 KiB After Width: | Height: | Size: 85 KiB |
|
Before Width: | Height: | Size: 309 KiB After Width: | Height: | Size: 245 KiB |
BIN
docs/static/img/app-builder/permissions/page-modal.png
vendored
Normal file
|
After Width: | Height: | Size: 71 KiB |
BIN
docs/static/img/app-builder/permissions/page-permission.png
vendored
Normal file
|
After Width: | Height: | Size: 94 KiB |
BIN
docs/static/img/app-builder/permissions/query-denied.png
vendored
Normal file
|
After Width: | Height: | Size: 142 KiB |
BIN
docs/static/img/datasource-reference/airtable/airtable-connection.png
vendored
Normal file
|
After Width: | Height: | Size: 46 KiB |
BIN
docs/static/img/datasource-reference/airtable/airtable-listops.png
vendored
Normal file
|
After Width: | Height: | Size: 29 KiB |
BIN
docs/static/img/datasource-reference/airtable/airtable-listrec.png
vendored
Normal file
|
After Width: | Height: | Size: 96 KiB |
BIN
docs/static/img/datasource-reference/airtable/create-rec.png
vendored
Normal file
|
After Width: | Height: | Size: 52 KiB |
BIN
docs/static/img/datasource-reference/airtable/delete-rec.png
vendored
Normal file
|
After Width: | Height: | Size: 45 KiB |
BIN
docs/static/img/datasource-reference/airtable/retrieve-rec.png
vendored
Normal file
|
After Width: | Height: | Size: 45 KiB |
BIN
docs/static/img/datasource-reference/airtable/update-rec.png
vendored
Normal file
|
After Width: | Height: | Size: 52 KiB |
BIN
docs/static/img/datasource-reference/amazonses/awsSES-listops.png
vendored
Normal file
|
After Width: | Height: | Size: 35 KiB |
BIN
docs/static/img/datasource-reference/amazonses/ses-query.png
vendored
Normal file
|
After Width: | Height: | Size: 60 KiB |
BIN
docs/static/img/datasource-reference/appwrite/appwrite-adddoc.png
vendored
Normal file
|
After Width: | Height: | Size: 37 KiB |
BIN
docs/static/img/datasource-reference/appwrite/appwrite-connection.png
vendored
Normal file
|
After Width: | Height: | Size: 50 KiB |
BIN
docs/static/img/datasource-reference/appwrite/appwrite-deldoc.png
vendored
Normal file
|
After Width: | Height: | Size: 31 KiB |
BIN
docs/static/img/datasource-reference/appwrite/appwrite-getdoc.png
vendored
Normal file
|
After Width: | Height: | Size: 31 KiB |
BIN
docs/static/img/datasource-reference/appwrite/appwrite-listdoc.png
vendored
Normal file
|
After Width: | Height: | Size: 47 KiB |