Compare commits
1.0.0-beta...1.6.2 (827 commits)
Commit list: 827 commits, identified only by SHA (from 1bcb027e05 through 1df1b55e24); the author, date, and message columns are empty.
```diff
@@ -22,9 +22,8 @@ next-env.d.ts
 *.log
 .machinelogs*.json
 *-audit.json
-package-lock.json
-config/
 install/
 bruno/
 LICENSE
 CONTRIBUTING.md
+dist
```
.github/FUNDING.yml (new file, +3)

```yaml
# These are supported funding model platforms

github: [fosrl]
```
.github/dependabot.yml (new file, +35)

```yaml
version: 2
updates:
    - package-ecosystem: "npm"
      directory: "/"
      schedule:
          interval: "daily"
      groups:
          dev-patch-updates:
              dependency-type: "development"
              update-types:
                  - "patch"
          dev-minor-updates:
              dependency-type: "development"
              update-types:
                  - "minor"
          prod-patch-updates:
              dependency-type: "production"
              update-types:
                  - "patch"
          prod-minor-updates:
              dependency-type: "production"
              update-types:
                  - "minor"

    - package-ecosystem: "docker"
      directory: "/"
      schedule:
          interval: "daily"
      groups:
          patch-updates:
              update-types:
                  - "patch"
          minor-updates:
              update-types:
                  - "minor"
```
.github/workflows/cicd.yml (new file, +78)

```yaml
name: CI/CD Pipeline

on:
    push:
        tags:
            - "*"

jobs:
    release:
        name: Build and Release
        runs-on: ubuntu-latest

        steps:
            - name: Checkout code
              uses: actions/checkout@v3

            - name: Set up Docker Buildx
              uses: docker/setup-buildx-action@v2

            - name: Log in to Docker Hub
              uses: docker/login-action@v2
              with:
                  username: ${{ secrets.DOCKER_HUB_USERNAME }}
                  password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

            - name: Extract tag name
              id: get-tag
              run: echo "TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV

            - name: Install Go
              uses: actions/setup-go@v4
              with:
                  go-version: 1.23.0

            - name: Update version in package.json
              run: |
                  TAG=${{ env.TAG }}
                  sed -i "s/export const APP_VERSION = \".*\";/export const APP_VERSION = \"$TAG\";/" server/lib/consts.ts
                  cat server/lib/consts.ts

            - name: Pull latest Gerbil version
              id: get-gerbil-tag
              run: |
                  LATEST_TAG=$(curl -s https://api.github.com/repos/fosrl/gerbil/tags | jq -r '.[0].name')
                  echo "LATEST_GERBIL_TAG=$LATEST_TAG" >> $GITHUB_ENV

            - name: Pull latest Badger version
              id: get-badger-tag
              run: |
                  LATEST_TAG=$(curl -s https://api.github.com/repos/fosrl/badger/tags | jq -r '.[0].name')
                  echo "LATEST_BADGER_TAG=$LATEST_TAG" >> $GITHUB_ENV

            - name: Update install/main.go
              run: |
                  PANGOLIN_VERSION=${{ env.TAG }}
                  GERBIL_VERSION=${{ env.LATEST_GERBIL_TAG }}
                  BADGER_VERSION=${{ env.LATEST_BADGER_TAG }}
                  sed -i "s/config.PangolinVersion = \".*\"/config.PangolinVersion = \"$PANGOLIN_VERSION\"/" install/main.go
                  sed -i "s/config.GerbilVersion = \".*\"/config.GerbilVersion = \"$GERBIL_VERSION\"/" install/main.go
                  sed -i "s/config.BadgerVersion = \".*\"/config.BadgerVersion = \"$BADGER_VERSION\"/" install/main.go
                  echo "Updated install/main.go with Pangolin version $PANGOLIN_VERSION, Gerbil version $GERBIL_VERSION, and Badger version $BADGER_VERSION"
                  cat install/main.go

            - name: Build installer
              working-directory: install
              run: |
                  make go-build-release

            - name: Upload artifacts from /install/bin
              uses: actions/upload-artifact@v4
              with:
                  name: install-bin
                  path: install/bin/

            - name: Build and push Docker images
              run: |
                  TAG=${{ env.TAG }}
                  make build-release tag=$TAG
```
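The pipeline above only runs on tag pushes, so a release is cut by tagging the commit to ship. A minimal sketch of that trigger, assuming push access to the repository; the tag value `1.6.2` is just an example:

```sh
# Pushing the tag starts the "CI/CD Pipeline" workflow, which bakes the tag into
# server/lib/consts.ts and install/main.go, builds the installer, and then runs
# `make build-release tag=<tag>` to build and push the Docker images.
git tag 1.6.2
git push origin 1.6.2
```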
.github/workflows/linting.yml (new file, +34)

```yaml
name: ESLint

on:
    pull_request:
        paths:
            - '**/*.js'
            - '**/*.jsx'
            - '**/*.ts'
            - '**/*.tsx'
            - '.eslintrc*'
            - 'package.json'
            - 'yarn.lock'
            - 'pnpm-lock.yaml'
            - 'package-lock.json'

jobs:
    Linter:
        runs-on: ubuntu-latest
        steps:
            - name: Checkout code
              uses: actions/checkout@v4

            - name: Set up Node.js
              uses: actions/setup-node@v4
              with:
                  node-version: '20'

            - name: Install dependencies
              run: |
                  npm ci

            - name: Run ESLint
              run: |
                  npx eslint . --ext .js,.jsx,.ts,.tsx
```
.github/workflows/stale-bot.yml (new file, +37)

```yaml
name: Mark and Close Stale Issues

on:
    schedule:
        - cron: '0 0 * * *'
    workflow_dispatch: # Allow manual trigger

permissions:
    contents: write # only for delete-branch option
    issues: write
    pull-requests: write

jobs:
    stale:
        runs-on: ubuntu-latest
        steps:
            - uses: actions/stale@v9
              with:
                  days-before-stale: 14
                  days-before-close: 14
                  stale-issue-message: 'This issue has been automatically marked as stale due to 14 days of inactivity. It will be closed in 14 days if no further activity occurs.'
                  close-issue-message: 'This issue has been automatically closed due to inactivity. If you believe this is still relevant, please open a new issue with up-to-date information.'
                  stale-issue-label: 'stale'

                  exempt-issue-labels: 'needs investigating, networking, new feature, reverse proxy, bug, api, authentication, documentation, enhancement, help wanted, good first issue, question'

                  exempt-all-issue-assignees: true

                  only-labels: ''
                  exempt-pr-labels: ''
                  days-before-pr-stale: -1
                  days-before-pr-close: -1

                  operations-per-run: 100
                  remove-stale-when-updated: true
                  delete-branch: false
                  enable-statistics: true
```
.github/workflows/test.yml (new file, +49)

```yaml
name: Run Tests

on:
    pull_request:
        branches:
            - main
            - dev

jobs:
    test:
        runs-on: ubuntu-latest

        steps:
            - uses: actions/checkout@v4

            - uses: actions/setup-node@v4
              with:
                  node-version: '20'

            - name: Copy config file
              run: cp config/config.example.yml config/config.yml

            - name: Install dependencies
              run: npm ci

            - name: Generate database migrations
              run: npm run db:sqlite:generate

            - name: Apply database migrations
              run: npm run db:sqlite:push

            - name: Start app in background
              run: nohup npm run dev &

            - name: Wait for app availability
              run: |
                  for i in {1..5}; do
                      if curl --silent --fail http://localhost:3002/auth/login; then
                          echo "App is up"
                          exit 0
                      fi
                      echo "Waiting for the app... attempt $i"
                      sleep 5
                  done
                  echo "App failed to start"
                  exit 1

            - name: Build Docker image
              run: make build
```
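The same checks can be approximated locally before opening a pull request. A rough sketch mirroring the workflow steps above, assuming Node 20, npm, and Docker are installed; paths and script names are taken directly from the workflow:

```sh
cp config/config.example.yml config/config.yml   # same example config the CI job copies
npm ci                                           # install the exact locked dependencies
npm run db:sqlite:generate                       # generate SQLite migrations
npm run db:sqlite:push                           # apply them to the local database
npm run dev &                                    # start the app in the background
curl --silent --fail http://localhost:3002/auth/login && echo "App is up"
make build                                       # build the Docker image, as the final CI step does
```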
.gitignore (9 changed lines)

```diff
@@ -23,9 +23,14 @@ next-env.d.ts
 .machinelogs*.json
 *-audit.json
 migrations
-package-lock.json
 tsconfig.tsbuildinfo
-config/
+config/config.yml
 dist
 .dist
 installer
+*.tar
+bin
+.secrets
+test_event.json
+.idea/
+server/db/index.ts
```
````diff
@@ -1,6 +1,12 @@
 ## Contributing
 
-Contributions are welcome! Please see the following page in our documentation with future plans and feature ideas if you are looking for a place to start.
+Contributions are welcome!
+
+Please see the contribution and local development guide on the docs page before getting started:
+
+https://docs.fossorial.io/development
+
+For ideas about what features to work on and our future plans, please see the roadmap:
 
 https://docs.fossorial.io/roadmap
 
@@ -15,4 +21,4 @@ By creating this pull request, I grant the project maintainers an unlimited,
 perpetual license to use, modify, and redistribute these contributions under any terms they
 choose, including both the AGPLv3 and the Fossorial Commercial license terms. I
 represent that I have the right to grant this license for all contributed content.
 ```
````
Dockerfile (31 changed lines)

```diff
@@ -2,33 +2,40 @@ FROM node:20-alpine AS builder
 
 WORKDIR /app
 
-COPY package.json ./
-RUN npm install
+# COPY package.json package-lock.json ./
+COPY package*.json ./
+RUN npm ci
 
 COPY . .
 
-RUN npx drizzle-kit generate --dialect sqlite --schema ./server/db/schema.ts --out init
+RUN echo 'export * from "./sqlite";' > server/db/index.ts
 
-RUN npm run build
+RUN npx drizzle-kit generate --dialect sqlite --schema ./server/db/sqlite/schema.ts --out init
+
+RUN npm run build:sqlite
+RUN npm run build:cli
 
 FROM node:20-alpine AS runner
 
-RUN apk add --no-cache curl
-
 WORKDIR /app
 
-COPY package.json ./
+# Curl used for the health checks
+RUN apk add --no-cache curl
 
-RUN npm install --omit=dev
+# COPY package.json package-lock.json ./
+COPY package*.json ./
+RUN npm ci --omit=dev && npm cache clean --force
 
-COPY --from=builder /app/.next ./.next
+COPY --from=builder /app/.next/standalone ./
+COPY --from=builder /app/.next/static ./.next/static
 COPY --from=builder /app/dist ./dist
 COPY --from=builder /app/init ./dist/init
 
-COPY config.example.yml ./dist/config.example.yml
+COPY ./cli/wrapper.sh /usr/local/bin/pangctl
+RUN chmod +x /usr/local/bin/pangctl ./dist/cli.mjs
 
 COPY server/db/names.json ./dist/names.json
 
 COPY public ./public
 
-CMD ["npm", "start"]
+CMD ["npm", "run", "start:sqlite"]
```
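For reference, the reworked SQLite image can be built and exercised with plain Docker. A minimal sketch; the image name, port mappings, and config bind mount mirror the Makefile and compose file elsewhere in this diff:

```sh
docker build -t fosrl/pangolin:latest .
docker run --rm -p 3000:3000 -p 3001:3001 -p 3002:3002 \
    -v ./config:/app/config \
    fosrl/pangolin:latest          # runs CMD ["npm", "run", "start:sqlite"]
```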
Dockerfile.pg (new file, +41)

```dockerfile
FROM node:20-alpine AS builder

WORKDIR /app

# COPY package.json package-lock.json ./
COPY package*.json ./
RUN npm ci

COPY . .

RUN echo 'export * from "./pg";' > server/db/index.ts

RUN npx drizzle-kit generate --dialect postgresql --schema ./server/db/pg/schema.ts --out init

RUN npm run build:pg
RUN npm run build:cli

FROM node:20-alpine AS runner

WORKDIR /app

# Curl used for the health checks
RUN apk add --no-cache curl

# COPY package.json package-lock.json ./
COPY package*.json ./
RUN npm ci --omit=dev && npm cache clean --force

COPY --from=builder /app/.next/standalone ./
COPY --from=builder /app/.next/static ./.next/static
COPY --from=builder /app/dist ./dist
COPY --from=builder /app/init ./dist/init

COPY ./cli/wrapper.sh /usr/local/bin/pangctl
RUN chmod +x /usr/local/bin/pangctl ./dist/cli.mjs

COPY server/db/names.json ./dist/names.json

COPY public ./public

CMD ["npm", "run", "start:pg"]
```
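The Postgres variant builds the same way from `Dockerfile.pg`. A sketch, assuming the container is pointed at an external Postgres instance; the `DATABASE_URL` environment variable here is an assumption that mirrors the credential used by `drizzle.pg.config.ts` later in this diff, so check the Pangolin docs for the actual runtime database configuration:

```sh
docker build -f Dockerfile.pg -t fosrl/pangolin:postgresql-latest .
docker run --rm -p 3000:3000 -p 3001:3001 -p 3002:3002 \
    -v ./config:/app/config \
    -e DATABASE_URL="postgres://user:pass@db-host:5432/pangolin" \
    fosrl/pangolin:postgresql-latest   # runs CMD ["npm", "run", "start:pg"]
```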
Makefile (16 changed lines)

```diff
@@ -1,18 +1,24 @@
 .PHONY: build build-release build-arm build-x86 test clean
 
-all: build push
+build-release:
+	@if [ -z "$(tag)" ]; then \
+		echo "Error: tag is required. Usage: make build-release tag=<tag>"; \
+		exit 1; \
+	fi
+	docker buildx build --platform linux/arm64,linux/amd64 -t fosrl/pangolin:latest -f Dockerfile --push .
+	docker buildx build --platform linux/arm64,linux/amd64 -t fosrl/pangolin:$(tag) -f Dockerfile --push .
+	docker buildx build --platform linux/arm64,linux/amd64 -t fosrl/pangolin:postgresql-latest -f Dockerfile.pg --push .
+	docker buildx build --platform linux/arm64,linux/amd64 -t fosrl/pangolin:postgresql-$(tag) -f Dockerfile.pg --push .
 
 build-arm:
 	docker buildx build --platform linux/arm64 -t fosrl/pangolin:latest .
 
 build-x86:
 	docker buildx build --platform linux/amd64 -t fosrl/pangolin:latest .
 
 build:
 	docker build -t fosrl/pangolin:latest .
 
-push:
-	docker push fosrl/pangolin:latest
-
 test:
 	docker run -it -p 3000:3000 -p 3001:3001 -p 3002:3002 -v ./config:/app/config fosrl/pangolin:latest
```
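Typical invocations of the reworked targets; the tag value is only an example, and `build-release` needs a Docker Buildx builder plus push rights to the `fosrl` Docker Hub org since it pushes multi-arch images for both the SQLite and Postgres Dockerfiles:

```sh
make build                     # local single-arch image: fosrl/pangolin:latest
make build-release tag=1.6.2   # multi-arch build + push of :latest, :1.6.2, :postgresql-latest, :postgresql-1.6.2
make test                      # run the image with ./config mounted at /app/config
```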
README.md (179 changed lines)

```diff
@@ -1,126 +1,153 @@
-# Pangolin
+<div align="center">
+    <h2>
+        <picture>
+            <source media="(prefers-color-scheme: dark)" srcset="public/logo/word_mark_white.png">
+            <img alt="Pangolin Logo" src="public/logo/word_mark_black.png" width="250">
+        </picture>
+    </h2>
+</div>
 
-Pangolin is a self-hosted tunneled reverse proxy management server with identity and access management, designed to securely expose private resources through use with the Traefik reverse proxy and WireGuard tunnel clients like Newt. With Pangolin, you retain full control over your infrastructure while providing a user-friendly and feature-rich solution for managing proxies, authentication, and access, and simplifying complex network setups, all with a clean and simple UI.
+<h4 align="center">Tunneled Reverse Proxy Server with Access Control</h4>
+<div align="center">
 
-### Installation and Documentation
+_Your own self-hosted zero trust tunnel._
 
-- [Installation Instructions](https://docs.fossorial.io/Getting%20Started/quick-install)
-- [Full Documentation](https://docs.fossorial.io)
+</div>
 
-## Preview
+<div align="center">
+    <h5>
+        <a href="https://fossorial.io">
+            Website
+        </a>
+        <span> | </span>
+        <a href="https://docs.fossorial.io/Getting%20Started/quick-install">
+            Install Guide
+        </a>
+        <span> | </span>
+        <a href="mailto:numbat@fossorial.io">
+            Contact Us
+        </a>
+    </h5>
 
-<img src="public/screenshots/sites.png" alt="Preview"/>
+[](https://hub.docker.com/r/fosrl/pangolin)
+[](https://discord.gg/HCJR8Xhme4)
+[](https://www.youtube.com/@fossorial-app)
 
-_Sites page of Pangolin dashboard (dark mode) showing multiple tunnels connected to the central server._
+</div>
+
+Pangolin is a self-hosted tunneled reverse proxy server with identity and access control, designed to securely expose private resources on distributed networks. Acting as a central hub, it connects isolated networks — even those behind restrictive firewalls — through encrypted tunnels, enabling easy access to remote services without opening ports.
+
+<img src="public/screenshots/hero.png" alt="Preview"/>
+
+_Resources page of Pangolin dashboard (dark mode) showing multiple resources available to connect._
 
 ## Key Features
 
 ### Reverse Proxy Through WireGuard Tunnel
 
-- Expose private resources on your network **without opening ports**.
+- Expose private resources on your network **without opening ports** (firewall punching).
 - Secure and easy to configure site-to-site connectivity via a custom **user space WireGuard client**, [Newt](https://github.com/fosrl/newt).
 - Built-in support for any WireGuard client.
 - Automated **SSL certificates** (https) via [LetsEncrypt](https://letsencrypt.org/).
+- Support for HTTP/HTTPS and **raw TCP/UDP services**.
+- Load balancing.
 
 ### Identity & Access Management
 
 - Centralized authentication system using platform SSO. **Users will only have to manage one login.**
-- Totp with backup codes for two-factor authentication.
+- **Define access control rules for IPs, IP ranges, and URL paths per resource.**
+- TOTP with backup codes for two-factor authentication.
 - Create organizations, each with multiple sites, users, and roles.
 - **Role-based access control** to manage resource access permissions.
 - Additional authentication options include:
     - Email whitelisting with **one-time passcodes.**
    - **Temporary, self-destructing share links.**
    - Resource specific pin codes.
    - Resource specific passwords.
+- External identity provider (IdP) support with OAuth2/OIDC, such as Authentik, Keycloak, Okta, and others.
+    - Auto-provision users and roles from your IdP.
 
 ### Simple Dashboard UI
 
 - Manage sites, users, and roles with a clean and intuitive UI.
 - Monitor site usage and connectivity.
 - Light and dark mode options.
 - Mobile friendly.
 
 ### Easy Deployment
 
-- Docker Compose based setup for simplified deployment.
+- Run on any cloud provider or on-premises.
+- **Docker Compose based setup** for simplified deployment.
 - Future-proof installation script for streamlined setup and feature additions.
-- Run on any VPS.
-- Use your preferred WireGuard client to connect, or use Newt, our custom user space client for the best experience.
+- Use any WireGuard client to connect, or use **Newt, our custom user space client** for the best experience.
+- Use the API to create custom integrations and scripts.
+- Fine-grained access control to the API via scoped API keys.
+- Comprehensive Swagger documentation for the API.
 
 ### Modular Design
 
-- Extend functionality with existing [Traefik](https://github.com/traefik/traefik) plugins, such as [Fail2Ban](https://plugins.traefik.io/plugins/628c9ebcffc0cd18356a979f/fail2-ban) or [CrowdSec](https://plugins.traefik.io/plugins/6335346ca4caa9ddeffda116/crowdsec-bouncer-traefik-plugin), which integrate seamlessly.
+- Extend functionality with existing [Traefik](https://github.com/traefik/traefik) plugins, such as [CrowdSec](https://plugins.traefik.io/plugins/6335346ca4caa9ddeffda116/crowdsec-bouncer-traefik-plugin) and [Geoblock](https://github.com/PascalMinder/geoblock).
+- **Automatically install and configure Crowdsec via Pangolin's installer script.**
 - Attach as many sites to the central server as you wish.
 
-## Screenshots
+<img src="public/screenshots/collage.png" alt="Collage"/>
 
-Pangolin has a straightforward and simple dashboard UI:
-
-<div align="center">
-<table>
-<tr>
-<td align="center"><img src="public/screenshots/sites.png" alt="Sites Example" width="200"/></td>
-<td align="center"><img src="public/screenshots/users.png" alt="Users Example" width="200"/></td>
-<td align="center"><img src="public/screenshots/share-link.png" alt="Share Link Example" width="200"/></td>
-</tr>
-<tr>
-<td align="center"><b>Sites</b></td>
-<td align="center"><b>Users</b></td>
-<td align="center"><b>Share Link</b></td>
-</tr>
-<tr>
-<td align="center"><img src="public/screenshots/auth.png" alt="Authentication Example" width="200"/></td>
-<td align="center"><img src="public/screenshots/connectivity.png" alt="Connectivity Example" width="200"/></td>
-<td align="center"></td>
-</tr>
-<tr>
-<td align="center"><b>Authentication</b></td>
-<td align="center"><b>Connectivity</b></td>
-<td align="center"><b></b></td>
-</tr>
-</table>
-</div>
-
-## Workflow Example
-
-### Deployment and Usage Example
+## Deployment and Usage Example
 
 1. **Deploy the Central Server**:
 
-    - Deploy the Docker Compose stack containing Pangolin, Gerbil, and Traefik onto a VPS hosted on a cloud platform like Amazon EC2, DigitalOcean Droplet, or similar. There are many cheap VPS hosting options available to suit your needs.
+    - Deploy the Docker Compose stack onto a VPS hosted on a cloud platform like RackNerd, Amazon EC2, DigitalOcean Droplet, or similar. There are many cheap VPS hosting options available to suit your needs.
 
-2. **Domain Configuration**:
+> [!TIP]
+> Many of our users have had a great experience with [RackNerd](https://my.racknerd.com/aff.php?aff=13788). Depending on promotions, you can get a [**VPS with 1 vCPU, 1GB RAM, and ~20GB SSD for just around $12/year**](https://my.racknerd.com/aff.php?aff=13788&pid=912). That's a great deal!
+> We are part of the [RackNerd](https://my.racknerd.com/aff.php?aff=13788) affiliate program, so if you purchase through [our link](https://my.racknerd.com/aff.php?aff=13788), we receive a small commission which helps us maintain the project and keep it free for everyone.
+
+1. **Domain Configuration**:
 
     - Point your domain name to the VPS and configure Pangolin with your preferred settings.
 
-3. **Connect Private Sites**:
+2. **Connect Private Sites**:
 
     - Install Newt or use another WireGuard client on private sites.
-    - Automaticlaly establish a connection from these sites to the central server.
+    - Automatically establish a connection from these sites to the central server.
 
-4. **Configure Users & Roles**
-    - Define organizations and invite users.
-    - Implement user- or role-based permissions to control resource access.
+3. **Expose Resources**:
+
+    - Add resources to the central server and configure access control rules.
+    - Access these resources securely from anywhere.
 
 **Use Case Example - Bypassing Port Restrictions in Home Lab**:
 Imagine private sites where the ISP restricts port forwarding. By connecting these sites to Pangolin via WireGuard, you can securely expose HTTP and HTTPS resources on the private network without any networking complexity.
 
+**Use Case Example - Deploying Services For Your Business**:
+You can use Pangolin as an easy way to expose your business applications to your users behind a safe authentication portal you can integrate into your IdP solution. Expose resources on prem and on the cloud.
+
 **Use Case Example - IoT Networks**:
 IoT networks are often fragmented and difficult to manage. By deploying Pangolin on a central server, you can connect all your IoT sites via Newt or another WireGuard client. This creates a simple, secure, and centralized way to access IoT resources without the need for intricate networking setups.
 
 ## Similar Projects and Inspirations
 
-Pangolin was inspired by several existing projects and concepts:
+**Cloudflare Tunnels**:
+A similar approach to proxying private resources securely, but Pangolin is a self-hosted alternative, giving you full control over your infrastructure.
 
-- **Cloudflare Tunnels**:
-  A similar approach to proxying private resources securely, but Pangolin is a self-hosted alternative, giving you full control over your infrastructure.
+**Authelia**:
+This inspired Pangolin’s centralized authentication system for proxies, enabling robust user and role management.
 
-- **Authentik and Authelia**:
-  These projects inspired Pangolin’s centralized authentication system for proxies, enabling robust user and role management.
+## Project Development / Roadmap
+
+> [!NOTE]
+> Pangolin is under heavy development. The roadmap is subject to change as we fix bugs, add new features, and make improvements.
+
+View the [project board](https://github.com/orgs/fosrl/projects/1) for more detailed info.
 
 ## Licensing
 
-Pangolin is dual licensed under the AGPLv3 and the Fossorial Commercial license. For inquiries about commercial licensing, please contact us.
+Pangolin is dual licensed under the AGPL-3 and the Fossorial Commercial license. Please see the [LICENSE](./LICENSE) file in the repository for details. For inquiries about commercial licensing, please contact us at [numbat@fossorial.io](mailto:numbat@fossorial.io).
 
 ## Contributions
 
-Please see [CONTRIBUTIONS](./CONTRIBUTING.md) in the repository for guidelines and best practices.
+Please see [CONTRIBUTING](./CONTRIBUTING.md) in the repository for guidelines and best practices.
+
+Please post bug reports and other functional issues in the [Issues](https://github.com/fosrl/pangolin/issues) section of the repository.
+For all feature requests, or other ideas, please use the [Discussions](https://github.com/orgs/fosrl/discussions) section.
```
SECURITY.md (new file, +14)

```markdown
# Security Policy

If you discover a security vulnerability, please follow the steps below to responsibly disclose it to us:

1. **Do not create a public GitHub issue or discussion post.** This could put the security of other users at risk.
2. Send a detailed report to [security@fossorial.io](mailto:security@fossorial.io) or send a **private** message to a maintainer on [Discord](https://discord.gg/HCJR8Xhme4). Include:

    - Description and location of the vulnerability.
    - Potential impact of the vulnerability.
    - Steps to reproduce the vulnerability.
    - Potential solutions to fix the vulnerability.
    - Your name/handle and a link for recognition (optional).

We aim to address the issue as soon as possible.
```
```diff
@@ -12,7 +12,7 @@ post {
 
 body:json {
   {
-    "email": "owen@fossorial.io",
+    "email": "admin@fosrl.io",
     "password": "Password123!"
   }
 }
```
bruno/Users/adminListUsers.bru (new file, +11)

```
meta {
  name: adminListUsers
  type: http
  seq: 2
}

get {
  url: http://localhost:3000/api/v1/users
  body: none
  auth: none
}
```
bruno/Users/adminRemoveUser.bru (new file, +11)

```
meta {
  name: adminRemoveUser
  type: http
  seq: 3
}

get {
  url: http://localhost:3000/api/v1/user/ky5r7ivqs8wc7u4
  body: none
  auth: none
}
```
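These Bruno requests map onto plain HTTP calls against the integration API. A hedged curl equivalent is shown below; the base URL, methods, and routes are exactly what the .bru files above declare, the user ID is the example value from the file, and a real deployment will additionally require authentication, which the .bru files leave as `auth: none`:

```sh
# List users (adminListUsers)
curl -X GET http://localhost:3000/api/v1/users

# Remove a user by ID (adminRemoveUser); the ID is the placeholder from the example
curl -X DELETE http://localhost:3000/api/v1/user/ky5r7ivqs8wc7u4
```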
cli/commands/setAdminCredentials.ts (new file, +141)

```ts
import { CommandModule } from "yargs";
import { hashPassword, verifyPassword } from "@server/auth/password";
import { db, resourceSessions, sessions } from "@server/db";
import { users } from "@server/db";
import { eq, inArray } from "drizzle-orm";
import moment from "moment";
import { fromError } from "zod-validation-error";
import { passwordSchema } from "@server/auth/passwordSchema";
import { UserType } from "@server/types/UserTypes";
import { generateRandomString, RandomReader } from "@oslojs/crypto/random";

type SetAdminCredentialsArgs = {
    email: string;
    password: string;
};

export const setAdminCredentials: CommandModule<{}, SetAdminCredentialsArgs> = {
    command: "set-admin-credentials",
    describe: "Set the server admin credentials",
    builder: (yargs) => {
        return yargs
            .option("email", {
                type: "string",
                demandOption: true,
                describe: "Admin email address"
            })
            .option("password", {
                type: "string",
                demandOption: true,
                describe: "Admin password"
            });
    },
    handler: async (argv: { email: string; password: string }) => {
        try {
            const { email, password } = argv;

            const parsed = passwordSchema.safeParse(password);

            if (!parsed.success) {
                throw Error(
                    `Invalid server admin password: ${fromError(parsed.error).toString()}`
                );
            }

            const passwordHash = await hashPassword(password);

            await db.transaction(async (trx) => {
                try {
                    const [existing] = await trx
                        .select()
                        .from(users)
                        .where(eq(users.serverAdmin, true));

                    if (existing) {
                        const passwordChanged = !(await verifyPassword(
                            password,
                            existing.passwordHash!
                        ));

                        if (passwordChanged) {
                            await trx
                                .update(users)
                                .set({ passwordHash })
                                .where(eq(users.userId, existing.userId));

                            await invalidateAllSessions(existing.userId);
                            console.log("Server admin password updated");
                        }

                        if (existing.email !== email) {
                            await trx
                                .update(users)
                                .set({ email, username: email })
                                .where(eq(users.userId, existing.userId));

                            console.log("Server admin email updated");
                        }
                    } else {
                        const userId = generateId(15);

                        await trx.update(users).set({ serverAdmin: false });

                        await db.insert(users).values({
                            userId: userId,
                            email: email,
                            type: UserType.Internal,
                            username: email,
                            passwordHash,
                            dateCreated: moment().toISOString(),
                            serverAdmin: true,
                            emailVerified: true
                        });

                        console.log("Server admin created");
                    }
                } catch (e) {
                    console.error("Failed to set admin credentials", e);
                    trx.rollback();
                    throw e;
                }
            });

            console.log("Admin credentials updated successfully");
            process.exit(0);
        } catch (error) {
            console.error("Error:", error);
            process.exit(1);
        }
    }
};

export async function invalidateAllSessions(userId: string): Promise<void> {
    try {
        await db.transaction(async (trx) => {
            const userSessions = await trx
                .select()
                .from(sessions)
                .where(eq(sessions.userId, userId));
            await trx.delete(resourceSessions).where(
                inArray(
                    resourceSessions.userSessionId,
                    userSessions.map((s) => s.sessionId)
                )
            );
            await trx.delete(sessions).where(eq(sessions.userId, userId));
        });
    } catch (e) {
        console.log("Failed to all invalidate user sessions", e);
    }
}

const random: RandomReader = {
    read(bytes: Uint8Array): void {
        crypto.getRandomValues(bytes);
    }
};

export function generateId(length: number): string {
    const alphabet = "abcdefghijklmnopqrstuvwxyz0123456789";
    return generateRandomString(random, alphabet, length);
}
```
cli/index.ts (new file, +11)

```ts
#!/usr/bin/env node

import yargs from "yargs";
import { hideBin } from "yargs/helpers";
import { setAdminCredentials } from "@cli/commands/setAdminCredentials";

yargs(hideBin(process.argv))
    .scriptName("pangctl")
    .command(setAdminCredentials)
    .demandCommand()
    .help().argv;
```
cli/wrapper.sh (new file, +3)

```sh
#!/bin/sh
cd /app/
./dist/cli.mjs "$@"
```
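Taken together, `cli/index.ts`, the Dockerfile changes, and this wrapper expose the CLI inside the container as `pangctl`. A sketch of the intended use against a running container; the container name `pangolin` matches the compose file, and the credential values are placeholders:

```sh
docker exec -it pangolin pangctl set-admin-credentials \
    --email "admin@example.com" \
    --password "NewSecurePassword123!"
```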
```diff
@@ -1,49 +0,0 @@
-app:
-    base_url: https://proxy.example.com
-    log_level: info
-    save_logs: false
-
-server:
-    external_port: 3000
-    internal_port: 3001
-    next_port: 3002
-    internal_hostname: pangolin
-    secure_cookies: false
-    session_cookie_name: p_session
-    resource_session_cookie_name: p_resource_session
-
-traefik:
-    cert_resolver: letsencrypt
-    http_entrypoint: web
-    https_entrypoint: websecure
-    prefer_wildcard_cert: true
-
-gerbil:
-    start_port: 51820
-    base_endpoint: proxy.example.com
-    use_subdomain: false
-    block_size: 16
-    subnet_group: 10.0.0.0/8
-
-rate_limits:
-    global:
-        window_minutes: 1
-        max_requests: 100
-
-email:
-    smtp_host: host.hoster.net
-    smtp_port: 587
-    smtp_user: no-reply@example.com
-    smtp_pass: aaaaaaaaaaaaaaaaaa
-    no_reply: no-reply@example.com
-
-users:
-    server_admin:
-        email: admin@example.com
-        password: Password123!
-
-flags:
-    require_email_verification: true
-    disable_signup_without_invite: true
-    disable_user_create_org: true
```
config/.gitkeep (new empty file)
config/config.example.yml (new file, +49)

```yaml
# To see all available options, please visit the docs:
# https://docs.fossorial.io/Pangolin/Configuration/config

app:
    dashboard_url: "http://localhost:3002"
    log_level: "info"
    save_logs: false

domains:
    domain1:
        base_domain: "example.com"
        cert_resolver: "letsencrypt"

server:
    external_port: 3000
    internal_port: 3001
    next_port: 3002
    internal_hostname: "pangolin"
    session_cookie_name: "p_session_token"
    resource_access_token_param: "p_token"
    secret: "your_secret_key_here"
    resource_access_token_headers:
        id: "P-Access-Token-Id"
        token: "P-Access-Token"
    resource_session_request_param: "p_session_request"

traefik:
    http_entrypoint: "web"
    https_entrypoint: "websecure"

gerbil:
    start_port: 51820
    base_endpoint: "localhost"
    block_size: 24
    site_block_size: 30
    subnet_group: 100.89.137.0/20
    use_subdomain: true

rate_limits:
    global:
        window_minutes: 1
        max_requests: 500

flags:
    require_email_verification: false
    disable_signup_without_invite: true
    disable_user_create_org: true
    allow_raw_resources: true
    allow_base_domain_resources: true
```
0  config/db/.gitkeep  (Normal file)

0  config/logs/.gitkeep  (Normal file)

3  crowdin.yml  (Normal file)
@@ -0,0 +1,3 @@
files:
    - source: /messages/en-US.json
      translation: /messages/%locale%.json
@@ -1,23 +1,19 @@
-version: "3.7"
+name: pangolin

 services:
     pangolin:
-        image: fosrl/pangolin:1.0.0-beta.1
+        image: fosrl/pangolin:latest
         container_name: pangolin
         restart: unless-stopped
-        ports:
-            - 3001:3001
-            - 3000:3000
         volumes:
             - ./config:/app/config
         healthcheck:
             test: ["CMD", "curl", "-f", "http://localhost:3001/api/v1/"]
             interval: "3s"
             timeout: "3s"
-            retries: 5
+            retries: 15

     gerbil:
-        image: fosrl/gerbil:1.0.0-beta.1
+        image: fosrl/gerbil:latest
         container_name: gerbil
         restart: unless-stopped
         depends_on:
@@ -35,12 +31,11 @@ services:
             - SYS_MODULE
         ports:
             - 51820:51820/udp
-            - 8080:8080 # Port for traefik because of the network_mode
             - 443:443 # Port for traefik because of the network_mode
             - 80:80 # Port for traefik because of the network_mode

     traefik:
-        image: traefik:v3.1
+        image: traefik:v3.4.0
         container_name: traefik
         restart: unless-stopped
         network_mode: service:gerbil # Ports appear on the gerbil service
@@ -50,5 +45,10 @@ services:
         command:
             - --configFile=/etc/traefik/traefik_config.yml
         volumes:
-            - ./traefik:/etc/traefik:ro # Volume to store the Traefik configuration
-            - ./letsencrypt:/letsencrypt # Volume to store the Let's Encrypt certificates
+            - ./config/traefik:/etc/traefik:ro # Volume to store the Traefik configuration
+            - ./config/letsencrypt:/letsencrypt # Volume to store the Let's Encrypt certificates
+
+networks:
+    default:
+        driver: bridge
+        name: pangolin
12  drizzle.pg.config.ts  (Normal file)
@@ -0,0 +1,12 @@
import { defineConfig } from "drizzle-kit";
import path from "path";

export default defineConfig({
    dialect: "postgresql",
    schema: path.join("server", "db", "pg", "schema.ts"),
    out: path.join("server", "migrations"),
    verbose: true,
    dbCredentials: {
        url: process.env.DATABASE_URL as string
    }
});
@@ -4,10 +4,10 @@ import path from "path";

 export default defineConfig({
     dialect: "sqlite",
-    schema: path.join("server", "db", "schema.ts"),
+    schema: path.join("server", "db", "sqlite", "schema.ts"),
     out: path.join("server", "migrations"),
     verbose: true,
     dbCredentials: {
-        url: path.join(APP_PATH, "db", "db.sqlite"),
-    },
+        url: path.join(APP_PATH, "db", "db.sqlite")
+    }
 });
@@ -52,6 +52,7 @@ esbuild
         bundle: true,
         outfile: argv.out,
         format: "esm",
+        minify: true,
         banner: {
             js: banner,
         },
19  eslint.config.js  (Normal file)
@@ -0,0 +1,19 @@
import tseslint from 'typescript-eslint';

export default tseslint.config({
    files: ["**/*.{ts,tsx,js,jsx}"],
    languageOptions: {
        parser: tseslint.parser,
        parserOptions: {
            ecmaVersion: "latest",
            sourceType: "module",
            ecmaFeatures: {
                jsx: true
            }
        }
    },
    rules: {
        "semi": "error",
        "prefer-const": "warn"
    }
});
@@ -1,8 +1,24 @@
-all: build
+all: update-versions go-build-release put-back

-build:
-	CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o installer
+go-build-release:
+	CGO_ENABLED=0 GOOS=linux GOARCH=amd64 go build -o bin/installer_linux_amd64
+	CGO_ENABLED=0 GOOS=linux GOARCH=arm64 go build -o bin/installer_linux_arm64

 clean:
-	rm installer
+	rm -f bin/installer_linux_amd64
+	rm -f bin/installer_linux_arm64
+
+update-versions:
+	@echo "Fetching latest versions..."
+	cp main.go main.go.bak && \
+	PANGOLIN_VERSION=$$(curl -s https://api.github.com/repos/fosrl/pangolin/tags | jq -r '.[0].name') && \
+	GERBIL_VERSION=$$(curl -s https://api.github.com/repos/fosrl/gerbil/tags | jq -r '.[0].name') && \
+	BADGER_VERSION=$$(curl -s https://api.github.com/repos/fosrl/badger/tags | jq -r '.[0].name') && \
+	echo "Latest versions - Pangolin: $$PANGOLIN_VERSION, Gerbil: $$GERBIL_VERSION, Badger: $$BADGER_VERSION" && \
+	sed -i "s/config.PangolinVersion = \".*\"/config.PangolinVersion = \"$$PANGOLIN_VERSION\"/" main.go && \
+	sed -i "s/config.GerbilVersion = \".*\"/config.GerbilVersion = \"$$GERBIL_VERSION\"/" main.go && \
+	sed -i "s/config.BadgerVersion = \".*\"/config.BadgerVersion = \"$$BADGER_VERSION\"/" main.go && \
+	echo "Updated main.go with latest versions"
+
+put-back:
+	mv main.go.bak main.go
353
install/config.go
Normal file
@@ -0,0 +1,353 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
)
|
||||||
|
|
||||||
|
// TraefikConfig represents the structure of the main Traefik configuration
|
||||||
|
type TraefikConfig struct {
|
||||||
|
Experimental struct {
|
||||||
|
Plugins struct {
|
||||||
|
Badger struct {
|
||||||
|
Version string `yaml:"version"`
|
||||||
|
} `yaml:"badger"`
|
||||||
|
} `yaml:"plugins"`
|
||||||
|
} `yaml:"experimental"`
|
||||||
|
CertificatesResolvers struct {
|
||||||
|
LetsEncrypt struct {
|
||||||
|
Acme struct {
|
||||||
|
Email string `yaml:"email"`
|
||||||
|
} `yaml:"acme"`
|
||||||
|
} `yaml:"letsencrypt"`
|
||||||
|
} `yaml:"certificatesResolvers"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// DynamicConfig represents the structure of the dynamic configuration
|
||||||
|
type DynamicConfig struct {
|
||||||
|
HTTP struct {
|
||||||
|
Routers map[string]struct {
|
||||||
|
Rule string `yaml:"rule"`
|
||||||
|
} `yaml:"routers"`
|
||||||
|
} `yaml:"http"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ConfigValues holds the extracted configuration values
|
||||||
|
type ConfigValues struct {
|
||||||
|
DashboardDomain string
|
||||||
|
LetsEncryptEmail string
|
||||||
|
BadgerVersion string
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadTraefikConfig reads and extracts values from Traefik configuration files
|
||||||
|
func ReadTraefikConfig(mainConfigPath, dynamicConfigPath string) (*ConfigValues, error) {
|
||||||
|
// Read main config file
|
||||||
|
mainConfigData, err := os.ReadFile(mainConfigPath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("error reading main config file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var mainConfig TraefikConfig
|
||||||
|
if err := yaml.Unmarshal(mainConfigData, &mainConfig); err != nil {
|
||||||
|
return nil, fmt.Errorf("error parsing main config file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read dynamic config file
|
||||||
|
dynamicConfigData, err := os.ReadFile(dynamicConfigPath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("error reading dynamic config file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
var dynamicConfig DynamicConfig
|
||||||
|
if err := yaml.Unmarshal(dynamicConfigData, &dynamicConfig); err != nil {
|
||||||
|
return nil, fmt.Errorf("error parsing dynamic config file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract values
|
||||||
|
values := &ConfigValues{
|
||||||
|
BadgerVersion: mainConfig.Experimental.Plugins.Badger.Version,
|
||||||
|
LetsEncryptEmail: mainConfig.CertificatesResolvers.LetsEncrypt.Acme.Email,
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract DashboardDomain from router rules
|
||||||
|
// Look for it in the main router rules
|
||||||
|
for _, router := range dynamicConfig.HTTP.Routers {
|
||||||
|
if router.Rule != "" {
|
||||||
|
// Extract domain from Host(`mydomain.com`)
|
||||||
|
if domain := extractDomainFromRule(router.Rule); domain != "" {
|
||||||
|
values.DashboardDomain = domain
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return values, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// extractDomainFromRule extracts the domain from a router rule
|
||||||
|
func extractDomainFromRule(rule string) string {
|
||||||
|
// Look for the Host(`mydomain.com`) pattern
|
||||||
|
if start := findPattern(rule, "Host(`"); start != -1 {
|
||||||
|
end := findPattern(rule[start:], "`)")
|
||||||
|
if end != -1 {
|
||||||
|
return rule[start+6 : start+end]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// findPattern finds the start of a pattern in a string
|
||||||
|
func findPattern(s, pattern string) int {
|
||||||
|
return bytes.Index([]byte(s), []byte(pattern))
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyDockerService(sourceFile, destFile, serviceName string) error {
|
||||||
|
// Read source file
|
||||||
|
sourceData, err := os.ReadFile(sourceFile)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error reading source file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read destination file
|
||||||
|
destData, err := os.ReadFile(destFile)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error reading destination file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse source Docker Compose YAML
|
||||||
|
var sourceCompose map[string]interface{}
|
||||||
|
if err := yaml.Unmarshal(sourceData, &sourceCompose); err != nil {
|
||||||
|
return fmt.Errorf("error parsing source Docker Compose file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse destination Docker Compose YAML
|
||||||
|
var destCompose map[string]interface{}
|
||||||
|
if err := yaml.Unmarshal(destData, &destCompose); err != nil {
|
||||||
|
return fmt.Errorf("error parsing destination Docker Compose file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get services section from source
|
||||||
|
sourceServices, ok := sourceCompose["services"].(map[string]interface{})
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("services section not found in source file or has invalid format")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the specific service configuration
|
||||||
|
serviceConfig, ok := sourceServices[serviceName]
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("service '%s' not found in source file", serviceName)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get or create services section in destination
|
||||||
|
destServices, ok := destCompose["services"].(map[string]interface{})
|
||||||
|
if !ok {
|
||||||
|
// If services section doesn't exist, create it
|
||||||
|
destServices = make(map[string]interface{})
|
||||||
|
destCompose["services"] = destServices
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update service in destination
|
||||||
|
destServices[serviceName] = serviceConfig
|
||||||
|
|
||||||
|
// Marshal updated destination YAML
|
||||||
|
// Use yaml.v3 encoder to preserve formatting and comments
|
||||||
|
// updatedData, err := yaml.Marshal(destCompose)
|
||||||
|
updatedData, err := MarshalYAMLWithIndent(destCompose, 2)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error marshaling updated Docker Compose file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write updated YAML back to destination file
|
||||||
|
if err := os.WriteFile(destFile, updatedData, 0644); err != nil {
|
||||||
|
return fmt.Errorf("error writing to destination file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func backupConfig() error {
|
||||||
|
// Backup docker-compose.yml
|
||||||
|
if _, err := os.Stat("docker-compose.yml"); err == nil {
|
||||||
|
if err := copyFile("docker-compose.yml", "docker-compose.yml.backup"); err != nil {
|
||||||
|
return fmt.Errorf("failed to backup docker-compose.yml: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Backup config directory
|
||||||
|
if _, err := os.Stat("config"); err == nil {
|
||||||
|
cmd := exec.Command("tar", "-czvf", "config.tar.gz", "config")
|
||||||
|
if err := cmd.Run(); err != nil {
|
||||||
|
return fmt.Errorf("failed to backup config directory: %v", err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func MarshalYAMLWithIndent(data interface{}, indent int) ([]byte, error) {
|
||||||
|
buffer := new(bytes.Buffer)
|
||||||
|
encoder := yaml.NewEncoder(buffer)
|
||||||
|
encoder.SetIndent(indent)
|
||||||
|
|
||||||
|
err := encoder.Encode(data)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
defer encoder.Close()
|
||||||
|
return buffer.Bytes(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func replaceInFile(filepath, oldStr, newStr string) error {
|
||||||
|
// Read the file content
|
||||||
|
content, err := os.ReadFile(filepath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error reading file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Replace the string
|
||||||
|
newContent := strings.Replace(string(content), oldStr, newStr, -1)
|
||||||
|
|
||||||
|
// Write the modified content back to the file
|
||||||
|
err = os.WriteFile(filepath, []byte(newContent), 0644)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error writing file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func CheckAndAddTraefikLogVolume(composePath string) error {
|
||||||
|
// Read the docker-compose.yml file
|
||||||
|
data, err := os.ReadFile(composePath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error reading compose file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse YAML into a generic map
|
||||||
|
var compose map[string]interface{}
|
||||||
|
if err := yaml.Unmarshal(data, &compose); err != nil {
|
||||||
|
return fmt.Errorf("error parsing compose file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get services section
|
||||||
|
services, ok := compose["services"].(map[string]interface{})
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("services section not found or invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get traefik service
|
||||||
|
traefik, ok := services["traefik"].(map[string]interface{})
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("traefik service not found or invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check volumes
|
||||||
|
logVolume := "./config/traefik/logs:/var/log/traefik"
|
||||||
|
var volumes []interface{}
|
||||||
|
|
||||||
|
if existingVolumes, ok := traefik["volumes"].([]interface{}); ok {
|
||||||
|
// Check if volume already exists
|
||||||
|
for _, v := range existingVolumes {
|
||||||
|
if v.(string) == logVolume {
|
||||||
|
fmt.Println("Traefik log volume is already configured")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
volumes = existingVolumes
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add new volume
|
||||||
|
volumes = append(volumes, logVolume)
|
||||||
|
traefik["volumes"] = volumes
|
||||||
|
|
||||||
|
// Write updated config back to file
|
||||||
|
newData, err := MarshalYAMLWithIndent(compose, 2)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error marshaling updated compose file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := os.WriteFile(composePath, newData, 0644); err != nil {
|
||||||
|
return fmt.Errorf("error writing updated compose file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("Added traefik log volume and created logs directory")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// MergeYAML merges two YAML files, where the contents of the second file
|
||||||
|
// are merged into the first file. In case of conflicts, values from the
|
||||||
|
// second file take precedence.
|
||||||
|
func MergeYAML(baseFile, overlayFile string) error {
|
||||||
|
// Read the base YAML file
|
||||||
|
baseContent, err := os.ReadFile(baseFile)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error reading base file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Read the overlay YAML file
|
||||||
|
overlayContent, err := os.ReadFile(overlayFile)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error reading overlay file: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse base YAML into a map
|
||||||
|
var baseMap map[string]interface{}
|
||||||
|
if err := yaml.Unmarshal(baseContent, &baseMap); err != nil {
|
||||||
|
return fmt.Errorf("error parsing base YAML: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse overlay YAML into a map
|
||||||
|
var overlayMap map[string]interface{}
|
||||||
|
if err := yaml.Unmarshal(overlayContent, &overlayMap); err != nil {
|
||||||
|
return fmt.Errorf("error parsing overlay YAML: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge the overlay into the base
|
||||||
|
merged := mergeMap(baseMap, overlayMap)
|
||||||
|
|
||||||
|
// Marshal the merged result back to YAML
|
||||||
|
mergedContent, err := MarshalYAMLWithIndent(merged, 2)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error marshaling merged YAML: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write the merged content back to the base file
|
||||||
|
if err := os.WriteFile(baseFile, mergedContent, 0644); err != nil {
|
||||||
|
return fmt.Errorf("error writing merged YAML: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// mergeMap recursively merges two maps
|
||||||
|
func mergeMap(base, overlay map[string]interface{}) map[string]interface{} {
|
||||||
|
result := make(map[string]interface{})
|
||||||
|
|
||||||
|
// Copy all key-values from base map
|
||||||
|
for k, v := range base {
|
||||||
|
result[k] = v
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge overlay values
|
||||||
|
for k, v := range overlay {
|
||||||
|
// If both maps have the same key and both values are maps, merge recursively
|
||||||
|
if baseVal, ok := base[k]; ok {
|
||||||
|
if baseMap, isBaseMap := baseVal.(map[string]interface{}); isBaseMap {
|
||||||
|
if overlayMap, isOverlayMap := v.(map[string]interface{}); isOverlayMap {
|
||||||
|
result[k] = mergeMap(baseMap, overlayMap)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Otherwise, overlay value takes precedence
|
||||||
|
result[k] = v
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
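A minimal, self-contained sketch (not part of the diff) of how the mergeMap helper in install/config.go above combines two parsed YAML documents; the two sample documents and the standalone program wrapper are invented for illustration:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// mergeMap copies the recursive merge logic shown in install/config.go:
// overlay values win, and nested maps are merged key by key.
func mergeMap(base, overlay map[string]interface{}) map[string]interface{} {
	result := make(map[string]interface{})
	for k, v := range base {
		result[k] = v
	}
	for k, v := range overlay {
		if baseVal, ok := base[k]; ok {
			if bm, isBaseMap := baseVal.(map[string]interface{}); isBaseMap {
				if om, isOverlayMap := v.(map[string]interface{}); isOverlayMap {
					result[k] = mergeMap(bm, om)
					continue
				}
			}
		}
		result[k] = v
	}
	return result
}

func main() {
	// Two tiny documents standing in for traefik_config.yml and the CrowdSec
	// overlay; the real installer reads both from disk via MergeYAML.
	var base, overlay map[string]interface{}
	_ = yaml.Unmarshal([]byte("log:\n  level: INFO\nentryPoints:\n  web:\n    address: \":80\"\n"), &base)
	_ = yaml.Unmarshal([]byte("log:\n  format: json\n"), &overlay)

	merged := mergeMap(base, overlay)
	out, _ := yaml.Marshal(merged)
	fmt.Println(string(out)) // log keeps level and gains format; entryPoints is preserved
}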
39  install/config/config.yml  (Normal file)
@@ -0,0 +1,39 @@
# To see all available options, please visit the docs:
# https://docs.fossorial.io/Pangolin/Configuration/config

app:
    dashboard_url: "https://{{.DashboardDomain}}"
    log_level: "info"

domains:
    domain1:
        base_domain: "{{.BaseDomain}}"
        cert_resolver: "letsencrypt"

server:
    secret: "{{.Secret}}"
    cors:
        origins: ["https://{{.DashboardDomain}}"]
        methods: ["GET", "POST", "PUT", "DELETE", "PATCH"]
        allowed_headers: ["X-CSRF-Token", "Content-Type"]
        credentials: false

gerbil:
    start_port: 51820
    base_endpoint: "{{.DashboardDomain}}"

{{if .EnableEmail}}
email:
    smtp_host: "{{.EmailSMTPHost}}"
    smtp_port: {{.EmailSMTPPort}}
    smtp_user: "{{.EmailSMTPUser}}"
    smtp_pass: "{{.EmailSMTPPass}}"
    no_reply: "{{.EmailNoReply}}"
{{end}}

flags:
    require_email_verification: {{.EnableEmail}}
    disable_signup_without_invite: true
    disable_user_create_org: false
    allow_raw_resources: true
    allow_base_domain_resources: true
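The config above is a Go text/template (install/main.go imports text/template and embeds config/*); the following hedged sketch shows how such a fragment could be rendered from the installer's Config values. The Conf type, the trimmed-down template, and all sample values are illustrative only, not the installer's actual rendering code:

package main

import (
	"os"
	"text/template"
)

// Conf mirrors a few fields of the installer's Config struct; only the
// fields referenced by the template fragment below are included.
type Conf struct {
	DashboardDomain string
	BaseDomain      string
	Secret          string
	EnableEmail     bool
}

func main() {
	// A fragment of install/config/config.yml; the full template also
	// covers cors, gerbil, email, and flags sections.
	const tmpl = `app:
    dashboard_url: "https://{{.DashboardDomain}}"

domains:
    domain1:
        base_domain: "{{.BaseDomain}}"

server:
    secret: "{{.Secret}}"

flags:
    require_email_verification: {{.EnableEmail}}
`
	t := template.Must(template.New("config").Parse(tmpl))
	// Placeholder values; the real installer collects these interactively.
	_ = t.Execute(os.Stdout, Conf{
		DashboardDomain: "pangolin.example.com",
		BaseDomain:      "example.com",
		Secret:          "generated-at-install-time",
		EnableEmail:     false,
	})
}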
6  install/config/crowdsec/acquis.d/appsec.yaml  (Normal file)
@@ -0,0 +1,6 @@
listen_addr: 0.0.0.0:7422
appsec_config: crowdsecurity/appsec-default
name: myAppSecComponent
source: appsec
labels:
    type: appsec

5  install/config/crowdsec/acquis.d/traefik.yaml  (Normal file)
@@ -0,0 +1,5 @@
poll_without_inotify: false
filenames:
    - /var/log/traefik/*.log
labels:
    type: traefik

27  install/config/crowdsec/docker-compose.yml  (Normal file)
@@ -0,0 +1,27 @@
services:
    crowdsec:
        image: crowdsecurity/crowdsec:latest
        container_name: crowdsec
        environment:
            GID: "1000"
            COLLECTIONS: crowdsecurity/traefik crowdsecurity/appsec-virtual-patching crowdsecurity/appsec-generic-rules
            ENROLL_INSTANCE_NAME: "pangolin-crowdsec"
            PARSERS: crowdsecurity/whitelists
            ENROLL_TAGS: docker
        healthcheck:
            interval: 10s
            retries: 15
            timeout: 10s
            test: ["CMD", "cscli", "capi", "status"]
        labels:
            - "traefik.enable=false" # Disable traefik for crowdsec
        volumes:
            # crowdsec container data
            - ./config/crowdsec:/etc/crowdsec # crowdsec config
            - ./config/crowdsec/db:/var/lib/crowdsec/data # crowdsec db
            # log bind mounts into crowdsec
            - ./config/traefik/logs:/var/log/traefik # traefik logs
        ports:
            - 6060:6060 # metrics endpoint for prometheus
        restart: unless-stopped
        command: -t # Add test config flag to verify configuration
109
install/config/crowdsec/dynamic_config.yml
Normal file
@@ -0,0 +1,109 @@
|
|||||||
|
http:
|
||||||
|
middlewares:
|
||||||
|
redirect-to-https:
|
||||||
|
redirectScheme:
|
||||||
|
scheme: https
|
||||||
|
default-whitelist: # Whitelist middleware for internal IPs
|
||||||
|
ipWhiteList: # Internal IP addresses
|
||||||
|
sourceRange: # Internal IP addresses
|
||||||
|
- "10.0.0.0/8" # Internal IP addresses
|
||||||
|
- "192.168.0.0/16" # Internal IP addresses
|
||||||
|
- "172.16.0.0/12" # Internal IP addresses
|
||||||
|
# Basic security headers
|
||||||
|
security-headers:
|
||||||
|
headers:
|
||||||
|
customResponseHeaders: # Custom response headers
|
||||||
|
Server: "" # Remove server header
|
||||||
|
X-Powered-By: "" # Remove powered by header
|
||||||
|
X-Forwarded-Proto: "https" # Set forwarded proto to https
|
||||||
|
sslProxyHeaders: # SSL proxy headers
|
||||||
|
X-Forwarded-Proto: "https" # Set forwarded proto to https
|
||||||
|
hostsProxyHeaders: # Hosts proxy headers
|
||||||
|
- "X-Forwarded-Host" # Set forwarded host
|
||||||
|
contentTypeNosniff: true # Prevent MIME sniffing
|
||||||
|
customFrameOptionsValue: "SAMEORIGIN" # Set frame options
|
||||||
|
referrerPolicy: "strict-origin-when-cross-origin" # Set referrer policy
|
||||||
|
forceSTSHeader: true # Force STS header
|
||||||
|
stsIncludeSubdomains: true # Include subdomains
|
||||||
|
stsSeconds: 63072000 # STS seconds
|
||||||
|
stsPreload: true # Preload STS
|
||||||
|
# CrowdSec configuration with proper IP forwarding
|
||||||
|
crowdsec:
|
||||||
|
plugin:
|
||||||
|
crowdsec:
|
||||||
|
enabled: true # Enable CrowdSec plugin
|
||||||
|
logLevel: INFO # Log level
|
||||||
|
updateIntervalSeconds: 15 # Update interval
|
||||||
|
updateMaxFailure: 0 # Update max failure
|
||||||
|
defaultDecisionSeconds: 15 # Default decision seconds
|
||||||
|
httpTimeoutSeconds: 10 # HTTP timeout
|
||||||
|
crowdsecMode: live # CrowdSec mode
|
||||||
|
crowdsecAppsecEnabled: true # Enable AppSec
|
||||||
|
crowdsecAppsecHost: crowdsec:7422 # CrowdSec IP address which you noted down later
|
||||||
|
crowdsecAppsecFailureBlock: true # Block on failure
|
||||||
|
crowdsecAppsecUnreachableBlock: true # Block on unreachable
|
||||||
|
crowdsecAppsecBodyLimit: 10485760
|
||||||
|
crowdsecLapiKey: "PUT_YOUR_BOUNCER_KEY_HERE_OR_IT_WILL_NOT_WORK" # CrowdSec API key which you noted down later
|
||||||
|
crowdsecLapiHost: crowdsec:8080 # CrowdSec
|
||||||
|
crowdsecLapiScheme: http # CrowdSec API scheme
|
||||||
|
forwardedHeadersTrustedIPs: # Forwarded headers trusted IPs
|
||||||
|
- "0.0.0.0/0" # All IP addresses are trusted for forwarded headers (CHANGE MADE HERE)
|
||||||
|
clientTrustedIPs: # Client trusted IPs (CHANGE MADE HERE)
|
||||||
|
- "10.0.0.0/8" # Internal LAN IP addresses
|
||||||
|
- "172.16.0.0/12" # Internal LAN IP addresses
|
||||||
|
- "192.168.0.0/16" # Internal LAN IP addresses
|
||||||
|
- "100.89.137.0/20" # Internal LAN IP addresses
|
||||||
|
|
||||||
|
routers:
|
||||||
|
# HTTP to HTTPS redirect router
|
||||||
|
main-app-router-redirect:
|
||||||
|
rule: "Host(`{{.DashboardDomain}}`)" # Dynamic Domain Name
|
||||||
|
service: next-service
|
||||||
|
entryPoints:
|
||||||
|
- web
|
||||||
|
middlewares:
|
||||||
|
- redirect-to-https
|
||||||
|
|
||||||
|
# Next.js router (handles everything except API and WebSocket paths)
|
||||||
|
next-router:
|
||||||
|
rule: "Host(`{{.DashboardDomain}}`) && !PathPrefix(`/api/v1`)" # Dynamic Domain Name
|
||||||
|
service: next-service
|
||||||
|
entryPoints:
|
||||||
|
- websecure
|
||||||
|
middlewares:
|
||||||
|
- security-headers # Add security headers middleware
|
||||||
|
tls:
|
||||||
|
certResolver: letsencrypt
|
||||||
|
|
||||||
|
# API router (handles /api/v1 paths)
|
||||||
|
api-router:
|
||||||
|
rule: "Host(`{{.DashboardDomain}}`) && PathPrefix(`/api/v1`)" # Dynamic Domain Name
|
||||||
|
service: api-service
|
||||||
|
entryPoints:
|
||||||
|
- websecure
|
||||||
|
middlewares:
|
||||||
|
- security-headers # Add security headers middleware
|
||||||
|
tls:
|
||||||
|
certResolver: letsencrypt
|
||||||
|
|
||||||
|
# WebSocket router
|
||||||
|
ws-router:
|
||||||
|
rule: "Host(`{{.DashboardDomain}}`)" # Dynamic Domain Name
|
||||||
|
service: api-service
|
||||||
|
entryPoints:
|
||||||
|
- websecure
|
||||||
|
middlewares:
|
||||||
|
- security-headers # Add security headers middleware
|
||||||
|
tls:
|
||||||
|
certResolver: letsencrypt
|
||||||
|
|
||||||
|
services:
|
||||||
|
next-service:
|
||||||
|
loadBalancer:
|
||||||
|
servers:
|
||||||
|
- url: "http://pangolin:3002" # Next.js server
|
||||||
|
|
||||||
|
api-service:
|
||||||
|
loadBalancer:
|
||||||
|
servers:
|
||||||
|
- url: "http://pangolin:3000" # API/WebSocket server
|
||||||
25  install/config/crowdsec/profiles.yaml  (Normal file)
@@ -0,0 +1,25 @@
name: captcha_remediation
filters:
    - Alert.Remediation == true && Alert.GetScope() == "Ip" && Alert.GetScenario() contains "http"
decisions:
    - type: captcha
      duration: 4h
on_success: break

---
name: default_ip_remediation
filters:
    - Alert.Remediation == true && Alert.GetScope() == "Ip"
decisions:
    - type: ban
      duration: 4h
on_success: break

---
name: default_range_remediation
filters:
    - Alert.Remediation == true && Alert.GetScope() == "Range"
decisions:
    - type: ban
      duration: 4h
on_success: break
91
install/config/crowdsec/traefik_config.yml
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
api:
|
||||||
|
insecure: true
|
||||||
|
dashboard: true
|
||||||
|
|
||||||
|
providers:
|
||||||
|
http:
|
||||||
|
endpoint: "http://pangolin:3001/api/v1/traefik-config"
|
||||||
|
pollInterval: "5s"
|
||||||
|
file:
|
||||||
|
filename: "/etc/traefik/dynamic_config.yml"
|
||||||
|
|
||||||
|
experimental:
|
||||||
|
plugins:
|
||||||
|
badger:
|
||||||
|
moduleName: "github.com/fosrl/badger"
|
||||||
|
version: "{{.BadgerVersion}}"
|
||||||
|
crowdsec: # CrowdSec plugin configuration added
|
||||||
|
moduleName: "github.com/maxlerebourg/crowdsec-bouncer-traefik-plugin"
|
||||||
|
version: "v1.4.2"
|
||||||
|
|
||||||
|
log:
|
||||||
|
level: "INFO"
|
||||||
|
format: "json" # Log format changed to json for better parsing
|
||||||
|
maxSize: 100
|
||||||
|
maxBackups: 3
|
||||||
|
maxAge: 3
|
||||||
|
compress: true
|
||||||
|
|
||||||
|
accessLog: # We enable access logs as json
|
||||||
|
filePath: "/var/log/traefik/access.log"
|
||||||
|
format: json
|
||||||
|
filters:
|
||||||
|
statusCodes:
|
||||||
|
- "200-299" # Success codes
|
||||||
|
- "400-499" # Client errors
|
||||||
|
- "500-599" # Server errors
|
||||||
|
retryAttempts: true
|
||||||
|
minDuration: "100ms" # Increased to focus on slower requests
|
||||||
|
bufferingSize: 100 # Add buffering for better performance
|
||||||
|
fields:
|
||||||
|
defaultMode: drop # Start with dropping all fields
|
||||||
|
names:
|
||||||
|
ClientAddr: keep # Keep client address for IP tracking
|
||||||
|
ClientHost: keep # Keep client host for IP tracking
|
||||||
|
RequestMethod: keep # Keep request method for tracking
|
||||||
|
RequestPath: keep # Keep request path for tracking
|
||||||
|
RequestProtocol: keep # Keep request protocol for tracking
|
||||||
|
DownstreamStatus: keep # Keep downstream status for tracking
|
||||||
|
DownstreamContentSize: keep # Keep downstream content size for tracking
|
||||||
|
Duration: keep # Keep request duration for tracking
|
||||||
|
ServiceName: keep # Keep service name for tracking
|
||||||
|
StartUTC: keep # Keep start time for tracking
|
||||||
|
TLSVersion: keep # Keep TLS version for tracking
|
||||||
|
TLSCipher: keep # Keep TLS cipher for tracking
|
||||||
|
RetryAttempts: keep # Keep retry attempts for tracking
|
||||||
|
headers:
|
||||||
|
defaultMode: drop # Start with dropping all headers
|
||||||
|
names:
|
||||||
|
User-Agent: keep # Keep user agent for tracking
|
||||||
|
X-Real-Ip: keep # Keep real IP for tracking
|
||||||
|
X-Forwarded-For: keep # Keep forwarded IP for tracking
|
||||||
|
X-Forwarded-Proto: keep # Keep forwarded protocol for tracking
|
||||||
|
Content-Type: keep # Keep content type for tracking
|
||||||
|
Authorization: redact # Redact sensitive information
|
||||||
|
Cookie: redact # Redact sensitive information
|
||||||
|
|
||||||
|
certificatesResolvers:
|
||||||
|
letsencrypt:
|
||||||
|
acme:
|
||||||
|
httpChallenge:
|
||||||
|
entryPoint: web
|
||||||
|
email: "{{.LetsEncryptEmail}}"
|
||||||
|
storage: "/letsencrypt/acme.json"
|
||||||
|
caServer: "https://acme-v02.api.letsencrypt.org/directory"
|
||||||
|
|
||||||
|
entryPoints:
|
||||||
|
web:
|
||||||
|
address: ":80"
|
||||||
|
websecure:
|
||||||
|
address: ":443"
|
||||||
|
transport:
|
||||||
|
respondingTimeouts:
|
||||||
|
readTimeout: "30m"
|
||||||
|
http:
|
||||||
|
tls:
|
||||||
|
certResolver: "letsencrypt"
|
||||||
|
middlewares:
|
||||||
|
- crowdsec@file
|
||||||
|
|
||||||
|
serversTransport:
|
||||||
|
insecureSkipVerify: true
|
||||||
@@ -1,21 +1,19 @@
+name: pangolin
 services:
     pangolin:
-        image: fosrl/pangolin:1.0.0-beta.1
+        image: fosrl/pangolin:{{.PangolinVersion}}
         container_name: pangolin
        restart: unless-stopped
-        ports:
-            - 3001:3001
-            - 3000:3000
         volumes:
             - ./config:/app/config
         healthcheck:
             test: ["CMD", "curl", "-f", "http://localhost:3001/api/v1/"]
-            interval: "3s"
-            timeout: "3s"
-            retries: 5
+            interval: "10s"
+            timeout: "10s"
+            retries: 15
+    {{if .InstallGerbil}}
     gerbil:
-        image: fosrl/gerbil:1.0.0-beta.1
+        image: fosrl/gerbil:{{.GerbilVersion}}
         container_name: gerbil
         restart: unless-stopped
         depends_on:
@@ -35,12 +33,18 @@ services:
             - 51820:51820/udp
             - 443:443 # Port for traefik because of the network_mode
             - 80:80 # Port for traefik because of the network_mode
+    {{end}}
     traefik:
-        image: traefik:v3.1
+        image: traefik:v3.4.1
         container_name: traefik
         restart: unless-stopped
+        {{if .InstallGerbil}}
         network_mode: service:gerbil # Ports appear on the gerbil service
+        {{end}}{{if not .InstallGerbil}}
+        ports:
+            - 443:443
+            - 80:80
+        {{end}}
         depends_on:
             pangolin:
                 condition: service_healthy
@@ -49,3 +53,9 @@ services:
         volumes:
             - ./config/traefik:/etc/traefik:ro # Volume to store the Traefik configuration
             - ./config/letsencrypt:/letsencrypt # Volume to store the Let's Encrypt certificates
+            - ./config/traefik/logs:/var/log/traefik # Volume to store Traefik logs
+
+networks:
+    default:
+        driver: bridge
+        name: pangolin
@@ -3,12 +3,11 @@ http:
         redirect-to-https:
             redirectScheme:
                 scheme: https
-                permanent: true

     routers:
         # HTTP to HTTPS redirect router
         main-app-router-redirect:
-            rule: "Host(`{{.Domain}}`)"
+            rule: "Host(`{{.DashboardDomain}}`)"
             service: next-service
             entryPoints:
                 - web
@@ -17,7 +16,7 @@ http:

         # Next.js router (handles everything except API and WebSocket paths)
         next-router:
-            rule: "Host(`{{.Domain}}`) && !PathPrefix(`/api/v1`)"
+            rule: "Host(`{{.DashboardDomain}}`) && !PathPrefix(`/api/v1`)"
             service: next-service
             entryPoints:
                 - websecure
@@ -26,7 +25,7 @@ http:

         # API router (handles /api/v1 paths)
         api-router:
-            rule: "Host(`{{.Domain}}`) && PathPrefix(`/api/v1`)"
+            rule: "Host(`{{.DashboardDomain}}`) && PathPrefix(`/api/v1`)"
             service: api-service
             entryPoints:
                 - websecure
@@ -35,7 +34,7 @@ http:

         # WebSocket router
         ws-router:
-            rule: "Host(`{{.Domain}}`)"
+            rule: "Host(`{{.DashboardDomain}}`)"
             service: api-service
             entryPoints:
                 - websecure
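The Host(`...`) rules above are what install/config.go's extractDomainFromRule parses when the installer auto-detects the dashboard domain; a small sketch mirroring that logic (using strings.Index in place of the bytes.Index wrapper in the source):

package main

import (
	"fmt"
	"strings"
)

// extractDomain mirrors extractDomainFromRule from install/config.go:
// it pulls the domain out of a Traefik rule such as Host(`pangolin.example.com`).
func extractDomain(rule string) string {
	if start := strings.Index(rule, "Host(`"); start != -1 {
		if end := strings.Index(rule[start:], "`)"); end != -1 {
			return rule[start+6 : start+end]
		}
	}
	return ""
}

func main() {
	rule := "Host(`pangolin.example.com`) && !PathPrefix(`/api/v1`)"
	fmt.Println(extractDomain(rule)) // prints: pangolin.example.com
}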
@@ -13,11 +13,15 @@ experimental:
     plugins:
         badger:
             moduleName: "github.com/fosrl/badger"
-            version: "v1.0.0-beta.1"
+            version: "{{.BadgerVersion}}"

 log:
     level: "INFO"
     format: "common"
+    maxSize: 100
+    maxBackups: 3
+    maxAge: 3
+    compress: true

 certificatesResolvers:
     letsencrypt:
@@ -33,6 +37,9 @@ entryPoints:
         address: ":80"
     websecure:
         address: ":443"
+        transport:
+            respondingTimeouts:
+                readTimeout: "30m"
         http:
             tls:
                 certResolver: "letsencrypt"
201
install/crowdsec.go
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"gopkg.in/yaml.v3"
|
||||||
|
)
|
||||||
|
|
||||||
|
func installCrowdsec(config Config) error {
|
||||||
|
|
||||||
|
if err := stopContainers(); err != nil {
|
||||||
|
return fmt.Errorf("failed to stop containers: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Run installation steps
|
||||||
|
if err := backupConfig(); err != nil {
|
||||||
|
return fmt.Errorf("backup failed: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := createConfigFiles(config); err != nil {
|
||||||
|
fmt.Printf("Error creating config files: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
os.MkdirAll("config/crowdsec/db", 0755)
|
||||||
|
os.MkdirAll("config/crowdsec/acquis.d", 0755)
|
||||||
|
os.MkdirAll("config/traefik/logs", 0755)
|
||||||
|
|
||||||
|
if err := copyDockerService("config/crowdsec/docker-compose.yml", "docker-compose.yml", "crowdsec"); err != nil {
|
||||||
|
fmt.Printf("Error copying docker service: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := MergeYAML("config/traefik/traefik_config.yml", "config/crowdsec/traefik_config.yml"); err != nil {
|
||||||
|
fmt.Printf("Error copying entry points: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
// delete the 2nd file
|
||||||
|
if err := os.Remove("config/crowdsec/traefik_config.yml"); err != nil {
|
||||||
|
fmt.Printf("Error removing file: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := MergeYAML("config/traefik/dynamic_config.yml", "config/crowdsec/dynamic_config.yml"); err != nil {
|
||||||
|
fmt.Printf("Error copying entry points: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
// delete the 2nd file
|
||||||
|
if err := os.Remove("config/crowdsec/dynamic_config.yml"); err != nil {
|
||||||
|
fmt.Printf("Error removing file: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := os.Remove("config/crowdsec/docker-compose.yml"); err != nil {
|
||||||
|
fmt.Printf("Error removing file: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := CheckAndAddTraefikLogVolume("docker-compose.yml"); err != nil {
|
||||||
|
fmt.Printf("Error checking and adding Traefik log volume: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
// check and add the service dependency of crowdsec to traefik
|
||||||
|
if err := CheckAndAddCrowdsecDependency("docker-compose.yml"); err != nil {
|
||||||
|
fmt.Printf("Error adding crowdsec dependency to traefik: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := startContainers(); err != nil {
|
||||||
|
return fmt.Errorf("failed to start containers: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// get API key
|
||||||
|
apiKey, err := GetCrowdSecAPIKey()
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("failed to get API key: %v", err)
|
||||||
|
}
|
||||||
|
config.TraefikBouncerKey = apiKey
|
||||||
|
|
||||||
|
if err := replaceInFile("config/traefik/dynamic_config.yml", "PUT_YOUR_BOUNCER_KEY_HERE_OR_IT_WILL_NOT_WORK", config.TraefikBouncerKey); err != nil {
|
||||||
|
return fmt.Errorf("failed to replace bouncer key: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := restartContainer("traefik"); err != nil {
|
||||||
|
return fmt.Errorf("failed to restart containers: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if checkIfTextInFile("config/traefik/dynamic_config.yml", "PUT_YOUR_BOUNCER_KEY_HERE_OR_IT_WILL_NOT_WORK") {
|
||||||
|
fmt.Println("Failed to replace bouncer key! Please retrieve the key and replace it in the config/traefik/dynamic_config.yml file using the following command:")
|
||||||
|
fmt.Println(" docker exec crowdsec cscli bouncers add traefik-bouncer")
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func checkIsCrowdsecInstalledInCompose() bool {
|
||||||
|
// Read docker-compose.yml
|
||||||
|
content, err := os.ReadFile("docker-compose.yml")
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for crowdsec service
|
||||||
|
return bytes.Contains(content, []byte("crowdsec:"))
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetCrowdSecAPIKey() (string, error) {
|
||||||
|
// First, ensure the container is running
|
||||||
|
if err := waitForContainer("crowdsec"); err != nil {
|
||||||
|
return "", fmt.Errorf("waiting for container: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Execute the command to get the API key
|
||||||
|
cmd := exec.Command("docker", "exec", "crowdsec", "cscli", "bouncers", "add", "traefik-bouncer", "-o", "raw")
|
||||||
|
var out bytes.Buffer
|
||||||
|
cmd.Stdout = &out
|
||||||
|
|
||||||
|
if err := cmd.Run(); err != nil {
|
||||||
|
return "", fmt.Errorf("executing command: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Trim any whitespace from the output
|
||||||
|
apiKey := strings.TrimSpace(out.String())
|
||||||
|
if apiKey == "" {
|
||||||
|
return "", fmt.Errorf("empty API key returned")
|
||||||
|
}
|
||||||
|
|
||||||
|
return apiKey, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func checkIfTextInFile(file, text string) bool {
|
||||||
|
// Read file
|
||||||
|
content, err := os.ReadFile(file)
|
||||||
|
if err != nil {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check for text
|
||||||
|
return bytes.Contains(content, []byte(text))
|
||||||
|
}
|
||||||
|
|
||||||
|
func CheckAndAddCrowdsecDependency(composePath string) error {
|
||||||
|
// Read the docker-compose.yml file
|
||||||
|
data, err := os.ReadFile(composePath)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("error reading compose file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse YAML into a generic map
|
||||||
|
var compose map[string]interface{}
|
||||||
|
if err := yaml.Unmarshal(data, &compose); err != nil {
|
||||||
|
return fmt.Errorf("error parsing compose file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get services section
|
||||||
|
services, ok := compose["services"].(map[string]interface{})
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("services section not found or invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get traefik service
|
||||||
|
traefik, ok := services["traefik"].(map[string]interface{})
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf("traefik service not found or invalid")
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get dependencies
|
||||||
|
dependsOn, ok := traefik["depends_on"].(map[string]interface{})
|
||||||
|
if ok {
|
||||||
|
// Append the new block for crowdsec
|
||||||
|
dependsOn["crowdsec"] = map[string]interface{}{
|
||||||
|
"condition": "service_healthy",
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// No dependencies exist, create it
|
||||||
|
traefik["depends_on"] = map[string]interface{}{
|
||||||
|
"crowdsec": map[string]interface{}{
|
||||||
|
"condition": "service_healthy",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Marshal the modified data back to YAML with indentation
|
||||||
|
modifiedData, err := MarshalYAMLWithIndent(compose, 2) // Set indentation to 2 spaces
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("error marshaling YAML: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := os.WriteFile(composePath, modifiedData, 0644); err != nil {
|
||||||
|
return fmt.Errorf("error writing updated compose file: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("Added dependency of crowdsec to traefik")
|
||||||
|
return nil
|
||||||
|
}
|
||||||
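For reference, a self-contained sketch mirroring GetCrowdSecAPIKey from install/crowdsec.go above; it shells out to the same cscli command the installer uses and is intended only to illustrate the bouncer-key flow, not to replace it:

package main

import (
	"bytes"
	"fmt"
	"os/exec"
	"strings"
)

// getBouncerKey asks the running crowdsec container to register a Traefik
// bouncer and returns the raw API key, as install/crowdsec.go does.
func getBouncerKey() (string, error) {
	cmd := exec.Command("docker", "exec", "crowdsec", "cscli", "bouncers", "add", "traefik-bouncer", "-o", "raw")
	var out bytes.Buffer
	cmd.Stdout = &out
	if err := cmd.Run(); err != nil {
		return "", fmt.Errorf("executing command: %w", err)
	}
	key := strings.TrimSpace(out.String())
	if key == "" {
		return "", fmt.Errorf("empty API key returned")
	}
	return key, nil
}

func main() {
	// The installer then substitutes this key for the
	// PUT_YOUR_BOUNCER_KEY_HERE_OR_IT_WILL_NOT_WORK placeholder in
	// config/traefik/dynamic_config.yml before restarting Traefik.
	key, err := getBouncerKey()
	if err != nil {
		fmt.Println("could not obtain bouncer key:", err)
		return
	}
	fmt.Println("bouncer key:", key)
}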
@@ -1,48 +0,0 @@
app:
    base_url: https://{{.Domain}}
    log_level: info
    save_logs: false

server:
    external_port: 3000
    internal_port: 3001
    next_port: 3002
    internal_hostname: pangolin
    secure_cookies: false
    session_cookie_name: p_session
    resource_session_cookie_name: p_resource_session

traefik:
    cert_resolver: letsencrypt
    http_entrypoint: web
    https_entrypoint: websecure
    prefer_wildcard_cert: false

gerbil:
    start_port: 51820
    base_endpoint: {{.Domain}}
    use_subdomain: false
    block_size: 16
    subnet_group: 10.0.0.0/8

rate_limits:
    global:
        window_minutes: 1
        max_requests: 100
{{if .EnableEmail}}
email:
    smtp_host: {{.EmailSMTPHost}}
    smtp_port: {{.EmailSMTPPort}}
    smtp_user: {{.EmailSMTPUser}}
    smtp_pass: {{.EmailSMTPPass}}
    no_reply: {{.EmailNoReply}}
{{end}}
users:
    server_admin:
        email: {{.AdminUserEmail}}
        password: {{.AdminUserPassword}}

flags:
    require_email_verification: {{.EnableEmail}}
    disable_signup_without_invite: {{.DisableSignupWithoutInvite}}
    disable_user_create_org: {{.DisableUserCreateOrg}}
@@ -1,3 +1,10 @@
 module installer

 go 1.23.0
+
+require (
+	golang.org/x/term v0.28.0
+	gopkg.in/yaml.v3 v3.0.1
+)
+
+require golang.org/x/sys v0.29.0 // indirect
@@ -0,0 +1,8 @@
golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU=
golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg=
golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
12  install/input.txt  (Normal file)
@@ -0,0 +1,12 @@
example.com
pangolin.example.com
admin@example.com
yes
admin@example.com
Password123!
Password123!
yes
no
no
no
yes
579  install/main.go
@@ -2,66 +2,171 @@ package main

 import (
 	"bufio"
+	"bytes"
 	"embed"
 	"fmt"
+	"io"
 	"io/fs"
 	"os"
 	"os/exec"
+	"os/user"
 	"path/filepath"
 	"runtime"
 	"strings"
+	"syscall"
 	"text/template"
-	"unicode"
+	"time"
+	"math/rand"
+	"strconv"

+	"golang.org/x/term"
 )

-//go:embed fs/*
+// DO NOT EDIT THIS FUNCTION; IT MATCHED BY REGEX IN CICD
+func loadVersions(config *Config) {
+	config.PangolinVersion = "replaceme"
+	config.GerbilVersion = "replaceme"
+	config.BadgerVersion = "replaceme"
+}
+
+//go:embed config/*
 var configFiles embed.FS

 type Config struct {
-	Domain                     string `yaml:"domain"`
-	LetsEncryptEmail           string `yaml:"letsEncryptEmail"`
-	AdminUserEmail             string `yaml:"adminUserEmail"`
-	AdminUserPassword          string `yaml:"adminUserPassword"`
-	DisableSignupWithoutInvite bool   `yaml:"disableSignupWithoutInvite"`
-	DisableUserCreateOrg       bool   `yaml:"disableUserCreateOrg"`
-	EnableEmail                bool   `yaml:"enableEmail"`
-	EmailSMTPHost              string `yaml:"emailSMTPHost"`
-	EmailSMTPPort              int    `yaml:"emailSMTPPort"`
-	EmailSMTPUser              string `yaml:"emailSMTPUser"`
-	EmailSMTPPass              string `yaml:"emailSMTPPass"`
-	EmailNoReply               string `yaml:"emailNoReply"`
+	PangolinVersion   string
+	GerbilVersion     string
+	BadgerVersion     string
+	BaseDomain        string
+	DashboardDomain   string
+	LetsEncryptEmail  string
+	EnableEmail       bool
+	EmailSMTPHost     string
+	EmailSMTPPort     int
+	EmailSMTPUser     string
+	EmailSMTPPass     string
+	EmailNoReply      string
+	InstallGerbil     bool
+	TraefikBouncerKey string
+	DoCrowdsecInstall bool
+	Secret            string
 }

||||||
func main() {
|
func main() {
|
||||||
reader := bufio.NewReader(os.Stdin)
|
reader := bufio.NewReader(os.Stdin)
|
||||||
|
|
||||||
// check if the user is root
|
// check if docker is not installed and the user is root
|
||||||
if os.Geteuid() != 0 {
|
if !isDockerInstalled() {
|
||||||
fmt.Println("This script must be run as root")
|
if os.Geteuid() != 0 {
|
||||||
|
fmt.Println("Docker is not installed. Please install Docker manually or run this installer as root.")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// check if the user is in the docker group (linux only)
|
||||||
|
if !isUserInDockerGroup() {
|
||||||
|
fmt.Println("You are not in the docker group.")
|
||||||
|
fmt.Println("The installer will not be able to run docker commands without running it as root.")
|
||||||
os.Exit(1)
|
os.Exit(1)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var config Config
|
||||||
|
|
||||||
// check if there is already a config file
|
// check if there is already a config file
|
||||||
if _, err := os.Stat("config/config.yml"); err != nil {
|
if _, err := os.Stat("config/config.yml"); err != nil {
|
||||||
config := collectUserInput(reader)
|
config = collectUserInput(reader)
|
||||||
createConfigFiles(config)
|
|
||||||
|
loadVersions(&config)
|
||||||
|
config.DoCrowdsecInstall = false
|
||||||
|
config.Secret = generateRandomSecretKey()
|
||||||
|
|
||||||
|
if err := createConfigFiles(config); err != nil {
|
||||||
|
fmt.Printf("Error creating config files: %v\n", err)
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
moveFile("config/docker-compose.yml", "docker-compose.yml")
|
||||||
|
|
||||||
if !isDockerInstalled() && runtime.GOOS == "linux" {
|
if !isDockerInstalled() && runtime.GOOS == "linux" {
|
||||||
if shouldInstallDocker() {
|
if readBool(reader, "Docker is not installed. Would you like to install it?", true) {
|
||||||
installDocker()
|
installDocker()
|
||||||
|
// try to start docker service but ignore errors
|
||||||
|
if err := startDockerService(); err != nil {
|
||||||
|
fmt.Println("Error starting Docker service:", err)
|
||||||
|
} else {
|
||||||
|
fmt.Println("Docker service started successfully!")
|
||||||
|
}
|
||||||
|
// wait 10 seconds for docker to start checking if docker is running every 2 seconds
|
||||||
|
fmt.Println("Waiting for Docker to start...")
|
||||||
|
for i := 0; i < 5; i++ {
|
||||||
|
if isDockerRunning() {
|
||||||
|
fmt.Println("Docker is running!")
|
||||||
|
break
|
||||||
|
}
|
||||||
|
fmt.Println("Docker is not running yet, waiting...")
|
||||||
|
time.Sleep(2 * time.Second)
|
||||||
|
}
|
||||||
|
if !isDockerRunning() {
|
||||||
|
fmt.Println("Docker is still not running after 10 seconds. Please check the installation.")
|
||||||
|
os.Exit(1)
|
||||||
|
}
|
||||||
|
fmt.Println("Docker installed successfully!")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Println("\n=== Starting installation ===")
|
||||||
|
|
||||||
|
if isDockerInstalled() {
|
||||||
|
if readBool(reader, "Would you like to install and start the containers?", true) {
|
||||||
|
if err := pullContainers(); err != nil {
|
||||||
|
fmt.Println("Error: ", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := startContainers(); err != nil {
|
||||||
|
fmt.Println("Error: ", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
fmt.Println("Config file already exists... skipping configuration")
|
fmt.Println("Looks like you already installed, so I am going to do the setup...")
|
||||||
}
|
}
|
||||||
|
|
||||||
if isDockerInstalled() {
|
if !checkIsCrowdsecInstalledInCompose() {
|
||||||
if readBool(reader, "Would you like to install and start the containers?", true) {
|
fmt.Println("\n=== CrowdSec Install ===")
|
||||||
pullAndStartContainers()
|
// check if crowdsec is installed
|
||||||
|
if readBool(reader, "Would you like to install CrowdSec?", false) {
|
||||||
|
fmt.Println("This installer constitutes a minimal viable CrowdSec deployment. CrowdSec will add extra complexity to your Pangolin installation and may not work to the best of its abilities out of the box. Users are expected to implement configuration adjustments on their own to achieve the best security posture. Consult the CrowdSec documentation for detailed configuration instructions.")
|
||||||
|
if readBool(reader, "Are you willing to manage CrowdSec?", false) {
|
||||||
|
if config.DashboardDomain == "" {
|
||||||
|
traefikConfig, err := ReadTraefikConfig("config/traefik/traefik_config.yml", "config/traefik/dynamic_config.yml")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("Error reading config: %v\n", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
config.DashboardDomain = traefikConfig.DashboardDomain
|
||||||
|
config.LetsEncryptEmail = traefikConfig.LetsEncryptEmail
|
||||||
|
config.BadgerVersion = traefikConfig.BadgerVersion
|
||||||
|
|
||||||
|
// print the values and check if they are right
|
||||||
|
fmt.Println("Detected values:")
|
||||||
|
fmt.Printf("Dashboard Domain: %s\n", config.DashboardDomain)
|
||||||
|
fmt.Printf("Let's Encrypt Email: %s\n", config.LetsEncryptEmail)
|
||||||
|
fmt.Printf("Badger Version: %s\n", config.BadgerVersion)
|
||||||
|
|
||||||
|
if !readBool(reader, "Are these values correct?", true) {
|
||||||
|
config = collectUserInput(reader)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
config.DoCrowdsecInstall = true
|
||||||
|
installCrowdsec(config)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Println("Installation complete!")
|
fmt.Println("Installation complete!")
|
||||||
|
fmt.Printf("\nTo complete the initial setup, please visit:\nhttps://%s/auth/initial-setup\n", config.DashboardDomain)
|
||||||
}
|
}
|
||||||
|
|
||||||
 func readString(reader *bufio.Reader, prompt string, defaultValue string) string {
@@ -78,6 +183,26 @@ func readString(reader *bufio.Reader, prompt string, defaultValue string) string
 	return input
 }

+func readPassword(prompt string, reader *bufio.Reader) string {
+	if term.IsTerminal(int(syscall.Stdin)) {
+		fmt.Print(prompt + ": ")
+		// Read password without echo if we're in a terminal
+		password, err := term.ReadPassword(int(syscall.Stdin))
+		fmt.Println() // Add a newline since ReadPassword doesn't add one
+		if err != nil {
+			return ""
+		}
+		input := strings.TrimSpace(string(password))
+		if input == "" {
+			return readPassword(prompt, reader)
+		}
+		return input
+	} else {
+		// Fallback to reading from stdin if not in a terminal
+		return readString(reader, prompt, "")
+	}
+}
+
 func readBool(reader *bufio.Reader, prompt string, defaultValue bool) bool {
 	defaultStr := "no"
 	if defaultValue {
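Editor's note on readPassword above: it only suppresses echo when stdin is a real terminal; with piped input (for example `./installer < answers.txt`) it quietly falls back to readString, so non-interactive runs still work. A hypothetical call site:

	// Illustrative only; the prompt text here is made up.
	token := readPassword("Enter an API token", reader)
	if token == "" {
		fmt.Println("No value provided (non-interactive input or read error)")
	}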
@@ -102,34 +227,14 @@ func collectUserInput(reader *bufio.Reader) Config {

 	// Basic configuration
 	fmt.Println("\n=== Basic Configuration ===")
-	config.Domain = readString(reader, "Enter your domain name", "")
+	config.BaseDomain = readString(reader, "Enter your base domain (no subdomain e.g. example.com)", "")
+	config.DashboardDomain = readString(reader, "Enter the domain for the Pangolin dashboard", "pangolin."+config.BaseDomain)
 	config.LetsEncryptEmail = readString(reader, "Enter email for Let's Encrypt certificates", "")
+	config.InstallGerbil = readBool(reader, "Do you want to use Gerbil to allow tunneled connections", true)

-	// Admin user configuration
-	fmt.Println("\n=== Admin User Configuration ===")
-	config.AdminUserEmail = readString(reader, "Enter admin user email", "admin@"+config.Domain)
-	for {
-		config.AdminUserPassword = readString(reader, "Enter admin user password", "")
-		if valid, message := validatePassword(config.AdminUserPassword); valid {
-			break
-		} else {
-			fmt.Println("Invalid password:", message)
-			fmt.Println("Password requirements:")
-			fmt.Println("- At least one uppercase English letter")
-			fmt.Println("- At least one lowercase English letter")
-			fmt.Println("- At least one digit")
-			fmt.Println("- At least one special character")
-		}
-	}
-
-	// Security settings
-	fmt.Println("\n=== Security Settings ===")
-	config.DisableSignupWithoutInvite = readBool(reader, "Disable signup without invite", true)
-	config.DisableUserCreateOrg = readBool(reader, "Disable users from creating organizations", false)

 	// Email configuration
 	fmt.Println("\n=== Email Configuration ===")
-	config.EnableEmail = readBool(reader, "Enable email functionality", false)
+	config.EnableEmail = readBool(reader, "Enable email functionality (SMTP)", false)

 	if config.EnableEmail {
 		config.EmailSMTPHost = readString(reader, "Enter SMTP host", "")
@@ -140,68 +245,22 @@ func collectUserInput(reader *bufio.Reader) Config {
 	}

 	// Validate required fields
-	if config.Domain == "" {
+	if config.BaseDomain == "" {
 		fmt.Println("Error: Domain name is required")
 		os.Exit(1)
 	}
+	if config.DashboardDomain == "" {
+		fmt.Println("Error: Dashboard Domain name is required")
+		os.Exit(1)
+	}
 	if config.LetsEncryptEmail == "" {
 		fmt.Println("Error: Let's Encrypt email is required")
 		os.Exit(1)
 	}
-	if config.AdminUserEmail == "" || config.AdminUserPassword == "" {
-		fmt.Println("Error: Admin user email and password are required")
-		os.Exit(1)
-	}

 	return config
 }

-func validatePassword(password string) (bool, string) {
-	if len(password) == 0 {
-		return false, "Password cannot be empty"
-	}
-
-	var (
-		hasUpper   bool
-		hasLower   bool
-		hasDigit   bool
-		hasSpecial bool
-	)
-
-	for _, char := range password {
-		switch {
-		case unicode.IsUpper(char):
-			hasUpper = true
-		case unicode.IsLower(char):
-			hasLower = true
-		case unicode.IsDigit(char):
-			hasDigit = true
-		case unicode.IsPunct(char) || unicode.IsSymbol(char):
-			hasSpecial = true
-		}
-	}
-
-	var missing []string
-	if !hasUpper {
-		missing = append(missing, "an uppercase letter")
-	}
-	if !hasLower {
-		missing = append(missing, "a lowercase letter")
-	}
-	if !hasDigit {
-		missing = append(missing, "a digit")
-	}
-	if !hasSpecial {
-		missing = append(missing, "a special character")
-	}
-
-	if len(missing) > 0 {
-		return false, fmt.Sprintf("Password must contain %s", strings.Join(missing, ", "))
-	}
-
-	return true, ""
-}

 func createConfigFiles(config Config) error {
 	os.MkdirAll("config", 0755)
 	os.MkdirAll("config/letsencrypt", 0755)
@@ -209,26 +268,33 @@ func createConfigFiles(config Config) error {
 	os.MkdirAll("config/logs", 0755)

 	// Walk through all embedded files
-	err := fs.WalkDir(configFiles, "fs", func(path string, d fs.DirEntry, err error) error {
+	err := fs.WalkDir(configFiles, "config", func(path string, d fs.DirEntry, err error) error {
 		if err != nil {
 			return err
 		}

 		// Skip the root fs directory itself
-		if path == "fs" {
+		if path == "config" {
 			return nil
 		}

-		// Get the relative path by removing the "fs/" prefix
-		relPath := strings.TrimPrefix(path, "fs/")
-
-		// Create the full output path under "config/"
-		outPath := filepath.Join("config", relPath)
+		if !config.DoCrowdsecInstall && strings.Contains(path, "crowdsec") {
+			return nil
+		}
+
+		if config.DoCrowdsecInstall && !strings.Contains(path, "crowdsec") {
+			return nil
+		}
+
+		// skip .DS_Store
+		if strings.Contains(path, ".DS_Store") {
+			return nil
+		}

 		if d.IsDir() {
 			// Create directory
-			if err := os.MkdirAll(outPath, 0755); err != nil {
-				return fmt.Errorf("failed to create directory %s: %v", outPath, err)
+			if err := os.MkdirAll(path, 0755); err != nil {
+				return fmt.Errorf("failed to create directory %s: %v", path, err)
 			}
 			return nil
 		}
@@ -246,14 +312,14 @@ func createConfigFiles(config Config) error {
 		}

 		// Ensure parent directory exists
-		if err := os.MkdirAll(filepath.Dir(outPath), 0755); err != nil {
-			return fmt.Errorf("failed to create parent directory for %s: %v", outPath, err)
+		if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {
+			return fmt.Errorf("failed to create parent directory for %s: %v", path, err)
 		}

 		// Create output file
-		outFile, err := os.Create(outPath)
+		outFile, err := os.Create(path)
 		if err != nil {
-			return fmt.Errorf("failed to create %s: %v", outPath, err)
+			return fmt.Errorf("failed to create %s: %v", path, err)
 		}
 		defer outFile.Close()

@@ -269,19 +335,9 @@ func createConfigFiles(config Config) error {
 		return fmt.Errorf("error walking config files: %v", err)
 	}

-	// move the docker-compose.yml file to the root directory
-	os.Rename("config/docker-compose.yml", "docker-compose.yml")
-
 	return nil
 }

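Editor's note: the walk above now iterates an embedded filesystem rooted at "config" instead of "fs", which implies the embed declaration changed to match. A sketch of the kind of directive this relies on (assumed; the actual declaration sits elsewhere in the installer and may use different embed patterns):

	import "embed"

	// Assumed shape of the embedded tree: a top-level "config" directory
	// containing the templates written out by createConfigFiles.
	//go:embed config
	var configFiles embed.FS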
-func shouldInstallDocker() bool {
-	reader := bufio.NewReader(os.Stdin)
-	fmt.Print("Would you like to install Docker? (yes/no): ")
-	response, _ := reader.ReadString('\n')
-	return strings.ToLower(strings.TrimSpace(response)) == "yes"
-}

 func installDocker() error {
 	// Detect Linux distribution
 	cmd := exec.Command("cat", "/etc/os-release")
@@ -289,26 +345,73 @@ func installDocker() error {
 	if err != nil {
 		return fmt.Errorf("failed to detect Linux distribution: %v", err)
 	}

 	osRelease := string(output)
-	var installCmd *exec.Cmd

+	// Detect system architecture
+	archCmd := exec.Command("uname", "-m")
+	archOutput, err := archCmd.Output()
+	if err != nil {
+		return fmt.Errorf("failed to detect system architecture: %v", err)
+	}
+	arch := strings.TrimSpace(string(archOutput))
+
+	// Map architecture to Docker's architecture naming
+	var dockerArch string
+	switch arch {
+	case "x86_64":
+		dockerArch = "amd64"
+	case "aarch64":
+		dockerArch = "arm64"
+	default:
+		return fmt.Errorf("unsupported architecture: %s", arch)
+	}
+
+	var installCmd *exec.Cmd
 	switch {
-	case strings.Contains(osRelease, "ID=ubuntu") || strings.Contains(osRelease, "ID=debian"):
-		installCmd = exec.Command("bash", "-c", `
+	case strings.Contains(osRelease, "ID=ubuntu"):
+		installCmd = exec.Command("bash", "-c", fmt.Sprintf(`
 			apt-get update &&
 			apt-get install -y apt-transport-https ca-certificates curl software-properties-common &&
 			curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg &&
-			echo "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" > /etc/apt/sources.list.d/docker.list &&
+			echo "deb [arch=%s signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" > /etc/apt/sources.list.d/docker.list &&
 			apt-get update &&
 			apt-get install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin
-		`)
+		`, dockerArch))
+	case strings.Contains(osRelease, "ID=debian"):
+		installCmd = exec.Command("bash", "-c", fmt.Sprintf(`
+			apt-get update &&
+			apt-get install -y apt-transport-https ca-certificates curl software-properties-common &&
+			curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg &&
+			echo "deb [arch=%s signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian $(lsb_release -cs) stable" > /etc/apt/sources.list.d/docker.list &&
+			apt-get update &&
+			apt-get install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin
+		`, dockerArch))
 	case strings.Contains(osRelease, "ID=fedora"):
-		installCmd = exec.Command("bash", "-c", `
-			dnf -y install dnf-plugins-core &&
-			dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo &&
-			dnf install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin
-		`)
+		// Detect Fedora version to handle DNF 5 changes
+		versionCmd := exec.Command("bash", "-c", "grep VERSION_ID /etc/os-release | cut -d'=' -f2 | tr -d '\"'")
+		versionOutput, err := versionCmd.Output()
+		var fedoraVersion int
+		if err == nil {
+			if v, parseErr := strconv.Atoi(strings.TrimSpace(string(versionOutput))); parseErr == nil {
+				fedoraVersion = v
+			}
+		}
+
+		// Use appropriate DNF syntax based on version
+		var repoCmd string
+		if fedoraVersion >= 41 {
+			// DNF 5 syntax for Fedora 41+
+			repoCmd = "dnf config-manager addrepo --from-repofile=https://download.docker.com/linux/fedora/docker-ce.repo"
+		} else {
+			// DNF 4 syntax for Fedora < 41
+			repoCmd = "dnf config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo"
+		}
+
+		installCmd = exec.Command("bash", "-c", fmt.Sprintf(`
+			dnf -y install dnf-plugins-core &&
+			%s &&
+			dnf install -y docker-ce docker-ce-cli containerd.io docker-compose-plugin
+		`, repoCmd))
 	case strings.Contains(osRelease, "ID=opensuse") || strings.Contains(osRelease, "ID=\"opensuse-"):
 		installCmd = exec.Command("bash", "-c", `
 			zypper install -y docker docker-compose &&
@@ -338,6 +441,20 @@ func installDocker() error {
 	return installCmd.Run()
 }

+func startDockerService() error {
+	if runtime.GOOS == "linux" {
+		cmd := exec.Command("systemctl", "enable", "--now", "docker")
+		cmd.Stdout = os.Stdout
+		cmd.Stderr = os.Stderr
+		return cmd.Run()
+	} else if runtime.GOOS == "darwin" {
+		// On macOS, Docker is usually started via the Docker Desktop application
+		fmt.Println("Please start Docker Desktop manually on macOS.")
+		return nil
+	}
+	return fmt.Errorf("unsupported operating system for starting Docker service")
+}
+
 func isDockerInstalled() bool {
 	cmd := exec.Command("docker", "--version")
 	if err := cmd.Run(); err != nil {
@@ -346,29 +463,181 @@ func isDockerInstalled() bool {
 	return true
 }

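Editor's note: taken together, the helpers above suggest a bootstrap sequence along the following lines (a sketch of how the pieces could be wired on Linux, not the installer's literal control flow):

	// Sketch only: install Docker if it is missing, then make sure the daemon is up.
	if !isDockerInstalled() && runtime.GOOS == "linux" {
		if err := installDocker(); err != nil {
			fmt.Println("Error installing Docker:", err)
			return
		}
		if err := startDockerService(); err != nil {
			fmt.Println("Error starting the Docker service:", err)
		}
	}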
-func pullAndStartContainers() error {
+func isUserInDockerGroup() bool {
+	if runtime.GOOS == "darwin" {
+		// Docker group is not applicable on macOS
+		// So we assume that the user can run Docker commands
+		return true
+	}
+
+	if os.Geteuid() == 0 {
+		return true // Root user can run Docker commands anyway
+	}
+
+	// Check if the current user is in the docker group
+	if dockerGroup, err := user.LookupGroup("docker"); err == nil {
+		if currentUser, err := user.Current(); err == nil {
+			if currentUserGroupIds, err := currentUser.GroupIds(); err == nil {
+				for _, groupId := range currentUserGroupIds {
+					if groupId == dockerGroup.Gid {
+						return true
+					}
+				}
+			}
+		}
+	}
+
+	// Eventually, if any of the checks fail, we assume the user cannot run Docker commands
+	return false
+}
+
+// isDockerRunning checks if the Docker daemon is running by using the `docker info` command.
+func isDockerRunning() bool {
+	cmd := exec.Command("docker", "info")
+	if err := cmd.Run(); err != nil {
+		return false
+	}
+	return true
+}
+
+// executeDockerComposeCommandWithArgs executes the appropriate docker command with arguments supplied
+func executeDockerComposeCommandWithArgs(args ...string) error {
+	var cmd *exec.Cmd
+	var useNewStyle bool
+
+	if !isDockerInstalled() {
+		return fmt.Errorf("docker is not installed")
+	}
+
+	checkCmd := exec.Command("docker", "compose", "version")
+	if err := checkCmd.Run(); err == nil {
+		useNewStyle = true
+	} else {
+		checkCmd = exec.Command("docker-compose", "version")
+		if err := checkCmd.Run(); err == nil {
+			useNewStyle = false
+		} else {
+			return fmt.Errorf("neither 'docker compose' nor 'docker-compose' command is available")
+		}
+	}
+
+	if useNewStyle {
+		cmd = exec.Command("docker", append([]string{"compose"}, args...)...)
+	} else {
+		cmd = exec.Command("docker-compose", args...)
+	}
+
+	cmd.Stdout = os.Stdout
+	cmd.Stderr = os.Stderr
+	return cmd.Run()
+}
+
+// pullContainers pulls the containers using the appropriate command.
+func pullContainers() error {
+	fmt.Println("Pulling the container images...")
+
+	if err := executeDockerComposeCommandWithArgs("-f", "docker-compose.yml", "pull", "--policy", "always"); err != nil {
+		return fmt.Errorf("failed to pull the containers: %v", err)
+	}
+
+	return nil
+}
+
+// startContainers starts the containers using the appropriate command.
+func startContainers() error {
 	fmt.Println("Starting containers...")
-
-	// First try docker compose (new style)
-	cmd := exec.Command("docker", "compose", "-f", "docker-compose.yml", "pull")
-	cmd.Stdout = os.Stdout
-	cmd.Stderr = os.Stderr
-	err := cmd.Run()
-
-	if err != nil {
-		fmt.Println("Failed to start containers using docker compose, falling back to docker-compose command")
-		os.Exit(1)
+	if err := executeDockerComposeCommandWithArgs("-f", "docker-compose.yml", "up", "-d", "--force-recreate"); err != nil {
+		return fmt.Errorf("failed to start containers: %v", err)
 	}

-	cmd = exec.Command("docker", "compose", "-f", "docker-compose.yml", "up", "-d")
-	cmd.Stdout = os.Stdout
-	cmd.Stderr = os.Stderr
-	err = cmd.Run()
-
-	if err != nil {
-		fmt.Println("Failed to start containers using docker-compose command")
-		os.Exit(1)
+	return nil
+}
+
+// stopContainers stops the containers using the appropriate command.
+func stopContainers() error {
+	fmt.Println("Stopping containers...")
+
+	if err := executeDockerComposeCommandWithArgs("-f", "docker-compose.yml", "down"); err != nil {
+		return fmt.Errorf("failed to stop containers: %v", err)
 	}

+	return nil
+}
+
+// restartContainer restarts a specific container using the appropriate command.
+func restartContainer(container string) error {
+	fmt.Println("Restarting containers...")
+
+	if err := executeDockerComposeCommandWithArgs("-f", "docker-compose.yml", "restart", container); err != nil {
+		return fmt.Errorf("failed to stop the container \"%s\": %v", container, err)
+	}
+
+	return nil
+}
+
+func copyFile(src, dst string) error {
+	source, err := os.Open(src)
+	if err != nil {
+		return err
+	}
+	defer source.Close()
+
+	destination, err := os.Create(dst)
+	if err != nil {
+		return err
+	}
+	defer destination.Close()
+
+	_, err = io.Copy(destination, source)
 	return err
 }

+func moveFile(src, dst string) error {
+	if err := copyFile(src, dst); err != nil {
+		return err
+	}
+
+	return os.Remove(src)
+}
+
+func waitForContainer(containerName string) error {
+	maxAttempts := 30
+	retryInterval := time.Second * 2
+
+	for attempt := 0; attempt < maxAttempts; attempt++ {
+		// Check if container is running
+		cmd := exec.Command("docker", "container", "inspect", "-f", "{{.State.Running}}", containerName)
+		var out bytes.Buffer
+		cmd.Stdout = &out
+
+		if err := cmd.Run(); err != nil {
+			// If the container doesn't exist or there's another error, wait and retry
+			time.Sleep(retryInterval)
+			continue
+		}
+
+		isRunning := strings.TrimSpace(out.String()) == "true"
+		if isRunning {
+			return nil
+		}
+
+		// Container exists but isn't running yet, wait and retry
+		time.Sleep(retryInterval)
+	}
+
+	return fmt.Errorf("container %s did not start within %v seconds", containerName, maxAttempts*int(retryInterval.Seconds()))
+}
+
+func generateRandomSecretKey() string {
+	const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
+	const length = 32
+
+	var seededRand *rand.Rand = rand.New(
+		rand.NewSource(time.Now().UnixNano()))
+
+	b := make([]byte, length)
+	for i := range b {
+		b[i] = charset[seededRand.Intn(len(charset))]
+	}
+	return string(b)
+}
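Editor's note: a typical sequence built from these helpers (illustrative only; the container name "pangolin" is an assumption, use whatever service name your compose file defines):

	// Pull images, bring the stack up, then block until the app container is running.
	if err := pullContainers(); err != nil {
		fmt.Println("Error:", err)
		return
	}
	if err := startContainers(); err != nil {
		fmt.Println("Error:", err)
		return
	}
	if err := waitForContainer("pangolin"); err != nil {
		fmt.Println("Error:", err)
	}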
1136
messages/de-DE.json
Normal file
1136
messages/en-US.json
Normal file
1136
messages/es-ES.json
Normal file
1136
messages/fr-FR.json
Normal file
1136
messages/it-IT.json
Normal file
1136
messages/nl-NL.json
Normal file
1136
messages/pl-PL.json
Normal file
1136
messages/pt-PT.json
Normal file
1136
messages/tr-TR.json
Normal file
1136
messages/zh-CN.json
Normal file
@@ -1,8 +1,13 @@
-/** @type {import('next').NextConfig} */
+import createNextIntlPlugin from "next-intl/plugin";
+
+const withNextIntl = createNextIntlPlugin();
+
+/** @type {import("next").NextConfig} */
 const nextConfig = {
     eslint: {
-        ignoreDuringBuilds: true,
-    }
+        ignoreDuringBuilds: true
+    },
+    output: "standalone"
 };

-export default nextConfig;
+export default withNextIntl(nextConfig);
16601
package-lock.json
generated
Normal file
160
package.json
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@fosrl/pangolin",
|
"name": "@fosrl/pangolin",
|
||||||
"version": "1.0.0-beta.1",
|
"version": "0.0.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"description": "Tunneled Reverse Proxy Management Server with Identity and Access Control and Dashboard UI",
|
"description": "Tunneled Reverse Proxy Management Server with Identity and Access Control and Dashboard UI",
|
||||||
@@ -12,100 +12,134 @@
|
|||||||
"license": "SEE LICENSE IN LICENSE AND README.md",
|
"license": "SEE LICENSE IN LICENSE AND README.md",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "NODE_ENV=development ENVIRONMENT=dev tsx watch server/index.ts",
|
"dev": "NODE_ENV=development ENVIRONMENT=dev tsx watch server/index.ts",
|
||||||
"db:generate": "drizzle-kit generate",
|
"db:pg:generate": "drizzle-kit generate --config=./drizzle.pg.config.ts",
|
||||||
"db:push": "npx tsx server/db/migrate.ts",
|
"db:sqlite:generate": "drizzle-kit generate --config=./drizzle.sqlite.config.ts",
|
||||||
"db:studio": "drizzle-kit studio",
|
"db:pg:push": "npx tsx server/db/pg/migrate.ts",
|
||||||
"build": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrations.ts -o dist/migrations.mjs",
|
"db:sqlite:push": "npx tsx server/db/sqlite/migrate.ts",
|
||||||
"start": "NODE_OPTIONS=--enable-source-maps NODE_ENV=development ENVIRONMENT=prod sh -c 'node dist/migrations.mjs && node dist/server.mjs'",
|
"db:sqlite:studio": "drizzle-kit studio --config=./drizzle.sqlite.config.ts",
|
||||||
"email": "email dev --dir server/emails/templates --port 3005"
|
"db:pg:studio": "drizzle-kit studio --config=./drizzle.pg.config.ts",
|
||||||
|
"db:clear-migrations": "rm -rf server/migrations",
|
||||||
|
"build:sqlite": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrationsSqlite.ts -o dist/migrations.mjs",
|
||||||
|
"build:pg": "mkdir -p dist && next build && node esbuild.mjs -e server/index.ts -o dist/server.mjs && node esbuild.mjs -e server/setup/migrationsPg.ts -o dist/migrations.mjs",
|
||||||
|
"start:sqlite": "DB_TYPE=sqlite NODE_OPTIONS=--enable-source-maps NODE_ENV=development ENVIRONMENT=prod sh -c 'node dist/migrations.mjs && node dist/server.mjs'",
|
||||||
|
"start:pg": "DB_TYPE=pg NODE_OPTIONS=--enable-source-maps NODE_ENV=development ENVIRONMENT=prod sh -c 'node dist/migrations.mjs && node dist/server.mjs'",
|
||||||
|
"email": "email dev --dir server/emails/templates --port 3005",
|
||||||
|
"build:cli": "node esbuild.mjs -e cli/index.ts -o dist/cli.mjs"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@asteasolutions/zod-to-openapi": "^7.3.4",
|
||||||
"@hookform/resolvers": "3.9.1",
|
"@hookform/resolvers": "3.9.1",
|
||||||
"@node-rs/argon2": "2.0.2",
|
"@node-rs/argon2": "^2.0.2",
|
||||||
"@oslojs/crypto": "1.0.1",
|
"@oslojs/crypto": "1.0.1",
|
||||||
"@oslojs/encoding": "1.1.0",
|
"@oslojs/encoding": "1.1.0",
|
||||||
"@radix-ui/react-avatar": "1.1.2",
|
"@radix-ui/react-avatar": "1.1.10",
|
||||||
"@radix-ui/react-checkbox": "1.1.3",
|
"@radix-ui/react-checkbox": "1.3.2",
|
||||||
"@radix-ui/react-dialog": "1.1.4",
|
"@radix-ui/react-collapsible": "1.1.11",
|
||||||
"@radix-ui/react-dropdown-menu": "2.1.4",
|
"@radix-ui/react-dialog": "1.1.14",
|
||||||
|
"@radix-ui/react-dropdown-menu": "2.1.15",
|
||||||
"@radix-ui/react-icons": "1.3.2",
|
"@radix-ui/react-icons": "1.3.2",
|
||||||
"@radix-ui/react-label": "2.1.1",
|
"@radix-ui/react-label": "2.1.7",
|
||||||
"@radix-ui/react-popover": "1.1.4",
|
"@radix-ui/react-popover": "1.1.14",
|
||||||
"@radix-ui/react-radio-group": "1.2.2",
|
"@radix-ui/react-progress": "^1.1.7",
|
||||||
"@radix-ui/react-select": "2.1.4",
|
"@radix-ui/react-radio-group": "1.3.7",
|
||||||
"@radix-ui/react-separator": "1.1.1",
|
"@radix-ui/react-scroll-area": "^1.2.9",
|
||||||
"@radix-ui/react-slot": "1.1.1",
|
"@radix-ui/react-select": "2.2.5",
|
||||||
"@radix-ui/react-switch": "1.1.2",
|
"@radix-ui/react-separator": "1.1.7",
|
||||||
"@radix-ui/react-tabs": "1.1.2",
|
"@radix-ui/react-slot": "1.2.3",
|
||||||
"@radix-ui/react-toast": "1.2.4",
|
"@radix-ui/react-switch": "1.2.5",
|
||||||
"@react-email/components": "0.0.31",
|
"@radix-ui/react-tabs": "1.1.12",
|
||||||
"@react-email/tailwind": "1.0.4",
|
"@radix-ui/react-toast": "1.2.14",
|
||||||
"@tanstack/react-table": "8.20.6",
|
"@react-email/components": "0.1.0",
|
||||||
"axios": "1.7.9",
|
"@react-email/render": "^1.1.2",
|
||||||
|
"@react-email/tailwind": "1.0.5",
|
||||||
|
"@tailwindcss/forms": "^0.5.10",
|
||||||
|
"@tanstack/react-table": "8.21.3",
|
||||||
|
"arctic": "^3.7.0",
|
||||||
|
"axios": "1.10.0",
|
||||||
"better-sqlite3": "11.7.0",
|
"better-sqlite3": "11.7.0",
|
||||||
|
"canvas-confetti": "1.9.3",
|
||||||
"class-variance-authority": "0.7.1",
|
"class-variance-authority": "0.7.1",
|
||||||
"clsx": "2.1.1",
|
"clsx": "2.1.1",
|
||||||
"cmdk": "1.0.4",
|
"cmdk": "1.1.1",
|
||||||
|
"cookie": "^1.0.2",
|
||||||
"cookie-parser": "1.4.7",
|
"cookie-parser": "1.4.7",
|
||||||
|
"cookies": "^0.9.1",
|
||||||
"cors": "2.8.5",
|
"cors": "2.8.5",
|
||||||
"drizzle-orm": "0.38.3",
|
"crypto-js": "^4.2.0",
|
||||||
"emblor": "1.4.7",
|
"drizzle-orm": "0.44.2",
|
||||||
"eslint": "9.17.0",
|
"eslint": "9.29.0",
|
||||||
"eslint-config-next": "15.1.3",
|
"eslint-config-next": "15.3.4",
|
||||||
"express": "4.21.2",
|
"express": "4.21.2",
|
||||||
"express-rate-limit": "7.5.0",
|
"express-rate-limit": "7.5.1",
|
||||||
"glob": "11.0.0",
|
"glob": "11.0.3",
|
||||||
"helmet": "8.0.0",
|
"helmet": "8.1.0",
|
||||||
"http-errors": "2.0.0",
|
"http-errors": "2.0.0",
|
||||||
"input-otp": "1.4.1",
|
"i": "^0.3.7",
|
||||||
|
"input-otp": "1.4.2",
|
||||||
|
"jmespath": "^0.16.0",
|
||||||
"js-yaml": "4.1.0",
|
"js-yaml": "4.1.0",
|
||||||
"lucide-react": "0.469.0",
|
"jsonwebtoken": "^9.0.2",
|
||||||
|
"lucide-react": "0.522.0",
|
||||||
"moment": "2.30.1",
|
"moment": "2.30.1",
|
||||||
"next": "15.1.3",
|
"next": "15.3.4",
|
||||||
"next-themes": "0.4.4",
|
"next-intl": "^4.1.0",
|
||||||
|
"next-themes": "0.4.6",
|
||||||
|
"node-cache": "5.1.2",
|
||||||
"node-fetch": "3.3.2",
|
"node-fetch": "3.3.2",
|
||||||
"nodemailer": "6.9.16",
|
"nodemailer": "7.0.3",
|
||||||
|
"npm": "^11.4.2",
|
||||||
"oslo": "1.2.1",
|
"oslo": "1.2.1",
|
||||||
|
"pg": "^8.16.2",
|
||||||
"qrcode.react": "4.2.0",
|
"qrcode.react": "4.2.0",
|
||||||
"react": "19.0.0",
|
"react": "19.1.0",
|
||||||
"react-dom": "19.0.0",
|
"react-dom": "19.1.0",
|
||||||
"react-hook-form": "7.54.2",
|
"react-easy-sort": "^1.6.0",
|
||||||
|
"react-hook-form": "7.58.1",
|
||||||
|
"react-icons": "^5.5.0",
|
||||||
"rebuild": "0.1.2",
|
"rebuild": "0.1.2",
|
||||||
"semver": "7.6.3",
|
"semver": "7.7.2",
|
||||||
"tailwind-merge": "2.6.0",
|
"swagger-ui-express": "^5.0.1",
|
||||||
"tailwindcss-animate": "1.0.7",
|
"tailwind-merge": "3.3.1",
|
||||||
|
"tw-animate-css": "^1.3.3",
|
||||||
|
"uuid": "^11.1.0",
|
||||||
"vaul": "1.1.2",
|
"vaul": "1.1.2",
|
||||||
"winston": "3.17.0",
|
"winston": "3.17.0",
|
||||||
"winston-daily-rotate-file": "5.0.0",
|
"winston-daily-rotate-file": "5.0.0",
|
||||||
"ws": "8.18.0",
|
"ws": "8.18.2",
|
||||||
"zod": "3.24.1",
|
"zod": "3.25.67",
|
||||||
"zod-validation-error": "3.4.0"
|
"zod-validation-error": "3.5.2",
|
||||||
|
"yargs": "18.0.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@dotenvx/dotenvx": "1.32.0",
|
"@dotenvx/dotenvx": "1.45.1",
|
||||||
"@esbuild-plugins/tsconfig-paths": "0.1.2",
|
"@esbuild-plugins/tsconfig-paths": "0.1.2",
|
||||||
|
"@tailwindcss/postcss": "^4.1.10",
|
||||||
"@types/better-sqlite3": "7.6.12",
|
"@types/better-sqlite3": "7.6.12",
|
||||||
"@types/cookie-parser": "1.4.8",
|
"@types/cookie-parser": "1.4.9",
|
||||||
"@types/cors": "2.8.17",
|
"@types/cors": "2.8.19",
|
||||||
|
"@types/crypto-js": "^4.2.2",
|
||||||
"@types/express": "5.0.0",
|
"@types/express": "5.0.0",
|
||||||
|
"@types/jmespath": "^0.15.2",
|
||||||
"@types/js-yaml": "4.0.9",
|
"@types/js-yaml": "4.0.9",
|
||||||
"@types/node": "^22",
|
"@types/jsonwebtoken": "^9.0.10",
|
||||||
|
"@types/node": "^24",
|
||||||
"@types/nodemailer": "6.4.17",
|
"@types/nodemailer": "6.4.17",
|
||||||
"@types/react": "19.0.2",
|
"@types/react": "19.1.8",
|
||||||
"@types/react-dom": "19.0.2",
|
"@types/react-dom": "19.1.6",
|
||||||
"@types/semver": "7.5.8",
|
"@types/semver": "7.7.0",
|
||||||
"@types/ws": "8.5.13",
|
"@types/swagger-ui-express": "^4.1.8",
|
||||||
|
"@types/ws": "8.18.1",
|
||||||
"@types/yargs": "17.0.33",
|
"@types/yargs": "17.0.33",
|
||||||
"drizzle-kit": "0.30.1",
|
"drizzle-kit": "0.31.2",
|
||||||
"esbuild": "0.24.2",
|
"esbuild": "0.25.5",
|
||||||
"esbuild-node-externals": "1.16.0",
|
"esbuild-node-externals": "1.18.0",
|
||||||
"postcss": "^8",
|
"postcss": "^8",
|
||||||
"react-email": "3.0.4",
|
"react-email": "4.0.16",
|
||||||
"tailwindcss": "^3.4.17",
|
"tailwindcss": "^4.1.4",
|
||||||
"tsc-alias": "1.8.10",
|
"tsc-alias": "1.8.16",
|
||||||
"tsx": "4.19.2",
|
"tsx": "4.20.3",
|
||||||
"typescript": "^5",
|
"typescript": "^5",
|
||||||
"yargs": "17.7.2"
|
"typescript-eslint": "^8.35.0"
|
||||||
},
|
},
|
||||||
"overrides": {
|
"overrides": {
|
||||||
"emblor": {
|
"emblor": {
|
||||||
|
|||||||
@@ -1,7 +1,7 @@
 /** @type {import('postcss-load-config').Config} */
 const config = {
     plugins: {
-        tailwindcss: {},
+        "@tailwindcss/postcss": {},
     },
 };

[SVG diff collapsed] Logo SVG (black mark), @@ -1,22 +1,21 @@: the previous Inkscape-generated pangolin outline (sodipodi:docname noun-pangolin-1798092.svg, 400x400 viewBox, path id path46, default black fill) is replaced by a redrawn mark (width 900.82861, height 955.20648, viewBox 0 0 238.34422 252.7317, fill #000000, path id path32, layer translate(-13.119542,-5.9258171)); only the path data and Inkscape editor metadata change. Before: 2.6 KiB | After: 2.5 KiB.
[SVG diff collapsed] public/logo/pangolin_orange.svg, @@ -1,39 +1,22 @@: the same redrawn pangolin path as above replaces the old outline, with fill changed from #f97315 to #f36118 and the Inkscape editor metadata (namedview, export settings) dropped. Before: 2.6 KiB | After: 1.8 KiB.
New binary files:
  public/logo/pangolin_orange_192x192.png (11 KiB)
  public/logo/pangolin_orange_512x512.png (24 KiB)
  public/logo/pangolin_orange_96x96.png (7.4 KiB)
  public/logo/pangolin_profile_picture.png (24 KiB)
  public/logo/word_mark.png (36 KiB)
  public/logo/word_mark_black.png (34 KiB)
  public/logo/word_mark_white.png (33 KiB)
Replaced binary files:
  public/screenshots/collage.png (577 KiB -> 574 KiB)
  public/screenshots/hero.png (447 KiB -> 434 KiB)
Removed binary files: three screenshots (484 KiB, 438 KiB, 415 KiB; file names not shown in this view).
@@ -14,29 +14,39 @@ import { logIncomingMiddleware } from "./middlewares/logIncoming";
|
|||||||
import { csrfProtectionMiddleware } from "./middlewares/csrfProtection";
|
import { csrfProtectionMiddleware } from "./middlewares/csrfProtection";
|
||||||
import helmet from "helmet";
|
import helmet from "helmet";
|
||||||
|
|
||||||
const dev = process.env.ENVIRONMENT !== "prod";
|
const dev = config.isDev;
|
||||||
const externalPort = config.getRawConfig().server.external_port;
|
const externalPort = config.getRawConfig().server.external_port;
|
||||||
|
|
||||||
export function createApiServer() {
|
export function createApiServer() {
|
||||||
const apiServer = express();
|
const apiServer = express();
|
||||||
|
|
||||||
// Middleware setup
|
const trustProxy = config.getRawConfig().server.trust_proxy;
|
||||||
apiServer.set("trust proxy", 1);
|
if (trustProxy) {
|
||||||
if (dev) {
|
apiServer.set("trust proxy", trustProxy);
|
||||||
apiServer.use(
|
}
|
||||||
cors({
|
|
||||||
origin: `http://localhost:${config.getRawConfig().server.next_port}`,
|
|
||||||
credentials: true
|
|
||||||
})
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
const corsOptions = {
|
|
||||||
origin: config.getRawConfig().app.base_url,
|
|
||||||
methods: ["GET", "POST", "PUT", "DELETE", "PATCH"],
|
|
||||||
allowedHeaders: ["Content-Type", "X-CSRF-Token"]
|
|
||||||
};
|
|
||||||
|
|
||||||
apiServer.use(cors(corsOptions));
|
const corsConfig = config.getRawConfig().server.cors;
|
||||||
|
|
||||||
|
const options = {
|
||||||
|
...(corsConfig?.origins
|
||||||
|
? { origin: corsConfig.origins }
|
||||||
|
: {
|
||||||
|
origin: (origin: any, callback: any) => {
|
||||||
|
callback(null, true);
|
||||||
|
}
|
||||||
|
}),
|
||||||
|
...(corsConfig?.methods && { methods: corsConfig.methods }),
|
||||||
|
...(corsConfig?.allowed_headers && {
|
||||||
|
allowedHeaders: corsConfig.allowed_headers
|
||||||
|
}),
|
||||||
|
credentials: !(corsConfig?.credentials === false)
|
||||||
|
};
|
||||||
|
|
||||||
|
logger.debug("Using CORS options", options);
|
||||||
|
|
||||||
|
apiServer.use(cors(options));
|
||||||
|
|
||||||
|
if (!dev) {
|
||||||
apiServer.use(helmet());
|
apiServer.use(helmet());
|
||||||
apiServer.use(csrfProtectionMiddleware);
|
apiServer.use(csrfProtectionMiddleware);
|
||||||
}
|
}
|
||||||
@@ -47,7 +57,8 @@ export function createApiServer() {
|
|||||||
if (!dev) {
|
if (!dev) {
|
||||||
apiServer.use(
|
apiServer.use(
|
||||||
rateLimitMiddleware({
|
rateLimitMiddleware({
|
||||||
windowMin: config.getRawConfig().rate_limits.global.window_minutes,
|
windowMin:
|
||||||
|
config.getRawConfig().rate_limits.global.window_minutes,
|
||||||
max: config.getRawConfig().rate_limits.global.max_requests,
|
max: config.getRawConfig().rate_limits.global.max_requests,
|
||||||
type: "IP_AND_PATH"
|
type: "IP_AND_PATH"
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -1,11 +1,14 @@
|
|||||||
import { Request } from "express";
|
import { Request } from "express";
|
||||||
import { db } from "@server/db";
|
import { db } from "@server/db";
|
||||||
import { userActions, roleActions, userOrgs } from "@server/db/schema";
|
import { userActions, roleActions, userOrgs } from "@server/db";
|
||||||
import { and, eq } from "drizzle-orm";
|
import { and, eq } from "drizzle-orm";
|
||||||
import createHttpError from "http-errors";
|
import createHttpError from "http-errors";
|
||||||
import HttpCode from "@server/types/HttpCode";
|
import HttpCode from "@server/types/HttpCode";
|
||||||
|
|
||||||
export enum ActionsEnum {
|
export enum ActionsEnum {
|
||||||
|
createOrgUser = "createOrgUser",
|
||||||
|
listOrgs = "listOrgs",
|
||||||
|
listUserOrgs = "listUserOrgs",
|
||||||
createOrg = "createOrg",
|
createOrg = "createOrg",
|
||||||
// deleteOrg = "deleteOrg",
|
// deleteOrg = "deleteOrg",
|
||||||
getOrg = "getOrg",
|
getOrg = "getOrg",
|
||||||
@@ -32,6 +35,8 @@ export enum ActionsEnum {
|
|||||||
listRoles = "listRoles",
|
listRoles = "listRoles",
|
||||||
updateRole = "updateRole",
|
updateRole = "updateRole",
|
||||||
inviteUser = "inviteUser",
|
inviteUser = "inviteUser",
|
||||||
|
listInvitations = "listInvitations",
|
||||||
|
removeInvitation = "removeInvitation",
|
||||||
removeUser = "removeUser",
|
removeUser = "removeUser",
|
||||||
listUsers = "listUsers",
|
listUsers = "listUsers",
|
||||||
listSiteRoles = "listSiteRoles",
|
listSiteRoles = "listSiteRoles",
|
||||||
@@ -51,13 +56,36 @@ export enum ActionsEnum {
|
|||||||
// removeUserAction = "removeUserAction",
|
// removeUserAction = "removeUserAction",
|
||||||
// removeUserSite = "removeUserSite",
|
// removeUserSite = "removeUserSite",
|
||||||
getOrgUser = "getOrgUser",
|
getOrgUser = "getOrgUser",
|
||||||
"setResourcePassword" = "setResourcePassword",
|
setResourcePassword = "setResourcePassword",
|
||||||
"setResourcePincode" = "setResourcePincode",
|
setResourcePincode = "setResourcePincode",
|
||||||
"setResourceWhitelist" = "setResourceWhitelist",
|
setResourceWhitelist = "setResourceWhitelist",
|
||||||
"getResourceWhitelist" = "getResourceWhitelist",
|
getResourceWhitelist = "getResourceWhitelist",
|
||||||
"generateAccessToken" = "generateAccessToken",
|
generateAccessToken = "generateAccessToken",
|
||||||
"deleteAcessToken" = "deleteAcessToken",
|
deleteAcessToken = "deleteAcessToken",
|
||||||
"listAccessTokens" = "listAccessTokens"
|
listAccessTokens = "listAccessTokens",
|
||||||
|
createResourceRule = "createResourceRule",
|
||||||
|
deleteResourceRule = "deleteResourceRule",
|
||||||
|
listResourceRules = "listResourceRules",
|
||||||
|
updateResourceRule = "updateResourceRule",
|
||||||
|
listOrgDomains = "listOrgDomains",
|
||||||
|
createNewt = "createNewt",
|
||||||
|
createIdp = "createIdp",
|
||||||
|
updateIdp = "updateIdp",
|
||||||
|
deleteIdp = "deleteIdp",
|
||||||
|
listIdps = "listIdps",
|
||||||
|
getIdp = "getIdp",
|
||||||
|
createIdpOrg = "createIdpOrg",
|
||||||
|
deleteIdpOrg = "deleteIdpOrg",
|
||||||
|
listIdpOrgs = "listIdpOrgs",
|
||||||
|
updateIdpOrg = "updateIdpOrg",
|
||||||
|
checkOrgId = "checkOrgId",
|
||||||
|
createApiKey = "createApiKey",
|
||||||
|
deleteApiKey = "deleteApiKey",
|
||||||
|
setApiKeyActions = "setApiKeyActions",
|
||||||
|
setApiKeyOrgs = "setApiKeyOrgs",
|
||||||
|
listApiKeyActions = "listApiKeyActions",
|
||||||
|
listApiKeys = "listApiKeys",
|
||||||
|
getApiKey = "getApiKey"
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function checkUserActionPermission(
|
export async function checkUserActionPermission(
|
||||||
|
|||||||
45
server/auth/canUserAccessResource.ts
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
import { db } from "@server/db";
|
||||||
|
import { and, eq } from "drizzle-orm";
|
||||||
|
import { roleResources, userResources } from "@server/db";
|
||||||
|
|
||||||
|
export async function canUserAccessResource({
|
||||||
|
userId,
|
||||||
|
resourceId,
|
||||||
|
roleId
|
||||||
|
}: {
|
||||||
|
userId: string;
|
||||||
|
resourceId: number;
|
||||||
|
roleId: number;
|
||||||
|
}): Promise<boolean> {
|
||||||
|
const roleResourceAccess = await db
|
||||||
|
.select()
|
||||||
|
.from(roleResources)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(roleResources.resourceId, resourceId),
|
||||||
|
eq(roleResources.roleId, roleId)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (roleResourceAccess.length > 0) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const userResourceAccess = await db
|
||||||
|
.select()
|
||||||
|
.from(userResources)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(userResources.userId, userId),
|
||||||
|
eq(userResources.resourceId, resourceId)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (userResourceAccess.length > 0) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
@@ -1,5 +1,5 @@
|
|||||||
import db from "@server/db";
|
import { db } from "@server/db";
|
||||||
import { UserInvite, userInvites } from "@server/db/schema";
|
import { UserInvite, userInvites } from "@server/db";
|
||||||
import { isWithinExpirationDate } from "oslo";
|
import { isWithinExpirationDate } from "oslo";
|
||||||
import { verifyPassword } from "./password";
|
import { verifyPassword } from "./password";
|
||||||
import { eq } from "drizzle-orm";
|
import { eq } from "drizzle-orm";
|
||||||
|
|||||||
@@ -1,118 +0,0 @@ (entire file removed)
import {
    encodeBase32LowerCaseNoPadding,
    encodeHexLowerCase,
} from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";
import { Session, sessions, User, users } from "@server/db/schema";
import db from "@server/db";
import { eq } from "drizzle-orm";
import config from "@server/lib/config";
import type { RandomReader } from "@oslojs/crypto/random";
import { generateRandomString } from "@oslojs/crypto/random";

export const SESSION_COOKIE_NAME = config.getRawConfig().server.session_cookie_name;
export const SESSION_COOKIE_EXPIRES = 1000 * 60 * 60 * 24 * 30;
export const SECURE_COOKIES = config.getRawConfig().server.secure_cookies;
export const COOKIE_DOMAIN = "." + config.getBaseDomain();

export function generateSessionToken(): string {
    const bytes = new Uint8Array(20);
    crypto.getRandomValues(bytes);
    const token = encodeBase32LowerCaseNoPadding(bytes);
    return token;
}

export async function createSession(
    token: string,
    userId: string,
): Promise<Session> {
    const sessionId = encodeHexLowerCase(
        sha256(new TextEncoder().encode(token)),
    );
    const session: Session = {
        sessionId: sessionId,
        userId,
        expiresAt: new Date(Date.now() + SESSION_COOKIE_EXPIRES).getTime(),
    };
    await db.insert(sessions).values(session);
    return session;
}

export async function validateSessionToken(
    token: string,
): Promise<SessionValidationResult> {
    const sessionId = encodeHexLowerCase(
        sha256(new TextEncoder().encode(token)),
    );
    const result = await db
        .select({ user: users, session: sessions })
        .from(sessions)
        .innerJoin(users, eq(sessions.userId, users.userId))
        .where(eq(sessions.sessionId, sessionId));
    if (result.length < 1) {
        return { session: null, user: null };
    }
    const { user, session } = result[0];
    if (Date.now() >= session.expiresAt) {
        await db
            .delete(sessions)
            .where(eq(sessions.sessionId, session.sessionId));
        return { session: null, user: null };
    }
    if (Date.now() >= session.expiresAt - SESSION_COOKIE_EXPIRES / 2) {
        session.expiresAt = new Date(
            Date.now() + SESSION_COOKIE_EXPIRES,
        ).getTime();
        await db
            .update(sessions)
            .set({
                expiresAt: session.expiresAt,
            })
            .where(eq(sessions.sessionId, session.sessionId));
    }
    return { session, user };
}

export async function invalidateSession(sessionId: string): Promise<void> {
    await db.delete(sessions).where(eq(sessions.sessionId, sessionId));
}

export async function invalidateAllSessions(userId: string): Promise<void> {
    await db.delete(sessions).where(eq(sessions.userId, userId));
}

export function serializeSessionCookie(token: string): string {
    if (SECURE_COOKIES) {
        return `${SESSION_COOKIE_NAME}=${token}; HttpOnly; SameSite=Strict; Max-Age=${SESSION_COOKIE_EXPIRES}; Path=/; Secure; Domain=${COOKIE_DOMAIN}`;
    } else {
        return `${SESSION_COOKIE_NAME}=${token}; HttpOnly; SameSite=Strict; Max-Age=${SESSION_COOKIE_EXPIRES}; Path=/; Domain=${COOKIE_DOMAIN}`;
    }
}

export function createBlankSessionTokenCookie(): string {
    if (SECURE_COOKIES) {
        return `${SESSION_COOKIE_NAME}=; HttpOnly; SameSite=Strict; Max-Age=0; Path=/; Secure; Domain=${COOKIE_DOMAIN}`;
    } else {
        return `${SESSION_COOKIE_NAME}=; HttpOnly; SameSite=Strict; Max-Age=0; Path=/; Domain=${COOKIE_DOMAIN}`;
    }
}

const random: RandomReader = {
    read(bytes: Uint8Array): void {
        crypto.getRandomValues(bytes);
    },
};

export function generateId(length: number): string {
    const alphabet = "abcdefghijklmnopqrstuvwxyz0123456789";
    return generateRandomString(random, alphabet, length);
}

export function generateIdFromEntropySize(size: number): string {
    const buffer = crypto.getRandomValues(new Uint8Array(size));
    return encodeBase32LowerCaseNoPadding(buffer);
}

export type SessionValidationResult =
    | { session: Session; user: User }
    | { session: null; user: null };
@@ -1,5 +1,5 @@
 import { db } from '@server/db';
-import { limitsTable } from '@server/db/schema';
+import { limitsTable } from '@server/db';
 import { and, eq } from 'drizzle-orm';
 import createHttpError from 'http-errors';
 import HttpCode from '@server/types/HttpCode';
@@ -37,4 +37,4 @@ export async function checkOrgLimit({ orgId, limitName, currentValue, increment
         }
         throw createHttpError(HttpCode.INTERNAL_SERVER_ERROR, 'Unknown error occurred while checking limit');
     }
 }
@@ -3,8 +3,8 @@ import z from "zod";
 export const passwordSchema = z
     .string()
     .min(8, { message: "Password must be at least 8 characters long" })
-    .max(64, { message: "Password must be at most 64 characters long" })
-    .regex(/^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[#?!@$%^&*-]).*$/, {
+    .max(128, { message: "Password must be at most 128 characters long" })
+    .regex(/^(?=.*?[A-Z])(?=.*?[a-z])(?=.*?[0-9])(?=.*?[~!`@#$%^&*()_\-+={}[\]|\\:;"'<>,.\/?]).*$/, {
         message: `Your password must meet the following conditions:
     at least one uppercase English letter,
     at least one lowercase English letter,
@@ -1,5 +1,5 @@
-import db from "@server/db";
-import { resourceOtp } from "@server/db/schema";
+import { db } from "@server/db";
+import { resourceOtp } from "@server/db";
 import { and, eq } from "drizzle-orm";
 import { createDate, isWithinExpirationDate, TimeSpan } from "oslo";
 import { alphabet, generateRandomString, sha256 } from "oslo/crypto";
@@ -26,7 +26,7 @@ export async function sendResourceOtpEmail(
         }),
         {
             to: email,
-            from: config.getRawConfig().email?.no_reply,
+            from: config.getNoReplyEmail(),
             subject: `Your one-time code to access ${resourceName}`
         }
     );
@@ -1,7 +1,7 @@
 import { TimeSpan, createDate } from "oslo";
 import { generateRandomString, alphabet } from "oslo/crypto";
-import db from "@server/db";
-import { users, emailVerificationCodes } from "@server/db/schema";
+import { db } from "@server/db";
+import { users, emailVerificationCodes } from "@server/db";
 import { eq } from "drizzle-orm";
 import { sendEmail } from "@server/emails";
 import config from "@server/lib/config";
@@ -17,11 +17,11 @@ export async function sendEmailVerificationCode(
         VerifyEmail({
             username: email,
             verificationCode: code,
-            verifyLink: `${config.getRawConfig().app.base_url}/auth/verify-email`
+            verifyLink: `${config.getRawConfig().app.dashboard_url}/auth/verify-email`
         }),
         {
             to: email,
-            from: config.getRawConfig().email?.no_reply,
+            from: config.getNoReplyEmail(),
             subject: "Verify your email address"
         }
     );
@@ -1,19 +1,31 @@
 import {
     encodeBase32LowerCaseNoPadding,
-    encodeHexLowerCase,
+    encodeHexLowerCase
 } from "@oslojs/encoding";
 import { sha256 } from "@oslojs/crypto/sha2";
-import { Session, sessions, User, users } from "@server/db/schema";
-import db from "@server/db";
-import { eq } from "drizzle-orm";
+import {
+    resourceSessions,
+    Session,
+    sessions,
+    User,
+    users
+} from "@server/db";
+import { db } from "@server/db";
+import { eq, inArray } from "drizzle-orm";
 import config from "@server/lib/config";
 import type { RandomReader } from "@oslojs/crypto/random";
 import { generateRandomString } from "@oslojs/crypto/random";
+import logger from "@server/logger";
 
-export const SESSION_COOKIE_NAME = config.getRawConfig().server.session_cookie_name;
-export const SESSION_COOKIE_EXPIRES = 1000 * 60 * 60 * 24 * 30;
-export const SECURE_COOKIES = config.getRawConfig().server.secure_cookies;
-export const COOKIE_DOMAIN = "." + config.getBaseDomain();
+export const SESSION_COOKIE_NAME =
+    config.getRawConfig().server.session_cookie_name;
+export const SESSION_COOKIE_EXPIRES =
+    1000 *
+    60 *
+    60 *
+    config.getRawConfig().server.dashboard_session_length_hours;
+export const COOKIE_DOMAIN =
+    "." + new URL(config.getRawConfig().app.dashboard_url).hostname;
 
 export function generateSessionToken(): string {
     const bytes = new Uint8Array(20);
@@ -24,25 +36,25 @@ export function generateSessionToken(): string {
 
 export async function createSession(
     token: string,
-    userId: string,
+    userId: string
 ): Promise<Session> {
     const sessionId = encodeHexLowerCase(
-        sha256(new TextEncoder().encode(token)),
+        sha256(new TextEncoder().encode(token))
     );
     const session: Session = {
         sessionId: sessionId,
         userId,
-        expiresAt: new Date(Date.now() + SESSION_COOKIE_EXPIRES).getTime(),
+        expiresAt: new Date(Date.now() + SESSION_COOKIE_EXPIRES).getTime()
     };
     await db.insert(sessions).values(session);
     return session;
 }
 
 export async function validateSessionToken(
-    token: string,
+    token: string
 ): Promise<SessionValidationResult> {
     const sessionId = encodeHexLowerCase(
-        sha256(new TextEncoder().encode(token)),
+        sha256(new TextEncoder().encode(token))
     );
     const result = await db
         .select({ user: users, session: sessions })
@@ -61,46 +73,84 @@ export async function validateSessionToken(
     }
     if (Date.now() >= session.expiresAt - SESSION_COOKIE_EXPIRES / 2) {
         session.expiresAt = new Date(
-            Date.now() + SESSION_COOKIE_EXPIRES,
+            Date.now() + SESSION_COOKIE_EXPIRES
         ).getTime();
-        await db
-            .update(sessions)
-            .set({
-                expiresAt: session.expiresAt,
-            })
-            .where(eq(sessions.sessionId, session.sessionId));
+        await db.transaction(async (trx) => {
+            await trx
+                .update(sessions)
+                .set({
+                    expiresAt: session.expiresAt
+                })
+                .where(eq(sessions.sessionId, session.sessionId));
+
+            await trx
+                .update(resourceSessions)
+                .set({
+                    expiresAt: session.expiresAt
+                })
+                .where(eq(resourceSessions.userSessionId, session.sessionId));
+        });
     }
     return { session, user };
 }
 
 export async function invalidateSession(sessionId: string): Promise<void> {
-    await db.delete(sessions).where(eq(sessions.sessionId, sessionId));
-}
-
-export async function invalidateAllSessions(userId: string): Promise<void> {
-    await db.delete(sessions).where(eq(sessions.userId, userId));
-}
-
-export function serializeSessionCookie(token: string): string {
-    if (SECURE_COOKIES) {
-        return `${SESSION_COOKIE_NAME}=${token}; HttpOnly; SameSite=Strict; Max-Age=${SESSION_COOKIE_EXPIRES}; Path=/; Secure; Domain=${COOKIE_DOMAIN}`;
-    } else {
-        return `${SESSION_COOKIE_NAME}=${token}; HttpOnly; SameSite=Strict; Max-Age=${SESSION_COOKIE_EXPIRES}; Path=/; Domain=${COOKIE_DOMAIN}`;
-    }
-}
-
-export function createBlankSessionTokenCookie(): string {
-    if (SECURE_COOKIES) {
-        return `${SESSION_COOKIE_NAME}=; HttpOnly; SameSite=Strict; Max-Age=0; Path=/; Secure; Domain=${COOKIE_DOMAIN}`;
-    } else {
-        return `${SESSION_COOKIE_NAME}=; HttpOnly; SameSite=Strict; Max-Age=0; Path=/; Domain=${COOKIE_DOMAIN}`;
-    }
-}
+    try {
+        await db.transaction(async (trx) => {
+            await trx
+                .delete(resourceSessions)
+                .where(eq(resourceSessions.userSessionId, sessionId));
+            await trx.delete(sessions).where(eq(sessions.sessionId, sessionId));
+        });
+    } catch (e) {
+        logger.error("Failed to invalidate session", e);
+    }
+}
+
+export async function invalidateAllSessions(userId: string): Promise<void> {
+    try {
+        await db.transaction(async (trx) => {
+            const userSessions = await trx
+                .select()
+                .from(sessions)
+                .where(eq(sessions.userId, userId));
+            await trx.delete(resourceSessions).where(
+                inArray(
+                    resourceSessions.userSessionId,
+                    userSessions.map((s) => s.sessionId)
+                )
+            );
+            await trx.delete(sessions).where(eq(sessions.userId, userId));
+        });
+    } catch (e) {
+        logger.error("Failed to all invalidate user sessions", e);
+    }
+}
+
+export function serializeSessionCookie(
+    token: string,
+    isSecure: boolean,
+    expiresAt: Date
+): string {
+    if (isSecure) {
+        return `${SESSION_COOKIE_NAME}=${token}; HttpOnly; SameSite=Lax; Expires=${expiresAt.toUTCString()}; Path=/; Secure; Domain=${COOKIE_DOMAIN}`;
+    } else {
+        return `${SESSION_COOKIE_NAME}=${token}; HttpOnly; SameSite=Lax; Expires=${expiresAt.toUTCString()}; Path=/;`;
+    }
+}
+
+export function createBlankSessionTokenCookie(isSecure: boolean): string {
+    if (isSecure) {
+        return `${SESSION_COOKIE_NAME}=; HttpOnly; SameSite=Lax; Max-Age=0; Path=/; Secure; Domain=${COOKIE_DOMAIN}`;
+    } else {
+        return `${SESSION_COOKIE_NAME}=; HttpOnly; SameSite=Lax; Max-Age=0; Path=/;`;
+    }
+}
 
 const random: RandomReader = {
     read(bytes: Uint8Array): void {
         crypto.getRandomValues(bytes);
-    },
+    }
 };
 
 export function generateId(length: number): string {
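For orientation, a minimal sketch of how the reworked cookie helpers above might be called when issuing a dashboard session. The Express-style response object, the handler name, and the module path are assumptions; only `generateSessionToken`, `createSession`, and the new `serializeSessionCookie(token, isSecure, expiresAt)` signature come from the diff itself.

```typescript
// Hypothetical login flow; module path and Express usage are assumed.
import { Response } from "express";
import {
    generateSessionToken,
    createSession,
    serializeSessionCookie
} from "@server/auth/sessions/app";

async function issueDashboardSession(res: Response, userId: string) {
    const token = generateSessionToken();                 // random 20-byte token
    const session = await createSession(token, userId);   // stored under sha256(token)
    // The cookie now carries an explicit Expires date instead of a fixed Max-Age.
    res.setHeader(
        "Set-Cookie",
        serializeSessionCookie(token, true, new Date(session.expiresAt))
    );
    return session;
}
```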
@@ -2,8 +2,8 @@ import {
     encodeHexLowerCase,
 } from "@oslojs/encoding";
 import { sha256 } from "@oslojs/crypto/sha2";
-import { Newt, newts, newtSessions, NewtSession } from "@server/db/schema";
-import db from "@server/db";
+import { Newt, newts, newtSessions, NewtSession } from "@server/db";
+import { db } from "@server/db";
 import { eq } from "drizzle-orm";
 
 export const EXPIRES = 1000 * 60 * 60 * 24 * 30;
@@ -1,24 +1,24 @@
 import { encodeHexLowerCase } from "@oslojs/encoding";
 import { sha256 } from "@oslojs/crypto/sha2";
-import { resourceSessions, ResourceSession } from "@server/db/schema";
-import db from "@server/db";
+import { resourceSessions, ResourceSession } from "@server/db";
+import { db } from "@server/db";
 import { eq, and } from "drizzle-orm";
 import config from "@server/lib/config";
 
 export const SESSION_COOKIE_NAME =
-    config.getRawConfig().server.resource_session_cookie_name;
-export const SESSION_COOKIE_EXPIRES = 1000 * 60 * 60 * 24 * 30;
-export const SECURE_COOKIES = config.getRawConfig().server.secure_cookies;
-export const COOKIE_DOMAIN = "." + config.getBaseDomain();
+    config.getRawConfig().server.session_cookie_name;
+export const SESSION_COOKIE_EXPIRES =
+    1000 * 60 * 60 * config.getRawConfig().server.resource_session_length_hours;
 
 export async function createResourceSession(opts: {
     token: string;
     resourceId: number;
-    passwordId?: number;
-    pincodeId?: number;
-    whitelistId?: number;
-    accessTokenId?: string;
-    usedOtp?: boolean;
+    isRequestToken?: boolean;
+    passwordId?: number | null;
+    pincodeId?: number | null;
+    userSessionId?: string | null;
+    whitelistId?: number | null;
+    accessTokenId?: string | null;
     doNotExtend?: boolean;
     expiresAt?: number | null;
     sessionLength?: number | null;
@@ -27,7 +27,8 @@ export async function createResourceSession(opts: {
         !opts.passwordId &&
         !opts.pincodeId &&
         !opts.whitelistId &&
-        !opts.accessTokenId
+        !opts.accessTokenId &&
+        !opts.userSessionId
     ) {
         throw new Error("Auth method must be provided");
     }
@@ -47,7 +48,9 @@ export async function createResourceSession(opts: {
         pincodeId: opts.pincodeId || null,
         whitelistId: opts.whitelistId || null,
         doNotExtend: opts.doNotExtend || false,
-        accessTokenId: opts.accessTokenId || null
+        accessTokenId: opts.accessTokenId || null,
+        isRequestToken: opts.isRequestToken || false,
+        userSessionId: opts.userSessionId || null
     };
 
     await db.insert(resourceSessions).values(session);
@@ -162,22 +165,34 @@ export async function invalidateAllSessions(
 
 export function serializeResourceSessionCookie(
     cookieName: string,
-    token: string
+    domain: string,
+    token: string,
+    isHttp: boolean = false,
+    expiresAt?: Date
 ): string {
-    if (SECURE_COOKIES) {
-        return `${cookieName}=${token}; HttpOnly; SameSite=Strict; Max-Age=${SESSION_COOKIE_EXPIRES}; Path=/; Secure; Domain=${COOKIE_DOMAIN}`;
+    const now = new Date().getTime();
+    if (!isHttp) {
+        if (expiresAt === undefined) {
+            return `${cookieName}_s.${now}=${token}; HttpOnly; SameSite=Lax; Path=/; Secure; Domain=${"." + domain}`;
+        }
+        return `${cookieName}_s.${now}=${token}; HttpOnly; SameSite=Lax; Expires=${expiresAt.toUTCString()}; Path=/; Secure; Domain=${"." + domain}`;
     } else {
-        return `${cookieName}=${token}; HttpOnly; SameSite=Strict; Max-Age=${SESSION_COOKIE_EXPIRES}; Path=/; Domain=${COOKIE_DOMAIN}`;
+        if (expiresAt === undefined) {
+            return `${cookieName}.${now}=${token}; HttpOnly; SameSite=Lax; Path=/; Domain=${"." + domain}`;
+        }
+        return `${cookieName}.${now}=${token}; HttpOnly; SameSite=Lax; Expires=${expiresAt.toUTCString()}; Path=/; Domain=${"." + domain}`;
     }
 }
 
 export function createBlankResourceSessionTokenCookie(
-    cookieName: string
+    cookieName: string,
+    domain: string,
+    isHttp: boolean = false
 ): string {
-    if (SECURE_COOKIES) {
-        return `${cookieName}=; HttpOnly; SameSite=Strict; Max-Age=0; Path=/; Secure; Domain=${COOKIE_DOMAIN}`;
+    if (!isHttp) {
+        return `${cookieName}_s=; HttpOnly; SameSite=Lax; Max-Age=0; Path=/; Secure; Domain=${"." + domain}`;
     } else {
-        return `${cookieName}=; HttpOnly; SameSite=Strict; Max-Age=0; Path=/; Domain=${COOKIE_DOMAIN}`;
+        return `${cookieName}=; HttpOnly; SameSite=Lax; Max-Age=0; Path=/; Domain=${"." + domain}`;
     }
 }
 
@@ -1,6 +1,6 @@
 import { verify } from "@node-rs/argon2";
-import db from "@server/db";
-import { twoFactorBackupCodes } from "@server/db/schema";
+import { db } from "@server/db";
+import { twoFactorBackupCodes } from "@server/db";
 import { eq } from "drizzle-orm";
 import { decodeHex } from "oslo/encoding";
 import { TOTPController } from "oslo/otp";
server/auth/verifyResourceAccessToken.ts (new file, 117 lines)
@@ -0,0 +1,117 @@
import { db } from "@server/db";
import {
    Resource,
    ResourceAccessToken,
    resourceAccessToken,
    resources
} from "@server/db";
import { and, eq } from "drizzle-orm";
import { isWithinExpirationDate } from "oslo";
import { verifyPassword } from "./password";
import { encodeHexLowerCase } from "@oslojs/encoding";
import { sha256 } from "@oslojs/crypto/sha2";

export async function verifyResourceAccessToken({
    accessToken,
    accessTokenId,
    resourceId
}: {
    accessToken: string;
    accessTokenId?: string;
    resourceId?: number; // IF THIS IS NOT SET, THE TOKEN IS VALID FOR ALL RESOURCES
}): Promise<{
    valid: boolean;
    error?: string;
    tokenItem?: ResourceAccessToken;
    resource?: Resource;
}> {
    const accessTokenHash = encodeHexLowerCase(
        sha256(new TextEncoder().encode(accessToken))
    );

    let tokenItem: ResourceAccessToken | undefined;
    let resource: Resource | undefined;

    if (!accessTokenId) {
        const [res] = await db
            .select()
            .from(resourceAccessToken)
            .where(and(eq(resourceAccessToken.tokenHash, accessTokenHash)))
            .innerJoin(
                resources,
                eq(resourceAccessToken.resourceId, resources.resourceId)
            );

        tokenItem = res?.resourceAccessToken;
        resource = res?.resources;
    } else {
        const [res] = await db
            .select()
            .from(resourceAccessToken)
            .where(and(eq(resourceAccessToken.accessTokenId, accessTokenId)))
            .innerJoin(
                resources,
                eq(resourceAccessToken.resourceId, resources.resourceId)
            );

        if (res && res.resourceAccessToken) {
            if (res.resourceAccessToken.tokenHash?.startsWith("$argon")) {
                const validCode = await verifyPassword(
                    accessToken,
                    res.resourceAccessToken.tokenHash
                );

                if (!validCode) {
                    return {
                        valid: false,
                        error: "Invalid access token"
                    };
                }
            } else {
                const tokenHash = encodeHexLowerCase(
                    sha256(new TextEncoder().encode(accessToken))
                );

                if (res.resourceAccessToken.tokenHash !== tokenHash) {
                    return {
                        valid: false,
                        error: "Invalid access token"
                    };
                }
            }
        }

        tokenItem = res?.resourceAccessToken;
        resource = res?.resources;
    }

    if (!tokenItem || !resource) {
        return {
            valid: false,
            error: "Access token does not exist for resource"
        };
    }

    if (
        tokenItem.expiresAt &&
        !isWithinExpirationDate(new Date(tokenItem.expiresAt))
    ) {
        return {
            valid: false,
            error: "Access token has expired"
        };
    }

    if (resourceId && resource.resourceId !== resourceId) {
        return {
            valid: false,
            error: "Resource ID does not match"
        };
    }

    return {
        valid: true,
        tokenItem,
        resource
    };
}
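A short usage sketch of the helper added above. The caller name and error handling are illustrative assumptions; the function's parameters and return shape are taken directly from the new file.

```typescript
// Hypothetical caller: reject the request unless the access token checks out.
import { verifyResourceAccessToken } from "@server/auth/verifyResourceAccessToken";

async function authorize(accessToken: string, resourceId: number) {
    const { valid, error, tokenItem, resource } =
        await verifyResourceAccessToken({ accessToken, resourceId });
    if (!valid) {
        throw new Error(error ?? "Invalid access token");
    }
    // tokenItem and resource are populated whenever valid is true
    return { tokenItem, resource };
}
```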
server/db/README.md (new file, 72 lines)
@@ -0,0 +1,72 @@
# Database

Pangolin can use a Postgres or SQLite database to store its data.

## Development

### Postgres

To use Postgres, edit `server/db/index.ts` to export all from `server/db/pg/index.ts`:

```typescript
export * from "./pg";
```

Make sure you have a valid config file with a connection string:

```yaml
postgres:
    connection_string: postgresql://postgres:postgres@localhost:5432
```

You can run an ephemeral Postgres database for local development using Docker:

```bash
docker run -d \
    --name postgres \
    --rm \
    -p 5432:5432 \
    -e POSTGRES_PASSWORD=postgres \
    -v $(mktemp -d):/var/lib/postgresql/data \
    postgres:17
```

### Schema

`server/db/pg/schema.ts` and `server/db/sqlite/schema.ts` contain the database schema definitions. These need to be kept in sync with each other.

Stick to common data types and avoid Postgres-specific features to ensure compatibility with SQLite.

### SQLite

To use SQLite, edit `server/db/index.ts` to export all from `server/db/sqlite/index.ts`:

```typescript
export * from "./sqlite";
```

No edits to the config are needed. If you keep the Postgres config, it will be ignored.

## Generate and Push Migrations

Ensure drizzle-kit is installed.

### Postgres

You must have a connection string in your config file, as shown above.

```bash
npm run db:pg:generate
npm run db:pg:push
```

### SQLite

```bash
npm run db:sqlite:generate
npm run db:sqlite:push
```

## Build Time

There is a dockerfile for each database type. The dockerfile swaps out the `server/db/index.ts` file to use the correct database type.
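To make the README's "keep the schemas in sync" point concrete, here is a hedged sketch of the same logical table declared once per dialect. The `examples` table and its columns are purely illustrative; only the two file paths come from the README, and the SQLite side uses drizzle's `sqlite-core` column builders rather than the `pg-core` ones shown in the new schema file below.

```typescript
// Illustrative only: one logical table, two dialect-specific definitions.

// server/db/pg/schema.ts style
import { pgTable, varchar, bigint } from "drizzle-orm/pg-core";
export const examples = pgTable("examples", {
    exampleId: varchar("exampleId").primaryKey(),
    createdAt: bigint("createdAt", { mode: "number" }).notNull()
});

// server/db/sqlite/schema.ts style (renamed here only to avoid a clash)
import { sqliteTable, text, integer } from "drizzle-orm/sqlite-core";
export const examplesSqlite = sqliteTable("examples", {
    exampleId: text("exampleId").primaryKey(),
    createdAt: integer("createdAt", { mode: "number" }).notNull()
});
```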
@@ -1,23 +0,0 @@ (entire file removed)
import { drizzle } from "drizzle-orm/better-sqlite3";
import Database from "better-sqlite3";
import * as schema from "@server/db/schema";
import path from "path";
import fs from "fs/promises";
import { APP_PATH } from "@server/lib/consts";

export const location = path.join(APP_PATH, "db", "db.sqlite");
export const exists = await checkFileExists(location);

const sqlite = new Database(location);
export const db = drizzle(sqlite, { schema });

export default db;

async function checkFileExists(filePath: string): Promise<boolean> {
    try {
        await fs.access(filePath);
        return true;
    } catch {
        return false;
    }
}
@@ -1,7 +1,7 @@
 import { join } from "path";
 import { readFileSync } from "fs";
 import { db } from "@server/db";
-import { exitNodes, sites } from "./schema";
+import { exitNodes, sites } from "@server/db";
 import { eq, and } from "drizzle-orm";
 import { __DIRNAME } from "@server/lib/consts";
 
server/db/pg/driver.ts (new file, 39 lines)
@@ -0,0 +1,39 @@
import { drizzle as DrizzlePostgres } from "drizzle-orm/node-postgres";
import { readConfigFile } from "@server/lib/readConfigFile";
import { withReplicas } from "drizzle-orm/pg-core";

function createDb() {
    const config = readConfigFile();

    if (!config.postgres) {
        throw new Error(
            "Postgres configuration is missing in the configuration file."
        );
    }

    const connectionString = config.postgres?.connection_string;
    const replicaConnections = config.postgres?.replicas || [];

    if (!connectionString) {
        throw new Error(
            "A primary db connection string is required in the configuration file."
        );
    }

    const primary = DrizzlePostgres(connectionString);
    const replicas = [];

    if (!replicaConnections.length) {
        replicas.push(primary);
    } else {
        for (const conn of replicaConnections) {
            const replica = DrizzlePostgres(conn.connection_string);
            replicas.push(replica);
        }
    }

    return withReplicas(primary, replicas as any);
}

export const db = createDb();
export default db;
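The driver above reads a `postgres` block from the config file. A hedged sketch of the shape it expects follows; the field names are taken from the `createDb()` code and the README, but the full config schema may define more than this.

```typescript
// Assumed shape of the config consumed by createDb(); illustrative only.
type PostgresConfig = {
    connection_string: string; // primary database, required
    replicas?: {
        connection_string: string; // optional read replicas
    }[];
};
```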
server/db/pg/index.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
export * from "./driver";
export * from "./schema";
server/db/pg/migrate.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
import { migrate } from "drizzle-orm/node-postgres/migrator";
import db from "./driver";
import path from "path";

const migrationsFolder = path.join("server/migrations");

const runMigrations = async () => {
    console.log("Running migrations...");
    try {
        await migrate(db as any, {
            migrationsFolder: migrationsFolder
        });
        console.log("Migrations completed successfully.");
    } catch (error) {
        console.error("Error running migrations:", error);
        process.exit(1);
    }
};

runMigrations();
server/db/pg/schema.ts (new file, 532 lines)
@@ -0,0 +1,532 @@
import {
    pgTable,
    serial,
    varchar,
    boolean,
    integer,
    bigint,
    real
} from "drizzle-orm/pg-core";
import { InferSelectModel } from "drizzle-orm";

export const domains = pgTable("domains", {
    domainId: varchar("domainId").primaryKey(),
    baseDomain: varchar("baseDomain").notNull(),
    configManaged: boolean("configManaged").notNull().default(false)
});

export const orgs = pgTable("orgs", {
    orgId: varchar("orgId").primaryKey(),
    name: varchar("name").notNull()
});

export const orgDomains = pgTable("orgDomains", {
    orgId: varchar("orgId")
        .notNull()
        .references(() => orgs.orgId, { onDelete: "cascade" }),
    domainId: varchar("domainId")
        .notNull()
        .references(() => domains.domainId, { onDelete: "cascade" })
});

export const sites = pgTable("sites", {
    siteId: serial("siteId").primaryKey(),
    orgId: varchar("orgId")
        .references(() => orgs.orgId, {
            onDelete: "cascade"
        })
        .notNull(),
    niceId: varchar("niceId").notNull(),
    exitNodeId: integer("exitNode").references(() => exitNodes.exitNodeId, {
        onDelete: "set null"
    }),
    name: varchar("name").notNull(),
    pubKey: varchar("pubKey"),
    subnet: varchar("subnet").notNull(),
    megabytesIn: real("bytesIn"),
    megabytesOut: real("bytesOut"),
    lastBandwidthUpdate: varchar("lastBandwidthUpdate"),
    type: varchar("type").notNull(), // "newt" or "wireguard"
    online: boolean("online").notNull().default(false),
    dockerSocketEnabled: boolean("dockerSocketEnabled").notNull().default(true)
});

export const resources = pgTable("resources", {
    resourceId: serial("resourceId").primaryKey(),
    siteId: integer("siteId")
        .references(() => sites.siteId, {
            onDelete: "cascade"
        })
        .notNull(),
    orgId: varchar("orgId")
        .references(() => orgs.orgId, {
            onDelete: "cascade"
        })
        .notNull(),
    name: varchar("name").notNull(),
    subdomain: varchar("subdomain"),
    fullDomain: varchar("fullDomain"),
    domainId: varchar("domainId").references(() => domains.domainId, {
        onDelete: "set null"
    }),
    ssl: boolean("ssl").notNull().default(false),
    blockAccess: boolean("blockAccess").notNull().default(false),
    sso: boolean("sso").notNull().default(true),
    http: boolean("http").notNull().default(true),
    protocol: varchar("protocol").notNull(),
    proxyPort: integer("proxyPort"),
    emailWhitelistEnabled: boolean("emailWhitelistEnabled")
        .notNull()
        .default(false),
    isBaseDomain: boolean("isBaseDomain"),
    applyRules: boolean("applyRules").notNull().default(false),
    enabled: boolean("enabled").notNull().default(true),
    stickySession: boolean("stickySession").notNull().default(false),
    tlsServerName: varchar("tlsServerName"),
    setHostHeader: varchar("setHostHeader")
});

export const targets = pgTable("targets", {
    targetId: serial("targetId").primaryKey(),
    resourceId: integer("resourceId")
        .references(() => resources.resourceId, {
            onDelete: "cascade"
        })
        .notNull(),
    ip: varchar("ip").notNull(),
    method: varchar("method"),
    port: integer("port").notNull(),
    internalPort: integer("internalPort"),
    enabled: boolean("enabled").notNull().default(true)
});

export const exitNodes = pgTable("exitNodes", {
    exitNodeId: serial("exitNodeId").primaryKey(),
    name: varchar("name").notNull(),
    address: varchar("address").notNull(),
    endpoint: varchar("endpoint").notNull(),
    publicKey: varchar("publicKey").notNull(),
    listenPort: integer("listenPort").notNull(),
    reachableAt: varchar("reachableAt")
});

export const users = pgTable("user", {
    userId: varchar("id").primaryKey(),
    email: varchar("email"),
    username: varchar("username").notNull(),
    name: varchar("name"),
    type: varchar("type").notNull(), // "internal", "oidc"
    idpId: integer("idpId").references(() => idp.idpId, {
        onDelete: "cascade"
    }),
    passwordHash: varchar("passwordHash"),
    twoFactorEnabled: boolean("twoFactorEnabled").notNull().default(false),
    twoFactorSecret: varchar("twoFactorSecret"),
    emailVerified: boolean("emailVerified").notNull().default(false),
    dateCreated: varchar("dateCreated").notNull(),
    serverAdmin: boolean("serverAdmin").notNull().default(false)
});

export const newts = pgTable("newt", {
    newtId: varchar("id").primaryKey(),
    secretHash: varchar("secretHash").notNull(),
    dateCreated: varchar("dateCreated").notNull(),
    siteId: integer("siteId").references(() => sites.siteId, {
        onDelete: "cascade"
    })
});

export const twoFactorBackupCodes = pgTable("twoFactorBackupCodes", {
    codeId: serial("id").primaryKey(),
    userId: varchar("userId")
        .notNull()
        .references(() => users.userId, { onDelete: "cascade" }),
    codeHash: varchar("codeHash").notNull()
});

export const sessions = pgTable("session", {
    sessionId: varchar("id").primaryKey(),
    userId: varchar("userId")
        .notNull()
        .references(() => users.userId, { onDelete: "cascade" }),
    expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});

export const newtSessions = pgTable("newtSession", {
    sessionId: varchar("id").primaryKey(),
    newtId: varchar("newtId")
        .notNull()
        .references(() => newts.newtId, { onDelete: "cascade" }),
    expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});

export const userOrgs = pgTable("userOrgs", {
    userId: varchar("userId")
        .notNull()
        .references(() => users.userId, { onDelete: "cascade" }),
    orgId: varchar("orgId")
        .references(() => orgs.orgId, {
            onDelete: "cascade"
        })
        .notNull(),
    roleId: integer("roleId")
        .notNull()
        .references(() => roles.roleId),
    isOwner: boolean("isOwner").notNull().default(false)
});

export const emailVerificationCodes = pgTable("emailVerificationCodes", {
    codeId: serial("id").primaryKey(),
    userId: varchar("userId")
        .notNull()
        .references(() => users.userId, { onDelete: "cascade" }),
    email: varchar("email").notNull(),
    code: varchar("code").notNull(),
    expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});

export const passwordResetTokens = pgTable("passwordResetTokens", {
    tokenId: serial("id").primaryKey(),
    email: varchar("email").notNull(),
    userId: varchar("userId")
        .notNull()
        .references(() => users.userId, { onDelete: "cascade" }),
    tokenHash: varchar("tokenHash").notNull(),
    expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});

export const actions = pgTable("actions", {
    actionId: varchar("actionId").primaryKey(),
    name: varchar("name"),
    description: varchar("description")
});

export const roles = pgTable("roles", {
    roleId: serial("roleId").primaryKey(),
    orgId: varchar("orgId")
        .references(() => orgs.orgId, {
            onDelete: "cascade"
        })
        .notNull(),
    isAdmin: boolean("isAdmin"),
    name: varchar("name").notNull(),
    description: varchar("description")
});

export const roleActions = pgTable("roleActions", {
    roleId: integer("roleId")
        .notNull()
        .references(() => roles.roleId, { onDelete: "cascade" }),
    actionId: varchar("actionId")
        .notNull()
        .references(() => actions.actionId, { onDelete: "cascade" }),
    orgId: varchar("orgId")
        .notNull()
        .references(() => orgs.orgId, { onDelete: "cascade" })
});

export const userActions = pgTable("userActions", {
    userId: varchar("userId")
        .notNull()
        .references(() => users.userId, { onDelete: "cascade" }),
    actionId: varchar("actionId")
        .notNull()
        .references(() => actions.actionId, { onDelete: "cascade" }),
    orgId: varchar("orgId")
        .notNull()
        .references(() => orgs.orgId, { onDelete: "cascade" })
});

export const roleSites = pgTable("roleSites", {
    roleId: integer("roleId")
        .notNull()
        .references(() => roles.roleId, { onDelete: "cascade" }),
    siteId: integer("siteId")
        .notNull()
        .references(() => sites.siteId, { onDelete: "cascade" })
});

export const userSites = pgTable("userSites", {
    userId: varchar("userId")
        .notNull()
        .references(() => users.userId, { onDelete: "cascade" }),
    siteId: integer("siteId")
        .notNull()
        .references(() => sites.siteId, { onDelete: "cascade" })
});

export const roleResources = pgTable("roleResources", {
    roleId: integer("roleId")
        .notNull()
        .references(() => roles.roleId, { onDelete: "cascade" }),
    resourceId: integer("resourceId")
        .notNull()
        .references(() => resources.resourceId, { onDelete: "cascade" })
});

export const userResources = pgTable("userResources", {
    userId: varchar("userId")
        .notNull()
        .references(() => users.userId, { onDelete: "cascade" }),
    resourceId: integer("resourceId")
        .notNull()
        .references(() => resources.resourceId, { onDelete: "cascade" })
});

export const limitsTable = pgTable("limits", {
    limitId: serial("limitId").primaryKey(),
    orgId: varchar("orgId")
        .references(() => orgs.orgId, {
            onDelete: "cascade"
        })
        .notNull(),
    name: varchar("name").notNull(),
    value: bigint("value", { mode: "number" }).notNull(),
    description: varchar("description")
});

export const userInvites = pgTable("userInvites", {
    inviteId: varchar("inviteId").primaryKey(),
    orgId: varchar("orgId")
        .notNull()
        .references(() => orgs.orgId, { onDelete: "cascade" }),
    email: varchar("email").notNull(),
    expiresAt: bigint("expiresAt", { mode: "number" }).notNull(),
    tokenHash: varchar("token").notNull(),
    roleId: integer("roleId")
        .notNull()
        .references(() => roles.roleId, { onDelete: "cascade" })
});

export const resourcePincode = pgTable("resourcePincode", {
    pincodeId: serial("pincodeId").primaryKey(),
    resourceId: integer("resourceId")
        .notNull()
        .references(() => resources.resourceId, { onDelete: "cascade" }),
    pincodeHash: varchar("pincodeHash").notNull(),
    digitLength: integer("digitLength").notNull()
});

export const resourcePassword = pgTable("resourcePassword", {
    passwordId: serial("passwordId").primaryKey(),
    resourceId: integer("resourceId")
        .notNull()
        .references(() => resources.resourceId, { onDelete: "cascade" }),
    passwordHash: varchar("passwordHash").notNull()
});

export const resourceAccessToken = pgTable("resourceAccessToken", {
    accessTokenId: varchar("accessTokenId").primaryKey(),
    orgId: varchar("orgId")
        .notNull()
        .references(() => orgs.orgId, { onDelete: "cascade" }),
    resourceId: integer("resourceId")
        .notNull()
        .references(() => resources.resourceId, { onDelete: "cascade" }),
    tokenHash: varchar("tokenHash").notNull(),
    sessionLength: bigint("sessionLength", { mode: "number" }).notNull(),
    expiresAt: bigint("expiresAt", { mode: "number" }),
    title: varchar("title"),
    description: varchar("description"),
    createdAt: bigint("createdAt", { mode: "number" }).notNull()
});

export const resourceSessions = pgTable("resourceSessions", {
    sessionId: varchar("id").primaryKey(),
    resourceId: integer("resourceId")
        .notNull()
        .references(() => resources.resourceId, { onDelete: "cascade" }),
    expiresAt: bigint("expiresAt", { mode: "number" }).notNull(),
    sessionLength: bigint("sessionLength", { mode: "number" }).notNull(),
    doNotExtend: boolean("doNotExtend").notNull().default(false),
    isRequestToken: boolean("isRequestToken"),
    userSessionId: varchar("userSessionId").references(
        () => sessions.sessionId,
        {
            onDelete: "cascade"
        }
    ),
    passwordId: integer("passwordId").references(
        () => resourcePassword.passwordId,
        {
            onDelete: "cascade"
        }
    ),
    pincodeId: integer("pincodeId").references(
        () => resourcePincode.pincodeId,
        {
            onDelete: "cascade"
        }
    ),
    whitelistId: integer("whitelistId").references(
        () => resourceWhitelist.whitelistId,
        {
            onDelete: "cascade"
        }
    ),
    accessTokenId: varchar("accessTokenId").references(
        () => resourceAccessToken.accessTokenId,
        {
            onDelete: "cascade"
        }
    )
});

export const resourceWhitelist = pgTable("resourceWhitelist", {
    whitelistId: serial("id").primaryKey(),
    email: varchar("email").notNull(),
    resourceId: integer("resourceId")
        .notNull()
        .references(() => resources.resourceId, { onDelete: "cascade" })
});

export const resourceOtp = pgTable("resourceOtp", {
    otpId: serial("otpId").primaryKey(),
    resourceId: integer("resourceId")
        .notNull()
        .references(() => resources.resourceId, { onDelete: "cascade" }),
    email: varchar("email").notNull(),
    otpHash: varchar("otpHash").notNull(),
    expiresAt: bigint("expiresAt", { mode: "number" }).notNull()
});

export const versionMigrations = pgTable("versionMigrations", {
    version: varchar("version").primaryKey(),
    executedAt: bigint("executedAt", { mode: "number" }).notNull()
});

export const resourceRules = pgTable("resourceRules", {
    ruleId: serial("ruleId").primaryKey(),
    resourceId: integer("resourceId")
        .notNull()
        .references(() => resources.resourceId, { onDelete: "cascade" }),
    enabled: boolean("enabled").notNull().default(true),
    priority: integer("priority").notNull(),
    action: varchar("action").notNull(), // ACCEPT, DROP
    match: varchar("match").notNull(), // CIDR, PATH, IP
    value: varchar("value").notNull()
});

export const supporterKey = pgTable("supporterKey", {
    keyId: serial("keyId").primaryKey(),
    key: varchar("key").notNull(),
    githubUsername: varchar("githubUsername").notNull(),
    phrase: varchar("phrase"),
    tier: varchar("tier"),
    valid: boolean("valid").notNull().default(false)
});

export const idp = pgTable("idp", {
    idpId: serial("idpId").primaryKey(),
    name: varchar("name").notNull(),
    type: varchar("type").notNull(),
    defaultRoleMapping: varchar("defaultRoleMapping"),
    defaultOrgMapping: varchar("defaultOrgMapping"),
    autoProvision: boolean("autoProvision").notNull().default(false)
});

export const idpOidcConfig = pgTable("idpOidcConfig", {
    idpOauthConfigId: serial("idpOauthConfigId").primaryKey(),
    idpId: integer("idpId")
        .notNull()
        .references(() => idp.idpId, { onDelete: "cascade" }),
    clientId: varchar("clientId").notNull(),
    clientSecret: varchar("clientSecret").notNull(),
    authUrl: varchar("authUrl").notNull(),
    tokenUrl: varchar("tokenUrl").notNull(),
    identifierPath: varchar("identifierPath").notNull(),
    emailPath: varchar("emailPath"),
    namePath: varchar("namePath"),
    scopes: varchar("scopes").notNull()
});

export const licenseKey = pgTable("licenseKey", {
    licenseKeyId: varchar("licenseKeyId").primaryKey().notNull(),
    instanceId: varchar("instanceId").notNull(),
    token: varchar("token").notNull()
});

export const hostMeta = pgTable("hostMeta", {
    hostMetaId: varchar("hostMetaId").primaryKey().notNull(),
    createdAt: bigint("createdAt", { mode: "number" }).notNull()
});

export const apiKeys = pgTable("apiKeys", {
    apiKeyId: varchar("apiKeyId").primaryKey(),
    name: varchar("name").notNull(),
    apiKeyHash: varchar("apiKeyHash").notNull(),
    lastChars: varchar("lastChars").notNull(),
    createdAt: varchar("dateCreated").notNull(),
    isRoot: boolean("isRoot").notNull().default(false)
});

export const apiKeyActions = pgTable("apiKeyActions", {
    apiKeyId: varchar("apiKeyId")
        .notNull()
        .references(() => apiKeys.apiKeyId, { onDelete: "cascade" }),
    actionId: varchar("actionId")
        .notNull()
        .references(() => actions.actionId, { onDelete: "cascade" })
});

export const apiKeyOrg = pgTable("apiKeyOrg", {
    apiKeyId: varchar("apiKeyId")
        .notNull()
        .references(() => apiKeys.apiKeyId, { onDelete: "cascade" }),
    orgId: varchar("orgId")
        .references(() => orgs.orgId, {
            onDelete: "cascade"
        })
        .notNull()
});

export const idpOrg = pgTable("idpOrg", {
    idpId: integer("idpId")
        .notNull()
        .references(() => idp.idpId, { onDelete: "cascade" }),
    orgId: varchar("orgId")
        .notNull()
        .references(() => orgs.orgId, { onDelete: "cascade" }),
    roleMapping: varchar("roleMapping"),
    orgMapping: varchar("orgMapping")
});

export type Org = InferSelectModel<typeof orgs>;
export type User = InferSelectModel<typeof users>;
export type Site = InferSelectModel<typeof sites>;
export type Resource = InferSelectModel<typeof resources>;
export type ExitNode = InferSelectModel<typeof exitNodes>;
export type Target = InferSelectModel<typeof targets>;
export type Session = InferSelectModel<typeof sessions>;
export type Newt = InferSelectModel<typeof newts>;
export type NewtSession = InferSelectModel<typeof newtSessions>;
export type EmailVerificationCode = InferSelectModel<
    typeof emailVerificationCodes
>;
export type TwoFactorBackupCode = InferSelectModel<typeof twoFactorBackupCodes>;
export type PasswordResetToken = InferSelectModel<typeof passwordResetTokens>;
export type Role = InferSelectModel<typeof roles>;
export type Action = InferSelectModel<typeof actions>;
export type RoleAction = InferSelectModel<typeof roleActions>;
export type UserAction = InferSelectModel<typeof userActions>;
export type RoleSite = InferSelectModel<typeof roleSites>;
export type UserSite = InferSelectModel<typeof userSites>;
export type RoleResource = InferSelectModel<typeof roleResources>;
export type UserResource = InferSelectModel<typeof userResources>;
export type Limit = InferSelectModel<typeof limitsTable>;
export type UserInvite = InferSelectModel<typeof userInvites>;
export type UserOrg = InferSelectModel<typeof userOrgs>;
export type ResourceSession = InferSelectModel<typeof resourceSessions>;
export type ResourcePincode = InferSelectModel<typeof resourcePincode>;
export type ResourcePassword = InferSelectModel<typeof resourcePassword>;
export type ResourceOtp = InferSelectModel<typeof resourceOtp>;
export type ResourceAccessToken = InferSelectModel<typeof resourceAccessToken>;
export type ResourceWhitelist = InferSelectModel<typeof resourceWhitelist>;
export type VersionMigration = InferSelectModel<typeof versionMigrations>;
export type ResourceRule = InferSelectModel<typeof resourceRules>;
export type Domain = InferSelectModel<typeof domains>;
export type SupporterKey = InferSelectModel<typeof supporterKey>;
export type Idp = InferSelectModel<typeof idp>;
export type ApiKey = InferSelectModel<typeof apiKeys>;
export type ApiKeyAction = InferSelectModel<typeof apiKeyActions>;
export type ApiKeyOrg = InferSelectModel<typeof apiKeyOrg>;