Compare commits
875 commits
empty-mach
...
master
Author | SHA1 | Date | |
---|---|---|---|
|
05b54687b6 | ||
|
c4473839c4 | ||
|
d2c4bc55fc | ||
|
2abc078e53 | ||
|
ceb4479ec4 | ||
|
845d4542bb | ||
|
f4ed7b3520 | ||
|
60431804d8 | ||
|
0f942a95f1 | ||
|
97e6588a45 | ||
|
725cae1fa8 | ||
|
c64332d30a | ||
|
718d1c54b2 | ||
|
b48b728317 | ||
|
fb393f1c57 | ||
|
630cbf0c70 | ||
|
95f27677e4 | ||
|
c6e40191dd | ||
|
0746e0c091 | ||
|
2291a232cb | ||
|
0e8a1c681b | ||
|
990dd5e08e | ||
|
2682f801df | ||
|
912c4bca70 | ||
|
26bcd0912a | ||
|
63bd31b471 | ||
|
be97466809 | ||
|
df13f43156 | ||
|
368d22ec30 | ||
|
f6bb8412c5 | ||
|
2e1ddec107 | ||
|
52f86c2d10 | ||
|
7779c7ff0c | ||
|
75a50c0c9d | ||
|
d9f2a22ee5 | ||
|
c76325b91b | ||
|
dd71f0a866 | ||
|
b63e64ee9f | ||
|
6de62a1468 | ||
|
b411782648 | ||
|
2f49088163 | ||
|
fd2bb8927c | ||
|
e9b0f3c54e | ||
|
a6b0e58380 | ||
|
caca4032d1 | ||
|
7dd86e2b95 | ||
|
06bebdeac7 | ||
|
742f5e8cda | ||
|
6c042f18f0 | ||
|
2a7e8383c8 | ||
|
b1c09f7512 | ||
|
bd785ede15 | ||
|
1a56a0e0b9 | ||
|
49e0735b53 | ||
|
6daaab1789 | ||
|
e8ff13bc17 | ||
|
a928b4d001 | ||
|
44ec3b9e01 | ||
|
6c5e8afde9 | ||
|
1eab943ec2 | ||
|
8108e4156d | ||
|
5731491b4e | ||
|
98560d0cf5 | ||
|
e611d01c90 | ||
|
5356ccc6cd | ||
|
d8877a71fc | ||
|
e7ecea764e | ||
|
41b43733b0 | ||
|
8e9e091656 | ||
|
a23fe06d68 | ||
|
4bf640c6e8 | ||
|
e34af358d7 | ||
|
0df8f54fbb | ||
|
8da490f593 | ||
|
3e3df5e4c6 | ||
|
f3ea88f64c | ||
|
e976614645 | ||
|
717fc97ca0 | ||
|
97c441dab6 | ||
|
8de8bf0e06 | ||
|
2bbf0b4762 | ||
|
45571cea08 | ||
|
d34fb7e8a8 | ||
|
4561eb787b | ||
|
a6a4d460d7 | ||
|
eada3739e6 | ||
|
bdecf38616 | ||
|
5c83695177 | ||
|
2853410576 | ||
|
58a1d7164f | ||
|
332af5dd8d | ||
|
fa56d35a48 | ||
|
df159b0167 | ||
|
af1df0696b | ||
|
3208a40ef3 | ||
|
4e724f6c0a | ||
|
fdc525164a | ||
|
81acad0d66 | ||
|
5ff8a03195 | ||
|
4160bb8102 | ||
|
f5fbe4a200 | ||
|
45c669fb65 | ||
|
825c08aa9d | ||
|
af14f1085f | ||
|
e6f5d157b8 | ||
|
785fce4dc7 | ||
|
17db4cb970 | ||
|
4192af30d5 | ||
|
3921c3f480 | ||
|
6507e8f4cd | ||
|
66544baa7f | ||
|
311dfdee1f | ||
|
91b0fce955 | ||
|
532e97e00f | ||
|
d7116a4a6f | ||
|
2fb6f209aa | ||
|
3f9e8e81e6 | ||
|
8c75efdb2a | ||
|
f75cdeb239 | ||
|
4b8e6cd780 | ||
|
84606eb207 | ||
|
dc698ecea8 | ||
|
455acf7c90 | ||
|
19d36c0fb2 | ||
|
ce32fc019e | ||
|
6ffb68322f | ||
|
fa8d5b6992 | ||
|
5d0d5ac9c9 | ||
|
d760b401e6 | ||
|
4df4e5b3bf | ||
|
70e8377c0d | ||
|
685cda545b | ||
|
f156f178cd | ||
|
a52f1b75ff | ||
|
421ef3bf9c | ||
|
08794c5b6d | ||
|
a65223aa5b | ||
|
24b5e8f100 | ||
|
c6e4762f28 | ||
|
6acbcb0a33 | ||
|
e452dc80bd | ||
|
fd309134a2 | ||
|
48f011dc1c | ||
|
75d8ad9798 | ||
|
03bb194d2c | ||
|
6ca053ca67 | ||
|
1e0bcedef5 | ||
|
733f5e165b | ||
|
0ef5f20aa7 | ||
|
fca8883cd9 | ||
|
896dfefcdf | ||
|
6960419a2e | ||
|
adba4e2a2f | ||
|
aa4f02c798 | ||
|
260f5a7992 | ||
|
0f722916b8 | ||
|
a59ae61441 | ||
|
437a97510a | ||
|
f306d59016 | ||
|
58f91dc951 | ||
|
bd47dac6a3 | ||
|
5d5a1117e1 | ||
|
ecd1a8bfed | ||
|
f99f003a50 | ||
|
5622ac8338 | ||
|
da746f77d5 | ||
|
1c03fbe99e | ||
|
a504113186 | ||
|
2a2b09b52a | ||
|
ca784b147b | ||
|
b6f272d09a | ||
|
a62e28fdfb | ||
|
bc9bfa81b2 | ||
|
162768bdec | ||
|
2212c2f847 | ||
|
33e3fdabe4 | ||
|
e1932ff01e | ||
|
052accd6bb | ||
|
240f057f95 | ||
|
6e34d609b7 | ||
|
fd22bb5ec2 | ||
|
1530d93fc1 | ||
|
e0e9e3ef16 | ||
|
44eb4d4a94 | ||
|
822fcdacbb | ||
|
08694adf1b | ||
|
c2c173ac7e | ||
|
a79fcaf378 | ||
|
bc3a179af9 | ||
|
9b07e1f7ce | ||
|
a851e14c88 | ||
|
a941576acc | ||
|
89f704ef18 | ||
|
67cdf91f94 | ||
|
51f70e47e3 | ||
|
12d9fba4b3 | ||
|
6eabb461ce | ||
|
b1c9717e21 | ||
|
4a4b309790 | ||
|
acd2a14dc9 | ||
|
c10aad79d9 | ||
|
1b4ec66148 | ||
|
04f3dc09f9 | ||
|
c707b72b03 | ||
|
84cbff16d4 | ||
|
b1f85693c2 | ||
|
518c7f178a | ||
|
4acb4f8df3 | ||
|
3e86f52250 | ||
|
8cca4346a5 | ||
|
90d3a21853 | ||
|
1ab4487b65 | ||
|
486f96e7ac | ||
|
8bb7da3994 | ||
|
0f3ae64062 | ||
|
0c4093dcca | ||
|
23968e472d | ||
|
a5ab73d458 | ||
|
f8755be9cd | ||
|
d1bfaddb69 | ||
|
bfc92ca1c5 | ||
|
ed3d501081 | ||
|
7e5ab344a2 | ||
|
7c5cbef51a | ||
|
6b0bdc5eeb | ||
|
1aa4fc5949 | ||
|
380cbf70a9 | ||
|
05c1825622 | ||
|
6a61b919e7 | ||
|
15542b78fb | ||
|
b164373997 | ||
|
ffcab0b2bc | ||
|
32e9eb4be4 | ||
|
76d4bc7788 | ||
|
ec199162dc | ||
|
1dcf9d1ae1 | ||
|
7ffa0cc787 | ||
|
ec53c672dc | ||
|
92f923cfa8 | ||
|
947b247a40 | ||
|
d7ef51e6ba | ||
|
a51bce8f8d | ||
|
47eb2e240d | ||
|
ddd6ee8e42 | ||
|
5cd4406f5e | ||
|
4934fce769 | ||
|
272cf543b3 | ||
|
d2d788c5dc | ||
|
a4dc5053d2 | ||
|
19de3a8a77 | ||
|
e7ad3d88ae | ||
|
3cd4847093 | ||
|
b2a6eb92bf | ||
|
f0cda0406b | ||
|
ff7acd3347 | ||
|
a6b55f2b5e | ||
|
a254b436c7 | ||
|
3b1563a538 | ||
|
0ecb6eefee | ||
|
6e228f3f3f | ||
|
28238cb01f | ||
|
0dd22e8b93 | ||
|
9ae8bd79c5 | ||
|
6b5da29e3d | ||
|
6c20d38c41 | ||
|
338141f067 | ||
|
9235f55c47 | ||
|
61d4ccbfdd | ||
|
89028f17cf | ||
|
3253b16f0f | ||
|
5618ba9f46 | ||
|
d39131d154 | ||
|
8b5ad6990d | ||
|
6dadfcb2ef | ||
|
7a4796d655 | ||
|
cba6de024f | ||
|
bfda483c0a | ||
|
3cb9dbdb21 | ||
|
b8e6bd8c9a | ||
|
95ed308207 | ||
|
0d1c4c6070 | ||
|
8f6659a2ec | ||
|
9dba6db676 | ||
|
37c0c067a8 | ||
|
e4dcdd2572 | ||
|
ac01faf483 | ||
|
4c08e1e68c | ||
|
d5b6f2974b | ||
|
64deeab1ec | ||
|
b2212f4225 | ||
|
ce276d3838 | ||
|
43ef32aa8d | ||
|
0040569fa9 | ||
|
6b9e065764 | ||
|
d45bec4047 | ||
|
702da0f59a | ||
|
f02f34d64c | ||
|
fd94e2c056 | ||
|
aff80a2863 | ||
|
22146eb3e4 | ||
|
b562103024 | ||
|
25868f27de | ||
|
0c9943b740 | ||
|
32f196a774 | ||
|
c588be0842 | ||
|
f2154e362b | ||
|
2aa55e9444 | ||
|
e36df40ba7 | ||
|
86d9384954 | ||
|
b4d9223625 | ||
|
1ec52431b6 | ||
|
e8e2ade8f0 | ||
|
6a6501691a | ||
|
caaed7c515 | ||
|
afeb541eac | ||
|
93c22f29cf | ||
|
0f319b31fd | ||
|
d6361d0a40 | ||
|
cd9d8f309d | ||
|
0334a9afe8 | ||
|
31c5727a90 | ||
|
644c767019 | ||
|
6ba682a32f | ||
|
1d5baa657f | ||
|
2cb7b0bee6 | ||
|
a18df9c3bb | ||
|
ffadd42779 | ||
|
55247cd46a | ||
|
643445b7cf | ||
|
9dfc66ef04 | ||
|
ae53c0f1cc | ||
|
718721b341 | ||
|
5cb7013575 | ||
|
a01ce18b98 | ||
|
1a6f12c88e | ||
|
5e7c0e0f49 | ||
|
867245aefb | ||
|
389ea4293f | ||
|
77d58652a3 | ||
|
4bc225f26b | ||
|
fc78845a97 | ||
|
395cace69f | ||
|
7106d396dc | ||
|
f12ff3dfed | ||
|
b52b4252c1 | ||
|
202112bcae | ||
|
46fff0b544 | ||
|
b6b6fd026b | ||
|
9ac5aeda79 | ||
|
3c16139c44 | ||
|
bb16552aca | ||
|
e73ceafdba | ||
|
9af546bd0a | ||
|
11b7b1bc88 | ||
|
6c8ddbccac | ||
|
f9ca14f010 | ||
|
1295de928a | ||
|
01d7c1a5c2 | ||
|
c10bca93df | ||
|
2fa826318e | ||
|
59afb285f3 | ||
|
9967d60987 | ||
|
486b56d1ed | ||
|
8bcb4c2436 | ||
|
73f71a0aa3 | ||
|
17cd792826 | ||
|
bd41f855cf | ||
|
e61d5a3034 | ||
|
ebe25d7653 | ||
|
893394ef5f | ||
|
e404e0b608 | ||
|
956703c31a | ||
|
85839b0199 | ||
|
6e18c652cb | ||
|
a910b7beca | ||
|
d26e17f505 | ||
|
aeca8f40c2 | ||
|
4137482f65 | ||
|
98c6038fde | ||
|
507da49b5a | ||
|
9beb5388cb | ||
|
d4c0643122 | ||
|
e42841cd00 | ||
|
62caffb102 | ||
|
fddf597040 | ||
|
8bfeb7d90d | ||
|
40e6b205bc | ||
|
da6106bd23 | ||
|
4c4b545e9b | ||
|
f7409d47be | ||
|
062f71fb92 | ||
|
89c3c18c19 | ||
|
c3c2608947 | ||
|
2c8769adf6 | ||
|
381220daf4 | ||
|
b9a3acb03f | ||
|
76429f033a | ||
|
cf747d65e0 | ||
|
25bb23d8b7 | ||
|
6096cb3c9b | ||
|
4e2c9c185b | ||
|
8da9d5eefd | ||
|
5b3200173e | ||
|
edd062522d | ||
|
3cc6b2c0d0 | ||
|
9ccdddaab1 | ||
|
0191faf3a8 | ||
|
2a8e97d558 | ||
|
7673a20467 | ||
|
18764eff0e | ||
|
e3cb4ab2c4 | ||
|
26e2bbd709 | ||
|
3b1f412e44 | ||
|
a4eee41fd7 | ||
|
228e4f9acc | ||
|
d757cf8e84 | ||
|
396dcf8e6e | ||
|
12c32d507c | ||
|
f6544962ea | ||
|
084186c67a | ||
|
92a9d6c321 | ||
|
3c1b957050 | ||
|
4fbc3402fb | ||
|
6720d89845 | ||
|
f6924f8c57 | ||
|
9167bd107d | ||
|
b2d3520519 | ||
|
364b833d67 | ||
|
0416a41d58 | ||
|
1f9f81da70 | ||
|
025f14f879 | ||
|
e5fe74ce77 | ||
|
e1400d28f1 | ||
|
534328ca30 | ||
|
eddb994c0b | ||
|
6e3ca35941 | ||
|
0ea1508ff9 | ||
|
fcf44f7b31 | ||
|
412b4c4b0b | ||
|
0ddd42c01f | ||
|
77f2968267 | ||
|
8aca0ea860 | ||
|
abbc130844 | ||
|
424215f228 | ||
|
e1f5ed41df | ||
|
5ac33aab03 | ||
|
4ae41a363d | ||
|
71b7a594bd | ||
|
2701454f23 | ||
|
e1f4a71357 | ||
|
a753ea6981 | ||
|
8f71edaadd | ||
|
4ff8f498ce | ||
|
6bb20fa951 | ||
|
88587822c1 | ||
|
c0e6c1ac78 | ||
|
d286b044e7 | ||
|
0d1adfc7db | ||
|
3041023ed8 | ||
|
66dfded0cf | ||
|
6b744884b0 | ||
|
774a8cfc00 | ||
|
0c5d233563 | ||
|
9a5a937695 | ||
|
7fa469d0b0 | ||
|
1e018bdaf8 | ||
|
0279e549bd | ||
|
3132aa54b7 | ||
|
38ab6be7c2 | ||
|
3fa555fb25 | ||
|
ea6401ce09 | ||
|
61bea26486 | ||
|
772d5b5c32 | ||
|
1095f6c875 | ||
|
169b844212 | ||
|
f39fbf07fa | ||
|
d769fff1e8 | ||
|
68d4bdc1bd | ||
|
bbfb7d1cfa | ||
|
3884c5f47d | ||
|
80de87ac34 | ||
|
a3e5f0a3a0 | ||
|
91eb39cff6 | ||
|
dc38e5ac00 | ||
|
a74e424d53 | ||
|
d87f088b8f | ||
|
86971da274 | ||
|
618be9ff68 | ||
|
c77fe16943 | ||
|
94c7efdb5b | ||
|
b1f2063a9a | ||
|
855f9e6f8d | ||
|
e61a464951 | ||
|
b451d190b7 | ||
|
9c90144867 | ||
|
6aaf3cd50b | ||
|
0a114ca7d1 | ||
|
e161507d08 | ||
|
9faa49c7e8 | ||
|
d95b7afe61 | ||
|
9d5aaf5ea2 | ||
|
5b0fe4b7f1 | ||
|
e71d146a2d | ||
|
137e7408fd | ||
|
e63a3ab92b | ||
|
c7014dba8f | ||
|
0a748d324e | ||
|
16a3be49e2 | ||
|
e27a0a0e14 | ||
|
b2c2c5ac59 | ||
|
a19748ae35 | ||
|
85ab9c68a2 | ||
|
f6d6c5bb2b | ||
|
ef4e61e05e | ||
|
54fc5e48dc | ||
|
7f04d12333 | ||
|
76c84c69c4 | ||
|
6888fa2133 | ||
|
dd76c07293 | ||
|
d7119b73ab | ||
|
a6bb2cf5e1 | ||
|
83db873f5b | ||
|
cdd2401ff6 | ||
|
83b6c2cfef | ||
|
3962daa3bd | ||
|
9ae99964e6 | ||
|
37c8d4419a | ||
|
301782ae18 | ||
|
ff17a961fc | ||
|
60b3f63851 | ||
|
39a4a256fd | ||
|
01ea78c10e | ||
|
40b5bcb918 | ||
|
75d8b821ff | ||
|
8acce4637a | ||
|
addf60b3ee | ||
|
20a1bc7d44 | ||
|
8e74575c03 | ||
|
be18fea136 | ||
|
76ea3a063f | ||
|
90c38db9f2 | ||
|
b7d1e2c483 | ||
|
8fce946850 | ||
|
19a01d20dd | ||
|
65fa2bf8c3 | ||
|
12a4a5fb14 | ||
|
83c3818504 | ||
|
5de500d6da | ||
|
5ff6bfba9c | ||
|
06ddd01c70 | ||
|
5aca11af70 | ||
|
ecb32d74c6 | ||
|
f280505eaa | ||
|
812b87ab48 | ||
|
0f5560b62a | ||
|
6c15da4ece | ||
|
ebd709a4fe | ||
|
2820f9b986 | ||
|
9f911318f3 | ||
|
1d7d377f8b | ||
|
a0b264047c | ||
|
987f119c4b | ||
|
b6be18ca65 | ||
|
7e871d2278 | ||
|
8f130196f8 | ||
|
628af6e2d0 | ||
|
8024693f4f | ||
|
e927717fa0 | ||
|
3bf95e1a83 | ||
|
e37d09e5b4 | ||
|
3fb3decf49 | ||
|
b0f370bae2 | ||
|
6193047c35 | ||
|
02be5f3618 | ||
|
47cc60bda9 | ||
|
ce60c7b056 | ||
|
d369656b26 | ||
|
e5833699c0 | ||
|
83ed8a61aa | ||
|
4bffc0df21 | ||
|
1e4441b6ae | ||
|
7bb74b9664 | ||
|
4f29ce2ee7 | ||
|
0c35d9d43c | ||
|
4f25738d6b | ||
|
47dbfa770d | ||
|
91b0f8fee1 | ||
|
2e91a82aa7 | ||
|
f25fdecc3f | ||
|
b603bdfccc | ||
|
3d8c891699 | ||
|
ecb5562b57 | ||
|
2142f7bb5c | ||
|
51800132cd | ||
|
73663ff9e7 | ||
|
942aed1219 | ||
|
157589d31e | ||
|
ba4396e52c | ||
|
6fb962a941 | ||
|
cd4dabde0e | ||
|
c4deaf0994 | ||
|
ca12432a2a | ||
|
d986ae0ee5 | ||
|
943bb58086 | ||
|
c49c1cbf2a | ||
|
7284c0a47a | ||
|
a84e4b6b15 | ||
|
822e441d3a | ||
|
185f9ad541 | ||
|
dfc4126384 | ||
|
033082a31e | ||
|
2d81e751a1 | ||
|
899f3e7eb8 | ||
|
fd1c38811e | ||
|
59f6610721 | ||
|
72c1753fb7 | ||
|
401739b036 | ||
|
e4463c412b | ||
|
38b37db55b | ||
|
6efc2688b1 | ||
|
c022eb1b86 | ||
|
ef3a130d54 | ||
|
7d6523db29 | ||
|
5d2c99bb17 | ||
|
6b71da6b78 | ||
|
ff88faf402 | ||
|
a32aa96752 | ||
|
f68bc113a7 | ||
|
579cecde04 | ||
|
fe23da6e0c | ||
|
e4ff26d613 | ||
|
409721414b | ||
|
6c19beb937 | ||
|
11965f08db | ||
|
10ee07cea0 | ||
|
cc228f1868 | ||
|
fdda940ac0 | ||
|
9131d9d568 | ||
|
311cda31fe | ||
|
3d72ca731a | ||
|
fd3e668fe1 | ||
|
fa0e590778 | ||
|
7c3dbffcc6 | ||
|
4a6a9c4355 | ||
|
f2528f3e29 | ||
|
d15014f82e | ||
|
60f1228030 | ||
|
104f5d1fe6 | ||
|
66543493b5 | ||
|
aa974d26c6 | ||
|
fde9640364 | ||
|
c5079ac15e | ||
|
2067ab0427 | ||
|
5bdd3bbfcb | ||
|
3288fad341 | ||
|
8b82939d33 | ||
|
4ac01ed880 | ||
|
99513f64fd | ||
|
3beb84bcfe | ||
|
b0889d7751 | ||
|
502a3cf841 | ||
|
523343b174 | ||
|
3b4da7e637 | ||
|
895691dad1 | ||
|
59fc403e32 | ||
|
f860a037b5 | ||
|
8aca00326d | ||
|
668627f890 | ||
|
344b1dc559 | ||
|
df88f4e1e9 | ||
|
67e464281f | ||
|
23ffa1e04f | ||
|
02d2eab18c | ||
|
22479a289d | ||
|
2088bb1f91 | ||
|
b7c4bfd4e3 | ||
|
e545933923 | ||
|
7b4f300eb2 | ||
|
ac6e0c1b89 | ||
|
c1334b9a8b | ||
|
03c9216026 | ||
|
bb2f0e938f | ||
|
ae6bf39495 | ||
|
24b540ecde | ||
|
487bf4e74a | ||
|
7866729d3b | ||
|
6b0097a24b | ||
|
04a8fb7f81 | ||
|
33383faf9e | ||
|
2b7e3ff1e7 | ||
|
0ecb6dcd4d | ||
|
ec0d2a5ed2 | ||
|
a96b3e077d | ||
|
2b7f6b2b84 | ||
|
8fecc2c00b | ||
|
708fa8280a | ||
|
7144dca68a | ||
|
7359586f1c | ||
|
4b3c9c2806 | ||
|
7674f907c4 | ||
|
f78270188f | ||
|
1d9f861f28 | ||
|
daae241ff9 | ||
|
a50a3362bd | ||
|
ec8a1ecec1 | ||
|
74659a82ab | ||
|
ddd75eae9a | ||
|
b95a67751e | ||
|
83841d801c | ||
|
65c0b9ebcf | ||
|
3ba67bad3d | ||
|
9b3be5c2e8 | ||
|
b203b3f444 | ||
|
af30ba0e3b | ||
|
c920a301e0 | ||
|
44525e235a | ||
|
6120571421 | ||
|
edced6818a | ||
|
6798dd7ba5 | ||
|
bfbe180101 | ||
|
52447f6999 | ||
|
568eb1d4e0 | ||
|
d4a7288826 | ||
|
21e5b0d6d0 | ||
|
0708073a0c | ||
|
1ba6c67ff2 | ||
|
1f06f242cc | ||
|
9b3ff82542 | ||
|
87239c62c1 | ||
|
03b5184837 | ||
|
52fbda1a5e | ||
|
110272484d | ||
|
7d97729eea | ||
|
b06167a3fa | ||
|
f3317f78d5 | ||
|
b2130b1593 | ||
|
9d199fd4a9 | ||
|
414282a2c9 | ||
|
faf3670e7f | ||
|
e674537d0b | ||
|
c4652d60a7 | ||
|
ea40ffd655 | ||
|
7d0f89df29 | ||
|
21255b6391 | ||
|
64e0832b85 | ||
|
bacea50485 | ||
|
1f5224b74b | ||
|
bd2757c63d | ||
|
e46ca38cbb | ||
|
27194a9f9c | ||
|
1aac5d78d9 | ||
|
6b18674960 | ||
|
eea07b7a1a | ||
|
15a9e16530 | ||
|
bd3722f075 | ||
|
fe5f9bfc28 | ||
|
dacf6ebc64 | ||
|
c742242094 | ||
|
1002affc16 | ||
|
16b1ab06a9 | ||
|
866c200c31 | ||
|
1fc29d094f | ||
|
39f57f1487 | ||
|
804b6f4c5d | ||
|
e13867f7c9 | ||
|
e1954adc32 | ||
|
0eea20fa7c | ||
|
3adb90e7b7 | ||
|
496dacb7ff | ||
|
865ff5c88d | ||
|
85e5a7e8ed | ||
|
24b1a99c42 | ||
|
8decbe7670 | ||
|
5c8e2a8510 | ||
|
bd91ddaf52 | ||
|
7b8cd63b04 | ||
|
fa35e8c0ba | ||
|
9fac14488a | ||
|
39da36361c | ||
|
73f336363a | ||
|
37ada58f3a | ||
|
607b7d4b6f | ||
|
920f3d2a7d | ||
|
5c1c941851 | ||
|
2c0c7225f5 | ||
|
0a372d83b9 | ||
|
5e80bf3043 | ||
|
9739de61b0 | ||
|
134ac2e68c | ||
|
e05515f79d | ||
|
e868adee2f | ||
|
5f62d738fc | ||
|
492e0dfeb1 | ||
|
b1cbf2e2e5 | ||
|
14dd6b9026 | ||
|
69dd8d2892 | ||
|
ca4cd6d559 | ||
|
02e0f3c095 | ||
|
863facaa33 | ||
|
15902dcba6 | ||
|
8e7e799304 | ||
|
dbdc7529b3 | ||
|
4be884824f | ||
|
520e40db37 | ||
|
46f20193f9 | ||
|
33ff938504 | ||
|
7fafb483ad | ||
|
342e7f5272 | ||
|
a4f4eabf0a | ||
|
628d7be1d8 | ||
|
3c6834fc18 | ||
|
3d6f015211 | ||
|
a6ed08b239 | ||
|
0a39066f9d | ||
|
a1d5a02646 | ||
|
b91fcb8e9b | ||
|
d71279f023 | ||
|
c78c833400 | ||
|
a2d91119d4 | ||
|
10585bfecc | ||
|
b572f64dc6 | ||
|
ff72a3c1c7 | ||
|
67841d54ee | ||
|
9c1b78395a | ||
|
581ddf78fc | ||
|
567e0ab7d1 | ||
|
1c0fe09576 | ||
|
bdda8691ff | ||
|
4b311684ab | ||
|
b7f1c5455f | ||
|
799cc82bb5 | ||
|
df7c51f34e | ||
|
88a4801d6a | ||
|
e88e9946f9 | ||
|
1fc9587919 | ||
|
29573682ec | ||
|
d199223be0 | ||
|
8ea9e83798 | ||
|
1a293a2a27 | ||
|
f1cfcfe7cc | ||
|
357899b83e | ||
|
a7c7ea5712 | ||
|
0483b9c641 | ||
|
5009e9e483 | ||
|
1e1741aa45 | ||
|
fe09737d80 | ||
|
4b843d145a | ||
|
cdab206d05 | ||
|
9c1c4093a3 | ||
|
131ed1b0a7 | ||
|
bf3ea71630 | ||
|
e6a2a7386c | ||
|
7c7fe70cb2 | ||
|
18030e6c58 | ||
|
220bbe5862 | ||
|
6d6d82b3af | ||
|
98f2ac5e7c | ||
|
635e633520 | ||
|
39f7e38444 | ||
|
c2b298c93a | ||
|
b8547da4c3 | ||
|
fae1f96856 | ||
|
afbdcd520b | ||
|
fbcb2ed7fd | ||
|
0449ec1868 | ||
|
a49b023a28 | ||
|
f1dbe8c9dd | ||
|
5fcf47c79f | ||
|
8f111680bf | ||
|
64369b5c2b | ||
|
392708a804 | ||
|
ddfe95e45d | ||
|
2dcce2ae72 | ||
|
f22e4eb24e | ||
|
2e37d5ce97 |
1018 changed files with 92848 additions and 38163 deletions
6
.dockerignore
Normal file
6
.dockerignore
Normal file
|
@ -0,0 +1,6 @@
|
|||
# We include .git in the build context because excluding it would break the
|
||||
# "make release" target, which uses git to retrieve the build version and tag.
|
||||
#.git
|
||||
|
||||
/tests
|
||||
/crowdsec-v*
|
33
.github/ISSUE_TEMPLATE/bug_report.md
vendored
33
.github/ISSUE_TEMPLATE/bug_report.md
vendored
|
@ -1,33 +0,0 @@
|
|||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: Bug/
|
||||
labels: bug
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
Please, start your issue name (after `Bug`) with the component name impacted by this feature request and a small description of the Bug. Example: `Bug/cscli: issue with ....` and remove this line :)
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Technical Information (please complete the following information):**
|
||||
- OS: [e.g. Ubuntu, Redhat ..]
|
||||
- Version [e.g. v0.2.0, v0.1.5 ..]
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here, for example `/var/log/crowdsec.log` or error messages.
|
136
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
Normal file
136
.github/ISSUE_TEMPLATE/bug_report.yaml
vendored
Normal file
|
@ -0,0 +1,136 @@
|
|||
name: Bug report
|
||||
description: Report a bug encountered while operating crowdsec
|
||||
labels: kind/bug
|
||||
body:
|
||||
- type: textarea
|
||||
id: problem
|
||||
attributes:
|
||||
label: What happened?
|
||||
description: |
|
||||
Please provide as much info as possible. Not doing so may result in your bug not being addressed in a timely manner.
|
||||
If this matter is security related, please disclose it privately to security@crowdsec.net
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: What did you expect to happen?
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: repro
|
||||
attributes:
|
||||
label: How can we reproduce it (as minimally and precisely as possible)?
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: additional
|
||||
attributes:
|
||||
label: Anything else we need to know?
|
||||
|
||||
- type: textarea
|
||||
id: Version
|
||||
attributes:
|
||||
label: Crowdsec version
|
||||
value: |
|
||||
<details>
|
||||
|
||||
```console
|
||||
$ cscli version
|
||||
# paste output here
|
||||
```
|
||||
|
||||
</details>
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: osVersion
|
||||
attributes:
|
||||
label: OS version
|
||||
value: |
|
||||
<details>
|
||||
|
||||
```console
|
||||
# On Linux:
|
||||
$ cat /etc/os-release
|
||||
# paste output here
|
||||
$ uname -a
|
||||
# paste output here
|
||||
|
||||
# On Windows:
|
||||
C:\> wmic os get Caption, Version, BuildNumber, OSArchitecture
|
||||
# paste output here
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
- type: textarea
|
||||
id: collections
|
||||
attributes:
|
||||
label: Enabled collections and parsers
|
||||
value: |
|
||||
<details>
|
||||
|
||||
```console
|
||||
$ cscli hub list -o raw
|
||||
# paste output here
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
- type: textarea
|
||||
id: acquis
|
||||
attributes:
|
||||
label: Acquisition config
|
||||
value: |
|
||||
<details>
|
||||
```console
|
||||
# On Linux:
|
||||
$ cat /etc/crowdsec/acquis.yaml /etc/crowdsec/acquis.d/*
|
||||
# paste output here
|
||||
|
||||
# On Windows:
|
||||
C:\> Get-Content C:\ProgramData\CrowdSec\config\acquis.yaml
|
||||
# paste output here
|
||||
</details>
|
||||
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Config show
|
||||
value: |
|
||||
<details>
|
||||
|
||||
```console
|
||||
$ cscli config show
|
||||
# paste output here
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
- type: textarea
|
||||
id: metrics
|
||||
attributes:
|
||||
label: Prometheus metrics
|
||||
value: |
|
||||
<details>
|
||||
|
||||
```console
|
||||
$ cscli metrics
|
||||
# paste output here
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
- type: textarea
|
||||
id: customizations
|
||||
attributes:
|
||||
label: "Related custom configs versions (if applicable) : notification plugins, custom scenarios, parsers etc."
|
||||
value: |
|
||||
<details>
|
||||
|
||||
</details>
|
4
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
4
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
|
@ -0,0 +1,4 @@
|
|||
contact_links:
|
||||
- name: Support Request
|
||||
url: https://discourse.crowdsec.net
|
||||
about: Support request or question relating to Crowdsec
|
25
.github/ISSUE_TEMPLATE/feature_request.md
vendored
25
.github/ISSUE_TEMPLATE/feature_request.md
vendored
|
@ -1,25 +0,0 @@
|
|||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: Improvement/
|
||||
labels: enhancement
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
Please, start your issue name (after `improvement`) with the component name impacted by this feature request and a small description of the FR. Example: `Improvement/cscli: add this feature ....` and remove this line :)
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered** (Optional)
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Example of what you imagine**
|
||||
If applicable, add an example of what you would expect from this feature request.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
27
.github/ISSUE_TEMPLATE/feature_request.yaml
vendored
Normal file
27
.github/ISSUE_TEMPLATE/feature_request.yaml
vendored
Normal file
|
@ -0,0 +1,27 @@
|
|||
name: Feature request
|
||||
description: Suggest an improvement or a new feature
|
||||
body:
|
||||
- type: textarea
|
||||
id: feature
|
||||
attributes:
|
||||
label: What would you like to be added?
|
||||
description: |
|
||||
Significant feature requests are unlikely to make progress as issues. Please consider engaging on discord (discord.gg/crowdsec) and forums (https://discourse.crowdsec.net), instead.
|
||||
value: |
|
||||
For feature request please pick a kind label by removing `<!-- -->` that wrap the example lines below
|
||||
|
||||
|
||||
<!-- /kind feature -->
|
||||
<!-- Completely new feature not currently available -->
|
||||
|
||||
<!-- /kind enhancement -->
|
||||
<!-- Feature is available but this extends or adds extra functionality -->
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: rationale
|
||||
attributes:
|
||||
label: Why is this needed?
|
||||
validations:
|
||||
required: true
|
2
.github/buildkit.toml
vendored
Normal file
2
.github/buildkit.toml
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
[worker.oci]
|
||||
# max-parallelism = 2
|
10
.github/codecov.yml
vendored
Normal file
10
.github/codecov.yml
vendored
Normal file
|
@ -0,0 +1,10 @@
|
|||
# we measure coverage but don't enforce it
|
||||
# https://docs.codecov.com/docs/codecov-yaml
|
||||
coverage:
|
||||
status:
|
||||
patch:
|
||||
default:
|
||||
target: 0%
|
||||
project:
|
||||
default:
|
||||
target: 0%
|
104
.github/governance.yml
vendored
Normal file
104
.github/governance.yml
vendored
Normal file
|
@ -0,0 +1,104 @@
|
|||
version: v1
|
||||
|
||||
issue:
|
||||
captures:
|
||||
- regex: 'version: v*(.+)-[rc*]?'
|
||||
github_release: true
|
||||
ignore_case: true
|
||||
label: 'version/$CAPTURED'
|
||||
|
||||
- regex: 'Platform: *(windows?|ms|wins?|microsoft).*'
|
||||
label: 'os/win'
|
||||
ignore_case: true
|
||||
|
||||
- regex: 'Platform: *(freebsd|bsd).*'
|
||||
label: 'os/freebsd'
|
||||
ignore_case: true
|
||||
|
||||
- regex: 'Platform: *(linux|linus|lin).*'
|
||||
label: 'os/linux'
|
||||
ignore_case: true
|
||||
|
||||
- regex: 'Platform: *(macos|mac|apple|macintosh|macbook).*'
|
||||
label: 'os/mac'
|
||||
ignore_case: true
|
||||
|
||||
labels:
|
||||
- prefix: triage
|
||||
list: ['accepted']
|
||||
multiple: false
|
||||
author_association:
|
||||
collaborator: true
|
||||
member: true
|
||||
owner: true
|
||||
needs:
|
||||
comment: |
|
||||
@$AUTHOR: Thanks for opening an issue, it is currently awaiting triage.
|
||||
|
||||
In the meantime, you can:
|
||||
|
||||
1. Check [Crowdsec Documentation](https://docs.crowdsec.net/) to see if your issue can be self resolved.
|
||||
2. You can also join our [Discord](https://discord.gg/crowdsec).
|
||||
3. Check [Releases](https://github.com/crowdsecurity/crowdsec/releases/latest) to make sure your agent is on the latest version.
|
||||
|
||||
- prefix: kind
|
||||
list: ['feature', 'bug', 'packaging', 'enhancement', 'refactoring']
|
||||
multiple: false
|
||||
author_association:
|
||||
author: true
|
||||
collaborator: true
|
||||
member: true
|
||||
owner: true
|
||||
needs:
|
||||
comment: |
|
||||
@$AUTHOR: There are no 'kind' label on this issue. You need a 'kind' label to start the triage process.
|
||||
* `/kind feature`
|
||||
* `/kind enhancement`
|
||||
* `/kind refactoring`
|
||||
* `/kind bug`
|
||||
* `/kind packaging`
|
||||
|
||||
- prefix: os
|
||||
list: ['mac', 'win', 'linux', 'freebsd']
|
||||
multiple: true
|
||||
|
||||
pull_request:
|
||||
labels:
|
||||
- prefix: kind
|
||||
multiple: false
|
||||
list: [ 'feature', 'enhancement', 'fix', 'chore', 'dependencies', 'refactoring']
|
||||
needs:
|
||||
comment: |
|
||||
@$AUTHOR: There are no 'kind' label on this PR. You need a 'kind' label to generate the release automatically.
|
||||
* `/kind feature`
|
||||
* `/kind enhancement`
|
||||
* `/kind refactoring`
|
||||
* `/kind fix`
|
||||
* `/kind chore`
|
||||
* `/kind dependencies`
|
||||
status:
|
||||
context: 'Kind Label'
|
||||
description:
|
||||
success: Ready for review & merge.
|
||||
failure: Missing kind label to generate release automatically.
|
||||
|
||||
- prefix: area
|
||||
list: [ "agent", "local-api", "cscli", "security", "configuration", "appsec"]
|
||||
multiple: true
|
||||
needs:
|
||||
comment: |
|
||||
@$AUTHOR: There are no area labels on this PR. You can add as many areas as you see fit.
|
||||
* `/area agent`
|
||||
* `/area local-api`
|
||||
* `/area cscli`
|
||||
* `/area appsec`
|
||||
* `/area security`
|
||||
* `/area configuration`
|
||||
|
||||
- prefix: priority
|
||||
multiple: false
|
||||
list: [ 'urgent', 'important' ]
|
||||
author_association:
|
||||
collaborator: true
|
||||
member: true
|
||||
owner: true
|
7
.github/release-drafter.yml
vendored
7
.github/release-drafter.yml
vendored
|
@ -2,12 +2,15 @@ categories:
|
|||
- title: 'New Features'
|
||||
labels:
|
||||
- 'new feature'
|
||||
- 'kind/feature'
|
||||
- title: 'Improvements'
|
||||
labels:
|
||||
- 'kind/enhancement'
|
||||
- 'enhancement'
|
||||
- 'improvement'
|
||||
- title: 'Bug Fixes'
|
||||
labels:
|
||||
- 'kind/fix'
|
||||
- 'fix'
|
||||
- 'bugfix'
|
||||
- 'bug'
|
||||
|
@ -15,6 +18,10 @@ categories:
|
|||
labels:
|
||||
- 'documentation'
|
||||
- 'doc'
|
||||
- title: 'Chore / Deps'
|
||||
labels:
|
||||
- 'kind/dependencies'
|
||||
- 'kind/chore'
|
||||
tag-template: "- $TITLE @$AUTHOR (#$NUMBER)"
|
||||
template: |
|
||||
## Changes
|
||||
|
|
1
.github/workflows/.yamllint
vendored
Symbolic link
1
.github/workflows/.yamllint
vendored
Symbolic link
|
@ -0,0 +1 @@
|
|||
../../.yamllint
|
47
.github/workflows/bats-hub.yml
vendored
47
.github/workflows/bats-hub.yml
vendored
|
@ -1,4 +1,4 @@
|
|||
name: Hub tests
|
||||
name: (sub) Bats / Hub
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
|
@ -10,9 +10,13 @@ on:
|
|||
|
||||
jobs:
|
||||
build:
|
||||
name: "Build + tests"
|
||||
strategy:
|
||||
matrix:
|
||||
test-file: ["hub-1.bats", "hub-2.bats", "hub-3.bats"]
|
||||
|
||||
name: "Functional tests"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
timeout-minutes: 30
|
||||
steps:
|
||||
|
||||
- name: "Force machineid"
|
||||
|
@ -20,36 +24,37 @@ jobs:
|
|||
sudo chmod +w /etc/machine-id
|
||||
echo githubciXXXXXXXXXXXXXXXXXXXXXXXX | sudo tee /etc/machine-id
|
||||
|
||||
- name: "Set up Go 1.17"
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.17
|
||||
id: go
|
||||
|
||||
- name: "Clone CrowdSec"
|
||||
uses: actions/checkout@v3
|
||||
- name: "Check out CrowdSec repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: true
|
||||
|
||||
- name: "Set up Go"
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.22.2"
|
||||
|
||||
- name: "Install bats dependencies"
|
||||
env:
|
||||
GOBIN: /usr/local/bin
|
||||
run: |
|
||||
sudo apt install -y -qq build-essential daemonize jq netcat-openbsd
|
||||
GO111MODULE=on go get github.com/mikefarah/yq/v4
|
||||
sudo cp -u ~/go/bin/yq /usr/local/bin/
|
||||
sudo apt -qq -y -o=Dpkg::Use-Pty=0 install build-essential daemonize jq libre2-dev
|
||||
|
||||
- name: "Build crowdsec and fixture"
|
||||
run: make bats-clean bats-build bats-fixture
|
||||
run: make bats-clean bats-build bats-fixture BUILD_STATIC=1
|
||||
|
||||
- name: "Run hub tests"
|
||||
run: make bats-test-hub
|
||||
run: |
|
||||
./test/bin/generate-hub-tests
|
||||
./test/run-tests ./test/dyn-bats/${{ matrix.test-file }} --formatter $(pwd)/test/lib/color-formatter
|
||||
|
||||
- name: "Collect hub coverage"
|
||||
run: ./tests/collect-hub-coverage >> $GITHUB_ENV
|
||||
run: ./test/bin/collect-hub-coverage >> $GITHUB_ENV
|
||||
|
||||
- name: "Create Parsers badge"
|
||||
uses: schneegans/dynamic-badges-action@v1.1.0
|
||||
if: ${{ github.ref == 'refs/heads/master' }}
|
||||
uses: schneegans/dynamic-badges-action@v1.7.0
|
||||
if: ${{ github.ref == 'refs/heads/master' && github.repository_owner == 'crowdsecurity' }}
|
||||
with:
|
||||
auth: ${{ secrets.GIST_BADGES_SECRET }}
|
||||
gistID: ${{ secrets.GIST_BADGES_ID }}
|
||||
|
@ -59,8 +64,8 @@ jobs:
|
|||
color: ${{ env.SCENARIO_BADGE_COLOR }}
|
||||
|
||||
- name: "Create Scenarios badge"
|
||||
uses: schneegans/dynamic-badges-action@v1.1.0
|
||||
if: ${{ github.ref == 'refs/heads/master' }}
|
||||
uses: schneegans/dynamic-badges-action@v1.7.0
|
||||
if: ${{ github.ref == 'refs/heads/master' && github.repository_owner == 'crowdsecurity' }}
|
||||
with:
|
||||
auth: ${{ secrets.GIST_BADGES_SECRET }}
|
||||
gistID: ${{ secrets.GIST_BADGES_ID }}
|
||||
|
|
66
.github/workflows/bats-mysql.yml
vendored
66
.github/workflows/bats-mysql.yml
vendored
|
@ -1,4 +1,4 @@
|
|||
name: Functional tests with MySQL
|
||||
name: (sub) Bats / MySQL
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
|
@ -6,21 +6,17 @@ on:
|
|||
database_image:
|
||||
required: true
|
||||
type: string
|
||||
secrets:
|
||||
DATABASE_PASSWORD:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
|
||||
build:
|
||||
name: "Build + tests"
|
||||
name: "Functional tests"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
timeout-minutes: 30
|
||||
services:
|
||||
database:
|
||||
image: ${{ inputs.database_image }}
|
||||
env:
|
||||
MYSQL_ROOT_PASSWORD: ${{ secrets.DATABASE_PASSWORD }}
|
||||
MYSQL_ROOT_PASSWORD: "secret"
|
||||
ports:
|
||||
- 3306:3306
|
||||
|
||||
|
@ -31,48 +27,66 @@ jobs:
|
|||
sudo chmod +w /etc/machine-id
|
||||
echo githubciXXXXXXXXXXXXXXXXXXXXXXXX | sudo tee /etc/machine-id
|
||||
|
||||
- name: "Set up Go 1.17"
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.17
|
||||
id: go
|
||||
|
||||
- name: "Clone CrowdSec"
|
||||
uses: actions/checkout@v3
|
||||
- name: "Check out CrowdSec repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: true
|
||||
|
||||
- name: "Set up Go"
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.22.2"
|
||||
|
||||
- name: "Install bats dependencies"
|
||||
env:
|
||||
GOBIN: /usr/local/bin
|
||||
run: |
|
||||
sudo apt install -y -qq build-essential daemonize jq netcat-openbsd
|
||||
GO111MODULE=on go get github.com/mikefarah/yq/v4
|
||||
sudo cp -u ~/go/bin/yq /usr/local/bin/
|
||||
sudo apt -qq -y -o=Dpkg::Use-Pty=0 install build-essential daemonize jq libre2-dev
|
||||
|
||||
- name: "Build crowdsec and fixture"
|
||||
run: make bats-clean bats-build bats-fixture
|
||||
run: |
|
||||
make clean bats-build bats-fixture BUILD_STATIC=1
|
||||
env:
|
||||
DB_BACKEND: mysql
|
||||
MYSQL_HOST: 127.0.0.1
|
||||
MYSQL_PORT: 3306
|
||||
MYSQL_PASSWORD: ${{ secrets.DATABASE_PASSWORD }}
|
||||
MYSQL_PASSWORD: "secret"
|
||||
MYSQL_USER: root
|
||||
|
||||
- name: "Run tests"
|
||||
run: make bats-test
|
||||
run: ./test/run-tests ./test/bats --formatter $(pwd)/test/lib/color-formatter
|
||||
env:
|
||||
DB_BACKEND: mysql
|
||||
MYSQL_HOST: 127.0.0.1
|
||||
MYSQL_PORT: 3306
|
||||
MYSQL_PASSWORD: ${{ secrets.DATABASE_PASSWORD }}
|
||||
MYSQL_PASSWORD: "secret"
|
||||
MYSQL_USER: root
|
||||
|
||||
#
|
||||
# In case you need to inspect the database status after the failure of a given test
|
||||
#
|
||||
# - name: "Run specified tests"
|
||||
# run: ./test/run-tests test/bats/<filename>.bats -f "<test name>"
|
||||
|
||||
- name: Show database dump
|
||||
run: ./test/instance-db dump /dev/fd/1
|
||||
env:
|
||||
DB_BACKEND: mysql
|
||||
MYSQL_HOST: 127.0.0.1
|
||||
MYSQL_PORT: 3306
|
||||
MYSQL_PASSWORD: "secret"
|
||||
MYSQL_USER: root
|
||||
if: ${{ always() }}
|
||||
|
||||
- name: "Show stack traces"
|
||||
run: for file in $(find /tmp/crowdsec-crash.*.txt); do echo ">>>>> $file"; cat $file; echo; done
|
||||
if: ${{ always() }}
|
||||
|
||||
- name: "Show crowdsec logs"
|
||||
run:
|
||||
for file in $(find ./tests/local/var/log -type f); do echo ">>>>> $file"; cat $file; echo; done
|
||||
run: for file in $(find ./test/local/var/log -type f); do echo ">>>>> $file"; cat $file; echo; done
|
||||
if: ${{ always() }}
|
||||
|
||||
- name: "Show database logs"
|
||||
run: docker logs "${{ job.services.database.id }}"
|
||||
if: ${{ always() }}
|
||||
|
||||
|
|
76
.github/workflows/bats-postgres.yml
vendored
76
.github/workflows/bats-postgres.yml
vendored
|
@ -1,22 +1,18 @@
|
|||
name: Functional tests with PostgreSQL
|
||||
name: (sub) Bats / Postgres
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
DATABASE_PASSWORD:
|
||||
required: true
|
||||
|
||||
jobs:
|
||||
|
||||
build:
|
||||
name: "Build + tests"
|
||||
name: "Functional tests"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
timeout-minutes: 30
|
||||
services:
|
||||
database:
|
||||
image: postgres:latest
|
||||
image: postgres:16
|
||||
env:
|
||||
POSTGRES_PASSWORD: ${{ secrets.DATABASE_PASSWORD }}
|
||||
POSTGRES_PASSWORD: "secret"
|
||||
ports:
|
||||
- 5432:5432
|
||||
options: >-
|
||||
|
@ -27,71 +23,63 @@ jobs:
|
|||
|
||||
steps:
|
||||
|
||||
- name: "Install pg_dump v16"
|
||||
# we can remove this when it's released on ubuntu-latest
|
||||
run: |
|
||||
sudo sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
|
||||
wget -qO- https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo tee /etc/apt/trusted.gpg.d/pgdg.asc &>/dev/null
|
||||
sudo apt update
|
||||
sudo apt -qq -y -o=Dpkg::Use-Pty=0 install postgresql-client-16
|
||||
|
||||
- name: "Force machineid"
|
||||
run: |
|
||||
sudo chmod +w /etc/machine-id
|
||||
echo githubciXXXXXXXXXXXXXXXXXXXXXXXX | sudo tee /etc/machine-id
|
||||
|
||||
- name: "Set up Go 1.17"
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.17
|
||||
id: go
|
||||
|
||||
- name: "Clone CrowdSec"
|
||||
uses: actions/checkout@v3
|
||||
- name: "Check out CrowdSec repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: true
|
||||
|
||||
- name: "Set up Go"
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.22.2"
|
||||
|
||||
- name: "Install bats dependencies"
|
||||
env:
|
||||
GOBIN: /usr/local/bin
|
||||
run: |
|
||||
sudo apt install -y -qq build-essential daemonize jq netcat-openbsd
|
||||
GO111MODULE=on go get github.com/mikefarah/yq/v4
|
||||
sudo cp -u ~/go/bin/yq /usr/local/bin/
|
||||
sudo apt -qq -y -o=Dpkg::Use-Pty=0 install build-essential daemonize jq libre2-dev
|
||||
|
||||
- name: "Build crowdsec and fixture (DB_BACKEND: pgx)"
|
||||
run: make clean bats-build bats-fixture
|
||||
run: |
|
||||
make clean bats-build bats-fixture BUILD_STATIC=1
|
||||
env:
|
||||
DB_BACKEND: pgx
|
||||
PGHOST: 127.0.0.1
|
||||
PGPORT: 5432
|
||||
PGPASSWORD: ${{ secrets.DATABASE_PASSWORD }}
|
||||
PGPASSWORD: "secret"
|
||||
PGUSER: postgres
|
||||
|
||||
- name: "Run tests (DB_BACKEND: pgx)"
|
||||
run: make bats-test
|
||||
run: ./test/run-tests ./test/bats --formatter $(pwd)/test/lib/color-formatter
|
||||
env:
|
||||
DB_BACKEND: pgx
|
||||
PGHOST: 127.0.0.1
|
||||
PGPORT: 5432
|
||||
PGPASSWORD: ${{ secrets.DATABASE_PASSWORD }}
|
||||
PGPASSWORD: "secret"
|
||||
PGUSER: postgres
|
||||
|
||||
- name: "Build crowdsec and fixture (DB_BACKEND: postgres)"
|
||||
run: make clean bats-build bats-fixture
|
||||
env:
|
||||
DB_BACKEND: postgres
|
||||
PGHOST: 127.0.0.1
|
||||
PGPORT: 5432
|
||||
PGPASSWORD: ${{ secrets.DATABASE_PASSWORD }}
|
||||
PGUSER: postgres
|
||||
|
||||
- name: "Run tests (DB_BACKEND: postgres)"
|
||||
run: make bats-test
|
||||
env:
|
||||
DB_BACKEND: postgres
|
||||
PGHOST: 127.0.0.1
|
||||
PGPORT: 5432
|
||||
PGPASSWORD: ${{ secrets.DATABASE_PASSWORD }}
|
||||
PGUSER: postgres
|
||||
- name: "Show stack traces"
|
||||
run: for file in $(find /tmp/crowdsec-crash.*.txt); do echo ">>>>> $file"; cat $file; echo; done
|
||||
if: ${{ always() }}
|
||||
|
||||
- name: "Show crowdsec logs"
|
||||
run:
|
||||
for file in $(find ./tests/local/var/log -type f); do echo ">>>>> $file"; cat $file; echo; done
|
||||
run: for file in $(find ./test/local/var/log -type f); do echo ">>>>> $file"; cat $file; echo; done
|
||||
if: ${{ always() }}
|
||||
|
||||
- name: "Show database logs"
|
||||
run: docker logs "${{ job.services.database.id }}"
|
||||
if: ${{ always() }}
|
||||
|
||||
|
|
81
.github/workflows/bats-sqlite-coverage.yml
vendored
81
.github/workflows/bats-sqlite-coverage.yml
vendored
|
@ -1,12 +1,14 @@
|
|||
name: Functional tests with sqlite
|
||||
name: (sub) Bats / sqlite + coverage
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
|
||||
jobs:
|
||||
env:
|
||||
TEST_COVERAGE: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: "Build + tests"
|
||||
name: "Functional tests"
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 20
|
||||
|
||||
|
@ -17,41 +19,66 @@ jobs:
|
|||
sudo chmod +w /etc/machine-id
|
||||
echo githubciXXXXXXXXXXXXXXXXXXXXXXXX | sudo tee /etc/machine-id
|
||||
|
||||
- name: "Set up Go 1.17"
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.17
|
||||
id: go
|
||||
|
||||
- name: "Clone CrowdSec"
|
||||
uses: actions/checkout@v3
|
||||
- name: "Check out CrowdSec repository"
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: true
|
||||
|
||||
- name: "Set up Go"
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.22.2"
|
||||
|
||||
- name: "Install bats dependencies"
|
||||
env:
|
||||
GOBIN: /usr/local/bin
|
||||
run: |
|
||||
sudo apt install -y -qq build-essential daemonize jq netcat-openbsd
|
||||
GO111MODULE=on go get github.com/mikefarah/yq/v4
|
||||
sudo cp -u ~/go/bin/yq /usr/local/bin/
|
||||
go install github.com/wadey/gocovmerge@latest
|
||||
sudo cp -u ~/go/bin/gocovmerge /usr/local/bin/
|
||||
sudo apt -qq -y -o=Dpkg::Use-Pty=0 install build-essential daemonize jq libre2-dev
|
||||
|
||||
- name: "Build crowdsec and fixture"
|
||||
run: TEST_COVERAGE=true make bats-clean bats-build bats-fixture
|
||||
run: |
|
||||
make clean bats-build bats-fixture BUILD_STATIC=1
|
||||
|
||||
- name: "Run tests"
|
||||
run: |
|
||||
TEST_COVERAGE=true make bats-test
|
||||
bzip2 ./tests/local/var/lib/coverage/coverage-bats.out
|
||||
run: ./test/run-tests ./test/bats --formatter $(pwd)/test/lib/color-formatter
|
||||
|
||||
- name: "Coverage report artifact"
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: coverage-bats.out.bz2
|
||||
path: ./tests/local/var/lib/coverage/coverage-bats.out.bz2
|
||||
- name: "Collect coverage data"
|
||||
run: |
|
||||
go tool covdata textfmt -i test/coverage -o coverage-bats-raw.out
|
||||
# filter out unwanted packages, should match the argument to "go-acc --ignore"
|
||||
grep -v \
|
||||
-e '/pkg/database' \
|
||||
-e '/plugins/notifications' \
|
||||
-e '/pkg/protobufs' \
|
||||
-e '/pkg/cwversions' \
|
||||
-e '/pkg/models' \
|
||||
< coverage-bats-raw.out \
|
||||
> coverage-bats.out
|
||||
|
||||
#
|
||||
# In case you need to inspect the database status after the failure of a given test
|
||||
#
|
||||
# - name: "Run specified tests"
|
||||
# run: ./test/run-tests test/bats/<filename>.bats -f "<test name>"
|
||||
|
||||
- name: "Show database dump"
|
||||
run: |
|
||||
./test/instance-crowdsec stop
|
||||
sqlite3 ./test/local/var/lib/crowdsec/data/crowdsec.db '.dump'
|
||||
if: ${{ always() }}
|
||||
|
||||
- name: "Show stack traces"
|
||||
run: for file in $(find /tmp/crowdsec-crash.*.txt); do echo ">>>>> $file"; cat $file; echo; done
|
||||
if: ${{ always() }}
|
||||
|
||||
- name: "Show crowdsec logs"
|
||||
run:
|
||||
for file in $(find ./tests/local/var/log -type f); do echo ">>>>> $file"; cat $file; echo; done
|
||||
run: for file in $(find ./test/local/var/log -type f); do echo ">>>>> $file"; cat $file; echo; done
|
||||
if: ${{ always() }}
|
||||
|
||||
- name: Upload crowdsec coverage to codecov
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
files: ./coverage-bats.out
|
||||
flags: bats
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
|
53
.github/workflows/bats.yml
vendored
Normal file
53
.github/workflows/bats.yml
vendored
Normal file
|
@ -0,0 +1,53 @@
|
|||
---
|
||||
# This workflow is actually running
|
||||
# only functional tests, but the
|
||||
# name is used for the badge in README.md
|
||||
|
||||
name: Tests
|
||||
|
||||
# Main workflow for functional tests, it calls all the others through parallel jobs.
|
||||
#
|
||||
# https://docs.github.com/en/actions/using-workflows/reusing-workflows
|
||||
#
|
||||
# There is no need to merge coverage output because codecov.io should take care of that.
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- releases/**
|
||||
paths-ignore:
|
||||
- "README.md"
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- releases/**
|
||||
paths-ignore:
|
||||
- "README.md"
|
||||
|
||||
jobs:
|
||||
sqlite:
|
||||
uses: ./.github/workflows/bats-sqlite-coverage.yml
|
||||
|
||||
# Jobs for Postgres (and sometimes MySQL) can have failing tests on GitHub
|
||||
# CI, but they pass when run on devs' machines or in the release checks. We
|
||||
# disable them here by default. Remove if...false to enable them.
|
||||
|
||||
mariadb:
|
||||
uses: ./.github/workflows/bats-mysql.yml
|
||||
with:
|
||||
database_image: mariadb:latest
|
||||
|
||||
mysql:
|
||||
uses: ./.github/workflows/bats-mysql.yml
|
||||
with:
|
||||
database_image: mysql:latest
|
||||
|
||||
postgres:
|
||||
uses: ./.github/workflows/bats-postgres.yml
|
||||
|
||||
hub:
|
||||
uses: ./.github/workflows/bats-hub.yml
|
||||
secrets:
|
||||
GIST_BADGES_ID: ${{ secrets.GIST_BADGES_ID }}
|
||||
GIST_BADGES_SECRET: ${{ secrets.GIST_BADGES_SECRET }}
|
35
.github/workflows/cache-cleanup.yaml
vendored
Normal file
35
.github/workflows/cache-cleanup.yaml
vendored
Normal file
|
@ -0,0 +1,35 @@
|
|||
# https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#managing-caches
|
||||
|
||||
name: cleanup caches by a branch
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- closed
|
||||
|
||||
jobs:
|
||||
cleanup:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Cleanup
|
||||
run: |
|
||||
gh extension install actions/gh-actions-cache
|
||||
|
||||
REPO=${{ github.repository }}
|
||||
BRANCH="refs/pull/${{ github.event.pull_request.number }}/merge"
|
||||
|
||||
echo "Fetching list of cache key"
|
||||
cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH | cut -f 1 )
|
||||
|
||||
## Setting this to not fail the workflow while deleting cache keys.
|
||||
set +e
|
||||
echo "Deleting caches..."
|
||||
for cacheKey in $cacheKeysForPR
|
||||
do
|
||||
gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
|
||||
done
|
||||
echo "Done"
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
46
.github/workflows/ci-windows-build-msi.yml
vendored
Normal file
46
.github/workflows/ci-windows-build-msi.yml
vendored
Normal file
|
@ -0,0 +1,46 @@
|
|||
name: build-msi (windows)
|
||||
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- prereleased
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- releases/**
|
||||
paths:
|
||||
- windows/installer/*.wxs
|
||||
- .github/workflows/ci-windows-build-msi.yml
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- releases/**
|
||||
paths:
|
||||
- windows/installer/*.wxs
|
||||
- .github/workflows/ci-windows-build-msi.yml
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build
|
||||
runs-on: windows-2019
|
||||
|
||||
steps:
|
||||
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: false
|
||||
|
||||
- name: "Set up Go"
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.22.2"
|
||||
|
||||
- name: Build
|
||||
run: make windows_installer BUILD_RE2_WASM=1
|
||||
- name: Upload MSI
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
path: crowdsec*msi
|
||||
name: crowdsec.msi
|
32
.github/workflows/ci_golangci-lint.yml
vendored
32
.github/workflows/ci_golangci-lint.yml
vendored
|
@ -1,32 +0,0 @@
|
|||
name: golangci-lint
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- v*
|
||||
branches:
|
||||
- master
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
- 'mkdocs.yml'
|
||||
- 'README.md'
|
||||
pull_request:
|
||||
paths-ignore:
|
||||
- 'docs/**'
|
||||
- 'mkdocs.yml'
|
||||
- 'README.md'
|
||||
jobs:
|
||||
golangci:
|
||||
name: lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v2
|
||||
with:
|
||||
# Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
|
||||
version: v1.45.2
|
||||
# Optional: golangci-lint command line arguments.
|
||||
args: --issues-exit-code=0 --timeout 5m
|
||||
only-new-issues: true
|
||||
|
||||
|
4
.github/workflows/ci_release-drafter.yml
vendored
4
.github/workflows/ci_release-drafter.yml
vendored
|
@ -5,17 +5,17 @@ on:
|
|||
# branches to consider in the event; optional, defaults to all
|
||||
branches:
|
||||
- master
|
||||
- releases/**
|
||||
|
||||
jobs:
|
||||
update_release_draft:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Drafts your next Release notes as Pull Requests are merged into "master"
|
||||
- uses: release-drafter/release-drafter@v5
|
||||
- uses: release-drafter/release-drafter@v6
|
||||
with:
|
||||
config-name: release-drafter.yml
|
||||
# (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
|
||||
# config-name: my-config.yml
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
|
|
114
.github/workflows/ci_tests.yml
vendored
114
.github/workflows/ci_tests.yml
vendored
|
@ -1,114 +0,0 @@
|
|||
name: Tests
|
||||
|
||||
# Main workflow for tests, it calls all the others through parallel jobs.
|
||||
#
|
||||
# A final step collects and merges coverage output, then pushes it to
|
||||
# coveralls.io
|
||||
#
|
||||
# https://docs.github.com/en/actions/using-workflows/reusing-workflows
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- testing*
|
||||
paths-ignore:
|
||||
- 'README.md'
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- testing*
|
||||
paths-ignore:
|
||||
- 'README.md'
|
||||
|
||||
jobs:
|
||||
|
||||
go-tests:
|
||||
uses: ./.github/workflows/go-tests.yml
|
||||
|
||||
bats-sqlite:
|
||||
uses: ./.github/workflows/bats-sqlite-coverage.yml
|
||||
|
||||
# Jobs for Postgres and MySQL can have failing tests on GitHub CI, but they
|
||||
# pass when run on devs' machines or in the release checks. We disable them
|
||||
# here by default. Remove the if..false to enable them.
|
||||
|
||||
bats-mariadb:
|
||||
if: ${{ false }}
|
||||
uses: ./.github/workflows/bats-mysql.yml
|
||||
with:
|
||||
database_image: mariadb:latest
|
||||
secrets:
|
||||
DATABASE_PASSWORD: ${{ secrets.DATABASE_PASSWORD}}
|
||||
|
||||
bats-mysql:
|
||||
if: ${{ false }}
|
||||
uses: ./.github/workflows/bats-mysql.yml
|
||||
with:
|
||||
database_image: mysql:latest
|
||||
secrets:
|
||||
DATABASE_PASSWORD: ${{ secrets.DATABASE_PASSWORD}}
|
||||
|
||||
bats-postgres:
|
||||
if: ${{ false }}
|
||||
uses: ./.github/workflows/bats-postgres.yml
|
||||
secrets:
|
||||
DATABASE_PASSWORD: ${{ secrets.DATABASE_PASSWORD}}
|
||||
|
||||
bats-hub:
|
||||
uses: ./.github/workflows/bats-hub.yml
|
||||
secrets:
|
||||
GIST_BADGES_ID: ${{ secrets.GIST_BADGES_ID }}
|
||||
GIST_BADGES_SECRET: ${{ secrets.GIST_BADGES_SECRET }}
|
||||
|
||||
coverage:
|
||||
needs: [go-tests, bats-sqlite]
|
||||
name: Coverage
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Set up Go 1.17
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.17
|
||||
id: go
|
||||
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Download unit report
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: coverage.out.bz2
|
||||
|
||||
- name: Download bats report
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: coverage-bats.out.bz2
|
||||
|
||||
- name: merge coverage reports
|
||||
run: |
|
||||
go get -u github.com/wadey/gocovmerge
|
||||
bunzip2 coverage.out.bz2
|
||||
bunzip2 coverage-bats.out.bz2
|
||||
~/go/bin/gocovmerge coverage.out coverage-bats.out > coverage-all.out
|
||||
bzip2 <coverage-all.out >coverage-all.out.bz2
|
||||
|
||||
- name: gcov2lcov
|
||||
uses: jandelgado/gcov2lcov-action@v1.0.8
|
||||
with:
|
||||
infile: coverage-all.out
|
||||
outfile: coverage-all.txt
|
||||
|
||||
- name: Coverage report artifact (merged)
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: coverage-all.out.bz2
|
||||
path: ./coverage-all.out.bz2
|
||||
|
||||
- name: Coveralls
|
||||
uses: coverallsapp/github-action@master
|
||||
continue-on-error: true
|
||||
with:
|
||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
path-to-lcov: coverage-all.txt
|
||||
|
33
.github/workflows/codeql-analysis.yml
vendored
33
.github/workflows/codeql-analysis.yml
vendored
|
@ -1,3 +1,4 @@
|
|||
# yamllint disable rule:comments
|
||||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
|
@ -13,10 +14,14 @@ name: "CodeQL"
|
|||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
branches:
|
||||
- master
|
||||
- releases/**
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [ master ]
|
||||
branches:
|
||||
- master
|
||||
- releases/**
|
||||
schedule:
|
||||
- cron: '15 16 * * 2'
|
||||
|
||||
|
@ -39,11 +44,20 @@ jobs:
|
|||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
# required to pick up tags for BUILD_VERSION
|
||||
fetch-depth: 0
|
||||
|
||||
- name: "Set up Go"
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: "1.22.2"
|
||||
cache-dependency-path: "**/go.sum"
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
uses: github/codeql-action/init@v3
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
|
@ -53,8 +67,8 @@ jobs:
|
|||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v1
|
||||
# - name: Autobuild
|
||||
# uses: github/codeql-action/autobuild@v3
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
@ -63,9 +77,8 @@ jobs:
|
|||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
- run: |
|
||||
make clean build BUILD_RE2_WASM=1
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
||||
uses: github/codeql-action/analyze@v3
|
||||
|
|
20
.github/workflows/dispatch_ci_hub.yaml
vendored
20
.github/workflows/dispatch_ci_hub.yaml
vendored
|
@ -1,20 +0,0 @@
|
|||
name: Dispatch to hub when creating pre-release
|
||||
|
||||
on:
|
||||
push:
|
||||
# branches to consider in the event; optional, defaults to all
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
dispatch:
|
||||
name: dispatch to hub-tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Repository Dispatch
|
||||
uses: peter-evans/repository-dispatch@v1
|
||||
with:
|
||||
token: ${{ secrets.DISPATCH_TOKEN }}
|
||||
event-type: trigger_ci_hub
|
||||
repository: crowdsecurity/hub
|
||||
client-payload: '{"version": "${{ steps.keydb.outputs.release }}"}'
|
|
@ -1,24 +0,0 @@
|
|||
name: Dispatch to hub when creating pre-release
|
||||
|
||||
on:
|
||||
release:
|
||||
types: prereleased
|
||||
|
||||
jobs:
|
||||
dispatch:
|
||||
name: dispatch to hub-tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- id: keydb
|
||||
uses: pozetroninc/github-action-get-latest-release@master
|
||||
with:
|
||||
owner: crowdsecurity
|
||||
repo: crowdsec
|
||||
excludes: prerelease, draft
|
||||
- name: Repository Dispatch
|
||||
uses: peter-evans/repository-dispatch@v1
|
||||
with:
|
||||
token: ${{ secrets.DISPATCH_TOKEN }}
|
||||
event-type: create_branch
|
||||
repository: crowdsecurity/hub
|
||||
client-payload: '{"version": "${{ steps.keydb.outputs.release }}"}'
|
|
@ -1,24 +0,0 @@
|
|||
name: Dispatch to hub when deleting pre-release
|
||||
|
||||
on:
|
||||
release:
|
||||
types: deleted
|
||||
|
||||
jobs:
|
||||
dispatch:
|
||||
name: dispatch to hub-tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- id: keydb
|
||||
uses: pozetroninc/github-action-get-latest-release@master
|
||||
with:
|
||||
owner: crowdsecurity
|
||||
repo: crowdsec
|
||||
excludes: prerelease, draft
|
||||
- name: Repository Dispatch
|
||||
uses: peter-evans/repository-dispatch@v1
|
||||
with:
|
||||
token: ${{ secrets.DISPATCH_TOKEN }}
|
||||
event-type: delete_branch
|
||||
repository: crowdsecurity/hub
|
||||
client-payload: '{"version": "${{ steps.keydb.outputs.release }}"}'
|
87 .github/workflows/docker-tests.yml (vendored, new file)
@@ -0,0 +1,87 @@
name: Test Docker images

on:
  push:
    branches:
      - master
      - releases/**
    paths-ignore:
      - 'README.md'
  pull_request:
    branches:
      - master
      - releases/**
    paths-ignore:
      - 'README.md'

jobs:
  test_flavor:
    strategy:
      # we could test all the flavors in a single pytest job,
      # but let's split them (and the image build) in multiple runners for performance
      matrix:
        # can be slim, full or debian (no debian slim).
        flavor: ["slim", "debian"]

    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:

      - name: Check out the repo
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          config: .github/buildkit.toml

      - name: "Build image"
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./Dockerfile${{ matrix.flavor == 'debian' && '.debian' || '' }}
          tags: crowdsecurity/crowdsec:test${{ matrix.flavor == 'full' && '' || '-' }}${{ matrix.flavor == 'full' && '' || matrix.flavor }}
          target: ${{ matrix.flavor == 'debian' && 'full' || matrix.flavor }}
          platforms: linux/amd64
          load: true
          cache-from: type=gha
          cache-to: type=gha,mode=min

      - name: "Setup Python"
        uses: actions/setup-python@v5
        with:
          python-version: "3.x"

      - name: "Install pipenv"
        run: |
          cd docker/test
          python -m pip install --upgrade pipenv wheel

      #- name: "Cache virtualenvs"
      #  id: cache-pipenv
      #  uses: actions/cache@v4
      #  with:
      #    path: ~/.local/share/virtualenvs
      #    key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}

      - name: "Install dependencies"
        #if: steps.cache-pipenv.outputs.cache-hit != 'true'
        run: |
          cd docker/test
          pipenv install --deploy

      - name: "Create Docker network"
        run: docker network create net-test

      - name: "Run tests"
        env:
          CROWDSEC_TEST_VERSION: test
          CROWDSEC_TEST_FLAVORS: ${{ matrix.flavor }}
          CROWDSEC_TEST_NETWORK: net-test
          CROWDSEC_TEST_TIMEOUT: 90
        # running serially to reduce test flakiness
        run: |
          cd docker/test
          pipenv run pytest -n 1 --durations=0 --color=yes
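For reference, the job above can be approximated on a workstation. This is a minimal sketch, assuming Docker and pipenv are installed locally; the image tag, network name and environment variables mirror the values in the workflow, everything else is illustrative.

# build one flavor locally (here: slim), following the workflow's tag scheme
docker build --target slim -t crowdsecurity/crowdsec:test-slim .

# the tests expect a dedicated network plus a few env variables
docker network create net-test
cd docker/test
python -m pip install --upgrade pipenv wheel
pipenv install --deploy

export CROWDSEC_TEST_VERSION=test
export CROWDSEC_TEST_FLAVORS=slim
export CROWDSEC_TEST_NETWORK=net-test
export CROWDSEC_TEST_TIMEOUT=90
pipenv run pytest -n 1 --durations=0 --color=yes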
66 .github/workflows/go-tests-windows.yml (vendored, new file)
@@ -0,0 +1,66 @@
name: Go tests (windows)

on:
  push:
    branches:
      - master
      - releases/**
    paths-ignore:
      - 'README.md'
  pull_request:
    branches:
      - master
      - releases/**
    paths-ignore:
      - 'README.md'

env:
  RICHGO_FORCE_COLOR: 1
  CROWDSEC_FEATURE_DISABLE_HTTP_RETRY_BACKOFF: true

jobs:
  build:
    name: "Build + tests"
    runs-on: windows-2022

    steps:

      - name: Check out CrowdSec repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          submodules: false

      - name: "Set up Go"
        uses: actions/setup-go@v5
        with:
          go-version: "1.22.2"

      - name: Build
        run: |
          make build BUILD_RE2_WASM=1

      - name: Run tests
        run: |
          go install github.com/kyoh86/richgo@v0.3.10
          go test -coverprofile coverage.out -covermode=atomic ./... > out.txt
          if(!$?) { cat out.txt | sed 's/ *coverage:.*of statements in.*//' | richgo testfilter; Exit 1 }
          cat out.txt | sed 's/ *coverage:.*of statements in.*//' | richgo testfilter

      - name: Upload unit coverage to Codecov
        uses: codecov/codecov-action@v4
        with:
          files: coverage.out
          flags: unit-windows
          token: ${{ secrets.CODECOV_TOKEN }}

      - name: golangci-lint
        uses: golangci/golangci-lint-action@v4
        with:
          version: v1.57
          args: --issues-exit-code=1 --timeout 10m
          only-new-issues: false
          # the cache is already managed above, enabling it here
          # gives errors when extracting
          skip-pkg-cache: true
          skip-build-cache: true
173 .github/workflows/go-tests.yml (vendored)
@@ -1,72 +1,167 @@
|
|||
name: Go tests
|
||||
---
|
||||
# This workflow is actually running
|
||||
# tests (with localstack) but the
|
||||
# name is used for the badge in README.md
|
||||
|
||||
name: Build
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- releases/**
|
||||
paths-ignore:
|
||||
- 'README.md'
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- releases/**
|
||||
paths-ignore:
|
||||
- 'README.md'
|
||||
|
||||
# these env variables are for localstack, so we can emulate aws services
|
||||
env:
|
||||
RICHGO_FORCE_COLOR: 1
|
||||
AWS_HOST: localstack
|
||||
SERVICES: cloudwatch,logs,kinesis
|
||||
#those are to mimic aws config
|
||||
AWS_ACCESS_KEY_ID: AKIAIOSFODNN7EXAMPLE
|
||||
AWS_SECRET_ACCESS_KEY: wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY
|
||||
# these are to mimic aws config
|
||||
AWS_ACCESS_KEY_ID: test
|
||||
AWS_SECRET_ACCESS_KEY: test
|
||||
AWS_REGION: us-east-1
|
||||
#and to override our endpoint in aws sdk
|
||||
AWS_ENDPOINT_FORCE: http://localhost:4566
|
||||
KINESIS_INITIALIZE_STREAMS: "stream-1-shard:1,stream-2-shards:2"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
CROWDSEC_FEATURE_DISABLE_HTTP_RETRY_BACKOFF: true
|
||||
|
||||
jobs:
|
||||
|
||||
build:
|
||||
name: "Build + tests"
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
localstack:
|
||||
image: localstack/localstack:0.13.3
|
||||
image: localstack/localstack:3.0
|
||||
ports:
|
||||
- 4566:4566 # Localstack exposes all services on same port
|
||||
- 4566:4566 # Localstack exposes all services on the same port
|
||||
env:
|
||||
SERVICES: ${{ env.SERVICES }}
|
||||
DEBUG: ""
|
||||
DATA_DIR: ""
|
||||
LAMBDA_EXECUTOR: ""
|
||||
KINESIS_ERROR_PROBABILITY: ""
|
||||
DOCKER_HOST: unix:///var/run/docker.sock
|
||||
HOST_TMP_FOLDER: "/tmp"
|
||||
KINESIS_INITIALIZE_STREAMS: ${{ env.KINESIS_INITIALIZE_STREAMS }}
|
||||
HOSTNAME_EXTERNAL: ${{ env.AWS_HOST }} # Required so that resource urls are provided properly
|
||||
# e.g sqs url will get localhost if we don't set this env to map our service
|
||||
LOCALSTACK_HOST: ${{ env.AWS_HOST }} # Required so that resource urls are provided properly
|
||||
# e.g sqs url will get localhost if we don't set this env to map our service
|
||||
options: >-
|
||||
--name=localstack
|
||||
--health-cmd="curl -sS 127.0.0.1:4566 || exit 1"
|
||||
--health-interval=10s
|
||||
--health-timeout=5s
|
||||
--health-retries=3
|
||||
zoo1:
|
||||
image: confluentinc/cp-zookeeper:7.4.3
|
||||
ports:
|
||||
- "2181:2181"
|
||||
env:
|
||||
ZOOKEEPER_CLIENT_PORT: 2181
|
||||
ZOOKEEPER_SERVER_ID: 1
|
||||
ZOOKEEPER_SERVERS: zoo1:2888:3888
|
||||
options: >-
|
||||
--name=zoo1
|
||||
--health-cmd "jps -l | grep zookeeper"
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
|
||||
kafka1:
|
||||
image: crowdsecurity/kafka-ssl
|
||||
ports:
|
||||
- "9093:9093"
|
||||
- "9092:9092"
|
||||
- "9999:9999"
|
||||
env:
|
||||
KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://127.0.0.1:19092,LISTENER_DOCKER_EXTERNAL://127.0.0.1:9092,LISTENER_DOCKER_EXTERNAL_SSL://127.0.0.1:9093
|
||||
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL_SSL:SSL
|
||||
KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
|
||||
KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
|
||||
KAFKA_BROKER_ID: 1
|
||||
KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
|
||||
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
|
||||
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
|
||||
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
|
||||
KAFKA_JMX_PORT: 9999
|
||||
KAFKA_JMX_HOSTNAME: "127.0.0.1"
|
||||
KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
|
||||
KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
|
||||
KAFKA_SSL_KEYSTORE_FILENAME: kafka.kafka1.keystore.jks
|
||||
KAFKA_SSL_KEYSTORE_CREDENTIALS: kafka1_keystore_creds
|
||||
KAFKA_SSL_KEY_CREDENTIALS: kafka1_sslkey_creds
|
||||
KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.kafka1.truststore.jks
|
||||
KAFKA_SSL_TRUSTSTORE_CREDENTIALS: kafka1_truststore_creds
|
||||
KAFKA_SSL_ENABLED_PROTOCOLS: TLSv1.2
|
||||
KAFKA_SSL_PROTOCOL: TLSv1.2
|
||||
KAFKA_SSL_CLIENT_AUTH: none
|
||||
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
|
||||
options: >-
|
||||
--name=kafka1
|
||||
--health-cmd "kafka-broker-api-versions --version"
|
||||
--health-interval 10s
|
||||
--health-timeout 10s
|
||||
--health-retries 5
|
||||
|
||||
loki:
|
||||
image: grafana/loki:2.9.1
|
||||
ports:
|
||||
- "3100:3100"
|
||||
options: >-
|
||||
--name=loki1
|
||||
--health-cmd "wget -q -O - http://localhost:3100/ready | grep 'ready'"
|
||||
--health-interval 30s
|
||||
--health-timeout 10s
|
||||
--health-retries 5
|
||||
--health-start-period 30s
|
||||
|
||||
steps:
|
||||
|
||||
- name: "Set up Go 1.17"
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.17
|
||||
id: go
|
||||
|
||||
- name: "Clone CrowdSec"
|
||||
uses: actions/checkout@v3
|
||||
- name: Check out CrowdSec repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
submodules: false
|
||||
|
||||
- name: Build
|
||||
run: make build && go get -u github.com/jandelgado/gcov2lcov && go get -u github.com/ory/go-acc
|
||||
|
||||
- name: "Run tests"
|
||||
run: |
|
||||
go run github.com/ory/go-acc ./... -o coverage.out --ignore database,notifications,protobufs,cwversion,cstest,models
|
||||
bzip2 ./coverage.out
|
||||
|
||||
- name: "Coverage report artifact"
|
||||
uses: actions/upload-artifact@v2
|
||||
- name: "Set up Go"
|
||||
uses: actions/setup-go@v5
|
||||
with:
|
||||
name: coverage.out.bz2
|
||||
path: ./coverage.out.bz2
|
||||
go-version: "1.22.2"
|
||||
|
||||
- name: Create localstack streams
|
||||
run: |
|
||||
aws --endpoint-url=http://127.0.0.1:4566 --region us-east-1 kinesis create-stream --stream-name stream-1-shard --shard-count 1
|
||||
aws --endpoint-url=http://127.0.0.1:4566 --region us-east-1 kinesis create-stream --stream-name stream-2-shards --shard-count 2
|
||||
|
||||
- name: Build and run tests, static
|
||||
run: |
|
||||
sudo apt -qq -y -o=Dpkg::Use-Pty=0 install build-essential libre2-dev
|
||||
go install github.com/ory/go-acc@v0.2.8
|
||||
go install github.com/kyoh86/richgo@v0.3.10
|
||||
set -o pipefail
|
||||
make build BUILD_STATIC=1
|
||||
make go-acc | sed 's/ *coverage:.*of statements in.*//' | richgo testfilter
|
||||
|
||||
- name: Run tests again, dynamic
|
||||
run: |
|
||||
make clean build
|
||||
set -o pipefail
|
||||
make go-acc | sed 's/ *coverage:.*of statements in.*//' | richgo testfilter
|
||||
|
||||
- name: Upload unit coverage to Codecov
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
files: coverage.out
|
||||
flags: unit-linux
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
- name: golangci-lint
|
||||
uses: golangci/golangci-lint-action@v4
|
||||
with:
|
||||
version: v1.57
|
||||
args: --issues-exit-code=1 --timeout 10m
|
||||
only-new-issues: false
|
||||
# the cache is already managed above, enabling it here
|
||||
# gives errors when extracting
|
||||
skip-pkg-cache: true
|
||||
skip-build-cache: true
|
||||
|
|
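The AWS-related tests in this workflow only need the emulated endpoint, not a real account. A rough local equivalent, assuming Docker and the AWS CLI are available (container name, dummy credentials and stream names mirror the workflow env block above; nothing else is prescriptive):

# start localstack with the same services the workflow enables
docker run -d --name localstack -p 4566:4566 \
  -e SERVICES=cloudwatch,logs,kinesis \
  localstack/localstack:3.0

# dummy credentials, as in the workflow env block
export AWS_ACCESS_KEY_ID=test
export AWS_SECRET_ACCESS_KEY=test
export AWS_REGION=us-east-1
export AWS_ENDPOINT_FORCE=http://localhost:4566

# create the Kinesis streams the tests expect
aws --endpoint-url=http://127.0.0.1:4566 --region us-east-1 kinesis create-stream --stream-name stream-1-shard --shard-count 1
aws --endpoint-url=http://127.0.0.1:4566 --region us-east-1 kinesis create-stream --stream-name stream-2-shards --shard-count 2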
30 .github/workflows/governance-bot.yaml (vendored, new file)
@@ -0,0 +1,30 @@
# .github/workflow/governance.yml

on:
  pull_request_target:
    types: [ synchronize, opened, labeled, unlabeled ]
  issues:
    types: [ opened, labeled, unlabeled ]
  issue_comment:
    types: [ created ]

# You can use permissions to modify the default permissions granted to the GITHUB_TOKEN,
# adding or removing access as required, so that you only allow the minimum required access.
permissions:
  contents: read
  issues: write
  pull-requests: write
  statuses: write
  checks: write

jobs:
  governance:
    name: Governance
    runs-on: ubuntu-latest
    steps:
      # Semantic versioning, lock to different version: v2, v2.0 or a commit hash.
      - uses: BirthdayResearch/oss-governance-bot@v4
        with:
          # You can use a PAT to post a comment/label/status so that it shows up as a user instead of github-actions
          github-token: ${{secrets.GITHUB_TOKEN}} # optional, default to '${{ github.token }}'
          config-path: .github/governance.yml # optional, default to '.github/governance.yml'
47 .github/workflows/publish-docker-master.yml (vendored, new file)
@@ -0,0 +1,47 @@
name: (push-master) Publish latest Docker images

on:
  push:
    branches: [ master ]
    paths:
      - 'pkg/**'
      - 'cmd/**'
      - 'mk/**'
      - 'docker/docker_start.sh'
      - 'docker/config.yaml'
      - '.github/workflows/publish-docker-master.yml'
      - '.github/workflows/publish-docker.yml'
      - 'Dockerfile'
      - 'Dockerfile.debian'
      - 'go.mod'
      - 'go.sum'
      - 'Makefile'

jobs:
  dev-alpine:
    uses: ./.github/workflows/publish-docker.yml
    with:
      platform: linux/amd64
      crowdsec_version: ""
      image_version: dev
      latest: false
      push: true
      slim: false
      debian: false
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}

  dev-debian:
    uses: ./.github/workflows/publish-docker.yml
    with:
      platform: linux/amd64
      crowdsec_version: ""
      image_version: dev
      latest: false
      push: true
      slim: false
      debian: true
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
48 .github/workflows/publish-docker-release.yml (vendored, new file)
@@ -0,0 +1,48 @@
name: (manual) Publish Docker images

on:
  workflow_dispatch:
    inputs:
      image_version:
        description: Docker Image version (base tag, i.e. v1.6.0-2)
        required: true
      crowdsec_version:
        description: Crowdsec version (BUILD_VERSION)
        required: true
      latest:
        description: Overwrite latest (and slim) tags?
        default: false
        required: true
      push:
        description: Really push?
        default: false
        required: true

jobs:
  alpine:
    uses: ./.github/workflows/publish-docker.yml
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
    with:
      image_version: ${{ github.event.inputs.image_version }}
      crowdsec_version: ${{ github.event.inputs.crowdsec_version }}
      latest: ${{ github.event.inputs.latest == 'true' }}
      push: ${{ github.event.inputs.push == 'true' }}
      slim: true
      debian: false
      platform: "linux/amd64,linux/386,linux/arm64,linux/arm/v7,linux/arm/v6"

  debian:
    uses: ./.github/workflows/publish-docker.yml
    secrets:
      DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
      DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
    with:
      image_version: ${{ github.event.inputs.image_version }}
      crowdsec_version: ${{ github.event.inputs.crowdsec_version }}
      latest: ${{ github.event.inputs.latest == 'true' }}
      push: ${{ github.event.inputs.push == 'true' }}
      slim: false
      debian: true
      platform: "linux/amd64,linux/386,linux/arm64"
125 .github/workflows/publish-docker.yml (vendored, new file)
@@ -0,0 +1,125 @@
|
|||
name: (sub) Publish Docker images
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
secrets:
|
||||
DOCKER_USERNAME:
|
||||
required: true
|
||||
DOCKER_PASSWORD:
|
||||
required: true
|
||||
inputs:
|
||||
platform:
|
||||
required: true
|
||||
type: string
|
||||
image_version:
|
||||
required: true
|
||||
type: string
|
||||
crowdsec_version:
|
||||
required: true
|
||||
type: string
|
||||
latest:
|
||||
required: true
|
||||
type: boolean
|
||||
push:
|
||||
required: true
|
||||
type: boolean
|
||||
slim:
|
||||
required: true
|
||||
type: boolean
|
||||
debian:
|
||||
required: true
|
||||
type: boolean
|
||||
|
||||
jobs:
|
||||
push_to_registry:
|
||||
name: Push Docker image to registries
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
|
||||
- name: Check out the repo
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
config: .github/buildkit.toml
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Prepare (slim)
|
||||
if: ${{ inputs.slim }}
|
||||
id: slim
|
||||
run: |
|
||||
DOCKERHUB_IMAGE=${{ secrets.DOCKER_USERNAME }}/crowdsec
|
||||
GHCR_IMAGE=ghcr.io/${{ github.repository_owner }}/crowdsec
|
||||
VERSION=${{ inputs.image_version }}
|
||||
DEBIAN=${{ inputs.debian && '-debian' || '' }}
|
||||
TAGS="${DOCKERHUB_IMAGE}:${VERSION}-slim${DEBIAN},${GHCR_IMAGE}:${VERSION}-slim${DEBIAN}"
|
||||
if [[ ${{ inputs.latest }} == true ]]; then
|
||||
TAGS=$TAGS,${DOCKERHUB_IMAGE}:slim${DEBIAN},${GHCR_IMAGE}:slim${DEBIAN}
|
||||
fi
|
||||
echo "tags=${TAGS}" >> $GITHUB_OUTPUT
|
||||
echo "created=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Prepare (full)
|
||||
id: full
|
||||
run: |
|
||||
DOCKERHUB_IMAGE=${{ secrets.DOCKER_USERNAME }}/crowdsec
|
||||
GHCR_IMAGE=ghcr.io/${{ github.repository_owner }}/crowdsec
|
||||
VERSION=${{ inputs.image_version }}
|
||||
DEBIAN=${{ inputs.debian && '-debian' || '' }}
|
||||
TAGS="${DOCKERHUB_IMAGE}:${VERSION}${DEBIAN},${GHCR_IMAGE}:${VERSION}${DEBIAN}"
|
||||
if [[ ${{ inputs.latest }} == true ]]; then
|
||||
TAGS=$TAGS,${DOCKERHUB_IMAGE}:latest${DEBIAN},${GHCR_IMAGE}:latest${DEBIAN}
|
||||
fi
|
||||
echo "tags=${TAGS}" >> $GITHUB_OUTPUT
|
||||
echo "created=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Build and push image (slim)
|
||||
if: ${{ inputs.slim }}
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile${{ inputs.debian && '.debian' || '' }}
|
||||
push: ${{ inputs.push }}
|
||||
tags: ${{ steps.slim.outputs.tags }}
|
||||
target: slim
|
||||
platforms: ${{ inputs.platform }}
|
||||
labels: |
|
||||
org.opencontainers.image.source=${{ github.event.repository.html_url }}
|
||||
org.opencontainers.image.created=${{ steps.slim.outputs.created }}
|
||||
org.opencontainers.image.revision=${{ github.sha }}
|
||||
build-args: |
|
||||
BUILD_VERSION=${{ inputs.crowdsec_version }}
|
||||
|
||||
- name: Build and push image (full)
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile${{ inputs.debian && '.debian' || '' }}
|
||||
push: ${{ inputs.push }}
|
||||
tags: ${{ steps.full.outputs.tags }}
|
||||
target: full
|
||||
platforms: ${{ inputs.platform }}
|
||||
labels: |
|
||||
org.opencontainers.image.source=${{ github.event.repository.html_url }}
|
||||
org.opencontainers.image.created=${{ steps.full.outputs.created }}
|
||||
org.opencontainers.image.revision=${{ github.sha }}
|
||||
build-args: |
|
||||
BUILD_VERSION=${{ inputs.crowdsec_version }}
|
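The two Prepare steps in this workflow only assemble comma-separated tag lists that docker/build-push-action then consumes. A hedged shell illustration of the full variant with made-up inputs (image_version v1.6.0-2, latest=true, debian=false; the real workflow takes the repository owner and Docker Hub user from its context and secrets):

DOCKERHUB_IMAGE=crowdsecurity/crowdsec        # example; the workflow uses secrets.DOCKER_USERNAME
GHCR_IMAGE=ghcr.io/crowdsecurity/crowdsec     # example; the workflow uses github.repository_owner
VERSION=v1.6.0-2                              # hypothetical image_version input
DEBIAN=""                                     # becomes "-debian" when inputs.debian is true

TAGS="${DOCKERHUB_IMAGE}:${VERSION}${DEBIAN},${GHCR_IMAGE}:${VERSION}${DEBIAN}"
# with latest=true the workflow appends the moving tags:
TAGS=$TAGS,${DOCKERHUB_IMAGE}:latest${DEBIAN},${GHCR_IMAGE}:latest${DEBIAN}

echo "$TAGS"
# crowdsecurity/crowdsec:v1.6.0-2,ghcr.io/crowdsecurity/crowdsec:v1.6.0-2,crowdsecurity/crowdsec:latest,ghcr.io/crowdsecurity/crowdsec:latest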
40 .github/workflows/publish-tarball-release.yml (vendored, new file)
@@ -0,0 +1,40 @@
# .github/workflows/build-docker-image.yml
name: Release

on:
  release:
    types:
      - prereleased

permissions:
  # Use write for: hub release edit
  contents: write

jobs:
  build:
    name: Build and upload binary package
    runs-on: ubuntu-latest
    steps:

      - name: Check out code into the Go module directory
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          submodules: false

      - name: "Set up Go"
        uses: actions/setup-go@v5
        with:
          go-version: "1.22.2"

      - name: Build the binaries
        run: |
          sudo apt -qq -y -o=Dpkg::Use-Pty=0 install build-essential libre2-dev
          make vendor release BUILD_STATIC=1

      - name: Upload to release
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          tag_name="${GITHUB_REF##*/}"
          gh release upload "$tag_name" crowdsec-release.tgz vendor.tgz *-vendor.tar.xz
103 .github/workflows/release_publish-package.yml (vendored)
@@ -1,103 +0,0 @@
|
|||
# .github/workflows/build-docker-image.yml
|
||||
name: build
|
||||
|
||||
on:
|
||||
release:
|
||||
types: prereleased
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build and upload binary package
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Set up Go 1.17
|
||||
uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: 1.17
|
||||
id: go
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v3
|
||||
- name: Build the binaries
|
||||
run: make release
|
||||
- name: Upload to release
|
||||
uses: JasonEtco/upload-to-release@master
|
||||
with:
|
||||
args: crowdsec-release.tgz application/x-gzip
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
build_static:
|
||||
name: Build and upload binary package
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Set up Go 1.17
|
||||
uses: actions/setup-go@v1
|
||||
with:
|
||||
go-version: 1.17
|
||||
id: go
|
||||
- name: Check out code into the Go module directory
|
||||
uses: actions/checkout@v3
|
||||
- name: Build the binaries
|
||||
run: make release_static
|
||||
- name: Upload to release
|
||||
uses: JasonEtco/upload-to-release@master
|
||||
with:
|
||||
args: crowdsec-release-static.tgz application/x-gzip
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
test_tarball:
|
||||
strategy:
|
||||
matrix:
|
||||
tarball:
|
||||
- name: crowdsec-release
|
||||
- name: crowdsec-release-static
|
||||
name: Download tarball and run functional tests
|
||||
runs-on: ubuntu-20.04
|
||||
needs: [build, build_static]
|
||||
steps:
|
||||
- id: fetch_prerelease_version
|
||||
uses: pozetroninc/github-action-get-latest-release@master
|
||||
with:
|
||||
repository: crowdsecurity/crowdsec
|
||||
excludes: draft
|
||||
- name: download tarball
|
||||
run: wget -qO - https://github.com/crowdsecurity/crowdsec/releases/download/${{ steps.fetch_prerelease_version.outputs.release }}/${{ matrix.tarball.name }}.tgz | tar xvzf -
|
||||
- name: "Force machineid"
|
||||
run: |
|
||||
sudo chmod +w /etc/machine-id
|
||||
echo githubciXXXXXXXXXXXXXXXXXXXXXXXX | sudo tee /etc/machine-id
|
||||
- name: Install release
|
||||
run: |
|
||||
cd crowdsec-${{ steps.fetch_prerelease_version.outputs.release }}
|
||||
sudo ./wizard.sh --unattended
|
||||
- name: Check out code to get functional tests scripts
|
||||
uses: actions/checkout@v3
|
||||
- name: "Test post-install base"
|
||||
run: |
|
||||
cd scripts/func_tests/
|
||||
./tests_post-install_0base.sh
|
||||
- name: "Test post-install bouncer"
|
||||
run: |
|
||||
cd scripts/func_tests/
|
||||
./tests_post-install_1bouncers.sh
|
||||
- name: "Test post-install collections"
|
||||
run: |
|
||||
cd scripts/func_tests/
|
||||
./tests_post-install_2collections.sh
|
||||
- name: "Test post-install macines"
|
||||
run: |
|
||||
cd scripts/func_tests/
|
||||
./tests_post-install_3machines.sh
|
||||
- name: "Test post-install ip management"
|
||||
run: |
|
||||
cd scripts/func_tests/
|
||||
./tests_post-install_99ip_mgmt.sh
|
||||
- name: "Test cold logs"
|
||||
run: |
|
||||
cd scripts/func_tests/
|
||||
./tests_post-install_4cold-logs.sh
|
||||
- name: "Uninstall"
|
||||
run: sudo ./wizard.sh --uninstall
|
||||
- name: "Test post remove"
|
||||
run: |
|
||||
cd scripts/func_tests/
|
||||
bash -x ./tests_post-remove_0base.sh
|
|
@@ -1,60 +0,0 @@
|
|||
name: Publish Docker Debian image
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- released
|
||||
- prereleased
|
||||
workflow_dispatch:
|
||||
jobs:
|
||||
push_to_registry:
|
||||
name: Push Docker debian image to Docker Hub
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
-
|
||||
name: Check out the repo
|
||||
uses: actions/checkout@v3
|
||||
-
|
||||
name: Prepare
|
||||
id: prep
|
||||
run: |
|
||||
DOCKER_IMAGE=crowdsecurity/crowdsec
|
||||
VERSION=bullseye
|
||||
if [[ $GITHUB_REF == refs/tags/* ]]; then
|
||||
VERSION=${GITHUB_REF#refs/tags/}
|
||||
elif [[ $GITHUB_REF == refs/heads/* ]]; then
|
||||
VERSION=$(echo ${GITHUB_REF#refs/heads/} | sed -E 's#/+#-#g')
|
||||
elif [[ $GITHUB_REF == refs/pull/* ]]; then
|
||||
VERSION=pr-${{ github.event.number }}
|
||||
fi
|
||||
TAGS="${DOCKER_IMAGE}:${VERSION}-debian"
|
||||
if [[ ${{ github.event.action }} == released ]]; then
|
||||
TAGS=$TAGS,${DOCKER_IMAGE}:latest-debian
|
||||
fi
|
||||
echo ::set-output name=version::${VERSION}
|
||||
echo ::set-output name=tags::${TAGS}
|
||||
echo ::set-output name=created::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
|
||||
-
|
||||
name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
-
|
||||
name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
-
|
||||
name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
-
|
||||
name: Build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile.debian
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.prep.outputs.tags }}
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6,linux/386
|
||||
labels: |
|
||||
org.opencontainers.image.source=${{ github.event.repository.html_url }}
|
||||
org.opencontainers.image.created=${{ steps.prep.outputs.created }}
|
||||
org.opencontainers.image.revision=${{ github.sha }}
|
|
@@ -1,67 +0,0 @@
|
|||
name: Publish Docker image
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- released
|
||||
- prereleased
|
||||
jobs:
|
||||
push_to_registry:
|
||||
name: Push Docker image to Docker Hub
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
-
|
||||
name: Check out the repo
|
||||
uses: actions/checkout@v3
|
||||
-
|
||||
name: Prepare
|
||||
id: prep
|
||||
run: |
|
||||
DOCKER_IMAGE=crowdsecurity/crowdsec
|
||||
GHCR_IMAGE=ghcr.io/${{ github.repository_owner }}/crowdsec
|
||||
VERSION=edge
|
||||
if [[ $GITHUB_REF == refs/tags/* ]]; then
|
||||
VERSION=${GITHUB_REF#refs/tags/}
|
||||
elif [[ $GITHUB_REF == refs/heads/* ]]; then
|
||||
VERSION=$(echo ${GITHUB_REF#refs/heads/} | sed -E 's#/+#-#g')
|
||||
elif [[ $GITHUB_REF == refs/pull/* ]]; then
|
||||
VERSION=pr-${{ github.event.number }}
|
||||
fi
|
||||
TAGS="${DOCKER_IMAGE}:${VERSION},${GHCR_IMAGE}:${VERSION}"
|
||||
if [[ ${{ github.event.action }} == released ]]; then
|
||||
TAGS=$TAGS,${DOCKER_IMAGE}:latest,${GHCR_IMAGE}:latest
|
||||
fi
|
||||
echo ::set-output name=version::${VERSION}
|
||||
echo ::set-output name=tags::${TAGS}
|
||||
echo ::set-output name=created::$(date -u +'%Y-%m-%dT%H:%M:%SZ')
|
||||
-
|
||||
name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v1
|
||||
-
|
||||
name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
-
|
||||
name: Login to DockerHub
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
uses: docker/login-action@v1.12.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
-
|
||||
name: Build and push
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.prep.outputs.tags }}
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6,linux/386
|
||||
labels: |
|
||||
org.opencontainers.image.source=${{ github.event.repository.html_url }}
|
||||
org.opencontainers.image.created=${{ steps.prep.outputs.created }}
|
||||
org.opencontainers.image.revision=${{ github.sha }}
|
6 .github/workflows/update_docker_hub_doc.yml (vendored)
@@ -1,4 +1,4 @@
name: Update Docker Hub README
name: (push-master) Update Docker Hub README

on:
  push:
@@ -13,10 +13,12 @@ jobs:
    steps:
      -
        name: Check out the repo
        uses: actions/checkout@v3
        uses: actions/checkout@v4
        if: ${{ github.repository_owner == 'crowdsecurity' }}
      -
        name: Update docker hub README
        uses: ms-jpq/sync-dockerhub-readme@v1
        if: ${{ github.repository_owner == 'crowdsecurity' }}
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
47 .gitignore (vendored)
@@ -7,25 +7,56 @@
*~
.pc

# IDEs
.vscode
.idea

# If vendor is included, allow prebuilt (wasm?) libraries.
!vendor/**/*.so

# Test binaries, built with `go test -c`
*.test
*.cover

# Output of the go coverage tool, specifically when used with LiteIDE
# Test dependencies
test/tools/*

# VMs used for dev/test

.vagrant

# Test binaries, built from *_test.go
pkg/csplugin/tests/cs_plugin_test*

# Output of go-acc, go -cover
*.out
test/coverage/*

# Development artifacts, backups, etc
*.swp
*.swo

# Dependency directories (remove the comment below to include it)
# vendor/
# Dependencies are not vendored by default, but a tarball is created by "make vendor"
# and provided in the release. Used by gentoo, etc.
vendor/
vendor.tgz

# crowdsec binaries
cmd/crowdsec-cli/cscli
cmd/crowdsec/crowdsec
plugins/notifications/http/notification-http
plugins/notifications/slack/notification-slack
plugins/notifications/splunk/notification-splunk
plugins/notifications/email/notification-email
plugins/notifications/dummy/notification-dummy
cmd/notification-*/notification-*

# Test cache (downloaded files)
.cache

# Release stuff
crowdsec-v*
msi
*.msi
**/*.nupkg
*.tgz

# Python
__pycache__
*.py[cod]
*.egg-info
12 .gitmodules (vendored)
@@ -1,12 +1,16 @@
[submodule "tests/lib/bats-core"]
    path = tests/lib/bats-core
    path = test/lib/bats-core
    url = https://github.com/crowdsecurity/bats-core.git
    branch = v1.7.0
[submodule "tests/lib/bats-file"]
    path = tests/lib/bats-file
    path = test/lib/bats-file
    url = https://github.com/crowdsecurity/bats-file.git
[submodule "tests/lib/bats-assert"]
    path = tests/lib/bats-assert
    path = test/lib/bats-assert
    url = https://github.com/crowdsecurity/bats-assert.git
[submodule "tests/lib/bats-support"]
    path = tests/lib/bats-support
    path = test/lib/bats-support
    url = https://github.com/crowdsecurity/bats-support.git
[submodule "tests/lib/bats-mock"]
    path = test/lib/bats-mock
    url = https://github.com/crowdsecurity/bats-mock.git
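Because this change moves the submodule paths from tests/ to test/, an existing checkout needs its submodule configuration refreshed; a rough sketch using standard git commands (not part of the diff itself):

# re-read .gitmodules and update the working tree after the path change
git submodule sync --recursive
git submodule update --init --recursive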
382 .golangci.yml (new file)
@@ -0,0 +1,382 @@
|
|||
# https://github.com/golangci/golangci-lint/blob/master/.golangci.reference.yml
|
||||
|
||||
linters-settings:
|
||||
cyclop:
|
||||
# lower this after refactoring
|
||||
max-complexity: 48
|
||||
|
||||
gci:
|
||||
sections:
|
||||
- standard
|
||||
- default
|
||||
- prefix(github.com/crowdsecurity)
|
||||
- prefix(github.com/crowdsecurity/crowdsec)
|
||||
|
||||
gomoddirectives:
|
||||
replace-allow-list:
|
||||
- golang.org/x/time/rate
|
||||
|
||||
gocognit:
|
||||
# lower this after refactoring
|
||||
min-complexity: 145
|
||||
|
||||
gocyclo:
|
||||
# lower this after refactoring
|
||||
min-complexity: 48
|
||||
|
||||
funlen:
|
||||
# Checks the number of lines in a function.
|
||||
# If lower than 0, disable the check.
|
||||
# Default: 60
|
||||
# lower this after refactoring
|
||||
lines: 437
|
||||
# Checks the number of statements in a function.
|
||||
# If lower than 0, disable the check.
|
||||
# Default: 40
|
||||
# lower this after refactoring
|
||||
statements: 122
|
||||
|
||||
govet:
|
||||
enable-all: true
|
||||
disable:
|
||||
- reflectvaluecompare
|
||||
- fieldalignment
|
||||
|
||||
lll:
|
||||
# lower this after refactoring
|
||||
line-length: 2607
|
||||
|
||||
maintidx:
|
||||
# raise this after refactoring
|
||||
under: 11
|
||||
|
||||
misspell:
|
||||
locale: US
|
||||
|
||||
nestif:
|
||||
# lower this after refactoring
|
||||
min-complexity: 28
|
||||
|
||||
nlreturn:
|
||||
block-size: 5
|
||||
|
||||
nolintlint:
|
||||
allow-unused: false # report any unused nolint directives
|
||||
require-explanation: false # don't require an explanation for nolint directives
|
||||
require-specific: false # don't require nolint directives to be specific about which linter is being skipped
|
||||
|
||||
interfacebloat:
|
||||
max: 12
|
||||
|
||||
depguard:
|
||||
rules:
|
||||
wrap:
|
||||
deny:
|
||||
- pkg: "github.com/pkg/errors"
|
||||
desc: "errors.Wrap() is deprecated in favor of fmt.Errorf()"
|
||||
files:
|
||||
- "!**/pkg/database/*.go"
|
||||
- "!**/pkg/exprhelpers/*.go"
|
||||
- "!**/pkg/acquisition/modules/appsec/appsec.go"
|
||||
- "!**/pkg/acquisition/modules/loki/internal/lokiclient/loki_client.go"
|
||||
- "!**/pkg/apiserver/controllers/v1/errors.go"
|
||||
yaml:
|
||||
files:
|
||||
- "!**/pkg/acquisition/acquisition.go"
|
||||
- "!**/pkg/acquisition/acquisition_test.go"
|
||||
- "!**/pkg/acquisition/modules/appsec/appsec.go"
|
||||
- "!**/pkg/acquisition/modules/cloudwatch/cloudwatch.go"
|
||||
- "!**/pkg/acquisition/modules/docker/docker.go"
|
||||
- "!**/pkg/acquisition/modules/file/file.go"
|
||||
- "!**/pkg/acquisition/modules/journalctl/journalctl.go"
|
||||
- "!**/pkg/acquisition/modules/kafka/kafka.go"
|
||||
- "!**/pkg/acquisition/modules/kinesis/kinesis.go"
|
||||
- "!**/pkg/acquisition/modules/kubernetesaudit/k8s_audit.go"
|
||||
- "!**/pkg/acquisition/modules/loki/loki.go"
|
||||
- "!**/pkg/acquisition/modules/loki/timestamp_test.go"
|
||||
- "!**/pkg/acquisition/modules/s3/s3.go"
|
||||
- "!**/pkg/acquisition/modules/syslog/syslog.go"
|
||||
- "!**/pkg/acquisition/modules/wineventlog/wineventlog_windows.go"
|
||||
- "!**/pkg/appsec/appsec.go"
|
||||
- "!**/pkg/appsec/loader.go"
|
||||
- "!**/pkg/csplugin/broker.go"
|
||||
- "!**/pkg/csplugin/broker_test.go"
|
||||
- "!**/pkg/dumps/bucket_dump.go"
|
||||
- "!**/pkg/dumps/parser_dump.go"
|
||||
- "!**/pkg/hubtest/coverage.go"
|
||||
- "!**/pkg/hubtest/hubtest_item.go"
|
||||
- "!**/pkg/hubtest/parser_assert.go"
|
||||
- "!**/pkg/hubtest/scenario_assert.go"
|
||||
- "!**/pkg/leakybucket/buckets_test.go"
|
||||
- "!**/pkg/leakybucket/manager_load.go"
|
||||
- "!**/pkg/metabase/metabase.go"
|
||||
- "!**/pkg/parser/node.go"
|
||||
- "!**/pkg/parser/node_test.go"
|
||||
- "!**/pkg/parser/parsing_test.go"
|
||||
- "!**/pkg/parser/stage.go"
|
||||
deny:
|
||||
- pkg: "gopkg.in/yaml.v2"
|
||||
desc: "yaml.v2 is deprecated for new code in favor of yaml.v3"
|
||||
|
||||
wsl:
|
||||
# Allow blocks to end with comments
|
||||
allow-trailing-comment: true
|
||||
|
||||
linters:
|
||||
enable-all: true
|
||||
disable:
|
||||
#
|
||||
# DEPRECATED by golangi-lint
|
||||
#
|
||||
- deadcode
|
||||
- exhaustivestruct
|
||||
- golint
|
||||
- ifshort
|
||||
- interfacer
|
||||
- maligned
|
||||
- nosnakecase
|
||||
- scopelint
|
||||
- structcheck
|
||||
- varcheck
|
||||
|
||||
#
|
||||
# Disabled until fixed for go 1.22
|
||||
#
|
||||
|
||||
- copyloopvar # copyloopvar is a linter detects places where loop variables are copied
|
||||
- intrange # intrange is a linter to find places where for loops could make use of an integer range.
|
||||
|
||||
#
|
||||
# Enabled
|
||||
#
|
||||
|
||||
# - asasalint # check for pass []any as any in variadic func(...any)
|
||||
# - asciicheck # checks that all code identifiers does not have non-ASCII symbols in the name
|
||||
# - bidichk # Checks for dangerous unicode character sequences
|
||||
# - bodyclose # checks whether HTTP response body is closed successfully
|
||||
# - cyclop # checks function and package cyclomatic complexity
|
||||
# - decorder # check declaration order and count of types, constants, variables and functions
|
||||
# - depguard # Go linter that checks if package imports are in a list of acceptable packages
|
||||
# - dupword # checks for duplicate words in the source code
|
||||
# - durationcheck # check for two durations multiplied together
|
||||
# - errcheck # errcheck is a program for checking for unchecked errors in Go code. These unchecked errors can be critical bugs in some cases
|
||||
# - errorlint # errorlint is a linter for that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.
|
||||
# - execinquery # execinquery is a linter about query string checker in Query function which reads your Go src files and warning it finds
|
||||
# - exportloopref # checks for pointers to enclosing loop variables
|
||||
# - funlen # Tool for detection of long functions
|
||||
# - ginkgolinter # enforces standards of using ginkgo and gomega
|
||||
# - gocheckcompilerdirectives # Checks that go compiler directive comments (//go:) are valid.
|
||||
# - gochecknoinits # Checks that no init functions are present in Go code
|
||||
# - gochecksumtype # Run exhaustiveness checks on Go "sum types"
|
||||
# - gocognit # Computes and checks the cognitive complexity of functions
|
||||
# - gocritic # Provides diagnostics that check for bugs, performance and style issues.
|
||||
# - gocyclo # Computes and checks the cyclomatic complexity of functions
|
||||
# - goheader # Checks is file header matches to pattern
|
||||
# - gomoddirectives # Manage the use of 'replace', 'retract', and 'excludes' directives in go.mod.
|
||||
# - gomodguard # Allow and block list linter for direct Go module dependencies. This is different from depguard where there are different block types for example version constraints and module recommendations.
|
||||
# - goprintffuncname # Checks that printf-like functions are named with `f` at the end
|
||||
# - gosimple # (megacheck): Linter for Go source code that specializes in simplifying code
|
||||
# - gosmopolitan # Report certain i18n/l10n anti-patterns in your Go codebase
|
||||
# - govet # (vet, vetshadow): Vet examines Go source code and reports suspicious constructs. It is roughly the same as 'go vet' and uses its passes.
|
||||
# - grouper # Analyze expression groups.
|
||||
# - importas # Enforces consistent import aliases
|
||||
# - ineffassign # Detects when assignments to existing variables are not used
|
||||
# - interfacebloat # A linter that checks the number of methods inside an interface.
|
||||
# - lll # Reports long lines
|
||||
# - loggercheck # (logrlint): Checks key value pairs for common logger libraries (kitlog,klog,logr,zap).
|
||||
# - logrlint # Check logr arguments.
|
||||
# - maintidx # maintidx measures the maintainability index of each function.
|
||||
# - makezero # Finds slice declarations with non-zero initial length
|
||||
# - mirror # reports wrong mirror patterns of bytes/strings usage
|
||||
# - misspell # Finds commonly misspelled English words
|
||||
# - nakedret # Checks that functions with naked returns are not longer than a maximum size (can be zero).
|
||||
# - nestif # Reports deeply nested if statements
|
||||
# - nilerr # Finds the code that returns nil even if it checks that the error is not nil.
|
||||
# - nolintlint # Reports ill-formed or insufficient nolint directives
|
||||
# - nonamedreturns # Reports all named returns
|
||||
# - nosprintfhostport # Checks for misuse of Sprintf to construct a host with port in a URL.
|
||||
# - perfsprint # Checks that fmt.Sprintf can be replaced with a faster alternative.
|
||||
# - predeclared # find code that shadows one of Go's predeclared identifiers
|
||||
# - reassign # Checks that package variables are not reassigned
|
||||
# - rowserrcheck # checks whether Rows.Err of rows is checked successfully
|
||||
# - sloglint # ensure consistent code style when using log/slog
|
||||
# - spancheck # Checks for mistakes with OpenTelemetry/Census spans.
|
||||
# - sqlclosecheck # Checks that sql.Rows, sql.Stmt, sqlx.NamedStmt, pgx.Query are closed.
|
||||
# - staticcheck # (megacheck): It's a set of rules from staticcheck. It's not the same thing as the staticcheck binary. The author of staticcheck doesn't support or approve the use of staticcheck as a library inside golangci-lint.
|
||||
# - tenv # tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17
|
||||
# - testableexamples # linter checks if examples are testable (have an expected output)
|
||||
# - testifylint # Checks usage of github.com/stretchr/testify.
|
||||
# - tparallel # tparallel detects inappropriate usage of t.Parallel() method in your Go test codes
|
||||
# - unconvert # Remove unnecessary type conversions
|
||||
# - unused # (megacheck): Checks Go code for unused constants, variables, functions and types
|
||||
# - usestdlibvars # A linter that detect the possibility to use variables/constants from the Go standard library.
|
||||
# - wastedassign # Finds wasted assignment statements
|
||||
# - zerologlint # Detects the wrong usage of `zerolog` that a user forgets to dispatch with `Send` or `Msg`
|
||||
|
||||
#
|
||||
# Recommended? (easy)
|
||||
#
|
||||
|
||||
- dogsled # Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())
|
||||
- errchkjson # Checks types passed to the json encoding functions. Reports unsupported types and reports occations, where the check for the returned error can be omitted.
|
||||
- exhaustive # check exhaustiveness of enum switch statements
|
||||
- gci # Gci control golang package import order and make it always deterministic.
|
||||
- godot # Check if comments end in a period
|
||||
- gofmt # Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification
|
||||
- goimports # Check import statements are formatted according to the 'goimport' command. Reformat imports in autofix mode.
|
||||
- gosec # (gas): Inspects source code for security problems
|
||||
- inamedparam # reports interfaces with unnamed method parameters
|
||||
- musttag # enforce field tags in (un)marshaled structs
|
||||
- promlinter # Check Prometheus metrics naming via promlint
|
||||
- protogetter # Reports direct reads from proto message fields when getters should be used
|
||||
- revive # Fast, configurable, extensible, flexible, and beautiful linter for Go. Drop-in replacement of golint.
|
||||
- tagalign # check that struct tags are well aligned
|
||||
- thelper # thelper detects tests helpers which is not start with t.Helper() method.
|
||||
- wrapcheck # Checks that errors returned from external packages are wrapped
|
||||
|
||||
#
|
||||
# Recommended? (requires some work)
|
||||
#
|
||||
|
||||
- containedctx # containedctx is a linter that detects struct contained context.Context field
|
||||
- contextcheck # check whether the function uses a non-inherited context
|
||||
- errname # Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`.
|
||||
- gomnd # An analyzer to detect magic numbers.
|
||||
- ireturn # Accept Interfaces, Return Concrete Types
|
||||
- nilnil # Checks that there is no simultaneous return of `nil` error and an invalid value.
|
||||
- noctx # Finds sending http request without context.Context
|
||||
- unparam # Reports unused function parameters
|
||||
|
||||
#
|
||||
# Formatting only, useful in IDE but should not be forced on CI?
|
||||
#
|
||||
|
||||
- gofumpt # Gofumpt checks whether code was gofumpt-ed.
|
||||
- nlreturn # nlreturn checks for a new line before return and branch statements to increase code clarity
|
||||
- whitespace # Whitespace is a linter that checks for unnecessary newlines at the start and end of functions, if, for, etc.
|
||||
- wsl # add or remove empty lines
|
||||
|
||||
#
|
||||
# Well intended, but not ready for this
|
||||
#
|
||||
- dupl # Tool for code clone detection
|
||||
- forcetypeassert # finds forced type assertions
|
||||
- godox # Tool for detection of FIXME, TODO and other comment keywords
|
||||
- goerr113 # Go linter to check the errors handling expressions
|
||||
- paralleltest # Detects missing usage of t.Parallel() method in your Go test
|
||||
- testpackage # linter that makes you use a separate _test package
|
||||
|
||||
#
|
||||
# Too strict / too many false positives (for now?)
|
||||
#
|
||||
- exhaustruct # Checks if all structure fields are initialized
|
||||
- forbidigo # Forbids identifiers
|
||||
- gochecknoglobals # Check that no global variables exist.
|
||||
- goconst # Finds repeated strings that could be replaced by a constant
|
||||
- stylecheck # Stylecheck is a replacement for golint
|
||||
- tagliatelle # Checks the struct tags.
|
||||
- varnamelen # checks that the length of a variable's name matches its scope
|
||||
|
||||
#
|
||||
# Under evaluation
|
||||
#
|
||||
|
||||
- prealloc # Finds slice declarations that could potentially be preallocated
|
||||
|
||||
|
||||
issues:
|
||||
# “Look, that’s why there’s rules, understand? So that you think before you
|
||||
# break ‘em.” ― Terry Pratchett
|
||||
|
||||
exclude-dirs:
|
||||
- pkg/time/rate
|
||||
|
||||
exclude-files:
|
||||
- pkg/yamlpatch/merge.go
|
||||
- pkg/yamlpatch/merge_test.go
|
||||
|
||||
exclude-generated-strict: true
|
||||
|
||||
max-issues-per-linter: 0
|
||||
max-same-issues: 0
|
||||
exclude-rules:
|
||||
|
||||
# Won't fix:
|
||||
|
||||
# `err` is often shadowed, we may continue to do it
|
||||
- linters:
|
||||
- govet
|
||||
text: "shadow: declaration of \"err\" shadows declaration"
|
||||
|
||||
- linters:
|
||||
- errcheck
|
||||
text: "Error return value of `.*` is not checked"
|
||||
|
||||
- linters:
|
||||
- gocritic
|
||||
text: "ifElseChain: rewrite if-else to switch statement"
|
||||
|
||||
- linters:
|
||||
- gocritic
|
||||
text: "captLocal: `.*' should not be capitalized"
|
||||
|
||||
- linters:
|
||||
- gocritic
|
||||
text: "appendAssign: append result not assigned to the same slice"
|
||||
|
||||
- linters:
|
||||
- gocritic
|
||||
text: "commentFormatting: put a space between `//` and comment text"
|
||||
|
||||
# Will fix, trivial - just beware of merge conflicts
|
||||
|
||||
- linters:
|
||||
- perfsprint
|
||||
text: "fmt.Sprintf can be replaced .*"
|
||||
|
||||
- linters:
|
||||
- perfsprint
|
||||
text: "fmt.Errorf can be replaced with errors.New"
|
||||
|
||||
#
|
||||
# Will fix, easy but some neurons required
|
||||
#
|
||||
|
||||
- linters:
|
||||
- errorlint
|
||||
text: "non-wrapping format verb for fmt.Errorf. Use `%w` to format errors"
|
||||
|
||||
- linters:
|
||||
- errorlint
|
||||
text: "type assertion on error will fail on wrapped errors. Use errors.As to check for specific errors"
|
||||
|
||||
- linters:
|
||||
- errorlint
|
||||
text: "type switch on error will fail on wrapped errors. Use errors.As to check for specific errors"
|
||||
|
||||
- linters:
|
||||
- errorlint
|
||||
text: "type assertion on error will fail on wrapped errors. Use errors.Is to check for specific errors"
|
||||
|
||||
- linters:
|
||||
- errorlint
|
||||
text: "comparing with .* will fail on wrapped errors. Use errors.Is to check for a specific error"
|
||||
|
||||
- linters:
|
||||
- errorlint
|
||||
text: "switch on an error will fail on wrapped errors. Use errors.Is to check for specific errors"
|
||||
|
||||
- linters:
|
||||
- nosprintfhostport
|
||||
text: "host:port in url should be constructed with net.JoinHostPort and not directly with fmt.Sprintf"
|
||||
|
||||
# https://github.com/timakin/bodyclose
|
||||
- linters:
|
||||
- bodyclose
|
||||
text: "response body must be closed"
|
||||
|
||||
# named/naked returns are evil, with a single exception
|
||||
# https://go.dev/wiki/CodeReviewComments#named-result-parameters
|
||||
- linters:
|
||||
- nonamedreturns
|
||||
text: "named return .* with type .* found"
|
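This configuration is what the golangci-lint steps in the Go test workflows consume. To reproduce the same result locally, one option (a sketch; any install method for the pinned v1.57 line works, v1.57.2 is just an example release):

# install the version line pinned in the workflows and run it with the same arguments
go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.57.2
golangci-lint run --issues-exit-code=1 --timeout 10m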
43 .yamllint (new file)
@@ -0,0 +1,43 @@
---
rules:
  braces:
    min-spaces-inside: 0
    max-spaces-inside: 1
  brackets:
    min-spaces-inside: 0
    max-spaces-inside: 1
  colons:
    max-spaces-before: 0
    max-spaces-after: 1
  commas:
    max-spaces-before: 0
    min-spaces-after: 1
    max-spaces-after: 1
  comments:
    level: warning
    require-starting-space: true
    min-spaces-from-content: 2
  comments-indentation:
    level: warning
  document-end: disable
  document-start: disable
  empty-lines:
    max: 2
    max-start: 0
    max-end: 0
  hyphens:
    max-spaces-after: 1
  indentation:
    spaces: consistent
    indent-sequences: whatever
    check-multi-line-strings: false
  key-duplicates: enable
  line-length:
    max: 180
    allow-non-breakable-words: true
    allow-non-breakable-inline-mappings: false
  new-line-at-end-of-file: enable
  new-lines:
    type: unix
  trailing-spaces: enable
  truthy: disable
3 CONTRIBUTING.md (new file)
@@ -0,0 +1,3 @@

Please refer to [Contributing to CrowdSec](https://doc.crowdsec.net/docs/next/contributing/getting_started).
72 Dockerfile
@@ -1,33 +1,63 @@
|
|||
ARG GOVERSION=1.17
|
||||
# vim: set ft=dockerfile:
|
||||
FROM golang:1.22.2-alpine3.18 AS build
|
||||
|
||||
FROM golang:${GOVERSION}-alpine AS build
|
||||
ARG BUILD_VERSION
|
||||
|
||||
WORKDIR /go/src/crowdsec
|
||||
|
||||
# We like to choose the release of re2 to use, and Alpine does not ship a static version anyway.
|
||||
ENV RE2_VERSION=2023-03-01
|
||||
ENV BUILD_VERSION=${BUILD_VERSION}
|
||||
|
||||
# wizard.sh requires GNU coreutils
|
||||
RUN apk add --no-cache git gcc libc-dev make bash gettext binutils-gold coreutils
|
||||
RUN apk add --no-cache git g++ gcc libc-dev make bash gettext binutils-gold coreutils pkgconfig && \
|
||||
wget https://github.com/google/re2/archive/refs/tags/${RE2_VERSION}.tar.gz && \
|
||||
tar -xzf ${RE2_VERSION}.tar.gz && \
|
||||
cd re2-${RE2_VERSION} && \
|
||||
make install && \
|
||||
echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \
|
||||
go install github.com/mikefarah/yq/v4@v4.43.1
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN SYSTEM="docker" make release
|
||||
RUN cd crowdsec-v* && ./wizard.sh --docker-mode && cd -
|
||||
RUN cscli hub update && cscli collections install crowdsecurity/linux && cscli parsers install crowdsecurity/whitelists
|
||||
FROM alpine:latest
|
||||
RUN apk add --no-cache --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community tzdata yq jq bash && \
|
||||
mkdir -p /staging/etc/crowdsec && \
|
||||
mkdir -p /staging/var/lib/crowdsec
|
||||
RUN make clean release DOCKER_BUILD=1 BUILD_STATIC=1 && \
|
||||
cd crowdsec-v* && \
|
||||
./wizard.sh --docker-mode && \
|
||||
cd - >/dev/null && \
|
||||
cscli hub update && \
|
||||
cscli collections install crowdsecurity/linux && \
|
||||
cscli parsers install crowdsecurity/whitelists
|
||||
|
||||
# In case we need to remove agents here..
|
||||
# cscli machines list -o json | yq '.[].machineId' | xargs -r cscli machines delete
|
||||
|
||||
FROM alpine:latest as slim
|
||||
|
||||
RUN apk add --no-cache --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community tzdata bash rsync && \
|
||||
mkdir -p /staging/etc/crowdsec && \
|
||||
mkdir -p /staging/etc/crowdsec/acquis.d && \
|
||||
mkdir -p /staging/var/lib/crowdsec && \
|
||||
mkdir -p /var/lib/crowdsec/data
|
||||
|
||||
COPY --from=build /go/bin/yq /usr/local/bin/crowdsec /usr/local/bin/cscli /usr/local/bin/
|
||||
COPY --from=build /etc/crowdsec /staging/etc/crowdsec
|
||||
COPY --from=build /var/lib/crowdsec /staging/var/lib/crowdsec
|
||||
COPY --from=build /usr/local/bin/crowdsec /usr/local/bin/crowdsec
|
||||
COPY --from=build /usr/local/bin/cscli /usr/local/bin/cscli
|
||||
COPY --from=build /go/src/crowdsec/docker/docker_start.sh /
|
||||
COPY --from=build /go/src/crowdsec/docker/config.yaml /staging/etc/crowdsec/config.yaml
|
||||
#Due to the wizard using cp -n, we have to copy the config files directly from the source as -n does not exist in busybox cp
|
||||
#The files are here for reference, as users will need to mount a new version to be actually able to use notifications
|
||||
COPY --from=build /go/src/crowdsec/plugins/notifications/email/email.yaml /staging/etc/crowdsec/notifications/email.yaml
|
||||
COPY --from=build /go/src/crowdsec/plugins/notifications/http/http.yaml /staging/etc/crowdsec/notifications/http.yaml
|
||||
COPY --from=build /go/src/crowdsec/plugins/notifications/slack/slack.yaml /staging/etc/crowdsec/notifications/slack.yaml
|
||||
COPY --from=build /go/src/crowdsec/plugins/notifications/splunk/splunk.yaml /staging/etc/crowdsec/notifications/splunk.yaml
|
||||
COPY --from=build /usr/local/lib/crowdsec/plugins /usr/local/lib/crowdsec/plugins
|
||||
COPY --from=build /var/lib/crowdsec /staging/var/lib/crowdsec
|
||||
RUN yq -n '.url="http://0.0.0.0:8080"' | install -m 0600 /dev/stdin /staging/etc/crowdsec/local_api_credentials.yaml
|
||||
|
||||
ENTRYPOINT /bin/bash docker_start.sh
|
||||
ENTRYPOINT /bin/bash /docker_start.sh
|
||||
|
||||
FROM slim as full
|
||||
|
||||
# Due to the wizard using cp -n, we have to copy the config files directly from the source as -n does not exist in busybox cp
|
||||
# The files are here for reference, as users will need to mount a new version to be actually able to use notifications
|
||||
COPY --from=build \
|
||||
/go/src/crowdsec/cmd/notification-email/email.yaml \
|
||||
/go/src/crowdsec/cmd/notification-http/http.yaml \
|
||||
/go/src/crowdsec/cmd/notification-slack/slack.yaml \
|
||||
/go/src/crowdsec/cmd/notification-splunk/splunk.yaml \
|
||||
/go/src/crowdsec/cmd/notification-sentinel/sentinel.yaml \
|
||||
/staging/etc/crowdsec/notifications/
|
||||
|
||||
COPY --from=build /usr/local/lib/crowdsec/plugins /usr/local/lib/crowdsec/plugins
|
||||
|
|
|
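The Dockerfile above is multi-stage (build, then slim, then full), and the CI workflows select a stage through the target option of docker/build-push-action. A minimal local sketch, assuming a checkout of the repository (the version string is only an example value for BUILD_VERSION):

# Alpine-based images from ./Dockerfile; pick the stage with --target
docker build --build-arg BUILD_VERSION=v1.6.0 --target slim -t crowdsec:slim .
docker build --build-arg BUILD_VERSION=v1.6.0 --target full -t crowdsec:full .

# the Debian variant is built the same way from Dockerfile.debian
docker build -f Dockerfile.debian --build-arg BUILD_VERSION=v1.6.0 -t crowdsec:debian .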
@@ -1,47 +1,87 @@
|
|||
ARG GOVERSION=1.17
|
||||
# vim: set ft=dockerfile:
|
||||
FROM golang:1.22.2-bookworm AS build
|
||||
|
||||
FROM golang:${GOVERSION}-bullseye AS build
|
||||
ARG BUILD_VERSION
|
||||
|
||||
WORKDIR /go/src/crowdsec
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
ENV DEBCONF_NOWARNINGS="yes"
|
||||
|
||||
# We like to choose the release of re2 to use, the debian version is usually older.
|
||||
ENV RE2_VERSION=2023-03-01
|
||||
ENV BUILD_VERSION=${BUILD_VERSION}
|
||||
|
||||
# wizard.sh requires GNU coreutils
|
||||
RUN apt-get update && apt-get install -y git jq gcc libc-dev make bash gettext binutils-gold coreutils tzdata python3 python3-pip
|
||||
RUN apt-get update && \
|
||||
apt-get install -y -q git gcc libc-dev make bash gettext binutils-gold coreutils tzdata && \
|
||||
wget https://github.com/google/re2/archive/refs/tags/${RE2_VERSION}.tar.gz && \
|
||||
tar -xzf ${RE2_VERSION}.tar.gz && \
|
||||
cd re2-${RE2_VERSION} && \
|
||||
make && \
|
||||
make install && \
|
||||
echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \
|
||||
go install github.com/mikefarah/yq/v4@v4.43.1
|
||||
|
||||
COPY . .
|
||||
|
||||
RUN SYSTEM="docker" make release
|
||||
RUN cd crowdsec-v* && ./wizard.sh --docker-mode && cd -
|
||||
RUN cscli hub update && cscli collections install crowdsecurity/linux && cscli parsers install crowdsecurity/whitelists
|
||||
RUN GO111MODULE=on go get github.com/mikefarah/yq/v4
|
||||
RUN make clean release DOCKER_BUILD=1 BUILD_STATIC=1 && \
|
||||
cd crowdsec-v* && \
|
||||
./wizard.sh --docker-mode && \
|
||||
cd - >/dev/null && \
|
||||
cscli hub update && \
|
||||
cscli collections install crowdsecurity/linux && \
|
||||
cscli parsers install crowdsecurity/whitelists
|
||||
|
||||
FROM debian:bullseye-slim
|
||||
# In case we need to remove agents here..
|
||||
# cscli machines list -o json | yq '.[].machineId' | xargs -r cscli machines delete
|
||||
|
||||
RUN apt-get update
|
||||
RUN apt-get install -y -q --install-recommends --no-install-suggests \
|
||||
FROM debian:bookworm-slim as slim
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
ENV DEBCONF_NOWARNINGS="yes"
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y -q --install-recommends --no-install-suggests \
|
||||
procps \
|
||||
systemd \
|
||||
iproute2 \
|
||||
ca-certificates \
|
||||
bash \
|
||||
jq \
|
||||
tzdata && \
|
||||
tzdata \
|
||||
rsync && \
|
||||
mkdir -p /staging/etc/crowdsec && \
|
||||
mkdir -p /staging/var/lib/crowdsec
|
||||
mkdir -p /staging/etc/crowdsec/acquis.d && \
|
||||
mkdir -p /staging/var/lib/crowdsec && \
|
||||
mkdir -p /var/lib/crowdsec/data
|
||||
|
||||
COPY --from=build /go/bin/yq /usr/local/bin/yq
|
||||
COPY --from=build /go/bin/yq /usr/local/bin/crowdsec /usr/local/bin/cscli /usr/local/bin/
|
||||
COPY --from=build /etc/crowdsec /staging/etc/crowdsec
|
||||
COPY --from=build /var/lib/crowdsec /staging/var/lib/crowdsec
|
||||
COPY --from=build /usr/local/bin/crowdsec /usr/local/bin/crowdsec
|
||||
COPY --from=build /usr/local/bin/cscli /usr/local/bin/cscli
|
||||
COPY --from=build /go/src/crowdsec/docker/docker_start.sh /
|
||||
COPY --from=build /go/src/crowdsec/docker/config.yaml /staging/etc/crowdsec/config.yaml
|
||||
RUN yq eval -i ".plugin_config.group = \"nogroup\"" /staging/etc/crowdsec/config.yaml
|
||||
#Due to the wizard using cp -n, we have to copy the config files directly from the source as -n does not exist in busybox cp
|
||||
#The files are here for reference, as users will need to mount a new version to be actually able to use notifications
|
||||
COPY --from=build /go/src/crowdsec/plugins/notifications/email/email.yaml /staging/etc/crowdsec/notifications/email.yaml
|
||||
COPY --from=build /go/src/crowdsec/plugins/notifications/http/http.yaml /staging/etc/crowdsec/notifications/http.yaml
|
||||
COPY --from=build /go/src/crowdsec/plugins/notifications/slack/slack.yaml /staging/etc/crowdsec/notifications/slack.yaml
|
||||
COPY --from=build /go/src/crowdsec/plugins/notifications/splunk/splunk.yaml /staging/etc/crowdsec/notifications/splunk.yaml
|
||||
COPY --from=build /usr/local/lib/crowdsec/plugins /usr/local/lib/crowdsec/plugins
|
||||
RUN yq -n '.url="http://0.0.0.0:8080"' | install -m 0600 /dev/stdin /staging/etc/crowdsec/local_api_credentials.yaml && \
|
||||
yq eval -i ".plugin_config.group = \"nogroup\"" /staging/etc/crowdsec/config.yaml
|
||||
|
||||
ENTRYPOINT /bin/bash docker_start.sh
|
||||
|
||||
FROM slim as plugins
|
||||
|
||||
# Due to the wizard using cp -n, we have to copy the config files directly from the source as -n does not exist in busybox cp
|
||||
# The files are here for reference, as users will need to mount a new version to be actually able to use notifications
|
||||
COPY --from=build \
|
||||
/go/src/crowdsec/cmd/notification-email/email.yaml \
|
||||
/go/src/crowdsec/cmd/notification-http/http.yaml \
|
||||
/go/src/crowdsec/cmd/notification-slack/slack.yaml \
|
||||
/go/src/crowdsec/cmd/notification-splunk/splunk.yaml \
|
||||
/go/src/crowdsec/cmd/notification-sentinel/sentinel.yaml \
|
||||
/staging/etc/crowdsec/notifications/
|
||||
|
||||
COPY --from=build /usr/local/lib/crowdsec/plugins /usr/local/lib/crowdsec/plugins
|
||||
|
||||
FROM slim as geoip
|
||||
|
||||
COPY --from=build /var/lib/crowdsec /staging/var/lib/crowdsec
|
||||
|
||||
FROM plugins as full
|
||||
|
||||
COPY --from=build /var/lib/crowdsec /staging/var/lib/crowdsec
|
||||
|
|
2  LICENSE
|
@ -1,6 +1,6 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2020-2022 Crowdsec
|
||||
Copyright (c) 2020-2023 Crowdsec
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
|
438  Makefile
|
@ -1,212 +1,304 @@
|
|||
SYSTEM?= $(shell uname -s | tr '[A-Z]' '[a-z]')
|
||||
include mk/platform.mk
|
||||
include mk/gmsl
|
||||
|
||||
ifneq ("$(wildcard $(CURDIR)/platform/$(SYSTEM).mk)", "")
|
||||
include $(CURDIR)/platform/$(SYSTEM).mk
|
||||
else
|
||||
include $(CURDIR)/platform/linux.mk
|
||||
# By default, this build requires the C++ re2 library to be installed.
|
||||
#
|
||||
# Debian/Ubuntu: apt install libre2-dev
|
||||
# Fedora/CentOS: dnf install re2-devel
|
||||
# FreeBSD: pkg install re2
|
||||
# Alpine: apk add re2-dev
|
||||
# Windows: choco install re2
|
||||
# MacOS: brew install re2
|
||||
|
||||
# To build without re2, run "make BUILD_RE2_WASM=1"
|
||||
# The WASM version is slower and introduces a short delay when starting a process
|
||||
# (including cscli) so it is not recommended for production use.
|
||||
BUILD_RE2_WASM ?= 0
|
||||
|
||||
# To build static binaries, run "make BUILD_STATIC=1".
|
||||
# On some platforms, this requires additional packages
|
||||
# (e.g. glibc-static and libstdc++-static on Fedora/CentOS, which are in the powertools/crb repository).
|
||||
# If the static build fails at the link stage, it might be because the static library is not provided
|
||||
# for your distribution (look for libre2.a). See the Dockerfile for an example of how to build it.
|
||||
BUILD_STATIC ?= 0
|
||||
|
||||
# List of plugins to build
|
||||
PLUGINS ?= $(patsubst ./cmd/notification-%,%,$(wildcard ./cmd/notification-*))
|
||||
|
||||
# Can be overridden, if you can deal with the consequences
|
||||
BUILD_REQUIRE_GO_MAJOR ?= 1
|
||||
BUILD_REQUIRE_GO_MINOR ?= 21
|
||||
|
||||
#--------------------------------------
|
||||
|
||||
GO = go
|
||||
GOTEST = $(GO) test
|
||||
|
||||
BUILD_CODENAME ?= alphaga
|
||||
|
||||
CROWDSEC_FOLDER = ./cmd/crowdsec
|
||||
CSCLI_FOLDER = ./cmd/crowdsec-cli/
|
||||
PLUGINS_DIR_PREFIX = ./cmd/notification-
|
||||
|
||||
CROWDSEC_BIN = crowdsec$(EXT)
|
||||
CSCLI_BIN = cscli$(EXT)
|
||||
|
||||
# semver comparison to select the hub branch requires the version to start with "v"
|
||||
ifneq ($(call substr,$(BUILD_VERSION),1,1),v)
|
||||
$(error BUILD_VERSION "$(BUILD_VERSION)" should start with "v")
|
||||
endif
|
||||
|
||||
CROWDSEC_FOLDER = "./cmd/crowdsec"
|
||||
CSCLI_FOLDER = "./cmd/crowdsec-cli/"
|
||||
|
||||
HTTP_PLUGIN_FOLDER = "./plugins/notifications/http"
|
||||
SLACK_PLUGIN_FOLDER = "./plugins/notifications/slack"
|
||||
SPLUNK_PLUGIN_FOLDER = "./plugins/notifications/splunk"
|
||||
EMAIL_PLUGIN_FOLDER = "./plugins/notifications/email"
|
||||
DUMMY_PLUGIN_FOLDER = "./plugins/notifications/dummy"
|
||||
|
||||
HTTP_PLUGIN_BIN = "notification-http"
|
||||
SLACK_PLUGIN_BIN = "notification-slack"
|
||||
SPLUNK_PLUGIN_BIN = "notification-splunk"
|
||||
EMAIL_PLUGIN_BIN = "notification-email"
|
||||
DUMMY_PLUGIN_BIN= "notification-dummy"
|
||||
|
||||
HTTP_PLUGIN_CONFIG = "http.yaml"
|
||||
SLACK_PLUGIN_CONFIG = "slack.yaml"
|
||||
SPLUNK_PLUGIN_CONFIG = "splunk.yaml"
|
||||
EMAIL_PLUGIN_CONFIG = "email.yaml"
|
||||
|
||||
CROWDSEC_BIN = "crowdsec"
|
||||
CSCLI_BIN = "cscli"
|
||||
BUILD_CMD = "build"
|
||||
|
||||
GOOS ?= $(shell go env GOOS)
|
||||
GOARCH ?= $(shell go env GOARCH)
|
||||
|
||||
# Golang version info
|
||||
GO_MAJOR_VERSION = $(shell go version | cut -c 14- | cut -d' ' -f1 | cut -d'.' -f1)
|
||||
GO_MINOR_VERSION = $(shell go version | cut -c 14- | cut -d' ' -f1 | cut -d'.' -f2)
|
||||
MINIMUM_SUPPORTED_GO_MAJOR_VERSION = 1
|
||||
MINIMUM_SUPPORTED_GO_MINOR_VERSION = 17
|
||||
GO_VERSION_VALIDATION_ERR_MSG = Golang version ($(BUILD_GOVERSION)) is not supported, please use at least $(MINIMUM_SUPPORTED_GO_MAJOR_VERSION).$(MINIMUM_SUPPORTED_GO_MINOR_VERSION)
|
||||
|
||||
# Current versioning information from env
|
||||
BUILD_VERSION ?= "$(shell git describe --tags)"
|
||||
BUILD_GOVERSION = "$(shell go version | cut -d " " -f3 | sed -E 's/[go]+//g')"
|
||||
BUILD_CODENAME = "alphaga"
|
||||
BUILD_TIMESTAMP = $(shell date +%F"_"%T)
|
||||
BUILD_TAG ?= "$(shell git rev-parse HEAD)"
|
||||
DEFAULT_CONFIGDIR ?= "/etc/crowdsec"
|
||||
DEFAULT_DATADIR ?= "/var/lib/crowdsec/data"
|
||||
BINCOVER_TESTING ?= false
|
||||
|
||||
LD_OPTS_VARS= \
|
||||
-X github.com/crowdsecurity/crowdsec/cmd/crowdsec/main.bincoverTesting=$(BINCOVER_TESTING) \
|
||||
-X github.com/crowdsecurity/crowdsec/pkg/cwversion.Version=$(BUILD_VERSION) \
|
||||
-X github.com/crowdsecurity/crowdsec/pkg/cwversion.BuildDate=$(BUILD_TIMESTAMP) \
|
||||
-X github.com/crowdsecurity/crowdsec/pkg/cwversion.Codename=$(BUILD_CODENAME) \
|
||||
-X github.com/crowdsecurity/crowdsec/pkg/cwversion.Tag=$(BUILD_TAG) \
|
||||
-X github.com/crowdsecurity/crowdsec/pkg/csconfig.defaultConfigDir=$(DEFAULT_CONFIGDIR) \
|
||||
-X github.com/crowdsecurity/crowdsec/pkg/csconfig.defaultDataDir=$(DEFAULT_DATADIR)
|
||||
|
||||
export LD_OPTS=-ldflags "-s -w $(LD_OPTS_VARS)"
|
||||
export LD_OPTS_STATIC=-ldflags "-s -w $(LD_OPTS_VARS) -extldflags '-static'"
|
||||
|
||||
GOCMD=go
|
||||
GOTEST=$(GOCMD) test
|
||||
|
||||
# Directory for the release files
|
||||
RELDIR = crowdsec-$(BUILD_VERSION)
|
||||
|
||||
GO_MODULE_NAME = github.com/crowdsecurity/crowdsec
|
||||
|
||||
# Check if a given value is considered truthy and returns "0" or "1".
|
||||
# A truthy value is one of the following: "1", "yes", or "true", case-insensitive.
|
||||
#
|
||||
# Usage:
|
||||
# ifeq ($(call bool,$(FOO)),1)
|
||||
# $(info Let's foo)
|
||||
# endif
|
||||
bool = $(if $(filter $(call lc, $1),1 yes true),1,0)
|
||||
|
||||
#--------------------------------------
|
||||
#
|
||||
# Define MAKE_FLAGS and LD_OPTS for the sub-makefiles in cmd/
|
||||
#
|
||||
|
||||
MAKE_FLAGS = --no-print-directory GOARCH=$(GOARCH) GOOS=$(GOOS) RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)"
|
||||
|
||||
LD_OPTS_VARS= \
|
||||
-X 'github.com/crowdsecurity/go-cs-lib/version.Version=$(BUILD_VERSION)' \
|
||||
-X 'github.com/crowdsecurity/go-cs-lib/version.BuildDate=$(BUILD_TIMESTAMP)' \
|
||||
-X 'github.com/crowdsecurity/go-cs-lib/version.Tag=$(BUILD_TAG)' \
|
||||
-X '$(GO_MODULE_NAME)/pkg/cwversion.Codename=$(BUILD_CODENAME)' \
|
||||
-X '$(GO_MODULE_NAME)/pkg/csconfig.defaultConfigDir=$(DEFAULT_CONFIGDIR)' \
|
||||
-X '$(GO_MODULE_NAME)/pkg/csconfig.defaultDataDir=$(DEFAULT_DATADIR)'
|
||||
|
||||
ifneq (,$(DOCKER_BUILD))
|
||||
LD_OPTS_VARS += -X '$(GO_MODULE_NAME)/pkg/cwversion.System=docker'
|
||||
endif
|
||||
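For context, the -X flags above overwrite package-level string variables at link time. The sketch below is illustrative only; the module path and variable names are assumptions, not necessarily the exact ones defined in go-cs-lib.

// Illustrative sketch of the kind of variables that `go build -ldflags "-X pkg.Var=value"`
// overrides at link time. The module path "example.com/demo" is hypothetical.
package version

import "fmt"

var (
	Version   = "dev" // set via -X example.com/demo/version.Version=v1.6.0
	BuildDate = ""    // set via -X example.com/demo/version.BuildDate=2024-01-01
	Tag       = ""    // set via -X example.com/demo/version.Tag=<git sha>
)

func String() string {
	return fmt.Sprintf("version %s (%s) built %s", Version, Tag, BuildDate)
}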
|
||||
GO_TAGS := netgo,osusergo,sqlite_omit_load_extension
|
||||
|
||||
# this will be used by Go in the make target; some distributions require it
|
||||
export PKG_CONFIG_PATH:=/usr/local/lib/pkgconfig:$(PKG_CONFIG_PATH)
|
||||
|
||||
ifeq ($(call bool,$(BUILD_RE2_WASM)),0)
|
||||
ifeq ($(PKG_CONFIG),)
|
||||
$(error "pkg-config is not available. Please install pkg-config.")
|
||||
endif
|
||||
|
||||
ifeq ($(RE2_CHECK),)
|
||||
RE2_FAIL := "libre2-dev is not installed, please install it or set BUILD_RE2_WASM=1 to use the WebAssembly version"
|
||||
else
|
||||
# += adds a space that we don't want
|
||||
GO_TAGS := $(GO_TAGS),re2_cgo
|
||||
LD_OPTS_VARS += -X '$(GO_MODULE_NAME)/pkg/cwversion.Libre2=C++'
|
||||
endif
|
||||
endif
|
||||
|
||||
# Build static to avoid the runtime dependency on libre2.so
|
||||
ifeq ($(call bool,$(BUILD_STATIC)),1)
|
||||
BUILD_TYPE = static
|
||||
EXTLDFLAGS := -extldflags '-static'
|
||||
else
|
||||
BUILD_TYPE = dynamic
|
||||
EXTLDFLAGS :=
|
||||
endif
|
||||
|
||||
# Build with debug symbols, and disable optimizations + inlining, to use Delve
|
||||
ifeq ($(call bool,$(DEBUG)),1)
|
||||
STRIP_SYMBOLS :=
|
||||
DISABLE_OPTIMIZATION := -gcflags "-N -l"
|
||||
else
|
||||
STRIP_SYMBOLS := -s -w
|
||||
DISABLE_OPTIMIZATION :=
|
||||
endif
|
||||
|
||||
export LD_OPTS=-ldflags "$(STRIP_SYMBOLS) $(EXTLDFLAGS) $(LD_OPTS_VARS)" \
|
||||
-trimpath -tags $(GO_TAGS) $(DISABLE_OPTIMIZATION)
|
||||
|
||||
ifeq ($(call bool,$(TEST_COVERAGE)),1)
|
||||
LD_OPTS += -cover
|
||||
endif
|
||||
|
||||
#--------------------------------------
|
||||
|
||||
.PHONY: build
|
||||
build: goversion crowdsec cscli plugins
|
||||
build: pre-build goversion crowdsec cscli plugins ## Build crowdsec, cscli and plugins
|
||||
|
||||
.PHONY: pre-build
|
||||
pre-build: ## Sanity checks and build information
|
||||
$(info Building $(BUILD_VERSION) ($(BUILD_TAG)) $(BUILD_TYPE) for $(GOOS)/$(GOARCH))
|
||||
|
||||
ifneq (,$(RE2_FAIL))
|
||||
$(error $(RE2_FAIL))
|
||||
endif
|
||||
|
||||
ifneq (,$(RE2_CHECK))
|
||||
$(info Using C++ regexp library)
|
||||
else
|
||||
$(info Fallback to WebAssembly regexp library. To use the C++ version, make sure you have installed libre2-dev and pkg-config.)
|
||||
endif
|
||||
|
||||
ifeq ($(call bool,$(DEBUG)),1)
|
||||
$(info Building with debug symbols and disabled optimizations)
|
||||
endif
|
||||
|
||||
ifeq ($(call bool,$(TEST_COVERAGE)),1)
|
||||
$(info Test coverage collection enabled)
|
||||
endif
|
||||
|
||||
# intentional, empty line
|
||||
$(info )
|
||||
|
||||
.PHONY: all
|
||||
all: clean test build
|
||||
|
||||
.PHONY: static
|
||||
static: crowdsec_static cscli_static plugins_static
|
||||
all: clean test build ## Clean, test and build (requires localstack)
|
||||
|
||||
.PHONY: plugins
|
||||
plugins: http-plugin slack-plugin splunk-plugin email-plugin dummy-plugin
|
||||
plugins: ## Build notification plugins
|
||||
@$(foreach plugin,$(PLUGINS), \
|
||||
$(MAKE) -C $(PLUGINS_DIR_PREFIX)$(plugin) build $(MAKE_FLAGS); \
|
||||
)
|
||||
|
||||
plugins_static: http-plugin_static slack-plugin_static splunk-plugin_static email-plugin_static dummy-plugin_static
|
||||
# same as "$(MAKE) -f debian/rules clean" but without the dependency on debhelper
|
||||
.PHONY: clean-debian
|
||||
clean-debian:
|
||||
@$(RM) -r debian/crowdsec
|
||||
@$(RM) -r debian/crowdsec
|
||||
@$(RM) -r debian/files
|
||||
@$(RM) -r debian/.debhelper
|
||||
@$(RM) -r debian/*.substvars
|
||||
@$(RM) -r debian/*-stamp
|
||||
|
||||
goversion:
|
||||
@if [ $(GO_MAJOR_VERSION) -gt $(MINIMUM_SUPPORTED_GO_MAJOR_VERSION) ]; then \
|
||||
exit 0 ;\
|
||||
elif [ $(GO_MAJOR_VERSION) -lt $(MINIMUM_SUPPORTED_GO_MAJOR_VERSION) ]; then \
|
||||
echo '$(GO_VERSION_VALIDATION_ERR_MSG)';\
|
||||
exit 1; \
|
||||
elif [ $(GO_MINOR_VERSION) -lt $(MINIMUM_SUPPORTED_GO_MINOR_VERSION) ] ; then \
|
||||
echo '$(GO_VERSION_VALIDATION_ERR_MSG)';\
|
||||
exit 1; \
|
||||
fi
|
||||
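The goversion recipe above rejects toolchains older than the minimum major/minor version. As a hedged, stand-alone illustration only (not code from this repository), the same gate expressed in Go, assuming runtime.Version() returns a string like "go1.21.5":

// Stand-alone sketch of the version check performed by the "goversion" target above.
// Assumes runtime.Version() looks like "go1.21.5"; development builds may differ.
package main

import (
	"fmt"
	"os"
	"runtime"
	"strconv"
	"strings"
)

func main() {
	const minMajor, minMinor = 1, 17 // mirrors the minimum in the old Makefile side

	v := strings.TrimPrefix(runtime.Version(), "go")
	parts := strings.SplitN(v, ".", 3)
	if len(parts) < 2 {
		fmt.Printf("cannot parse Go version %q, skipping check\n", v)
		return
	}

	major, _ := strconv.Atoi(parts[0])
	minor, _ := strconv.Atoi(parts[1])
	if major < minMajor || (major == minMajor && minor < minMinor) {
		fmt.Printf("Golang version (%s) is not supported, please use at least %d.%d\n", v, minMajor, minMinor)
		os.Exit(1)
	}
}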
.PHONY: clean-rpm
|
||||
clean-rpm:
|
||||
@$(RM) -r rpm/BUILD
|
||||
@$(RM) -r rpm/BUILDROOT
|
||||
@$(RM) -r rpm/RPMS
|
||||
@$(RM) -r rpm/SOURCES/*.tar.gz
|
||||
@$(RM) -r rpm/SRPMS
|
||||
|
||||
.PHONY: clean
|
||||
clean: testclean
|
||||
@$(MAKE) -C $(CROWDSEC_FOLDER) clean --no-print-directory
|
||||
@$(MAKE) -C $(CSCLI_FOLDER) clean --no-print-directory
|
||||
@$(RM) $(CROWDSEC_BIN)
|
||||
@$(RM) $(CSCLI_BIN)
|
||||
@$(RM) *.log
|
||||
@$(RM) crowdsec-release.tgz
|
||||
@$(RM) crowdsec-release-static.tgz
|
||||
@$(RM) $(HTTP_PLUGIN_FOLDER)/$(HTTP_PLUGIN_BIN)
|
||||
@$(RM) $(SLACK_PLUGIN_FOLDER)/$(SLACK_PLUGIN_BIN)
|
||||
@$(RM) $(SPLUNK_PLUGIN_FOLDER)/$(SPLUNK_PLUGIN_BIN)
|
||||
@$(RM) $(EMAIL_PLUGIN_FOLDER)/$(EMAIL_PLUGIN_BIN)
|
||||
@$(RM) $(DUMMY_PLUGIN_FOLDER)/$(DUMMY_PLUGIN_BIN)
|
||||
clean: clean-debian clean-rpm testclean ## Remove build artifacts
|
||||
@$(MAKE) -C $(CROWDSEC_FOLDER) clean $(MAKE_FLAGS)
|
||||
@$(MAKE) -C $(CSCLI_FOLDER) clean $(MAKE_FLAGS)
|
||||
@$(RM) $(CROWDSEC_BIN) $(WIN_IGNORE_ERR)
|
||||
@$(RM) $(CSCLI_BIN) $(WIN_IGNORE_ERR)
|
||||
@$(RM) *.log $(WIN_IGNORE_ERR)
|
||||
@$(RM) crowdsec-release.tgz $(WIN_IGNORE_ERR)
|
||||
@$(foreach plugin,$(PLUGINS), \
|
||||
$(MAKE) -C $(PLUGINS_DIR_PREFIX)$(plugin) clean $(MAKE_FLAGS); \
|
||||
)
|
||||
|
||||
.PHONY: cscli
|
||||
cscli: goversion ## Build cscli
|
||||
@$(MAKE) -C $(CSCLI_FOLDER) build $(MAKE_FLAGS)
|
||||
|
||||
cscli: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(CSCLI_FOLDER) build --no-print-directory
|
||||
.PHONY: crowdsec
|
||||
crowdsec: goversion ## Build crowdsec
|
||||
@$(MAKE) -C $(CROWDSEC_FOLDER) build $(MAKE_FLAGS)
|
||||
|
||||
cscli-bincover: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(CSCLI_FOLDER) build-bincover --no-print-directory
|
||||
|
||||
crowdsec: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(CROWDSEC_FOLDER) build --no-print-directory
|
||||
|
||||
crowdsec-bincover: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(CROWDSEC_FOLDER) build-bincover --no-print-directory
|
||||
|
||||
http-plugin: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(HTTP_PLUGIN_FOLDER) build --no-print-directory
|
||||
|
||||
slack-plugin: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(SLACK_PLUGIN_FOLDER) build --no-print-directory
|
||||
|
||||
splunk-plugin: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(SPLUNK_PLUGIN_FOLDER) build --no-print-directory
|
||||
|
||||
email-plugin: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(EMAIL_PLUGIN_FOLDER) build --no-print-directory
|
||||
|
||||
dummy-plugin: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(DUMMY_PLUGIN_FOLDER) build --no-print-directory
|
||||
|
||||
cscli_static: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(CSCLI_FOLDER) static --no-print-directory
|
||||
|
||||
crowdsec_static: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(CROWDSEC_FOLDER) static --no-print-directory
|
||||
|
||||
http-plugin_static: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(HTTP_PLUGIN_FOLDER) static --no-print-directory
|
||||
|
||||
slack-plugin_static: goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(SLACK_PLUGIN_FOLDER) static --no-print-directory
|
||||
|
||||
splunk-plugin_static:goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(SPLUNK_PLUGIN_FOLDER) static --no-print-directory
|
||||
|
||||
email-plugin_static:goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(EMAIL_PLUGIN_FOLDER) static --no-print-directory
|
||||
|
||||
dummy-plugin_static:goversion
|
||||
@GOARCH=$(GOARCH) GOOS=$(GOOS) $(MAKE) -C $(DUMMY_PLUGIN_FOLDER) static --no-print-directory
|
||||
.PHONY: generate
|
||||
generate: ## Generate code for the database and APIs
|
||||
$(GO) generate ./pkg/database/ent
|
||||
$(GO) generate ./pkg/models
|
||||
|
||||
.PHONY: testclean
|
||||
testclean: bats-clean
|
||||
@$(RM) pkg/apiserver/ent
|
||||
@$(RM) -r pkg/cwhub/hubdir
|
||||
testclean: bats-clean ## Remove test artifacts
|
||||
@$(RM) pkg/apiserver/ent $(WIN_IGNORE_ERR)
|
||||
@$(RM) pkg/cwhub/hubdir $(WIN_IGNORE_ERR)
|
||||
@$(RM) pkg/cwhub/install $(WIN_IGNORE_ERR)
|
||||
@$(RM) pkg/types/example.txt $(WIN_IGNORE_ERR)
|
||||
|
||||
# for the tests with localstack
|
||||
export AWS_ENDPOINT_FORCE=http://localhost:4566
|
||||
export AWS_ACCESS_KEY_ID=test
|
||||
export AWS_SECRET_ACCESS_KEY=test
|
||||
|
||||
testenv:
|
||||
@echo 'NOTE: You need Docker and docker-compose, and to run "make localstack" in a separate shell ("make localstack-stop" to terminate it)'
|
||||
|
||||
.PHONY: test
|
||||
test: goversion
|
||||
test: testenv goversion ## Run unit tests with localstack
|
||||
$(GOTEST) $(LD_OPTS) ./...
|
||||
|
||||
package-common:
|
||||
@echo Building Release to dir $(RELDIR)
|
||||
@mkdir -p $(RELDIR)/cmd/crowdsec
|
||||
@mkdir -p $(RELDIR)/cmd/crowdsec-cli
|
||||
@mkdir -p $(RELDIR)/$(subst ./,,$(HTTP_PLUGIN_FOLDER))
|
||||
@mkdir -p $(RELDIR)/$(subst ./,,$(SLACK_PLUGIN_FOLDER))
|
||||
@mkdir -p $(RELDIR)/$(subst ./,,$(SPLUNK_PLUGIN_FOLDER))
|
||||
@mkdir -p $(RELDIR)/$(subst ./,,$(EMAIL_PLUGIN_FOLDER))
|
||||
.PHONY: go-acc
|
||||
go-acc: testenv goversion ## Run unit tests with localstack + coverage
|
||||
go-acc ./... -o coverage.out --ignore database,notifications,protobufs,cwversion,cstest,models -- $(LD_OPTS)
|
||||
|
||||
@cp $(CROWDSEC_FOLDER)/$(CROWDSEC_BIN) $(RELDIR)/cmd/crowdsec
|
||||
@cp $(CSCLI_FOLDER)/$(CSCLI_BIN) $(RELDIR)/cmd/crowdsec-cli
|
||||
# mock AWS services
|
||||
.PHONY: localstack
|
||||
localstack: ## Run localstack containers (required for unit testing)
|
||||
docker-compose -f test/localstack/docker-compose.yml up
|
||||
|
||||
@cp $(HTTP_PLUGIN_FOLDER)/$(HTTP_PLUGIN_BIN) $(RELDIR)/$(subst ./,,$(HTTP_PLUGIN_FOLDER))
|
||||
@cp $(SLACK_PLUGIN_FOLDER)/$(SLACK_PLUGIN_BIN) $(RELDIR)/$(subst ./,,$(SLACK_PLUGIN_FOLDER))
|
||||
@cp $(SPLUNK_PLUGIN_FOLDER)/$(SPLUNK_PLUGIN_BIN) $(RELDIR)/$(subst ./,,$(SPLUNK_PLUGIN_FOLDER))
|
||||
@cp $(EMAIL_PLUGIN_FOLDER)/$(EMAIL_PLUGIN_BIN) $(RELDIR)/$(subst ./,,$(EMAIL_PLUGIN_FOLDER))
|
||||
.PHONY: localstack-stop
|
||||
localstack-stop: ## Stop localstack containers
|
||||
docker-compose -f test/localstack/docker-compose.yml down
|
||||
|
||||
@cp $(HTTP_PLUGIN_FOLDER)/$(HTTP_PLUGIN_CONFIG) $(RELDIR)/$(subst ./,,$(HTTP_PLUGIN_FOLDER))
|
||||
@cp $(SLACK_PLUGIN_FOLDER)/$(SLACK_PLUGIN_CONFIG) $(RELDIR)/$(subst ./,,$(SLACK_PLUGIN_FOLDER))
|
||||
@cp $(SPLUNK_PLUGIN_FOLDER)/$(SPLUNK_PLUGIN_CONFIG) $(RELDIR)/$(subst ./,,$(SPLUNK_PLUGIN_FOLDER))
|
||||
@cp $(EMAIL_PLUGIN_FOLDER)/$(EMAIL_PLUGIN_CONFIG) $(RELDIR)/$(subst ./,,$(EMAIL_PLUGIN_FOLDER))
|
||||
# build vendor.tgz to be distributed with the release
|
||||
.PHONY: vendor
|
||||
vendor: vendor-remove ## CI only - vendor dependencies and archive them for packaging
|
||||
$(GO) mod vendor
|
||||
tar czf vendor.tgz vendor
|
||||
tar --create --auto-compress --file=$(RELDIR)-vendor.tar.xz vendor
|
||||
|
||||
@cp -R ./config $(RELDIR)
|
||||
@cp wizard.sh $(RELDIR)
|
||||
@cp scripts/test_env.sh $(RELDIR)
|
||||
# remove vendor directories and vendor.tgz
|
||||
.PHONY: vendor-remove
|
||||
vendor-remove: ## Remove vendor dependencies and archives
|
||||
$(RM) vendor vendor.tgz *-vendor.tar.xz
|
||||
|
||||
.PHONY: package
|
||||
package: package-common
|
||||
@tar cvzf crowdsec-release.tgz $(RELDIR)
|
||||
package:
|
||||
@echo "Building Release to dir $(RELDIR)"
|
||||
@$(MKDIR) $(RELDIR)/cmd/crowdsec
|
||||
@$(MKDIR) $(RELDIR)/cmd/crowdsec-cli
|
||||
@$(CP) $(CROWDSEC_FOLDER)/$(CROWDSEC_BIN) $(RELDIR)/cmd/crowdsec
|
||||
@$(CP) $(CSCLI_FOLDER)/$(CSCLI_BIN) $(RELDIR)/cmd/crowdsec-cli
|
||||
|
||||
package_static: package-common
|
||||
@tar cvzf crowdsec-release-static.tgz $(RELDIR)
|
||||
@$(foreach plugin,$(PLUGINS), \
|
||||
$(MKDIR) $(RELDIR)/$(PLUGINS_DIR_PREFIX)$(plugin); \
|
||||
$(CP) $(PLUGINS_DIR_PREFIX)$(plugin)/notification-$(plugin)$(EXT) $(RELDIR)/$(PLUGINS_DIR_PREFIX)$(plugin); \
|
||||
$(CP) $(PLUGINS_DIR_PREFIX)$(plugin)/$(plugin).yaml $(RELDIR)/$(PLUGINS_DIR_PREFIX)$(plugin)/; \
|
||||
)
|
||||
|
||||
@$(CPR) ./config $(RELDIR)
|
||||
@$(CP) wizard.sh $(RELDIR)
|
||||
@$(CP) scripts/test_env.sh $(RELDIR)
|
||||
@$(CP) scripts/test_env.ps1 $(RELDIR)
|
||||
|
||||
@tar cvzf crowdsec-release.tgz $(RELDIR)
|
||||
|
||||
.PHONY: check_release
|
||||
check_release:
|
||||
ifneq ($(OS), Windows_NT)
|
||||
@if [ -d $(RELDIR) ]; then echo "$(RELDIR) already exists, abort" ; exit 1 ; fi
|
||||
else
|
||||
@if (Test-Path -Path $(RELDIR)) { echo "$(RELDIR) already exists, abort" ; exit 1 ; }
|
||||
endif
|
||||
|
||||
.PHONY: release
|
||||
release: check_release build package
|
||||
release: check_release build package ## Build a release tarball
|
||||
|
||||
.PHONY: release_static
|
||||
release_static: check_release static package_static
|
||||
.PHONY: windows_installer
|
||||
windows_installer: build ## Windows - build the installer
|
||||
@.\make_installer.ps1 -version $(BUILD_VERSION)
|
||||
|
||||
include tests/bats.mk
|
||||
.PHONY: chocolatey
|
||||
chocolatey: windows_installer ## Windows - build the chocolatey package
|
||||
@.\make_chocolatey.ps1 -version $(BUILD_VERSION)
|
||||
|
||||
# Include test/bats.mk only if it exists
|
||||
# to allow building without a test/ directory
|
||||
# (i.e. inside docker)
|
||||
ifeq (,$(wildcard test/bats.mk))
|
||||
bats-clean:
|
||||
else
|
||||
include test/bats.mk
|
||||
endif
|
||||
|
||||
include mk/goversion.mk
|
||||
include mk/help.mk
|
||||
|
|
31  README.md
|
@ -6,28 +6,37 @@
|
|||
</br>
|
||||
</br>
|
||||
<p align="center">
|
||||
<img src="https://github.com/crowdsecurity/crowdsec/workflows/tests/badge.svg">
|
||||
<img src="https://github.com/crowdsecurity/crowdsec/workflows/build/badge.svg">
|
||||
<a href='https://coveralls.io/github/crowdsecurity/crowdsec?branch=master'><img src='https://coveralls.io/repos/github/crowdsecurity/crowdsec/badge.svg?branch=master' alt='Coverage Status' /></a>
|
||||
<img src="https://github.com/crowdsecurity/crowdsec/actions/workflows/go-tests.yml/badge.svg">
|
||||
<img src="https://github.com/crowdsecurity/crowdsec/actions/workflows/bats.yml/badge.svg">
|
||||
<a href="https://codecov.io/gh/crowdsecurity/crowdsec">
|
||||
<img src="https://codecov.io/gh/crowdsecurity/crowdsec/branch/master/graph/badge.svg?token=CQGSPNY3PT"/>
|
||||
</a>
|
||||
<img src="https://goreportcard.com/badge/github.com/crowdsecurity/crowdsec">
|
||||
<img src="https://img.shields.io/github/license/crowdsecurity/crowdsec">
|
||||
<img src="https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/AlteredCoder/ed74e50c43e3b17bdfc4d93149f23d37/raw/crowdsec_parsers_badge.json">
|
||||
<img src="https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/AlteredCoder/ed74e50c43e3b17bdfc4d93149f23d37/raw/crowdsec_scenarios_badge.json">
|
||||
<a href="https://hub.docker.com/r/crowdsecurity/crowdsec">
|
||||
<img src="https://img.shields.io/docker/pulls/crowdsecurity/crowdsec?logo=docker">
|
||||
</a>
|
||||
<a href="https://discord.com/invite/crowdsec">
|
||||
<img src="https://img.shields.io/discord/921520481163673640?label=Discord&logo=discord">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
:computer: <a href="https://app.crowdsec.net">Console (WebApp)</a>
|
||||
:books: <a href="https://doc.crowdsec.net">Documentation</a>
|
||||
:diamond_shape_with_a_dot_inside: <a href="https://hub.crowdsec.net">Configuration Hub</a>
|
||||
:speech_balloon: <a href="https://discourse.crowdsec.net">Discourse (Forum)</a>
|
||||
:speech_balloon: <a href="https://discord.gg/wGN7ShmEE8">Discord (Live Chat)</a>
|
||||
:speech_balloon: <a href="https://discord.gg/crowdsec">Discord (Live Chat)</a>
|
||||
</p>
|
||||
|
||||
|
||||
:dancer: This is a community driven project, <a href="https://forms.gle/ZQBQcptG2wYGajRX8">we need your feedback</a>.
|
||||
:dancer: This is a community-driven project, <a href="https://forms.gle/ZQBQcptG2wYGajRX8">we need your feedback</a>.
|
||||
|
||||
## <TL;DR>
|
||||
|
||||
CrowdSec is a free, modern & collaborative behavior detection engine, coupled with a global IP reputation network. It stacks on fail2ban's philosophy but is IPV6 compatible and 60x faster (Go vs Python), uses Grok patterns to parse logs and YAML scenario to identify behaviors. CrowdSec is engineered for modern Cloud / Containers / VM based infrastructures (by decoupling detection and remediation). Once detected you can remedy threats with various bouncers (firewall block, nginx http 403, Captchas, etc.) while the aggressive IP can be sent to CrowdSec for curation before being shared among all users to further improve everyone's security. See [FAQ](https://doc.crowdsec.net/docs/faq) or read below for more.
|
||||
CrowdSec is a free, modern & collaborative behavior detection engine, coupled with a global IP reputation network. It stacks on fail2ban's philosophy but is IPV6 compatible and 60x faster (Go vs Python), it uses Grok patterns to parse logs and YAML scenarios to identify behaviors. CrowdSec is engineered for modern Cloud / Containers / VM-based infrastructures (by decoupling detection and remediation). Once detected you can remedy threats with various bouncers (firewall block, nginx http 403, Captchas, etc.) while the aggressive IP can be sent to CrowdSec for curation before being shared among all users to further improve everyone's security. See [FAQ](https://doc.crowdsec.net/docs/faq) or read below for more.
|
||||
|
||||
## 2 mins install
|
||||
|
||||
|
@ -66,7 +75,7 @@ cd crowdsec-v* && sudo ./wizard.sh -i
|
|||
|
||||
## :information_source: About the CrowdSec project
|
||||
|
||||
Crowdsec is an open-source, lightweight software, detecting peers with aggressive behaviors to prevent them from accessing your systems. Its user friendly design and assistance offers a low technical barrier of entry and nevertheless a high security gain.
|
||||
Crowdsec is an open-source, lightweight software, detecting peers with aggressive behaviors to prevent them from accessing your systems. Its user-friendly design and assistance offer a low technical barrier of entry and nevertheless a high security gain.
|
||||
|
||||
The architecture is as follows :
|
||||
|
||||
|
@ -78,15 +87,15 @@ Once an unwanted behavior is detected, deal with it through a [bouncer](https://
|
|||
|
||||
## Outnumbering hackers all together
|
||||
|
||||
By sharing the threat they faced, all users are protecting each-others (hence the name Crowd-Security). Crowdsec is designed for modern infrastructures, with its "*Detect Here, Remedy There*" approach, letting you analyse logs coming from several sources in one place and block threats at various levels (applicative, system, infrastructural) of your stack.
|
||||
By sharing the threat they faced, all users are protecting each other (hence the name Crowd-Security). Crowdsec is designed for modern infrastructures, with its "*Detect Here, Remedy There*" approach, letting you analyze logs coming from several sources in one place and block threats at various levels (applicative, system, infrastructural) of your stack.
|
||||
|
||||
CrowdSec ships by default with scenarios (brute force, port scan, web scan, etc.) adapted for most context, but you can easily extend it by picking more of them from the **[HUB](https://hub.crowdsec.net)**. It is also easy to adapt an existing one or create one yourself.
|
||||
CrowdSec ships by default with scenarios (brute force, port scan, web scan, etc.) adapted for most contexts, but you can easily extend it by picking more of them from the **[HUB](https://hub.crowdsec.net)**. It is also easy to adapt an existing one or create one yourself.
|
||||
|
||||
## :point_right: What it is not
|
||||
|
||||
CrowdSec is not a SIEM, storing your logs (neither locally nor remotely). Your data are analyzed locally and forgotten.
|
||||
|
||||
Signals sent to the curation platform are limited to the very strict minimum: IP, Scenario, Timestamp. They are only used to allow the system to spot new rogue IPs, rule out false positives or poisoning attempts.
|
||||
Signals sent to the curation platform are limited to the very strict minimum: IP, Scenario, Timestamp. They are only used to allow the system to spot new rogue IPs, and rule out false positives or poisoning attempts.
|
||||
|
||||
## :arrow_down: Install it !
|
||||
|
||||
|
@ -132,7 +141,7 @@ Or look directly at [installation documentation](https://doc.crowdsec.net/docs/g
|
|||
### Hot & Cold logs
|
||||
|
||||
<details>
|
||||
<summary>Process cold logs, for forensic, tests and chasing false-positives & false negatives (click to expand)</summary>
|
||||
<summary>Process cold logs, for forensic, tests and chasing false positives & false negatives (click to expand)</summary>
|
||||
<img src="https://github.com/crowdsecurity/crowdsec-docs/blob/main/crowdsec-docs/static/img/forensic-mode.gif?raw=true">
|
||||
</details>
|
||||
|
||||
|
|
31  SECURITY.md  Normal file
|
@ -0,0 +1,31 @@
|
|||
# Security Policy
|
||||
|
||||
## Scope
|
||||
|
||||
This security policy applies to:
|
||||
- Crowdsec agent
|
||||
- Crowdsec Local API
|
||||
- Crowdsec bouncers **developed and maintained** by the Crowdsec team [1]
|
||||
|
||||
Reports regarding developments by community members who are not part of the crowdsecurity organization will nonetheless be thoroughly investigated.
|
||||
|
||||
[1] Projects developed and maintained by the Crowdsec team are under the **crowdsecurity** GitHub organization. Bouncers developed by community members who are not part of the Crowdsec organization are explicitly excluded.
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
We are extremely grateful to security researchers and users that report vulnerabilities regarding the Crowdsec project. All reports are thoroughly investigated by members of the Crowdsec organization.
|
||||
|
||||
You can email the private [security@crowdsec.net](mailto:security@crowdsec.net) list with the security details and the details expected for [all Crowdsec bug reports](https://github.com/crowdsecurity/crowdsec/blob/master/.github/ISSUE_TEMPLATE/bug_report.md).
|
||||
|
||||
You may encrypt your email to this list using the GPG key of the [Security team](https://doc.crowdsec.net/docs/next/contact_team). Encryption using GPG is NOT required to make a disclosure.
|
||||
|
||||
## When Should I Report a Vulnerability?
|
||||
|
||||
- You think you discovered a potential security vulnerability in Crowdsec
|
||||
- You are unsure how a vulnerability affects Crowdsec
|
||||
- You think you discovered a vulnerability in another project that Crowdsec depends on
|
||||
|
||||
For projects with their own vulnerability reporting and disclosure process, please report it directly there.
|
||||
|
||||
|
||||
<!-- Very heavily inspired from https://kubernetes.io/docs/reference/issues-security/security/ -->
|
186  azure-pipelines.yml  Normal file
|
@ -0,0 +1,186 @@
|
|||
trigger:
|
||||
tags:
|
||||
include:
|
||||
- "v*"
|
||||
exclude:
|
||||
- "v*freebsd"
|
||||
branches:
|
||||
exclude:
|
||||
- "*"
|
||||
pr: none
|
||||
|
||||
pool:
|
||||
vmImage: windows-latest
|
||||
|
||||
stages:
|
||||
- stage: Build
|
||||
jobs:
|
||||
- job: Build
|
||||
displayName: "Build"
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go"
|
||||
inputs:
|
||||
version: '1.22.2'
|
||||
|
||||
- pwsh: |
|
||||
choco install -y make
|
||||
displayName: "Install build deps"
|
||||
- task: PowerShell@2
|
||||
inputs:
|
||||
targetType: 'inline'
|
||||
pwsh: true
|
||||
#we are not calling make windows_installer because we want to sign the binaries before they are added to the MSI
|
||||
script: |
|
||||
make build BUILD_RE2_WASM=1
|
||||
|
||||
- pwsh: |
|
||||
$build_version=$env:BUILD_SOURCEBRANCHNAME
|
||||
#Override the version if it's set in the pipeline
|
||||
if ( ${env:USERBUILDVERSION} -ne "")
|
||||
{
|
||||
$build_version = ${env:USERBUILDVERSION}
|
||||
}
|
||||
if ($build_version.StartsWith("v"))
|
||||
{
|
||||
$build_version = $build_version.Substring(1)
|
||||
}
|
||||
if ($build_version.Contains("-"))
|
||||
{
|
||||
$build_version = $build_version.Substring(0, $build_version.IndexOf("-"))
|
||||
}
|
||||
Write-Host "##vso[task.setvariable variable=BuildVersion;isOutput=true]$build_version"
|
||||
displayName: GetCrowdsecVersion
|
||||
name: GetCrowdsecVersion
|
||||
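The GetCrowdsecVersion step above strips a leading "v" and anything after the first "-" from the tag name. Purely as an illustration (not part of the pipeline), the same normalization expressed in Go:

// Illustration only: mirrors the tag normalization done by the GetCrowdsecVersion step,
// e.g. "v1.6.0-rc3" -> "1.6.0".
package main

import (
	"fmt"
	"strings"
)

func normalizeBuildVersion(tag string) string {
	tag = strings.TrimPrefix(tag, "v")
	if i := strings.Index(tag, "-"); i >= 0 {
		tag = tag[:i]
	}
	return tag
}

func main() {
	fmt.Println(normalizeBuildVersion("v1.6.0-rc3")) // 1.6.0
}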
- pwsh: |
|
||||
Get-ChildItem -Path .\cmd -Directory | ForEach-Object {
|
||||
$dirName = $_.Name
|
||||
Get-ChildItem -Path .\cmd\$dirName -File -Filter '*.exe' | ForEach-Object {
|
||||
$fileName = $_.Name
|
||||
$destDir = Join-Path $(Build.ArtifactStagingDirectory) cmd\$dirName
|
||||
New-Item -ItemType Directory -Path $destDir -Force
|
||||
Copy-Item -Path .\cmd\$dirName\$fileName -Destination $destDir
|
||||
}
|
||||
}
|
||||
displayName: "Copy binaries to staging directory"
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
targetPath: '$(Build.ArtifactStagingDirectory)'
|
||||
artifact: 'unsigned_binaries'
|
||||
displayName: "Upload binaries artifact"
|
||||
|
||||
- stage: Sign
|
||||
dependsOn: Build
|
||||
variables:
|
||||
- group: 'FOSS Build Variables'
|
||||
- name: BuildVersion
|
||||
value: $[ stageDependencies.Build.Build.outputs['GetCrowdsecVersion.BuildVersion'] ]
|
||||
condition: succeeded()
|
||||
jobs:
|
||||
- job: Sign
|
||||
displayName: "Sign"
|
||||
steps:
|
||||
- download: current
|
||||
artifact: unsigned_binaries
|
||||
displayName: "Download binaries artifact"
|
||||
- task: CopyFiles@2
|
||||
inputs:
|
||||
SourceFolder: '$(Pipeline.Workspace)/unsigned_binaries'
|
||||
TargetFolder: '$(Build.SourcesDirectory)'
|
||||
displayName: "Copy binaries to workspace"
|
||||
- task: DotNetCoreCLI@2
|
||||
displayName: "Install SignTool tool"
|
||||
inputs:
|
||||
command: 'custom'
|
||||
custom: 'tool'
|
||||
arguments: install --global sign --version 0.9.0-beta.23127.3
|
||||
- task: AzureKeyVault@2
|
||||
displayName: "Get signing parameters"
|
||||
inputs:
|
||||
azureSubscription: "Azure subscription"
|
||||
KeyVaultName: "$(KeyVaultName)"
|
||||
SecretsFilter: "TenantId,ClientId,ClientSecret,Certificate,KeyVaultUrl"
|
||||
- pwsh: |
|
||||
sign code azure-key-vault `
|
||||
"**/*.exe" `
|
||||
--base-directory "$(Build.SourcesDirectory)/cmd/" `
|
||||
--publisher-name "CrowdSec" `
|
||||
--description "CrowdSec" `
|
||||
--description-url "https://github.com/crowdsecurity/crowdsec" `
|
||||
--azure-key-vault-tenant-id "$(TenantId)" `
|
||||
--azure-key-vault-client-id "$(ClientId)" `
|
||||
--azure-key-vault-client-secret "$(ClientSecret)" `
|
||||
--azure-key-vault-certificate "$(Certificate)" `
|
||||
--azure-key-vault-url "$(KeyVaultUrl)"
|
||||
displayName: "Sign crowdsec binaries"
|
||||
- pwsh: |
|
||||
.\make_installer.ps1 -version '$(BuildVersion)'
|
||||
displayName: "Build Crowdsec MSI"
|
||||
name: BuildMSI
|
||||
- pwsh: |
|
||||
.\make_chocolatey.ps1 -version '$(BuildVersion)'
|
||||
displayName: "Build Chocolatey nupkg"
|
||||
- pwsh: |
|
||||
sign code azure-key-vault `
|
||||
"*.msi" `
|
||||
--base-directory "$(Build.SourcesDirectory)" `
|
||||
--publisher-name "CrowdSec" `
|
||||
--description "CrowdSec" `
|
||||
--description-url "https://github.com/crowdsecurity/crowdsec" `
|
||||
--azure-key-vault-tenant-id "$(TenantId)" `
|
||||
--azure-key-vault-client-id "$(ClientId)" `
|
||||
--azure-key-vault-client-secret "$(ClientSecret)" `
|
||||
--azure-key-vault-certificate "$(Certificate)" `
|
||||
--azure-key-vault-url "$(KeyVaultUrl)"
|
||||
displayName: "Sign MSI package"
|
||||
- pwsh: |
|
||||
sign code azure-key-vault `
|
||||
"*.nupkg" `
|
||||
--base-directory "$(Build.SourcesDirectory)" `
|
||||
--publisher-name "CrowdSec" `
|
||||
--description "CrowdSec" `
|
||||
--description-url "https://github.com/crowdsecurity/crowdsec" `
|
||||
--azure-key-vault-tenant-id "$(TenantId)" `
|
||||
--azure-key-vault-client-id "$(ClientId)" `
|
||||
--azure-key-vault-client-secret "$(ClientSecret)" `
|
||||
--azure-key-vault-certificate "$(Certificate)" `
|
||||
--azure-key-vault-url "$(KeyVaultUrl)"
|
||||
displayName: "Sign nuget package"
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
targetPath: '$(Build.SourcesDirectory)/crowdsec_$(BuildVersion).msi'
|
||||
artifact: 'signed_msi_package'
|
||||
displayName: "Upload signed MSI artifact"
|
||||
- task: PublishPipelineArtifact@1
|
||||
inputs:
|
||||
targetPath: '$(Build.SourcesDirectory)/crowdsec.$(BuildVersion).nupkg'
|
||||
artifact: 'signed_nuget_package'
|
||||
displayName: "Upload signed nuget artifact"
|
||||
|
||||
- stage: Publish
|
||||
dependsOn: Sign
|
||||
jobs:
|
||||
- deployment: "Publish"
|
||||
displayName: "Publish to GitHub"
|
||||
environment: github
|
||||
strategy:
|
||||
runOnce:
|
||||
deploy:
|
||||
steps:
|
||||
- bash: |
|
||||
tag=$(curl -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/crowdsecurity/crowdsec/releases | jq -r '. | map(select(.prerelease==true)) | sort_by(.created_at) | reverse | .[0].tag_name')
|
||||
echo "##vso[task.setvariable variable=LatestPreRelease;isOutput=true]$tag"
|
||||
name: GetLatestPrelease
|
||||
- task: GitHubRelease@1
|
||||
inputs:
|
||||
gitHubConnection: "github.com_blotus"
|
||||
repositoryName: '$(Build.Repository.Name)'
|
||||
action: 'edit'
|
||||
tag: '$(GetLatestPrelease.LatestPreRelease)'
|
||||
assetUploadMode: 'replace'
|
||||
addChangeLog: false
|
||||
isPreRelease: true #we force prerelease because the pipeline is invoked on tag creation, which happens when we do a prerelease
|
||||
assets: |
|
||||
$(Pipeline.Workspace)/signed_msi_package/*.msi
|
||||
$(Pipeline.Workspace)/signed_nuget_package/*.nupkg
|
||||
condition: ne(variables['GetLatestPrelease.LatestPreRelease'], '')
|
|
@ -1,27 +1,21 @@
|
|||
# Go parameters
|
||||
GOCMD=go
|
||||
GOBUILD=$(GOCMD) build
|
||||
GOCLEAN=$(GOCMD) clean
|
||||
GOTEST=$(GOCMD) test
|
||||
GOGET=$(GOCMD) get
|
||||
ifeq ($(OS), Windows_NT)
|
||||
SHELL := pwsh.exe
|
||||
.SHELLFLAGS := -NoProfile -Command
|
||||
EXT = .exe
|
||||
endif
|
||||
|
||||
BINARY_NAME=cscli
|
||||
# names longer than 15 chars break 'pgrep'
|
||||
BINARY_NAME_COVER=$(BINARY_NAME).cover
|
||||
PREFIX?="/"
|
||||
GO = go
|
||||
GOBUILD = $(GO) build
|
||||
|
||||
BINARY_NAME = cscli$(EXT)
|
||||
PREFIX ?= "/"
|
||||
BIN_PREFIX = $(PREFIX)"/usr/local/bin/"
|
||||
|
||||
.PHONY: all
|
||||
all: clean build
|
||||
|
||||
build: clean
|
||||
@$(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME) -v
|
||||
|
||||
build-bincover: clean
|
||||
$(GOTEST) . -tags testrunmain -coverpkg=$(go list github.com/crowdsecurity/crowdsec/... | grep -v -e 'pkg/database' -e 'plugins/notifications' -e 'pkg/protobufs' -e 'pkg/cwversions' -e 'pkg/cstest' -e 'pkg/models') -covermode=atomic $(LD_OPTS) -c -o $(BINARY_NAME_COVER)
|
||||
|
||||
static: clean
|
||||
@$(GOBUILD) $(LD_OPTS_STATIC) -o $(BINARY_NAME) -v -a -tags netgo
|
||||
$(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME)
|
||||
|
||||
.PHONY: install
|
||||
install: install-conf install-bin
|
||||
|
@ -32,8 +26,8 @@ install-bin:
|
|||
@install -v -m 755 -D "$(BINARY_NAME)" "$(BIN_PREFIX)/$(BINARY_NAME)" || exit
|
||||
|
||||
uninstall:
|
||||
@$(RM) -r $(CSCLI_CONFIG)
|
||||
@$(RM) -r $(BIN_PREFIX)$(BINARY_NAME)
|
||||
@$(RM) $(CSCLI_CONFIG) $(WIN_IGNORE_ERR)
|
||||
@$(RM) $(BIN_PREFIX)$(BINARY_NAME) $(WIN_IGNORE_ERR)
|
||||
|
||||
clean:
|
||||
@$(RM) $(BINARY_NAME) $(BINARY_NAME_COVER)
|
||||
@$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR)
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
see doc in `doc/`
|
|
@ -4,229 +4,233 @@ import (
|
|||
"context"
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"text/template"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwversion"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/database"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/models"
|
||||
"github.com/fatih/color"
|
||||
"github.com/go-openapi/strfmt"
|
||||
"github.com/olekukonko/tablewriter"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
var printMachine bool
|
||||
var limit *int
|
||||
"github.com/crowdsecurity/go-cs-lib/version"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/database"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/models"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
)
|
||||
|
||||
func DecisionsFromAlert(alert *models.Alert) string {
|
||||
ret := ""
|
||||
var decMap = make(map[string]int)
|
||||
decMap := make(map[string]int)
|
||||
|
||||
for _, decision := range alert.Decisions {
|
||||
k := *decision.Type
|
||||
if *decision.Simulated {
|
||||
k = fmt.Sprintf("(simul)%s", k)
|
||||
}
|
||||
|
||||
v := decMap[k]
|
||||
decMap[k] = v + 1
|
||||
}
|
||||
|
||||
for k, v := range decMap {
|
||||
if len(ret) > 0 {
|
||||
ret += " "
|
||||
}
|
||||
|
||||
ret += fmt.Sprintf("%s:%d", k, v)
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
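DecisionsFromAlert collapses an alert's decisions into a space-separated "type:count" summary, prefixing simulated decisions with "(simul)". Below is a minimal example-test sketch, assuming it sits in the same package as DecisionsFromAlert; the sample alert data is invented purely for illustration:

// Hypothetical example test for DecisionsFromAlert (same package); the alert is fabricated.
func ExampleDecisionsFromAlert() {
	banType := "ban"
	notSimulated := false
	alert := &models.Alert{
		Decisions: []*models.Decision{
			{Type: &banType, Simulated: &notSimulated},
			{Type: &banType, Simulated: &notSimulated},
		},
	}
	fmt.Println(DecisionsFromAlert(alert))
	// Output: ban:2
}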
|
||||
func AlertsToTable(alerts *models.GetAlertsResponse, printMachine bool) error {
|
||||
|
||||
if csConfig.Cscli.Output == "raw" {
|
||||
func (cli *cliAlerts) alertsToTable(alerts *models.GetAlertsResponse, printMachine bool) error {
|
||||
switch cli.cfg().Cscli.Output {
|
||||
case "raw":
|
||||
csvwriter := csv.NewWriter(os.Stdout)
|
||||
header := []string{"id", "scope", "value", "reason", "country", "as", "decisions", "created_at"}
|
||||
|
||||
if printMachine {
|
||||
header = append(header, "machine")
|
||||
}
|
||||
err := csvwriter.Write(header)
|
||||
if err != nil {
|
||||
|
||||
if err := csvwriter.Write(header); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, alertItem := range *alerts {
|
||||
row := []string{
|
||||
fmt.Sprintf("%d", alertItem.ID),
|
||||
strconv.FormatInt(alertItem.ID, 10),
|
||||
*alertItem.Source.Scope,
|
||||
*alertItem.Source.Value,
|
||||
*alertItem.Scenario,
|
||||
alertItem.Source.Cn,
|
||||
alertItem.Source.AsNumber + " " + alertItem.Source.AsName,
|
||||
alertItem.Source.GetAsNumberName(),
|
||||
DecisionsFromAlert(alertItem),
|
||||
*alertItem.StartAt,
|
||||
}
|
||||
if printMachine {
|
||||
row = append(row, alertItem.MachineID)
|
||||
}
|
||||
err := csvwriter.Write(row)
|
||||
if err != nil {
|
||||
|
||||
if err := csvwriter.Write(row); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
csvwriter.Flush()
|
||||
} else if csConfig.Cscli.Output == "json" {
|
||||
x, _ := json.MarshalIndent(alerts, "", " ")
|
||||
fmt.Printf("%s", string(x))
|
||||
} else if csConfig.Cscli.Output == "human" {
|
||||
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
header := []string{"ID", "value", "reason", "country", "as", "decisions", "created_at"}
|
||||
if printMachine {
|
||||
header = append(header, "machine")
|
||||
case "json":
|
||||
if *alerts == nil {
|
||||
// avoid returning "null" in json
|
||||
// could be cleaner if we used slice of alerts directly
|
||||
fmt.Println("[]")
|
||||
return nil
|
||||
}
|
||||
table.SetHeader(header)
|
||||
|
||||
x, _ := json.MarshalIndent(alerts, "", " ")
|
||||
fmt.Print(string(x))
|
||||
case "human":
|
||||
if len(*alerts) == 0 {
|
||||
fmt.Println("No active alerts")
|
||||
return nil
|
||||
}
|
||||
for _, alertItem := range *alerts {
|
||||
|
||||
displayVal := *alertItem.Source.Scope
|
||||
if *alertItem.Source.Value != "" {
|
||||
displayVal += ":" + *alertItem.Source.Value
|
||||
}
|
||||
row := []string{
|
||||
strconv.Itoa(int(alertItem.ID)),
|
||||
displayVal,
|
||||
*alertItem.Scenario,
|
||||
alertItem.Source.Cn,
|
||||
alertItem.Source.AsNumber + " " + alertItem.Source.AsName,
|
||||
DecisionsFromAlert(alertItem),
|
||||
*alertItem.StartAt,
|
||||
}
|
||||
if printMachine {
|
||||
row = append(row, alertItem.MachineID)
|
||||
}
|
||||
table.Append(row)
|
||||
}
|
||||
table.Render() // Send output
|
||||
alertsTable(color.Output, alerts, printMachine)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func DisplayOneAlert(alert *models.Alert, withDetail bool) error {
|
||||
if csConfig.Cscli.Output == "human" {
|
||||
fmt.Printf("\n################################################################################################\n\n")
|
||||
scopeAndValue := *alert.Source.Scope
|
||||
if *alert.Source.Value != "" {
|
||||
scopeAndValue += ":" + *alert.Source.Value
|
||||
}
|
||||
fmt.Printf(" - ID : %d\n", alert.ID)
|
||||
fmt.Printf(" - Date : %s\n", alert.CreatedAt)
|
||||
fmt.Printf(" - Machine : %s\n", alert.MachineID)
|
||||
fmt.Printf(" - Simulation : %v\n", *alert.Simulated)
|
||||
fmt.Printf(" - Reason : %s\n", *alert.Scenario)
|
||||
fmt.Printf(" - Events Count : %d\n", *alert.EventsCount)
|
||||
fmt.Printf(" - Scope:Value: %s\n", scopeAndValue)
|
||||
fmt.Printf(" - Country : %s\n", alert.Source.Cn)
|
||||
fmt.Printf(" - AS : %s\n", alert.Source.AsName)
|
||||
fmt.Printf(" - Begin : %s\n", *alert.StartAt)
|
||||
fmt.Printf(" - End : %s\n\n", *alert.StopAt)
|
||||
foundActive := false
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
table.SetHeader([]string{"ID", "scope:value", "action", "expiration", "created_at"})
|
||||
for _, decision := range alert.Decisions {
|
||||
parsedDuration, err := time.ParseDuration(*decision.Duration)
|
||||
if err != nil {
|
||||
log.Errorf(err.Error())
|
||||
var alertTemplate = `
|
||||
################################################################################################
|
||||
|
||||
- ID : {{.ID}}
|
||||
- Date : {{.CreatedAt}}
|
||||
- Machine : {{.MachineID}}
|
||||
- Simulation : {{.Simulated}}
|
||||
- Reason : {{.Scenario}}
|
||||
- Events Count : {{.EventsCount}}
|
||||
- Scope:Value : {{.Source.Scope}}{{if .Source.Value}}:{{.Source.Value}}{{end}}
|
||||
- Country : {{.Source.Cn}}
|
||||
- AS : {{.Source.AsName}}
|
||||
- Begin : {{.StartAt}}
|
||||
- End : {{.StopAt}}
|
||||
- UUID : {{.UUID}}
|
||||
|
||||
`
|
||||
|
||||
func (cli *cliAlerts) displayOneAlert(alert *models.Alert, withDetail bool) error {
|
||||
tmpl, err := template.New("alert").Parse(alertTemplate)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = tmpl.Execute(os.Stdout, alert); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
alertDecisionsTable(color.Output, alert)
|
||||
|
||||
if len(alert.Meta) > 0 {
|
||||
fmt.Printf("\n - Context :\n")
|
||||
sort.Slice(alert.Meta, func(i, j int) bool {
|
||||
return alert.Meta[i].Key < alert.Meta[j].Key
|
||||
})
|
||||
|
||||
table := newTable(color.Output)
|
||||
table.SetRowLines(false)
|
||||
table.SetHeaders("Key", "Value")
|
||||
|
||||
for _, meta := range alert.Meta {
|
||||
var valSlice []string
|
||||
if err := json.Unmarshal([]byte(meta.Value), &valSlice); err != nil {
|
||||
return fmt.Errorf("unknown context value type '%s': %w", meta.Value, err)
|
||||
}
|
||||
expire := time.Now().UTC().Add(parsedDuration)
|
||||
if time.Now().UTC().After(expire) {
|
||||
continue
|
||||
|
||||
for _, value := range valSlice {
|
||||
table.AddRow(
|
||||
meta.Key,
|
||||
value,
|
||||
)
|
||||
}
|
||||
foundActive = true
|
||||
scopeAndValue := *decision.Scope
|
||||
if *decision.Value != "" {
|
||||
scopeAndValue += ":" + *decision.Value
|
||||
}
|
||||
table.Append([]string{
|
||||
strconv.Itoa(int(decision.ID)),
|
||||
scopeAndValue,
|
||||
*decision.Type,
|
||||
*decision.Duration,
|
||||
alert.CreatedAt,
|
||||
})
|
||||
}
|
||||
if foundActive {
|
||||
fmt.Printf(" - Active Decisions :\n")
|
||||
table.Render() // Send output
|
||||
}
|
||||
|
||||
if withDetail {
|
||||
fmt.Printf("\n - Events :\n")
|
||||
for _, event := range alert.Events {
|
||||
fmt.Printf("\n- Date: %s\n", *event.Timestamp)
|
||||
table = tablewriter.NewWriter(os.Stdout)
|
||||
table.SetHeader([]string{"Key", "Value"})
|
||||
sort.Slice(event.Meta, func(i, j int) bool {
|
||||
return event.Meta[i].Key < event.Meta[j].Key
|
||||
})
|
||||
for _, meta := range event.Meta {
|
||||
table.Append([]string{
|
||||
meta.Key,
|
||||
meta.Value,
|
||||
})
|
||||
}
|
||||
table.Render()
|
||||
}
|
||||
|
||||
table.Render() // Send output
|
||||
}
|
||||
if withDetail {
|
||||
fmt.Printf("\n - Events :\n")
|
||||
|
||||
for _, event := range alert.Events {
|
||||
alertEventTable(color.Output, event)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewAlertsCmd() *cobra.Command {
|
||||
/* ---- ALERTS COMMAND */
|
||||
var cmdAlerts = &cobra.Command{
|
||||
type cliAlerts struct {
|
||||
client *apiclient.ApiClient
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
func NewCLIAlerts(getconfig configGetter) *cliAlerts {
|
||||
return &cliAlerts{
|
||||
cfg: getconfig,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliAlerts) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "alerts [action]",
|
||||
Short: "Manage alerts",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
if err := csConfig.LoadAPIClient(); err != nil {
|
||||
log.Fatalf("loading api client: %s", err.Error())
|
||||
Aliases: []string{"alert"},
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
if err := cfg.LoadAPIClient(); err != nil {
|
||||
return fmt.Errorf("loading api client: %w", err)
|
||||
}
|
||||
if csConfig.API.Client == nil {
|
||||
log.Fatalln("There is no configuration on 'api_client:'")
|
||||
}
|
||||
if csConfig.API.Client.Credentials == nil {
|
||||
log.Fatalf("Please provide credentials for the API in '%s'", csConfig.API.Client.CredentialsFilePath)
|
||||
}
|
||||
apiURL, err := url.Parse(csConfig.API.Client.Credentials.URL)
|
||||
apiURL, err := url.Parse(cfg.API.Client.Credentials.URL)
|
||||
if err != nil {
|
||||
log.Fatalf("parsing api url: %s", apiURL)
|
||||
return fmt.Errorf("parsing api url %s: %w", apiURL, err)
|
||||
}
|
||||
Client, err = apiclient.NewClient(&apiclient.Config{
|
||||
MachineID: csConfig.API.Client.Credentials.Login,
|
||||
Password: strfmt.Password(csConfig.API.Client.Credentials.Password),
|
||||
UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()),
|
||||
|
||||
cli.client, err = apiclient.NewClient(&apiclient.Config{
|
||||
MachineID: cfg.API.Client.Credentials.Login,
|
||||
Password: strfmt.Password(cfg.API.Client.Credentials.Password),
|
||||
UserAgent: fmt.Sprintf("crowdsec/%s", version.String()),
|
||||
URL: apiURL,
|
||||
VersionPrefix: "v1",
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
log.Fatalf("new api client: %s", err.Error())
|
||||
return fmt.Errorf("new api client: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
var alertListFilter = apiclient.AlertsListOpts{
|
||||
cmd.AddCommand(cli.NewListCmd())
|
||||
cmd.AddCommand(cli.NewInspectCmd())
|
||||
cmd.AddCommand(cli.NewFlushCmd())
|
||||
cmd.AddCommand(cli.NewDeleteCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliAlerts) NewListCmd() *cobra.Command {
|
||||
alertListFilter := apiclient.AlertsListOpts{
|
||||
ScopeEquals: new(string),
|
||||
ValueEquals: new(string),
|
||||
ScenarioEquals: new(string),
|
||||
|
@ -235,25 +239,31 @@ func NewAlertsCmd() *cobra.Command {
|
|||
Since: new(string),
|
||||
Until: new(string),
|
||||
TypeEquals: new(string),
|
||||
IncludeCAPI: new(bool),
|
||||
OriginEquals: new(string),
|
||||
}
|
||||
limit = new(int)
|
||||
|
||||
limit := new(int)
|
||||
contained := new(bool)
|
||||
var cmdAlertsList = &cobra.Command{
|
||||
|
||||
var printMachine bool
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "list [filters]",
|
||||
Short: "List alerts",
|
||||
Example: `cscli alerts list
|
||||
cscli alerts list --ip 1.2.3.4
|
||||
cscli alerts list --range 1.2.3.0/24
|
||||
cscli alerts list --origin lists
|
||||
cscli alerts list -s crowdsecurity/ssh-bf
|
||||
cscli alerts list --type ban`,
|
||||
Long: `List alerts with optional filters`,
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
|
||||
RunE: func(cmd *cobra.Command, _ []string) error {
|
||||
if err := manageCliDecisionAlerts(alertListFilter.IPEquals, alertListFilter.RangeEquals,
|
||||
alertListFilter.ScopeEquals, alertListFilter.ValueEquals); err != nil {
|
||||
printHelp(cmd)
|
||||
log.Fatalf("%s", err)
|
||||
return err
|
||||
}
|
||||
if limit != nil {
|
||||
alertListFilter.Limit = limit
|
||||
|
@ -261,32 +271,33 @@ cscli alerts list --type ban`,
|
|||
|
||||
if *alertListFilter.Until == "" {
|
||||
alertListFilter.Until = nil
|
||||
} else {
|
||||
} else if strings.HasSuffix(*alertListFilter.Until, "d") {
|
||||
/*time.ParseDuration supports hours ('h') as its biggest unit, so let's make the user's life easier*/
|
||||
if strings.HasSuffix(*alertListFilter.Until, "d") {
|
||||
realDuration := strings.TrimSuffix(*alertListFilter.Until, "d")
|
||||
days, err := strconv.Atoi(realDuration)
|
||||
if err != nil {
|
||||
printHelp(cmd)
|
||||
log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *alertListFilter.Until)
|
||||
}
|
||||
*alertListFilter.Until = fmt.Sprintf("%d%s", days*24, "h")
|
||||
realDuration := strings.TrimSuffix(*alertListFilter.Until, "d")
|
||||
days, err := strconv.Atoi(realDuration)
|
||||
if err != nil {
|
||||
printHelp(cmd)
|
||||
return fmt.Errorf("can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *alertListFilter.Until)
|
||||
}
|
||||
*alertListFilter.Until = fmt.Sprintf("%d%s", days*24, "h")
|
||||
}
|
||||
if *alertListFilter.Since == "" {
|
||||
alertListFilter.Since = nil
|
||||
} else {
|
||||
} else if strings.HasSuffix(*alertListFilter.Since, "d") {
|
||||
/*time.ParseDuration supports hours ('h') as its biggest unit, so let's make the user's life easier*/
|
||||
if strings.HasSuffix(*alertListFilter.Since, "d") {
|
||||
realDuration := strings.TrimSuffix(*alertListFilter.Since, "d")
|
||||
days, err := strconv.Atoi(realDuration)
|
||||
if err != nil {
|
||||
printHelp(cmd)
|
||||
log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *alertListFilter.Since)
|
||||
}
|
||||
*alertListFilter.Since = fmt.Sprintf("%d%s", days*24, "h")
|
||||
realDuration := strings.TrimSuffix(*alertListFilter.Since, "d")
|
||||
days, err := strconv.Atoi(realDuration)
|
||||
if err != nil {
|
||||
printHelp(cmd)
|
||||
return fmt.Errorf("can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *alertListFilter.Since)
|
||||
}
|
||||
*alertListFilter.Since = fmt.Sprintf("%d%s", days*24, "h")
|
||||
}
|
||||
|
||||
if *alertListFilter.IncludeCAPI {
|
||||
*alertListFilter.Limit = 0
|
||||
}
|
||||
|
||||
if *alertListFilter.TypeEquals == "" {
|
||||
alertListFilter.TypeEquals = nil
|
||||
}
|
||||
|
@ -305,44 +316,65 @@ cscli alerts list --type ban`,
|
|||
if *alertListFilter.RangeEquals == "" {
|
||||
alertListFilter.RangeEquals = nil
|
||||
}
|
||||
|
||||
if *alertListFilter.OriginEquals == "" {
|
||||
alertListFilter.OriginEquals = nil
|
||||
}
|
||||
|
||||
if contained != nil && *contained {
|
||||
alertListFilter.Contains = new(bool)
|
||||
}
|
||||
alerts, _, err := Client.Alerts.List(context.Background(), alertListFilter)
|
||||
|
||||
alerts, _, err := cli.client.Alerts.List(context.Background(), alertListFilter)
|
||||
if err != nil {
|
||||
log.Fatalf("Unable to list alerts : %v", err.Error())
|
||||
return fmt.Errorf("unable to list alerts: %w", err)
|
||||
}
|
||||
|
||||
err = AlertsToTable(alerts, printMachine)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to list alerts : %v", err.Error())
|
||||
if err = cli.alertsToTable(alerts, printMachine); err != nil {
|
||||
return fmt.Errorf("unable to list alerts: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdAlertsList.Flags().SortFlags = false
|
||||
cmdAlertsList.Flags().StringVar(alertListFilter.Until, "until", "", "restrict to alerts older than until (ie. 4h, 30d)")
|
||||
cmdAlertsList.Flags().StringVar(alertListFilter.Since, "since", "", "restrict to alerts newer than since (ie. 4h, 30d)")
|
||||
cmdAlertsList.Flags().StringVarP(alertListFilter.IPEquals, "ip", "i", "", "restrict to alerts from this source ip (shorthand for --scope ip --value <IP>)")
|
||||
cmdAlertsList.Flags().StringVarP(alertListFilter.ScenarioEquals, "scenario", "s", "", "the scenario (ie. crowdsecurity/ssh-bf)")
|
||||
cmdAlertsList.Flags().StringVarP(alertListFilter.RangeEquals, "range", "r", "", "restrict to alerts from this range (shorthand for --scope range --value <RANGE/X>)")
|
||||
cmdAlertsList.Flags().StringVar(alertListFilter.TypeEquals, "type", "", "restrict to alerts with given decision type (ie. ban, captcha)")
|
||||
cmdAlertsList.Flags().StringVar(alertListFilter.ScopeEquals, "scope", "", "restrict to alerts of this scope (ie. ip,range)")
|
||||
cmdAlertsList.Flags().StringVarP(alertListFilter.ValueEquals, "value", "v", "", "the value to match for in the specified scope")
|
||||
cmdAlertsList.Flags().BoolVar(contained, "contained", false, "query decisions contained by range")
|
||||
cmdAlertsList.Flags().BoolVarP(&printMachine, "machine", "m", false, "print machines that sent alerts")
|
||||
cmdAlertsList.Flags().IntVarP(limit, "limit", "l", 50, "limit size of alerts list table (0 to view all alerts)")
|
||||
cmdAlerts.AddCommand(cmdAlertsList)
|
||||
|
||||
var ActiveDecision *bool
|
||||
var AlertDeleteAll bool
|
||||
var alertDeleteFilter = apiclient.AlertsDeleteOpts{
|
||||
flags := cmd.Flags()
|
||||
flags.SortFlags = false
|
||||
flags.BoolVarP(alertListFilter.IncludeCAPI, "all", "a", false, "Include decisions from Central API")
|
||||
flags.StringVar(alertListFilter.Until, "until", "", "restrict to alerts older than until (ie. 4h, 30d)")
|
||||
flags.StringVar(alertListFilter.Since, "since", "", "restrict to alerts newer than since (ie. 4h, 30d)")
|
||||
flags.StringVarP(alertListFilter.IPEquals, "ip", "i", "", "restrict to alerts from this source ip (shorthand for --scope ip --value <IP>)")
|
||||
flags.StringVarP(alertListFilter.ScenarioEquals, "scenario", "s", "", "the scenario (ie. crowdsecurity/ssh-bf)")
|
||||
flags.StringVarP(alertListFilter.RangeEquals, "range", "r", "", "restrict to alerts from this range (shorthand for --scope range --value <RANGE/X>)")
|
||||
flags.StringVar(alertListFilter.TypeEquals, "type", "", "restrict to alerts with given decision type (ie. ban, captcha)")
|
||||
flags.StringVar(alertListFilter.ScopeEquals, "scope", "", "restrict to alerts of this scope (ie. ip,range)")
|
||||
flags.StringVarP(alertListFilter.ValueEquals, "value", "v", "", "the value to match for in the specified scope")
|
||||
flags.StringVar(alertListFilter.OriginEquals, "origin", "", fmt.Sprintf("the value to match for the specified origin (%s ...)", strings.Join(types.GetOrigins(), ",")))
|
||||
flags.BoolVar(contained, "contained", false, "query decisions contained by range")
|
||||
flags.BoolVarP(&printMachine, "machine", "m", false, "print machines that sent alerts")
|
||||
flags.IntVarP(limit, "limit", "l", 50, "limit size of alerts list table (0 to view all alerts)")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliAlerts) NewDeleteCmd() *cobra.Command {
|
||||
var (
|
||||
ActiveDecision *bool
|
||||
AlertDeleteAll bool
|
||||
delAlertByID string
|
||||
)
|
||||
|
||||
alertDeleteFilter := apiclient.AlertsDeleteOpts{
|
||||
ScopeEquals: new(string),
|
||||
ValueEquals: new(string),
|
||||
ScenarioEquals: new(string),
|
||||
IPEquals: new(string),
|
||||
RangeEquals: new(string),
|
||||
}
|
||||
var cmdAlertsDelete = &cobra.Command{
|
||||
|
||||
contained := new(bool)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "delete [filters] [--all]",
|
||||
Short: `Delete alerts
|
||||
/!\ This command can be use only on the same machine than the local API.`,
|
||||
|
@ -352,25 +384,27 @@ cscli alerts delete -s crowdsecurity/ssh-bf"`,
|
|||
DisableAutoGenTag: true,
|
||||
Aliases: []string{"remove"},
|
||||
Args: cobra.ExactArgs(0),
|
||||
PreRun: func(cmd *cobra.Command, args []string) {
|
||||
PreRunE: func(cmd *cobra.Command, _ []string) error {
|
||||
if AlertDeleteAll {
|
||||
return
|
||||
return nil
|
||||
}
|
||||
if *alertDeleteFilter.ScopeEquals == "" && *alertDeleteFilter.ValueEquals == "" &&
|
||||
*alertDeleteFilter.ScenarioEquals == "" && *alertDeleteFilter.IPEquals == "" &&
|
||||
*alertDeleteFilter.RangeEquals == "" {
|
||||
*alertDeleteFilter.RangeEquals == "" && delAlertByID == "" {
|
||||
_ = cmd.Usage()
|
||||
log.Fatalln("At least one filter or --all must be specified")
|
||||
return errors.New("at least one filter or --all must be specified")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(cmd *cobra.Command, _ []string) error {
|
||||
var err error
|
||||
|
||||
if !AlertDeleteAll {
|
||||
if err := manageCliDecisionAlerts(alertDeleteFilter.IPEquals, alertDeleteFilter.RangeEquals,
|
||||
if err = manageCliDecisionAlerts(alertDeleteFilter.IPEquals, alertDeleteFilter.RangeEquals,
|
||||
alertDeleteFilter.ScopeEquals, alertDeleteFilter.ValueEquals); err != nil {
|
||||
printHelp(cmd)
|
||||
log.Fatalf("%s", err)
|
||||
return err
|
||||
}
|
||||
if ActiveDecision != nil {
|
||||
alertDeleteFilter.ActiveDecisionEquals = ActiveDecision
|
||||
|
@ -394,110 +428,133 @@ cscli alerts delete -s crowdsecurity/ssh-bf"`,
|
|||
if contained != nil && *contained {
|
||||
alertDeleteFilter.Contains = new(bool)
|
||||
}
|
||||
limit := 0
|
||||
alertDeleteFilter.Limit = &limit
|
||||
} else {
|
||||
limit := 0
|
||||
alertDeleteFilter = apiclient.AlertsDeleteOpts{Limit: &limit}
|
||||
}
|
||||
alerts, _, err := Client.Alerts.Delete(context.Background(), alertDeleteFilter)
|
||||
if err != nil {
|
||||
log.Fatalf("Unable to delete alerts : %v", err.Error())
|
||||
|
||||
var alerts *models.DeleteAlertsResponse
|
||||
if delAlertByID == "" {
|
||||
alerts, _, err = cli.client.Alerts.Delete(context.Background(), alertDeleteFilter)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to delete alerts: %w", err)
|
||||
}
|
||||
} else {
|
||||
alerts, _, err = cli.client.Alerts.DeleteOne(context.Background(), delAlertByID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to delete alert: %w", err)
|
||||
}
|
||||
}
|
||||
log.Infof("%s alert(s) deleted", alerts.NbDeleted)
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdAlertsDelete.Flags().SortFlags = false
|
||||
cmdAlertsDelete.Flags().StringVar(alertDeleteFilter.ScopeEquals, "scope", "", "the scope (ie. ip,range)")
|
||||
cmdAlertsDelete.Flags().StringVarP(alertDeleteFilter.ValueEquals, "value", "v", "", "the value to match for in the specified scope")
|
||||
cmdAlertsDelete.Flags().StringVarP(alertDeleteFilter.ScenarioEquals, "scenario", "s", "", "the scenario (ie. crowdsecurity/ssh-bf)")
|
||||
cmdAlertsDelete.Flags().StringVarP(alertDeleteFilter.IPEquals, "ip", "i", "", "Source ip (shorthand for --scope ip --value <IP>)")
|
||||
cmdAlertsDelete.Flags().StringVarP(alertDeleteFilter.RangeEquals, "range", "r", "", "Range source ip (shorthand for --scope range --value <RANGE>)")
|
||||
cmdAlertsDelete.Flags().BoolVarP(&AlertDeleteAll, "all", "a", false, "delete all alerts")
|
||||
cmdAlertsDelete.Flags().BoolVar(contained, "contained", false, "query decisions contained by range")
|
||||
|
||||
cmdAlerts.AddCommand(cmdAlertsDelete)
|
||||
flags := cmd.Flags()
|
||||
flags.SortFlags = false
|
||||
flags.StringVar(alertDeleteFilter.ScopeEquals, "scope", "", "the scope (ie. ip,range)")
|
||||
flags.StringVarP(alertDeleteFilter.ValueEquals, "value", "v", "", "the value to match for in the specified scope")
|
||||
flags.StringVarP(alertDeleteFilter.ScenarioEquals, "scenario", "s", "", "the scenario (ie. crowdsecurity/ssh-bf)")
|
||||
flags.StringVarP(alertDeleteFilter.IPEquals, "ip", "i", "", "Source ip (shorthand for --scope ip --value <IP>)")
|
||||
flags.StringVarP(alertDeleteFilter.RangeEquals, "range", "r", "", "Range source ip (shorthand for --scope range --value <RANGE>)")
|
||||
flags.StringVar(&delAlertByID, "id", "", "alert ID")
|
||||
flags.BoolVarP(&AlertDeleteAll, "all", "a", false, "delete all alerts")
|
||||
flags.BoolVar(contained, "contained", false, "query decisions contained by range")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliAlerts) NewInspectCmd() *cobra.Command {
|
||||
var details bool
|
||||
var cmdAlertsInspect = &cobra.Command{
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: `inspect "alert_id"`,
|
||||
Short: `Show info about an alert`,
|
||||
Example: `cscli alerts inspect 123`,
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
cfg := cli.cfg()
|
||||
if len(args) == 0 {
|
||||
printHelp(cmd)
|
||||
return
|
||||
return errors.New("missing alert_id")
|
||||
}
|
||||
for _, alertID := range args {
|
||||
id, err := strconv.Atoi(alertID)
|
||||
if err != nil {
|
||||
log.Fatalf("bad alert id %s", alertID)
|
||||
continue
|
||||
return fmt.Errorf("bad alert id %s", alertID)
|
||||
}
|
||||
alert, _, err := Client.Alerts.GetByID(context.Background(), id)
|
||||
alert, _, err := cli.client.Alerts.GetByID(context.Background(), id)
|
||||
if err != nil {
|
||||
log.Fatalf("can't find alert with id %s: %s", alertID, err)
|
||||
return fmt.Errorf("can't find alert with id %s: %w", alertID, err)
|
||||
}
|
||||
switch csConfig.Cscli.Output {
|
||||
switch cfg.Cscli.Output {
|
||||
case "human":
|
||||
if err := DisplayOneAlert(alert, details); err != nil {
|
||||
if err := cli.displayOneAlert(alert, details); err != nil {
|
||||
continue
|
||||
}
|
||||
case "json":
|
||||
data, err := json.MarshalIndent(alert, "", " ")
|
||||
if err != nil {
|
||||
log.Fatalf("unable to marshal alert with id %s: %s", alertID, err)
|
||||
return fmt.Errorf("unable to marshal alert with id %s: %w", alertID, err)
|
||||
}
|
||||
fmt.Printf("%s\n", string(data))
|
||||
case "raw":
|
||||
data, err := yaml.Marshal(alert)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to marshal alert with id %s: %s", alertID, err)
|
||||
return fmt.Errorf("unable to marshal alert with id %s: %w", alertID, err)
|
||||
}
|
||||
fmt.Printf("%s\n", string(data))
|
||||
fmt.Println(string(data))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdAlertsInspect.Flags().SortFlags = false
|
||||
cmdAlertsInspect.Flags().BoolVarP(&details, "details", "d", false, "show alerts with events")
|
||||
|
||||
cmdAlerts.AddCommand(cmdAlertsInspect)
|
||||
cmd.Flags().SortFlags = false
|
||||
cmd.Flags().BoolVarP(&details, "details", "d", false, "show alerts with events")
|
||||
|
||||
var maxItems int
|
||||
var maxAge string
|
||||
var cmdAlertsFlush = &cobra.Command{
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliAlerts) NewFlushCmd() *cobra.Command {
|
||||
var (
|
||||
maxItems int
|
||||
maxAge string
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: `flush`,
|
||||
Short: `Flush alerts
|
||||
/!\ This command can be used only on the same machine than the local API`,
|
||||
Example: `cscli alerts flush --max-items 1000 --max-age 7d`,
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
if err := csConfig.LoadAPIServer(); err != nil || csConfig.DisableAPI {
|
||||
log.Fatal("Local API is disabled, please run this command on the local API machine")
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
if err := require.LAPI(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := csConfig.LoadDBConfig(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
dbClient, err = database.NewClient(csConfig.DbConfig)
|
||||
db, err := database.NewClient(cfg.DbConfig)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to create new database client: %s", err)
|
||||
return fmt.Errorf("unable to create new database client: %w", err)
|
||||
}
|
||||
log.Info("Flushing alerts. !! This may take a long time !!")
|
||||
err = dbClient.FlushAlerts(maxAge, maxItems)
|
||||
err = db.FlushAlerts(maxAge, maxItems)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to flush alerts: %s", err)
|
||||
return fmt.Errorf("unable to flush alerts: %w", err)
|
||||
}
|
||||
log.Info("Alerts flushed")
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmdAlertsFlush.Flags().SortFlags = false
|
||||
cmdAlertsFlush.Flags().IntVar(&maxItems, "max-items", 5000, "Maximum number of alert items to keep in the database")
|
||||
cmdAlertsFlush.Flags().StringVar(&maxAge, "max-age", "7d", "Maximum age of alert items to keep in the database")
|
||||
cmd.Flags().SortFlags = false
|
||||
cmd.Flags().IntVar(&maxItems, "max-items", 5000, "Maximum number of alert items to keep in the database")
|
||||
cmd.Flags().StringVar(&maxAge, "max-age", "7d", "Maximum age of alert items to keep in the database")
|
||||
|
||||
cmdAlerts.AddCommand(cmdAlertsFlush)
|
||||
|
||||
return cmdAlerts
|
||||
return cmd
|
||||
}
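Note on the --since/--until handling in the list command above: Go's time.ParseDuration has no day unit, so a trailing "d" is rewritten as hours before parsing. A self-contained sketch of that conversion, where the helper name normalizeDays is illustrative and not part of the diff:

package main

import (
	"fmt"
	"strconv"
	"strings"
	"time"
)

// normalizeDays rewrites "7d" as "168h" so time.ParseDuration accepts it;
// any other value is returned unchanged.
func normalizeDays(d string) (string, error) {
	if !strings.HasSuffix(d, "d") {
		return d, nil
	}
	days, err := strconv.Atoi(strings.TrimSuffix(d, "d"))
	if err != nil {
		return "", fmt.Errorf("can't parse duration %s, valid durations format: 1d, 4h, 4h15m", d)
	}
	return fmt.Sprintf("%dh", days*24), nil
}

func main() {
	s, _ := normalizeDays("7d")
	dur, _ := time.ParseDuration(s)
	fmt.Println(s, dur) // 168h 168h0m0s
}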
cmd/crowdsec-cli/alerts_table.go (new file, 102 lines)
@ -0,0 +1,102 @@
package main

import (
	"fmt"
	"io"
	"sort"
	"strconv"
	"time"

	log "github.com/sirupsen/logrus"

	"github.com/crowdsecurity/crowdsec/pkg/models"
)

func alertsTable(out io.Writer, alerts *models.GetAlertsResponse, printMachine bool) {
	t := newTable(out)
	t.SetRowLines(false)
	header := []string{"ID", "value", "reason", "country", "as", "decisions", "created_at"}
	if printMachine {
		header = append(header, "machine")
	}
	t.SetHeaders(header...)

	for _, alertItem := range *alerts {
		displayVal := *alertItem.Source.Scope
		if len(alertItem.Decisions) > 1 {
			displayVal = fmt.Sprintf("%s (%d %ss)", *alertItem.Source.Scope, len(alertItem.Decisions), *alertItem.Decisions[0].Scope)
		} else if *alertItem.Source.Value != "" {
			displayVal += ":" + *alertItem.Source.Value
		}

		row := []string{
			strconv.Itoa(int(alertItem.ID)),
			displayVal,
			*alertItem.Scenario,
			alertItem.Source.Cn,
			alertItem.Source.GetAsNumberName(),
			DecisionsFromAlert(alertItem),
			*alertItem.StartAt,
		}

		if printMachine {
			row = append(row, alertItem.MachineID)
		}

		t.AddRow(row...)
	}

	t.Render()
}

func alertDecisionsTable(out io.Writer, alert *models.Alert) {
	foundActive := false
	t := newTable(out)
	t.SetRowLines(false)
	t.SetHeaders("ID", "scope:value", "action", "expiration", "created_at")
	for _, decision := range alert.Decisions {
		parsedDuration, err := time.ParseDuration(*decision.Duration)
		if err != nil {
			log.Error(err)
		}
		expire := time.Now().UTC().Add(parsedDuration)
		if time.Now().UTC().After(expire) {
			continue
		}
		foundActive = true
		scopeAndValue := *decision.Scope
		if *decision.Value != "" {
			scopeAndValue += ":" + *decision.Value
		}
		t.AddRow(
			strconv.Itoa(int(decision.ID)),
			scopeAndValue,
			*decision.Type,
			*decision.Duration,
			alert.CreatedAt,
		)
	}
	if foundActive {
		fmt.Printf(" - Active Decisions :\n")
		t.Render() // Send output
	}
}

func alertEventTable(out io.Writer, event *models.Event) {
	fmt.Fprintf(out, "\n- Date: %s\n", *event.Timestamp)

	t := newTable(out)
	t.SetHeaders("Key", "Value")
	sort.Slice(event.Meta, func(i, j int) bool {
		return event.Meta[i].Key < event.Meta[j].Key
	})

	for _, meta := range event.Meta {
		t.AddRow(
			meta.Key,
			meta.Value,
		)
	}

	t.Render() // Send output
}
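alertDecisionsTable above hides decisions whose expiry (now plus the Duration field) is already in the past, that is, whose remaining duration has gone negative. A minimal standalone version of that check, assuming the Duration string carries the time left on the decision:

package main

import (
	"fmt"
	"time"
)

// isExpired mirrors the check above: the decision expires at now+remaining,
// so it is considered expired once the remaining duration is negative.
func isExpired(remaining string) (bool, error) {
	d, err := time.ParseDuration(remaining)
	if err != nil {
		return false, err
	}
	expire := time.Now().UTC().Add(d)
	return time.Now().UTC().After(expire), nil
}

func main() {
	fmt.Println(isExpired("3h59m")) // false <nil>
	fmt.Println(isExpired("-12m"))  // true <nil>
}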
@ -3,172 +3,318 @@ package main
|
|||
import (
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
middlewares "github.com/crowdsecurity/crowdsec/pkg/apiserver/middlewares/v1"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/database"
|
||||
"github.com/enescakir/emoji"
|
||||
"github.com/olekukonko/tablewriter"
|
||||
"github.com/AlecAivazis/survey/v2"
|
||||
"github.com/fatih/color"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
middlewares "github.com/crowdsecurity/crowdsec/pkg/apiserver/middlewares/v1"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/database"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
)
|
||||
|
||||
var keyIP string
|
||||
var keyLength int
|
||||
var key string
|
||||
func askYesNo(message string, defaultAnswer bool) (bool, error) {
|
||||
var answer bool
|
||||
|
||||
func NewBouncersCmd() *cobra.Command {
|
||||
/* ---- DECISIONS COMMAND */
|
||||
var cmdBouncers = &cobra.Command{
|
||||
prompt := &survey.Confirm{
|
||||
Message: message,
|
||||
Default: defaultAnswer,
|
||||
}
|
||||
|
||||
if err := survey.AskOne(prompt, &answer); err != nil {
|
||||
return defaultAnswer, err
|
||||
}
|
||||
|
||||
return answer, nil
|
||||
}
|
||||
|
||||
type cliBouncers struct {
|
||||
db *database.Client
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
func NewCLIBouncers(cfg configGetter) *cliBouncers {
|
||||
return &cliBouncers{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliBouncers) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "bouncers [action]",
|
||||
Short: "Manage bouncers [requires local API]",
|
||||
Long: `To list/add/delete bouncers.
|
||||
Long: `To list/add/delete/prune bouncers.
|
||||
Note: This command requires database direct access, so is intended to be run on Local API/master.
|
||||
`,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
Aliases: []string{"bouncer"},
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
var err error
|
||||
if err := csConfig.LoadAPIServer(); err != nil || csConfig.DisableAPI {
|
||||
log.Fatal("Local API is disabled, please run this command on the local API machine")
|
||||
|
||||
cfg := cli.cfg()
|
||||
|
||||
if err = require.LAPI(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := csConfig.LoadDBConfig(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
dbClient, err = database.NewClient(csConfig.DbConfig)
|
||||
|
||||
cli.db, err = database.NewClient(cfg.DbConfig)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to create new database client: %s", err)
|
||||
return fmt.Errorf("can't connect to the database: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
var cmdBouncersList = &cobra.Command{
|
||||
cmd.AddCommand(cli.newListCmd())
|
||||
cmd.AddCommand(cli.newAddCmd())
|
||||
cmd.AddCommand(cli.newDeleteCmd())
|
||||
cmd.AddCommand(cli.newPruneCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliBouncers) list() error {
|
||||
out := color.Output
|
||||
|
||||
bouncers, err := cli.db.ListBouncers()
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to list bouncers: %w", err)
|
||||
}
|
||||
|
||||
switch cli.cfg().Cscli.Output {
|
||||
case "human":
|
||||
getBouncersTable(out, bouncers)
|
||||
case "json":
|
||||
enc := json.NewEncoder(out)
|
||||
enc.SetIndent("", " ")
|
||||
|
||||
if err := enc.Encode(bouncers); err != nil {
|
||||
return fmt.Errorf("failed to marshal: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
case "raw":
|
||||
csvwriter := csv.NewWriter(out)
|
||||
|
||||
if err := csvwriter.Write([]string{"name", "ip", "revoked", "last_pull", "type", "version", "auth_type"}); err != nil {
|
||||
return fmt.Errorf("failed to write raw header: %w", err)
|
||||
}
|
||||
|
||||
for _, b := range bouncers {
|
||||
valid := "validated"
|
||||
if b.Revoked {
|
||||
valid = "pending"
|
||||
}
|
||||
|
||||
if err := csvwriter.Write([]string{b.Name, b.IPAddress, valid, b.LastPull.Format(time.RFC3339), b.Type, b.Version, b.AuthType}); err != nil {
|
||||
return fmt.Errorf("failed to write raw: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
csvwriter.Flush()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliBouncers) newListCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "List bouncers",
|
||||
Long: `List bouncers`,
|
||||
Short: "list all bouncers within the database",
|
||||
Example: `cscli bouncers list`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, arg []string) {
|
||||
blockers, err := dbClient.ListBouncers()
|
||||
if err != nil {
|
||||
log.Errorf("unable to list blockers: %s", err)
|
||||
}
|
||||
if csConfig.Cscli.Output == "human" {
|
||||
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
table.SetCenterSeparator("")
|
||||
table.SetColumnSeparator("")
|
||||
|
||||
table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetHeader([]string{"Name", "IP Address", "Valid", "Last API pull", "Type", "Version"})
|
||||
for _, b := range blockers {
|
||||
var revoked string
|
||||
if !b.Revoked {
|
||||
revoked = emoji.CheckMark.String()
|
||||
} else {
|
||||
revoked = emoji.Prohibited.String()
|
||||
}
|
||||
table.Append([]string{b.Name, b.IPAddress, revoked, b.LastPull.Format(time.RFC3339), b.Type, b.Version})
|
||||
}
|
||||
table.Render()
|
||||
} else if csConfig.Cscli.Output == "json" {
|
||||
x, err := json.MarshalIndent(blockers, "", " ")
|
||||
if err != nil {
|
||||
log.Fatalf("failed to unmarshal")
|
||||
}
|
||||
fmt.Printf("%s", string(x))
|
||||
} else if csConfig.Cscli.Output == "raw" {
|
||||
csvwriter := csv.NewWriter(os.Stdout)
|
||||
err := csvwriter.Write([]string{"name", "ip", "revoked", "last_pull", "type", "version"})
|
||||
if err != nil {
|
||||
log.Fatalf("failed to write raw header: %s", err)
|
||||
}
|
||||
for _, b := range blockers {
|
||||
var revoked string
|
||||
if !b.Revoked {
|
||||
revoked = "validated"
|
||||
} else {
|
||||
revoked = "pending"
|
||||
}
|
||||
err := csvwriter.Write([]string{b.Name, b.IPAddress, revoked, b.LastPull.Format(time.RFC3339), b.Type, b.Version})
|
||||
if err != nil {
|
||||
log.Fatalf("failed to write raw: %s", err)
|
||||
}
|
||||
}
|
||||
csvwriter.Flush()
|
||||
}
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.list()
|
||||
},
|
||||
}
|
||||
cmdBouncers.AddCommand(cmdBouncersList)
|
||||
|
||||
var cmdBouncersAdd = &cobra.Command{
|
||||
Use: "add MyBouncerName [--length 16]",
|
||||
Short: "add bouncer",
|
||||
Long: `add bouncer`,
|
||||
Example: fmt.Sprintf(`cscli bouncers add MyBouncerName
|
||||
cscli bouncers add MyBouncerName -l 24
|
||||
cscli bouncers add MyBouncerName -k %s`, generatePassword(32)),
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliBouncers) add(bouncerName string, key string) error {
|
||||
var err error
|
||||
|
||||
keyLength := 32
|
||||
|
||||
if key == "" {
|
||||
key, err = middlewares.GenerateAPIKey(keyLength)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to generate api key: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
_, err = cli.db.CreateBouncer(bouncerName, "", middlewares.HashSHA512(key), types.ApiKeyAuthType)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to create bouncer: %w", err)
|
||||
}
|
||||
|
||||
switch cli.cfg().Cscli.Output {
|
||||
case "human":
|
||||
fmt.Printf("API key for '%s':\n\n", bouncerName)
|
||||
fmt.Printf(" %s\n\n", key)
|
||||
fmt.Print("Please keep this key since you will not be able to retrieve it!\n")
|
||||
case "raw":
|
||||
fmt.Print(key)
|
||||
case "json":
|
||||
j, err := json.Marshal(key)
|
||||
if err != nil {
|
||||
return errors.New("unable to marshal api key")
|
||||
}
|
||||
|
||||
fmt.Print(string(j))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliBouncers) newAddCmd() *cobra.Command {
|
||||
var key string
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "add MyBouncerName",
|
||||
Short: "add a single bouncer to the database",
|
||||
Example: `cscli bouncers add MyBouncerName
|
||||
cscli bouncers add MyBouncerName --key <random-key>`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, arg []string) {
|
||||
keyName := arg[0]
|
||||
var apiKey string
|
||||
var err error
|
||||
if keyName == "" {
|
||||
log.Fatalf("Please provide a name for the api key")
|
||||
}
|
||||
apiKey = key
|
||||
if key == "" {
|
||||
apiKey, err = middlewares.GenerateAPIKey(keyLength)
|
||||
}
|
||||
if err != nil {
|
||||
log.Fatalf("unable to generate api key: %s", err)
|
||||
}
|
||||
err = dbClient.CreateBouncer(keyName, keyIP, middlewares.HashSHA512(apiKey))
|
||||
if err != nil {
|
||||
log.Fatalf("unable to create bouncer: %s", err)
|
||||
}
|
||||
|
||||
if csConfig.Cscli.Output == "human" {
|
||||
fmt.Printf("Api key for '%s':\n\n", keyName)
|
||||
fmt.Printf(" %s\n\n", apiKey)
|
||||
fmt.Print("Please keep this key since you will not be able to retrieve it!\n")
|
||||
} else if csConfig.Cscli.Output == "raw" {
|
||||
fmt.Printf("%s", apiKey)
|
||||
} else if csConfig.Cscli.Output == "json" {
|
||||
j, err := json.Marshal(apiKey)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to marshal api key")
|
||||
}
|
||||
fmt.Printf("%s", string(j))
|
||||
}
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
return cli.add(args[0], key)
|
||||
},
|
||||
}
|
||||
cmdBouncersAdd.Flags().IntVarP(&keyLength, "length", "l", 16, "length of the api key")
|
||||
cmdBouncersAdd.Flags().StringVarP(&key, "key", "k", "", "api key for the bouncer")
|
||||
cmdBouncers.AddCommand(cmdBouncersAdd)
|
||||
|
||||
var cmdBouncersDelete = &cobra.Command{
|
||||
flags := cmd.Flags()
|
||||
flags.StringP("length", "l", "", "length of the api key")
|
||||
_ = flags.MarkDeprecated("length", "use --key instead")
|
||||
flags.StringVarP(&key, "key", "k", "", "api key for the bouncer")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliBouncers) deleteValid(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||
bouncers, err := cli.db.ListBouncers()
|
||||
if err != nil {
|
||||
cobra.CompError("unable to list bouncers " + err.Error())
|
||||
}
|
||||
|
||||
ret := []string{}
|
||||
|
||||
for _, bouncer := range bouncers {
|
||||
if strings.Contains(bouncer.Name, toComplete) && !slices.Contains(args, bouncer.Name) {
|
||||
ret = append(ret, bouncer.Name)
|
||||
}
|
||||
}
|
||||
|
||||
return ret, cobra.ShellCompDirectiveNoFileComp
|
||||
}
|
||||
|
||||
func (cli *cliBouncers) delete(bouncers []string) error {
|
||||
for _, bouncerID := range bouncers {
|
||||
err := cli.db.DeleteBouncer(bouncerID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to delete bouncer '%s': %w", bouncerID, err)
|
||||
}
|
||||
|
||||
log.Infof("bouncer '%s' deleted successfully", bouncerID)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliBouncers) newDeleteCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "delete MyBouncerName",
|
||||
Short: "delete bouncer",
|
||||
Short: "delete bouncer(s) from the database",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
Aliases: []string{"remove"},
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
for _, bouncerID := range args {
|
||||
err := dbClient.DeleteBouncer(bouncerID)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to delete bouncer: %s", err)
|
||||
}
|
||||
log.Infof("bouncer '%s' deleted successfully", bouncerID)
|
||||
}
|
||||
ValidArgsFunction: cli.deleteValid,
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
return cli.delete(args)
|
||||
},
|
||||
}
|
||||
cmdBouncers.AddCommand(cmdBouncersDelete)
|
||||
return cmdBouncers
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliBouncers) prune(duration time.Duration, force bool) error {
|
||||
if duration < 2*time.Minute {
|
||||
if yes, err := askYesNo(
|
||||
"The duration you provided is less than 2 minutes. " +
|
||||
"This may remove active bouncers. Continue?", false); err != nil {
|
||||
return err
|
||||
} else if !yes {
|
||||
fmt.Println("User aborted prune. No changes were made.")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
bouncers, err := cli.db.QueryBouncersLastPulltimeLT(time.Now().UTC().Add(-duration))
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to query bouncers: %w", err)
|
||||
}
|
||||
|
||||
if len(bouncers) == 0 {
|
||||
fmt.Println("No bouncers to prune.")
|
||||
return nil
|
||||
}
|
||||
|
||||
getBouncersTable(color.Output, bouncers)
|
||||
|
||||
if !force {
|
||||
if yes, err := askYesNo(
|
||||
"You are about to PERMANENTLY remove the above bouncers from the database. " +
|
||||
"These will NOT be recoverable. Continue?", false); err != nil {
|
||||
return err
|
||||
} else if !yes {
|
||||
fmt.Println("User aborted prune. No changes were made.")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
deleted, err := cli.db.BulkDeleteBouncers(bouncers)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to prune bouncers: %w", err)
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "Successfully deleted %d bouncers\n", deleted)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliBouncers) newPruneCmd() *cobra.Command {
|
||||
var (
|
||||
duration time.Duration
|
||||
force bool
|
||||
)
|
||||
|
||||
const defaultDuration = 60 * time.Minute
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "prune",
|
||||
Short: "prune multiple bouncers from the database",
|
||||
Args: cobra.NoArgs,
|
||||
DisableAutoGenTag: true,
|
||||
Example: `cscli bouncers prune -d 45m
|
||||
cscli bouncers prune -d 45m --force`,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.prune(duration, force)
|
||||
},
|
||||
}
|
||||
|
||||
flags := cmd.Flags()
|
||||
flags.DurationVarP(&duration, "duration", "d", defaultDuration, "duration of time since last pull")
|
||||
flags.BoolVar(&force, "force", false, "force prune without asking for confirmation")
|
||||
|
||||
return cmd
|
||||
}
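The new prune subcommand above confirms destructive actions through askYesNo, built on github.com/AlecAivazis/survey/v2. Extracted into a runnable sketch (the prompt text in main is illustrative):

package main

import (
	"fmt"

	"github.com/AlecAivazis/survey/v2"
)

// askYesNo shows a yes/no prompt and falls back to defaultAnswer on error,
// matching the helper added in this diff.
func askYesNo(message string, defaultAnswer bool) (bool, error) {
	var answer bool

	prompt := &survey.Confirm{
		Message: message,
		Default: defaultAnswer,
	}

	if err := survey.AskOne(prompt, &answer); err != nil {
		return defaultAnswer, err
	}

	return answer, nil
}

func main() {
	yes, err := askYesNo("Remove the listed bouncers?", false)
	if err != nil {
		fmt.Println("prompt failed:", err)
		return
	}
	fmt.Println("confirmed:", yes)
}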
cmd/crowdsec-cli/bouncers_table.go (new file, 29 lines)
@ -0,0 +1,29 @@
package main

import (
	"io"
	"time"

	"github.com/aquasecurity/table"

	"github.com/crowdsecurity/crowdsec/pkg/database/ent"
	"github.com/crowdsecurity/crowdsec/pkg/emoji"
)

func getBouncersTable(out io.Writer, bouncers []*ent.Bouncer) {
	t := newLightTable(out)
	t.SetHeaders("Name", "IP Address", "Valid", "Last API pull", "Type", "Version", "Auth Type")
	t.SetHeaderAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
	t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)

	for _, b := range bouncers {
		revoked := emoji.CheckMark
		if b.Revoked {
			revoked = emoji.Prohibited
		}

		t.AddRow(b.Name, b.IPAddress, revoked, b.LastPull.Format(time.RFC3339), b.Type, b.Version, b.AuthType)
	}

	t.Render()
}
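cscli bouncers add (cli.add in the bouncers.go diff above) generates a random key with middlewares.GenerateAPIKey and stores only its SHA-512 hash via middlewares.HashSHA512. A rough standard-library equivalent of that idea, for illustration only, not the crowdsec middleware code:

package main

import (
	"crypto/rand"
	"crypto/sha512"
	"encoding/base64"
	"encoding/hex"
	"fmt"
)

// generateAPIKey returns a URL-safe random key built from n bytes of entropy.
func generateAPIKey(n int) (string, error) {
	buf := make([]byte, n)
	if _, err := rand.Read(buf); err != nil {
		return "", err
	}
	return base64.RawURLEncoding.EncodeToString(buf), nil
}

// hashSHA512 returns the hex-encoded SHA-512 digest of the key, which is what
// gets persisted instead of the plaintext key.
func hashSHA512(key string) string {
	sum := sha512.Sum512([]byte(key))
	return hex.EncodeToString(sum[:])
}

func main() {
	key, err := generateAPIKey(32)
	if err != nil {
		panic(err)
	}
	fmt.Println(key)
	fmt.Println(hashSHA512(key))
}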
@ -2,168 +2,221 @@ package main
|
|||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/url"
|
||||
"os"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwversion"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/models"
|
||||
"github.com/go-openapi/strfmt"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
"github.com/crowdsecurity/go-cs-lib/version"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/models"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
)
|
||||
|
||||
var CAPIURLPrefix string = "v2"
|
||||
var CAPIBaseURL string = "https://api.crowdsec.net/"
|
||||
var capiUserPrefix string
|
||||
const (
|
||||
CAPIBaseURL = "https://api.crowdsec.net/"
|
||||
CAPIURLPrefix = "v3"
|
||||
)
|
||||
|
||||
func NewCapiCmd() *cobra.Command {
|
||||
var cmdCapi = &cobra.Command{
|
||||
type cliCapi struct {
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
func NewCLICapi(cfg configGetter) *cliCapi {
|
||||
return &cliCapi{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliCapi) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "capi [action]",
|
||||
Short: "Manage interaction with Central API (CAPI)",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
|
||||
if err := csConfig.LoadAPIServer(); err != nil || csConfig.DisableAPI {
|
||||
log.Fatal("Local API is disabled, please run this command on the local API machine")
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
if err := require.LAPI(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
if csConfig.API.Server.OnlineClient == nil {
|
||||
log.Fatalf("no configuration for Central API in '%s'", *csConfig.FilePath)
|
||||
|
||||
if err := require.CAPI(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
var cmdCapiRegister = &cobra.Command{
|
||||
cmd.AddCommand(cli.newRegisterCmd())
|
||||
cmd.AddCommand(cli.newStatusCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliCapi) register(capiUserPrefix string, outputFile string) error {
|
||||
cfg := cli.cfg()
|
||||
|
||||
capiUser, err := generateID(capiUserPrefix)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to generate machine id: %w", err)
|
||||
}
|
||||
|
||||
password := strfmt.Password(generatePassword(passwordLength))
|
||||
|
||||
apiurl, err := url.Parse(types.CAPIBaseURL)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to parse api url %s: %w", types.CAPIBaseURL, err)
|
||||
}
|
||||
|
||||
_, err = apiclient.RegisterClient(&apiclient.Config{
|
||||
MachineID: capiUser,
|
||||
Password: password,
|
||||
UserAgent: fmt.Sprintf("crowdsec/%s", version.String()),
|
||||
URL: apiurl,
|
||||
VersionPrefix: CAPIURLPrefix,
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("api client register ('%s'): %w", types.CAPIBaseURL, err)
|
||||
}
|
||||
|
||||
log.Infof("Successfully registered to Central API (CAPI)")
|
||||
|
||||
var dumpFile string
|
||||
|
||||
switch {
|
||||
case outputFile != "":
|
||||
dumpFile = outputFile
|
||||
case cfg.API.Server.OnlineClient.CredentialsFilePath != "":
|
||||
dumpFile = cfg.API.Server.OnlineClient.CredentialsFilePath
|
||||
default:
|
||||
dumpFile = ""
|
||||
}
|
||||
|
||||
apiCfg := csconfig.ApiCredentialsCfg{
|
||||
Login: capiUser,
|
||||
Password: password.String(),
|
||||
URL: types.CAPIBaseURL,
|
||||
}
|
||||
|
||||
apiConfigDump, err := yaml.Marshal(apiCfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to marshal api credentials: %w", err)
|
||||
}
|
||||
|
||||
if dumpFile != "" {
|
||||
err = os.WriteFile(dumpFile, apiConfigDump, 0o600)
|
||||
if err != nil {
|
||||
return fmt.Errorf("write api credentials in '%s' failed: %w", dumpFile, err)
|
||||
}
|
||||
|
||||
log.Infof("Central API credentials written to '%s'", dumpFile)
|
||||
} else {
|
||||
fmt.Println(string(apiConfigDump))
|
||||
}
|
||||
|
||||
log.Warning(ReloadMessage())
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliCapi) newRegisterCmd() *cobra.Command {
|
||||
var (
|
||||
capiUserPrefix string
|
||||
outputFile string
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "register",
|
||||
Short: "Register to Central API (CAPI)",
|
||||
Args: cobra.MinimumNArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
capiUser, err := generateID(capiUserPrefix)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to generate machine id: %s", err)
|
||||
}
|
||||
password := strfmt.Password(generatePassword(passwordLength))
|
||||
apiurl, err := url.Parse(CAPIBaseURL)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to parse api url %s : %s", CAPIBaseURL, err)
|
||||
}
|
||||
_, err = apiclient.RegisterClient(&apiclient.Config{
|
||||
MachineID: capiUser,
|
||||
Password: password,
|
||||
UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()),
|
||||
URL: apiurl,
|
||||
VersionPrefix: CAPIURLPrefix,
|
||||
}, nil)
|
||||
|
||||
if err != nil {
|
||||
log.Fatalf("api client register ('%s'): %s", CAPIBaseURL, err)
|
||||
}
|
||||
log.Printf("Successfully registered to Central API (CAPI)")
|
||||
|
||||
var dumpFile string
|
||||
|
||||
if outputFile != "" {
|
||||
dumpFile = outputFile
|
||||
} else if csConfig.API.Server.OnlineClient.CredentialsFilePath != "" {
|
||||
dumpFile = csConfig.API.Server.OnlineClient.CredentialsFilePath
|
||||
} else {
|
||||
dumpFile = ""
|
||||
}
|
||||
apiCfg := csconfig.ApiCredentialsCfg{
|
||||
Login: capiUser,
|
||||
Password: password.String(),
|
||||
URL: CAPIBaseURL,
|
||||
}
|
||||
apiConfigDump, err := yaml.Marshal(apiCfg)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to marshal api credentials: %s", err)
|
||||
}
|
||||
if dumpFile != "" {
|
||||
err = ioutil.WriteFile(dumpFile, apiConfigDump, 0600)
|
||||
if err != nil {
|
||||
log.Fatalf("write api credentials in '%s' failed: %s", dumpFile, err)
|
||||
}
|
||||
log.Printf("Central API credentials dumped to '%s'", dumpFile)
|
||||
} else {
|
||||
fmt.Printf("%s\n", string(apiConfigDump))
|
||||
}
|
||||
|
||||
log.Warningf(ReloadMessage())
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.register(capiUserPrefix, outputFile)
|
||||
},
|
||||
}
|
||||
cmdCapiRegister.Flags().StringVarP(&outputFile, "file", "f", "", "output file destination")
|
||||
cmdCapiRegister.Flags().StringVar(&capiUserPrefix, "schmilblick", "", "set a schmilblick (use in tests only)")
|
||||
cmdCapiRegister.Flags().MarkHidden("schmilblick")
|
||||
cmdCapi.AddCommand(cmdCapiRegister)
|
||||
|
||||
var cmdCapiStatus = &cobra.Command{
|
||||
cmd.Flags().StringVarP(&outputFile, "file", "f", "", "output file destination")
|
||||
cmd.Flags().StringVar(&capiUserPrefix, "schmilblick", "", "set a schmilblick (use in tests only)")
|
||||
|
||||
if err := cmd.Flags().MarkHidden("schmilblick"); err != nil {
|
||||
log.Fatalf("failed to hide flag: %s", err)
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliCapi) status() error {
|
||||
cfg := cli.cfg()
|
||||
|
||||
if err := require.CAPIRegistered(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
password := strfmt.Password(cfg.API.Server.OnlineClient.Credentials.Password)
|
||||
|
||||
apiurl, err := url.Parse(cfg.API.Server.OnlineClient.Credentials.URL)
|
||||
if err != nil {
|
||||
return fmt.Errorf("parsing api url ('%s'): %w", cfg.API.Server.OnlineClient.Credentials.URL, err)
|
||||
}
|
||||
|
||||
hub, err := require.Hub(cfg, nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get scenarios: %w", err)
|
||||
}
|
||||
|
||||
if len(scenarios) == 0 {
|
||||
return errors.New("no scenarios installed, abort")
|
||||
}
|
||||
|
||||
Client, err = apiclient.NewDefaultClient(apiurl, CAPIURLPrefix, fmt.Sprintf("crowdsec/%s", version.String()), nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("init default client: %w", err)
|
||||
}
|
||||
|
||||
t := models.WatcherAuthRequest{
|
||||
MachineID: &cfg.API.Server.OnlineClient.Credentials.Login,
|
||||
Password: &password,
|
||||
Scenarios: scenarios,
|
||||
}
|
||||
|
||||
log.Infof("Loaded credentials from %s", cfg.API.Server.OnlineClient.CredentialsFilePath)
|
||||
log.Infof("Trying to authenticate with username %s on %s", cfg.API.Server.OnlineClient.Credentials.Login, apiurl)
|
||||
|
||||
_, _, err = Client.Auth.AuthenticateWatcher(context.Background(), t)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to authenticate to Central API (CAPI): %w", err)
|
||||
}
|
||||
|
||||
log.Info("You can successfully interact with Central API (CAPI)")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliCapi) newStatusCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "status",
|
||||
Short: "Check status with the Central API (CAPI)",
|
||||
Args: cobra.MinimumNArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
if csConfig.API.Server == nil {
|
||||
log.Fatalln("There is no configuration on 'api_client:'")
|
||||
}
|
||||
if csConfig.API.Server.OnlineClient == nil {
|
||||
log.Fatalf("Please provide credentials for the Central API (CAPI) in '%s'", csConfig.API.Server.OnlineClient.CredentialsFilePath)
|
||||
}
|
||||
|
||||
if csConfig.API.Server.OnlineClient.Credentials == nil {
|
||||
log.Fatalf("no credentials for Central API (CAPI) in '%s'", csConfig.API.Server.OnlineClient.CredentialsFilePath)
|
||||
}
|
||||
|
||||
password := strfmt.Password(csConfig.API.Server.OnlineClient.Credentials.Password)
|
||||
apiurl, err := url.Parse(csConfig.API.Server.OnlineClient.Credentials.URL)
|
||||
if err != nil {
|
||||
log.Fatalf("parsing api url ('%s'): %s", csConfig.API.Server.OnlineClient.Credentials.URL, err)
|
||||
}
|
||||
|
||||
if err := csConfig.LoadHub(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
|
||||
if err := cwhub.GetHubIdx(csConfig.Hub); err != nil {
|
||||
log.Fatalf("Failed to load hub index : %s", err)
|
||||
log.Infoln("Run 'sudo cscli hub update' to get the hub index")
|
||||
}
|
||||
scenarios, err := cwhub.GetInstalledScenariosAsString()
|
||||
if err != nil {
|
||||
log.Fatalf("failed to get scenarios : %s", err.Error())
|
||||
}
|
||||
if len(scenarios) == 0 {
|
||||
log.Fatalf("no scenarios installed, abort")
|
||||
}
|
||||
|
||||
Client, err = apiclient.NewDefaultClient(apiurl, CAPIURLPrefix, fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), nil)
|
||||
if err != nil {
|
||||
log.Fatalf("init default client: %s", err)
|
||||
}
|
||||
t := models.WatcherAuthRequest{
|
||||
MachineID: &csConfig.API.Server.OnlineClient.Credentials.Login,
|
||||
Password: &password,
|
||||
Scenarios: scenarios,
|
||||
}
|
||||
log.Infof("Loaded credentials from %s", csConfig.API.Server.OnlineClient.CredentialsFilePath)
|
||||
log.Infof("Trying to authenticate with username %s on %s", csConfig.API.Server.OnlineClient.Credentials.Login, apiurl)
|
||||
_, err = Client.Auth.AuthenticateWatcher(context.Background(), t)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to authenticate to Central API (CAPI) : %s", err)
|
||||
}
|
||||
log.Infof("You can successfully interact with Central API (CAPI)")
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.status()
|
||||
},
|
||||
}
|
||||
cmdCapi.AddCommand(cmdCapiStatus)
|
||||
|
||||
return cmdCapi
|
||||
return cmd
|
||||
}
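The register flow above marshals the CAPI credentials to YAML and writes them with 0600 permissions. A minimal sketch of that step, with a local struct standing in for csconfig.ApiCredentialsCfg and made-up values:

package main

import (
	"fmt"
	"os"

	"gopkg.in/yaml.v3"
)

// creds stands in for csconfig.ApiCredentialsCfg in this sketch.
type creds struct {
	Login    string `yaml:"login"`
	Password string `yaml:"password"`
	URL      string `yaml:"url"`
}

func writeCredentials(path string, c creds) error {
	out, err := yaml.Marshal(c)
	if err != nil {
		return fmt.Errorf("unable to marshal api credentials: %w", err)
	}
	// 0600: the file contains a password, keep it readable by the owner only.
	if err := os.WriteFile(path, out, 0o600); err != nil {
		return fmt.Errorf("write api credentials in '%s' failed: %w", path, err)
	}
	return nil
}

func main() {
	err := writeCredentials("online_api_credentials.yaml", creds{
		Login:    "example-machine",
		Password: "not-a-real-password",
		URL:      "https://api.crowdsec.net/",
	})
	if err != nil {
		panic(err)
	}
}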
@ -1,178 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func NewCollectionsCmd() *cobra.Command {
|
||||
var cmdCollections = &cobra.Command{
|
||||
Use: "collections [action]",
|
||||
Short: "Manage collections from hub",
|
||||
Long: `Install/Remove/Upgrade/Inspect collections from the CrowdSec Hub.`,
|
||||
/*TBD fix help*/
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
Aliases: []string{"collection"},
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
|
||||
if err := csConfig.LoadHub(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
if csConfig.Hub == nil {
|
||||
return fmt.Errorf("you must configure cli before interacting with hub")
|
||||
}
|
||||
|
||||
if err := setHubBranch(); err != nil {
|
||||
return fmt.Errorf("error while setting hub branch: %s", err)
|
||||
}
|
||||
|
||||
if err := cwhub.GetHubIdx(csConfig.Hub); err != nil {
|
||||
log.Fatalf("Failed to get Hub index : %v", err)
|
||||
log.Infoln("Run 'sudo cscli hub update' to get the hub index")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
PersistentPostRun: func(cmd *cobra.Command, args []string) {
|
||||
if cmd.Name() == "inspect" || cmd.Name() == "list" {
|
||||
return
|
||||
}
|
||||
log.Infof(ReloadMessage())
|
||||
},
|
||||
}
|
||||
|
||||
var ignoreError bool
|
||||
var cmdCollectionsInstall = &cobra.Command{
|
||||
Use: "install collection",
|
||||
Short: "Install given collection(s)",
|
||||
Long: `Fetch and install given collection(s) from hub`,
|
||||
Example: `cscli collections install crowdsec/xxx crowdsec/xyz`,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||
return compAllItems(cwhub.COLLECTIONS, args, toComplete)
|
||||
},
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
for _, name := range args {
|
||||
if err := cwhub.InstallItem(csConfig, name, cwhub.COLLECTIONS, forceAction, downloadOnly); err != nil {
|
||||
if ignoreError {
|
||||
log.Errorf("Error while installing '%s': %s", name, err)
|
||||
} else {
|
||||
log.Fatalf("Error while installing '%s': %s", name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
cmdCollectionsInstall.PersistentFlags().BoolVarP(&downloadOnly, "download-only", "d", false, "Only download packages, don't enable")
|
||||
cmdCollectionsInstall.PersistentFlags().BoolVar(&forceAction, "force", false, "Force install : Overwrite tainted and outdated files")
|
||||
cmdCollectionsInstall.PersistentFlags().BoolVar(&ignoreError, "ignore", false, "Ignore errors when installing multiple collections")
|
||||
cmdCollections.AddCommand(cmdCollectionsInstall)
|
||||
|
||||
var cmdCollectionsRemove = &cobra.Command{
|
||||
Use: "remove collection",
|
||||
Short: "Remove given collection(s)",
|
||||
Long: `Remove given collection(s) from hub`,
|
||||
Example: `cscli collections remove crowdsec/xxx crowdsec/xyz`,
|
||||
Aliases: []string{"delete"},
|
||||
DisableAutoGenTag: true,
|
||||
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||
return compInstalledItems(cwhub.COLLECTIONS, args, toComplete)
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if all {
|
||||
cwhub.RemoveMany(csConfig, cwhub.COLLECTIONS, "", all, purge, forceAction)
|
||||
return
|
||||
}
|
||||
|
||||
if len(args) == 0 {
|
||||
log.Fatalf("Specify at least one collection to remove or '--all' flag.")
|
||||
}
|
||||
|
||||
for _, name := range args {
|
||||
if !forceAction {
|
||||
item := cwhub.GetItem(cwhub.COLLECTIONS, name)
|
||||
if item == nil {
|
||||
log.Fatalf("unable to retrieve: %s\n", name)
|
||||
}
|
||||
if len(item.BelongsToCollections) > 0 {
|
||||
log.Warningf("%s belongs to other collections :\n%s\n", name, item.BelongsToCollections)
|
||||
log.Printf("Run 'sudo cscli collections remove %s --force' if you want to force remove this sub collection\n", name)
|
||||
continue
|
||||
}
|
||||
}
|
||||
cwhub.RemoveMany(csConfig, cwhub.COLLECTIONS, name, all, purge, forceAction)
|
||||
}
|
||||
},
|
||||
}
|
||||
cmdCollectionsRemove.PersistentFlags().BoolVar(&purge, "purge", false, "Delete source file too")
|
||||
cmdCollectionsRemove.PersistentFlags().BoolVar(&forceAction, "force", false, "Force remove : Remove tainted and outdated files")
|
||||
cmdCollectionsRemove.PersistentFlags().BoolVar(&all, "all", false, "Delete all the collections")
|
||||
cmdCollections.AddCommand(cmdCollectionsRemove)
|
||||
|
||||
var cmdCollectionsUpgrade = &cobra.Command{
|
||||
Use: "upgrade collection",
|
||||
Short: "Upgrade given collection(s)",
|
||||
Long: `Fetch and upgrade given collection(s) from hub`,
|
||||
Example: `cscli collections upgrade crowdsec/xxx crowdsec/xyz`,
|
||||
DisableAutoGenTag: true,
|
||||
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||
return compInstalledItems(cwhub.COLLECTIONS, args, toComplete)
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if all {
|
||||
cwhub.UpgradeConfig(csConfig, cwhub.COLLECTIONS, "", forceAction)
|
||||
} else {
|
||||
if len(args) == 0 {
|
||||
log.Fatalf("no target collection to upgrade")
|
||||
}
|
||||
for _, name := range args {
|
||||
cwhub.UpgradeConfig(csConfig, cwhub.COLLECTIONS, name, forceAction)
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
cmdCollectionsUpgrade.PersistentFlags().BoolVarP(&all, "all", "a", false, "Upgrade all the collections")
|
||||
cmdCollectionsUpgrade.PersistentFlags().BoolVar(&forceAction, "force", false, "Force upgrade : Overwrite tainted and outdated files")
|
||||
cmdCollections.AddCommand(cmdCollectionsUpgrade)
|
||||
|
||||
var cmdCollectionsInspect = &cobra.Command{
|
||||
Use: "inspect collection",
|
||||
Short: "Inspect given collection",
|
||||
Long: `Inspect given collection`,
|
||||
Example: `cscli collections inspect crowdsec/xxx crowdsec/xyz`,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||
return compInstalledItems(cwhub.COLLECTIONS, args, toComplete)
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
for _, name := range args {
|
||||
InspectItem(name, cwhub.COLLECTIONS)
|
||||
}
|
||||
},
|
||||
}
|
||||
cmdCollectionsInspect.PersistentFlags().StringVarP(&prometheusURL, "url", "u", "", "Prometheus url")
|
||||
cmdCollections.AddCommand(cmdCollectionsInspect)
|
||||
|
||||
var cmdCollectionsList = &cobra.Command{
|
||||
Use: "list collection [-a]",
|
||||
Short: "List all collections",
|
||||
Long: `List all collections`,
|
||||
Example: `cscli collections list`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
ListItems([]string{cwhub.COLLECTIONS}, args, false, true, all)
|
||||
},
|
||||
}
|
||||
cmdCollectionsList.PersistentFlags().BoolVarP(&all, "all", "a", false, "List disabled items as well")
|
||||
cmdCollections.AddCommand(cmdCollectionsList)
|
||||
|
||||
return cmdCollections
|
||||
}
@ -7,9 +7,8 @@ import (
)

func NewCompletionCmd() *cobra.Command {

	var completionCmd = &cobra.Command{
		Use: "completion [bash|zsh]",
	completionCmd := &cobra.Command{
		Use:   "completion [bash|zsh|powershell|fish]",
		Short: "Generate completion script",
		Long: `To load completions:

@ -49,22 +48,39 @@ func NewCompletionCmd() *cobra.Command {
$ cscli completion zsh > "${fpath[1]}/_cscli"

# You will need to start a new shell for this setup to take effect.

### fish:
` + "```shell" + `
$ cscli completion fish | source

# To load completions for each session, execute once:
$ cscli completion fish > ~/.config/fish/completions/cscli.fish
` + "```" + `
### PowerShell:
` + "```powershell" + `
PS> cscli completion powershell | Out-String | Invoke-Expression

# To load completions for every new session, run:
PS> cscli completion powershell > cscli.ps1
# and source this file from your PowerShell profile.
` + "```",
		DisableFlagsInUseLine: true,
		DisableAutoGenTag:     true,
		ValidArgs: []string{"bash", "zsh"},
		Args: cobra.ExactValidArgs(1),
		ValidArgs: []string{"bash", "zsh", "powershell", "fish"},
		Args:      cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs),
		Run: func(cmd *cobra.Command, args []string) {
			switch args[0] {
			case "bash":
				cmd.Root().GenBashCompletion(os.Stdout)
			case "zsh":
				cmd.Root().GenZshCompletion(os.Stdout)
			/*case "fish":
			case "powershell":
				cmd.Root().GenPowerShellCompletion(os.Stdout)
			case "fish":
				cmd.Root().GenFishCompletion(os.Stdout, true)
			*/
			}
		},
	}

	return completionCmd
}
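For reference, the four generators used above are plain cobra methods. A minimal, self-contained program wiring them the same way (the command name "demo" is made up for the sketch):

package main

import (
	"os"

	"github.com/spf13/cobra"
)

func main() {
	root := &cobra.Command{Use: "demo"}

	root.AddCommand(&cobra.Command{
		Use:       "completion [bash|zsh|powershell|fish]",
		Short:     "Generate completion script",
		ValidArgs: []string{"bash", "zsh", "powershell", "fish"},
		Args:      cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs),
		RunE: func(cmd *cobra.Command, args []string) error {
			// Each generator writes a shell-specific completion script to stdout.
			switch args[0] {
			case "bash":
				return cmd.Root().GenBashCompletion(os.Stdout)
			case "zsh":
				return cmd.Root().GenZshCompletion(os.Stdout)
			case "powershell":
				return cmd.Root().GenPowerShellCompletion(os.Stdout)
			case "fish":
				return cmd.Root().GenFishCompletion(os.Stdout, true)
			}
			return nil
		},
	})

	_ = root.Execute()
}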
@ -1,484 +1,32 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/antonmedv/expr"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
type OldAPICfg struct {
|
||||
MachineID string `json:"machine_id"`
|
||||
Password string `json:"password"`
|
||||
type cliConfig struct {
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
/* Backup crowdsec configurations to directory <dirPath> :
|
||||
|
||||
- Main config (config.yaml)
|
||||
- Profiles config (profiles.yaml)
|
||||
- Simulation config (simulation.yaml)
|
||||
- Backup of API credentials (local API and online API)
|
||||
- List of scenarios, parsers, postoverflows and collections that are up-to-date
|
||||
- Tainted/local/out-of-date scenarios, parsers, postoverflows and collections
|
||||
*/
|
||||
func backupConfigToDirectory(dirPath string) error {
|
||||
var err error
|
||||
|
||||
if dirPath == "" {
|
||||
return fmt.Errorf("directory path can't be empty")
|
||||
func NewCLIConfig(cfg configGetter) *cliConfig {
|
||||
return &cliConfig{
|
||||
cfg: cfg,
|
||||
}
|
||||
log.Infof("Starting configuration backup")
|
||||
/*if parent directory doesn't exist, bail out. create final dir with Mkdir*/
|
||||
parentDir := filepath.Dir(dirPath)
|
||||
if _, err := os.Stat(parentDir); err != nil {
|
||||
return errors.Wrapf(err, "while checking parent directory %s existence", parentDir)
|
||||
}
|
||||
|
||||
if err = os.Mkdir(dirPath, 0700); err != nil {
|
||||
return fmt.Errorf("error while creating %s : %s", dirPath, err)
|
||||
}
|
||||
|
||||
if csConfig.ConfigPaths.SimulationFilePath != "" {
|
||||
backupSimulation := filepath.Join(dirPath, "simulation.yaml")
|
||||
if err = types.CopyFile(csConfig.ConfigPaths.SimulationFilePath, backupSimulation); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", csConfig.ConfigPaths.SimulationFilePath, backupSimulation, err)
|
||||
}
|
||||
log.Infof("Saved simulation to %s", backupSimulation)
|
||||
}
|
||||
|
||||
/*
|
||||
- backup AcquisitionFilePath
|
||||
- backup the other files of acquisition directory
|
||||
*/
|
||||
if csConfig.Crowdsec != nil && csConfig.Crowdsec.AcquisitionFilePath != "" {
|
||||
backupAcquisition := filepath.Join(dirPath, "acquis.yaml")
|
||||
if err = types.CopyFile(csConfig.Crowdsec.AcquisitionFilePath, backupAcquisition); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", csConfig.Crowdsec.AcquisitionFilePath, backupAcquisition, err)
|
||||
}
|
||||
}
|
||||
|
||||
acquisBackupDir := filepath.Join(dirPath, "acquis")
|
||||
if err = os.Mkdir(acquisBackupDir, 0700); err != nil {
|
||||
return fmt.Errorf("error while creating %s : %s", acquisBackupDir, err)
|
||||
}
|
||||
|
||||
if csConfig.Crowdsec != nil && len(csConfig.Crowdsec.AcquisitionFiles) > 0 {
|
||||
for _, acquisFile := range csConfig.Crowdsec.AcquisitionFiles {
|
||||
/*if it was the default one, it was already backup'ed*/
|
||||
if csConfig.Crowdsec.AcquisitionFilePath == acquisFile {
|
||||
continue
|
||||
}
|
||||
targetFname, err := filepath.Abs(filepath.Join(acquisBackupDir, filepath.Base(acquisFile)))
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "while saving %s to %s", acquisFile, acquisBackupDir)
|
||||
}
|
||||
if err = types.CopyFile(acquisFile, targetFname); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", acquisFile, targetFname, err)
|
||||
}
|
||||
log.Infof("Saved acquis %s to %s", acquisFile, targetFname)
|
||||
}
|
||||
}
|
||||
|
||||
if ConfigFilePath != "" {
|
||||
backupMain := fmt.Sprintf("%s/config.yaml", dirPath)
|
||||
if err = types.CopyFile(ConfigFilePath, backupMain); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", ConfigFilePath, backupMain, err)
|
||||
}
|
||||
log.Infof("Saved default yaml to %s", backupMain)
|
||||
}
|
||||
if csConfig.API != nil && csConfig.API.Server != nil && csConfig.API.Server.OnlineClient != nil && csConfig.API.Server.OnlineClient.CredentialsFilePath != "" {
|
||||
backupCAPICreds := fmt.Sprintf("%s/online_api_credentials.yaml", dirPath)
|
||||
if err = types.CopyFile(csConfig.API.Server.OnlineClient.CredentialsFilePath, backupCAPICreds); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", csConfig.API.Server.OnlineClient.CredentialsFilePath, backupCAPICreds, err)
|
||||
}
|
||||
log.Infof("Saved online API credentials to %s", backupCAPICreds)
|
||||
}
|
||||
if csConfig.API != nil && csConfig.API.Client != nil && csConfig.API.Client.CredentialsFilePath != "" {
|
||||
backupLAPICreds := fmt.Sprintf("%s/local_api_credentials.yaml", dirPath)
|
||||
if err = types.CopyFile(csConfig.API.Client.CredentialsFilePath, backupLAPICreds); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", csConfig.API.Client.CredentialsFilePath, backupLAPICreds, err)
|
||||
}
|
||||
log.Infof("Saved local API credentials to %s", backupLAPICreds)
|
||||
}
|
||||
if csConfig.API != nil && csConfig.API.Server != nil && csConfig.API.Server.ProfilesPath != "" {
|
||||
backupProfiles := fmt.Sprintf("%s/profiles.yaml", dirPath)
|
||||
if err = types.CopyFile(csConfig.API.Server.ProfilesPath, backupProfiles); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", csConfig.API.Server.ProfilesPath, backupProfiles, err)
|
||||
}
|
||||
log.Infof("Saved profiles to %s", backupProfiles)
|
||||
}
|
||||
|
||||
if err = BackupHub(dirPath); err != nil {
|
||||
return fmt.Errorf("failed to backup hub config : %s", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
/* Restore crowdsec configurations to directory <dirPath> :
|
||||
|
||||
- Main config (config.yaml)
|
||||
- Profiles config (profiles.yaml)
|
||||
- Simulation config (simulation.yaml)
|
||||
- Backup of API credentials (local API and online API)
|
||||
- List of scenarios, parsers, postoverflows and collections that are up-to-date
|
||||
- Tainted/local/out-of-date scenarios, parsers, postoverflows and collections
|
||||
*/
|
||||
func restoreConfigFromDirectory(dirPath string) error {
|
||||
var err error
|
||||
|
||||
if !restoreOldBackup {
|
||||
backupMain := fmt.Sprintf("%s/config.yaml", dirPath)
|
||||
if _, err = os.Stat(backupMain); err == nil {
|
||||
if csConfig.ConfigPaths != nil && csConfig.ConfigPaths.ConfigDir != "" {
|
||||
if err = types.CopyFile(backupMain, fmt.Sprintf("%s/config.yaml", csConfig.ConfigPaths.ConfigDir)); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", backupMain, csConfig.ConfigPaths.ConfigDir, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Now we have config.yaml, we should regenerate config struct to have rights paths etc
|
||||
ConfigFilePath = fmt.Sprintf("%s/config.yaml", csConfig.ConfigPaths.ConfigDir)
|
||||
initConfig()
|
||||
|
||||
backupCAPICreds := fmt.Sprintf("%s/online_api_credentials.yaml", dirPath)
|
||||
if _, err = os.Stat(backupCAPICreds); err == nil {
|
||||
if err = types.CopyFile(backupCAPICreds, csConfig.API.Server.OnlineClient.CredentialsFilePath); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", backupCAPICreds, csConfig.API.Server.OnlineClient.CredentialsFilePath, err)
|
||||
}
|
||||
}
|
||||
|
||||
backupLAPICreds := fmt.Sprintf("%s/local_api_credentials.yaml", dirPath)
|
||||
if _, err = os.Stat(backupLAPICreds); err == nil {
|
||||
if err = types.CopyFile(backupLAPICreds, csConfig.API.Client.CredentialsFilePath); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", backupLAPICreds, csConfig.API.Client.CredentialsFilePath, err)
|
||||
}
|
||||
}
|
||||
|
||||
backupProfiles := fmt.Sprintf("%s/profiles.yaml", dirPath)
|
||||
if _, err = os.Stat(backupProfiles); err == nil {
|
||||
if err = types.CopyFile(backupProfiles, csConfig.API.Server.ProfilesPath); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", backupProfiles, csConfig.API.Server.ProfilesPath, err)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
var oldAPICfg OldAPICfg
|
||||
backupOldAPICfg := fmt.Sprintf("%s/api_creds.json", dirPath)
|
||||
|
||||
jsonFile, err := os.Open(backupOldAPICfg)
|
||||
if err != nil {
|
||||
log.Warningf("failed to open %s : %s", backupOldAPICfg, err)
|
||||
} else {
|
||||
byteValue, _ := ioutil.ReadAll(jsonFile)
|
||||
err = json.Unmarshal(byteValue, &oldAPICfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to load json file %s : %s", backupOldAPICfg, err)
|
||||
}
|
||||
|
||||
apiCfg := csconfig.ApiCredentialsCfg{
|
||||
Login: oldAPICfg.MachineID,
|
||||
Password: oldAPICfg.Password,
|
||||
URL: CAPIBaseURL,
|
||||
}
|
||||
apiConfigDump, err := yaml.Marshal(apiCfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to dump api credentials: %s", err)
|
||||
}
|
||||
apiConfigDumpFile := fmt.Sprintf("%s/online_api_credentials.yaml", csConfig.ConfigPaths.ConfigDir)
|
||||
if csConfig.API.Server.OnlineClient != nil && csConfig.API.Server.OnlineClient.CredentialsFilePath != "" {
|
||||
apiConfigDumpFile = csConfig.API.Server.OnlineClient.CredentialsFilePath
|
||||
}
|
||||
err = ioutil.WriteFile(apiConfigDumpFile, apiConfigDump, 0644)
|
||||
if err != nil {
|
||||
return fmt.Errorf("write api credentials in '%s' failed: %s", apiConfigDumpFile, err)
|
||||
}
|
||||
log.Infof("Saved API credentials to %s", apiConfigDumpFile)
|
||||
}
|
||||
}
|
||||
|
||||
backupSimulation := fmt.Sprintf("%s/simulation.yaml", dirPath)
|
||||
if _, err = os.Stat(backupSimulation); err == nil {
|
||||
if err = types.CopyFile(backupSimulation, csConfig.ConfigPaths.SimulationFilePath); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", backupSimulation, csConfig.ConfigPaths.SimulationFilePath, err)
|
||||
}
|
||||
}
|
||||
|
||||
/*if there is a acquisition dir, restore its content*/
|
||||
if csConfig.Crowdsec.AcquisitionDirPath != "" {
|
||||
if err = os.Mkdir(csConfig.Crowdsec.AcquisitionDirPath, 0700); err != nil {
|
||||
return fmt.Errorf("error while creating %s : %s", csConfig.Crowdsec.AcquisitionDirPath, err)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
//if there was a single one
|
||||
backupAcquisition := fmt.Sprintf("%s/acquis.yaml", dirPath)
|
||||
if _, err = os.Stat(backupAcquisition); err == nil {
|
||||
log.Debugf("restoring backup'ed %s", backupAcquisition)
|
||||
if err = types.CopyFile(backupAcquisition, csConfig.Crowdsec.AcquisitionFilePath); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", backupAcquisition, csConfig.Crowdsec.AcquisitionFilePath, err)
|
||||
}
|
||||
}
|
||||
|
||||
//if there is files in the acquis backup dir, restore them
|
||||
acquisBackupDir := filepath.Join(dirPath, "acquis", "*.yaml")
|
||||
if acquisFiles, err := filepath.Glob(acquisBackupDir); err == nil {
|
||||
for _, acquisFile := range acquisFiles {
|
||||
targetFname, err := filepath.Abs(csConfig.Crowdsec.AcquisitionDirPath + "/" + filepath.Base(acquisFile))
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "while saving %s to %s", acquisFile, targetFname)
|
||||
}
|
||||
log.Debugf("restoring %s to %s", acquisFile, targetFname)
|
||||
if err = types.CopyFile(acquisFile, targetFname); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", acquisFile, targetFname, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if csConfig.Crowdsec != nil && len(csConfig.Crowdsec.AcquisitionFiles) > 0 {
|
||||
for _, acquisFile := range csConfig.Crowdsec.AcquisitionFiles {
|
||||
log.Infof("backup filepath from dir -> %s", acquisFile)
|
||||
/*if it was the default one, it was already backup'ed*/
|
||||
if csConfig.Crowdsec.AcquisitionFilePath == acquisFile {
|
||||
log.Infof("skip this one")
|
||||
continue
|
||||
}
|
||||
targetFname, err := filepath.Abs(filepath.Join(acquisBackupDir, filepath.Base(acquisFile)))
|
||||
if err != nil {
|
||||
return errors.Wrapf(err, "while saving %s to %s", acquisFile, acquisBackupDir)
|
||||
}
|
||||
if err = types.CopyFile(acquisFile, targetFname); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s : %s", acquisFile, targetFname, err)
|
||||
}
|
||||
log.Infof("Saved acquis %s to %s", acquisFile, targetFname)
|
||||
}
|
||||
}
|
||||
|
||||
if err = RestoreHub(dirPath); err != nil {
|
||||
return fmt.Errorf("failed to restore hub config : %s", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewConfigCmd() *cobra.Command {
|
||||
|
||||
var cmdConfig = &cobra.Command{
|
||||
func (cli *cliConfig) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "config [command]",
|
||||
Short: "Allows to view current config",
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
}
|
||||
var key string
|
||||
type Env struct {
|
||||
Config *csconfig.Config
|
||||
}
|
||||
var cmdConfigShow = &cobra.Command{
|
||||
Use: "show",
|
||||
Short: "Displays current config",
|
||||
Long: `Displays the current cli configuration.`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
|
||||
if key != "" {
|
||||
program, err := expr.Compile(key, expr.Env(Env{}))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
output, err := expr.Run(program, Env{Config: csConfig})
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
switch csConfig.Cscli.Output {
|
||||
case "human", "raw":
|
||||
switch output.(type) {
|
||||
case string:
|
||||
fmt.Printf("%s\n", output)
|
||||
case int:
|
||||
fmt.Printf("%d\n", output)
|
||||
default:
|
||||
fmt.Printf("%v\n", output)
|
||||
}
|
||||
case "json":
|
||||
data, err := json.MarshalIndent(output, "", " ")
|
||||
if err != nil {
|
||||
log.Fatalf("failed to marshal configuration: %s", err)
|
||||
}
|
||||
fmt.Printf("%s\n", string(data))
|
||||
}
|
||||
return
|
||||
}
|
||||
cmd.AddCommand(cli.newShowCmd())
|
||||
cmd.AddCommand(cli.newShowYAMLCmd())
|
||||
cmd.AddCommand(cli.newBackupCmd())
|
||||
cmd.AddCommand(cli.newRestoreCmd())
|
||||
cmd.AddCommand(cli.newFeatureFlagsCmd())
|
||||
|
||||
switch csConfig.Cscli.Output {
|
||||
case "human":
|
||||
fmt.Printf("Global:\n")
|
||||
if csConfig.ConfigPaths != nil {
|
||||
fmt.Printf(" - Configuration Folder : %s\n", csConfig.ConfigPaths.ConfigDir)
|
||||
fmt.Printf(" - Data Folder : %s\n", csConfig.ConfigPaths.DataDir)
|
||||
fmt.Printf(" - Hub Folder : %s\n", csConfig.ConfigPaths.HubDir)
|
||||
fmt.Printf(" - Simulation File : %s\n", csConfig.ConfigPaths.SimulationFilePath)
|
||||
}
|
||||
if csConfig.Common != nil {
|
||||
fmt.Printf(" - Log Folder : %s\n", csConfig.Common.LogDir)
|
||||
fmt.Printf(" - Log level : %s\n", csConfig.Common.LogLevel)
|
||||
fmt.Printf(" - Log Media : %s\n", csConfig.Common.LogMedia)
|
||||
}
|
||||
if csConfig.Crowdsec != nil {
|
||||
fmt.Printf("Crowdsec:\n")
|
||||
fmt.Printf(" - Acquisition File : %s\n", csConfig.Crowdsec.AcquisitionFilePath)
|
||||
fmt.Printf(" - Parsers routines : %d\n", csConfig.Crowdsec.ParserRoutinesCount)
|
||||
if csConfig.Crowdsec.AcquisitionDirPath != "" {
|
||||
fmt.Printf(" - Acquisition Folder : %s\n", csConfig.Crowdsec.AcquisitionDirPath)
|
||||
}
|
||||
}
|
||||
if csConfig.Cscli != nil {
|
||||
fmt.Printf("cscli:\n")
|
||||
fmt.Printf(" - Output : %s\n", csConfig.Cscli.Output)
|
||||
fmt.Printf(" - Hub Branch : %s\n", csConfig.Cscli.HubBranch)
|
||||
fmt.Printf(" - Hub Folder : %s\n", csConfig.Cscli.HubDir)
|
||||
}
|
||||
if csConfig.API != nil {
|
||||
if csConfig.API.Client != nil && csConfig.API.Client.Credentials != nil {
|
||||
fmt.Printf("API Client:\n")
|
||||
fmt.Printf(" - URL : %s\n", csConfig.API.Client.Credentials.URL)
|
||||
fmt.Printf(" - Login : %s\n", csConfig.API.Client.Credentials.Login)
|
||||
fmt.Printf(" - Credentials File : %s\n", csConfig.API.Client.CredentialsFilePath)
|
||||
}
|
||||
if csConfig.API.Server != nil {
|
||||
fmt.Printf("Local API Server:\n")
|
||||
fmt.Printf(" - Listen URL : %s\n", csConfig.API.Server.ListenURI)
|
||||
fmt.Printf(" - Profile File : %s\n", csConfig.API.Server.ProfilesPath)
|
||||
if csConfig.API.Server.TLS != nil {
|
||||
if csConfig.API.Server.TLS.CertFilePath != "" {
|
||||
fmt.Printf(" - Cert File : %s\n", csConfig.API.Server.TLS.CertFilePath)
|
||||
}
|
||||
if csConfig.API.Server.TLS.KeyFilePath != "" {
|
||||
fmt.Printf(" - Key File : %s\n", csConfig.API.Server.TLS.KeyFilePath)
|
||||
}
|
||||
}
|
||||
fmt.Printf(" - Trusted IPs: \n")
|
||||
for _, ip := range csConfig.API.Server.TrustedIPs {
|
||||
fmt.Printf(" - %s\n", ip)
|
||||
}
|
||||
if csConfig.API.Server.OnlineClient != nil && csConfig.API.Server.OnlineClient.Credentials != nil {
|
||||
fmt.Printf("Central API:\n")
|
||||
fmt.Printf(" - URL : %s\n", csConfig.API.Server.OnlineClient.Credentials.URL)
|
||||
fmt.Printf(" - Login : %s\n", csConfig.API.Server.OnlineClient.Credentials.Login)
|
||||
fmt.Printf(" - Credentials File : %s\n", csConfig.API.Server.OnlineClient.CredentialsFilePath)
|
||||
}
|
||||
}
|
||||
}
|
||||
if csConfig.DbConfig != nil {
|
||||
fmt.Printf(" - Database:\n")
|
||||
fmt.Printf(" - Type : %s\n", csConfig.DbConfig.Type)
|
||||
switch csConfig.DbConfig.Type {
|
||||
case "sqlite":
|
||||
fmt.Printf(" - Path : %s\n", csConfig.DbConfig.DbPath)
|
||||
case "mysql", "postgresql", "postgres":
|
||||
fmt.Printf(" - Host : %s\n", csConfig.DbConfig.Host)
|
||||
fmt.Printf(" - Port : %d\n", csConfig.DbConfig.Port)
|
||||
fmt.Printf(" - User : %s\n", csConfig.DbConfig.User)
|
||||
fmt.Printf(" - DB Name : %s\n", csConfig.DbConfig.DbName)
|
||||
}
|
||||
if csConfig.DbConfig.Flush != nil {
|
||||
if *csConfig.DbConfig.Flush.MaxAge != "" {
|
||||
fmt.Printf(" - Flush age : %s\n", *csConfig.DbConfig.Flush.MaxAge)
|
||||
}
|
||||
if *csConfig.DbConfig.Flush.MaxItems != 0 {
|
||||
fmt.Printf(" - Flush size : %d\n", *csConfig.DbConfig.Flush.MaxItems)
|
||||
}
|
||||
}
|
||||
}
|
||||
case "json":
|
||||
data, err := json.MarshalIndent(csConfig, "", " ")
|
||||
if err != nil {
|
||||
log.Fatalf("failed to marshal configuration: %s", err)
|
||||
}
|
||||
fmt.Printf("%s\n", string(data))
|
||||
case "raw":
|
||||
data, err := yaml.Marshal(csConfig)
|
||||
if err != nil {
|
||||
log.Fatalf("failed to marshal configuration: %s", err)
|
||||
}
|
||||
fmt.Printf("%s\n", string(data))
|
||||
}
|
||||
},
|
||||
}
|
||||
cmdConfigShow.Flags().StringVar(&key, "key", "", "Display only this value (Config.API.Server.ListenURI)")
|
||||
cmdConfig.AddCommand(cmdConfigShow)
|
||||
|
||||
var cmdConfigBackup = &cobra.Command{
|
||||
Use: `backup "directory"`,
|
||||
Short: "Backup current config",
|
||||
Long: `Backup the current crowdsec configuration including :
|
||||
|
||||
- Main config (config.yaml)
|
||||
- Simulation config (simulation.yaml)
|
||||
- Profiles config (profiles.yaml)
|
||||
- List of scenarios, parsers, postoverflows and collections that are up-to-date
|
||||
- Tainted/local/out-of-date scenarios, parsers, postoverflows and collections
|
||||
- Backup of API credentials (local API and online API)`,
|
||||
Example: `cscli config backup ./my-backup`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
if err := csConfig.LoadHub(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
if err = cwhub.GetHubIdx(csConfig.Hub); err != nil {
|
||||
log.Fatalf("Failed to get Hub index : %v", err)
|
||||
log.Infoln("Run 'sudo cscli hub update' to get the hub index")
|
||||
}
|
||||
if err = backupConfigToDirectory(args[0]); err != nil {
|
||||
log.Fatalf("Failed to backup configurations: %s", err)
|
||||
}
|
||||
},
|
||||
}
|
||||
cmdConfig.AddCommand(cmdConfigBackup)
|
||||
|
||||
var cmdConfigRestore = &cobra.Command{
|
||||
Use: `restore "directory"`,
|
||||
Short: `Restore config in backup "directory"`,
|
||||
Long: `Restore the crowdsec configuration from specified backup "directory" including:
|
||||
|
||||
- Main config (config.yaml)
|
||||
- Simulation config (simulation.yaml)
|
||||
- Profiles config (profiles.yaml)
|
||||
- List of scenarios, parsers, postoverflows and collections that are up-to-date
|
||||
- Tainted/local/out-of-date scenarios, parsers, postoverflows and collections
|
||||
- Backup of API credentials (local API and online API)`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
if err := csConfig.LoadHub(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
if err = cwhub.GetHubIdx(csConfig.Hub); err != nil {
|
||||
log.Fatalf("Failed to get Hub index : %v", err)
|
||||
log.Infoln("Run 'sudo cscli hub update' to get the hub index")
|
||||
}
|
||||
if err := restoreConfigFromDirectory(args[0]); err != nil {
|
||||
log.Fatalf("failed restoring configurations from %s : %s", args[0], err)
|
||||
}
|
||||
},
|
||||
}
|
||||
cmdConfigRestore.PersistentFlags().BoolVar(&restoreOldBackup, "old-backup", false, "To use when you are upgrading crowdsec v0.X to v1.X and you need to restore backup from v0.X")
|
||||
cmdConfig.AddCommand(cmdConfigRestore)
|
||||
|
||||
return cmdConfig
|
||||
return cmd
|
||||
}
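The refactor replaces the package-level NewConfigCmd with a cliConfig value that carries a `cfg configGetter` accessor. A sketch of how such a constructor is typically wired into the root command; the `configGetter` signature and the `buildRoot` helper here are assumptions inferred from this diff, not the repository's actual main.go:

```go
package main

import (
	"github.com/spf13/cobra"

	"github.com/crowdsecurity/crowdsec/pkg/csconfig"
)

// configGetter is assumed to be a plain accessor over the already-loaded
// configuration; its real definition lives elsewhere in the package.
type configGetter func() *csconfig.Config

// buildRoot shows the intended wiring: each command group receives the
// accessor and dereferences the config only when a subcommand runs.
func buildRoot(csConfig *csconfig.Config) *cobra.Command {
	root := &cobra.Command{Use: "cscli"}

	getConfig := func() *csconfig.Config { return csConfig }

	root.AddCommand(NewCLIConfig(getConfig).NewCommand())
	root.AddCommand(NewCLIConsole(getConfig).NewCommand())

	return root
}
```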
|
||||
|
|
240  cmd/crowdsec-cli/config_backup.go  Normal file
|
@@ -0,0 +1,240 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
)
|
||||
|
||||
func (cli *cliConfig) backupHub(dirPath string) error {
|
||||
hub, err := require.Hub(cli.cfg(), nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, itemType := range cwhub.ItemTypes {
|
||||
clog := log.WithFields(log.Fields{
|
||||
"type": itemType,
|
||||
})
|
||||
|
||||
itemMap := hub.GetItemMap(itemType)
|
||||
if itemMap == nil {
|
||||
clog.Infof("No %s to backup.", itemType)
|
||||
continue
|
||||
}
|
||||
|
||||
itemDirectory := fmt.Sprintf("%s/%s/", dirPath, itemType)
|
||||
if err = os.MkdirAll(itemDirectory, os.ModePerm); err != nil {
|
||||
return fmt.Errorf("error while creating %s: %w", itemDirectory, err)
|
||||
}
|
||||
|
||||
upstreamParsers := []string{}
|
||||
|
||||
for k, v := range itemMap {
|
||||
clog = clog.WithFields(log.Fields{
|
||||
"file": v.Name,
|
||||
})
|
||||
if !v.State.Installed { // only backup installed ones
|
||||
clog.Debugf("[%s]: not installed", k)
|
||||
continue
|
||||
}
|
||||
|
||||
// for the local/tainted ones, we back up the full file
|
||||
if v.State.Tainted || v.State.IsLocal() || !v.State.UpToDate {
|
||||
// we need to backup stages for parsers
|
||||
if itemType == cwhub.PARSERS || itemType == cwhub.POSTOVERFLOWS {
|
||||
fstagedir := fmt.Sprintf("%s%s", itemDirectory, v.Stage)
|
||||
if err = os.MkdirAll(fstagedir, os.ModePerm); err != nil {
|
||||
return fmt.Errorf("error while creating stage dir %s: %w", fstagedir, err)
|
||||
}
|
||||
}
|
||||
|
||||
clog.Debugf("[%s]: backing up file (tainted:%t local:%t up-to-date:%t)", k, v.State.Tainted, v.State.IsLocal(), v.State.UpToDate)
|
||||
|
||||
tfile := fmt.Sprintf("%s%s/%s", itemDirectory, v.Stage, v.FileName)
|
||||
if err = CopyFile(v.State.LocalPath, tfile); err != nil {
|
||||
return fmt.Errorf("failed copy %s %s to %s: %w", itemType, v.State.LocalPath, tfile, err)
|
||||
}
|
||||
|
||||
clog.Infof("local/tainted saved %s to %s", v.State.LocalPath, tfile)
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
clog.Debugf("[%s]: from hub, just backup name (up-to-date:%t)", k, v.State.UpToDate)
|
||||
clog.Infof("saving, version:%s, up-to-date:%t", v.Version, v.State.UpToDate)
|
||||
upstreamParsers = append(upstreamParsers, v.Name)
|
||||
}
|
||||
// write the upstream items
|
||||
upstreamParsersFname := fmt.Sprintf("%s/upstream-%s.json", itemDirectory, itemType)
|
||||
|
||||
upstreamParsersContent, err := json.MarshalIndent(upstreamParsers, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed marshaling upstream parsers: %w", err)
|
||||
}
|
||||
|
||||
err = os.WriteFile(upstreamParsersFname, upstreamParsersContent, 0o644)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to write to %s %s: %w", itemType, upstreamParsersFname, err)
|
||||
}
|
||||
|
||||
clog.Infof("Wrote %d entries for %s to %s", len(upstreamParsers), itemType, upstreamParsersFname)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
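backupHub persists only the names of up-to-date hub items, one JSON array per item type, while tainted or local items are copied in full. A standalone sketch of that data contract, which restoreHub (added later in this diff) reads back; the item names below are illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	fname := filepath.Join(os.TempDir(), "upstream-parsers.json")

	// what backupHub writes: only the item names, not their content
	names := []string{"example/parser-one", "example/parser-two"}

	data, err := json.MarshalIndent(names, "", " ")
	if err != nil {
		panic(err)
	}

	if err := os.WriteFile(fname, data, 0o644); err != nil {
		panic(err)
	}

	// what restoreHub does with it: unmarshal and reinstall each item from the hub
	raw, err := os.ReadFile(fname)
	if err != nil {
		panic(err)
	}

	var restored []string
	if err := json.Unmarshal(raw, &restored); err != nil {
		panic(err)
	}

	fmt.Println(restored)
}
```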
|
||||
|
||||
/*
|
||||
Backup crowdsec configurations to directory <dirPath>:
|
||||
|
||||
- Main config (config.yaml)
|
||||
- Profiles config (profiles.yaml)
|
||||
- Simulation config (simulation.yaml)
|
||||
- Backup of API credentials (local API and online API)
|
||||
- List of scenarios, parsers, postoverflows and collections that are up-to-date
|
||||
- Tainted/local/out-of-date scenarios, parsers, postoverflows and collections
|
||||
- Acquisition files (acquis.yaml, acquis.d/*.yaml)
|
||||
*/
|
||||
func (cli *cliConfig) backup(dirPath string) error {
|
||||
var err error
|
||||
|
||||
cfg := cli.cfg()
|
||||
|
||||
if dirPath == "" {
|
||||
return errors.New("directory path can't be empty")
|
||||
}
|
||||
|
||||
log.Infof("Starting configuration backup")
|
||||
|
||||
/*if parent directory doesn't exist, bail out. create final dir with Mkdir*/
|
||||
parentDir := filepath.Dir(dirPath)
|
||||
if _, err = os.Stat(parentDir); err != nil {
|
||||
return fmt.Errorf("while checking parent directory %s existence: %w", parentDir, err)
|
||||
}
|
||||
|
||||
if err = os.Mkdir(dirPath, 0o700); err != nil {
|
||||
return fmt.Errorf("while creating %s: %w", dirPath, err)
|
||||
}
|
||||
|
||||
if cfg.ConfigPaths.SimulationFilePath != "" {
|
||||
backupSimulation := filepath.Join(dirPath, "simulation.yaml")
|
||||
if err = CopyFile(cfg.ConfigPaths.SimulationFilePath, backupSimulation); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", cfg.ConfigPaths.SimulationFilePath, backupSimulation, err)
|
||||
}
|
||||
|
||||
log.Infof("Saved simulation to %s", backupSimulation)
|
||||
}
|
||||
|
||||
/*
|
||||
- backup AcquisitionFilePath
|
||||
- backup the other files of acquisition directory
|
||||
*/
|
||||
if cfg.Crowdsec != nil && cfg.Crowdsec.AcquisitionFilePath != "" {
|
||||
backupAcquisition := filepath.Join(dirPath, "acquis.yaml")
|
||||
if err = CopyFile(cfg.Crowdsec.AcquisitionFilePath, backupAcquisition); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", cfg.Crowdsec.AcquisitionFilePath, backupAcquisition, err)
|
||||
}
|
||||
}
|
||||
|
||||
acquisBackupDir := filepath.Join(dirPath, "acquis")
|
||||
if err = os.Mkdir(acquisBackupDir, 0o700); err != nil {
|
||||
return fmt.Errorf("error while creating %s: %w", acquisBackupDir, err)
|
||||
}
|
||||
|
||||
if cfg.Crowdsec != nil && len(cfg.Crowdsec.AcquisitionFiles) > 0 {
|
||||
for _, acquisFile := range cfg.Crowdsec.AcquisitionFiles {
|
||||
/*if it was the default one, it was already backup'ed*/
|
||||
if cfg.Crowdsec.AcquisitionFilePath == acquisFile {
|
||||
continue
|
||||
}
|
||||
|
||||
targetFname, err := filepath.Abs(filepath.Join(acquisBackupDir, filepath.Base(acquisFile)))
|
||||
if err != nil {
|
||||
return fmt.Errorf("while saving %s to %s: %w", acquisFile, acquisBackupDir, err)
|
||||
}
|
||||
|
||||
if err = CopyFile(acquisFile, targetFname); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", acquisFile, targetFname, err)
|
||||
}
|
||||
|
||||
log.Infof("Saved acquis %s to %s", acquisFile, targetFname)
|
||||
}
|
||||
}
|
||||
|
||||
if ConfigFilePath != "" {
|
||||
backupMain := fmt.Sprintf("%s/config.yaml", dirPath)
|
||||
if err = CopyFile(ConfigFilePath, backupMain); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", ConfigFilePath, backupMain, err)
|
||||
}
|
||||
|
||||
log.Infof("Saved default yaml to %s", backupMain)
|
||||
}
|
||||
|
||||
if cfg.API != nil && cfg.API.Server != nil && cfg.API.Server.OnlineClient != nil && cfg.API.Server.OnlineClient.CredentialsFilePath != "" {
|
||||
backupCAPICreds := fmt.Sprintf("%s/online_api_credentials.yaml", dirPath)
|
||||
if err = CopyFile(cfg.API.Server.OnlineClient.CredentialsFilePath, backupCAPICreds); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", cfg.API.Server.OnlineClient.CredentialsFilePath, backupCAPICreds, err)
|
||||
}
|
||||
|
||||
log.Infof("Saved online API credentials to %s", backupCAPICreds)
|
||||
}
|
||||
|
||||
if cfg.API != nil && cfg.API.Client != nil && cfg.API.Client.CredentialsFilePath != "" {
|
||||
backupLAPICreds := fmt.Sprintf("%s/local_api_credentials.yaml", dirPath)
|
||||
if err = CopyFile(cfg.API.Client.CredentialsFilePath, backupLAPICreds); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", cfg.API.Client.CredentialsFilePath, backupLAPICreds, err)
|
||||
}
|
||||
|
||||
log.Infof("Saved local API credentials to %s", backupLAPICreds)
|
||||
}
|
||||
|
||||
if cfg.API != nil && cfg.API.Server != nil && cfg.API.Server.ProfilesPath != "" {
|
||||
backupProfiles := fmt.Sprintf("%s/profiles.yaml", dirPath)
|
||||
if err = CopyFile(cfg.API.Server.ProfilesPath, backupProfiles); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", cfg.API.Server.ProfilesPath, backupProfiles, err)
|
||||
}
|
||||
|
||||
log.Infof("Saved profiles to %s", backupProfiles)
|
||||
}
|
||||
|
||||
if err = cli.backupHub(dirPath); err != nil {
|
||||
return fmt.Errorf("failed to backup hub config: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
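The backup and restore helpers call a CopyFile function defined elsewhere in the package (replacing the former types.CopyFile). A minimal sketch of what such a helper might look like, offered as an assumption rather than the repository's actual implementation:

```go
package main

import (
	"fmt"
	"io"
	"os"
)

// copyFileSketch copies src to dst, creating or truncating dst.
// Hypothetical stand-in for the package's CopyFile helper.
func copyFileSketch(src, dst string) error {
	in, err := os.Open(src)
	if err != nil {
		return err
	}
	defer in.Close()

	out, err := os.Create(dst)
	if err != nil {
		return err
	}
	defer out.Close()

	if _, err := io.Copy(out, in); err != nil {
		return err
	}

	return out.Sync()
}

func main() {
	// paths are illustrative only
	if err := copyFileSketch("/etc/hostname", "/tmp/hostname.bak"); err != nil {
		fmt.Println("copy failed:", err)
	}
}
```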
|
||||
|
||||
func (cli *cliConfig) newBackupCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: `backup "directory"`,
|
||||
Short: "Backup current config",
|
||||
Long: `Backup the current crowdsec configuration including :
|
||||
|
||||
- Main config (config.yaml)
|
||||
- Simulation config (simulation.yaml)
|
||||
- Profiles config (profiles.yaml)
|
||||
- List of scenarios, parsers, postoverflows and collections that are up-to-date
|
||||
- Tainted/local/out-of-date scenarios, parsers, postoverflows and collections
|
||||
- Backup of API credentials (local API and online API)`,
|
||||
Example: `cscli config backup ./my-backup`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
if err := cli.backup(args[0]); err != nil {
|
||||
return fmt.Errorf("failed to backup config: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
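Note the shift from Run with log.Fatalf in the removed config command code above to RunE returning wrapped errors, so failures propagate to cobra's Execute and the caller decides how to exit. A minimal sketch of the pattern with a hypothetical subcommand:

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func doThing(target string) error {
	// placeholder for the real work
	return nil
}

func newDoThingCmd() *cobra.Command {
	return &cobra.Command{
		Use:  "do-thing <target>",
		Args: cobra.ExactArgs(1),
		RunE: func(_ *cobra.Command, args []string) error {
			if err := doThing(args[0]); err != nil {
				// wrap and return instead of log.Fatalf: cobra prints the
				// error and Execute reports it to main
				return fmt.Errorf("failed to do thing on %s: %w", args[0], err)
			}
			return nil
		},
	}
}

func main() {
	root := &cobra.Command{Use: "app"}
	root.AddCommand(newDoThingCmd())
	_ = root.Execute()
}
```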
|
135  cmd/crowdsec-cli/config_feature_flags.go  Normal file
|
@@ -0,0 +1,135 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/fatih/color"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/fflag"
|
||||
)
|
||||
|
||||
func (cli *cliConfig) featureFlags(showRetired bool) error {
|
||||
green := color.New(color.FgGreen).SprintFunc()
|
||||
red := color.New(color.FgRed).SprintFunc()
|
||||
yellow := color.New(color.FgYellow).SprintFunc()
|
||||
magenta := color.New(color.FgMagenta).SprintFunc()
|
||||
|
||||
printFeature := func(feat fflag.Feature) {
|
||||
nameDesc := feat.Name
|
||||
if feat.Description != "" {
|
||||
nameDesc += ": " + feat.Description
|
||||
}
|
||||
|
||||
status := red("✗")
|
||||
if feat.IsEnabled() {
|
||||
status = green("✓")
|
||||
}
|
||||
|
||||
fmt.Printf("%s %s", status, nameDesc)
|
||||
|
||||
if feat.State == fflag.DeprecatedState {
|
||||
fmt.Printf("\n %s %s", yellow("DEPRECATED"), feat.DeprecationMsg)
|
||||
}
|
||||
|
||||
if feat.State == fflag.RetiredState {
|
||||
fmt.Printf("\n %s %s", magenta("RETIRED"), feat.DeprecationMsg)
|
||||
}
|
||||
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
feats := fflag.Crowdsec.GetAllFeatures()
|
||||
|
||||
enabled := []fflag.Feature{}
|
||||
disabled := []fflag.Feature{}
|
||||
retired := []fflag.Feature{}
|
||||
|
||||
for _, feat := range feats {
|
||||
if feat.State == fflag.RetiredState {
|
||||
retired = append(retired, feat)
|
||||
continue
|
||||
}
|
||||
|
||||
if feat.IsEnabled() {
|
||||
enabled = append(enabled, feat)
|
||||
continue
|
||||
}
|
||||
|
||||
disabled = append(disabled, feat)
|
||||
}
|
||||
|
||||
if len(enabled) > 0 {
|
||||
fmt.Println(" --- Enabled features ---")
|
||||
fmt.Println()
|
||||
|
||||
for _, feat := range enabled {
|
||||
printFeature(feat)
|
||||
}
|
||||
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
if len(disabled) > 0 {
|
||||
fmt.Println(" --- Disabled features ---")
|
||||
fmt.Println()
|
||||
|
||||
for _, feat := range disabled {
|
||||
printFeature(feat)
|
||||
}
|
||||
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
fmt.Println("To enable a feature you can: ")
|
||||
fmt.Println(" - set the environment variable CROWDSEC_FEATURE_<uppercase_feature_name> to true")
|
||||
|
||||
featurePath, err := filepath.Abs(csconfig.GetFeatureFilePath(ConfigFilePath))
|
||||
if err != nil {
|
||||
// we already read the file, shouldn't happen
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Printf(" - add the line '- <feature_name>' to the file %s\n", featurePath)
|
||||
fmt.Println()
|
||||
|
||||
if len(enabled) == 0 && len(disabled) == 0 {
|
||||
fmt.Println("However, no feature flag is available in this release.")
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
if showRetired && len(retired) > 0 {
|
||||
fmt.Println(" --- Retired features ---")
|
||||
fmt.Println()
|
||||
|
||||
for _, feat := range retired {
|
||||
printFeature(feat)
|
||||
}
|
||||
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliConfig) newFeatureFlagsCmd() *cobra.Command {
|
||||
var showRetired bool
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "feature-flags",
|
||||
Short: "Displays feature flag status",
|
||||
Long: `Displays the supported feature flags and their current status.`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.featureFlags(showRetired)
|
||||
},
|
||||
}
|
||||
|
||||
flags := cmd.Flags()
|
||||
flags.BoolVar(&showRetired, "retired", false, "Show retired features")
|
||||
|
||||
return cmd
|
||||
}
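The command above partitions features into enabled, disabled and retired before printing. A self-contained illustration of that partition logic using a local stand-in type with fake feature names, not the fflag API:

```go
package main

import "fmt"

// flag is a local stand-in for fflag.Feature, for illustration only.
type flag struct {
	name    string
	enabled bool
	retired bool
}

func main() {
	feats := []flag{
		{name: "feature_a", enabled: true},
		{name: "feature_b"},
		{name: "feature_c", retired: true},
	}

	var enabled, disabled, retired []flag

	for _, f := range feats {
		switch {
		case f.retired:
			retired = append(retired, f)
		case f.enabled:
			enabled = append(enabled, f)
		default:
			disabled = append(disabled, f)
		}
	}

	fmt.Println("enabled:", len(enabled), "disabled:", len(disabled), "retired:", len(retired))
}
```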
|
273  cmd/crowdsec-cli/config_restore.go  Normal file
|
@@ -0,0 +1,273 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
)
|
||||
|
||||
func (cli *cliConfig) restoreHub(dirPath string) error {
|
||||
cfg := cli.cfg()
|
||||
|
||||
hub, err := require.Hub(cfg, require.RemoteHub(cfg), nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, itype := range cwhub.ItemTypes {
|
||||
itemDirectory := fmt.Sprintf("%s/%s/", dirPath, itype)
|
||||
if _, err = os.Stat(itemDirectory); err != nil {
|
||||
log.Infof("no %s in backup", itype)
|
||||
continue
|
||||
}
|
||||
/*restore the upstream items*/
|
||||
upstreamListFN := fmt.Sprintf("%s/upstream-%s.json", itemDirectory, itype)
|
||||
|
||||
file, err := os.ReadFile(upstreamListFN)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error while opening %s: %w", upstreamListFN, err)
|
||||
}
|
||||
|
||||
var upstreamList []string
|
||||
|
||||
err = json.Unmarshal(file, &upstreamList)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error unmarshaling %s: %w", upstreamListFN, err)
|
||||
}
|
||||
|
||||
for _, toinstall := range upstreamList {
|
||||
item := hub.GetItem(itype, toinstall)
|
||||
if item == nil {
|
||||
log.Errorf("Item %s/%s not found in hub", itype, toinstall)
|
||||
continue
|
||||
}
|
||||
|
||||
if err = item.Install(false, false); err != nil {
|
||||
log.Errorf("Error while installing %s : %s", toinstall, err)
|
||||
}
|
||||
}
|
||||
|
||||
/*restore the local and tainted items*/
|
||||
files, err := os.ReadDir(itemDirectory)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed enumerating files of %s: %w", itemDirectory, err)
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
// this was the upstream data
|
||||
if file.Name() == fmt.Sprintf("upstream-%s.json", itype) {
|
||||
continue
|
||||
}
|
||||
|
||||
if itype == cwhub.PARSERS || itype == cwhub.POSTOVERFLOWS {
|
||||
// we expect a stage here
|
||||
if !file.IsDir() {
|
||||
continue
|
||||
}
|
||||
|
||||
stage := file.Name()
|
||||
stagedir := fmt.Sprintf("%s/%s/%s/", cfg.ConfigPaths.ConfigDir, itype, stage)
|
||||
log.Debugf("Found stage %s in %s, target directory : %s", stage, itype, stagedir)
|
||||
|
||||
if err = os.MkdirAll(stagedir, os.ModePerm); err != nil {
|
||||
return fmt.Errorf("error while creating stage directory %s: %w", stagedir, err)
|
||||
}
|
||||
|
||||
// find items
|
||||
ifiles, err := os.ReadDir(itemDirectory + "/" + stage + "/")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed enumerating files of %s: %w", itemDirectory+"/"+stage, err)
|
||||
}
|
||||
|
||||
// finally copy item
|
||||
for _, tfile := range ifiles {
|
||||
log.Infof("Going to restore local/tainted [%s]", tfile.Name())
|
||||
sourceFile := fmt.Sprintf("%s/%s/%s", itemDirectory, stage, tfile.Name())
|
||||
|
||||
destinationFile := fmt.Sprintf("%s%s", stagedir, tfile.Name())
|
||||
if err = CopyFile(sourceFile, destinationFile); err != nil {
|
||||
return fmt.Errorf("failed copy %s %s to %s: %w", itype, sourceFile, destinationFile, err)
|
||||
}
|
||||
|
||||
log.Infof("restored %s to %s", sourceFile, destinationFile)
|
||||
}
|
||||
} else {
|
||||
log.Infof("Going to restore local/tainted [%s]", file.Name())
|
||||
sourceFile := fmt.Sprintf("%s/%s", itemDirectory, file.Name())
|
||||
destinationFile := fmt.Sprintf("%s/%s/%s", cfg.ConfigPaths.ConfigDir, itype, file.Name())
|
||||
|
||||
if err = CopyFile(sourceFile, destinationFile); err != nil {
|
||||
return fmt.Errorf("failed copy %s %s to %s: %w", itype, sourceFile, destinationFile, err)
|
||||
}
|
||||
|
||||
log.Infof("restored %s to %s", sourceFile, destinationFile)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
/*
|
||||
Restore crowdsec configurations to directory <dirPath>:
|
||||
|
||||
- Main config (config.yaml)
|
||||
- Profiles config (profiles.yaml)
|
||||
- Simulation config (simulation.yaml)
|
||||
- Backup of API credentials (local API and online API)
|
||||
- List of scenarios, parsers, postoverflows and collections that are up-to-date
|
||||
- Tainted/local/out-of-date scenarios, parsers, postoverflows and collections
|
||||
- Acquisition files (acquis.yaml, acquis.d/*.yaml)
|
||||
*/
|
||||
func (cli *cliConfig) restore(dirPath string) error {
|
||||
var err error
|
||||
|
||||
cfg := cli.cfg()
|
||||
|
||||
backupMain := fmt.Sprintf("%s/config.yaml", dirPath)
|
||||
if _, err = os.Stat(backupMain); err == nil {
|
||||
if cfg.ConfigPaths != nil && cfg.ConfigPaths.ConfigDir != "" {
|
||||
if err = CopyFile(backupMain, fmt.Sprintf("%s/config.yaml", cfg.ConfigPaths.ConfigDir)); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", backupMain, cfg.ConfigPaths.ConfigDir, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Now we have config.yaml, we should regenerate config struct to have rights paths etc
|
||||
ConfigFilePath = fmt.Sprintf("%s/config.yaml", cfg.ConfigPaths.ConfigDir)
|
||||
|
||||
log.Debug("Reloading configuration")
|
||||
|
||||
csConfig, _, err = loadConfigFor("config")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to reload configuration: %w", err)
|
||||
}
|
||||
|
||||
cfg = cli.cfg()
|
||||
|
||||
backupCAPICreds := fmt.Sprintf("%s/online_api_credentials.yaml", dirPath)
|
||||
if _, err = os.Stat(backupCAPICreds); err == nil {
|
||||
if err = CopyFile(backupCAPICreds, cfg.API.Server.OnlineClient.CredentialsFilePath); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", backupCAPICreds, cfg.API.Server.OnlineClient.CredentialsFilePath, err)
|
||||
}
|
||||
}
|
||||
|
||||
backupLAPICreds := fmt.Sprintf("%s/local_api_credentials.yaml", dirPath)
|
||||
if _, err = os.Stat(backupLAPICreds); err == nil {
|
||||
if err = CopyFile(backupLAPICreds, cfg.API.Client.CredentialsFilePath); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", backupLAPICreds, cfg.API.Client.CredentialsFilePath, err)
|
||||
}
|
||||
}
|
||||
|
||||
backupProfiles := fmt.Sprintf("%s/profiles.yaml", dirPath)
|
||||
if _, err = os.Stat(backupProfiles); err == nil {
|
||||
if err = CopyFile(backupProfiles, cfg.API.Server.ProfilesPath); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", backupProfiles, cfg.API.Server.ProfilesPath, err)
|
||||
}
|
||||
}
|
||||
|
||||
backupSimulation := fmt.Sprintf("%s/simulation.yaml", dirPath)
|
||||
if _, err = os.Stat(backupSimulation); err == nil {
|
||||
if err = CopyFile(backupSimulation, cfg.ConfigPaths.SimulationFilePath); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", backupSimulation, cfg.ConfigPaths.SimulationFilePath, err)
|
||||
}
|
||||
}
|
||||
|
||||
/*if there is a acquisition dir, restore its content*/
|
||||
if cfg.Crowdsec.AcquisitionDirPath != "" {
|
||||
if err = os.MkdirAll(cfg.Crowdsec.AcquisitionDirPath, 0o700); err != nil {
|
||||
return fmt.Errorf("error while creating %s: %w", cfg.Crowdsec.AcquisitionDirPath, err)
|
||||
}
|
||||
}
|
||||
|
||||
// if there was a single one
|
||||
backupAcquisition := fmt.Sprintf("%s/acquis.yaml", dirPath)
|
||||
if _, err = os.Stat(backupAcquisition); err == nil {
|
||||
log.Debugf("restoring backup'ed %s", backupAcquisition)
|
||||
|
||||
if err = CopyFile(backupAcquisition, cfg.Crowdsec.AcquisitionFilePath); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", backupAcquisition, cfg.Crowdsec.AcquisitionFilePath, err)
|
||||
}
|
||||
}
|
||||
|
||||
// if there are files in the acquis backup dir, restore them
|
||||
acquisBackupDir := filepath.Join(dirPath, "acquis", "*.yaml")
|
||||
if acquisFiles, err := filepath.Glob(acquisBackupDir); err == nil {
|
||||
for _, acquisFile := range acquisFiles {
|
||||
targetFname, err := filepath.Abs(cfg.Crowdsec.AcquisitionDirPath + "/" + filepath.Base(acquisFile))
|
||||
if err != nil {
|
||||
return fmt.Errorf("while saving %s to %s: %w", acquisFile, targetFname, err)
|
||||
}
|
||||
|
||||
log.Debugf("restoring %s to %s", acquisFile, targetFname)
|
||||
|
||||
if err = CopyFile(acquisFile, targetFname); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", acquisFile, targetFname, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if cfg.Crowdsec != nil && len(cfg.Crowdsec.AcquisitionFiles) > 0 {
|
||||
for _, acquisFile := range cfg.Crowdsec.AcquisitionFiles {
|
||||
log.Infof("backup filepath from dir -> %s", acquisFile)
|
||||
|
||||
// if it was the default one, it has already been backed up
|
||||
if cfg.Crowdsec.AcquisitionFilePath == acquisFile {
|
||||
log.Infof("skip this one")
|
||||
continue
|
||||
}
|
||||
|
||||
targetFname, err := filepath.Abs(filepath.Join(acquisBackupDir, filepath.Base(acquisFile)))
|
||||
if err != nil {
|
||||
return fmt.Errorf("while saving %s to %s: %w", acquisFile, acquisBackupDir, err)
|
||||
}
|
||||
|
||||
if err = CopyFile(acquisFile, targetFname); err != nil {
|
||||
return fmt.Errorf("failed copy %s to %s: %w", acquisFile, targetFname, err)
|
||||
}
|
||||
|
||||
log.Infof("Saved acquis %s to %s", acquisFile, targetFname)
|
||||
}
|
||||
}
|
||||
|
||||
if err = cli.restoreHub(dirPath); err != nil {
|
||||
return fmt.Errorf("failed to restore hub config: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
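The restore path above globs any acquis/*.yaml files from the backup and copies them into the acquisition directory, keeping only the base name. A standalone sketch of that glob-and-copy step; the directory names are illustrative:

```go
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	backupDir := "/tmp/my-backup"         // illustrative
	acquisDir := "/etc/crowdsec/acquis.d" // illustrative

	pattern := filepath.Join(backupDir, "acquis", "*.yaml")

	matches, err := filepath.Glob(pattern)
	if err != nil {
		panic(err)
	}

	for _, src := range matches {
		// the target keeps only the base name, like the restore code above
		dst, err := filepath.Abs(filepath.Join(acquisDir, filepath.Base(src)))
		if err != nil {
			panic(err)
		}

		fmt.Printf("would copy %s -> %s\n", src, dst)
	}
}
```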
|
||||
|
||||
func (cli *cliConfig) newRestoreCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: `restore "directory"`,
|
||||
Short: `Restore config in backup "directory"`,
|
||||
Long: `Restore the crowdsec configuration from specified backup "directory" including:
|
||||
|
||||
- Main config (config.yaml)
|
||||
- Simulation config (simulation.yaml)
|
||||
- Profiles config (profiles.yaml)
|
||||
- List of scenarios, parsers, postoverflows and collections that are up-to-date
|
||||
- Tainted/local/out-of-date scenarios, parsers, postoverflows and collections
|
||||
- Backup of API credentials (local API and online API)`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
dirPath := args[0]
|
||||
|
||||
if err := cli.restore(dirPath); err != nil {
|
||||
return fmt.Errorf("failed to restore config from %s: %w", dirPath, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
258  cmd/crowdsec-cli/config_show.go  Normal file
|
@@ -0,0 +1,258 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"text/template"
|
||||
|
||||
"github.com/antonmedv/expr"
|
||||
"github.com/sanity-io/litter"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
|
||||
)
|
||||
|
||||
func (cli *cliConfig) showKey(key string) error {
|
||||
cfg := cli.cfg()
|
||||
|
||||
type Env struct {
|
||||
Config *csconfig.Config
|
||||
}
|
||||
|
||||
opts := []expr.Option{}
|
||||
opts = append(opts, exprhelpers.GetExprOptions(map[string]interface{}{})...)
|
||||
opts = append(opts, expr.Env(Env{}))
|
||||
|
||||
program, err := expr.Compile(key, opts...)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
output, err := expr.Run(program, Env{Config: cfg})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
switch cfg.Cscli.Output {
|
||||
case "human", "raw":
|
||||
// Don't use litter for strings, it adds quotes
|
||||
// that would break compatibility with previous versions
|
||||
switch output.(type) {
|
||||
case string:
|
||||
fmt.Println(output)
|
||||
default:
|
||||
litter.Dump(output)
|
||||
}
|
||||
case "json":
|
||||
data, err := json.MarshalIndent(output, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal configuration: %w", err)
|
||||
}
|
||||
|
||||
fmt.Println(string(data))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
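showKey compiles the --key argument as an expr expression evaluated against an Env wrapping the loaded configuration (for example `cscli config show --key Config.API.Server.ListenURI`). A reduced, self-contained sketch of the same mechanism over a toy struct:

```go
package main

import (
	"fmt"

	"github.com/antonmedv/expr"
)

type Server struct {
	ListenURI string
}

type Config struct {
	Server *Server
}

type Env struct {
	Config *Config
}

func main() {
	cfg := &Config{Server: &Server{ListenURI: "127.0.0.1:8080"}}

	// compile against the Env type, then run against a concrete value
	program, err := expr.Compile("Config.Server.ListenURI", expr.Env(Env{}))
	if err != nil {
		panic(err)
	}

	output, err := expr.Run(program, Env{Config: cfg})
	if err != nil {
		panic(err)
	}

	fmt.Println(output) // 127.0.0.1:8080
}
```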
|
||||
|
||||
func (cli *cliConfig) template() string {
|
||||
return `Global:
|
||||
|
||||
{{- if .ConfigPaths }}
|
||||
- Configuration Folder : {{.ConfigPaths.ConfigDir}}
|
||||
- Data Folder : {{.ConfigPaths.DataDir}}
|
||||
- Hub Folder : {{.ConfigPaths.HubDir}}
|
||||
- Simulation File : {{.ConfigPaths.SimulationFilePath}}
|
||||
{{- end }}
|
||||
|
||||
{{- if .Common }}
|
||||
- Log Folder : {{.Common.LogDir}}
|
||||
- Log level : {{.Common.LogLevel}}
|
||||
- Log Media : {{.Common.LogMedia}}
|
||||
{{- end }}
|
||||
|
||||
{{- if .Crowdsec }}
|
||||
Crowdsec{{if and .Crowdsec.Enable (not (ValueBool .Crowdsec.Enable))}} (disabled){{end}}:
|
||||
- Acquisition File : {{.Crowdsec.AcquisitionFilePath}}
|
||||
- Parsers routines : {{.Crowdsec.ParserRoutinesCount}}
|
||||
{{- if .Crowdsec.AcquisitionDirPath }}
|
||||
- Acquisition Folder : {{.Crowdsec.AcquisitionDirPath}}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{- if .Cscli }}
|
||||
cscli:
|
||||
- Output : {{.Cscli.Output}}
|
||||
- Hub Branch : {{.Cscli.HubBranch}}
|
||||
{{- end }}
|
||||
|
||||
{{- if .API }}
|
||||
{{- if .API.Client }}
|
||||
API Client:
|
||||
{{- if .API.Client.Credentials }}
|
||||
- URL : {{.API.Client.Credentials.URL}}
|
||||
- Login : {{.API.Client.Credentials.Login}}
|
||||
{{- end }}
|
||||
- Credentials File : {{.API.Client.CredentialsFilePath}}
|
||||
{{- end }}
|
||||
|
||||
{{- if .API.Server }}
|
||||
Local API Server{{if and .API.Server.Enable (not (ValueBool .API.Server.Enable))}} (disabled){{end}}:
|
||||
- Listen URL : {{.API.Server.ListenURI}}
|
||||
- Listen Socket : {{.API.Server.ListenSocket}}
|
||||
- Profile File : {{.API.Server.ProfilesPath}}
|
||||
|
||||
{{- if .API.Server.TLS }}
|
||||
{{- if .API.Server.TLS.CertFilePath }}
|
||||
- Cert File : {{.API.Server.TLS.CertFilePath}}
|
||||
{{- end }}
|
||||
|
||||
{{- if .API.Server.TLS.KeyFilePath }}
|
||||
- Key File : {{.API.Server.TLS.KeyFilePath}}
|
||||
{{- end }}
|
||||
|
||||
{{- if .API.Server.TLS.CACertPath }}
|
||||
- CA Cert : {{.API.Server.TLS.CACertPath}}
|
||||
{{- end }}
|
||||
|
||||
{{- if .API.Server.TLS.CRLPath }}
|
||||
- CRL : {{.API.Server.TLS.CRLPath}}
|
||||
{{- end }}
|
||||
|
||||
{{- if .API.Server.TLS.CacheExpiration }}
|
||||
- Cache Expiration : {{.API.Server.TLS.CacheExpiration}}
|
||||
{{- end }}
|
||||
|
||||
{{- if .API.Server.TLS.ClientVerification }}
|
||||
- Client Verification : {{.API.Server.TLS.ClientVerification}}
|
||||
{{- end }}
|
||||
|
||||
{{- if .API.Server.TLS.AllowedAgentsOU }}
|
||||
{{- range .API.Server.TLS.AllowedAgentsOU }}
|
||||
- Allowed Agents OU : {{.}}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{- if .API.Server.TLS.AllowedBouncersOU }}
|
||||
{{- range .API.Server.TLS.AllowedBouncersOU }}
|
||||
- Allowed Bouncers OU : {{.}}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
- Trusted IPs:
|
||||
{{- range .API.Server.TrustedIPs }}
|
||||
- {{.}}
|
||||
{{- end }}
|
||||
|
||||
{{- if and .API.Server.OnlineClient .API.Server.OnlineClient.Credentials }}
|
||||
Central API:
|
||||
- URL : {{.API.Server.OnlineClient.Credentials.URL}}
|
||||
- Login : {{.API.Server.OnlineClient.Credentials.Login}}
|
||||
- Credentials File : {{.API.Server.OnlineClient.CredentialsFilePath}}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
|
||||
{{- if .DbConfig }}
|
||||
- Database:
|
||||
- Type : {{.DbConfig.Type}}
|
||||
{{- if eq .DbConfig.Type "sqlite" }}
|
||||
- Path : {{.DbConfig.DbPath}}
|
||||
{{- else}}
|
||||
- Host : {{.DbConfig.Host}}
|
||||
- Port : {{.DbConfig.Port}}
|
||||
- User : {{.DbConfig.User}}
|
||||
- DB Name : {{.DbConfig.DbName}}
|
||||
{{- end }}
|
||||
{{- if .DbConfig.MaxOpenConns }}
|
||||
- Max Open Conns : {{.DbConfig.MaxOpenConns}}
|
||||
{{- end }}
|
||||
{{- if ne .DbConfig.DecisionBulkSize 0 }}
|
||||
- Decision Bulk Size : {{.DbConfig.DecisionBulkSize}}
|
||||
{{- end }}
|
||||
{{- if .DbConfig.Flush }}
|
||||
{{- if .DbConfig.Flush.MaxAge }}
|
||||
- Flush age : {{.DbConfig.Flush.MaxAge}}
|
||||
{{- end }}
|
||||
{{- if .DbConfig.Flush.MaxItems }}
|
||||
- Flush size : {{.DbConfig.Flush.MaxItems}}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
{{- end }}
|
||||
`
|
||||
}
|
||||
|
||||
func (cli *cliConfig) show() error {
|
||||
cfg := cli.cfg()
|
||||
|
||||
switch cfg.Cscli.Output {
|
||||
case "human":
|
||||
// The tests on .Enable look funny because the option has a true default which has
|
||||
// not been set yet (we don't really load the LAPI) and go templates don't dereference
|
||||
// pointers in boolean tests. Prefix notation is the cherry on top.
|
||||
funcs := template.FuncMap{
|
||||
// can't use generics here
|
||||
"ValueBool": func(b *bool) bool { return b != nil && *b },
|
||||
}
|
||||
|
||||
tmp, err := template.New("config").Funcs(funcs).Parse(cli.template())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = tmp.Execute(os.Stdout, cfg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case "json":
|
||||
data, err := json.MarshalIndent(cfg, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal configuration: %w", err)
|
||||
}
|
||||
|
||||
fmt.Println(string(data))
|
||||
case "raw":
|
||||
data, err := yaml.Marshal(cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal configuration: %w", err)
|
||||
}
|
||||
|
||||
fmt.Println(string(data))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
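As the comment in show() explains, text/template does not dereference *bool in boolean tests, hence the ValueBool helper in the FuncMap. A small standalone example of the same trick, using a toy struct rather than csconfig:

```go
package main

import (
	"os"
	"text/template"
)

type LAPI struct {
	Enable *bool
}

func main() {
	funcs := template.FuncMap{
		// returns false for a nil pointer, the pointee otherwise
		"ValueBool": func(b *bool) bool { return b != nil && *b },
	}

	tmpl := template.Must(template.New("demo").Funcs(funcs).Parse(
		`Local API Server{{if and .Enable (not (ValueBool .Enable))}} (disabled){{end}}:
`))

	disabled := false
	// prints "Local API Server (disabled):" because the pointer is set but false
	_ = tmpl.Execute(os.Stdout, LAPI{Enable: &disabled})
}
```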
|
||||
|
||||
func (cli *cliConfig) newShowCmd() *cobra.Command {
|
||||
var key string
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "show",
|
||||
Short: "Displays current config",
|
||||
Long: `Displays the current cli configuration.`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
if err := cli.cfg().LoadAPIClient(); err != nil {
|
||||
log.Errorf("failed to load API client configuration: %s", err)
|
||||
// don't return, we can still show the configuration
|
||||
}
|
||||
|
||||
if key != "" {
|
||||
return cli.showKey(key)
|
||||
}
|
||||
|
||||
return cli.show()
|
||||
},
|
||||
}
|
||||
|
||||
flags := cmd.Flags()
|
||||
flags.StringVarP(&key, "key", "", "", "Display only this value (Config.API.Server.ListenURI)")
|
||||
|
||||
return cmd
|
||||
}
|
26  cmd/crowdsec-cli/config_showyaml.go  Normal file
|
@@ -0,0 +1,26 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func (cli *cliConfig) showYAML() error {
|
||||
fmt.Println(mergedConfig)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliConfig) newShowYAMLCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "show-yaml",
|
||||
Short: "Displays merged config.yaml + config.yaml.local",
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.showYAML()
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
|
@@ -6,57 +6,74 @@ import (
|
|||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"net/url"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/fatih/color"
|
||||
"github.com/go-openapi/strfmt"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/crowdsecurity/go-cs-lib/ptr"
|
||||
"github.com/crowdsecurity/go-cs-lib/version"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwversion"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
"github.com/enescakir/emoji"
|
||||
"github.com/go-openapi/strfmt"
|
||||
"github.com/olekukonko/tablewriter"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func NewConsoleCmd() *cobra.Command {
|
||||
var cmdConsole = &cobra.Command{
|
||||
type cliConsole struct {
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
func NewCLIConsole(cfg configGetter) *cliConsole {
|
||||
return &cliConsole{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliConsole) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "console [action]",
|
||||
Short: "Manage interaction with Crowdsec console (https://app.crowdsec.net)",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
|
||||
if err := csConfig.LoadAPIServer(); err != nil || csConfig.DisableAPI {
|
||||
var fdErr *fs.PathError
|
||||
if errors.As(err, &fdErr) {
|
||||
log.Fatalf("Unable to load Local API : %s", fdErr)
|
||||
}
|
||||
if err != nil {
|
||||
log.Fatalf("Unable to load required Local API Configuration : %s", err)
|
||||
}
|
||||
log.Fatal("Local API is disabled, please run this command on the local API machine")
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
if err := require.LAPI(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
if csConfig.DisableAPI {
|
||||
log.Fatal("Local API is disabled, please run this command on the local API machine")
|
||||
if err := require.CAPI(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
if csConfig.API.Server.OnlineClient == nil {
|
||||
log.Fatalf("No configuration for Central API (CAPI) in '%s'", *csConfig.FilePath)
|
||||
}
|
||||
if csConfig.API.Server.OnlineClient.Credentials == nil {
|
||||
log.Fatal("You must configure Central API (CAPI) with `cscli capi register` before enrolling your instance")
|
||||
if err := require.CAPIRegistered(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmd.AddCommand(cli.newEnrollCmd())
|
||||
cmd.AddCommand(cli.newEnableCmd())
|
||||
cmd.AddCommand(cli.newDisableCmd())
|
||||
cmd.AddCommand(cli.newStatusCmd())
|
||||
|
||||
return cmd
|
||||
}
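PersistentPreRunE lets the console command group validate its prerequisites once (LAPI, CAPI and CAPI registration via the require helpers) before any subcommand runs, instead of calling log.Fatal in each handler. A minimal, generic sketch of the gating pattern with a hypothetical precondition check:

```go
package main

import (
	"errors"
	"fmt"

	"github.com/spf13/cobra"
)

// requireSomething is a stand-in for the require.* helpers used above.
func requireSomething(ok bool) error {
	if !ok {
		return errors.New("prerequisite not met, run this on the local API machine")
	}
	return nil
}

func newGroupCmd(ready bool) *cobra.Command {
	group := &cobra.Command{
		Use: "group",
		// runs before every subcommand of this group
		PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
			return requireSomething(ready)
		},
	}

	group.AddCommand(&cobra.Command{
		Use: "status",
		RunE: func(_ *cobra.Command, _ []string) error {
			fmt.Println("all prerequisites satisfied")
			return nil
		},
	})

	return group
}

func main() {
	cmd := newGroupCmd(true)
	cmd.SetArgs([]string{"status"})
	_ = cmd.Execute()
}
```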
|
||||
|
||||
func (cli *cliConsole) newEnrollCmd() *cobra.Command {
|
||||
name := ""
|
||||
overwrite := false
|
||||
tags := []string{}
|
||||
opts := []string{}
|
||||
|
||||
cmdEnroll := &cobra.Command{
|
||||
cmd := &cobra.Command{
|
||||
Use: "enroll [enroll-key]",
|
||||
Short: "Enroll this instance to https://app.crowdsec.net [requires local API]",
|
||||
Long: `
|
||||
|
@@ -64,242 +81,361 @@ Enroll this instance to https://app.crowdsec.net
|
|||
|
||||
You can get your enrollment key by creating an account on https://app.crowdsec.net.
|
||||
After running this command your will need to validate the enrollment in the webapp.`,
|
||||
Example: `cscli console enroll YOUR-ENROLL-KEY
|
||||
Example: fmt.Sprintf(`cscli console enroll YOUR-ENROLL-KEY
|
||||
cscli console enroll --name [instance_name] YOUR-ENROLL-KEY
|
||||
cscli console enroll --name [instance_name] --tags [tag_1] --tags [tag_2] YOUR-ENROLL-KEY
|
||||
`,
|
||||
cscli console enroll --enable context,manual YOUR-ENROLL-KEY
|
||||
|
||||
valid options are : %s,all (see 'cscli console status' for details)`, strings.Join(csconfig.CONSOLE_CONFIGS, ",")),
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
password := strfmt.Password(csConfig.API.Server.OnlineClient.Credentials.Password)
|
||||
apiURL, err := url.Parse(csConfig.API.Server.OnlineClient.Credentials.URL)
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
cfg := cli.cfg()
|
||||
password := strfmt.Password(cfg.API.Server.OnlineClient.Credentials.Password)
|
||||
|
||||
apiURL, err := url.Parse(cfg.API.Server.OnlineClient.Credentials.URL)
|
||||
if err != nil {
|
||||
log.Fatalf("Could not parse CAPI URL : %s", err)
|
||||
return fmt.Errorf("could not parse CAPI URL: %w", err)
|
||||
}
|
||||
|
||||
if err := csConfig.LoadHub(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
|
||||
if err := cwhub.GetHubIdx(csConfig.Hub); err != nil {
|
||||
log.Fatalf("Failed to load hub index : %s", err)
|
||||
log.Infoln("Run 'sudo cscli hub update' to get the hub index")
|
||||
}
|
||||
|
||||
scenarios, err := cwhub.GetInstalledScenariosAsString()
|
||||
hub, err := require.Hub(cfg, nil, nil)
|
||||
if err != nil {
|
||||
log.Fatalf("failed to get scenarios : %s", err.Error())
|
||||
return err
|
||||
}
|
||||
|
||||
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get installed scenarios: %w", err)
|
||||
}
|
||||
|
||||
if len(scenarios) == 0 {
|
||||
scenarios = make([]string, 0)
|
||||
}
|
||||
|
||||
enableOpts := []string{csconfig.SEND_MANUAL_SCENARIOS, csconfig.SEND_TAINTED_SCENARIOS}
|
||||
if len(opts) != 0 {
|
||||
for _, opt := range opts {
|
||||
valid := false
|
||||
if opt == "all" {
|
||||
enableOpts = csconfig.CONSOLE_CONFIGS
|
||||
break
|
||||
}
|
||||
for _, availableOpt := range csconfig.CONSOLE_CONFIGS {
|
||||
if opt == availableOpt {
|
||||
valid = true
|
||||
enable := true
|
||||
for _, enabledOpt := range enableOpts {
|
||||
if opt == enabledOpt {
|
||||
enable = false
|
||||
continue
|
||||
}
|
||||
}
|
||||
if enable {
|
||||
enableOpts = append(enableOpts, opt)
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
if !valid {
|
||||
return fmt.Errorf("option %s doesn't exist", opt)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
c, _ := apiclient.NewClient(&apiclient.Config{
|
||||
MachineID: csConfig.API.Server.OnlineClient.Credentials.Login,
|
||||
MachineID: cli.cfg().API.Server.OnlineClient.Credentials.Login,
|
||||
Password: password,
|
||||
Scenarios: scenarios,
|
||||
UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()),
|
||||
UserAgent: fmt.Sprintf("crowdsec/%s", version.String()),
|
||||
URL: apiURL,
|
||||
VersionPrefix: "v2",
|
||||
VersionPrefix: "v3",
|
||||
})
|
||||
|
||||
resp, err := c.Auth.EnrollWatcher(context.Background(), args[0], name, tags, overwrite)
|
||||
if err != nil {
|
||||
log.Fatalf("Could not enroll instance: %s", err)
|
||||
}
|
||||
if resp.Response.StatusCode == 200 && !overwrite {
|
||||
log.Warningf("Instance already enrolled. You can use '--overwrite' to force enroll")
|
||||
return
|
||||
return fmt.Errorf("could not enroll instance: %w", err)
|
||||
}
|
||||
|
||||
SetConsoleOpts(csconfig.CONSOLE_CONFIGS, true)
|
||||
if err := csConfig.API.Server.DumpConsoleConfig(); err != nil {
|
||||
log.Fatalf("failed writing console config : %s", err)
|
||||
if resp.Response.StatusCode == 200 && !overwrite {
|
||||
log.Warning("Instance already enrolled. You can use '--overwrite' to force enroll")
|
||||
return nil
|
||||
}
|
||||
log.Infof("Enabled tainted&manual alerts sharing, see 'cscli console status'.")
|
||||
log.Infof("Watcher successfully enrolled. Visit https://app.crowdsec.net to accept it.")
|
||||
log.Infof("Please restart crowdsec after accepting the enrollment.")
|
||||
|
||||
if err := cli.setConsoleOpts(enableOpts, true); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, opt := range enableOpts {
|
||||
log.Infof("Enabled %s : %s", opt, csconfig.CONSOLE_CONFIGS_HELP[opt])
|
||||
}
|
||||
|
||||
log.Info("Watcher successfully enrolled. Visit https://app.crowdsec.net to accept it.")
|
||||
log.Info("Please restart crowdsec after accepting the enrollment.")
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdEnroll.Flags().StringVarP(&name, "name", "n", "", "Name to display in the console")
|
||||
cmdEnroll.Flags().BoolVarP(&overwrite, "overwrite", "", false, "Force enroll the instance")
|
||||
cmdEnroll.Flags().StringSliceVarP(&tags, "tags", "t", tags, "Tags to display in the console")
|
||||
cmdConsole.AddCommand(cmdEnroll)
|
||||
|
||||
var enableAll, disableAll bool
|
||||
flags := cmd.Flags()
|
||||
flags.StringVarP(&name, "name", "n", "", "Name to display in the console")
|
||||
flags.BoolVarP(&overwrite, "overwrite", "", false, "Force enroll the instance")
|
||||
flags.StringSliceVarP(&tags, "tags", "t", tags, "Tags to display in the console")
|
||||
flags.StringSliceVarP(&opts, "enable", "e", opts, "Enable console options")
|
||||
|
||||
cmdEnable := &cobra.Command{
|
||||
Use: "enable [feature-flag]",
|
||||
Short: "Enable a feature flag",
|
||||
Example: "enable alerts-tainted",
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliConsole) newEnableCmd() *cobra.Command {
|
||||
var enableAll bool
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "enable [option]",
|
||||
Short: "Enable a console option",
|
||||
Example: "sudo cscli console enable tainted",
|
||||
Long: `
|
||||
Enable given information push to the central API. Allows to empower the console`,
|
||||
ValidArgs: csconfig.CONSOLE_CONFIGS,
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
if enableAll {
|
||||
SetConsoleOpts(csconfig.CONSOLE_CONFIGS, true)
|
||||
if err := cli.setConsoleOpts(csconfig.CONSOLE_CONFIGS, true); err != nil {
|
||||
return err
|
||||
}
|
||||
log.Infof("All features have been enabled successfully")
|
||||
} else {
|
||||
if len(args) == 0 {
|
||||
log.Fatalf("You must specify at least one feature to enable")
|
||||
return errors.New("you must specify at least one feature to enable")
|
||||
}
|
||||
if err := cli.setConsoleOpts(args, true); err != nil {
|
||||
return err
|
||||
}
|
||||
SetConsoleOpts(args, true)
|
||||
log.Infof("%v have been enabled", args)
|
||||
}
|
||||
if err := csConfig.API.Server.DumpConsoleConfig(); err != nil {
|
||||
log.Fatalf("failed writing console config : %s", err)
|
||||
}
|
||||
|
||||
log.Infof(ReloadMessage())
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdEnable.Flags().BoolVarP(&enableAll, "all", "a", false, "Enable all feature flags")
|
||||
cmdConsole.AddCommand(cmdEnable)
|
||||
cmd.Flags().BoolVarP(&enableAll, "all", "a", false, "Enable all console options")
|
||||
|
||||
cmdDisable := &cobra.Command{
|
||||
Use: "disable [feature-flag]",
|
||||
Short: "Disable a feature flag",
|
||||
Example: "disable alerts-tainted",
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliConsole) newDisableCmd() *cobra.Command {
|
||||
var disableAll bool
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "disable [option]",
|
||||
Short: "Disable a console option",
|
||||
Example: "sudo cscli console disable tainted",
|
||||
Long: `
|
||||
Disable given information push to the central API.`,
|
||||
ValidArgs: csconfig.CONSOLE_CONFIGS,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if disableAll {
|
||||
SetConsoleOpts(csconfig.CONSOLE_CONFIGS, false)
|
||||
} else {
|
||||
SetConsoleOpts(args, false)
|
||||
}
|
||||
|
||||
if err := csConfig.API.Server.DumpConsoleConfig(); err != nil {
|
||||
log.Fatalf("failed writing console config : %s", err)
|
||||
}
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
if disableAll {
|
||||
if err := cli.setConsoleOpts(csconfig.CONSOLE_CONFIGS, false); err != nil {
|
||||
return err
|
||||
}
|
||||
log.Infof("All features have been disabled")
|
||||
} else {
|
||||
if err := cli.setConsoleOpts(args, false); err != nil {
|
||||
return err
|
||||
}
|
||||
log.Infof("%v have been disabled", args)
|
||||
}
|
||||
|
||||
log.Infof(ReloadMessage())
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdDisable.Flags().BoolVarP(&disableAll, "all", "a", false, "Enable all feature flags")
|
||||
cmdConsole.AddCommand(cmdDisable)
|
||||
cmd.Flags().BoolVarP(&disableAll, "all", "a", false, "Disable all console options")
|
||||
|
||||
cmdConsoleStatus := &cobra.Command{
|
||||
Use: "status [feature-flag]",
|
||||
Short: "Shows status of one or all feature flags",
|
||||
Example: "status alerts-tainted",
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliConsole) newStatusCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "status",
|
||||
Short: "Shows status of the console options",
|
||||
Example: `sudo cscli console status`,
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
switch csConfig.Cscli.Output {
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
consoleCfg := cfg.API.Server.ConsoleConfig
|
||||
switch cfg.Cscli.Output {
|
||||
case "human":
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
|
||||
table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetHeader([]string{"Option Name", "Activated", "Description"})
|
||||
for _, option := range csconfig.CONSOLE_CONFIGS {
|
||||
switch option {
|
||||
case csconfig.SEND_CUSTOM_SCENARIOS:
|
||||
activated := string(emoji.CrossMark)
|
||||
if *csConfig.API.Server.ConsoleConfig.ShareCustomScenarios {
|
||||
activated = string(emoji.CheckMarkButton)
|
||||
}
|
||||
table.Append([]string{option, activated, "Send alerts from custom scenarios to the console"})
|
||||
case csconfig.SEND_MANUAL_SCENARIOS:
|
||||
activated := string(emoji.CrossMark)
|
||||
if *csConfig.API.Server.ConsoleConfig.ShareManualDecisions {
|
||||
activated = string(emoji.CheckMarkButton)
|
||||
}
|
||||
table.Append([]string{option, activated, "Send manual decisions to the console"})
|
||||
case csconfig.SEND_TAINTED_SCENARIOS:
|
||||
activated := string(emoji.CrossMark)
|
||||
if *csConfig.API.Server.ConsoleConfig.ShareTaintedScenarios {
|
||||
activated = string(emoji.CheckMarkButton)
|
||||
}
|
||||
table.Append([]string{option, activated, "Send alerts from tainted scenarios to the console"})
|
||||
}
|
||||
}
|
||||
table.Render()
|
||||
cmdConsoleStatusTable(color.Output, *consoleCfg)
|
||||
case "json":
|
||||
data, err := json.MarshalIndent(csConfig.API.Server.ConsoleConfig, "", " ")
|
||||
if err != nil {
|
||||
log.Fatalf("failed to marshal configuration: %s", err)
|
||||
out := map[string](*bool){
|
||||
csconfig.SEND_MANUAL_SCENARIOS: consoleCfg.ShareManualDecisions,
|
||||
csconfig.SEND_CUSTOM_SCENARIOS: consoleCfg.ShareCustomScenarios,
|
||||
csconfig.SEND_TAINTED_SCENARIOS: consoleCfg.ShareTaintedScenarios,
|
||||
csconfig.SEND_CONTEXT: consoleCfg.ShareContext,
|
||||
csconfig.CONSOLE_MANAGEMENT: consoleCfg.ConsoleManagement,
|
||||
}
|
||||
fmt.Printf("%s\n", string(data))
|
||||
data, err := json.MarshalIndent(out, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal configuration: %w", err)
|
||||
}
|
||||
fmt.Println(string(data))
|
||||
case "raw":
|
||||
csvwriter := csv.NewWriter(os.Stdout)
|
||||
err := csvwriter.Write([]string{"option", "enabled"})
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
return err
|
||||
}
|
||||
|
||||
rows := [][]string{
|
||||
{"share_manual_decisions", fmt.Sprintf("%t", *csConfig.API.Server.ConsoleConfig.ShareManualDecisions)},
|
||||
{"share_custom", fmt.Sprintf("%t", *csConfig.API.Server.ConsoleConfig.ShareCustomScenarios)},
|
||||
{"share_tainted", fmt.Sprintf("%t", *csConfig.API.Server.ConsoleConfig.ShareTaintedScenarios)},
|
||||
{csconfig.SEND_MANUAL_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareManualDecisions)},
|
||||
{csconfig.SEND_CUSTOM_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareCustomScenarios)},
|
||||
{csconfig.SEND_TAINTED_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareTaintedScenarios)},
|
||||
{csconfig.SEND_CONTEXT, strconv.FormatBool(*consoleCfg.ShareContext)},
|
||||
{csconfig.CONSOLE_MANAGEMENT, strconv.FormatBool(*consoleCfg.ConsoleManagement)},
|
||||
}
|
||||
for _, row := range rows {
|
||||
err = csvwriter.Write(row)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
return err
|
||||
}
|
||||
}
|
||||
csvwriter.Flush()
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmdConsole.AddCommand(cmdConsoleStatus)
|
||||
return cmdConsole
|
||||
return cmd
|
||||
}
|
||||
|
||||
func SetConsoleOpts(args []string, wanted bool) {
|
||||
func (cli *cliConsole) dumpConfig() error {
|
||||
serverCfg := cli.cfg().API.Server
|
||||
|
||||
out, err := yaml.Marshal(serverCfg.ConsoleConfig)
|
||||
if err != nil {
|
||||
return fmt.Errorf("while marshaling ConsoleConfig (for %s): %w", serverCfg.ConsoleConfigPath, err)
|
||||
}
|
||||
|
||||
if serverCfg.ConsoleConfigPath == "" {
|
||||
serverCfg.ConsoleConfigPath = csconfig.DefaultConsoleConfigFilePath
|
||||
log.Debugf("Empty console_path, defaulting to %s", serverCfg.ConsoleConfigPath)
|
||||
}
|
||||
|
||||
if err := os.WriteFile(serverCfg.ConsoleConfigPath, out, 0o600); err != nil {
|
||||
return fmt.Errorf("while dumping console config to %s: %w", serverCfg.ConsoleConfigPath, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliConsole) setConsoleOpts(args []string, wanted bool) error {
|
||||
cfg := cli.cfg()
|
||||
consoleCfg := cfg.API.Server.ConsoleConfig
|
||||
|
||||
for _, arg := range args {
|
||||
switch arg {
|
||||
case csconfig.CONSOLE_MANAGEMENT:
|
||||
/*for each flag check if it's already set before setting it*/
|
||||
if consoleCfg.ConsoleManagement != nil {
|
||||
if *consoleCfg.ConsoleManagement == wanted {
|
||||
log.Debugf("%s already set to %t", csconfig.CONSOLE_MANAGEMENT, wanted)
|
||||
} else {
|
||||
log.Infof("%s set to %t", csconfig.CONSOLE_MANAGEMENT, wanted)
|
||||
*consoleCfg.ConsoleManagement = wanted
|
||||
}
|
||||
} else {
|
||||
log.Infof("%s set to %t", csconfig.CONSOLE_MANAGEMENT, wanted)
|
||||
consoleCfg.ConsoleManagement = ptr.Of(wanted)
|
||||
}
|
||||
|
||||
if cfg.API.Server.OnlineClient.Credentials != nil {
|
||||
changed := false
|
||||
if wanted && cfg.API.Server.OnlineClient.Credentials.PapiURL == "" {
|
||||
changed = true
|
||||
cfg.API.Server.OnlineClient.Credentials.PapiURL = types.PAPIBaseURL
|
||||
} else if !wanted && cfg.API.Server.OnlineClient.Credentials.PapiURL != "" {
|
||||
changed = true
|
||||
cfg.API.Server.OnlineClient.Credentials.PapiURL = ""
|
||||
}
|
||||
|
||||
if changed {
|
||||
fileContent, err := yaml.Marshal(cfg.API.Server.OnlineClient.Credentials)
|
||||
if err != nil {
|
||||
return fmt.Errorf("cannot marshal credentials: %w", err)
|
||||
}
|
||||
|
||||
log.Infof("Updating credentials file: %s", cfg.API.Server.OnlineClient.CredentialsFilePath)
|
||||
|
||||
err = os.WriteFile(cfg.API.Server.OnlineClient.CredentialsFilePath, fileContent, 0o600)
|
||||
if err != nil {
|
||||
return fmt.Errorf("cannot write credentials file: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
case csconfig.SEND_CUSTOM_SCENARIOS:
|
||||
/*for each flag check if it's already set before setting it*/
|
||||
if csConfig.API.Server.ConsoleConfig.ShareCustomScenarios != nil {
|
||||
if *csConfig.API.Server.ConsoleConfig.ShareCustomScenarios == wanted {
|
||||
log.Infof("%s already set to %t", csconfig.SEND_CUSTOM_SCENARIOS, wanted)
|
||||
if consoleCfg.ShareCustomScenarios != nil {
|
||||
if *consoleCfg.ShareCustomScenarios == wanted {
|
||||
log.Debugf("%s already set to %t", csconfig.SEND_CUSTOM_SCENARIOS, wanted)
|
||||
} else {
|
||||
log.Infof("%s set to %t", csconfig.SEND_CUSTOM_SCENARIOS, wanted)
|
||||
*csConfig.API.Server.ConsoleConfig.ShareCustomScenarios = wanted
|
||||
*consoleCfg.ShareCustomScenarios = wanted
|
||||
}
|
||||
} else {
|
||||
log.Infof("%s set to %t", csconfig.SEND_CUSTOM_SCENARIOS, wanted)
|
||||
csConfig.API.Server.ConsoleConfig.ShareCustomScenarios = types.BoolPtr(wanted)
|
||||
consoleCfg.ShareCustomScenarios = ptr.Of(wanted)
|
||||
}
|
||||
case csconfig.SEND_TAINTED_SCENARIOS:
|
||||
/*for each flag check if it's already set before setting it*/
|
||||
if csConfig.API.Server.ConsoleConfig.ShareTaintedScenarios != nil {
|
||||
if *csConfig.API.Server.ConsoleConfig.ShareTaintedScenarios == wanted {
|
||||
log.Infof("%s already set to %t", csconfig.SEND_TAINTED_SCENARIOS, wanted)
|
||||
if consoleCfg.ShareTaintedScenarios != nil {
|
||||
if *consoleCfg.ShareTaintedScenarios == wanted {
|
||||
log.Debugf("%s already set to %t", csconfig.SEND_TAINTED_SCENARIOS, wanted)
|
||||
} else {
|
||||
log.Infof("%s set to %t", csconfig.SEND_TAINTED_SCENARIOS, wanted)
|
||||
*csConfig.API.Server.ConsoleConfig.ShareTaintedScenarios = wanted
|
||||
*consoleCfg.ShareTaintedScenarios = wanted
|
||||
}
|
||||
} else {
|
||||
log.Infof("%s set to %t", csconfig.SEND_TAINTED_SCENARIOS, wanted)
|
||||
csConfig.API.Server.ConsoleConfig.ShareTaintedScenarios = types.BoolPtr(wanted)
|
||||
consoleCfg.ShareTaintedScenarios = ptr.Of(wanted)
|
||||
}
|
||||
case csconfig.SEND_MANUAL_SCENARIOS:
|
||||
/*for each flag check if it's already set before setting it*/
|
||||
if csConfig.API.Server.ConsoleConfig.ShareManualDecisions != nil {
|
||||
if *csConfig.API.Server.ConsoleConfig.ShareManualDecisions == wanted {
|
||||
log.Infof("%s already set to %t", csconfig.SEND_MANUAL_SCENARIOS, wanted)
|
||||
if consoleCfg.ShareManualDecisions != nil {
|
||||
if *consoleCfg.ShareManualDecisions == wanted {
|
||||
log.Debugf("%s already set to %t", csconfig.SEND_MANUAL_SCENARIOS, wanted)
|
||||
} else {
|
||||
log.Infof("%s set to %t", csconfig.SEND_MANUAL_SCENARIOS, wanted)
|
||||
*csConfig.API.Server.ConsoleConfig.ShareManualDecisions = wanted
|
||||
*consoleCfg.ShareManualDecisions = wanted
|
||||
}
|
||||
} else {
|
||||
log.Infof("%s set to %t", csconfig.SEND_MANUAL_SCENARIOS, wanted)
|
||||
csConfig.API.Server.ConsoleConfig.ShareManualDecisions = types.BoolPtr(wanted)
|
||||
consoleCfg.ShareManualDecisions = ptr.Of(wanted)
|
||||
}
|
||||
case csconfig.SEND_CONTEXT:
|
||||
/*for each flag check if it's already set before setting it*/
|
||||
if consoleCfg.ShareContext != nil {
|
||||
if *consoleCfg.ShareContext == wanted {
|
||||
log.Debugf("%s already set to %t", csconfig.SEND_CONTEXT, wanted)
|
||||
} else {
|
||||
log.Infof("%s set to %t", csconfig.SEND_CONTEXT, wanted)
|
||||
*consoleCfg.ShareContext = wanted
|
||||
}
|
||||
} else {
|
||||
log.Infof("%s set to %t", csconfig.SEND_CONTEXT, wanted)
|
||||
consoleCfg.ShareContext = ptr.Of(wanted)
|
||||
}
|
||||
default:
|
||||
log.Fatalf("unknown flag %s", arg)
|
||||
return fmt.Errorf("unknown flag %s", arg)
|
||||
}
|
||||
}
|
||||
|
||||
if err := cli.dumpConfig(); err != nil {
|
||||
return fmt.Errorf("failed writing console config: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
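As a rough illustration of how the enable/disable subcommands above drive these two helpers (a sketch only; everything except setConsoleOpts, dumpConfig and the csconfig constants is hypothetical):

package main

import "github.com/crowdsecurity/crowdsec/pkg/csconfig"

// exampleToggleContext is a hypothetical caller, roughly what
// `cscli console enable context` does through newEnableCmd.
func exampleToggleContext(cli *cliConsole) error {
	// setConsoleOpts flips the flag in cfg.API.Server.ConsoleConfig, adjusts the
	// PAPI URL when console_management is toggled, then persists the result via
	// dumpConfig, which writes the YAML to ConsoleConfigPath with mode 0600.
	return cli.setConsoleOpts([]string{csconfig.SEND_CONTEXT}, true)
}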
cmd/crowdsec-cli/console_table.go (new file, 49 lines)
@@ -0,0 +1,49 @@
package main

import (
	"io"

	"github.com/aquasecurity/table"

	"github.com/crowdsecurity/crowdsec/pkg/csconfig"
	"github.com/crowdsecurity/crowdsec/pkg/emoji"
)

func cmdConsoleStatusTable(out io.Writer, consoleCfg csconfig.ConsoleConfig) {
	t := newTable(out)
	t.SetRowLines(false)

	t.SetHeaders("Option Name", "Activated", "Description")
	t.SetHeaderAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)

	for _, option := range csconfig.CONSOLE_CONFIGS {
		activated := emoji.CrossMark

		switch option {
		case csconfig.SEND_CUSTOM_SCENARIOS:
			if *consoleCfg.ShareCustomScenarios {
				activated = emoji.CheckMarkButton
			}
		case csconfig.SEND_MANUAL_SCENARIOS:
			if *consoleCfg.ShareManualDecisions {
				activated = emoji.CheckMarkButton
			}
		case csconfig.SEND_TAINTED_SCENARIOS:
			if *consoleCfg.ShareTaintedScenarios {
				activated = emoji.CheckMarkButton
			}
		case csconfig.SEND_CONTEXT:
			if *consoleCfg.ShareContext {
				activated = emoji.CheckMarkButton
			}
		case csconfig.CONSOLE_MANAGEMENT:
			if *consoleCfg.ConsoleManagement {
				activated = emoji.CheckMarkButton
			}
		}

		t.AddRow(option, activated, csconfig.CONSOLE_CONFIGS_HELP[option])
	}

	t.Render()
}
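A quick usage sketch for this helper; the ptr import path and the concrete values are assumptions, while the ConsoleConfig field names come from the diff:

package main

import (
	"os"

	"github.com/crowdsecurity/go-cs-lib/ptr"

	"github.com/crowdsecurity/crowdsec/pkg/csconfig"
)

// exampleStatusTable renders the same table the `cscli console status`
// command prints, but from a hand-built configuration.
func exampleStatusTable() {
	cfg := csconfig.ConsoleConfig{
		ShareCustomScenarios:  ptr.Of(true),
		ShareManualDecisions:  ptr.Of(false),
		ShareTaintedScenarios: ptr.Of(true),
		ShareContext:          ptr.Of(false),
		ConsoleManagement:     ptr.Of(false),
	}
	cmdConsoleStatusTable(os.Stdout, cfg)
}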
cmd/crowdsec-cli/copyfile.go (new file, 82 lines)
@@ -0,0 +1,82 @@
package main

import (
	"fmt"
	"io"
	"os"
	"path/filepath"

	log "github.com/sirupsen/logrus"
)

/*help to copy the file, ioutil doesn't offer the feature*/

func copyFileContents(src, dst string) (err error) {
	in, err := os.Open(src)
	if err != nil {
		return
	}
	defer in.Close()

	out, err := os.Create(dst)
	if err != nil {
		return
	}

	defer func() {
		cerr := out.Close()
		if err == nil {
			err = cerr
		}
	}()

	if _, err = io.Copy(out, in); err != nil {
		return
	}

	err = out.Sync()

	return
}

/*copy the file, ioutil doesn't offer the feature*/
func CopyFile(sourceSymLink, destinationFile string) error {
	sourceFile, err := filepath.EvalSymlinks(sourceSymLink)
	if err != nil {
		log.Infof("Not a symlink : %s", err)

		sourceFile = sourceSymLink
	}

	sourceFileStat, err := os.Stat(sourceFile)
	if err != nil {
		return err
	}

	if !sourceFileStat.Mode().IsRegular() {
		// cannot copy non-regular files (e.g., directories,
		// symlinks, devices, etc.)
		return fmt.Errorf("copyFile: non-regular source file %s (%q)", sourceFileStat.Name(), sourceFileStat.Mode().String())
	}

	destinationFileStat, err := os.Stat(destinationFile)
	if err != nil {
		if !os.IsNotExist(err) {
			return err
		}
	} else {
		if !(destinationFileStat.Mode().IsRegular()) {
			return fmt.Errorf("copyFile: non-regular destination file %s (%q)", destinationFileStat.Name(), destinationFileStat.Mode().String())
		}

		if os.SameFile(sourceFileStat, destinationFileStat) {
			return err
		}
	}

	if err = os.Link(sourceFile, destinationFile); err != nil {
		err = copyFileContents(sourceFile, destinationFile)
	}

	return err
}
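A small usage sketch (paths invented for illustration): CopyFile resolves a possible symlink, tries a hard link first, and falls back to copyFileContents when linking fails, e.g. across filesystems:

package main

import log "github.com/sirupsen/logrus"

// exampleBackup is hypothetical; it only shows the call shape of CopyFile.
func exampleBackup() {
	src := "/etc/crowdsec/parsers/s01-parse/sshd-logs.yaml" // may be a symlink into the hub
	dst := "/tmp/sshd-logs.yaml.bak"
	if err := CopyFile(src, dst); err != nil {
		log.Errorf("backup of %s failed: %s", src, err)
	}
}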
@@ -1,29 +1,36 @@
//go:build linux
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/user"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"syscall"
|
||||
"unicode"
|
||||
|
||||
"github.com/AlecAivazis/survey/v2"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/metabase"
|
||||
|
||||
"github.com/pbnjay/memory"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/metabase"
|
||||
)
|
||||
|
||||
var (
|
||||
metabaseUser = "crowdsec@crowdsec.net"
|
||||
metabasePassword string
|
||||
metabaseDbPath string
|
||||
metabaseDBPath string
|
||||
metabaseConfigPath string
|
||||
metabaseConfigFolder = "metabase/"
|
||||
metabaseConfigFile = "metabase.yaml"
|
||||
metabaseImage = "metabase/metabase:v0.46.6.1"
|
||||
/**/
|
||||
metabaseListenAddress = "127.0.0.1"
|
||||
metabaseListenPort = "3000"
|
||||
|
@ -32,12 +39,21 @@ var (
|
|||
|
||||
forceYes bool
|
||||
|
||||
/*informations needed to setup a random password on user's behalf*/
|
||||
// information needed to set up a random password on user's behalf
|
||||
)
|
||||
|
||||
func NewDashboardCmd() *cobra.Command {
|
||||
/* ---- UPDATE COMMAND */
|
||||
var cmdDashboard = &cobra.Command{
|
||||
type cliDashboard struct {
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
func NewCLIDashboard(cfg configGetter) *cliDashboard {
|
||||
return &cliDashboard{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliDashboard) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "dashboard [command]",
|
||||
Short: "Manage your metabase dashboard container [requires local API]",
|
||||
Long: `Install/Start/Stop/Remove a metabase container exposing dashboard and metrics.
|
||||
|
@ -51,23 +67,24 @@ cscli dashboard start
|
|||
cscli dashboard stop
|
||||
cscli dashboard remove
|
||||
`,
|
||||
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
if err := require.LAPI(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := metabase.TestAvailability(); err != nil {
|
||||
log.Fatalf("%s", err)
|
||||
return err
|
||||
}
|
||||
|
||||
if err := csConfig.LoadAPIServer(); err != nil || csConfig.DisableAPI {
|
||||
log.Fatal("Local API is disabled, please run this command on the local API machine")
|
||||
}
|
||||
|
||||
metabaseConfigFolderPath := filepath.Join(csConfig.ConfigPaths.ConfigDir, metabaseConfigFolder)
|
||||
metabaseConfigFolderPath := filepath.Join(cfg.ConfigPaths.ConfigDir, metabaseConfigFolder)
|
||||
metabaseConfigPath = filepath.Join(metabaseConfigFolderPath, metabaseConfigFile)
|
||||
if err := os.MkdirAll(metabaseConfigFolderPath, os.ModePerm); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
return err
|
||||
}
|
||||
if err := csConfig.LoadDBConfig(); err != nil {
|
||||
log.Errorf("This command requires direct database access (must be run on the local API machine)")
|
||||
log.Fatalf(err.Error())
|
||||
|
||||
if err := require.DB(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -81,11 +98,24 @@ cscli dashboard remove
|
|||
metabaseContainerID = oldContainerID
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmd.AddCommand(cli.newSetupCmd())
|
||||
cmd.AddCommand(cli.newStartCmd())
|
||||
cmd.AddCommand(cli.newStopCmd())
|
||||
cmd.AddCommand(cli.newShowPasswordCmd())
|
||||
cmd.AddCommand(cli.newRemoveCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliDashboard) newSetupCmd() *cobra.Command {
|
||||
var force bool
|
||||
var cmdDashSetup = &cobra.Command{
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "setup",
|
||||
Short: "Setup a metabase container.",
|
||||
Long: `Perform a metabase docker setup, download standard dashboards, create a fresh user and start the container`,
|
||||
|
@ -96,9 +126,9 @@ cscli dashboard setup
|
|||
cscli dashboard setup --listen 0.0.0.0
|
||||
cscli dashboard setup -l 0.0.0.0 -p 443 --password <password>
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if metabaseDbPath == "" {
|
||||
metabaseDbPath = csConfig.ConfigPaths.DataDir
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
if metabaseDBPath == "" {
|
||||
metabaseDBPath = cli.cfg().ConfigPaths.DataDir
|
||||
}
|
||||
|
||||
if metabasePassword == "" {
|
||||
|
@ -108,54 +138,26 @@ cscli dashboard setup -l 0.0.0.0 -p 443 --password <password>
|
|||
isValid = passwordIsValid(metabasePassword)
|
||||
}
|
||||
}
|
||||
var answer bool
|
||||
groupExist := false
|
||||
dockerGroup, err := user.LookupGroup(crowdsecGroup)
|
||||
if err == nil {
|
||||
groupExist = true
|
||||
if err := checkSystemMemory(&forceYes); err != nil {
|
||||
return err
|
||||
}
|
||||
if !forceYes && !groupExist {
|
||||
prompt := &survey.Confirm{
|
||||
Message: fmt.Sprintf("For metabase docker to be able to access SQLite file we need to add a new group called '%s' to the system, is it ok for you ?", crowdsecGroup),
|
||||
Default: true,
|
||||
}
|
||||
if err := survey.AskOne(prompt, &answer); err != nil {
|
||||
log.Fatalf("unable to ask to force: %s", err)
|
||||
}
|
||||
warnIfNotLoopback(metabaseListenAddress)
|
||||
if err := disclaimer(&forceYes); err != nil {
|
||||
return err
|
||||
}
|
||||
if !answer && !forceYes && !groupExist {
|
||||
log.Fatalf("unable to continue without creating '%s' group", crowdsecGroup)
|
||||
}
|
||||
if !groupExist {
|
||||
groupAddCmd, err := exec.LookPath("groupadd")
|
||||
if err != nil {
|
||||
log.Fatalf("unable to find 'groupadd' command, can't continue")
|
||||
}
|
||||
|
||||
groupAdd := &exec.Cmd{Path: groupAddCmd, Args: []string{groupAddCmd, crowdsecGroup}}
|
||||
if err := groupAdd.Run(); err != nil {
|
||||
log.Fatalf("unable to add group '%s': %s", dockerGroup, err)
|
||||
}
|
||||
dockerGroup, err = user.LookupGroup(crowdsecGroup)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to lookup '%s' group: %+v", dockerGroup, err)
|
||||
}
|
||||
}
|
||||
intID, err := strconv.Atoi(dockerGroup.Gid)
|
||||
dockerGroup, err := checkGroups(&forceYes)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to convert group ID to int: %s", err)
|
||||
return err
|
||||
}
|
||||
if err := os.Chown(csConfig.DbConfig.DbPath, 0, intID); err != nil {
|
||||
log.Fatalf("unable to chown sqlite db file '%s': %s", csConfig.DbConfig.DbPath, err)
|
||||
if err = cli.chownDatabase(dockerGroup.Gid); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
mb, err := metabase.SetupMetabase(csConfig.API.Server.DbConfig, metabaseListenAddress, metabaseListenPort, metabaseUser, metabasePassword, metabaseDbPath, dockerGroup.Gid, metabaseContainerID)
|
||||
mb, err := metabase.SetupMetabase(cli.cfg().API.Server.DbConfig, metabaseListenAddress, metabaseListenPort, metabaseUser, metabasePassword, metabaseDBPath, dockerGroup.Gid, metabaseContainerID, metabaseImage)
|
||||
if err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
return err
|
||||
}
|
||||
|
||||
if err := mb.DumpConfig(metabaseConfigPath); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
return err
|
||||
}
|
||||
|
||||
log.Infof("Metabase is ready")
|
||||
|
@ -163,67 +165,96 @@ cscli dashboard setup -l 0.0.0.0 -p 443 --password <password>
|
|||
fmt.Printf("\tURL : '%s'\n", mb.Config.ListenURL)
|
||||
fmt.Printf("\tusername : '%s'\n", mb.Config.Username)
|
||||
fmt.Printf("\tpassword : '%s'\n", mb.Config.Password)
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdDashSetup.Flags().BoolVarP(&force, "force", "f", false, "Force setup : override existing files.")
|
||||
cmdDashSetup.Flags().StringVarP(&metabaseDbPath, "dir", "d", "", "Shared directory with metabase container.")
|
||||
cmdDashSetup.Flags().StringVarP(&metabaseListenAddress, "listen", "l", metabaseListenAddress, "Listen address of container")
|
||||
cmdDashSetup.Flags().StringVarP(&metabaseListenPort, "port", "p", metabaseListenPort, "Listen port of container")
|
||||
cmdDashSetup.Flags().BoolVarP(&forceYes, "yes", "y", false, "force yes")
|
||||
//cmdDashSetup.Flags().StringVarP(&metabaseUser, "user", "u", "crowdsec@crowdsec.net", "metabase user")
|
||||
cmdDashSetup.Flags().StringVar(&metabasePassword, "password", "", "metabase password")
|
||||
|
||||
cmdDashboard.AddCommand(cmdDashSetup)
|
||||
flags := cmd.Flags()
|
||||
flags.BoolVarP(&force, "force", "f", false, "Force setup : override existing files")
|
||||
flags.StringVarP(&metabaseDBPath, "dir", "d", "", "Shared directory with metabase container")
|
||||
flags.StringVarP(&metabaseListenAddress, "listen", "l", metabaseListenAddress, "Listen address of container")
|
||||
flags.StringVar(&metabaseImage, "metabase-image", metabaseImage, "Metabase image to use")
|
||||
flags.StringVarP(&metabaseListenPort, "port", "p", metabaseListenPort, "Listen port of container")
|
||||
flags.BoolVarP(&forceYes, "yes", "y", false, "force yes")
|
||||
// flags.StringVarP(&metabaseUser, "user", "u", "crowdsec@crowdsec.net", "metabase user")
|
||||
flags.StringVar(&metabasePassword, "password", "", "metabase password")
|
||||
|
||||
var cmdDashStart = &cobra.Command{
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliDashboard) newStartCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "start",
|
||||
Short: "Start the metabase container.",
|
||||
Long: `Starts the metabase container using docker.`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
mb, err := metabase.NewMetabase(metabaseConfigPath, metabaseContainerID)
|
||||
if err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
return err
|
||||
}
|
||||
warnIfNotLoopback(mb.Config.ListenAddr)
|
||||
if err := disclaimer(&forceYes); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := mb.Container.Start(); err != nil {
|
||||
log.Fatalf("Failed to start metabase container : %s", err)
|
||||
return fmt.Errorf("failed to start metabase container : %s", err)
|
||||
}
|
||||
log.Infof("Started metabase")
|
||||
log.Infof("url : http://%s:%s", metabaseListenAddress, metabaseListenPort)
|
||||
log.Infof("url : http://%s:%s", mb.Config.ListenAddr, mb.Config.ListenPort)
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdDashboard.AddCommand(cmdDashStart)
|
||||
|
||||
var cmdDashStop = &cobra.Command{
|
||||
cmd.Flags().BoolVarP(&forceYes, "yes", "y", false, "force yes")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliDashboard) newStopCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "stop",
|
||||
Short: "Stops the metabase container.",
|
||||
Long: `Stops the metabase container using docker.`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
if err := metabase.StopContainer(metabaseContainerID); err != nil {
|
||||
log.Fatalf("unable to stop container '%s': %s", metabaseContainerID, err)
|
||||
return fmt.Errorf("unable to stop container '%s': %s", metabaseContainerID, err)
|
||||
}
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdDashboard.AddCommand(cmdDashStop)
|
||||
|
||||
var cmdDashShowPassword = &cobra.Command{Use: "show-password",
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliDashboard) newShowPasswordCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{Use: "show-password",
|
||||
Short: "displays password of metabase.",
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
m := metabase.Metabase{}
|
||||
if err := m.LoadConfig(metabaseConfigPath); err != nil {
|
||||
log.Fatal(err)
|
||||
return err
|
||||
}
|
||||
log.Printf("%s", m.Config.Password)
|
||||
log.Printf("'%s'", m.Config.Password)
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdDashboard.AddCommand(cmdDashShowPassword)
|
||||
|
||||
var cmdDashRemove = &cobra.Command{
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliDashboard) newRemoveCmd() *cobra.Command {
|
||||
var force bool
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "remove",
|
||||
Short: "removes the metabase container.",
|
||||
Long: `removes the metabase container using docker.`,
|
||||
|
@ -233,67 +264,77 @@ cscli dashboard setup -l 0.0.0.0 -p 443 --password <password>
|
|||
cscli dashboard remove
|
||||
cscli dashboard remove --force
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
answer := true
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
if !forceYes {
|
||||
var answer bool
|
||||
prompt := &survey.Confirm{
|
||||
Message: "Do you really want to remove crowdsec dashboard? (all your changes will be lost)",
|
||||
Default: true,
|
||||
}
|
||||
if err := survey.AskOne(prompt, &answer); err != nil {
|
||||
log.Fatalf("unable to ask to force: %s", err)
|
||||
return fmt.Errorf("unable to ask to force: %s", err)
|
||||
}
|
||||
if !answer {
|
||||
return fmt.Errorf("user stated no to continue")
|
||||
}
|
||||
}
|
||||
if answer {
|
||||
if metabase.IsContainerExist(metabaseContainerID) {
|
||||
log.Debugf("Stopping container %s", metabaseContainerID)
|
||||
if err := metabase.StopContainer(metabaseContainerID); err != nil {
|
||||
log.Warningf("unable to stop container '%s': %s", metabaseContainerID, err)
|
||||
if metabase.IsContainerExist(metabaseContainerID) {
|
||||
log.Debugf("Stopping container %s", metabaseContainerID)
|
||||
if err := metabase.StopContainer(metabaseContainerID); err != nil {
|
||||
log.Warningf("unable to stop container '%s': %s", metabaseContainerID, err)
|
||||
}
|
||||
dockerGroup, err := user.LookupGroup(crowdsecGroup)
|
||||
if err == nil { // if group exist, remove it
|
||||
groupDelCmd, err := exec.LookPath("groupdel")
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to find 'groupdel' command, can't continue")
|
||||
}
|
||||
dockerGroup, err := user.LookupGroup(crowdsecGroup)
|
||||
if err == nil { // if group exist, remove it
|
||||
groupDelCmd, err := exec.LookPath("groupdel")
|
||||
if err != nil {
|
||||
log.Fatalf("unable to find 'groupdel' command, can't continue")
|
||||
}
|
||||
|
||||
groupDel := &exec.Cmd{Path: groupDelCmd, Args: []string{groupDelCmd, crowdsecGroup}}
|
||||
if err := groupDel.Run(); err != nil {
|
||||
log.Errorf("unable to delete group '%s': %s", dockerGroup, err)
|
||||
}
|
||||
groupDel := &exec.Cmd{Path: groupDelCmd, Args: []string{groupDelCmd, crowdsecGroup}}
|
||||
if err := groupDel.Run(); err != nil {
|
||||
log.Warnf("unable to delete group '%s': %s", dockerGroup, err)
|
||||
}
|
||||
log.Debugf("Removing container %s", metabaseContainerID)
|
||||
if err := metabase.RemoveContainer(metabaseContainerID); err != nil {
|
||||
log.Warningf("unable to remove container '%s': %s", metabaseContainerID, err)
|
||||
}
|
||||
log.Infof("container %s stopped & removed", metabaseContainerID)
|
||||
}
|
||||
log.Debugf("Removing metabase db %s", csConfig.ConfigPaths.DataDir)
|
||||
if err := metabase.RemoveDatabase(csConfig.ConfigPaths.DataDir); err != nil {
|
||||
log.Warningf("failed to remove metabase internal db : %s", err)
|
||||
log.Debugf("Removing container %s", metabaseContainerID)
|
||||
if err := metabase.RemoveContainer(metabaseContainerID); err != nil {
|
||||
log.Warnf("unable to remove container '%s': %s", metabaseContainerID, err)
|
||||
}
|
||||
if force {
|
||||
if err := metabase.RemoveImageContainer(); err != nil {
|
||||
if !strings.Contains(err.Error(), "No such image") {
|
||||
log.Fatalf("removing docker image: %s", err)
|
||||
}
|
||||
log.Infof("container %s stopped & removed", metabaseContainerID)
|
||||
}
|
||||
log.Debugf("Removing metabase db %s", cli.cfg().ConfigPaths.DataDir)
|
||||
if err := metabase.RemoveDatabase(cli.cfg().ConfigPaths.DataDir); err != nil {
|
||||
log.Warnf("failed to remove metabase internal db : %s", err)
|
||||
}
|
||||
if force {
|
||||
m := metabase.Metabase{}
|
||||
if err := m.LoadConfig(metabaseConfigPath); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := metabase.RemoveImageContainer(m.Config.Image); err != nil {
|
||||
if !strings.Contains(err.Error(), "No such image") {
|
||||
return fmt.Errorf("removing docker image: %s", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdDashRemove.Flags().BoolVarP(&force, "force", "f", false, "Remove also the metabase image")
|
||||
cmdDashRemove.Flags().BoolVarP(&forceYes, "yes", "y", false, "force yes")
|
||||
cmdDashboard.AddCommand(cmdDashRemove)
|
||||
|
||||
return cmdDashboard
|
||||
flags := cmd.Flags()
|
||||
flags.BoolVarP(&force, "force", "f", false, "Remove also the metabase image")
|
||||
flags.BoolVarP(&forceYes, "yes", "y", false, "force yes")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func passwordIsValid(password string) bool {
|
||||
hasDigit := false
|
||||
|
||||
for _, j := range password {
|
||||
if unicode.IsDigit(j) {
|
||||
hasDigit = true
|
||||
|
||||
break
|
||||
}
|
||||
}
|
||||
|
@ -301,6 +342,134 @@ func passwordIsValid(password string) bool {
|
|||
if !hasDigit || len(password) < 6 {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func checkSystemMemory(forceYes *bool) error {
|
||||
totMem := memory.TotalMemory()
|
||||
if totMem >= uint64(math.Pow(2, 30)) {
|
||||
return nil
|
||||
}
|
||||
|
||||
if !*forceYes {
|
||||
var answer bool
|
||||
|
||||
prompt := &survey.Confirm{
|
||||
Message: "Metabase requires 1-2GB of RAM, your system is below this requirement continue ?",
|
||||
Default: true,
|
||||
}
|
||||
if err := survey.AskOne(prompt, &answer); err != nil {
|
||||
return fmt.Errorf("unable to ask about RAM check: %s", err)
|
||||
}
|
||||
|
||||
if !answer {
|
||||
return fmt.Errorf("user stated no to continue")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Warn("Metabase requires 1-2GB of RAM, your system is below this requirement")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func warnIfNotLoopback(addr string) {
|
||||
if addr == "127.0.0.1" || addr == "::1" {
|
||||
return
|
||||
}
|
||||
|
||||
log.Warnf("You are potentially exposing your metabase port to the internet (addr: %s), please consider using a reverse proxy", addr)
|
||||
}
|
||||
|
||||
func disclaimer(forceYes *bool) error {
|
||||
if !*forceYes {
|
||||
var answer bool
|
||||
|
||||
prompt := &survey.Confirm{
|
||||
Message: "CrowdSec takes no responsibility for the security of your metabase instance. Do you accept these responsibilities ?",
|
||||
Default: true,
|
||||
}
|
||||
|
||||
if err := survey.AskOne(prompt, &answer); err != nil {
|
||||
return fmt.Errorf("unable to ask to question: %s", err)
|
||||
}
|
||||
|
||||
if !answer {
|
||||
return fmt.Errorf("user stated no to responsibilities")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Warn("CrowdSec takes no responsibility for the security of your metabase instance. You used force yes, so you accept this disclaimer")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func checkGroups(forceYes *bool) (*user.Group, error) {
|
||||
dockerGroup, err := user.LookupGroup(crowdsecGroup)
|
||||
if err == nil {
|
||||
return dockerGroup, nil
|
||||
}
|
||||
|
||||
if !*forceYes {
|
||||
var answer bool
|
||||
|
||||
prompt := &survey.Confirm{
|
||||
Message: fmt.Sprintf("For metabase docker to be able to access SQLite file we need to add a new group called '%s' to the system, is it ok for you ?", crowdsecGroup),
|
||||
Default: true,
|
||||
}
|
||||
|
||||
if err := survey.AskOne(prompt, &answer); err != nil {
|
||||
return dockerGroup, fmt.Errorf("unable to ask to question: %s", err)
|
||||
}
|
||||
|
||||
if !answer {
|
||||
return dockerGroup, fmt.Errorf("unable to continue without creating '%s' group", crowdsecGroup)
|
||||
}
|
||||
}
|
||||
|
||||
groupAddCmd, err := exec.LookPath("groupadd")
|
||||
if err != nil {
|
||||
return dockerGroup, fmt.Errorf("unable to find 'groupadd' command, can't continue")
|
||||
}
|
||||
|
||||
groupAdd := &exec.Cmd{Path: groupAddCmd, Args: []string{groupAddCmd, crowdsecGroup}}
|
||||
if err := groupAdd.Run(); err != nil {
|
||||
return dockerGroup, fmt.Errorf("unable to add group '%s': %s", dockerGroup, err)
|
||||
}
|
||||
|
||||
return user.LookupGroup(crowdsecGroup)
|
||||
}
|
||||
|
||||
func (cli *cliDashboard) chownDatabase(gid string) error {
|
||||
cfg := cli.cfg()
|
||||
intID, err := strconv.Atoi(gid)
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to convert group ID to int: %s", err)
|
||||
}
|
||||
|
||||
if stat, err := os.Stat(cfg.DbConfig.DbPath); !os.IsNotExist(err) {
|
||||
info := stat.Sys()
|
||||
if err := os.Chown(cfg.DbConfig.DbPath, int(info.(*syscall.Stat_t).Uid), intID); err != nil {
|
||||
return fmt.Errorf("unable to chown sqlite db file '%s': %s", cfg.DbConfig.DbPath, err)
|
||||
}
|
||||
}
|
||||
|
||||
if cfg.DbConfig.Type == "sqlite" && cfg.DbConfig.UseWal != nil && *cfg.DbConfig.UseWal {
|
||||
for _, ext := range []string{"-wal", "-shm"} {
|
||||
file := cfg.DbConfig.DbPath + ext
|
||||
if stat, err := os.Stat(file); !os.IsNotExist(err) {
|
||||
info := stat.Sys()
|
||||
if err := os.Chown(file, int(info.(*syscall.Stat_t).Uid), intID); err != nil {
|
||||
return fmt.Errorf("unable to chown sqlite db file '%s': %s", file, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
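For orientation, a hedged sketch of how the setup preflight helpers above compose, mirroring the order used in newSetupCmd (the listen address literal is just an example value):

package main

// examplePreflight is illustrative only; in the real command the results feed
// straight into metabase.SetupMetabase and cli.chownDatabase.
func examplePreflight(cli *cliDashboard) error {
	forceYes := false

	if err := checkSystemMemory(&forceYes); err != nil { // warn or abort below ~1GiB of RAM
		return err
	}

	warnIfNotLoopback("0.0.0.0") // logs a warning for non-loopback listen addresses

	if err := disclaimer(&forceYes); err != nil { // security responsibility prompt
		return err
	}

	dockerGroup, err := checkGroups(&forceYes) // creates the crowdsec group if missing
	if err != nil {
		return err
	}

	return cli.chownDatabase(dockerGroup.Gid) // give the group access to the SQLite db
}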
cmd/crowdsec-cli/dashboard_unsupported.go (new file, 32 lines)
@@ -0,0 +1,32 @@
//go:build !linux

package main

import (
	"runtime"

	log "github.com/sirupsen/logrus"
	"github.com/spf13/cobra"
)

type cliDashboard struct {
	cfg configGetter
}

func NewCLIDashboard(cfg configGetter) *cliDashboard {
	return &cliDashboard{
		cfg: cfg,
	}
}

func (cli cliDashboard) NewCommand() *cobra.Command {
	cmd := &cobra.Command{
		Use:               "dashboard",
		DisableAutoGenTag: true,
		Run: func(_ *cobra.Command, _ []string) {
			log.Infof("Dashboard command is disabled on %s", runtime.GOOS)
		},
	}

	return cmd
}
@@ -4,63 +4,66 @@ import (
"context"
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwversion"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/models"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
"github.com/fatih/color"
|
||||
"github.com/go-openapi/strfmt"
|
||||
"github.com/jszwec/csvutil"
|
||||
"github.com/olekukonko/tablewriter"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/crowdsecurity/go-cs-lib/version"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/models"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
)
|
||||
|
||||
var Client *apiclient.ApiClient
|
||||
|
||||
var (
|
||||
defaultDuration = "4h"
|
||||
defaultScope = "ip"
|
||||
defaultType = "ban"
|
||||
defaultReason = "manual"
|
||||
)
|
||||
|
||||
func DecisionsToTable(alerts *models.GetAlertsResponse, printMachine bool) error {
|
||||
func (cli *cliDecisions) decisionsToTable(alerts *models.GetAlertsResponse, printMachine bool) error {
|
||||
/*here we cheat a bit : to make it more readable for the user, we dedup some entries*/
|
||||
var spamLimit map[string]bool = make(map[string]bool)
|
||||
var skipped = 0
|
||||
spamLimit := make(map[string]bool)
|
||||
skipped := 0
|
||||
|
||||
for aIdx := 0; aIdx < len(*alerts); aIdx++ {
|
||||
alertItem := (*alerts)[aIdx]
|
||||
newDecisions := make([]*models.Decision, 0)
|
||||
|
||||
for _, decisionItem := range alertItem.Decisions {
|
||||
spamKey := fmt.Sprintf("%t:%s:%s:%s", *decisionItem.Simulated, *decisionItem.Type, *decisionItem.Scope, *decisionItem.Value)
|
||||
if _, ok := spamLimit[spamKey]; ok {
|
||||
skipped++
|
||||
continue
|
||||
}
|
||||
|
||||
spamLimit[spamKey] = true
|
||||
|
||||
newDecisions = append(newDecisions, decisionItem)
|
||||
}
|
||||
|
||||
alertItem.Decisions = newDecisions
|
||||
}
|
||||
if csConfig.Cscli.Output == "raw" {
|
||||
|
||||
switch cli.cfg().Cscli.Output {
|
||||
case "raw":
|
||||
csvwriter := csv.NewWriter(os.Stdout)
|
||||
header := []string{"id", "source", "ip", "reason", "action", "country", "as", "events_count", "expiration", "simulated", "alert_id"}
|
||||
|
||||
if printMachine {
|
||||
header = append(header, "machine")
|
||||
}
|
||||
|
||||
err := csvwriter.Write(header)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, alertItem := range *alerts {
|
||||
for _, decisionItem := range alertItem.Decisions {
|
||||
raw := []string{
|
||||
|
@ -70,7 +73,7 @@ func DecisionsToTable(alerts *models.GetAlertsResponse, printMachine bool) error
|
|||
*decisionItem.Scenario,
|
||||
*decisionItem.Type,
|
||||
alertItem.Source.Cn,
|
||||
alertItem.Source.AsNumber + " " + alertItem.Source.AsName,
|
||||
alertItem.Source.GetAsNumberName(),
|
||||
fmt.Sprintf("%d", *alertItem.EventsCount),
|
||||
*decisionItem.Duration,
|
||||
fmt.Sprintf("%t", *decisionItem.Simulated),
|
||||
|
@ -86,59 +89,46 @@ func DecisionsToTable(alerts *models.GetAlertsResponse, printMachine bool) error
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
csvwriter.Flush()
|
||||
} else if csConfig.Cscli.Output == "json" {
|
||||
case "json":
|
||||
if *alerts == nil {
|
||||
// avoid returning "null" in `json"
|
||||
// could be cleaner if we used slice of alerts directly
|
||||
fmt.Println("[]")
|
||||
return nil
|
||||
}
|
||||
|
||||
x, _ := json.MarshalIndent(alerts, "", " ")
|
||||
fmt.Printf("%s", string(x))
|
||||
} else if csConfig.Cscli.Output == "human" {
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
header := []string{"ID", "Source", "Scope:Value", "Reason", "Action", "Country", "AS", "Events", "expiration", "Alert ID"}
|
||||
if printMachine {
|
||||
header = append(header, "Machine")
|
||||
}
|
||||
table.SetHeader(header)
|
||||
|
||||
case "human":
|
||||
if len(*alerts) == 0 {
|
||||
fmt.Println("No active decisions")
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, alertItem := range *alerts {
|
||||
for _, decisionItem := range alertItem.Decisions {
|
||||
if *alertItem.Simulated {
|
||||
*decisionItem.Type = fmt.Sprintf("(simul)%s", *decisionItem.Type)
|
||||
}
|
||||
raw := []string{
|
||||
strconv.Itoa(int(decisionItem.ID)),
|
||||
*decisionItem.Origin,
|
||||
*decisionItem.Scope + ":" + *decisionItem.Value,
|
||||
*decisionItem.Scenario,
|
||||
*decisionItem.Type,
|
||||
alertItem.Source.Cn,
|
||||
alertItem.Source.AsNumber + " " + alertItem.Source.AsName,
|
||||
strconv.Itoa(int(*alertItem.EventsCount)),
|
||||
*decisionItem.Duration,
|
||||
strconv.Itoa(int(alertItem.ID)),
|
||||
}
|
||||
cli.decisionsTable(color.Output, alerts, printMachine)
|
||||
|
||||
if printMachine {
|
||||
raw = append(raw, alertItem.MachineID)
|
||||
}
|
||||
|
||||
table.Append(raw)
|
||||
}
|
||||
}
|
||||
table.Render() // Send output
|
||||
if skipped > 0 {
|
||||
fmt.Printf("%d duplicated entries skipped\n", skipped)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewDecisionsCmd() *cobra.Command {
|
||||
/* ---- DECISIONS COMMAND */
|
||||
var cmdDecisions = &cobra.Command{
|
||||
type cliDecisions struct {
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
func NewCLIDecisions(cfg configGetter) *cliDecisions {
|
||||
return &cliDecisions{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliDecisions) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "decisions [action]",
|
||||
Short: "Manage decisions",
|
||||
Long: `Add/List/Delete/Import decisions from LAPI`,
|
||||
|
@ -147,34 +137,40 @@ func NewDecisionsCmd() *cobra.Command {
|
|||
/*TBD example*/
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
||||
if err := csConfig.LoadAPIClient(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
if err := cfg.LoadAPIClient(); err != nil {
|
||||
return fmt.Errorf("loading api client: %w", err)
|
||||
}
|
||||
if csConfig.API.Client == nil {
|
||||
log.Fatalln("There is no configuration on 'api_client:'")
|
||||
}
|
||||
if csConfig.API.Client.Credentials == nil {
|
||||
log.Fatalf("Please provide credentials for the API in '%s'", csConfig.API.Client.CredentialsFilePath)
|
||||
}
|
||||
password := strfmt.Password(csConfig.API.Client.Credentials.Password)
|
||||
apiurl, err := url.Parse(csConfig.API.Client.Credentials.URL)
|
||||
password := strfmt.Password(cfg.API.Client.Credentials.Password)
|
||||
apiurl, err := url.Parse(cfg.API.Client.Credentials.URL)
|
||||
if err != nil {
|
||||
log.Fatalf("parsing api url ('%s'): %s", csConfig.API.Client.Credentials.URL, err)
|
||||
return fmt.Errorf("parsing api url %s: %w", cfg.API.Client.Credentials.URL, err)
|
||||
}
|
||||
Client, err = apiclient.NewClient(&apiclient.Config{
|
||||
MachineID: csConfig.API.Client.Credentials.Login,
|
||||
MachineID: cfg.API.Client.Credentials.Login,
|
||||
Password: password,
|
||||
UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()),
|
||||
UserAgent: fmt.Sprintf("crowdsec/%s", version.String()),
|
||||
URL: apiurl,
|
||||
VersionPrefix: "v1",
|
||||
})
|
||||
if err != nil {
|
||||
log.Fatalf("creating api client : %s", err)
|
||||
return fmt.Errorf("creating api client: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmd.AddCommand(cli.newListCmd())
|
||||
cmd.AddCommand(cli.newAddCmd())
|
||||
cmd.AddCommand(cli.newDeleteCmd())
|
||||
cmd.AddCommand(cli.newImportCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliDecisions) newListCmd() *cobra.Command {
|
||||
var filter = apiclient.AlertsListOpts{
|
||||
ValueEquals: new(string),
|
||||
ScopeEquals: new(string),
|
||||
|
@ -188,24 +184,27 @@ func NewDecisionsCmd() *cobra.Command {
|
|||
IncludeCAPI: new(bool),
|
||||
Limit: new(int),
|
||||
}
|
||||
|
||||
NoSimu := new(bool)
|
||||
contained := new(bool)
|
||||
|
||||
var printMachine bool
|
||||
var cmdDecisionsList = &cobra.Command{
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "list [options]",
|
||||
Short: "List decisions from LAPI",
|
||||
Example: `cscli decisions list -i 1.2.3.4
|
||||
cscli decisions list -r 1.2.3.0/24
|
||||
cscli decisions list -s crowdsecurity/ssh-bf
|
||||
cscli decisions list -t ban
|
||||
cscli decisions list --origin lists --scenario list_name
|
||||
`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(cmd *cobra.Command, _ []string) error {
|
||||
var err error
|
||||
/*take care of shorthand options*/
|
||||
if err := manageCliDecisionAlerts(filter.IPEquals, filter.RangeEquals, filter.ScopeEquals, filter.ValueEquals); err != nil {
|
||||
log.Fatalf("%s", err)
|
||||
if err = manageCliDecisionAlerts(filter.IPEquals, filter.RangeEquals, filter.ScopeEquals, filter.ValueEquals); err != nil {
|
||||
return err
|
||||
}
|
||||
filter.ActiveDecisionEquals = new(bool)
|
||||
*filter.ActiveDecisionEquals = true
|
||||
|
@ -215,31 +214,28 @@ cscli decisions list -t ban
|
|||
/* nullify the empty entries to avoid bad filter */
|
||||
if *filter.Until == "" {
|
||||
filter.Until = nil
|
||||
} else {
|
||||
} else if strings.HasSuffix(*filter.Until, "d") {
|
||||
/*time.ParseDuration support hours 'h' as bigger unit, let's make the user's life easier*/
|
||||
if strings.HasSuffix(*filter.Until, "d") {
|
||||
realDuration := strings.TrimSuffix(*filter.Until, "d")
|
||||
days, err := strconv.Atoi(realDuration)
|
||||
if err != nil {
|
||||
printHelp(cmd)
|
||||
log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Until)
|
||||
}
|
||||
*filter.Until = fmt.Sprintf("%d%s", days*24, "h")
|
||||
realDuration := strings.TrimSuffix(*filter.Until, "d")
|
||||
days, err := strconv.Atoi(realDuration)
|
||||
if err != nil {
|
||||
printHelp(cmd)
|
||||
return fmt.Errorf("can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Until)
|
||||
}
|
||||
*filter.Until = fmt.Sprintf("%d%s", days*24, "h")
|
||||
}
|
||||
|
||||
if *filter.Since == "" {
|
||||
filter.Since = nil
|
||||
} else {
|
||||
} else if strings.HasSuffix(*filter.Since, "d") {
|
||||
/*time.ParseDuration support hours 'h' as bigger unit, let's make the user's life easier*/
|
||||
if strings.HasSuffix(*filter.Since, "d") {
|
||||
realDuration := strings.TrimSuffix(*filter.Since, "d")
|
||||
days, err := strconv.Atoi(realDuration)
|
||||
if err != nil {
|
||||
printHelp(cmd)
|
||||
log.Fatalf("Can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Until)
|
||||
}
|
||||
*filter.Since = fmt.Sprintf("%d%s", days*24, "h")
|
||||
realDuration := strings.TrimSuffix(*filter.Since, "d")
|
||||
days, err := strconv.Atoi(realDuration)
|
||||
if err != nil {
|
||||
printHelp(cmd)
|
||||
return fmt.Errorf("can't parse duration %s, valid durations format: 1d, 4h, 4h15m", *filter.Since)
|
||||
}
|
||||
*filter.Since = fmt.Sprintf("%d%s", days*24, "h")
|
||||
}
|
||||
if *filter.IncludeCAPI {
|
||||
*filter.Limit = 0
|
||||
|
@ -273,33 +269,37 @@ cscli decisions list -t ban
|
|||
|
||||
alerts, _, err := Client.Alerts.List(context.Background(), filter)
|
||||
if err != nil {
|
||||
log.Fatalf("Unable to list decisions : %v", err.Error())
|
||||
return fmt.Errorf("unable to retrieve decisions: %w", err)
|
||||
}
|
||||
|
||||
err = DecisionsToTable(alerts, printMachine)
|
||||
err = cli.decisionsToTable(alerts, printMachine)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to list decisions : %v", err.Error())
|
||||
return fmt.Errorf("unable to print decisions: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdDecisionsList.Flags().SortFlags = false
|
||||
cmdDecisionsList.Flags().BoolVarP(filter.IncludeCAPI, "all", "a", false, "Include decisions from Central API")
|
||||
cmdDecisionsList.Flags().StringVar(filter.Since, "since", "", "restrict to alerts newer than since (ie. 4h, 30d)")
|
||||
cmdDecisionsList.Flags().StringVar(filter.Until, "until", "", "restrict to alerts older than until (ie. 4h, 30d)")
|
||||
cmdDecisionsList.Flags().StringVarP(filter.TypeEquals, "type", "t", "", "restrict to this decision type (ie. ban,captcha)")
|
||||
cmdDecisionsList.Flags().StringVar(filter.ScopeEquals, "scope", "", "restrict to this scope (ie. ip,range,session)")
|
||||
cmdDecisionsList.Flags().StringVar(filter.OriginEquals, "origin", "", "restrict to this origin (ie. lists,CAPI,cscli)")
|
||||
cmdDecisionsList.Flags().StringVarP(filter.ValueEquals, "value", "v", "", "restrict to this value (ie. 1.2.3.4,userName)")
|
||||
cmdDecisionsList.Flags().StringVarP(filter.ScenarioEquals, "scenario", "s", "", "restrict to this scenario (ie. crowdsecurity/ssh-bf)")
|
||||
cmdDecisionsList.Flags().StringVarP(filter.IPEquals, "ip", "i", "", "restrict to alerts from this source ip (shorthand for --scope ip --value <IP>)")
|
||||
cmdDecisionsList.Flags().StringVarP(filter.RangeEquals, "range", "r", "", "restrict to alerts from this source range (shorthand for --scope range --value <RANGE>)")
|
||||
cmdDecisionsList.Flags().IntVarP(filter.Limit, "limit", "l", 100, "number of alerts to get (use 0 to remove the limit)")
|
||||
cmdDecisionsList.Flags().BoolVar(NoSimu, "no-simu", false, "exclude decisions in simulation mode")
|
||||
cmdDecisionsList.Flags().BoolVarP(&printMachine, "machine", "m", false, "print machines that triggered decisions")
|
||||
cmdDecisionsList.Flags().BoolVar(contained, "contained", false, "query decisions contained by range")
|
||||
cmd.Flags().SortFlags = false
|
||||
cmd.Flags().BoolVarP(filter.IncludeCAPI, "all", "a", false, "Include decisions from Central API")
|
||||
cmd.Flags().StringVar(filter.Since, "since", "", "restrict to alerts newer than since (ie. 4h, 30d)")
|
||||
cmd.Flags().StringVar(filter.Until, "until", "", "restrict to alerts older than until (ie. 4h, 30d)")
|
||||
cmd.Flags().StringVarP(filter.TypeEquals, "type", "t", "", "restrict to this decision type (ie. ban,captcha)")
|
||||
cmd.Flags().StringVar(filter.ScopeEquals, "scope", "", "restrict to this scope (ie. ip,range,session)")
|
||||
cmd.Flags().StringVar(filter.OriginEquals, "origin", "", fmt.Sprintf("the value to match for the specified origin (%s ...)", strings.Join(types.GetOrigins(), ",")))
|
||||
cmd.Flags().StringVarP(filter.ValueEquals, "value", "v", "", "restrict to this value (ie. 1.2.3.4,userName)")
|
||||
cmd.Flags().StringVarP(filter.ScenarioEquals, "scenario", "s", "", "restrict to this scenario (ie. crowdsecurity/ssh-bf)")
|
||||
cmd.Flags().StringVarP(filter.IPEquals, "ip", "i", "", "restrict to alerts from this source ip (shorthand for --scope ip --value <IP>)")
|
||||
cmd.Flags().StringVarP(filter.RangeEquals, "range", "r", "", "restrict to alerts from this source range (shorthand for --scope range --value <RANGE>)")
|
||||
cmd.Flags().IntVarP(filter.Limit, "limit", "l", 100, "number of alerts to get (use 0 to remove the limit)")
|
||||
cmd.Flags().BoolVar(NoSimu, "no-simu", false, "exclude decisions in simulation mode")
|
||||
cmd.Flags().BoolVarP(&printMachine, "machine", "m", false, "print machines that triggered decisions")
|
||||
cmd.Flags().BoolVar(contained, "contained", false, "query decisions contained by range")
|
||||
|
||||
cmdDecisions.AddCommand(cmdDecisionsList)
|
||||
return cmd
|
||||
}
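
The until/since handling above rewrites a trailing "d" into hours, because time.ParseDuration has no day unit. A minimal standalone sketch of that conversion (normalizeDuration is a hypothetical helper name, not part of this PR):

package main

import (
	"fmt"
	"strconv"
	"strings"
	"time"
)

// normalizeDuration mirrors the shorthand handling in the list command:
// "7d" becomes "168h" so time.ParseDuration (and LAPI) can consume it.
func normalizeDuration(d string) (string, error) {
	if strings.HasSuffix(d, "d") {
		days, err := strconv.Atoi(strings.TrimSuffix(d, "d"))
		if err != nil {
			return "", fmt.Errorf("can't parse duration %s, valid durations format: 1d, 4h, 4h15m", d)
		}

		d = fmt.Sprintf("%dh", days*24)
	}

	if _, err := time.ParseDuration(d); err != nil {
		return "", err
	}

	return d, nil
}

func main() {
	fmt.Println(normalizeDuration("7d")) // 168h <nil>
}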
|
||||
|
||||
func (cli *cliDecisions) newAddCmd() *cobra.Command {
|
||||
var (
|
||||
addIP string
|
||||
addRange string
|
||||
|
@ -310,7 +310,7 @@ cscli decisions list -t ban
|
|||
addType string
|
||||
)
|
||||
|
||||
var cmdDecisionsAdd = &cobra.Command{
|
||||
cmd := &cobra.Command{
|
||||
Use: "add [options]",
|
||||
Short: "Add decision to LAPI",
|
||||
Example: `cscli decisions add --ip 1.2.3.4
|
||||
|
@ -321,11 +321,10 @@ cscli decisions add --scope username --value foobar
|
|||
/*TBD : fix long and example*/
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(cmd *cobra.Command, _ []string) error {
|
||||
var err error
|
||||
var ipRange string
|
||||
alerts := models.AddAlertsRequest{}
|
||||
origin := "cscli"
|
||||
origin := types.CscliOrigin
|
||||
capacity := int32(0)
|
||||
leakSpeed := "0"
|
||||
eventsCount := int32(1)
|
||||
|
@ -336,8 +335,8 @@ cscli decisions add --scope username --value foobar
|
|||
createdAt := time.Now().UTC().Format(time.RFC3339)
|
||||
|
||||
/*take care of shorthand options*/
|
||||
if err := manageCliDecisionAlerts(&addIP, &addRange, &addScope, &addValue); err != nil {
|
||||
log.Fatalf("%s", err)
|
||||
if err = manageCliDecisionAlerts(&addIP, &addRange, &addScope, &addValue); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if addIP != "" {
|
||||
|
@ -348,11 +347,11 @@ cscli decisions add --scope username --value foobar
|
|||
addScope = types.Range
|
||||
} else if addValue == "" {
|
||||
printHelp(cmd)
|
||||
log.Fatalf("Missing arguments, a value is required (--ip, --range or --scope and --value)")
|
||||
return errors.New("missing arguments, a value is required (--ip, --range or --scope and --value)")
|
||||
}
|
||||
|
||||
if addReason == "" {
|
||||
addReason = fmt.Sprintf("manual '%s' from '%s'", addType, csConfig.API.Client.Credentials.Login)
|
||||
addReason = fmt.Sprintf("manual '%s' from '%s'", addType, cli.cfg().API.Client.Credentials.Login)
|
||||
}
|
||||
decision := models.Decision{
|
||||
Duration: &addDuration,
|
||||
|
@ -373,12 +372,13 @@ cscli decisions add --scope username --value foobar
|
|||
Scenario: &addReason,
|
||||
ScenarioVersion: &empty,
|
||||
Simulated: &simulated,
|
||||
// setting empty scope/value broke plugins, and it didn't seem to be needed anymore w/ latest papi changes
|
||||
Source: &models.Source{
|
||||
AsName: empty,
|
||||
AsNumber: empty,
|
||||
Cn: empty,
|
||||
IP: addValue,
|
||||
Range: ipRange,
|
||||
Range: "",
|
||||
Scope: &addScope,
|
||||
Value: &addValue,
|
||||
},
|
||||
|
@ -390,78 +390,96 @@ cscli decisions add --scope username --value foobar
|
|||
|
||||
_, _, err = Client.Alerts.Add(context.Background(), alerts)
|
||||
if err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
return err
|
||||
}
|
||||
|
||||
log.Info("Decision successfully added")
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmdDecisionsAdd.Flags().SortFlags = false
|
||||
cmdDecisionsAdd.Flags().StringVarP(&addIP, "ip", "i", "", "Source ip (shorthand for --scope ip --value <IP>)")
|
||||
cmdDecisionsAdd.Flags().StringVarP(&addRange, "range", "r", "", "Range source ip (shorthand for --scope range --value <RANGE>)")
|
||||
cmdDecisionsAdd.Flags().StringVarP(&addDuration, "duration", "d", "4h", "Decision duration (ie. 1h,4h,30m)")
|
||||
cmdDecisionsAdd.Flags().StringVarP(&addValue, "value", "v", "", "The value (ie. --scope username --value foobar)")
|
||||
cmdDecisionsAdd.Flags().StringVar(&addScope, "scope", types.Ip, "Decision scope (ie. ip,range,username)")
|
||||
cmdDecisionsAdd.Flags().StringVarP(&addReason, "reason", "R", "", "Decision reason (ie. scenario-name)")
|
||||
cmdDecisionsAdd.Flags().StringVarP(&addType, "type", "t", "ban", "Decision type (ie. ban,captcha,throttle)")
|
||||
cmdDecisions.AddCommand(cmdDecisionsAdd)
|
||||
cmd.Flags().SortFlags = false
|
||||
cmd.Flags().StringVarP(&addIP, "ip", "i", "", "Source ip (shorthand for --scope ip --value <IP>)")
|
||||
cmd.Flags().StringVarP(&addRange, "range", "r", "", "Range source ip (shorthand for --scope range --value <RANGE>)")
|
||||
cmd.Flags().StringVarP(&addDuration, "duration", "d", "4h", "Decision duration (ie. 1h,4h,30m)")
|
||||
cmd.Flags().StringVarP(&addValue, "value", "v", "", "The value (ie. --scope username --value foobar)")
|
||||
cmd.Flags().StringVar(&addScope, "scope", types.Ip, "Decision scope (ie. ip,range,username)")
|
||||
cmd.Flags().StringVarP(&addReason, "reason", "R", "", "Decision reason (ie. scenario-name)")
|
||||
cmd.Flags().StringVarP(&addType, "type", "t", "ban", "Decision type (ie. ban,captcha,throttle)")
|
||||
|
||||
var delFilter = apiclient.DecisionsDeleteOpts{
|
||||
ScopeEquals: new(string),
|
||||
ValueEquals: new(string),
|
||||
TypeEquals: new(string),
|
||||
IPEquals: new(string),
|
||||
RangeEquals: new(string),
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliDecisions) newDeleteCmd() *cobra.Command {
|
||||
delFilter := apiclient.DecisionsDeleteOpts{
|
||||
ScopeEquals: new(string),
|
||||
ValueEquals: new(string),
|
||||
TypeEquals: new(string),
|
||||
IPEquals: new(string),
|
||||
RangeEquals: new(string),
|
||||
ScenarioEquals: new(string),
|
||||
OriginEquals: new(string),
|
||||
}
|
||||
var delDecisionId string
|
||||
|
||||
var delDecisionID string
|
||||
|
||||
var delDecisionAll bool
|
||||
var cmdDecisionsDelete = &cobra.Command{
|
||||
|
||||
contained := new(bool)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "delete [options]",
|
||||
Short: "Delete decisions",
|
||||
DisableAutoGenTag: true,
|
||||
Aliases: []string{"remove"},
|
||||
Example: `cscli decisions delete -r 1.2.3.0/24
|
||||
cscli decisions delete -i 1.2.3.4
|
||||
cscli decisions delete -s crowdsecurity/ssh-bf
|
||||
cscli decisions delete --id 42
|
||||
cscli decisions delete --type captcha
|
||||
cscli decisions delete --origin lists --scenario list_name
|
||||
`,
|
||||
/*TBD : refaire le Long/Example*/
|
||||
PreRun: func(cmd *cobra.Command, args []string) {
|
||||
PreRunE: func(cmd *cobra.Command, _ []string) error {
|
||||
if delDecisionAll {
|
||||
return
|
||||
return nil
|
||||
}
|
||||
if *delFilter.ScopeEquals == "" && *delFilter.ValueEquals == "" &&
|
||||
*delFilter.TypeEquals == "" && *delFilter.IPEquals == "" &&
|
||||
*delFilter.RangeEquals == "" && delDecisionId == "" {
|
||||
*delFilter.RangeEquals == "" && *delFilter.ScenarioEquals == "" &&
|
||||
*delFilter.OriginEquals == "" && delDecisionID == "" {
|
||||
cmd.Usage()
|
||||
log.Fatalln("At least one filter or --all must be specified")
|
||||
return errors.New("at least one filter or --all must be specified")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
var err error
|
||||
var decisions *models.DeleteDecisionResponse
|
||||
|
||||
/*take care of shorthand options*/
|
||||
if err := manageCliDecisionAlerts(delFilter.IPEquals, delFilter.RangeEquals, delFilter.ScopeEquals, delFilter.ValueEquals); err != nil {
|
||||
log.Fatalf("%s", err)
|
||||
if err = manageCliDecisionAlerts(delFilter.IPEquals, delFilter.RangeEquals, delFilter.ScopeEquals, delFilter.ValueEquals); err != nil {
|
||||
return err
|
||||
}
|
||||
if *delFilter.ScopeEquals == "" {
|
||||
delFilter.ScopeEquals = nil
|
||||
}
|
||||
if *delFilter.OriginEquals == "" {
|
||||
delFilter.OriginEquals = nil
|
||||
}
|
||||
if *delFilter.ValueEquals == "" {
|
||||
delFilter.ValueEquals = nil
|
||||
}
|
||||
|
||||
if *delFilter.ScenarioEquals == "" {
|
||||
delFilter.ScenarioEquals = nil
|
||||
}
|
||||
if *delFilter.TypeEquals == "" {
|
||||
delFilter.TypeEquals = nil
|
||||
}
|
||||
|
||||
if *delFilter.IPEquals == "" {
|
||||
delFilter.IPEquals = nil
|
||||
}
|
||||
|
||||
if *delFilter.RangeEquals == "" {
|
||||
delFilter.RangeEquals = nil
|
||||
}
|
||||
|
@ -469,179 +487,37 @@ cscli decisions delete --type captcha
|
|||
delFilter.Contains = new(bool)
|
||||
}
|
||||
|
||||
if delDecisionId == "" {
|
||||
if delDecisionID == "" {
|
||||
decisions, _, err = Client.Decisions.Delete(context.Background(), delFilter)
|
||||
if err != nil {
|
||||
log.Fatalf("Unable to delete decisions : %v", err.Error())
|
||||
return fmt.Errorf("unable to delete decisions: %v", err)
|
||||
}
|
||||
} else {
|
||||
decisions, _, err = Client.Decisions.DeleteOne(context.Background(), delDecisionId)
|
||||
if _, err = strconv.Atoi(delDecisionID); err != nil {
|
||||
return fmt.Errorf("id '%s' is not an integer: %v", delDecisionID, err)
|
||||
}
|
||||
decisions, _, err = Client.Decisions.DeleteOne(context.Background(), delDecisionID)
|
||||
if err != nil {
|
||||
log.Fatalf("Unable to delete decision : %v", err.Error())
|
||||
return fmt.Errorf("unable to delete decision: %v", err)
|
||||
}
|
||||
}
|
||||
log.Infof("%s decision(s) deleted", decisions.NbDeleted)
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmdDecisionsDelete.Flags().SortFlags = false
|
||||
cmdDecisionsDelete.Flags().StringVarP(delFilter.IPEquals, "ip", "i", "", "Source ip (shorthand for --scope ip --value <IP>)")
|
||||
cmdDecisionsDelete.Flags().StringVarP(delFilter.RangeEquals, "range", "r", "", "Range source ip (shorthand for --scope range --value <RANGE>)")
|
||||
cmdDecisionsDelete.Flags().StringVar(&delDecisionId, "id", "", "decision id")
|
||||
cmdDecisionsDelete.Flags().StringVarP(delFilter.TypeEquals, "type", "t", "", "the decision type (ie. ban,captcha)")
|
||||
cmdDecisionsDelete.Flags().StringVarP(delFilter.ValueEquals, "value", "v", "", "the value to match for in the specified scope")
|
||||
cmdDecisionsDelete.Flags().BoolVar(&delDecisionAll, "all", false, "delete all decisions")
|
||||
cmdDecisionsDelete.Flags().BoolVar(contained, "contained", false, "query decisions contained by range")
|
||||
cmd.Flags().SortFlags = false
|
||||
cmd.Flags().StringVarP(delFilter.IPEquals, "ip", "i", "", "Source ip (shorthand for --scope ip --value <IP>)")
|
||||
cmd.Flags().StringVarP(delFilter.RangeEquals, "range", "r", "", "Range source ip (shorthand for --scope range --value <RANGE>)")
|
||||
cmd.Flags().StringVarP(delFilter.TypeEquals, "type", "t", "", "the decision type (ie. ban,captcha)")
|
||||
cmd.Flags().StringVarP(delFilter.ValueEquals, "value", "v", "", "the value to match for in the specified scope")
|
||||
cmd.Flags().StringVarP(delFilter.ScenarioEquals, "scenario", "s", "", "the scenario name (ie. crowdsecurity/ssh-bf)")
|
||||
cmd.Flags().StringVar(delFilter.OriginEquals, "origin", "", fmt.Sprintf("the value to match for the specified origin (%s ...)", strings.Join(types.GetOrigins(), ",")))
|
||||
|
||||
cmdDecisions.AddCommand(cmdDecisionsDelete)
|
||||
cmd.Flags().StringVar(&delDecisionID, "id", "", "decision id")
|
||||
cmd.Flags().BoolVar(&delDecisionAll, "all", false, "delete all decisions")
|
||||
cmd.Flags().BoolVar(contained, "contained", false, "query decisions contained by range")
|
||||
|
||||
var (
|
||||
importDuration string
|
||||
importScope string
|
||||
importReason string
|
||||
importType string
|
||||
importFile string
|
||||
)
|
||||
|
||||
var cmdDecisionImport = &cobra.Command{
|
||||
Use: "import [options]",
|
||||
Short: "Import decisions from json or csv file",
|
||||
Long: "expected format :\n" +
|
||||
"csv : any of duration,origin,reason,scope,type,value, with a header line\n" +
|
||||
`json : {"duration" : "24h", "origin" : "my-list", "reason" : "my_scenario", "scope" : "ip", "type" : "ban", "value" : "x.y.z.z"}`,
|
||||
DisableAutoGenTag: true,
|
||||
Example: `decisions.csv :
|
||||
duration,scope,value
|
||||
24h,ip,1.2.3.4
|
||||
|
||||
cscsli decisions import -i decisions.csv
|
||||
|
||||
decisions.json :
|
||||
[{"duration" : "4h", "scope" : "ip", "type" : "ban", "value" : "1.2.3.4"}]
|
||||
`,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if importFile == "" {
|
||||
log.Fatalf("Please provide a input file containing decisions with -i flag")
|
||||
}
|
||||
csvData, err := os.ReadFile(importFile)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to open '%s': %s", importFile, err)
|
||||
}
|
||||
type decisionRaw struct {
|
||||
Duration string `csv:"duration,omitempty" json:"duration,omitempty"`
|
||||
Origin string `csv:"origin,omitempty" json:"origin,omitempty"`
|
||||
Scenario string `csv:"reason,omitempty" json:"reason,omitempty"`
|
||||
Scope string `csv:"scope,omitempty" json:"scope,omitempty"`
|
||||
Type string `csv:"type,omitempty" json:"type,omitempty"`
|
||||
Value string `csv:"value" json:"value"`
|
||||
}
|
||||
var decisionsListRaw []decisionRaw
|
||||
switch fileFormat := filepath.Ext(importFile); fileFormat {
|
||||
case ".json":
|
||||
if err := json.Unmarshal(csvData, &decisionsListRaw); err != nil {
|
||||
log.Fatalf("unable to unmarshall json: '%s'", err)
|
||||
}
|
||||
case ".csv":
|
||||
if err := csvutil.Unmarshal(csvData, &decisionsListRaw); err != nil {
|
||||
log.Fatalf("unable to unmarshall csv: '%s'", err)
|
||||
}
|
||||
default:
|
||||
log.Fatalf("file format not supported for '%s'. supported format are 'json' and 'csv'", importFile)
|
||||
}
|
||||
|
||||
decisionsList := make([]*models.Decision, 0)
|
||||
for i, decisionLine := range decisionsListRaw {
|
||||
line := i + 2
|
||||
if decisionLine.Value == "" {
|
||||
log.Fatalf("please provide a 'value' in your csv line %d", line)
|
||||
}
|
||||
/*deal with defaults and cli-override*/
|
||||
if decisionLine.Duration == "" {
|
||||
decisionLine.Duration = defaultDuration
|
||||
log.Debugf("No 'duration' line %d, using default value: '%s'", line, defaultDuration)
|
||||
}
|
||||
if importDuration != "" {
|
||||
decisionLine.Duration = importDuration
|
||||
log.Debugf("'duration' line %d, using supplied value: '%s'", line, importDuration)
|
||||
}
|
||||
decisionLine.Origin = "cscli-import"
|
||||
|
||||
if decisionLine.Scenario == "" {
|
||||
decisionLine.Scenario = defaultReason
|
||||
log.Debugf("No 'reason' line %d, using value: '%s'", line, decisionLine.Scenario)
|
||||
}
|
||||
if importReason != "" {
|
||||
decisionLine.Scenario = importReason
|
||||
log.Debugf("No 'reason' line %d, using supplied value: '%s'", line, importReason)
|
||||
}
|
||||
if decisionLine.Type == "" {
|
||||
decisionLine.Type = defaultType
|
||||
log.Debugf("No 'type' line %d, using default value: '%s'", line, decisionLine.Type)
|
||||
}
|
||||
if importType != "" {
|
||||
decisionLine.Type = importType
|
||||
log.Debugf("'type' line %d, using supplied value: '%s'", line, importType)
|
||||
}
|
||||
if decisionLine.Scope == "" {
|
||||
decisionLine.Scope = defaultScope
|
||||
log.Debugf("No 'scope' line %d, using default value: '%s'", line, decisionLine.Scope)
|
||||
}
|
||||
if importScope != "" {
|
||||
decisionLine.Scope = importScope
|
||||
log.Debugf("'scope' line %d, using supplied value: '%s'", line, importScope)
|
||||
}
|
||||
decision := models.Decision{
|
||||
Value: types.StrPtr(decisionLine.Value),
|
||||
Duration: types.StrPtr(decisionLine.Duration),
|
||||
Origin: types.StrPtr(decisionLine.Origin),
|
||||
Scenario: types.StrPtr(decisionLine.Scenario),
|
||||
Type: types.StrPtr(decisionLine.Type),
|
||||
Scope: types.StrPtr(decisionLine.Scope),
|
||||
Simulated: new(bool),
|
||||
}
|
||||
decisionsList = append(decisionsList, &decision)
|
||||
}
|
||||
alerts := models.AddAlertsRequest{}
|
||||
importAlert := models.Alert{
|
||||
CreatedAt: time.Now().UTC().Format(time.RFC3339),
|
||||
Scenario: types.StrPtr(fmt.Sprintf("add: %d IPs", len(decisionsList))),
|
||||
Message: types.StrPtr(""),
|
||||
Events: []*models.Event{},
|
||||
Source: &models.Source{
|
||||
Scope: types.StrPtr("cscli/manual-import"),
|
||||
Value: types.StrPtr(""),
|
||||
},
|
||||
StartAt: types.StrPtr(time.Now().UTC().Format(time.RFC3339)),
|
||||
StopAt: types.StrPtr(time.Now().UTC().Format(time.RFC3339)),
|
||||
Capacity: types.Int32Ptr(0),
|
||||
Simulated: types.BoolPtr(false),
|
||||
EventsCount: types.Int32Ptr(int32(len(decisionsList))),
|
||||
Leakspeed: types.StrPtr(""),
|
||||
ScenarioHash: types.StrPtr(""),
|
||||
ScenarioVersion: types.StrPtr(""),
|
||||
Decisions: decisionsList,
|
||||
}
|
||||
alerts = append(alerts, &importAlert)
|
||||
|
||||
if len(decisionsList) > 1000 {
|
||||
log.Infof("You are about to add %d decisions, this may take a while", len(decisionsList))
|
||||
}
|
||||
|
||||
_, _, err = Client.Alerts.Add(context.Background(), alerts)
|
||||
if err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
log.Infof("%d decisions successfully imported", len(decisionsList))
|
||||
},
|
||||
}
|
||||
|
||||
cmdDecisionImport.Flags().SortFlags = false
|
||||
cmdDecisionImport.Flags().StringVarP(&importFile, "input", "i", "", "Input file")
|
||||
cmdDecisionImport.Flags().StringVarP(&importDuration, "duration", "d", "", "Decision duration (ie. 1h,4h,30m)")
|
||||
cmdDecisionImport.Flags().StringVar(&importScope, "scope", types.Ip, "Decision scope (ie. ip,range,username)")
|
||||
cmdDecisionImport.Flags().StringVarP(&importReason, "reason", "R", "", "Decision reason (ie. scenario-name)")
|
||||
cmdDecisionImport.Flags().StringVarP(&importType, "type", "t", "", "Decision type (ie. ban,captcha,throttle)")
|
||||
cmdDecisions.AddCommand(cmdDecisionImport)
|
||||
|
||||
return cmdDecisions
|
||||
return cmd
|
||||
}
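
The delete handler above turns every empty filter string into a nil pointer so it is omitted from the LAPI query instead of matching the empty string. A minimal sketch of that pattern (nilIfEmpty is a hypothetical helper, not part of this PR):

package main

import "fmt"

// nilIfEmpty returns nil for empty strings so optional filters are dropped
// from the query rather than sent as empty values.
func nilIfEmpty(s *string) *string {
	if s == nil || *s == "" {
		return nil
	}

	return s
}

func main() {
	scope := ""
	scenario := "crowdsecurity/ssh-bf"
	fmt.Println(nilIfEmpty(&scope), *nilIfEmpty(&scenario)) // <nil> crowdsecurity/ssh-bf
}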
|
||||
|
|
280
cmd/crowdsec-cli/decisions_import.go
Normal file

@ -0,0 +1,280 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/jszwec/csvutil"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/crowdsecurity/go-cs-lib/ptr"
|
||||
"github.com/crowdsecurity/go-cs-lib/slicetools"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/models"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
)
|
||||
|
||||
// decisionRaw is only used to unmarshall json/csv decisions
|
||||
type decisionRaw struct {
|
||||
Duration string `csv:"duration,omitempty" json:"duration,omitempty"`
|
||||
Scenario string `csv:"reason,omitempty" json:"reason,omitempty"`
|
||||
Scope string `csv:"scope,omitempty" json:"scope,omitempty"`
|
||||
Type string `csv:"type,omitempty" json:"type,omitempty"`
|
||||
Value string `csv:"value" json:"value"`
|
||||
}
|
||||
|
||||
func parseDecisionList(content []byte, format string) ([]decisionRaw, error) {
|
||||
ret := []decisionRaw{}
|
||||
|
||||
switch format {
|
||||
case "values":
|
||||
log.Infof("Parsing values")
|
||||
|
||||
scanner := bufio.NewScanner(bytes.NewReader(content))
|
||||
for scanner.Scan() {
|
||||
value := strings.TrimSpace(scanner.Text())
|
||||
ret = append(ret, decisionRaw{Value: value})
|
||||
}
|
||||
|
||||
if err := scanner.Err(); err != nil {
|
||||
return nil, fmt.Errorf("unable to parse values: '%s'", err)
|
||||
}
|
||||
case "json":
|
||||
log.Infof("Parsing json")
|
||||
|
||||
if err := json.Unmarshal(content, &ret); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
case "csv":
|
||||
log.Infof("Parsing csv")
|
||||
|
||||
if err := csvutil.Unmarshal(content, &ret); err != nil {
|
||||
return nil, fmt.Errorf("unable to parse csv: '%s'", err)
|
||||
}
|
||||
default:
|
||||
return nil, fmt.Errorf("invalid format '%s', expected one of 'json', 'csv', 'values'", format)
|
||||
}
|
||||
|
||||
return ret, nil
|
||||
}
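
A hypothetical test sketch (not part of this PR) exercising parseDecisionList above with the three supported formats; it assumes it is compiled in the same package as the file shown here:

package main

import "testing"

func TestParseDecisionListFormats(t *testing.T) {
	cases := map[string][]byte{
		"csv":    []byte("value,duration\n1.2.3.4,24h\n"),
		"values": []byte("1.2.3.4\n5.6.7.8\n"),
		"json":   []byte(`[{"value": "1.2.3.4", "duration": "24h"}]`),
	}

	for format, content := range cases {
		decisions, err := parseDecisionList(content, format)
		if err != nil {
			t.Fatalf("%s: %s", format, err)
		}

		if len(decisions) == 0 {
			t.Fatalf("%s: expected at least one decision", format)
		}
	}
}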
|
||||
|
||||
|
||||
func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
|
||||
flags := cmd.Flags()
|
||||
|
||||
input, err := flags.GetString("input")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
defaultDuration, err := flags.GetString("duration")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if defaultDuration == "" {
|
||||
return errors.New("--duration cannot be empty")
|
||||
}
|
||||
|
||||
defaultScope, err := flags.GetString("scope")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if defaultScope == "" {
|
||||
return errors.New("--scope cannot be empty")
|
||||
}
|
||||
|
||||
defaultReason, err := flags.GetString("reason")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if defaultReason == "" {
|
||||
return errors.New("--reason cannot be empty")
|
||||
}
|
||||
|
||||
defaultType, err := flags.GetString("type")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if defaultType == "" {
|
||||
return errors.New("--type cannot be empty")
|
||||
}
|
||||
|
||||
batchSize, err := flags.GetInt("batch")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
format, err := flags.GetString("format")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var (
|
||||
content []byte
|
||||
fin *os.File
|
||||
)
|
||||
|
||||
// set format if the file has a json or csv extension
|
||||
if format == "" {
|
||||
if strings.HasSuffix(input, ".json") {
|
||||
format = "json"
|
||||
} else if strings.HasSuffix(input, ".csv") {
|
||||
format = "csv"
|
||||
}
|
||||
}
|
||||
|
||||
if format == "" {
|
||||
return errors.New("unable to guess format from file extension, please provide a format with --format flag")
|
||||
}
|
||||
|
||||
if input == "-" {
|
||||
fin = os.Stdin
|
||||
input = "stdin"
|
||||
} else {
|
||||
fin, err = os.Open(input)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to open %s: %s", input, err)
|
||||
}
|
||||
}
|
||||
|
||||
content, err = io.ReadAll(fin)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to read from %s: %s", input, err)
|
||||
}
|
||||
|
||||
decisionsListRaw, err := parseDecisionList(content, format)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
decisions := make([]*models.Decision, len(decisionsListRaw))
|
||||
|
||||
for i, d := range decisionsListRaw {
|
||||
if d.Value == "" {
|
||||
return fmt.Errorf("item %d: missing 'value'", i)
|
||||
}
|
||||
|
||||
if d.Duration == "" {
|
||||
d.Duration = defaultDuration
|
||||
log.Debugf("item %d: missing 'duration', using default '%s'", i, defaultDuration)
|
||||
}
|
||||
|
||||
if d.Scenario == "" {
|
||||
d.Scenario = defaultReason
|
||||
log.Debugf("item %d: missing 'reason', using default '%s'", i, defaultReason)
|
||||
}
|
||||
|
||||
if d.Type == "" {
|
||||
d.Type = defaultType
|
||||
log.Debugf("item %d: missing 'type', using default '%s'", i, defaultType)
|
||||
}
|
||||
|
||||
if d.Scope == "" {
|
||||
d.Scope = defaultScope
|
||||
log.Debugf("item %d: missing 'scope', using default '%s'", i, defaultScope)
|
||||
}
|
||||
|
||||
decisions[i] = &models.Decision{
|
||||
Value: ptr.Of(d.Value),
|
||||
Duration: ptr.Of(d.Duration),
|
||||
Origin: ptr.Of(types.CscliImportOrigin),
|
||||
Scenario: ptr.Of(d.Scenario),
|
||||
Type: ptr.Of(d.Type),
|
||||
Scope: ptr.Of(d.Scope),
|
||||
Simulated: ptr.Of(false),
|
||||
}
|
||||
}
|
||||
|
||||
if len(decisions) > 1000 {
|
||||
log.Infof("You are about to add %d decisions, this may take a while", len(decisions))
|
||||
}
|
||||
|
||||
for _, chunk := range slicetools.Chunks(decisions, batchSize) {
|
||||
log.Debugf("Processing chunk of %d decisions", len(chunk))
|
||||
importAlert := models.Alert{
|
||||
CreatedAt: time.Now().UTC().Format(time.RFC3339),
|
||||
Scenario: ptr.Of(fmt.Sprintf("import %s: %d IPs", input, len(chunk))),
|
||||
|
||||
Message: ptr.Of(""),
|
||||
Events: []*models.Event{},
|
||||
Source: &models.Source{
|
||||
Scope: ptr.Of(""),
|
||||
Value: ptr.Of(""),
|
||||
},
|
||||
StartAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)),
|
||||
StopAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)),
|
||||
Capacity: ptr.Of(int32(0)),
|
||||
Simulated: ptr.Of(false),
|
||||
EventsCount: ptr.Of(int32(len(chunk))),
|
||||
Leakspeed: ptr.Of(""),
|
||||
ScenarioHash: ptr.Of(""),
|
||||
ScenarioVersion: ptr.Of(""),
|
||||
Decisions: chunk,
|
||||
}
|
||||
|
||||
_, _, err = Client.Alerts.Add(context.Background(), models.AddAlertsRequest{&importAlert})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
log.Infof("Imported %d decisions", len(decisions))
|
||||
|
||||
return nil
|
||||
}
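
The --batch flag splits the import into several Alerts.Add calls via slicetools.Chunks. A generic sketch of the chunking idea (an illustration, not the go-cs-lib implementation; treating batch=0 as "send everything in one call" is an assumption based on the default flag value):

package main

import "fmt"

// chunks splits items into slices of at most size elements; size <= 0 keeps
// everything in a single chunk, i.e. one Add call for the whole import.
func chunks[T any](items []T, size int) [][]T {
	if size <= 0 || size >= len(items) {
		return [][]T{items}
	}

	var out [][]T
	for size < len(items) {
		out = append(out, items[:size])
		items = items[size:]
	}

	return append(out, items)
}

func main() {
	fmt.Println(chunks([]int{1, 2, 3, 4, 5}, 2)) // [[1 2] [3 4] [5]]
}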
|
||||
|
||||
func (cli *cliDecisions) newImportCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "import [options]",
|
||||
Short: "Import decisions from a file or pipe",
|
||||
Long: "expected format:\n" +
|
||||
"csv : any of duration,reason,scope,type,value, with a header line\n" +
|
||||
"json :" + "`{" + `"duration" : "24h", "reason" : "my_scenario", "scope" : "ip", "type" : "ban", "value" : "x.y.z.z"` + "}`",
|
||||
Args: cobra.NoArgs,
|
||||
DisableAutoGenTag: true,
|
||||
Example: `decisions.csv:
|
||||
duration,scope,value
|
||||
24h,ip,1.2.3.4
|
||||
|
||||
$ cscli decisions import -i decisions.csv
|
||||
|
||||
decisions.json:
|
||||
[{"duration" : "4h", "scope" : "ip", "type" : "ban", "value" : "1.2.3.4"}]
|
||||
|
||||
The file format is detected from the extension, but can be forced with the --format option
|
||||
which is required when reading from standard input.
|
||||
|
||||
Raw values, standard input:
|
||||
|
||||
$ echo "1.2.3.4" | cscli decisions import -i - --format values
|
||||
`,
|
||||
RunE: cli.runImport,
|
||||
}
|
||||
|
||||
flags := cmd.Flags()
|
||||
flags.SortFlags = false
|
||||
flags.StringP("input", "i", "", "Input file")
|
||||
flags.StringP("duration", "d", "4h", "Decision duration: 1h,4h,30m")
|
||||
flags.String("scope", types.Ip, "Decision scope: ip,range,username")
|
||||
flags.StringP("reason", "R", "manual", "Decision reason: <scenario-name>")
|
||||
flags.StringP("type", "t", "ban", "Decision type: ban,captcha,throttle")
|
||||
flags.Int("batch", 0, "Split import in batches of N decisions")
|
||||
flags.String("format", "", "Input format: 'json', 'csv' or 'values' (each line is a value, no headers)")
|
||||
|
||||
cmd.MarkFlagRequired("input")
|
||||
|
||||
return cmd
|
||||
}
|
50
cmd/crowdsec-cli/decisions_table.go
Normal file
@@ -0,0 +1,50 @@
package main

import (
	"fmt"
	"io"
	"strconv"

	"github.com/crowdsecurity/crowdsec/pkg/models"
)

func (cli *cliDecisions) decisionsTable(out io.Writer, alerts *models.GetAlertsResponse, printMachine bool) {
	t := newTable(out)
	t.SetRowLines(false)

	header := []string{"ID", "Source", "Scope:Value", "Reason", "Action", "Country", "AS", "Events", "expiration", "Alert ID"}
	if printMachine {
		header = append(header, "Machine")
	}

	t.SetHeaders(header...)

	for _, alertItem := range *alerts {
		for _, decisionItem := range alertItem.Decisions {
			if *alertItem.Simulated {
				*decisionItem.Type = fmt.Sprintf("(simul)%s", *decisionItem.Type)
			}

			row := []string{
				strconv.Itoa(int(decisionItem.ID)),
				*decisionItem.Origin,
				*decisionItem.Scope + ":" + *decisionItem.Value,
				*decisionItem.Scenario,
				*decisionItem.Type,
				alertItem.Source.Cn,
				alertItem.Source.GetAsNumberName(),
				strconv.Itoa(int(*alertItem.EventsCount)),
				*decisionItem.Duration,
				strconv.Itoa(int(alertItem.ID)),
			}

			if printMachine {
				row = append(row, alertItem.MachineID)
			}

			t.AddRow(row...)
		}
	}

	t.Render()
}
51
cmd/crowdsec-cli/doc.go
Normal file
@@ -0,0 +1,51 @@
package main

import (
	"fmt"
	"path/filepath"
	"strings"

	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
)

type cliDoc struct{}

func NewCLIDoc() *cliDoc {
	return &cliDoc{}
}

func (cli cliDoc) NewCommand(rootCmd *cobra.Command) *cobra.Command {
	cmd := &cobra.Command{
		Use:               "doc",
		Short:             "Generate the documentation in `./doc/`. Directory must exist.",
		Args:              cobra.ExactArgs(0),
		Hidden:            true,
		DisableAutoGenTag: true,
		RunE: func(_ *cobra.Command, _ []string) error {
			if err := doc.GenMarkdownTreeCustom(rootCmd, "./doc/", cli.filePrepender, cli.linkHandler); err != nil {
				return fmt.Errorf("failed to generate cobra doc: %s", err)
			}
			return nil
		},
	}

	return cmd
}

func (cli cliDoc) filePrepender(filename string) string {
	const header = `---
id: %s
title: %s
---
`

	name := filepath.Base(filename)
	base := strings.TrimSuffix(name, filepath.Ext(name))

	return fmt.Sprintf(header, base, strings.ReplaceAll(base, "_", " "))
}

func (cli cliDoc) linkHandler(name string) string {
	return fmt.Sprintf("/cscli/%s", name)
}
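
For illustration (the page name cscli_decisions_list.md is an assumption), filePrepender above would prepend the following front matter to the generated markdown page, and linkHandler would rewrite cross-links to /cscli/<page>:

---
id: cscli_decisions_list
title: cscli decisions list
---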
@ -1,26 +1,69 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cstest"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/dumps"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/hubtest"
|
||||
)
|
||||
|
||||
func NewExplainCmd() *cobra.Command {
|
||||
/* ---- HUB COMMAND */
|
||||
var logFile string
|
||||
var dsn string
|
||||
var logLine string
|
||||
var logType string
|
||||
var opts cstest.DumpOpts
|
||||
func getLineCountForFile(filepath string) (int, error) {
|
||||
f, err := os.Open(filepath)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
var cmdExplain = &cobra.Command{
|
||||
lc := 0
|
||||
fs := bufio.NewReader(f)
|
||||
|
||||
for {
|
||||
input, err := fs.ReadBytes('\n')
|
||||
if len(input) > 1 {
|
||||
lc++
|
||||
}
|
||||
|
||||
if err != nil && err == io.EOF {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return lc, nil
|
||||
}
|
||||
|
||||
type cliExplain struct {
|
||||
cfg configGetter
|
||||
flags struct {
|
||||
logFile string
|
||||
dsn string
|
||||
logLine string
|
||||
logType string
|
||||
details bool
|
||||
skipOk bool
|
||||
onlySuccessfulParsers bool
|
||||
noClean bool
|
||||
crowdsec string
|
||||
labels string
|
||||
}
|
||||
}
|
||||
|
||||
func NewCLIExplain(cfg configGetter) *cliExplain {
|
||||
return &cliExplain{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliExplain) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "explain",
|
||||
Short: "Explain log pipeline",
|
||||
Long: `
|
||||
|
@ -30,85 +73,177 @@ Explain log pipeline
|
|||
cscli explain --file ./myfile.log --type nginx
|
||||
cscli explain --log "Sep 19 18:33:22 scw-d95986 sshd[24347]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=1.2.3.4" --type syslog
|
||||
cscli explain --dsn "file://myfile.log" --type nginx
|
||||
tail -n 5 myfile.log | cscli explain --type nginx -f -
|
||||
`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
|
||||
if logType == "" || (logLine == "" && logFile == "" && dsn == "") {
|
||||
printHelp(cmd)
|
||||
fmt.Println()
|
||||
fmt.Printf("Please provide --type flag\n")
|
||||
os.Exit(1)
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.run()
|
||||
},
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
fileInfo, _ := os.Stdin.Stat()
|
||||
if cli.flags.logFile == "-" && ((fileInfo.Mode() & os.ModeCharDevice) == os.ModeCharDevice) {
|
||||
return errors.New("the option -f - is intended to work with pipes")
|
||||
}
|
||||
|
||||
// we create a temporary log file if a log line has been provided
|
||||
if logLine != "" {
|
||||
logFile = "./cscli_test_tmp.log"
|
||||
f, err := os.Create(logFile)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
_, err = f.WriteString(logLine)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
if logFile != "" {
|
||||
absolutePath, err := filepath.Abs(logFile)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to get absolute path of '%s', exiting", logFile)
|
||||
}
|
||||
dsn = fmt.Sprintf("file://%s", absolutePath)
|
||||
lineCount := types.GetLineCountForFile(absolutePath)
|
||||
if lineCount > 100 {
|
||||
log.Warnf("log file contains %d lines. This may take lot of resources.", lineCount)
|
||||
}
|
||||
}
|
||||
|
||||
if dsn == "" {
|
||||
log.Fatal("no acquisition (--file or --dsn) provided, can't run cscli test.")
|
||||
}
|
||||
|
||||
cmdArgs := []string{"-c", ConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", "./", "-no-api"}
|
||||
crowdsecCmd := exec.Command("crowdsec", cmdArgs...)
|
||||
output, err := crowdsecCmd.CombinedOutput()
|
||||
if err != nil {
|
||||
fmt.Println(string(output))
|
||||
log.Fatalf("fail to run crowdsec for test: %v", err)
|
||||
}
|
||||
|
||||
// rm the temporary log file if only a log line was provided
|
||||
if logLine != "" {
|
||||
if err := os.Remove(logFile); err != nil {
|
||||
log.Fatalf("unable to remove tmp log file '%s': %+v", logFile, err)
|
||||
}
|
||||
}
|
||||
parserDumpFile := filepath.Join("./", cstest.ParserResultFileName)
|
||||
bucketStateDumpFile := filepath.Join("./", cstest.BucketPourResultFileName)
|
||||
|
||||
parserDump, err := cstest.LoadParserDump(parserDumpFile)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to load parser dump result: %s", err)
|
||||
}
|
||||
|
||||
bucketStateDump, err := cstest.LoadBucketPourDump(bucketStateDumpFile)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to load bucket dump result: %s", err)
|
||||
}
|
||||
|
||||
cstest.DumpTree(*parserDump, *bucketStateDump, opts)
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdExplain.PersistentFlags().StringVarP(&logFile, "file", "f", "", "Log file to test")
|
||||
cmdExplain.PersistentFlags().StringVarP(&dsn, "dsn", "d", "", "DSN to test")
|
||||
cmdExplain.PersistentFlags().StringVarP(&logLine, "log", "l", "", "Log line to test")
|
||||
cmdExplain.PersistentFlags().StringVarP(&logType, "type", "t", "", "Type of the acquisition to test")
|
||||
cmdExplain.PersistentFlags().BoolVarP(&opts.Details, "verbose", "v", false, "Display individual changes")
|
||||
cmdExplain.PersistentFlags().BoolVar(&opts.SkipOk, "failures", false, "Only show failed lines")
|
||||
|
||||
return cmdExplain
|
||||
flags := cmd.Flags()
|
||||
|
||||
flags.StringVarP(&cli.flags.logFile, "file", "f", "", "Log file to test")
|
||||
flags.StringVarP(&cli.flags.dsn, "dsn", "d", "", "DSN to test")
|
||||
flags.StringVarP(&cli.flags.logLine, "log", "l", "", "Log line to test")
|
||||
flags.StringVarP(&cli.flags.logType, "type", "t", "", "Type of the acquisition to test")
|
||||
flags.StringVar(&cli.flags.labels, "labels", "", "Additional labels to add to the acquisition format (key:value,key2:value2)")
|
||||
flags.BoolVarP(&cli.flags.details, "verbose", "v", false, "Display individual changes")
|
||||
flags.BoolVar(&cli.flags.skipOk, "failures", false, "Only show failed lines")
|
||||
flags.BoolVar(&cli.flags.onlySuccessfulParsers, "only-successful-parsers", false, "Only show successful parsers")
|
||||
flags.StringVar(&cli.flags.crowdsec, "crowdsec", "crowdsec", "Path to crowdsec")
|
||||
flags.BoolVar(&cli.flags.noClean, "no-clean", false, "Don't clean runtime environment after tests")
|
||||
|
||||
cmd.MarkFlagRequired("type")
|
||||
cmd.MarkFlagsOneRequired("log", "file", "dsn")
|
||||
|
||||
return cmd
|
||||
}
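
The PersistentPreRunE above rejects "-f -" when stdin is an interactive terminal rather than a pipe. A minimal standalone sketch of that check (stdinIsPipe is a hypothetical name, not part of this PR):

package main

import (
	"fmt"
	"os"
)

// stdinIsPipe reports whether stdin is a pipe or redirection rather than a
// character device (an interactive terminal).
func stdinIsPipe() bool {
	fi, err := os.Stdin.Stat()
	if err != nil {
		return false
	}

	return fi.Mode()&os.ModeCharDevice == 0
}

func main() {
	fmt.Println("stdin is a pipe:", stdinIsPipe())
}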
|
||||
|
||||
func (cli *cliExplain) run() error {
|
||||
logFile := cli.flags.logFile
|
||||
logLine := cli.flags.logLine
|
||||
logType := cli.flags.logType
|
||||
dsn := cli.flags.dsn
|
||||
labels := cli.flags.labels
|
||||
crowdsec := cli.flags.crowdsec
|
||||
|
||||
opts := dumps.DumpOpts{
|
||||
Details: cli.flags.details,
|
||||
SkipOk: cli.flags.skipOk,
|
||||
ShowNotOkParsers: !cli.flags.onlySuccessfulParsers,
|
||||
}
|
||||
|
||||
var f *os.File
|
||||
|
||||
// an empty dir argument makes MkdirTemp fall back to the system temp directory (os.TempDir(), typically /tmp)
|
||||
dir, err := os.MkdirTemp("", "cscli_explain")
|
||||
if err != nil {
|
||||
return fmt.Errorf("couldn't create a temporary directory to store cscli explain result: %w", err)
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if cli.flags.noClean {
|
||||
return
|
||||
}
|
||||
|
||||
if _, err := os.Stat(dir); !os.IsNotExist(err) {
|
||||
if err := os.RemoveAll(dir); err != nil {
|
||||
log.Errorf("unable to delete temporary directory '%s': %s", dir, err)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
// we create a temporary log file if a log line/stdin has been provided
|
||||
if logLine != "" || logFile == "-" {
|
||||
tmpFile := filepath.Join(dir, "cscli_test_tmp.log")
|
||||
|
||||
f, err = os.Create(tmpFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if logLine != "" {
|
||||
_, err = f.WriteString(logLine)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else if logFile == "-" {
|
||||
reader := bufio.NewReader(os.Stdin)
|
||||
errCount := 0
|
||||
|
||||
for {
|
||||
input, err := reader.ReadBytes('\n')
|
||||
if err != nil && errors.Is(err, io.EOF) {
|
||||
break
|
||||
}
|
||||
|
||||
if len(input) > 1 {
|
||||
_, err = f.Write(input)
|
||||
}
|
||||
|
||||
if err != nil || len(input) <= 1 {
|
||||
errCount++
|
||||
}
|
||||
}
|
||||
|
||||
if errCount > 0 {
|
||||
log.Warnf("Failed to write %d lines to %s", errCount, tmpFile)
|
||||
}
|
||||
}
|
||||
|
||||
f.Close()
|
||||
// this is the file that was going to be read by crowdsec anyway
|
||||
logFile = tmpFile
|
||||
}
|
||||
|
||||
if logFile != "" {
|
||||
absolutePath, err := filepath.Abs(logFile)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to get absolute path of '%s', exiting", logFile)
|
||||
}
|
||||
|
||||
dsn = fmt.Sprintf("file://%s", absolutePath)
|
||||
|
||||
lineCount, err := getLineCountForFile(absolutePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Debugf("file %s has %d lines", absolutePath, lineCount)
|
||||
|
||||
if lineCount == 0 {
|
||||
return fmt.Errorf("the log file is empty: %s", absolutePath)
|
||||
}
|
||||
|
||||
if lineCount > 100 {
|
||||
log.Warnf("%s contains %d lines. This may take a lot of resources.", absolutePath, lineCount)
|
||||
}
|
||||
}
|
||||
|
||||
if dsn == "" {
|
||||
return errors.New("no acquisition (--file or --dsn) provided, can't run cscli test")
|
||||
}
|
||||
|
||||
cmdArgs := []string{"-c", ConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", dir, "-no-api"}
|
||||
|
||||
if labels != "" {
|
||||
log.Debugf("adding labels %s", labels)
|
||||
cmdArgs = append(cmdArgs, "-label", labels)
|
||||
}
|
||||
|
||||
crowdsecCmd := exec.Command(crowdsec, cmdArgs...)
|
||||
|
||||
output, err := crowdsecCmd.CombinedOutput()
|
||||
if err != nil {
|
||||
fmt.Println(string(output))
|
||||
|
||||
return fmt.Errorf("fail to run crowdsec for test: %w", err)
|
||||
}
|
||||
|
||||
parserDumpFile := filepath.Join(dir, hubtest.ParserResultFileName)
|
||||
bucketStateDumpFile := filepath.Join(dir, hubtest.BucketPourResultFileName)
|
||||
|
||||
parserDump, err := dumps.LoadParserDump(parserDumpFile)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to load parser dump result: %w", err)
|
||||
}
|
||||
|
||||
bucketStateDump, err := dumps.LoadBucketPourDump(bucketStateDumpFile)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to load bucket dump result: %w", err)
|
||||
}
|
||||
|
||||
dumps.DumpTree(*parserDump, *bucketStateDump, opts)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
|
29
cmd/crowdsec-cli/flag.go
Normal file
@@ -0,0 +1,29 @@
package main

// Custom types for flag validation and conversion.

import (
	"errors"
)

type MachinePassword string

func (p *MachinePassword) String() string {
	return string(*p)
}

func (p *MachinePassword) Set(v string) error {
	// a password can't be more than 72 characters
	// due to bcrypt limitations
	if len(v) > 72 {
		return errors.New("password too long (max 72 characters)")
	}

	*p = MachinePassword(v)

	return nil
}

func (p *MachinePassword) Type() string {
	return "string"
}
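
Because MachinePassword implements String, Set and Type it satisfies pflag.Value, so the 72-character bcrypt limit is enforced at flag-parsing time. A sketch of how such a type could be wired into a cobra command (newFakeAddCmd and the flag name are illustrative, not taken from this PR):

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func newFakeAddCmd() *cobra.Command {
	var password MachinePassword

	cmd := &cobra.Command{
		Use: "fake-add",
		RunE: func(_ *cobra.Command, _ []string) error {
			fmt.Printf("password is %d characters long\n", len(password))
			return nil
		},
	}

	// Var/VarP accept any pflag.Value; validation happens in Set().
	cmd.Flags().VarP(&password, "password", "p", "machine password")

	return cmd
}

func main() {
	cmd := newFakeAddCmd()
	cmd.SetArgs([]string{"--password", "s3cret"})
	_ = cmd.Execute()
}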
@ -1,141 +1,228 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
|
||||
"github.com/fatih/color"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
)
|
||||
|
||||
func NewHubCmd() *cobra.Command {
|
||||
/* ---- HUB COMMAND */
|
||||
var cmdHub = &cobra.Command{
|
||||
type cliHub struct {
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
func NewCLIHub(cfg configGetter) *cliHub {
|
||||
return &cliHub{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliHub) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "hub [action]",
|
||||
Short: "Manage Hub",
|
||||
Long: `
|
||||
Hub management
|
||||
Short: "Manage hub index",
|
||||
Long: `Hub management
|
||||
|
||||
List/update parsers/scenarios/postoverflows/collections from [Crowdsec Hub](https://hub.crowdsec.net).
|
||||
Hub is manage by cscli, to get latest hub files from [Crowdsec Hub](https://hub.crowdsec.net), you need to update.
|
||||
`,
|
||||
Example: `
|
||||
cscli hub list # List all installed configurations
|
||||
cscli hub update # Download list of available configurations from the hub
|
||||
`,
|
||||
The Hub is managed by cscli, to get the latest hub files from [Crowdsec Hub](https://hub.crowdsec.net), you need to update.`,
|
||||
Example: `cscli hub list
|
||||
cscli hub update
|
||||
cscli hub upgrade`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
|
||||
if csConfig.Cscli == nil {
|
||||
return fmt.Errorf("you must configure cli before interacting with hub")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdHub.PersistentFlags().StringVarP(&cwhub.HubBranch, "branch", "b", "", "Use given branch from hub")
|
||||
|
||||
var cmdHubList = &cobra.Command{
|
||||
cmd.AddCommand(cli.newListCmd())
|
||||
cmd.AddCommand(cli.newUpdateCmd())
|
||||
cmd.AddCommand(cli.newUpgradeCmd())
|
||||
cmd.AddCommand(cli.newTypesCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHub) list(all bool) error {
|
||||
hub, err := require.Hub(cli.cfg(), nil, log.StandardLogger())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, v := range hub.Warnings {
|
||||
log.Info(v)
|
||||
}
|
||||
|
||||
for _, line := range hub.ItemStats() {
|
||||
log.Info(line)
|
||||
}
|
||||
|
||||
items := make(map[string][]*cwhub.Item)
|
||||
|
||||
for _, itemType := range cwhub.ItemTypes {
|
||||
items[itemType], err = selectItems(hub, itemType, nil, !all)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
err = listItems(color.Output, cwhub.ItemTypes, items, true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliHub) newListCmd() *cobra.Command {
|
||||
var all bool
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "list [-a]",
|
||||
Short: "List installed configs",
|
||||
Short: "List all installed configurations",
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
|
||||
if err := csConfig.LoadHub(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
if err := cwhub.GetHubIdx(csConfig.Hub); err != nil {
|
||||
log.Fatalf("Failed to get Hub index : %v", err)
|
||||
log.Infoln("Run 'sudo cscli hub update' to get the hub index")
|
||||
}
|
||||
//use LocalSync to get warnings about tainted / outdated items
|
||||
_, warn := cwhub.LocalSync(csConfig.Hub)
|
||||
for _, v := range warn {
|
||||
log.Info(v)
|
||||
}
|
||||
cwhub.DisplaySummary()
|
||||
ListItems([]string{
|
||||
cwhub.COLLECTIONS, cwhub.PARSERS, cwhub.SCENARIOS, cwhub.PARSERS_OVFLW,
|
||||
}, args, true, false, all)
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.list(all)
|
||||
},
|
||||
}
|
||||
cmdHubList.PersistentFlags().BoolVarP(&all, "all", "a", false, "List disabled items as well")
|
||||
cmdHub.AddCommand(cmdHubList)
|
||||
|
||||
var cmdHubUpdate = &cobra.Command{
|
||||
flags := cmd.Flags()
|
||||
flags.BoolVarP(&all, "all", "a", false, "List disabled items as well")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHub) update() error {
|
||||
local := cli.cfg().Hub
|
||||
remote := require.RemoteHub(cli.cfg())
|
||||
|
||||
// don't use require.Hub because if there is no index file, it would fail
|
||||
hub, err := cwhub.NewHub(local, remote, true, log.StandardLogger())
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to update hub: %w", err)
|
||||
}
|
||||
|
||||
for _, v := range hub.Warnings {
|
||||
log.Info(v)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliHub) newUpdateCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "update",
|
||||
Short: "Fetch available configs from hub",
|
||||
Short: "Download the latest index (catalog of available configurations)",
|
||||
Long: `
|
||||
Fetches the [.index.json](https://github.com/crowdsecurity/hub/blob/master/.index.json) file from hub, containing the list of available configs.
|
||||
Fetches the .index.json file from the hub, containing the list of available configs.
|
||||
`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
|
||||
if csConfig.Cscli == nil {
|
||||
return fmt.Errorf("you must configure cli before interacting with hub")
|
||||
}
|
||||
|
||||
if err := setHubBranch(); err != nil {
|
||||
return fmt.Errorf("error while setting hub branch: %s", err)
|
||||
}
|
||||
return nil
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if err := csConfig.LoadHub(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
if err := cwhub.UpdateHubIdx(csConfig.Hub); err != nil {
|
||||
log.Fatalf("Failed to get Hub index : %v", err)
|
||||
}
|
||||
//use LocalSync to get warnings about tainted / outdated items
|
||||
_, warn := cwhub.LocalSync(csConfig.Hub)
|
||||
for _, v := range warn {
|
||||
log.Info(v)
|
||||
}
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.update()
|
||||
},
|
||||
}
|
||||
cmdHub.AddCommand(cmdHubUpdate)
|
||||
|
||||
var cmdHubUpgrade = &cobra.Command{
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHub) upgrade(force bool) error {
|
||||
hub, err := require.Hub(cli.cfg(), require.RemoteHub(cli.cfg()), log.StandardLogger())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, itemType := range cwhub.ItemTypes {
|
||||
items, err := hub.GetInstalledItemsByType(itemType)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
updated := 0
|
||||
|
||||
log.Infof("Upgrading %s", itemType)
|
||||
|
||||
for _, item := range items {
|
||||
didUpdate, err := item.Upgrade(force)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if didUpdate {
|
||||
updated++
|
||||
}
|
||||
}
|
||||
|
||||
log.Infof("Upgraded %d %s", updated, itemType)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliHub) newUpgradeCmd() *cobra.Command {
|
||||
var force bool
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "upgrade",
|
||||
Short: "Upgrade all configs installed from hub",
|
||||
Short: "Upgrade all configurations to their latest version",
|
||||
Long: `
|
||||
Upgrade all configs installed from Crowdsec Hub. Run 'sudo cscli hub update' if you want the latest versions available.
|
||||
`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
|
||||
if csConfig.Cscli == nil {
|
||||
return fmt.Errorf("you must configure cli before interacting with hub")
|
||||
}
|
||||
|
||||
if err := setHubBranch(); err != nil {
|
||||
return fmt.Errorf("error while setting hub branch: %s", err)
|
||||
}
|
||||
return nil
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if err := csConfig.LoadHub(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
if err := cwhub.GetHubIdx(csConfig.Hub); err != nil {
|
||||
log.Fatalf("Failed to get Hub index : %v", err)
|
||||
log.Infoln("Run 'sudo cscli hub update' to get the hub index")
|
||||
}
|
||||
|
||||
log.Infof("Upgrading collections")
|
||||
cwhub.UpgradeConfig(csConfig, cwhub.COLLECTIONS, "", forceAction)
|
||||
log.Infof("Upgrading parsers")
|
||||
cwhub.UpgradeConfig(csConfig, cwhub.PARSERS, "", forceAction)
|
||||
log.Infof("Upgrading scenarios")
|
||||
cwhub.UpgradeConfig(csConfig, cwhub.SCENARIOS, "", forceAction)
|
||||
log.Infof("Upgrading postoverflows")
|
||||
cwhub.UpgradeConfig(csConfig, cwhub.PARSERS_OVFLW, "", forceAction)
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.upgrade(force)
|
||||
},
|
||||
}
|
||||
cmdHubUpgrade.PersistentFlags().BoolVar(&forceAction, "force", false, "Force upgrade : Overwrite tainted and outdated files")
|
||||
cmdHub.AddCommand(cmdHubUpgrade)
|
||||
return cmdHub
|
||||
|
||||
flags := cmd.Flags()
|
||||
flags.BoolVar(&force, "force", false, "Force upgrade: overwrite tainted and outdated files")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHub) types() error {
|
||||
switch cli.cfg().Cscli.Output {
|
||||
case "human":
|
||||
s, err := yaml.Marshal(cwhub.ItemTypes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Print(string(s))
|
||||
case "json":
|
||||
jsonStr, err := json.Marshal(cwhub.ItemTypes)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Println(string(jsonStr))
|
||||
case "raw":
|
||||
for _, itemType := range cwhub.ItemTypes {
|
||||
fmt.Println(itemType)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliHub) newTypesCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "types",
|
||||
Short: "List supported item types",
|
||||
Long: `
|
||||
List the types of supported hub items.
|
||||
`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.types()
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
|
123
cmd/crowdsec-cli/hubappsec.go
Normal file
@ -0,0 +1,123 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
"golang.org/x/text/cases"
|
||||
"golang.org/x/text/language"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/appsec"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/appsec/appsec_rule"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
)
|
||||
|
||||
func NewCLIAppsecConfig(cfg configGetter) *cliItem {
|
||||
return &cliItem{
|
||||
cfg: cfg,
|
||||
name: cwhub.APPSEC_CONFIGS,
|
||||
singular: "appsec-config",
|
||||
oneOrMore: "appsec-config(s)",
|
||||
help: cliHelp{
|
||||
example: `cscli appsec-configs list -a
|
||||
cscli appsec-configs install crowdsecurity/vpatch
|
||||
cscli appsec-configs inspect crowdsecurity/vpatch
|
||||
cscli appsec-configs upgrade crowdsecurity/vpatch
|
||||
cscli appsec-configs remove crowdsecurity/vpatch
|
||||
`,
|
||||
},
|
||||
installHelp: cliHelp{
|
||||
example: `cscli appsec-configs install crowdsecurity/vpatch`,
|
||||
},
|
||||
removeHelp: cliHelp{
|
||||
example: `cscli appsec-configs remove crowdsecurity/vpatch`,
|
||||
},
|
||||
upgradeHelp: cliHelp{
|
||||
example: `cscli appsec-configs upgrade crowdsecurity/vpatch`,
|
||||
},
|
||||
inspectHelp: cliHelp{
|
||||
example: `cscli appsec-configs inspect crowdsecurity/vpatch`,
|
||||
},
|
||||
listHelp: cliHelp{
|
||||
example: `cscli appsec-configs list
|
||||
cscli appsec-configs list -a
|
||||
cscli appsec-configs list crowdsecurity/vpatch`,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func NewCLIAppsecRule(cfg configGetter) *cliItem {
|
||||
inspectDetail := func(item *cwhub.Item) error {
|
||||
// Only show the converted rules in human mode
|
||||
if csConfig.Cscli.Output != "human" {
|
||||
return nil
|
||||
}
|
||||
|
||||
appsecRule := appsec.AppsecCollectionConfig{}
|
||||
|
||||
yamlContent, err := os.ReadFile(item.State.LocalPath)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to read file %s: %w", item.State.LocalPath, err)
|
||||
}
|
||||
|
||||
if err := yaml.Unmarshal(yamlContent, &appsecRule); err != nil {
|
||||
return fmt.Errorf("unable to unmarshal yaml file %s: %w", item.State.LocalPath, err)
|
||||
}
|
||||
|
||||
for _, ruleType := range appsec_rule.SupportedTypes() {
|
||||
fmt.Printf("\n%s format:\n", cases.Title(language.Und, cases.NoLower).String(ruleType))
|
||||
|
||||
for _, rule := range appsecRule.Rules {
|
||||
convertedRule, _, err := rule.Convert(ruleType, appsecRule.Name)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to convert rule %s: %w", rule.Name, err)
|
||||
}
|
||||
|
||||
fmt.Println(convertedRule)
|
||||
}
|
||||
|
||||
switch ruleType { //nolint:gocritic
|
||||
case appsec_rule.ModsecurityRuleType:
|
||||
for _, rule := range appsecRule.SecLangRules {
|
||||
fmt.Println(rule)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
return &cliItem{
|
||||
cfg: cfg,
|
||||
name: "appsec-rules",
|
||||
singular: "appsec-rule",
|
||||
oneOrMore: "appsec-rule(s)",
|
||||
help: cliHelp{
|
||||
example: `cscli appsec-rules list -a
|
||||
cscli appsec-rules install crowdsecurity/crs
|
||||
cscli appsec-rules inspect crowdsecurity/crs
|
||||
cscli appsec-rules upgrade crowdsecurity/crs
|
||||
cscli appsec-rules remove crowdsecurity/crs
|
||||
`,
|
||||
},
|
||||
installHelp: cliHelp{
|
||||
example: `cscli appsec-rules install crowdsecurity/crs`,
|
||||
},
|
||||
removeHelp: cliHelp{
|
||||
example: `cscli appsec-rules remove crowdsecurity/crs`,
|
||||
},
|
||||
upgradeHelp: cliHelp{
|
||||
example: `cscli appsec-rules upgrade crowdsecurity/crs`,
|
||||
},
|
||||
inspectHelp: cliHelp{
|
||||
example: `cscli appsec-rules inspect crowdsecurity/crs`,
|
||||
},
|
||||
inspectDetail: inspectDetail,
|
||||
listHelp: cliHelp{
|
||||
example: `cscli appsec-rules list
|
||||
cscli appsec-rules list -a
|
||||
cscli appsec-rules list crowdsecurity/crs`,
|
||||
},
|
||||
}
|
||||
}
|
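For reference, the conversion logic that inspectDetail relies on above can be exercised on its own. A hedged sketch, assuming it is built inside the crowdsec module; the rule file path is hypothetical, while every call (AppsecCollectionConfig, SupportedTypes, Convert) comes from the code in this diff:

package main

import (
	"fmt"
	"os"

	"gopkg.in/yaml.v3"

	"github.com/crowdsecurity/crowdsec/pkg/appsec"
	"github.com/crowdsecurity/crowdsec/pkg/appsec/appsec_rule"
)

func main() {
	// Hypothetical path to a local appsec-rule YAML file.
	raw, err := os.ReadFile("my-rule.yaml")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	cfg := appsec.AppsecCollectionConfig{}
	if err := yaml.Unmarshal(raw, &cfg); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}

	// Same loop as inspectDetail: every rule, in every supported output format.
	for _, ruleType := range appsec_rule.SupportedTypes() {
		for _, rule := range cfg.Rules {
			converted, _, err := rule.Convert(ruleType, cfg.Name)
			if err != nil {
				fmt.Fprintln(os.Stderr, err)
				os.Exit(1)
			}

			fmt.Printf("%s format:\n%s\n", ruleType, converted)
		}
	}
}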
41 cmd/crowdsec-cli/hubcollection.go Normal file
@@ -0,0 +1,41 @@
package main
|
||||
|
||||
import (
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
)
|
||||
|
||||
func NewCLICollection(cfg configGetter) *cliItem {
|
||||
return &cliItem{
|
||||
cfg: cfg,
|
||||
name: cwhub.COLLECTIONS,
|
||||
singular: "collection",
|
||||
oneOrMore: "collection(s)",
|
||||
help: cliHelp{
|
||||
example: `cscli collections list -a
|
||||
cscli collections install crowdsecurity/http-cve crowdsecurity/iptables
|
||||
cscli collections inspect crowdsecurity/http-cve crowdsecurity/iptables
|
||||
cscli collections upgrade crowdsecurity/http-cve crowdsecurity/iptables
|
||||
cscli collections remove crowdsecurity/http-cve crowdsecurity/iptables
|
||||
`,
|
||||
},
|
||||
installHelp: cliHelp{
|
||||
example: `cscli collections install crowdsecurity/http-cve crowdsecurity/iptables`,
|
||||
},
|
||||
removeHelp: cliHelp{
|
||||
example: `cscli collections remove crowdsecurity/http-cve crowdsecurity/iptables`,
|
||||
},
|
||||
upgradeHelp: cliHelp{
|
||||
example: `cscli collections upgrade crowdsecurity/http-cve crowdsecurity/iptables`,
|
||||
},
|
||||
inspectHelp: cliHelp{
|
||||
example: `cscli collections inspect crowdsecurity/http-cve crowdsecurity/iptables`,
|
||||
},
|
||||
listHelp: cliHelp{
|
||||
example: `cscli collections list
|
||||
cscli collections list -a
|
||||
cscli collections list crowdsecurity/http-cve crowdsecurity/iptables
|
||||
|
||||
List only enabled collections unless "-a" or names are specified.`,
|
||||
},
|
||||
}
|
||||
}
|
41 cmd/crowdsec-cli/hubcontext.go Normal file
@@ -0,0 +1,41 @@
package main
|
||||
|
||||
import (
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
)
|
||||
|
||||
func NewCLIContext(cfg configGetter) *cliItem {
|
||||
return &cliItem{
|
||||
cfg: cfg,
|
||||
name: cwhub.CONTEXTS,
|
||||
singular: "context",
|
||||
oneOrMore: "context(s)",
|
||||
help: cliHelp{
|
||||
example: `cscli contexts list -a
|
||||
cscli contexts install crowdsecurity/yyy crowdsecurity/zzz
|
||||
cscli contexts inspect crowdsecurity/yyy crowdsecurity/zzz
|
||||
cscli contexts upgrade crowdsecurity/yyy crowdsecurity/zzz
|
||||
cscli contexts remove crowdsecurity/yyy crowdsecurity/zzz
|
||||
`,
|
||||
},
|
||||
installHelp: cliHelp{
|
||||
example: `cscli contexts install crowdsecurity/yyy crowdsecurity/zzz`,
|
||||
},
|
||||
removeHelp: cliHelp{
|
||||
example: `cscli contexts remove crowdsecurity/yyy crowdsecurity/zzz`,
|
||||
},
|
||||
upgradeHelp: cliHelp{
|
||||
example: `cscli contexts upgrade crowdsecurity/yyy crowdsecurity/zzz`,
|
||||
},
|
||||
inspectHelp: cliHelp{
|
||||
example: `cscli contexts inspect crowdsecurity/yyy crowdsecurity/zzz`,
|
||||
},
|
||||
listHelp: cliHelp{
|
||||
example: `cscli contexts list
|
||||
cscli contexts list -a
|
||||
cscli contexts list crowdsecurity/yyy crowdsecurity/zzz
|
||||
|
||||
List only enabled contexts unless "-a" or names are specified.`,
|
||||
},
|
||||
}
|
||||
}
|
41 cmd/crowdsec-cli/hubparser.go Normal file
@@ -0,0 +1,41 @@
package main
|
||||
|
||||
import (
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
)
|
||||
|
||||
func NewCLIParser(cfg configGetter) *cliItem {
|
||||
return &cliItem{
|
||||
cfg: cfg,
|
||||
name: cwhub.PARSERS,
|
||||
singular: "parser",
|
||||
oneOrMore: "parser(s)",
|
||||
help: cliHelp{
|
||||
example: `cscli parsers list -a
|
||||
cscli parsers install crowdsecurity/caddy-logs crowdsecurity/sshd-logs
|
||||
cscli parsers inspect crowdsecurity/caddy-logs crowdsecurity/sshd-logs
|
||||
cscli parsers upgrade crowdsecurity/caddy-logs crowdsecurity/sshd-logs
|
||||
cscli parsers remove crowdsecurity/caddy-logs crowdsecurity/sshd-logs
|
||||
`,
|
||||
},
|
||||
installHelp: cliHelp{
|
||||
example: `cscli parsers install crowdsecurity/caddy-logs crowdsecurity/sshd-logs`,
|
||||
},
|
||||
removeHelp: cliHelp{
|
||||
example: `cscli parsers remove crowdsecurity/caddy-logs crowdsecurity/sshd-logs`,
|
||||
},
|
||||
upgradeHelp: cliHelp{
|
||||
example: `cscli parsers upgrade crowdsecurity/caddy-logs crowdsecurity/sshd-logs`,
|
||||
},
|
||||
inspectHelp: cliHelp{
|
||||
example: `cscli parsers inspect crowdsecurity/httpd-logs crowdsecurity/sshd-logs`,
|
||||
},
|
||||
listHelp: cliHelp{
|
||||
example: `cscli parsers list
|
||||
cscli parsers list -a
|
||||
cscli parsers list crowdsecurity/caddy-logs crowdsecurity/sshd-logs
|
||||
|
||||
List only enabled parsers unless "-a" or names are specified.`,
|
||||
},
|
||||
}
|
||||
}
|
41 cmd/crowdsec-cli/hubpostoverflow.go Normal file
@@ -0,0 +1,41 @@
package main
|
||||
|
||||
import (
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
)
|
||||
|
||||
func NewCLIPostOverflow(cfg configGetter) *cliItem {
|
||||
return &cliItem{
|
||||
cfg: cfg,
|
||||
name: cwhub.POSTOVERFLOWS,
|
||||
singular: "postoverflow",
|
||||
oneOrMore: "postoverflow(s)",
|
||||
help: cliHelp{
|
||||
example: `cscli postoverflows list -a
|
||||
cscli postoverflows install crowdsecurity/cdn-whitelist crowdsecurity/rdns
|
||||
cscli postoverflows inspect crowdsecurity/cdn-whitelist crowdsecurity/rdns
|
||||
cscli postoverflows upgrade crowdsecurity/cdn-whitelist crowdsecurity/rdns
|
||||
cscli postoverflows remove crowdsecurity/cdn-whitelist crowdsecurity/rdns
|
||||
`,
|
||||
},
|
||||
installHelp: cliHelp{
|
||||
example: `cscli postoverflows install crowdsecurity/cdn-whitelist crowdsecurity/rdns`,
|
||||
},
|
||||
removeHelp: cliHelp{
|
||||
example: `cscli postoverflows remove crowdsecurity/cdn-whitelist crowdsecurity/rdns`,
|
||||
},
|
||||
upgradeHelp: cliHelp{
|
||||
example: `cscli postoverflows upgrade crowdsecurity/cdn-whitelist crowdsecurity/rdns`,
|
||||
},
|
||||
inspectHelp: cliHelp{
|
||||
example: `cscli postoverflows inspect crowdsecurity/cdn-whitelist crowdsecurity/rdns`,
|
||||
},
|
||||
listHelp: cliHelp{
|
||||
example: `cscli postoverflows list
|
||||
cscli postoverflows list -a
|
||||
cscli postoverflows list crowdsecurity/cdn-whitelist crowdsecurity/rdns
|
||||
|
||||
List only enabled postoverflows unless "-a" or names are specified.`,
|
||||
},
|
||||
}
|
||||
}
|
41 cmd/crowdsec-cli/hubscenario.go Normal file
@@ -0,0 +1,41 @@
package main
|
||||
|
||||
import (
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
)
|
||||
|
||||
func NewCLIScenario(cfg configGetter) *cliItem {
|
||||
return &cliItem{
|
||||
cfg: cfg,
|
||||
name: cwhub.SCENARIOS,
|
||||
singular: "scenario",
|
||||
oneOrMore: "scenario(s)",
|
||||
help: cliHelp{
|
||||
example: `cscli scenarios list -a
|
||||
cscli scenarios install crowdsecurity/ssh-bf crowdsecurity/http-probing
|
||||
cscli scenarios inspect crowdsecurity/ssh-bf crowdsecurity/http-probing
|
||||
cscli scenarios upgrade crowdsecurity/ssh-bf crowdsecurity/http-probing
|
||||
cscli scenarios remove crowdsecurity/ssh-bf crowdsecurity/http-probing
|
||||
`,
|
||||
},
|
||||
installHelp: cliHelp{
|
||||
example: `cscli scenarios install crowdsecurity/ssh-bf crowdsecurity/http-probing`,
|
||||
},
|
||||
removeHelp: cliHelp{
|
||||
example: `cscli scenarios remove crowdsecurity/ssh-bf crowdsecurity/http-probing`,
|
||||
},
|
||||
upgradeHelp: cliHelp{
|
||||
example: `cscli scenarios upgrade crowdsecurity/ssh-bf crowdsecurity/http-probing`,
|
||||
},
|
||||
inspectHelp: cliHelp{
|
||||
example: `cscli scenarios inspect crowdsecurity/ssh-bf crowdsecurity/http-probing`,
|
||||
},
|
||||
listHelp: cliHelp{
|
||||
example: `cscli scenarios list
|
||||
cscli scenarios list -a
|
||||
cscli scenarios list crowdsecurity/ssh-bf crowdsecurity/http-probing
|
||||
|
||||
List only enabled scenarios unless "-a" or names are specified.`,
|
||||
},
|
||||
}
|
||||
}
|
|
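The six constructors above differ only in the strings they place into cliItem, so supporting a new hub item type is mostly a matter of filling the same struct inside this package. A hypothetical sketch: the "waf-profiles" type and its example are invented for illustration, only the field names come from the diff:

// NewCLIWafProfile shows the cliItem pattern for an invented item type.
func NewCLIWafProfile(cfg configGetter) *cliItem {
	return &cliItem{
		cfg:       cfg,
		name:      "waf-profiles", // hypothetical type, not present in cwhub
		singular:  "waf-profile",
		oneOrMore: "waf-profile(s)",
		help: cliHelp{
			example: `cscli waf-profiles list -a
cscli waf-profiles install crowdsecurity/example`,
		},
	}
}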
cmd/crowdsec-cli/hubtest.go
@@ -2,59 +2,106 @@ package main
|
|||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"github.com/AlecAivazis/survey/v2"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cstest"
|
||||
"github.com/enescakir/emoji"
|
||||
"github.com/olekukonko/tablewriter"
|
||||
"github.com/fatih/color"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v2"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/dumps"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/emoji"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/hubtest"
|
||||
)
|
||||
|
||||
var (
|
||||
HubTest cstest.HubTest
|
||||
HubTest hubtest.HubTest
|
||||
HubAppsecTests hubtest.HubTest
|
||||
hubPtr *hubtest.HubTest
|
||||
isAppsecTest bool
|
||||
)
|
||||
|
||||
func NewHubTestCmd() *cobra.Command {
|
||||
/* ---- HUB COMMAND */
|
||||
var hubPath string
|
||||
var logType string
|
||||
var crowdsecPath string
|
||||
var cscliPath string
|
||||
type cliHubTest struct {
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
var cmdHubTest = &cobra.Command{
|
||||
Use: "hubtest",
|
||||
Short: "Run functional tests on hub configurations",
|
||||
Long: `
|
||||
Run functional tests on hub configurations (parsers, scenarios, collections...)
|
||||
`,
|
||||
func NewCLIHubTest(cfg configGetter) *cliHubTest {
|
||||
return &cliHubTest{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliHubTest) NewCommand() *cobra.Command {
|
||||
var (
|
||||
hubPath string
|
||||
crowdsecPath string
|
||||
cscliPath string
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "hubtest",
|
||||
Short: "Run functional tests on hub configurations",
|
||||
Long: "Run functional tests on hub configurations (parsers, scenarios, collections...)",
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
var err error
|
||||
HubTest, err = cstest.NewHubTest(hubPath, crowdsecPath, cscliPath)
|
||||
HubTest, err = hubtest.NewHubTest(hubPath, crowdsecPath, cscliPath, false)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to load hubtest: %+v", err)
|
||||
return fmt.Errorf("unable to load hubtest: %+v", err)
|
||||
}
|
||||
|
||||
HubAppsecTests, err = hubtest.NewHubTest(hubPath, crowdsecPath, cscliPath, true)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to load appsec specific hubtest: %+v", err)
|
||||
}
|
||||
|
||||
// commands will use the hubPtr, will point to the default hubTest object, or the one dedicated to appsec tests
|
||||
hubPtr = &HubTest
|
||||
if isAppsecTest {
|
||||
hubPtr = &HubAppsecTests
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdHubTest.PersistentFlags().StringVar(&hubPath, "hub", ".", "Path to hub folder")
|
||||
cmdHubTest.PersistentFlags().StringVar(&crowdsecPath, "crowdsec", "crowdsec", "Path to crowdsec")
|
||||
cmdHubTest.PersistentFlags().StringVar(&cscliPath, "cscli", "cscli", "Path to cscli")
|
||||
|
||||
cmd.PersistentFlags().StringVar(&hubPath, "hub", ".", "Path to hub folder")
|
||||
cmd.PersistentFlags().StringVar(&crowdsecPath, "crowdsec", "crowdsec", "Path to crowdsec")
|
||||
cmd.PersistentFlags().StringVar(&cscliPath, "cscli", "cscli", "Path to cscli")
|
||||
cmd.PersistentFlags().BoolVar(&isAppsecTest, "appsec", false, "Command relates to appsec tests")
|
||||
|
||||
cmd.AddCommand(cli.NewCreateCmd())
|
||||
cmd.AddCommand(cli.NewRunCmd())
|
||||
cmd.AddCommand(cli.NewCleanCmd())
|
||||
cmd.AddCommand(cli.NewInfoCmd())
|
||||
cmd.AddCommand(cli.NewListCmd())
|
||||
cmd.AddCommand(cli.NewCoverageCmd())
|
||||
cmd.AddCommand(cli.NewEvalCmd())
|
||||
cmd.AddCommand(cli.NewExplainCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHubTest) NewCreateCmd() *cobra.Command {
|
||||
var (
|
||||
ignoreParsers bool
|
||||
labels map[string]string
|
||||
logType string
|
||||
)
|
||||
|
||||
parsers := []string{}
|
||||
postoverflows := []string{}
|
||||
scenarios := []string{}
|
||||
var ignoreParsers bool
|
||||
var labels map[string]string
|
||||
|
||||
var cmdHubTestCreate = &cobra.Command{
|
||||
cmd := &cobra.Command{
|
||||
Use: "create",
|
||||
Short: "create [test_name]",
|
||||
Example: `cscli hubtest create my-awesome-test --type syslog
|
||||
|
@ -62,129 +109,170 @@ cscli hubtest create my-nginx-custom-test --type nginx
|
|||
cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios crowdsecurity/http-probing`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
testName := args[0]
|
||||
testPath := filepath.Join(HubTest.HubTestPath, testName)
|
||||
testPath := filepath.Join(hubPtr.HubTestPath, testName)
|
||||
if _, err := os.Stat(testPath); os.IsExist(err) {
|
||||
log.Fatalf("test '%s' already exists in '%s', exiting", testName, testPath)
|
||||
return fmt.Errorf("test '%s' already exists in '%s', exiting", testName, testPath)
|
||||
}
|
||||
|
||||
if isAppsecTest {
|
||||
logType = "appsec"
|
||||
}
|
||||
|
||||
if logType == "" {
|
||||
log.Fatalf("please provide a type (--type) for the test")
|
||||
return errors.New("please provide a type (--type) for the test")
|
||||
}
|
||||
|
||||
if err := os.MkdirAll(testPath, os.ModePerm); err != nil {
|
||||
log.Fatalf("unable to create folder '%s': %+v", testPath, err)
|
||||
}
|
||||
|
||||
// create empty log file
|
||||
logFileName := fmt.Sprintf("%s.log", testName)
|
||||
logFilePath := filepath.Join(testPath, logFileName)
|
||||
logFile, err := os.Create(logFilePath)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
logFile.Close()
|
||||
|
||||
// create empty parser assertion file
|
||||
parserAssertFilePath := filepath.Join(testPath, cstest.ParserAssertFileName)
|
||||
parserAssertFile, err := os.Create(parserAssertFilePath)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
parserAssertFile.Close()
|
||||
|
||||
// create empty scenario assertion file
|
||||
scenarioAssertFilePath := filepath.Join(testPath, cstest.ScenarioAssertFileName)
|
||||
scenarioAssertFile, err := os.Create(scenarioAssertFilePath)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
scenarioAssertFile.Close()
|
||||
|
||||
parsers = append(parsers, "crowdsecurity/syslog-logs")
|
||||
parsers = append(parsers, "crowdsecurity/dateparse-enrich")
|
||||
|
||||
if len(scenarios) == 0 {
|
||||
scenarios = append(scenarios, "")
|
||||
}
|
||||
|
||||
if len(postoverflows) == 0 {
|
||||
postoverflows = append(postoverflows, "")
|
||||
}
|
||||
|
||||
configFileData := &cstest.HubTestItemConfig{
|
||||
Parsers: parsers,
|
||||
Scenarios: scenarios,
|
||||
PostOVerflows: postoverflows,
|
||||
LogFile: logFileName,
|
||||
LogType: logType,
|
||||
IgnoreParsers: ignoreParsers,
|
||||
Labels: labels,
|
||||
return fmt.Errorf("unable to create folder '%s': %+v", testPath, err)
|
||||
}
|
||||
|
||||
configFilePath := filepath.Join(testPath, "config.yaml")
|
||||
fd, err := os.OpenFile(configFilePath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
|
||||
|
||||
configFileData := &hubtest.HubTestItemConfig{}
|
||||
if logType == "appsec" {
|
||||
// create empty nuclei template file
|
||||
nucleiFileName := fmt.Sprintf("%s.yaml", testName)
|
||||
nucleiFilePath := filepath.Join(testPath, nucleiFileName)
|
||||
|
||||
nucleiFile, err := os.OpenFile(nucleiFilePath, os.O_RDWR|os.O_CREATE, 0o755)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
ntpl := template.Must(template.New("nuclei").Parse(hubtest.TemplateNucleiFile))
|
||||
if ntpl == nil {
|
||||
return errors.New("unable to parse nuclei template")
|
||||
}
|
||||
ntpl.ExecuteTemplate(nucleiFile, "nuclei", struct{ TestName string }{TestName: testName})
|
||||
nucleiFile.Close()
|
||||
configFileData.AppsecRules = []string{"./appsec-rules/<author>/your_rule_here.yaml"}
|
||||
configFileData.NucleiTemplate = nucleiFileName
|
||||
fmt.Println()
|
||||
fmt.Printf(" Test name : %s\n", testName)
|
||||
fmt.Printf(" Test path : %s\n", testPath)
|
||||
fmt.Printf(" Config File : %s\n", configFilePath)
|
||||
fmt.Printf(" Nuclei Template : %s\n", nucleiFilePath)
|
||||
} else {
|
||||
// create empty log file
|
||||
logFileName := fmt.Sprintf("%s.log", testName)
|
||||
logFilePath := filepath.Join(testPath, logFileName)
|
||||
logFile, err := os.Create(logFilePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
logFile.Close()
|
||||
|
||||
// create empty parser assertion file
|
||||
parserAssertFilePath := filepath.Join(testPath, hubtest.ParserAssertFileName)
|
||||
parserAssertFile, err := os.Create(parserAssertFilePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
parserAssertFile.Close()
|
||||
// create empty scenario assertion file
|
||||
scenarioAssertFilePath := filepath.Join(testPath, hubtest.ScenarioAssertFileName)
|
||||
scenarioAssertFile, err := os.Create(scenarioAssertFilePath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
scenarioAssertFile.Close()
|
||||
|
||||
parsers = append(parsers, "crowdsecurity/syslog-logs")
|
||||
parsers = append(parsers, "crowdsecurity/dateparse-enrich")
|
||||
|
||||
if len(scenarios) == 0 {
|
||||
scenarios = append(scenarios, "")
|
||||
}
|
||||
|
||||
if len(postoverflows) == 0 {
|
||||
postoverflows = append(postoverflows, "")
|
||||
}
|
||||
configFileData.Parsers = parsers
|
||||
configFileData.Scenarios = scenarios
|
||||
configFileData.PostOverflows = postoverflows
|
||||
configFileData.LogFile = logFileName
|
||||
configFileData.LogType = logType
|
||||
configFileData.IgnoreParsers = ignoreParsers
|
||||
configFileData.Labels = labels
|
||||
fmt.Println()
|
||||
fmt.Printf(" Test name : %s\n", testName)
|
||||
fmt.Printf(" Test path : %s\n", testPath)
|
||||
fmt.Printf(" Log file : %s (please fill it with logs)\n", logFilePath)
|
||||
fmt.Printf(" Parser assertion file : %s (please fill it with assertion)\n", parserAssertFilePath)
|
||||
fmt.Printf(" Scenario assertion file : %s (please fill it with assertion)\n", scenarioAssertFilePath)
|
||||
fmt.Printf(" Configuration File : %s (please fill it with parsers, scenarios...)\n", configFilePath)
|
||||
}
|
||||
|
||||
fd, err := os.Create(configFilePath)
|
||||
if err != nil {
|
||||
log.Fatalf("open: %s", err)
|
||||
return fmt.Errorf("open: %w", err)
|
||||
}
|
||||
data, err := yaml.Marshal(configFileData)
|
||||
if err != nil {
|
||||
log.Fatalf("marshal: %s", err)
|
||||
return fmt.Errorf("marshal: %w", err)
|
||||
}
|
||||
_, err = fd.Write(data)
|
||||
if err != nil {
|
||||
log.Fatalf("write: %s", err)
|
||||
return fmt.Errorf("write: %w", err)
|
||||
}
|
||||
if err := fd.Close(); err != nil {
|
||||
log.Fatalf(" close: %s", err)
|
||||
return fmt.Errorf("close: %w", err)
|
||||
}
|
||||
fmt.Println()
|
||||
fmt.Printf(" Test name : %s\n", testName)
|
||||
fmt.Printf(" Test path : %s\n", testPath)
|
||||
fmt.Printf(" Log file : %s (please fill it with logs)\n", logFilePath)
|
||||
fmt.Printf(" Parser assertion file : %s (please fill it with assertion)\n", parserAssertFilePath)
|
||||
fmt.Printf(" Scenario assertion file : %s (please fill it with assertion)\n", scenarioAssertFilePath)
|
||||
fmt.Printf(" Configuration File : %s (please fill it with parsers, scenarios...)\n", configFilePath)
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdHubTestCreate.PersistentFlags().StringVarP(&logType, "type", "t", "", "Log type of the test")
|
||||
cmdHubTestCreate.Flags().StringSliceVarP(&parsers, "parsers", "p", parsers, "Parsers to add to test")
|
||||
cmdHubTestCreate.Flags().StringSliceVar(&postoverflows, "postoverflows", postoverflows, "Postoverflows to add to test")
|
||||
cmdHubTestCreate.Flags().StringSliceVarP(&scenarios, "scenarios", "s", scenarios, "Scenarios to add to test")
|
||||
cmdHubTestCreate.PersistentFlags().BoolVar(&ignoreParsers, "ignore-parsers", false, "Don't run test on parsers")
|
||||
cmdHubTest.AddCommand(cmdHubTestCreate)
|
||||
|
||||
var noClean bool
|
||||
var runAll bool
|
||||
var forceClean bool
|
||||
var cmdHubTestRun = &cobra.Command{
|
||||
cmd.PersistentFlags().StringVarP(&logType, "type", "t", "", "Log type of the test")
|
||||
cmd.Flags().StringSliceVarP(&parsers, "parsers", "p", parsers, "Parsers to add to test")
|
||||
cmd.Flags().StringSliceVar(&postoverflows, "postoverflows", postoverflows, "Postoverflows to add to test")
|
||||
cmd.Flags().StringSliceVarP(&scenarios, "scenarios", "s", scenarios, "Scenarios to add to test")
|
||||
cmd.PersistentFlags().BoolVar(&ignoreParsers, "ignore-parsers", false, "Don't run test on parsers")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHubTest) NewRunCmd() *cobra.Command {
|
||||
var (
|
||||
noClean bool
|
||||
runAll bool
|
||||
forceClean bool
|
||||
NucleiTargetHost string
|
||||
AppSecHost string
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "run",
|
||||
Short: "run [test_name]",
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
cfg := cli.cfg()
|
||||
|
||||
if !runAll && len(args) == 0 {
|
||||
printHelp(cmd)
|
||||
fmt.Println("Please provide test to run or --all flag")
|
||||
os.Exit(1)
|
||||
return errors.New("please provide test to run or --all flag")
|
||||
}
|
||||
|
||||
hubPtr.NucleiTargetHost = NucleiTargetHost
|
||||
hubPtr.AppSecHost = AppSecHost
|
||||
if runAll {
|
||||
if err := HubTest.LoadAllTests(); err != nil {
|
||||
log.Fatalf("unable to load all tests: %+v", err)
|
||||
if err := hubPtr.LoadAllTests(); err != nil {
|
||||
return fmt.Errorf("unable to load all tests: %+v", err)
|
||||
}
|
||||
} else {
|
||||
for _, testName := range args {
|
||||
_, err := HubTest.LoadTestItem(testName)
|
||||
_, err := hubPtr.LoadTestItem(testName)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to load test '%s': %s", testName, err)
|
||||
return fmt.Errorf("unable to load test '%s': %w", testName, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, test := range HubTest.Tests {
|
||||
if csConfig.Cscli.Output == "human" {
|
||||
// set timezone to avoid DST issues
|
||||
os.Setenv("TZ", "UTC")
|
||||
for _, test := range hubPtr.Tests {
|
||||
if cfg.Cscli.Output == "human" {
|
||||
log.Infof("Running test '%s'", test.Name)
|
||||
}
|
||||
err := test.Run()
|
||||
|
@ -193,12 +281,15 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
|
|||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
PersistentPostRun: func(cmd *cobra.Command, args []string) {
|
||||
PersistentPostRunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
|
||||
success := true
|
||||
testResult := make(map[string]bool)
|
||||
for _, test := range HubTest.Tests {
|
||||
if test.AutoGen {
|
||||
for _, test := range hubPtr.Tests {
|
||||
if test.AutoGen && !isAppsecTest {
|
||||
if test.ParserAssert.AutoGenAssert {
|
||||
log.Warningf("Assert file '%s' is empty, generating assertion:", test.ParserAssert.File)
|
||||
fmt.Println()
|
||||
|
@ -211,7 +302,7 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
|
|||
}
|
||||
if !noClean {
|
||||
if err := test.Clean(); err != nil {
|
||||
log.Fatalf("unable to clean test '%s' env: %s", test.Name, err)
|
||||
return fmt.Errorf("unable to clean test '%s' env: %w", test.Name, err)
|
||||
}
|
||||
}
|
||||
fmt.Printf("\nPlease fill your assert file(s) for test '%s', exiting\n", test.Name)
|
||||
|
@ -219,18 +310,18 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
|
|||
}
|
||||
testResult[test.Name] = test.Success
|
||||
if test.Success {
|
||||
if csConfig.Cscli.Output == "human" {
|
||||
if cfg.Cscli.Output == "human" {
|
||||
log.Infof("Test '%s' passed successfully (%d assertions)\n", test.Name, test.ParserAssert.NbAssert+test.ScenarioAssert.NbAssert)
|
||||
}
|
||||
if !noClean {
|
||||
if err := test.Clean(); err != nil {
|
||||
log.Fatalf("unable to clean test '%s' env: %s", test.Name, err)
|
||||
return fmt.Errorf("unable to clean test '%s' env: %w", test.Name, err)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
success = false
|
||||
cleanTestEnv := false
|
||||
if csConfig.Cscli.Output == "human" {
|
||||
if cfg.Cscli.Output == "human" {
|
||||
if len(test.ParserAssert.Fails) > 0 {
|
||||
fmt.Println()
|
||||
log.Errorf("Parser test '%s' failed (%d errors)\n", test.Name, len(test.ParserAssert.Fails))
|
||||
|
@ -261,36 +352,23 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
|
|||
Default: true,
|
||||
}
|
||||
if err := survey.AskOne(prompt, &cleanTestEnv); err != nil {
|
||||
log.Fatalf("unable to ask to remove runtime folder: %s", err)
|
||||
return fmt.Errorf("unable to ask to remove runtime folder: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if cleanTestEnv || forceClean {
|
||||
if err := test.Clean(); err != nil {
|
||||
log.Fatalf("unable to clean test '%s' env: %s", test.Name, err)
|
||||
return fmt.Errorf("unable to clean test '%s' env: %w", test.Name, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if csConfig.Cscli.Output == "human" {
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
table.SetCenterSeparator("")
|
||||
table.SetColumnSeparator("")
|
||||
|
||||
table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
|
||||
table.SetHeader([]string{"Test", "Result"})
|
||||
for testName, success := range testResult {
|
||||
status := emoji.CheckMarkButton.String()
|
||||
if !success {
|
||||
status = emoji.CrossMark.String()
|
||||
}
|
||||
table.Append([]string{testName, status})
|
||||
}
|
||||
table.Render()
|
||||
} else if csConfig.Cscli.Output == "json" {
|
||||
switch cfg.Cscli.Output {
|
||||
case "human":
|
||||
hubTestResultTable(color.Output, testResult)
|
||||
case "json":
|
||||
jsonResult := make(map[string][]string, 0)
|
||||
jsonResult["success"] = make([]string, 0)
|
||||
jsonResult["fail"] = make([]string, 0)
|
||||
|
@ -303,119 +381,158 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
|
|||
}
|
||||
jsonStr, err := json.Marshal(jsonResult)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to json test result: %s", err.Error())
|
||||
return fmt.Errorf("unable to json test result: %w", err)
|
||||
}
|
||||
fmt.Println(string(jsonStr))
|
||||
default:
|
||||
return errors.New("only human/json output modes are supported")
|
||||
}
|
||||
|
||||
if !success {
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdHubTestRun.Flags().BoolVar(&noClean, "no-clean", false, "Don't clean runtime environment if test succeed")
|
||||
cmdHubTestRun.Flags().BoolVar(&forceClean, "clean", false, "Clean runtime environment if test fail")
|
||||
cmdHubTestRun.Flags().BoolVar(&runAll, "all", false, "Run all tests")
|
||||
cmdHubTest.AddCommand(cmdHubTestRun)
|
||||
|
||||
var cmdHubTestClean = &cobra.Command{
|
||||
cmd.Flags().BoolVar(&noClean, "no-clean", false, "Don't clean runtime environment if test succeed")
|
||||
cmd.Flags().BoolVar(&forceClean, "clean", false, "Clean runtime environment if test fail")
|
||||
cmd.Flags().StringVar(&NucleiTargetHost, "target", hubtest.DefaultNucleiTarget, "Target for AppSec Test")
|
||||
cmd.Flags().StringVar(&AppSecHost, "host", hubtest.DefaultAppsecHost, "Address to expose AppSec for hubtest")
|
||||
cmd.Flags().BoolVar(&runAll, "all", false, "Run all tests")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHubTest) NewCleanCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "clean",
|
||||
Short: "clean [test_name]",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
for _, testName := range args {
|
||||
test, err := HubTest.LoadTestItem(testName)
|
||||
test, err := hubPtr.LoadTestItem(testName)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to load test '%s': %s", testName, err)
|
||||
return fmt.Errorf("unable to load test '%s': %w", testName, err)
|
||||
}
|
||||
if err := test.Clean(); err != nil {
|
||||
log.Fatalf("unable to clean test '%s' env: %s", test.Name, err)
|
||||
return fmt.Errorf("unable to clean test '%s' env: %w", test.Name, err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdHubTest.AddCommand(cmdHubTestClean)
|
||||
|
||||
var cmdHubTestInfo = &cobra.Command{
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHubTest) NewInfoCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "info",
|
||||
Short: "info [test_name]",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
for _, testName := range args {
|
||||
test, err := HubTest.LoadTestItem(testName)
|
||||
test, err := hubPtr.LoadTestItem(testName)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to load test '%s': %s", testName, err)
|
||||
return fmt.Errorf("unable to load test '%s': %w", testName, err)
|
||||
}
|
||||
fmt.Println()
|
||||
fmt.Printf(" Test name : %s\n", test.Name)
|
||||
fmt.Printf(" Test path : %s\n", test.Path)
|
||||
fmt.Printf(" Log file : %s\n", filepath.Join(test.Path, test.Config.LogFile))
|
||||
fmt.Printf(" Parser assertion file : %s\n", filepath.Join(test.Path, cstest.ParserAssertFileName))
|
||||
fmt.Printf(" Scenario assertion file : %s\n", filepath.Join(test.Path, cstest.ScenarioAssertFileName))
|
||||
if isAppsecTest {
|
||||
fmt.Printf(" Nuclei Template : %s\n", test.Config.NucleiTemplate)
|
||||
fmt.Printf(" Appsec Rules : %s\n", strings.Join(test.Config.AppsecRules, ", "))
|
||||
} else {
|
||||
fmt.Printf(" Log file : %s\n", filepath.Join(test.Path, test.Config.LogFile))
|
||||
fmt.Printf(" Parser assertion file : %s\n", filepath.Join(test.Path, hubtest.ParserAssertFileName))
|
||||
fmt.Printf(" Scenario assertion file : %s\n", filepath.Join(test.Path, hubtest.ScenarioAssertFileName))
|
||||
}
|
||||
fmt.Printf(" Configuration File : %s\n", filepath.Join(test.Path, "config.yaml"))
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdHubTest.AddCommand(cmdHubTestInfo)
|
||||
|
||||
var cmdHubTestList = &cobra.Command{
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHubTest) NewListCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "list",
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if err := HubTest.LoadAllTests(); err != nil {
|
||||
log.Fatalf("unable to load all tests: %+v", err)
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
|
||||
if err := hubPtr.LoadAllTests(); err != nil {
|
||||
return fmt.Errorf("unable to load all tests: %w", err)
|
||||
}
|
||||
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
table.SetCenterSeparator("")
|
||||
table.SetColumnSeparator("")
|
||||
|
||||
table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetHeader([]string{"Name", "Path"})
|
||||
for _, test := range HubTest.Tests {
|
||||
table.Append([]string{test.Name, test.Path})
|
||||
switch cfg.Cscli.Output {
|
||||
case "human":
|
||||
hubTestListTable(color.Output, hubPtr.Tests)
|
||||
case "json":
|
||||
j, err := json.MarshalIndent(hubPtr.Tests, " ", " ")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
fmt.Println(string(j))
|
||||
default:
|
||||
return errors.New("only human/json output modes are supported")
|
||||
}
|
||||
table.Render()
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdHubTest.AddCommand(cmdHubTestList)
|
||||
|
||||
var showParserCov bool
|
||||
var showScenarioCov bool
|
||||
var showOnlyPercent bool
|
||||
var cmdHubTestCoverage = &cobra.Command{
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHubTest) NewCoverageCmd() *cobra.Command {
|
||||
var (
|
||||
showParserCov bool
|
||||
showScenarioCov bool
|
||||
showOnlyPercent bool
|
||||
showAppsecCov bool
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "coverage",
|
||||
Short: "coverage",
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
|
||||
// for this one we explicitly don't do for appsec
|
||||
if err := HubTest.LoadAllTests(); err != nil {
|
||||
log.Fatalf("unable to load all tests: %+v", err)
|
||||
return fmt.Errorf("unable to load all tests: %+v", err)
|
||||
}
|
||||
var err error
|
||||
scenarioCoverage := []cstest.ScenarioCoverage{}
|
||||
parserCoverage := []cstest.ParserCoverage{}
|
||||
scenarioCoverage := []hubtest.Coverage{}
|
||||
parserCoverage := []hubtest.Coverage{}
|
||||
appsecRuleCoverage := []hubtest.Coverage{}
|
||||
scenarioCoveragePercent := 0
|
||||
parserCoveragePercent := 0
|
||||
showAll := false
|
||||
appsecRuleCoveragePercent := 0
|
||||
|
||||
if !showScenarioCov && !showParserCov { // if both are false (flag by default), show both
|
||||
showAll = true
|
||||
}
|
||||
// if both are false (flag by default), show both
|
||||
showAll := !showScenarioCov && !showParserCov && !showAppsecCov
|
||||
|
||||
if showParserCov || showAll {
|
||||
parserCoverage, err = HubTest.GetParsersCoverage()
|
||||
if err != nil {
|
||||
log.Fatalf("while getting parser coverage : %s", err)
|
||||
return fmt.Errorf("while getting parser coverage: %w", err)
|
||||
}
|
||||
parserTested := 0
|
||||
for _, test := range parserCoverage {
|
||||
if test.TestsCount > 0 {
|
||||
parserTested += 1
|
||||
parserTested++
|
||||
}
|
||||
}
|
||||
parserCoveragePercent = int(math.Round((float64(parserTested) / float64(len(parserCoverage)) * 100)))
|
||||
|
@ -424,68 +541,62 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
|
|||
if showScenarioCov || showAll {
|
||||
scenarioCoverage, err = HubTest.GetScenariosCoverage()
|
||||
if err != nil {
|
||||
log.Fatalf("while getting scenario coverage: %s", err)
|
||||
return fmt.Errorf("while getting scenario coverage: %w", err)
|
||||
}
|
||||
|
||||
scenarioTested := 0
|
||||
for _, test := range scenarioCoverage {
|
||||
if test.TestsCount > 0 {
|
||||
scenarioTested += 1
|
||||
scenarioTested++
|
||||
}
|
||||
}
|
||||
|
||||
scenarioCoveragePercent = int(math.Round((float64(scenarioTested) / float64(len(scenarioCoverage)) * 100)))
|
||||
}
|
||||
|
||||
if showAppsecCov || showAll {
|
||||
appsecRuleCoverage, err = HubTest.GetAppsecCoverage()
|
||||
if err != nil {
|
||||
return fmt.Errorf("while getting scenario coverage: %w", err)
|
||||
}
|
||||
|
||||
appsecRuleTested := 0
|
||||
for _, test := range appsecRuleCoverage {
|
||||
if test.TestsCount > 0 {
|
||||
appsecRuleTested++
|
||||
}
|
||||
}
|
||||
appsecRuleCoveragePercent = int(math.Round((float64(appsecRuleTested) / float64(len(appsecRuleCoverage)) * 100)))
|
||||
}
|
||||
|
||||
if showOnlyPercent {
|
||||
if showAll {
|
||||
fmt.Printf("parsers=%d%%\nscenarios=%d%%", parserCoveragePercent, scenarioCoveragePercent)
|
||||
} else if showParserCov {
|
||||
switch {
|
||||
case showAll:
|
||||
fmt.Printf("parsers=%d%%\nscenarios=%d%%\nappsec_rules=%d%%", parserCoveragePercent, scenarioCoveragePercent, appsecRuleCoveragePercent)
|
||||
case showParserCov:
|
||||
fmt.Printf("parsers=%d%%", parserCoveragePercent)
|
||||
} else if showScenarioCov {
|
||||
case showScenarioCov:
|
||||
fmt.Printf("scenarios=%d%%", scenarioCoveragePercent)
|
||||
case showAppsecCov:
|
||||
fmt.Printf("appsec_rules=%d%%", appsecRuleCoveragePercent)
|
||||
}
|
||||
os.Exit(0)
|
||||
}
|
||||
|
||||
if csConfig.Cscli.Output == "human" {
|
||||
switch cfg.Cscli.Output {
|
||||
case "human":
|
||||
if showParserCov || showAll {
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
table.SetCenterSeparator("")
|
||||
table.SetColumnSeparator("")
|
||||
|
||||
table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
|
||||
table.SetHeader([]string{"Parser", "Status", "Number of tests"})
|
||||
parserTested := 0
|
||||
for _, test := range parserCoverage {
|
||||
status := emoji.RedCircle.String()
|
||||
if test.TestsCount > 0 {
|
||||
status = emoji.GreenCircle.String()
|
||||
parserTested += 1
|
||||
}
|
||||
table.Append([]string{test.Parser, status, fmt.Sprintf("%d times (across %d tests)", test.TestsCount, len(test.PresentIn))})
|
||||
}
|
||||
table.Render()
|
||||
hubTestParserCoverageTable(color.Output, parserCoverage)
|
||||
}
|
||||
|
||||
if showScenarioCov || showAll {
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
table.SetCenterSeparator("")
|
||||
table.SetColumnSeparator("")
|
||||
|
||||
table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
|
||||
table.SetHeader([]string{"Scenario", "Status", "Number of tests"})
|
||||
for _, test := range scenarioCoverage {
|
||||
status := emoji.RedCircle.String()
|
||||
if test.TestsCount > 0 {
|
||||
status = emoji.GreenCircle.String()
|
||||
}
|
||||
table.Append([]string{test.Scenario, status, fmt.Sprintf("%d times (across %d tests)", test.TestsCount, len(test.PresentIn))})
|
||||
}
|
||||
table.Render()
|
||||
hubTestScenarioCoverageTable(color.Output, scenarioCoverage)
|
||||
}
|
||||
|
||||
if showAppsecCov || showAll {
|
||||
hubTestAppsecRuleCoverageTable(color.Output, appsecRuleCoverage)
|
||||
}
|
||||
|
||||
fmt.Println()
|
||||
if showParserCov || showAll {
|
||||
fmt.Printf("PARSERS : %d%% of coverage\n", parserCoveragePercent)
|
||||
|
@ -493,95 +604,118 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
|
|||
if showScenarioCov || showAll {
|
||||
fmt.Printf("SCENARIOS : %d%% of coverage\n", scenarioCoveragePercent)
|
||||
}
|
||||
} else if csConfig.Cscli.Output == "json" {
|
||||
if showAppsecCov || showAll {
|
||||
fmt.Printf("APPSEC RULES : %d%% of coverage\n", appsecRuleCoveragePercent)
|
||||
}
|
||||
case "json":
|
||||
dump, err := json.MarshalIndent(parserCoverage, "", " ")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
return err
|
||||
}
|
||||
fmt.Printf("%s", dump)
|
||||
dump, err = json.MarshalIndent(scenarioCoverage, "", " ")
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
return err
|
||||
}
|
||||
fmt.Printf("%s", dump)
|
||||
} else {
|
||||
log.Fatalf("only human/json output modes are supported")
|
||||
dump, err = json.MarshalIndent(appsecRuleCoverage, "", " ")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
fmt.Printf("%s", dump)
|
||||
default:
|
||||
return errors.New("only human/json output modes are supported")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdHubTestCoverage.PersistentFlags().BoolVar(&showOnlyPercent, "percent", false, "Show only percentages of coverage")
|
||||
cmdHubTestCoverage.PersistentFlags().BoolVar(&showParserCov, "parsers", false, "Show only parsers coverage")
|
||||
cmdHubTestCoverage.PersistentFlags().BoolVar(&showScenarioCov, "scenarios", false, "Show only scenarios coverage")
|
||||
cmdHubTest.AddCommand(cmdHubTestCoverage)
|
||||
|
||||
cmd.PersistentFlags().BoolVar(&showOnlyPercent, "percent", false, "Show only percentages of coverage")
|
||||
cmd.PersistentFlags().BoolVar(&showParserCov, "parsers", false, "Show only parsers coverage")
|
||||
cmd.PersistentFlags().BoolVar(&showScenarioCov, "scenarios", false, "Show only scenarios coverage")
|
||||
cmd.PersistentFlags().BoolVar(&showAppsecCov, "appsec", false, "Show only appsec coverage")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHubTest) NewEvalCmd() *cobra.Command {
|
||||
var evalExpression string
|
||||
var cmdHubTestEval = &cobra.Command{
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "eval",
|
||||
Short: "eval [test_name]",
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
for _, testName := range args {
|
||||
test, err := HubTest.LoadTestItem(testName)
|
||||
test, err := hubPtr.LoadTestItem(testName)
|
||||
if err != nil {
|
||||
log.Fatalf("can't load test: %+v", err)
|
||||
return fmt.Errorf("can't load test: %+v", err)
|
||||
}
|
||||
|
||||
err = test.ParserAssert.LoadTest(test.ParserResultFile)
|
||||
if err != nil {
|
||||
log.Fatalf("can't load test results from '%s': %+v", test.ParserResultFile, err)
|
||||
return fmt.Errorf("can't load test results from '%s': %+v", test.ParserResultFile, err)
|
||||
}
|
||||
|
||||
output, err := test.ParserAssert.EvalExpression(evalExpression)
|
||||
if err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
return err
|
||||
}
|
||||
fmt.Printf(output)
|
||||
|
||||
fmt.Print(output)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdHubTestEval.PersistentFlags().StringVarP(&evalExpression, "expr", "e", "", "Expression to eval")
|
||||
cmdHubTest.AddCommand(cmdHubTestEval)
|
||||
|
||||
var cmdHubTestExplain = &cobra.Command{
|
||||
cmd.PersistentFlags().StringVarP(&evalExpression, "expr", "e", "", "Expression to eval")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliHubTest) NewExplainCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "explain",
|
||||
Short: "explain [test_name]",
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
for _, testName := range args {
|
||||
test, err := HubTest.LoadTestItem(testName)
|
||||
if err != nil {
|
||||
log.Fatalf("can't load test: %+v", err)
|
||||
return fmt.Errorf("can't load test: %+v", err)
|
||||
}
|
||||
err = test.ParserAssert.LoadTest(test.ParserResultFile)
|
||||
if err != nil {
|
||||
err := test.Run()
|
||||
if err != nil {
|
||||
log.Fatalf("running test '%s' failed: %+v", test.Name, err)
|
||||
if err = test.Run(); err != nil {
|
||||
return fmt.Errorf("running test '%s' failed: %+v", test.Name, err)
|
||||
}
|
||||
err = test.ParserAssert.LoadTest(test.ParserResultFile)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to load parser result after run: %s", err)
|
||||
|
||||
if err = test.ParserAssert.LoadTest(test.ParserResultFile); err != nil {
|
||||
return fmt.Errorf("unable to load parser result after run: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
err = test.ScenarioAssert.LoadTest(test.ScenarioResultFile, test.BucketPourResultFile)
|
||||
if err != nil {
|
||||
err := test.Run()
|
||||
if err != nil {
|
||||
log.Fatalf("running test '%s' failed: %+v", test.Name, err)
|
||||
if err = test.Run(); err != nil {
|
||||
return fmt.Errorf("running test '%s' failed: %+v", test.Name, err)
|
||||
}
|
||||
err = test.ScenarioAssert.LoadTest(test.ScenarioResultFile, test.BucketPourResultFile)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to load scenario result after run: %s", err)
|
||||
|
||||
if err = test.ScenarioAssert.LoadTest(test.ScenarioResultFile, test.BucketPourResultFile); err != nil {
|
||||
return fmt.Errorf("unable to load scenario result after run: %w", err)
|
||||
}
|
||||
}
|
||||
opts := cstest.DumpOpts{}
|
||||
cstest.DumpTree(*test.ParserAssert.TestData, *test.ScenarioAssert.PourData, opts)
|
||||
opts := dumps.DumpOpts{}
|
||||
dumps.DumpTree(*test.ParserAssert.TestData, *test.ScenarioAssert.PourData, opts)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdHubTest.AddCommand(cmdHubTestExplain)
|
||||
|
||||
return cmdHubTest
|
||||
return cmd
|
||||
}
|
||||
|
|
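Outside of cobra, the flow implemented by the run subcommand can be driven directly with the hubtest package calls that appear in this diff. A minimal sketch, assuming it is built inside the crowdsec module and run from a hub checkout; the three path arguments mirror the flag defaults shown above:

package main

import (
	"fmt"
	"log"

	"github.com/crowdsecurity/crowdsec/pkg/hubtest"
)

func main() {
	// false selects regular tests, true selects appsec tests (the --appsec flag).
	ht, err := hubtest.NewHubTest(".", "crowdsec", "cscli", false)
	if err != nil {
		log.Fatalf("unable to load hubtest: %s", err)
	}

	if err := ht.LoadAllTests(); err != nil {
		log.Fatalf("unable to load all tests: %s", err)
	}

	for _, test := range ht.Tests {
		if err := test.Run(); err != nil {
			log.Fatalf("running test '%s' failed: %s", test.Name, err)
		}

		fmt.Printf("%s: success=%v\n", test.Name, test.Success)

		if err := test.Clean(); err != nil {
			log.Fatalf("unable to clean test '%s' env: %s", test.Name, err)
		}
	}
}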
105 cmd/crowdsec-cli/hubtest_table.go Normal file
@@ -0,0 +1,105 @@
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/aquasecurity/table"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/emoji"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/hubtest"
|
||||
)
|
||||
|
||||
func hubTestResultTable(out io.Writer, testResult map[string]bool) {
|
||||
t := newLightTable(out)
|
||||
t.SetHeaders("Test", "Result")
|
||||
t.SetHeaderAlignment(table.AlignLeft)
|
||||
t.SetAlignment(table.AlignLeft)
|
||||
|
||||
for testName, success := range testResult {
|
||||
status := emoji.CheckMarkButton
|
||||
if !success {
|
||||
status = emoji.CrossMark
|
||||
}
|
||||
|
||||
t.AddRow(testName, status)
|
||||
}
|
||||
|
||||
t.Render()
|
||||
}
|
||||
|
||||
func hubTestListTable(out io.Writer, tests []*hubtest.HubTestItem) {
|
||||
t := newLightTable(out)
|
||||
t.SetHeaders("Name", "Path")
|
||||
t.SetHeaderAlignment(table.AlignLeft, table.AlignLeft)
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft)
|
||||
|
||||
for _, test := range tests {
|
||||
t.AddRow(test.Name, test.Path)
|
||||
}
|
||||
|
||||
t.Render()
|
||||
}
|
||||
|
||||
func hubTestParserCoverageTable(out io.Writer, coverage []hubtest.Coverage) {
|
||||
t := newLightTable(out)
|
||||
t.SetHeaders("Parser", "Status", "Number of tests")
|
||||
t.SetHeaderAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
parserTested := 0
|
||||
|
||||
for _, test := range coverage {
|
||||
status := emoji.RedCircle
|
||||
if test.TestsCount > 0 {
|
||||
status = emoji.GreenCircle
|
||||
parserTested++
|
||||
}
|
||||
|
||||
t.AddRow(test.Name, status, fmt.Sprintf("%d times (across %d tests)", test.TestsCount, len(test.PresentIn)))
|
||||
}
|
||||
|
||||
t.Render()
|
||||
}
|
||||
|
||||
func hubTestAppsecRuleCoverageTable(out io.Writer, coverage []hubtest.Coverage) {
|
||||
t := newLightTable(out)
|
||||
t.SetHeaders("Appsec Rule", "Status", "Number of tests")
|
||||
t.SetHeaderAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
parserTested := 0
|
||||
|
||||
for _, test := range coverage {
|
||||
status := emoji.RedCircle
|
||||
if test.TestsCount > 0 {
|
||||
status = emoji.GreenCircle
|
||||
parserTested++
|
||||
}
|
||||
|
||||
t.AddRow(test.Name, status, fmt.Sprintf("%d times (across %d tests)", test.TestsCount, len(test.PresentIn)))
|
||||
}
|
||||
|
||||
t.Render()
|
||||
}
|
||||
|
||||
func hubTestScenarioCoverageTable(out io.Writer, coverage []hubtest.Coverage) {
|
||||
t := newLightTable(out)
|
||||
t.SetHeaders("Scenario", "Status", "Number of tests")
|
||||
t.SetHeaderAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
parserTested := 0
|
||||
|
||||
for _, test := range coverage {
|
||||
status := emoji.RedCircle
|
||||
if test.TestsCount > 0 {
|
||||
status = emoji.GreenCircle
|
||||
parserTested++
|
||||
}
|
||||
|
||||
t.AddRow(test.Name, status, fmt.Sprintf("%d times (across %d tests)", test.TestsCount, len(test.PresentIn)))
|
||||
}
|
||||
|
||||
t.Render()
|
||||
}
|
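Because the table helpers above only depend on an io.Writer and plain data, they can be exercised in isolation. A hypothetical test sketch for the same package, not part of the diff:

package main

import (
	"os"
	"testing"
)

// Renders the pass/fail table for two fake results; test names are illustrative.
func TestHubTestResultTable(t *testing.T) {
	results := map[string]bool{
		"my-awesome-test":      true,  // shown with a check mark
		"my-nginx-custom-test": false, // shown with a cross mark
	}

	hubTestResultTable(os.Stdout, results)
}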
327 cmd/crowdsec-cli/item_metrics.go Normal file
@@ -0,0 +1,327 @@
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/fatih/color"
|
||||
dto "github.com/prometheus/client_model/go"
|
||||
"github.com/prometheus/prom2json"
|
||||
log "github.com/sirupsen/logrus"
|
||||
|
||||
"github.com/crowdsecurity/go-cs-lib/trace"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
)
|
||||
|
||||
func ShowMetrics(hubItem *cwhub.Item) error {
|
||||
switch hubItem.Type {
|
||||
case cwhub.PARSERS:
|
||||
metrics := GetParserMetric(csConfig.Cscli.PrometheusUrl, hubItem.Name)
|
||||
parserMetricsTable(color.Output, hubItem.Name, metrics)
|
||||
case cwhub.SCENARIOS:
|
||||
metrics := GetScenarioMetric(csConfig.Cscli.PrometheusUrl, hubItem.Name)
|
||||
scenarioMetricsTable(color.Output, hubItem.Name, metrics)
|
||||
case cwhub.COLLECTIONS:
|
||||
for _, sub := range hubItem.SubItems() {
|
||||
if err := ShowMetrics(sub); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
case cwhub.APPSEC_RULES:
|
||||
metrics := GetAppsecRuleMetric(csConfig.Cscli.PrometheusUrl, hubItem.Name)
|
||||
appsecMetricsTable(color.Output, hubItem.Name, metrics)
|
||||
default: // no metrics for this item type
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetParserMetric is a complete rip from prom2json
|
||||
func GetParserMetric(url string, itemName string) map[string]map[string]int {
|
||||
stats := make(map[string]map[string]int)
|
||||
|
||||
result := GetPrometheusMetric(url)
|
||||
for idx, fam := range result {
|
||||
if !strings.HasPrefix(fam.Name, "cs_") {
|
||||
continue
|
||||
}
|
||||
|
||||
log.Tracef("round %d", idx)
|
||||
|
||||
for _, m := range fam.Metrics {
|
||||
metric, ok := m.(prom2json.Metric)
|
||||
if !ok {
|
||||
log.Debugf("failed to convert metric to prom2json.Metric")
|
||||
continue
|
||||
}
|
||||
|
||||
name, ok := metric.Labels["name"]
|
||||
if !ok {
|
||||
log.Debugf("no name in Metric %v", metric.Labels)
|
||||
}
|
||||
|
||||
if name != itemName {
|
||||
continue
|
||||
}
|
||||
|
||||
source, ok := metric.Labels["source"]
|
||||
|
||||
if !ok {
|
||||
log.Debugf("no source in Metric %v", metric.Labels)
|
||||
} else {
|
||||
if srctype, ok := metric.Labels["type"]; ok {
|
||||
source = srctype + ":" + source
|
||||
}
|
||||
}
|
||||
|
||||
value := m.(prom2json.Metric).Value
|
||||
|
||||
fval, err := strconv.ParseFloat(value, 32)
|
||||
if err != nil {
|
||||
log.Errorf("Unexpected int value %s : %s", value, err)
|
||||
continue
|
||||
}
|
||||
|
||||
ival := int(fval)
|
||||
|
||||
switch fam.Name {
|
||||
case "cs_reader_hits_total":
|
||||
if _, ok := stats[source]; !ok {
|
||||
stats[source] = make(map[string]int)
|
||||
stats[source]["parsed"] = 0
|
||||
stats[source]["reads"] = 0
|
||||
stats[source]["unparsed"] = 0
|
||||
stats[source]["hits"] = 0
|
||||
}
|
||||
stats[source]["reads"] += ival
|
||||
case "cs_parser_hits_ok_total":
|
||||
if _, ok := stats[source]; !ok {
|
||||
stats[source] = make(map[string]int)
|
||||
}
|
||||
stats[source]["parsed"] += ival
|
||||
case "cs_parser_hits_ko_total":
|
||||
if _, ok := stats[source]; !ok {
|
||||
stats[source] = make(map[string]int)
|
||||
}
|
||||
stats[source]["unparsed"] += ival
|
||||
case "cs_node_hits_total":
|
||||
if _, ok := stats[source]; !ok {
|
||||
stats[source] = make(map[string]int)
|
||||
}
|
||||
stats[source]["hits"] += ival
|
||||
case "cs_node_hits_ok_total":
|
||||
if _, ok := stats[source]; !ok {
|
||||
stats[source] = make(map[string]int)
|
||||
}
|
||||
stats[source]["parsed"] += ival
|
||||
case "cs_node_hits_ko_total":
|
||||
if _, ok := stats[source]; !ok {
|
||||
stats[source] = make(map[string]int)
|
||||
}
|
||||
stats[source]["unparsed"] += ival
|
||||
default:
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return stats
|
||||
}
|
||||
|
||||
func GetScenarioMetric(url string, itemName string) map[string]int {
|
||||
stats := make(map[string]int)
|
||||
|
||||
stats["instantiation"] = 0
|
||||
stats["curr_count"] = 0
|
||||
stats["overflow"] = 0
|
||||
stats["pour"] = 0
|
||||
stats["underflow"] = 0
|
||||
|
||||
result := GetPrometheusMetric(url)
|
||||
for idx, fam := range result {
|
||||
if !strings.HasPrefix(fam.Name, "cs_") {
|
||||
continue
|
||||
}
|
||||
|
||||
log.Tracef("round %d", idx)
|
||||
|
||||
for _, m := range fam.Metrics {
|
||||
metric, ok := m.(prom2json.Metric)
|
||||
if !ok {
|
||||
log.Debugf("failed to convert metric to prom2json.Metric")
|
||||
continue
|
||||
}
|
||||
|
||||
name, ok := metric.Labels["name"]
|
||||
|
||||
if !ok {
|
||||
log.Debugf("no name in Metric %v", metric.Labels)
|
||||
}
|
||||
|
||||
if name != itemName {
|
||||
continue
|
||||
}
|
||||
|
||||
value := m.(prom2json.Metric).Value
|
||||
|
||||
fval, err := strconv.ParseFloat(value, 32)
|
||||
if err != nil {
|
||||
log.Errorf("Unexpected int value %s : %s", value, err)
|
||||
continue
|
||||
}
|
||||
|
||||
ival := int(fval)
|
||||
|
||||
switch fam.Name {
|
||||
case "cs_bucket_created_total":
|
||||
stats["instantiation"] += ival
|
||||
case "cs_buckets":
|
||||
stats["curr_count"] += ival
|
||||
case "cs_bucket_overflowed_total":
|
||||
stats["overflow"] += ival
|
||||
case "cs_bucket_poured_total":
|
||||
stats["pour"] += ival
|
||||
case "cs_bucket_underflowed_total":
|
||||
stats["underflow"] += ival
|
||||
default:
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return stats
|
||||
}
|
||||
|
||||
func GetAppsecRuleMetric(url string, itemName string) map[string]int {
|
||||
stats := make(map[string]int)
|
||||
|
||||
stats["inband_hits"] = 0
|
||||
stats["outband_hits"] = 0
|
||||
|
||||
results := GetPrometheusMetric(url)
|
||||
for idx, fam := range results {
|
||||
if !strings.HasPrefix(fam.Name, "cs_") {
|
||||
continue
|
||||
}
|
||||
|
||||
log.Tracef("round %d", idx)
|
||||
|
||||
for _, m := range fam.Metrics {
|
||||
metric, ok := m.(prom2json.Metric)
|
||||
if !ok {
|
||||
log.Debugf("failed to convert metric to prom2json.Metric")
|
||||
continue
|
||||
}
|
||||
|
||||
name, ok := metric.Labels["rule_name"]
|
||||
|
||||
if !ok {
|
||||
log.Debugf("no rule_name in Metric %v", metric.Labels)
|
||||
}
|
||||
|
||||
if name != itemName {
|
||||
continue
|
||||
}
|
||||
|
||||
band, ok := metric.Labels["type"]
|
||||
if !ok {
|
||||
log.Debugf("no type in Metric %v", metric.Labels)
|
||||
}
|
||||
|
||||
value := m.(prom2json.Metric).Value
|
||||
|
||||
fval, err := strconv.ParseFloat(value, 32)
|
||||
if err != nil {
|
||||
log.Errorf("Unexpected int value %s : %s", value, err)
|
||||
continue
|
||||
}
|
||||
|
||||
ival := int(fval)
|
||||
|
||||
switch fam.Name {
|
||||
case "cs_appsec_rule_hits":
|
||||
switch band {
|
||||
case "inband":
|
||||
stats["inband_hits"] += ival
|
||||
case "outband":
|
||||
stats["outband_hits"] += ival
|
||||
default:
|
||||
continue
|
||||
}
|
||||
default:
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return stats
|
||||
}
|
||||
|
||||
func GetPrometheusMetric(url string) []*prom2json.Family {
	mfChan := make(chan *dto.MetricFamily, 1024)

	// Start with the DefaultTransport for sane defaults.
	transport := http.DefaultTransport.(*http.Transport).Clone()
	// Conservatively disable HTTP keep-alives as this program will only
	// ever need a single HTTP request.
	transport.DisableKeepAlives = true
	// Timeout early if the server doesn't even return the headers.
	transport.ResponseHeaderTimeout = time.Minute

	go func() {
		defer trace.CatchPanic("crowdsec/GetPrometheusMetric")

		err := prom2json.FetchMetricFamilies(url, mfChan, transport)
		if err != nil {
			log.Fatalf("failed to fetch prometheus metrics : %v", err)
		}
	}()

	result := []*prom2json.Family{}
	for mf := range mfChan {
		result = append(result, prom2json.NewFamily(mf))
	}

	log.Debugf("Finished reading prometheus output, %d entries", len(result))

	return result
}

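// The transport tuning above (cloned DefaultTransport, keep-alives disabled,
// one-minute header timeout) is not prom2json-specific; the same settings fit
// any one-shot scrape. A minimal sketch with plain net/http, illustrative
// only; it assumes an "io" import, which this file may not have:
func fetchRawMetrics(url string) ([]byte, error) {
	transport := http.DefaultTransport.(*http.Transport).Clone()
	transport.DisableKeepAlives = true
	transport.ResponseHeaderTimeout = time.Minute

	client := &http.Client{Transport: transport}

	resp, err := client.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	return io.ReadAll(resp.Body)
}
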
type unit struct {
	value  int64
	symbol string
}

var ranges = []unit{
	{value: 1e18, symbol: "E"},
	{value: 1e15, symbol: "P"},
	{value: 1e12, symbol: "T"},
	{value: 1e9, symbol: "G"},
	{value: 1e6, symbol: "M"},
	{value: 1e3, symbol: "k"},
	{value: 1, symbol: ""},
}

func formatNumber(num int) string {
	goodUnit := unit{}

	for _, u := range ranges {
		if int64(num) >= u.value {
			goodUnit = u
			break
		}
	}

	if goodUnit.value == 1 {
		return fmt.Sprintf("%d%s", num, goodUnit.symbol)
	}

	res := math.Round(float64(num)/float64(goodUnit.value)*100) / 100

	return fmt.Sprintf("%.2f%s", res, goodUnit.symbol)
}
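
// Reference outputs, derived from the ranges table above:
//
//	formatNumber(950)       // "950"
//	formatNumber(1500)      // "1.50k"
//	formatNumber(2_000_000) // "2.00M"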

85  cmd/crowdsec-cli/item_suggest.go  Normal file
@@ -0,0 +1,85 @@
package main

import (
	"fmt"
	"slices"
	"strings"

	"github.com/agext/levenshtein"
	"github.com/spf13/cobra"

	"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
)

// suggestNearestMessage returns a message with the most similar item name, if one is found
func suggestNearestMessage(hub *cwhub.Hub, itemType string, itemName string) string {
	const maxDistance = 7

	score := 100
	nearest := ""

	for _, item := range hub.GetItemMap(itemType) {
		d := levenshtein.Distance(itemName, item.Name, nil)
		if d < score {
			score = d
			nearest = item.Name
		}
	}

	msg := fmt.Sprintf("can't find '%s' in %s", itemName, itemType)

	if score < maxDistance {
		msg += fmt.Sprintf(", did you mean '%s'?", nearest)
	}

	return msg
}

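// Illustrative behaviour note: with maxDistance = 7, a near miss such as
// looking up "crowdsecurity/shd-logs" when the hub contains
// "crowdsecurity/sshd-logs" (distance 1) yields
// "can't find 'crowdsecurity/shd-logs' in parsers, did you mean 'crowdsecurity/sshd-logs'?",
// while a name that is far from every known item keeps the bare "can't find" message.
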
func compAllItems(itemType string, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
	hub, err := require.Hub(csConfig, nil, nil)
	if err != nil {
		return nil, cobra.ShellCompDirectiveDefault
	}

	comp := make([]string, 0)

	for _, item := range hub.GetItemMap(itemType) {
		if !slices.Contains(args, item.Name) && strings.Contains(item.Name, toComplete) {
			comp = append(comp, item.Name)
		}
	}

	cobra.CompDebugln(fmt.Sprintf("%s: %+v", itemType, comp), true)

	return comp, cobra.ShellCompDirectiveNoFileComp
}

func compInstalledItems(itemType string, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
	hub, err := require.Hub(csConfig, nil, nil)
	if err != nil {
		return nil, cobra.ShellCompDirectiveDefault
	}

	items, err := hub.GetInstalledNamesByType(itemType)
	if err != nil {
		cobra.CompDebugln(fmt.Sprintf("list installed %s err: %s", itemType, err), true)
		return nil, cobra.ShellCompDirectiveDefault
	}

	comp := make([]string, 0)

	if toComplete != "" {
		for _, item := range items {
			if strings.Contains(item, toComplete) {
				comp = append(comp, item)
			}
		}
	} else {
		comp = items
	}

	cobra.CompDebugln(fmt.Sprintf("%s: %+v", itemType, comp), true)

	return comp, cobra.ShellCompDirectiveNoFileComp
}
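
// Usage sketch (illustrative, not part of this file): the helpers above are
// meant to back a command's ValidArgsFunction, as the hub item commands in
// this changeset do. cwhub.PARSERS is assumed to be the parsers item-type
// constant, in the same family as cwhub.SCENARIOS used later in this change.
func exampleCompletionWiring(cmd *cobra.Command) {
	cmd.ValidArgsFunction = func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
		return compInstalledItems(cwhub.PARSERS, args, toComplete)
	}
}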

546  cmd/crowdsec-cli/itemcli.go  Normal file
@@ -0,0 +1,546 @@
package main

import (
	"errors"
	"fmt"
	"os"
	"strings"

	"github.com/fatih/color"
	"github.com/hexops/gotextdiff"
	"github.com/hexops/gotextdiff/myers"
	"github.com/hexops/gotextdiff/span"
	log "github.com/sirupsen/logrus"
	"github.com/spf13/cobra"

	"github.com/crowdsecurity/go-cs-lib/coalesce"

	"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
)

type cliHelp struct {
	// Example is required, the others have a default value
	// generated from the item type
	use     string
	short   string
	long    string
	example string
}

type cliItem struct {
	cfg           configGetter
	name          string // plural, as used in the hub index
	singular      string
	oneOrMore     string // parenthetical pluralization: "parser(s)"
	help          cliHelp
	installHelp   cliHelp
	removeHelp    cliHelp
	upgradeHelp   cliHelp
	inspectHelp   cliHelp
	inspectDetail func(item *cwhub.Item) error
	listHelp      cliHelp
}

func (cli cliItem) NewCommand() *cobra.Command {
	cmd := &cobra.Command{
		Use:               coalesce.String(cli.help.use, fmt.Sprintf("%s <action> [item]...", cli.name)),
		Short:             coalesce.String(cli.help.short, fmt.Sprintf("Manage hub %s", cli.name)),
		Long:              cli.help.long,
		Example:           cli.help.example,
		Args:              cobra.MinimumNArgs(1),
		Aliases:           []string{cli.singular},
		DisableAutoGenTag: true,
	}

	cmd.AddCommand(cli.newInstallCmd())
	cmd.AddCommand(cli.newRemoveCmd())
	cmd.AddCommand(cli.newUpgradeCmd())
	cmd.AddCommand(cli.newInspectCmd())
	cmd.AddCommand(cli.newListCmd())

	return cmd
}

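// Sketch of how a concrete hub item command could be assembled from cliItem.
// The field values are illustrative; the real wiring for each item type lives
// elsewhere in this changeset:
func exampleNewParsersCmd(cfg configGetter) *cobra.Command {
	return cliItem{
		cfg:       cfg,
		name:      cwhub.PARSERS, // assumed item-type constant, like cwhub.SCENARIOS used later in this change
		singular:  "parser",
		oneOrMore: "parser(s)",
	}.NewCommand()
}
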
func (cli cliItem) install(args []string, downloadOnly bool, force bool, ignoreError bool) error {
	cfg := cli.cfg()

	hub, err := require.Hub(cfg, require.RemoteHub(cfg), log.StandardLogger())
	if err != nil {
		return err
	}

	for _, name := range args {
		item := hub.GetItem(cli.name, name)
		if item == nil {
			msg := suggestNearestMessage(hub, cli.name, name)
			if !ignoreError {
				return errors.New(msg)
			}

			log.Errorf(msg)

			continue
		}

		if err := item.Install(force, downloadOnly); err != nil {
			if !ignoreError {
				return fmt.Errorf("error while installing '%s': %w", item.Name, err)
			}

			log.Errorf("Error while installing '%s': %s", item.Name, err)
		}
	}

	log.Infof(ReloadMessage())

	return nil
}

func (cli cliItem) newInstallCmd() *cobra.Command {
	var (
		downloadOnly bool
		force        bool
		ignoreError  bool
	)

	cmd := &cobra.Command{
		Use:               coalesce.String(cli.installHelp.use, "install [item]..."),
		Short:             coalesce.String(cli.installHelp.short, fmt.Sprintf("Install given %s", cli.oneOrMore)),
		Long:              coalesce.String(cli.installHelp.long, fmt.Sprintf("Fetch and install one or more %s from the hub", cli.name)),
		Example:           cli.installHelp.example,
		Args:              cobra.MinimumNArgs(1),
		DisableAutoGenTag: true,
		ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
			return compAllItems(cli.name, args, toComplete)
		},
		RunE: func(_ *cobra.Command, args []string) error {
			return cli.install(args, downloadOnly, force, ignoreError)
		},
	}

	flags := cmd.Flags()
	flags.BoolVarP(&downloadOnly, "download-only", "d", false, "Only download packages, don't enable")
	flags.BoolVar(&force, "force", false, "Force install: overwrite tainted and outdated files")
	flags.BoolVar(&ignoreError, "ignore", false, fmt.Sprintf("Ignore errors when installing multiple %s", cli.name))

	return cmd
}

// return the names of the installed parents of an item, used to check if we can remove it
func istalledParentNames(item *cwhub.Item) []string {
	ret := make([]string, 0)

	for _, parent := range item.Ancestors() {
		if parent.State.Installed {
			ret = append(ret, parent.Name)
		}
	}

	return ret
}

func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error {
	hub, err := require.Hub(cli.cfg(), nil, log.StandardLogger())
	if err != nil {
		return err
	}

	if all {
		getter := hub.GetInstalledItemsByType
		if purge {
			getter = hub.GetItemsByType
		}

		items, err := getter(cli.name)
		if err != nil {
			return err
		}

		removed := 0

		for _, item := range items {
			didRemove, err := item.Remove(purge, force)
			if err != nil {
				return err
			}

			if didRemove {
				log.Infof("Removed %s", item.Name)

				removed++
			}
		}

		log.Infof("Removed %d %s", removed, cli.name)

		if removed > 0 {
			log.Infof(ReloadMessage())
		}

		return nil
	}

	if len(args) == 0 {
		return fmt.Errorf("specify at least one %s to remove or '--all'", cli.singular)
	}

	removed := 0

	for _, itemName := range args {
		item := hub.GetItem(cli.name, itemName)
		if item == nil {
			return fmt.Errorf("can't find '%s' in %s", itemName, cli.name)
		}

		parents := istalledParentNames(item)

		if !force && len(parents) > 0 {
			log.Warningf("%s belongs to collections: %s", item.Name, parents)
			log.Warningf("Run 'sudo cscli %s remove %s --force' if you want to force remove this %s", item.Type, item.Name, cli.singular)

			continue
		}

		didRemove, err := item.Remove(purge, force)
		if err != nil {
			return err
		}

		if didRemove {
			log.Infof("Removed %s", item.Name)

			removed++
		}
	}

	log.Infof("Removed %d %s", removed, cli.name)

	if removed > 0 {
		log.Infof(ReloadMessage())
	}

	return nil
}

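// Note on the --all branch above: the getter switch means plain "remove --all"
// only walks installed items, while "remove --all --purge" walks every item of
// the type, so downloaded-but-not-installed source files are deleted as well.
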
func (cli cliItem) newRemoveCmd() *cobra.Command {
	var (
		purge bool
		force bool
		all   bool
	)

	cmd := &cobra.Command{
		Use:               coalesce.String(cli.removeHelp.use, "remove [item]..."),
		Short:             coalesce.String(cli.removeHelp.short, fmt.Sprintf("Remove given %s", cli.oneOrMore)),
		Long:              coalesce.String(cli.removeHelp.long, fmt.Sprintf("Remove one or more %s", cli.name)),
		Example:           cli.removeHelp.example,
		Aliases:           []string{"delete"},
		DisableAutoGenTag: true,
		ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
			return compInstalledItems(cli.name, args, toComplete)
		},
		RunE: func(_ *cobra.Command, args []string) error {
			return cli.remove(args, purge, force, all)
		},
	}

	flags := cmd.Flags()
	flags.BoolVar(&purge, "purge", false, "Delete source file too")
	flags.BoolVar(&force, "force", false, "Force remove: remove tainted and outdated files")
	flags.BoolVar(&all, "all", false, fmt.Sprintf("Remove all the %s", cli.name))

	return cmd
}

func (cli cliItem) upgrade(args []string, force bool, all bool) error {
	cfg := cli.cfg()

	hub, err := require.Hub(cfg, require.RemoteHub(cfg), log.StandardLogger())
	if err != nil {
		return err
	}

	if all {
		items, err := hub.GetInstalledItemsByType(cli.name)
		if err != nil {
			return err
		}

		updated := 0

		for _, item := range items {
			didUpdate, err := item.Upgrade(force)
			if err != nil {
				return err
			}

			if didUpdate {
				updated++
			}
		}

		log.Infof("Updated %d %s", updated, cli.name)

		if updated > 0 {
			log.Infof(ReloadMessage())
		}

		return nil
	}

	if len(args) == 0 {
		return fmt.Errorf("specify at least one %s to upgrade or '--all'", cli.singular)
	}

	updated := 0

	for _, itemName := range args {
		item := hub.GetItem(cli.name, itemName)
		if item == nil {
			return fmt.Errorf("can't find '%s' in %s", itemName, cli.name)
		}

		didUpdate, err := item.Upgrade(force)
		if err != nil {
			return err
		}

		if didUpdate {
			log.Infof("Updated %s", item.Name)

			updated++
		}
	}

	if updated > 0 {
		log.Infof(ReloadMessage())
	}

	return nil
}

func (cli cliItem) newUpgradeCmd() *cobra.Command {
	var (
		all   bool
		force bool
	)

	cmd := &cobra.Command{
		Use:               coalesce.String(cli.upgradeHelp.use, "upgrade [item]..."),
		Short:             coalesce.String(cli.upgradeHelp.short, fmt.Sprintf("Upgrade given %s", cli.oneOrMore)),
		Long:              coalesce.String(cli.upgradeHelp.long, fmt.Sprintf("Fetch and upgrade one or more %s from the hub", cli.name)),
		Example:           cli.upgradeHelp.example,
		DisableAutoGenTag: true,
		ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
			return compInstalledItems(cli.name, args, toComplete)
		},
		RunE: func(_ *cobra.Command, args []string) error {
			return cli.upgrade(args, force, all)
		},
	}

	flags := cmd.Flags()
	flags.BoolVarP(&all, "all", "a", false, fmt.Sprintf("Upgrade all the %s", cli.name))
	flags.BoolVar(&force, "force", false, "Force upgrade: overwrite tainted and outdated files")

	return cmd
}

func (cli cliItem) inspect(args []string, url string, diff bool, rev bool, noMetrics bool) error {
	cfg := cli.cfg()

	if rev && !diff {
		return errors.New("--rev can only be used with --diff")
	}

	if url != "" {
		cfg.Cscli.PrometheusUrl = url
	}

	remote := (*cwhub.RemoteHubCfg)(nil)

	if diff {
		remote = require.RemoteHub(cfg)
	}

	hub, err := require.Hub(cfg, remote, log.StandardLogger())
	if err != nil {
		return err
	}

	for _, name := range args {
		item := hub.GetItem(cli.name, name)
		if item == nil {
			return fmt.Errorf("can't find '%s' in %s", name, cli.name)
		}

		if diff {
			fmt.Println(cli.whyTainted(hub, item, rev))

			continue
		}

		if err = inspectItem(item, !noMetrics); err != nil {
			return err
		}

		if cli.inspectDetail != nil {
			if err = cli.inspectDetail(item); err != nil {
				return err
			}
		}
	}

	return nil
}

func (cli cliItem) newInspectCmd() *cobra.Command {
	var (
		url       string
		diff      bool
		rev       bool
		noMetrics bool
	)

	cmd := &cobra.Command{
		Use:               coalesce.String(cli.inspectHelp.use, "inspect [item]..."),
		Short:             coalesce.String(cli.inspectHelp.short, fmt.Sprintf("Inspect given %s", cli.oneOrMore)),
		Long:              coalesce.String(cli.inspectHelp.long, fmt.Sprintf("Inspect the state of one or more %s", cli.name)),
		Example:           cli.inspectHelp.example,
		Args:              cobra.MinimumNArgs(1),
		DisableAutoGenTag: true,
		ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
			return compInstalledItems(cli.name, args, toComplete)
		},
		RunE: func(_ *cobra.Command, args []string) error {
			return cli.inspect(args, url, diff, rev, noMetrics)
		},
	}

	flags := cmd.Flags()
	flags.StringVarP(&url, "url", "u", "", "Prometheus url")
	flags.BoolVar(&diff, "diff", false, "Show diff with latest version (for tainted items)")
	flags.BoolVar(&rev, "rev", false, "Reverse diff output")
	flags.BoolVar(&noMetrics, "no-metrics", false, "Don't show metrics (when cscli.output=human)")

	return cmd
}

func (cli cliItem) list(args []string, all bool) error {
	hub, err := require.Hub(cli.cfg(), nil, log.StandardLogger())
	if err != nil {
		return err
	}

	items := make(map[string][]*cwhub.Item)

	items[cli.name], err = selectItems(hub, cli.name, args, !all)
	if err != nil {
		return err
	}

	if err = listItems(color.Output, []string{cli.name}, items, false); err != nil {
		return err
	}

	return nil
}

func (cli cliItem) newListCmd() *cobra.Command {
	var all bool

	cmd := &cobra.Command{
		Use:               coalesce.String(cli.listHelp.use, "list [item... | -a]"),
		Short:             coalesce.String(cli.listHelp.short, fmt.Sprintf("List %s", cli.oneOrMore)),
		Long:              coalesce.String(cli.listHelp.long, fmt.Sprintf("List of installed/available/specified %s", cli.name)),
		Example:           cli.listHelp.example,
		DisableAutoGenTag: true,
		RunE: func(_ *cobra.Command, args []string) error {
			return cli.list(args, all)
		},
	}

	flags := cmd.Flags()
	flags.BoolVarP(&all, "all", "a", false, "List disabled items as well")

	return cmd
}

// return the diff between the installed version and the latest version
func (cli cliItem) itemDiff(item *cwhub.Item, reverse bool) (string, error) {
	if !item.State.Installed {
		return "", fmt.Errorf("'%s' is not installed", item.FQName())
	}

	latestContent, remoteURL, err := item.FetchLatest()
	if err != nil {
		return "", err
	}

	localContent, err := os.ReadFile(item.State.LocalPath)
	if err != nil {
		return "", fmt.Errorf("while reading %s: %w", item.State.LocalPath, err)
	}

	file1 := item.State.LocalPath
	file2 := remoteURL
	content1 := string(localContent)
	content2 := string(latestContent)

	if reverse {
		file1, file2 = file2, file1
		content1, content2 = content2, content1
	}

	edits := myers.ComputeEdits(span.URIFromPath(file1), content1, content2)
	diff := gotextdiff.ToUnified(file1, file2, content1, edits)

	return fmt.Sprintf("%s", diff), nil
}

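// Standalone sketch of the gotextdiff calls used above, for reference only
// (contents and file names are made up):
func exampleUnifiedDiff() string {
	local := "foo: 1\n"
	remote := "foo: 2\n"

	edits := myers.ComputeEdits(span.URIFromPath("local.yaml"), local, remote)

	return fmt.Sprintf("%s", gotextdiff.ToUnified("local.yaml", "remote.yaml", local, edits))
}
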
func (cli cliItem) whyTainted(hub *cwhub.Hub, item *cwhub.Item, reverse bool) string {
	if !item.State.Installed {
		return fmt.Sprintf("# %s is not installed", item.FQName())
	}

	if !item.State.Tainted {
		return fmt.Sprintf("# %s is not tainted", item.FQName())
	}

	if len(item.State.TaintedBy) == 0 {
		return fmt.Sprintf("# %s is tainted but we don't know why. please report this as a bug", item.FQName())
	}

	ret := []string{
		fmt.Sprintf("# Let's see why %s is tainted.", item.FQName()),
	}

	for _, fqsub := range item.State.TaintedBy {
		ret = append(ret, fmt.Sprintf("\n-> %s\n", fqsub))

		sub, err := hub.GetItemFQ(fqsub)
		if err != nil {
			ret = append(ret, err.Error())
		}

		diff, err := cli.itemDiff(sub, reverse)
		if err != nil {
			ret = append(ret, err.Error())
		}

		if diff != "" {
			ret = append(ret, diff)
		} else if len(sub.State.TaintedBy) > 0 {
			taintList := strings.Join(sub.State.TaintedBy, ", ")
			if sub.FQName() == taintList {
				// hack: avoid message "item is tainted by itself"
				continue
			}

			ret = append(ret, fmt.Sprintf("# %s is tainted by %s", sub.FQName(), taintList))
		}
	}

	return strings.Join(ret, "\n")
}

183  cmd/crowdsec-cli/items.go  Normal file
@@ -0,0 +1,183 @@
package main

import (
	"encoding/csv"
	"encoding/json"
	"fmt"
	"io"
	"os"
	"path/filepath"
	"slices"
	"strings"

	"gopkg.in/yaml.v3"

	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
)

// selectItems returns a slice of items of a given type, selected by name and sorted by case-insensitive name
func selectItems(hub *cwhub.Hub, itemType string, args []string, installedOnly bool) ([]*cwhub.Item, error) {
	itemNames := hub.GetNamesByType(itemType)

	notExist := []string{}

	if len(args) > 0 {
		for _, arg := range args {
			if !slices.Contains(itemNames, arg) {
				notExist = append(notExist, arg)
			}
		}
	}

	if len(notExist) > 0 {
		return nil, fmt.Errorf("item(s) '%s' not found in %s", strings.Join(notExist, ", "), itemType)
	}

	if len(args) > 0 {
		itemNames = args
		installedOnly = false
	}

	items := make([]*cwhub.Item, 0, len(itemNames))

	for _, itemName := range itemNames {
		item := hub.GetItem(itemType, itemName)
		if installedOnly && !item.State.Installed {
			continue
		}

		items = append(items, item)
	}

	cwhub.SortItemSlice(items)

	return items, nil
}

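// Behaviour note: when explicit names are passed, installedOnly is dropped on
// purpose, so a listing for a specific item also shows entries that are known
// to the hub but not installed.
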
func listItems(out io.Writer, itemTypes []string, items map[string][]*cwhub.Item, omitIfEmpty bool) error {
	switch csConfig.Cscli.Output {
	case "human":
		nothingToDisplay := true

		for _, itemType := range itemTypes {
			if omitIfEmpty && len(items[itemType]) == 0 {
				continue
			}

			listHubItemTable(out, "\n"+strings.ToUpper(itemType), items[itemType])

			nothingToDisplay = false
		}

		if nothingToDisplay {
			fmt.Println("No items to display")
		}
	case "json":
		type itemHubStatus struct {
			Name         string `json:"name"`
			LocalVersion string `json:"local_version"`
			LocalPath    string `json:"local_path"`
			Description  string `json:"description"`
			UTF8Status   string `json:"utf8_status"`
			Status       string `json:"status"`
		}

		hubStatus := make(map[string][]itemHubStatus)
		for _, itemType := range itemTypes {
			// empty slice in case there are no items of this type
			hubStatus[itemType] = make([]itemHubStatus, len(items[itemType]))

			for i, item := range items[itemType] {
				status := item.State.Text()
				statusEmo := item.State.Emoji()
				hubStatus[itemType][i] = itemHubStatus{
					Name:         item.Name,
					LocalVersion: item.State.LocalVersion,
					LocalPath:    item.State.LocalPath,
					Description:  item.Description,
					Status:       status,
					UTF8Status:   fmt.Sprintf("%v %s", statusEmo, status),
				}
			}
		}

		x, err := json.MarshalIndent(hubStatus, "", " ")
		if err != nil {
			return fmt.Errorf("failed to marshal: %w", err)
		}

		out.Write(x)
	case "raw":
		csvwriter := csv.NewWriter(out)

		header := []string{"name", "status", "version", "description"}
		if len(itemTypes) > 1 {
			header = append(header, "type")
		}

		if err := csvwriter.Write(header); err != nil {
			return fmt.Errorf("failed to write header: %w", err)
		}

		for _, itemType := range itemTypes {
			for _, item := range items[itemType] {
				row := []string{
					item.Name,
					item.State.Text(),
					item.State.LocalVersion,
					item.Description,
				}
				if len(itemTypes) > 1 {
					row = append(row, itemType)
				}

				if err := csvwriter.Write(row); err != nil {
					return fmt.Errorf("failed to write raw output: %w", err)
				}
			}
		}

		csvwriter.Flush()
	}

	return nil
}

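// For reference, the "json" branch above serializes to a map keyed by item
// type; the field names come from the struct tags, the values here are
// invented for illustration:
//
//	{
//	  "parsers": [
//	    {
//	      "name": "crowdsecurity/sshd-logs",
//	      "local_version": "2.8",
//	      "local_path": "/etc/crowdsec/parsers/s01-parse/sshd-logs.yaml",
//	      "description": "Parse sshd logs",
//	      "utf8_status": "✔️ enabled",
//	      "status": "enabled"
//	    }
//	  ]
//	}
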
func inspectItem(item *cwhub.Item, showMetrics bool) error {
	switch csConfig.Cscli.Output {
	case "human", "raw":
		enc := yaml.NewEncoder(os.Stdout)
		enc.SetIndent(2)

		if err := enc.Encode(item); err != nil {
			return fmt.Errorf("unable to encode item: %w", err)
		}
	case "json":
		b, err := json.MarshalIndent(*item, "", " ")
		if err != nil {
			return fmt.Errorf("unable to marshal item: %w", err)
		}

		fmt.Print(string(b))
	}

	if csConfig.Cscli.Output != "human" {
		return nil
	}

	if item.State.Tainted {
		fmt.Println()
		fmt.Printf(`This item is tainted. Use "%s %s inspect --diff %s" to see why.`, filepath.Base(os.Args[0]), item.Type, item.Name)
		fmt.Println()
	}

	if showMetrics {
		fmt.Printf("\nCurrent metrics: \n")

		if err := ShowMetrics(item); err != nil {
			return err
		}
	}

	return nil
}

@@ -2,177 +2,631 @@ package main
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/url"
|
||||
"os"
|
||||
"slices"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwversion"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/models"
|
||||
"github.com/go-openapi/strfmt"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v2"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/crowdsecurity/go-cs-lib/version"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/alertcontext"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/models"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/parser"
|
||||
)
|
||||
|
||||
var LAPIURLPrefix string = "v1"
|
||||
var lapiUser string
|
||||
const LAPIURLPrefix = "v1"
|
||||
|
||||
func NewLapiCmd() *cobra.Command {
|
||||
var cmdLapi = &cobra.Command{
|
||||
type cliLapi struct {
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
func NewCLILapi(cfg configGetter) *cliLapi {
|
||||
return &cliLapi{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliLapi) status() error {
|
||||
cfg := cli.cfg()
|
||||
password := strfmt.Password(cfg.API.Client.Credentials.Password)
|
||||
login := cfg.API.Client.Credentials.Login
|
||||
|
||||
origURL := cfg.API.Client.Credentials.URL
|
||||
|
||||
apiURL, err := url.Parse(origURL)
|
||||
if err != nil {
|
||||
return fmt.Errorf("parsing api url: %w", err)
|
||||
}
|
||||
|
||||
hub, err := require.Hub(cfg, nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to get scenarios: %w", err)
|
||||
}
|
||||
|
||||
Client, err = apiclient.NewDefaultClient(apiURL,
|
||||
LAPIURLPrefix,
|
||||
fmt.Sprintf("crowdsec/%s", version.String()),
|
||||
nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("init default client: %w", err)
|
||||
}
|
||||
|
||||
t := models.WatcherAuthRequest{
|
||||
MachineID: &login,
|
||||
Password: &password,
|
||||
Scenarios: scenarios,
|
||||
}
|
||||
|
||||
log.Infof("Loaded credentials from %s", cfg.API.Client.CredentialsFilePath)
|
||||
// use the original string because apiURL would print 'http://unix/'
|
||||
log.Infof("Trying to authenticate with username %s on %s", login, origURL)
|
||||
|
||||
_, _, err = Client.Auth.AuthenticateWatcher(context.Background(), t)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to authenticate to Local API (LAPI): %w", err)
|
||||
}
|
||||
|
||||
log.Infof("You can successfully interact with Local API (LAPI)")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliLapi) register(apiURL string, outputFile string, machine string) error {
|
||||
var err error
|
||||
|
||||
lapiUser := machine
|
||||
cfg := cli.cfg()
|
||||
|
||||
if lapiUser == "" {
|
||||
lapiUser, err = generateID("")
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to generate machine id: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
password := strfmt.Password(generatePassword(passwordLength))
|
||||
|
||||
apiurl, err := prepareAPIURL(cfg.API.Client, apiURL)
|
||||
if err != nil {
|
||||
return fmt.Errorf("parsing api url: %w", err)
|
||||
}
|
||||
|
||||
_, err = apiclient.RegisterClient(&apiclient.Config{
|
||||
MachineID: lapiUser,
|
||||
Password: password,
|
||||
UserAgent: fmt.Sprintf("crowdsec/%s", version.String()),
|
||||
URL: apiurl,
|
||||
VersionPrefix: LAPIURLPrefix,
|
||||
}, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("api client register: %w", err)
|
||||
}
|
||||
|
||||
log.Printf("Successfully registered to Local API (LAPI)")
|
||||
|
||||
var dumpFile string
|
||||
|
||||
if outputFile != "" {
|
||||
dumpFile = outputFile
|
||||
} else if cfg.API.Client.CredentialsFilePath != "" {
|
||||
dumpFile = cfg.API.Client.CredentialsFilePath
|
||||
} else {
|
||||
dumpFile = ""
|
||||
}
|
||||
|
||||
apiCfg := csconfig.ApiCredentialsCfg{
|
||||
Login: lapiUser,
|
||||
Password: password.String(),
|
||||
URL: apiURL,
|
||||
}
|
||||
|
||||
apiConfigDump, err := yaml.Marshal(apiCfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to marshal api credentials: %w", err)
|
||||
}
|
||||
|
||||
if dumpFile != "" {
|
||||
err = os.WriteFile(dumpFile, apiConfigDump, 0o600)
|
||||
if err != nil {
|
||||
return fmt.Errorf("write api credentials to '%s' failed: %w", dumpFile, err)
|
||||
}
|
||||
|
||||
log.Printf("Local API credentials written to '%s'", dumpFile)
|
||||
} else {
|
||||
fmt.Printf("%s\n", string(apiConfigDump))
|
||||
}
|
||||
|
||||
log.Warning(ReloadMessage())
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// prepareAPIURL checks/fixes a LAPI connection url (http, https or socket) and returns an URL struct
|
||||
func prepareAPIURL(clientCfg *csconfig.LocalApiClientCfg, apiURL string) (*url.URL, error) {
|
||||
if apiURL == "" {
|
||||
if clientCfg == nil || clientCfg.Credentials == nil || clientCfg.Credentials.URL == "" {
|
||||
return nil, errors.New("no Local API URL. Please provide it in your configuration or with the -u parameter")
|
||||
}
|
||||
|
||||
apiURL = clientCfg.Credentials.URL
|
||||
}
|
||||
|
||||
// URL needs to end with /, but user doesn't care
|
||||
if !strings.HasSuffix(apiURL, "/") {
|
||||
apiURL += "/"
|
||||
}
|
||||
|
||||
// URL needs to start with http://, but user doesn't care
|
||||
if !strings.HasPrefix(apiURL, "http://") && !strings.HasPrefix(apiURL, "https://") && !strings.HasPrefix(apiURL, "/") {
|
||||
apiURL = "http://" + apiURL
|
||||
}
|
||||
|
||||
return url.Parse(apiURL)
|
||||
}
|
||||
|
||||
func (cli *cliLapi) newStatusCmd() *cobra.Command {
|
||||
cmdLapiStatus := &cobra.Command{
|
||||
Use: "status",
|
||||
Short: "Check authentication to Local API (LAPI)",
|
||||
Args: cobra.MinimumNArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.status()
|
||||
},
|
||||
}
|
||||
|
||||
return cmdLapiStatus
|
||||
}
|
||||
|
||||
func (cli *cliLapi) newRegisterCmd() *cobra.Command {
|
||||
var (
|
||||
apiURL string
|
||||
outputFile string
|
||||
machine string
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "register",
|
||||
Short: "Register a machine to Local API (LAPI)",
|
||||
Long: `Register your machine to the Local API (LAPI).
|
||||
Keep in mind the machine needs to be validated by an administrator on LAPI side to be effective.`,
|
||||
Args: cobra.MinimumNArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.register(apiURL, outputFile, machine)
|
||||
},
|
||||
}
|
||||
|
||||
flags := cmd.Flags()
|
||||
flags.StringVarP(&apiURL, "url", "u", "", "URL of the API (ie. http://127.0.0.1)")
|
||||
flags.StringVarP(&outputFile, "file", "f", "", "output file destination")
|
||||
flags.StringVar(&machine, "machine", "", "Name of the machine to register with")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliLapi) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "lapi [action]",
|
||||
Short: "Manage interaction with Local API (LAPI)",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
|
||||
if err := csConfig.LoadAPIClient(); err != nil {
|
||||
return fmt.Errorf("loading api client: %s", err.Error())
|
||||
}
|
||||
if csConfig.API.Client == nil {
|
||||
log.Fatalln("There is no API->client configuration")
|
||||
}
|
||||
if csConfig.API.Client.Credentials == nil {
|
||||
log.Fatalf("no configuration for Local API (LAPI) in '%s'", *csConfig.FilePath)
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
if err := cli.cfg().LoadAPIClient(); err != nil {
|
||||
return fmt.Errorf("loading api client: %w", err)
|
||||
}
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
var cmdLapiRegister = &cobra.Command{
|
||||
Use: "register",
|
||||
Short: "Register a machine to Local API (LAPI)",
|
||||
Long: `Register your machine to the Local API (LAPI).
|
||||
Keep in mind the machine needs to be validated by an administrator on LAPI side to be effective.`,
|
||||
Args: cobra.MinimumNArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
if lapiUser == "" {
|
||||
lapiUser, err = generateID("")
|
||||
if err != nil {
|
||||
log.Fatalf("unable to generate machine id: %s", err)
|
||||
}
|
||||
}
|
||||
password := strfmt.Password(generatePassword(passwordLength))
|
||||
if apiURL == "" {
|
||||
if csConfig.API.Client != nil && csConfig.API.Client.Credentials != nil && csConfig.API.Client.Credentials.URL != "" {
|
||||
apiURL = csConfig.API.Client.Credentials.URL
|
||||
} else {
|
||||
log.Fatalf("No Local API URL. Please provide it in your configuration or with the -u parameter")
|
||||
}
|
||||
}
|
||||
/*URL needs to end with /, but user doesn't care*/
|
||||
if !strings.HasSuffix(apiURL, "/") {
|
||||
apiURL += "/"
|
||||
}
|
||||
/*URL needs to start with http://, but user doesn't care*/
|
||||
if !strings.HasPrefix(apiURL, "http://") && !strings.HasPrefix(apiURL, "https://") {
|
||||
apiURL = "http://" + apiURL
|
||||
}
|
||||
apiurl, err := url.Parse(apiURL)
|
||||
if err != nil {
|
||||
log.Fatalf("parsing api url: %s", err)
|
||||
}
|
||||
_, err = apiclient.RegisterClient(&apiclient.Config{
|
||||
MachineID: lapiUser,
|
||||
Password: password,
|
||||
UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()),
|
||||
URL: apiurl,
|
||||
VersionPrefix: LAPIURLPrefix,
|
||||
}, nil)
|
||||
cmd.AddCommand(cli.newRegisterCmd())
|
||||
cmd.AddCommand(cli.newStatusCmd())
|
||||
cmd.AddCommand(cli.newContextCmd())
|
||||
|
||||
if err != nil {
|
||||
log.Fatalf("api client register: %s", err)
|
||||
}
|
||||
|
||||
log.Printf("Successfully registered to Local API (LAPI)")
|
||||
|
||||
var dumpFile string
|
||||
if outputFile != "" {
|
||||
dumpFile = outputFile
|
||||
} else if csConfig.API.Client.CredentialsFilePath != "" {
|
||||
dumpFile = csConfig.API.Client.CredentialsFilePath
|
||||
} else {
|
||||
dumpFile = ""
|
||||
}
|
||||
apiCfg := csconfig.ApiCredentialsCfg{
|
||||
Login: lapiUser,
|
||||
Password: password.String(),
|
||||
URL: apiURL,
|
||||
}
|
||||
apiConfigDump, err := yaml.Marshal(apiCfg)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to marshal api credentials: %s", err)
|
||||
}
|
||||
if dumpFile != "" {
|
||||
err = ioutil.WriteFile(dumpFile, apiConfigDump, 0644)
|
||||
if err != nil {
|
||||
log.Fatalf("write api credentials in '%s' failed: %s", dumpFile, err)
|
||||
}
|
||||
log.Printf("Local API credentials dumped to '%s'", dumpFile)
|
||||
} else {
|
||||
fmt.Printf("%s\n", string(apiConfigDump))
|
||||
}
|
||||
log.Warningf(ReloadMessage())
|
||||
},
|
||||
}
|
||||
cmdLapiRegister.Flags().StringVarP(&apiURL, "url", "u", "", "URL of the API (ie. http://127.0.0.1)")
|
||||
cmdLapiRegister.Flags().StringVarP(&outputFile, "file", "f", "", "output file destination")
|
||||
cmdLapiRegister.Flags().StringVar(&lapiUser, "machine", "", "Name of the machine to register with")
|
||||
cmdLapi.AddCommand(cmdLapiRegister)
|
||||
|
||||
var cmdLapiStatus = &cobra.Command{
|
||||
Use: "status",
|
||||
Short: "Check authentication to Local API (LAPI)",
|
||||
Args: cobra.MinimumNArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
|
||||
password := strfmt.Password(csConfig.API.Client.Credentials.Password)
|
||||
apiurl, err := url.Parse(csConfig.API.Client.Credentials.URL)
|
||||
login := csConfig.API.Client.Credentials.Login
|
||||
if err != nil {
|
||||
log.Fatalf("parsing api url ('%s'): %s", apiurl, err)
|
||||
}
|
||||
if err := csConfig.LoadHub(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
|
||||
if err := cwhub.GetHubIdx(csConfig.Hub); err != nil {
|
||||
log.Fatalf("Failed to load hub index : %s", err)
|
||||
log.Infoln("Run 'sudo cscli hub update' to get the hub index")
|
||||
}
|
||||
scenarios, err := cwhub.GetInstalledScenariosAsString()
|
||||
if err != nil {
|
||||
log.Fatalf("failed to get scenarios : %s", err.Error())
|
||||
}
|
||||
|
||||
Client, err = apiclient.NewDefaultClient(apiurl,
|
||||
LAPIURLPrefix,
|
||||
fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()),
|
||||
nil)
|
||||
if err != nil {
|
||||
log.Fatalf("init default client: %s", err)
|
||||
}
|
||||
t := models.WatcherAuthRequest{
|
||||
MachineID: &login,
|
||||
Password: &password,
|
||||
Scenarios: scenarios,
|
||||
}
|
||||
log.Infof("Loaded credentials from %s", csConfig.API.Client.CredentialsFilePath)
|
||||
log.Infof("Trying to authenticate with username %s on %s", login, apiurl)
|
||||
_, err = Client.Auth.AuthenticateWatcher(context.Background(), t)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to authenticate to Local API (LAPI) : %s", err)
|
||||
} else {
|
||||
log.Infof("You can successfully interact with Local API (LAPI)")
|
||||
}
|
||||
},
|
||||
}
|
||||
cmdLapi.AddCommand(cmdLapiStatus)
|
||||
return cmdLapi
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliLapi) addContext(key string, values []string) error {
|
||||
cfg := cli.cfg()
|
||||
|
||||
if err := alertcontext.ValidateContextExpr(key, values); err != nil {
|
||||
return fmt.Errorf("invalid context configuration: %w", err)
|
||||
}
|
||||
|
||||
if _, ok := cfg.Crowdsec.ContextToSend[key]; !ok {
|
||||
cfg.Crowdsec.ContextToSend[key] = make([]string, 0)
|
||||
|
||||
log.Infof("key '%s' added", key)
|
||||
}
|
||||
|
||||
data := cfg.Crowdsec.ContextToSend[key]
|
||||
|
||||
for _, val := range values {
|
||||
if !slices.Contains(data, val) {
|
||||
log.Infof("value '%s' added to key '%s'", val, key)
|
||||
data = append(data, val)
|
||||
}
|
||||
|
||||
cfg.Crowdsec.ContextToSend[key] = data
|
||||
}
|
||||
|
||||
if err := cfg.Crowdsec.DumpContextConfigFile(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliLapi) newContextAddCmd() *cobra.Command {
|
||||
var (
|
||||
keyToAdd string
|
||||
valuesToAdd []string
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "add",
|
||||
Short: "Add context to send with alerts. You must specify the output key with the expr value you want",
|
||||
Example: `cscli lapi context add --key source_ip --value evt.Meta.source_ip
|
||||
cscli lapi context add --key file_source --value evt.Line.Src
|
||||
cscli lapi context add --value evt.Meta.source_ip --value evt.Meta.target_user
|
||||
`,
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
hub, err := require.Hub(cli.cfg(), nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = alertcontext.LoadConsoleContext(cli.cfg(), hub); err != nil {
|
||||
return fmt.Errorf("while loading context: %w", err)
|
||||
}
|
||||
|
||||
if keyToAdd != "" {
|
||||
if err := cli.addContext(keyToAdd, valuesToAdd); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
for _, v := range valuesToAdd {
|
||||
keySlice := strings.Split(v, ".")
|
||||
key := keySlice[len(keySlice)-1]
|
||||
value := []string{v}
|
||||
if err := cli.addContext(key, value); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
flags := cmd.Flags()
|
||||
flags.StringVarP(&keyToAdd, "key", "k", "", "The key of the different values to send")
|
||||
flags.StringSliceVar(&valuesToAdd, "value", []string{}, "The expr fields to associate with the key")
|
||||
cmd.MarkFlagRequired("value")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliLapi) newContextStatusCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "status",
|
||||
Short: "List context to send with alerts",
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
hub, err := require.Hub(cfg, nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err = alertcontext.LoadConsoleContext(cfg, hub); err != nil {
|
||||
return fmt.Errorf("while loading context: %w", err)
|
||||
}
|
||||
|
||||
if len(cfg.Crowdsec.ContextToSend) == 0 {
|
||||
fmt.Println("No context found on this agent. You can use 'cscli lapi context add' to add context to your alerts.")
|
||||
return nil
|
||||
}
|
||||
|
||||
dump, err := yaml.Marshal(cfg.Crowdsec.ContextToSend)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to show context status: %w", err)
|
||||
}
|
||||
|
||||
fmt.Print(string(dump))
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliLapi) newContextDetectCmd() *cobra.Command {
|
||||
var detectAll bool
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "detect",
|
||||
Short: "Detect available fields from the installed parsers",
|
||||
Example: `cscli lapi context detect --all
|
||||
cscli lapi context detect crowdsecurity/sshd-logs
|
||||
`,
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
cfg := cli.cfg()
|
||||
if !detectAll && len(args) == 0 {
|
||||
log.Infof("Please provide parsers to detect or --all flag.")
|
||||
printHelp(cmd)
|
||||
}
|
||||
|
||||
// to avoid all the log.Info from the loaders functions
|
||||
log.SetLevel(log.WarnLevel)
|
||||
|
||||
if err := exprhelpers.Init(nil); err != nil {
|
||||
return fmt.Errorf("failed to init expr helpers: %w", err)
|
||||
}
|
||||
|
||||
hub, err := require.Hub(cfg, nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
csParsers := parser.NewParsers(hub)
|
||||
if csParsers, err = parser.LoadParsers(cfg, csParsers); err != nil {
|
||||
return fmt.Errorf("unable to load parsers: %w", err)
|
||||
}
|
||||
|
||||
fieldByParsers := make(map[string][]string)
|
||||
for _, node := range csParsers.Nodes {
|
||||
if !detectAll && !slices.Contains(args, node.Name) {
|
||||
continue
|
||||
}
|
||||
if !detectAll {
|
||||
args = removeFromSlice(node.Name, args)
|
||||
}
|
||||
fieldByParsers[node.Name] = make([]string, 0)
|
||||
fieldByParsers[node.Name] = detectNode(node, *csParsers.Ctx)
|
||||
|
||||
subNodeFields := detectSubNode(node, *csParsers.Ctx)
|
||||
for _, field := range subNodeFields {
|
||||
if !slices.Contains(fieldByParsers[node.Name], field) {
|
||||
fieldByParsers[node.Name] = append(fieldByParsers[node.Name], field)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Printf("Acquisition :\n\n")
|
||||
fmt.Printf(" - evt.Line.Module\n")
|
||||
fmt.Printf(" - evt.Line.Raw\n")
|
||||
fmt.Printf(" - evt.Line.Src\n")
|
||||
fmt.Println()
|
||||
|
||||
parsersKey := make([]string, 0)
|
||||
for k := range fieldByParsers {
|
||||
parsersKey = append(parsersKey, k)
|
||||
}
|
||||
sort.Strings(parsersKey)
|
||||
|
||||
for _, k := range parsersKey {
|
||||
if len(fieldByParsers[k]) == 0 {
|
||||
continue
|
||||
}
|
||||
fmt.Printf("%s :\n\n", k)
|
||||
values := fieldByParsers[k]
|
||||
sort.Strings(values)
|
||||
for _, value := range values {
|
||||
fmt.Printf(" - %s\n", value)
|
||||
}
|
||||
fmt.Println()
|
||||
}
|
||||
|
||||
if len(args) > 0 {
|
||||
for _, parserNotFound := range args {
|
||||
log.Errorf("parser '%s' not found, can't detect fields", parserNotFound)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmd.Flags().BoolVarP(&detectAll, "all", "a", false, "Detect evt field for all installed parser")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliLapi) newContextDeleteCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "delete",
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
filePath := cli.cfg().Crowdsec.ConsoleContextPath
|
||||
if filePath == "" {
|
||||
filePath = "the context file"
|
||||
}
|
||||
fmt.Printf("Command 'delete' is deprecated, please manually edit %s.", filePath)
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliLapi) newContextCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "context [command]",
|
||||
Short: "Manage context to send with alerts",
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
if err := cfg.LoadCrowdsec(); err != nil {
|
||||
fileNotFoundMessage := fmt.Sprintf("failed to open context file: open %s: no such file or directory", cfg.Crowdsec.ConsoleContextPath)
|
||||
if err.Error() != fileNotFoundMessage {
|
||||
return fmt.Errorf("unable to load CrowdSec agent configuration: %w", err)
|
||||
}
|
||||
}
|
||||
if cfg.DisableAgent {
|
||||
return errors.New("agent is disabled and lapi context can only be used on the agent")
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
Run: func(cmd *cobra.Command, _ []string) {
|
||||
printHelp(cmd)
|
||||
},
|
||||
}
|
||||
|
||||
cmd.AddCommand(cli.newContextAddCmd())
|
||||
cmd.AddCommand(cli.newContextStatusCmd())
|
||||
cmd.AddCommand(cli.newContextDetectCmd())
|
||||
cmd.AddCommand(cli.newContextDeleteCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func detectStaticField(grokStatics []parser.ExtraField) []string {
|
||||
ret := make([]string, 0)
|
||||
|
||||
for _, static := range grokStatics {
|
||||
if static.Parsed != "" {
|
||||
fieldName := fmt.Sprintf("evt.Parsed.%s", static.Parsed)
|
||||
if !slices.Contains(ret, fieldName) {
|
||||
ret = append(ret, fieldName)
|
||||
}
|
||||
}
|
||||
|
||||
if static.Meta != "" {
|
||||
fieldName := fmt.Sprintf("evt.Meta.%s", static.Meta)
|
||||
if !slices.Contains(ret, fieldName) {
|
||||
ret = append(ret, fieldName)
|
||||
}
|
||||
}
|
||||
|
||||
if static.TargetByName != "" {
|
||||
fieldName := static.TargetByName
|
||||
if !strings.HasPrefix(fieldName, "evt.") {
|
||||
fieldName = "evt." + fieldName
|
||||
}
|
||||
|
||||
if !slices.Contains(ret, fieldName) {
|
||||
ret = append(ret, fieldName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func detectNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
|
||||
ret := make([]string, 0)
|
||||
|
||||
if node.Grok.RunTimeRegexp != nil {
|
||||
for _, capturedField := range node.Grok.RunTimeRegexp.Names() {
|
||||
fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField)
|
||||
if !slices.Contains(ret, fieldName) {
|
||||
ret = append(ret, fieldName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if node.Grok.RegexpName != "" {
|
||||
grokCompiled, err := parserCTX.Grok.Get(node.Grok.RegexpName)
|
||||
// ignore error (parser does not exist?)
|
||||
if err == nil {
|
||||
for _, capturedField := range grokCompiled.Names() {
|
||||
fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField)
|
||||
if !slices.Contains(ret, fieldName) {
|
||||
ret = append(ret, fieldName)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(node.Grok.Statics) > 0 {
|
||||
staticsField := detectStaticField(node.Grok.Statics)
|
||||
for _, staticField := range staticsField {
|
||||
if !slices.Contains(ret, staticField) {
|
||||
ret = append(ret, staticField)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(node.Statics) > 0 {
|
||||
staticsField := detectStaticField(node.Statics)
|
||||
for _, staticField := range staticsField {
|
||||
if !slices.Contains(ret, staticField) {
|
||||
ret = append(ret, staticField)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
|
||||
ret := make([]string, 0)
|
||||
|
||||
for _, subnode := range node.LeavesNodes {
|
||||
if subnode.Grok.RunTimeRegexp != nil {
|
||||
for _, capturedField := range subnode.Grok.RunTimeRegexp.Names() {
|
||||
fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField)
|
||||
if !slices.Contains(ret, fieldName) {
|
||||
ret = append(ret, fieldName)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if subnode.Grok.RegexpName != "" {
|
||||
grokCompiled, err := parserCTX.Grok.Get(subnode.Grok.RegexpName)
|
||||
if err == nil {
|
||||
// ignore error (parser does not exist?)
|
||||
for _, capturedField := range grokCompiled.Names() {
|
||||
fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField)
|
||||
if !slices.Contains(ret, fieldName) {
|
||||
ret = append(ret, fieldName)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(subnode.Grok.Statics) > 0 {
|
||||
staticsField := detectStaticField(subnode.Grok.Statics)
|
||||
for _, staticField := range staticsField {
|
||||
if !slices.Contains(ret, staticField) {
|
||||
ret = append(ret, staticField)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(subnode.Statics) > 0 {
|
||||
staticsField := detectStaticField(subnode.Statics)
|
||||
for _, staticField := range staticsField {
|
||||
if !slices.Contains(ret, staticField) {
|
||||
ret = append(ret, staticField)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
|

49  cmd/crowdsec-cli/lapi_test.go  Normal file
@@ -0,0 +1,49 @@
package main

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/crowdsecurity/crowdsec/pkg/csconfig"
)

func TestPrepareAPIURL_NoProtocol(t *testing.T) {
	url, err := prepareAPIURL(nil, "localhost:81")
	require.NoError(t, err)
	assert.Equal(t, "http://localhost:81/", url.String())
}

func TestPrepareAPIURL_Http(t *testing.T) {
	url, err := prepareAPIURL(nil, "http://localhost:81")
	require.NoError(t, err)
	assert.Equal(t, "http://localhost:81/", url.String())
}

func TestPrepareAPIURL_Https(t *testing.T) {
	url, err := prepareAPIURL(nil, "https://localhost:81")
	require.NoError(t, err)
	assert.Equal(t, "https://localhost:81/", url.String())
}

func TestPrepareAPIURL_UnixSocket(t *testing.T) {
	url, err := prepareAPIURL(nil, "/path/socket")
	require.NoError(t, err)
	assert.Equal(t, "/path/socket/", url.String())
}

func TestPrepareAPIURL_Empty(t *testing.T) {
	_, err := prepareAPIURL(nil, "")
	require.Error(t, err)
}

func TestPrepareAPIURL_Empty_ConfigOverride(t *testing.T) {
	url, err := prepareAPIURL(&csconfig.LocalApiClientCfg{
		Credentials: &csconfig.ApiCredentialsCfg{
			URL: "localhost:80",
		},
	}, "")
	require.NoError(t, err)
	assert.Equal(t, "http://localhost:80/", url.String())
}
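
// A further edge case that could be covered in the same style (sketch only,
// not part of this change): a URL that already ends with a slash must not be
// doubled up by prepareAPIURL.
func TestPrepareAPIURL_TrailingSlash(t *testing.T) {
	url, err := prepareAPIURL(nil, "http://localhost:81/")
	require.NoError(t, err)
	assert.Equal(t, "http://localhost:81/", url.String())
}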

@@ -4,56 +4,49 @@ import (
saferand "crypto/rand"
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"math/big"
|
||||
"os"
|
||||
"slices"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/AlecAivazis/survey/v2"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/database"
|
||||
"github.com/denisbrodbeck/machineid"
|
||||
"github.com/enescakir/emoji"
|
||||
"github.com/fatih/color"
|
||||
"github.com/go-openapi/strfmt"
|
||||
"github.com/olekukonko/tablewriter"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/google/uuid"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v2"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/crowdsecurity/machineid"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/database"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/database/ent"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
)
|
||||
|
||||
var machineID string
|
||||
var machinePassword string
|
||||
var interactive bool
|
||||
var apiURL string
|
||||
var outputFile string
|
||||
var forceAdd bool
|
||||
var autoAdd bool
|
||||
|
||||
var (
|
||||
passwordLength = 64
|
||||
upper = "ABCDEFGHIJKLMNOPQRSTUVWXY"
|
||||
lower = "abcdefghijklmnopqrstuvwxyz"
|
||||
digits = "0123456789"
|
||||
)
|
||||
|
||||
const (
|
||||
uuid = "/proc/sys/kernel/random/uuid"
|
||||
)
|
||||
const passwordLength = 64
|
||||
|
||||
func generatePassword(length int) string {
|
||||
upper := "ABCDEFGHIJKLMNOPQRSTUVWXY"
|
||||
lower := "abcdefghijklmnopqrstuvwxyz"
|
||||
digits := "0123456789"
|
||||
|
||||
charset := upper + lower + digits
|
||||
charsetLength := len(charset)
|
||||
|
||||
buf := make([]byte, length)
|
||||
|
||||
for i := 0; i < length; i++ {
|
||||
rInt, err := saferand.Int(saferand.Reader, big.NewInt(int64(charsetLength)))
|
||||
if err != nil {
|
||||
log.Fatalf("failed getting data from prng for password generation : %s", err)
|
||||
}
|
||||
|
||||
buf[i] = charset[rInt.Int64()]
|
||||
}
|
||||
|
||||
|
@ -68,13 +61,15 @@ func generateIDPrefix() (string, error) {
|
|||
if err == nil {
|
||||
return prefix, nil
|
||||
}
|
||||
|
||||
log.Debugf("failed to get machine-id with usual files: %s", err)
|
||||
|
||||
bID, err := ioutil.ReadFile(uuid)
|
||||
bID, err := uuid.NewRandom()
|
||||
if err == nil {
|
||||
return string(bID), nil
|
||||
return bID.String(), nil
|
||||
}
|
||||
return "", errors.Wrap(err, "generating machine id")
|
||||
|
||||
return "", fmt.Errorf("generating machine id: %w", err)
|
||||
}
|
||||
|
||||
// Generate a unique identifier, composed by a prefix and a random suffix.
|
||||
|
@ -84,260 +79,427 @@ func generateID(prefix string) (string, error) {
|
|||
if prefix == "" {
|
||||
prefix, err = generateIDPrefix()
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
prefix = strings.ReplaceAll(prefix, "-", "")[:32]
|
||||
suffix := generatePassword(16)
|
||||
|
||||
return prefix + suffix, nil
|
||||
}
|
||||
|
||||
func NewMachinesCmd() *cobra.Command {
|
||||
/* ---- DECISIONS COMMAND */
|
||||
var cmdMachines = &cobra.Command{
|
||||
// getLastHeartbeat returns the last heartbeat timestamp of a machine
|
||||
// and a boolean indicating if the machine is considered active or not.
|
||||
func getLastHeartbeat(m *ent.Machine) (string, bool) {
|
||||
if m.LastHeartbeat == nil {
|
||||
return "-", false
|
||||
}
|
||||
|
||||
elapsed := time.Now().UTC().Sub(*m.LastHeartbeat)
|
||||
|
||||
hb := elapsed.Truncate(time.Second).String()
|
||||
if elapsed > 2*time.Minute {
|
||||
return hb, false
|
||||
}
|
||||
|
||||
return hb, true
|
||||
}
|
||||
|
||||
type cliMachines struct {
|
||||
db *database.Client
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
func NewCLIMachines(cfg configGetter) *cliMachines {
|
||||
return &cliMachines{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliMachines) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "machines [action]",
|
||||
Short: "Manage local API machines [requires local API]",
|
||||
Long: `To list/add/delete/validate machines.
|
||||
Long: `To list/add/delete/validate/prune machines.
|
||||
Note: This command requires database direct access, so is intended to be run on the local API machine.
|
||||
`,
|
||||
Example: `cscli machines [action]`,
|
||||
DisableAutoGenTag: true,
|
||||
Aliases: []string{"machine"},
|
||||
PersistentPreRun: func(cmd *cobra.Command, args []string) {
|
||||
if err := csConfig.LoadAPIServer(); err != nil || csConfig.DisableAPI {
|
||||
if err != nil {
|
||||
log.Errorf("local api : %s", err)
|
||||
}
|
||||
log.Fatal("Local API is disabled, please run this command on the local API machine")
|
||||
}
|
||||
if err := csConfig.LoadDBConfig(); err != nil {
|
||||
log.Errorf("This command requires direct database access (must be run on the local API machine)")
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
var cmdMachinesList = &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "List machines",
|
||||
Long: `List `,
|
||||
Example: `cscli machines list`,
|
||||
Args: cobra.MaximumNArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
PreRun: func(cmd *cobra.Command, args []string) {
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
var err error
|
||||
dbClient, err = database.NewClient(csConfig.DbConfig)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to create new database client: %s", err)
|
||||
if err = require.LAPI(cli.cfg()); err != nil {
|
||||
return err
|
||||
}
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
machines, err := dbClient.ListMachines()
|
||||
cli.db, err = database.NewClient(cli.cfg().DbConfig)
|
||||
if err != nil {
|
||||
log.Errorf("unable to list blockers: %s", err)
|
||||
return fmt.Errorf("unable to create new database client: %w", err)
|
||||
}
|
||||
if csConfig.Cscli.Output == "human" {
|
||||
table := tablewriter.NewWriter(os.Stdout)
|
||||
table.SetCenterSeparator("")
|
||||
table.SetColumnSeparator("")
|
||||
|
||||
table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
table.SetHeader([]string{"Name", "IP Address", "Last Update", "Status", "Version"})
|
||||
for _, w := range machines {
|
||||
var validated string
|
||||
if w.IsValidated {
|
||||
validated = emoji.CheckMark.String()
|
||||
} else {
|
||||
validated = emoji.Prohibited.String()
|
||||
}
|
||||
table.Append([]string{w.MachineId, w.IpAddress, w.UpdatedAt.Format(time.RFC3339), validated, w.Version})
|
||||
}
|
||||
table.Render()
|
||||
} else if csConfig.Cscli.Output == "json" {
|
||||
x, err := json.MarshalIndent(machines, "", " ")
|
||||
if err != nil {
|
||||
log.Fatalf("failed to unmarshal")
|
||||
}
|
||||
fmt.Printf("%s", string(x))
|
||||
} else if csConfig.Cscli.Output == "raw" {
|
||||
csvwriter := csv.NewWriter(os.Stdout)
|
||||
err := csvwriter.Write([]string{"machine_id", "ip_address", "updated_at", "validated", "version"})
|
||||
if err != nil {
|
||||
log.Fatalf("failed to write header: %s", err)
|
||||
}
|
||||
for _, w := range machines {
|
||||
var validated string
|
||||
if w.IsValidated {
|
||||
validated = "true"
|
||||
} else {
|
||||
validated = "false"
|
||||
}
|
||||
err := csvwriter.Write([]string{w.MachineId, w.IpAddress, w.UpdatedAt.Format(time.RFC3339), validated, w.Version})
|
||||
if err != nil {
|
||||
log.Fatalf("failed to write raw output : %s", err)
|
||||
}
|
||||
}
|
||||
csvwriter.Flush()
|
||||
} else {
|
||||
log.Errorf("unknown output '%s'", csConfig.Cscli.Output)
|
||||
}
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmdMachines.AddCommand(cmdMachinesList)
|
||||
|
||||
var cmdMachinesAdd = &cobra.Command{
|
||||
cmd.AddCommand(cli.newListCmd())
|
||||
cmd.AddCommand(cli.newAddCmd())
|
||||
cmd.AddCommand(cli.newDeleteCmd())
|
||||
cmd.AddCommand(cli.newValidateCmd())
|
||||
cmd.AddCommand(cli.newPruneCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliMachines) list() error {
|
||||
out := color.Output
|
||||
|
||||
machines, err := cli.db.ListMachines()
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to list machines: %w", err)
|
||||
}
|
||||
|
||||
switch cli.cfg().Cscli.Output {
|
||||
case "human":
|
||||
getAgentsTable(out, machines)
|
||||
case "json":
|
||||
enc := json.NewEncoder(out)
|
||||
enc.SetIndent("", " ")
|
||||
|
||||
if err := enc.Encode(machines); err != nil {
|
||||
return errors.New("failed to marshal")
|
||||
}
|
||||
|
||||
return nil
|
||||
case "raw":
|
||||
csvwriter := csv.NewWriter(out)
|
||||
|
||||
err := csvwriter.Write([]string{"machine_id", "ip_address", "updated_at", "validated", "version", "auth_type", "last_heartbeat"})
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write header: %w", err)
|
||||
}
|
||||
|
||||
for _, m := range machines {
|
||||
validated := "false"
|
||||
if m.IsValidated {
|
||||
validated = "true"
|
||||
}
|
||||
|
||||
hb, _ := getLastHeartbeat(m)
|
||||
|
||||
if err := csvwriter.Write([]string{m.MachineId, m.IpAddress, m.UpdatedAt.Format(time.RFC3339), validated, m.Version, m.AuthType, hb}); err != nil {
|
||||
return fmt.Errorf("failed to write raw output: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
csvwriter.Flush()
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliMachines) newListCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "list all machines in the database",
|
||||
Long: `list all machines in the database with their status and last heartbeat`,
|
||||
Example: `cscli machines list`,
|
||||
Args: cobra.NoArgs,
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.list()
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliMachines) newAddCmd() *cobra.Command {
|
||||
var (
|
||||
password MachinePassword
|
||||
dumpFile string
|
||||
apiURL string
|
||||
interactive bool
|
||||
autoAdd bool
|
||||
force bool
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "add",
|
||||
Short: "add machine to the database.",
|
||||
Short: "add a single machine to the database",
|
||||
DisableAutoGenTag: true,
|
||||
Long: `Register a new machine in the database. cscli should be on the same machine as LAPI.`,
|
||||
Example: `
|
||||
cscli machines add --auto
|
||||
Example: `cscli machines add --auto
|
||||
cscli machines add MyTestMachine --auto
|
||||
cscli machines add MyTestMachine --password MyPassword
|
||||
`,
|
||||
PreRun: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
dbClient, err = database.NewClient(csConfig.DbConfig)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to create new database client: %s", err)
|
||||
}
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
var dumpFile string
|
||||
var err error
|
||||
|
||||
// create machineID if not specified by user
|
||||
if len(args) == 0 {
|
||||
if !autoAdd {
|
||||
printHelp(cmd)
|
||||
return
|
||||
}
|
||||
machineID, err = generateID("")
|
||||
if err != nil {
|
||||
log.Fatalf("unable to generate machine id : %s", err)
|
||||
}
|
||||
} else {
|
||||
machineID = args[0]
|
||||
}
|
||||
|
||||
/*check if file already exists*/
|
||||
if outputFile != "" {
|
||||
dumpFile = outputFile
|
||||
} else if csConfig.API.Client != nil && csConfig.API.Client.CredentialsFilePath != "" {
|
||||
dumpFile = csConfig.API.Client.CredentialsFilePath
|
||||
}
|
||||
|
||||
// create a password if it's not specified by user
|
||||
if machinePassword == "" && !interactive {
|
||||
if !autoAdd {
|
||||
printHelp(cmd)
|
||||
return
|
||||
}
|
||||
machinePassword = generatePassword(passwordLength)
|
||||
} else if machinePassword == "" && interactive {
|
||||
qs := &survey.Password{
|
||||
Message: "Please provide a password for the machine",
|
||||
}
|
||||
survey.AskOne(qs, &machinePassword)
|
||||
}
|
||||
password := strfmt.Password(machinePassword)
|
||||
_, err = dbClient.CreateMachine(&machineID, &password, "", true, forceAdd)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to create machine: %s", err)
|
||||
}
|
||||
log.Infof("Machine '%s' successfully added to the local API", machineID)
|
||||
|
||||
if apiURL == "" {
|
||||
if csConfig.API.Client != nil && csConfig.API.Client.Credentials != nil && csConfig.API.Client.Credentials.URL != "" {
|
||||
apiURL = csConfig.API.Client.Credentials.URL
|
||||
} else if csConfig.API.Server != nil && csConfig.API.Server.ListenURI != "" {
|
||||
apiURL = "http://" + csConfig.API.Server.ListenURI
|
||||
} else {
|
||||
log.Fatalf("unable to dump an api URL. Please provide it in your configuration or with the -u parameter")
|
||||
}
|
||||
}
|
||||
apiCfg := csconfig.ApiCredentialsCfg{
|
||||
Login: machineID,
|
||||
Password: password.String(),
|
||||
URL: apiURL,
|
||||
}
|
||||
apiConfigDump, err := yaml.Marshal(apiCfg)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to marshal api credentials: %s", err)
|
||||
}
|
||||
if dumpFile != "" && dumpFile != "-" {
|
||||
err = ioutil.WriteFile(dumpFile, apiConfigDump, 0644)
|
||||
if err != nil {
|
||||
log.Fatalf("write api credentials in '%s' failed: %s", dumpFile, err)
|
||||
}
|
||||
log.Printf("API credentials dumped to '%s'", dumpFile)
|
||||
} else {
|
||||
fmt.Printf("%s\n", string(apiConfigDump))
|
||||
}
|
||||
cscli machines add -f- --auto > /tmp/mycreds.yaml`,
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
return cli.add(args, string(password), dumpFile, apiURL, interactive, autoAdd, force)
|
||||
},
|
||||
}
|
||||
cmdMachinesAdd.Flags().StringVarP(&machinePassword, "password", "p", "", "machine password to login to the API")
|
||||
cmdMachinesAdd.Flags().StringVarP(&outputFile, "file", "f", "",
|
||||
"output file destination (defaults to "+csconfig.DefaultConfigPath("local_api_credentials.yaml"))
|
||||
cmdMachinesAdd.Flags().StringVarP(&apiURL, "url", "u", "", "URL of the local API")
|
||||
cmdMachinesAdd.Flags().BoolVarP(&interactive, "interactive", "i", false, "interfactive mode to enter the password")
|
||||
cmdMachinesAdd.Flags().BoolVarP(&autoAdd, "auto", "a", false, "automatically generate password (and username if not provided)")
|
||||
cmdMachinesAdd.Flags().BoolVar(&forceAdd, "force", false, "will force add the machine if it already exist")
|
||||
cmdMachines.AddCommand(cmdMachinesAdd)
|
||||
|
||||
var cmdMachinesDelete = &cobra.Command{
|
||||
Use: "delete --machine MyTestMachine",
|
||||
Short: "delete machines",
|
||||
Example: `cscli machines delete "machine_name"`,
|
||||
flags := cmd.Flags()
|
||||
flags.VarP(&password, "password", "p", "machine password to login to the API")
|
||||
flags.StringVarP(&dumpFile, "file", "f", "", "output file destination (defaults to "+csconfig.DefaultConfigPath("local_api_credentials.yaml")+")")
|
||||
flags.StringVarP(&apiURL, "url", "u", "", "URL of the local API")
|
||||
flags.BoolVarP(&interactive, "interactive", "i", false, "interfactive mode to enter the password")
|
||||
flags.BoolVarP(&autoAdd, "auto", "a", false, "automatically generate password (and username if not provided)")
|
||||
flags.BoolVar(&force, "force", false, "will force add the machine if it already exist")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliMachines) add(args []string, machinePassword string, dumpFile string, apiURL string, interactive bool, autoAdd bool, force bool) error {
|
||||
var (
|
||||
err error
|
||||
machineID string
|
||||
)
|
||||
|
||||
// create machineID if not specified by user
|
||||
if len(args) == 0 {
|
||||
if !autoAdd {
|
||||
return errors.New("please specify a machine name to add, or use --auto")
|
||||
}
|
||||
|
||||
machineID, err = generateID("")
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to generate machine id: %w", err)
|
||||
}
|
||||
} else {
|
||||
machineID = args[0]
|
||||
}
|
||||
|
||||
clientCfg := cli.cfg().API.Client
|
||||
serverCfg := cli.cfg().API.Server
|
||||
|
||||
/*check if file already exists*/
|
||||
if dumpFile == "" && clientCfg != nil && clientCfg.CredentialsFilePath != "" {
|
||||
credFile := clientCfg.CredentialsFilePath
|
||||
// use the default only if the file does not exist
|
||||
_, err = os.Stat(credFile)
|
||||
|
||||
switch {
|
||||
case os.IsNotExist(err) || force:
|
||||
dumpFile = credFile
|
||||
case err != nil:
|
||||
return fmt.Errorf("unable to stat '%s': %w", credFile, err)
|
||||
default:
|
||||
return fmt.Errorf(`credentials file '%s' already exists: please remove it, use "--force" or specify a different file with "-f" ("-f -" for standard output)`, credFile)
|
||||
}
|
||||
}
|
||||
|
||||
if dumpFile == "" {
|
||||
return errors.New(`please specify a file to dump credentials to, with -f ("-f -" for standard output)`)
|
||||
}
|
||||
|
||||
// create a password if it's not specified by user
|
||||
if machinePassword == "" && !interactive {
|
||||
if !autoAdd {
|
||||
return errors.New("please specify a password with --password or use --auto")
|
||||
}
|
||||
|
||||
machinePassword = generatePassword(passwordLength)
|
||||
} else if machinePassword == "" && interactive {
|
||||
qs := &survey.Password{
|
||||
Message: "Please provide a password for the machine:",
|
||||
}
|
||||
survey.AskOne(qs, &machinePassword)
|
||||
}
|
||||
|
||||
password := strfmt.Password(machinePassword)
|
||||
|
||||
_, err = cli.db.CreateMachine(&machineID, &password, "", true, force, types.PasswordAuthType)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to create machine: %w", err)
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "Machine '%s' successfully added to the local API.\n", machineID)
|
||||
|
||||
if apiURL == "" {
|
||||
if clientCfg != nil && clientCfg.Credentials != nil && clientCfg.Credentials.URL != "" {
|
||||
apiURL = clientCfg.Credentials.URL
|
||||
} else if serverCfg.ClientURL() != "" {
|
||||
apiURL = serverCfg.ClientURL()
|
||||
} else {
|
||||
return errors.New("unable to dump an api URL. Please provide it in your configuration or with the -u parameter")
|
||||
}
|
||||
}
|
||||
|
||||
apiCfg := csconfig.ApiCredentialsCfg{
|
||||
Login: machineID,
|
||||
Password: password.String(),
|
||||
URL: apiURL,
|
||||
}
|
||||
|
||||
apiConfigDump, err := yaml.Marshal(apiCfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to marshal api credentials: %w", err)
|
||||
}
|
||||
|
||||
if dumpFile != "" && dumpFile != "-" {
|
||||
if err = os.WriteFile(dumpFile, apiConfigDump, 0o600); err != nil {
|
||||
return fmt.Errorf("write api credentials in '%s' failed: %w", dumpFile, err)
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "API credentials written to '%s'.\n", dumpFile)
|
||||
} else {
|
||||
fmt.Print(string(apiConfigDump))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliMachines) deleteValid(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||
machines, err := cli.db.ListMachines()
|
||||
if err != nil {
|
||||
cobra.CompError("unable to list machines " + err.Error())
|
||||
}
|
||||
|
||||
ret := []string{}
|
||||
|
||||
for _, machine := range machines {
|
||||
if strings.Contains(machine.MachineId, toComplete) && !slices.Contains(args, machine.MachineId) {
|
||||
ret = append(ret, machine.MachineId)
|
||||
}
|
||||
}
|
||||
|
||||
return ret, cobra.ShellCompDirectiveNoFileComp
|
||||
}
|
||||
|
||||
func (cli *cliMachines) delete(machines []string) error {
|
||||
for _, machineID := range machines {
|
||||
if err := cli.db.DeleteWatcher(machineID); err != nil {
|
||||
log.Errorf("unable to delete machine '%s': %s", machineID, err)
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Infof("machine '%s' deleted successfully", machineID)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliMachines) newDeleteCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "delete [machine_name]...",
|
||||
Short: "delete machine(s) by name",
|
||||
Example: `cscli machines delete "machine1" "machine2"`,
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
Aliases: []string{"remove"},
|
||||
DisableAutoGenTag: true,
|
||||
PreRun: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
dbClient, err = database.NewClient(csConfig.DbConfig)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to create new database client: %s", err)
|
||||
}
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
machineID = args[0]
|
||||
for _, machineID := range args {
|
||||
err := dbClient.DeleteWatcher(machineID)
|
||||
if err != nil {
|
||||
log.Errorf("unable to delete machine: %s", err)
|
||||
return
|
||||
}
|
||||
log.Infof("machine '%s' deleted successfully", machineID)
|
||||
}
|
||||
ValidArgsFunction: cli.deleteValid,
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
return cli.delete(args)
|
||||
},
|
||||
}
|
||||
cmdMachinesDelete.Flags().StringVarP(&machineID, "machine", "m", "", "machine to delete")
|
||||
cmdMachines.AddCommand(cmdMachinesDelete)
|
||||
|
||||
var cmdMachinesValidate = &cobra.Command{
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliMachines) prune(duration time.Duration, notValidOnly bool, force bool) error {
|
||||
if duration < 2*time.Minute && !notValidOnly {
|
||||
if yes, err := askYesNo(
|
||||
"The duration you provided is less than 2 minutes. " +
|
||||
"This can break installations if the machines are only temporarily disconnected. Continue?", false); err != nil {
|
||||
return err
|
||||
} else if !yes {
|
||||
fmt.Println("User aborted prune. No changes were made.")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
machines := []*ent.Machine{}
|
||||
if pending, err := cli.db.QueryPendingMachine(); err == nil {
|
||||
machines = append(machines, pending...)
|
||||
}
|
||||
|
||||
if !notValidOnly {
|
||||
if pending, err := cli.db.QueryLastValidatedHeartbeatLT(time.Now().UTC().Add(-duration)); err == nil {
|
||||
machines = append(machines, pending...)
|
||||
}
|
||||
}
|
||||
|
||||
if len(machines) == 0 {
|
||||
fmt.Println("No machines to prune.")
|
||||
return nil
|
||||
}
|
||||
|
||||
getAgentsTable(color.Output, machines)
|
||||
|
||||
if !force {
|
||||
if yes, err := askYesNo(
|
||||
"You are about to PERMANENTLY remove the above machines from the database. " +
|
||||
"These will NOT be recoverable. Continue?", false); err != nil {
|
||||
return err
|
||||
} else if !yes {
|
||||
fmt.Println("User aborted prune. No changes were made.")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
deleted, err := cli.db.BulkDeleteWatchers(machines)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to prune machines: %w", err)
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "successfully delete %d machines\n", deleted)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliMachines) newPruneCmd() *cobra.Command {
|
||||
var (
|
||||
duration time.Duration
|
||||
notValidOnly bool
|
||||
force bool
|
||||
)
|
||||
|
||||
const defaultDuration = 10 * time.Minute
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "prune",
|
||||
Short: "prune multiple machines from the database",
|
||||
Long: `prune multiple machines that are not validated or have not connected to the local API in a given duration.`,
|
||||
Example: `cscli machines prune
|
||||
cscli machines prune --duration 1h
|
||||
cscli machines prune --not-validated-only --force`,
|
||||
Args: cobra.NoArgs,
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.prune(duration, notValidOnly, force)
|
||||
},
|
||||
}
|
||||
|
||||
flags := cmd.Flags()
|
||||
flags.DurationVarP(&duration, "duration", "d", defaultDuration, "duration of time since validated machine last heartbeat")
|
||||
flags.BoolVar(¬ValidOnly, "not-validated-only", false, "only prune machines that are not validated")
|
||||
flags.BoolVar(&force, "force", false, "force prune without asking for confirmation")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliMachines) validate(machineID string) error {
|
||||
if err := cli.db.ValidateMachine(machineID); err != nil {
|
||||
return fmt.Errorf("unable to validate machine '%s': %w", machineID, err)
|
||||
}
|
||||
|
||||
log.Infof("machine '%s' validated successfully", machineID)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliMachines) newValidateCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "validate",
|
||||
Short: "validate a machine to access the local API",
|
||||
Long: `validate a machine to access the local API.`,
|
||||
Example: `cscli machines validate "machine_name"`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
PreRun: func(cmd *cobra.Command, args []string) {
|
||||
var err error
|
||||
dbClient, err = database.NewClient(csConfig.DbConfig)
|
||||
if err != nil {
|
||||
log.Fatalf("unable to create new database client: %s", err)
|
||||
}
|
||||
},
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
machineID = args[0]
|
||||
if err := dbClient.ValidateMachine(machineID); err != nil {
|
||||
log.Fatalf("unable to validate machine '%s': %s", machineID, err)
|
||||
}
|
||||
log.Infof("machine '%s' validated successfully", machineID)
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
return cli.validate(args[0])
|
||||
},
|
||||
}
|
||||
cmdMachines.AddCommand(cmdMachinesValidate)
|
||||
|
||||
return cmdMachines
|
||||
return cmd
|
||||
}
|
||||
|
|
34
cmd/crowdsec-cli/machines_table.go
Normal file
|
@@ -0,0 +1,34 @@
package main

import (
"io"
"time"

"github.com/aquasecurity/table"

"github.com/crowdsecurity/crowdsec/pkg/database/ent"
"github.com/crowdsecurity/crowdsec/pkg/emoji"
)

func getAgentsTable(out io.Writer, machines []*ent.Machine) {
t := newLightTable(out)
t.SetHeaders("Name", "IP Address", "Last Update", "Status", "Version", "Auth Type", "Last Heartbeat")
t.SetHeaderAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)

for _, m := range machines {
validated := emoji.Prohibited
if m.IsValidated {
validated = emoji.CheckMark
}

hb, active := getLastHeartbeat(m)
if !active {
hb = emoji.Warning + " " + hb
}

t.AddRow(m.MachineId, m.IpAddress, m.UpdatedAt.Format(time.RFC3339), validated, m.Version, m.AuthType, hb)
}

t.Render()
}
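
For context (not part of the diff): the warning emoji added to the heartbeat column follows the same two-minute staleness rule implemented in getLastHeartbeat earlier in this changeset. A minimal standalone sketch of that rule, with an illustrative function name:

```go
package main

import (
	"fmt"
	"time"
)

// heartbeatStatus mirrors the rule used by getLastHeartbeat above:
// a nil heartbeat is shown as "-", and a machine is considered
// active only if its last heartbeat is at most two minutes old.
// The function name is illustrative, not part of the diff.
func heartbeatStatus(last *time.Time) (string, bool) {
	if last == nil {
		return "-", false
	}

	elapsed := time.Now().UTC().Sub(*last)
	hb := elapsed.Truncate(time.Second).String()

	return hb, elapsed <= 2*time.Minute
}

func main() {
	t := time.Now().UTC().Add(-90 * time.Second)
	hb, active := heartbeatStatus(&t)
	fmt.Println(hb, active) // e.g. "1m30s true"
}
```
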
|
|
@@ -3,77 +3,135 @@ package main
|
|||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/cwversion"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/database"
|
||||
"slices"
|
||||
"time"
|
||||
|
||||
"github.com/fatih/color"
|
||||
cc "github.com/ivanpirog/coloredcobra"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/cobra/doc"
|
||||
|
||||
"github.com/crowdsecurity/go-cs-lib/trace"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/database"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/fflag"
|
||||
)
|
||||
|
||||
var trace_lvl, dbg_lvl, nfo_lvl, wrn_lvl, err_lvl bool
|
||||
var (
|
||||
ConfigFilePath string
|
||||
csConfig *csconfig.Config
|
||||
dbClient *database.Client
|
||||
)
|
||||
|
||||
var ConfigFilePath string
|
||||
var csConfig *csconfig.Config
|
||||
var dbClient *database.Client
|
||||
type configGetter func() *csconfig.Config
|
||||
|
||||
var OutputFormat string
|
||||
var mergedConfig string
|
||||
|
||||
var downloadOnly bool
|
||||
var forceAction bool
|
||||
var purge bool
|
||||
var all bool
|
||||
var restoreOldBackup bool
|
||||
type cliRoot struct {
|
||||
logTrace bool
|
||||
logDebug bool
|
||||
logInfo bool
|
||||
logWarn bool
|
||||
logErr bool
|
||||
outputColor string
|
||||
outputFormat string
|
||||
// flagBranch overrides the value in csConfig.Cscli.HubBranch
|
||||
flagBranch string
|
||||
}
|
||||
|
||||
var prometheusURL string
|
||||
func newCliRoot() *cliRoot {
|
||||
return &cliRoot{}
|
||||
}
|
||||
|
||||
func initConfig() {
|
||||
var err error
|
||||
if trace_lvl {
|
||||
log.SetLevel(log.TraceLevel)
|
||||
} else if dbg_lvl {
|
||||
log.SetLevel(log.DebugLevel)
|
||||
} else if nfo_lvl {
|
||||
log.SetLevel(log.InfoLevel)
|
||||
} else if wrn_lvl {
|
||||
log.SetLevel(log.WarnLevel)
|
||||
} else if err_lvl {
|
||||
log.SetLevel(log.ErrorLevel)
|
||||
// cfg() is a helper function to get the configuration loaded from config.yaml,
|
||||
// we pass it to subcommands because the file is not read until the Execute() call
|
||||
func (cli *cliRoot) cfg() *csconfig.Config {
|
||||
return csConfig
|
||||
}
|
||||
|
||||
// wantedLogLevel returns the log level requested in the command line flags.
|
||||
func (cli *cliRoot) wantedLogLevel() log.Level {
|
||||
switch {
|
||||
case cli.logTrace:
|
||||
return log.TraceLevel
|
||||
case cli.logDebug:
|
||||
return log.DebugLevel
|
||||
case cli.logInfo:
|
||||
return log.InfoLevel
|
||||
case cli.logWarn:
|
||||
return log.WarnLevel
|
||||
case cli.logErr:
|
||||
return log.ErrorLevel
|
||||
default:
|
||||
return log.InfoLevel
|
||||
}
|
||||
logFormatter := &log.TextFormatter{TimestampFormat: "02-01-2006 03:04:05 PM", FullTimestamp: true}
|
||||
log.SetFormatter(logFormatter)
|
||||
csConfig, err = csconfig.NewConfig(ConfigFilePath, false, false)
|
||||
if err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
log.Debugf("Using %s as configuration file", ConfigFilePath)
|
||||
if err := csConfig.LoadCSCLI(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
|
||||
// loadConfigFor loads the configuration file for the given sub-command.
|
||||
// If the sub-command does not need it, it returns a default configuration.
|
||||
func loadConfigFor(command string) (*csconfig.Config, string, error) {
|
||||
noNeedConfig := []string{
|
||||
"doc",
|
||||
"help",
|
||||
"completion",
|
||||
"version",
|
||||
"hubtest",
|
||||
}
|
||||
|
||||
if csConfig.Cscli == nil {
|
||||
log.Fatalf("missing 'cscli' configuration in '%s', exiting", ConfigFilePath)
|
||||
}
|
||||
if !slices.Contains(noNeedConfig, command) {
|
||||
log.Debugf("Using %s as configuration file", ConfigFilePath)
|
||||
|
||||
if cwhub.HubBranch == "" && csConfig.Cscli.HubBranch != "" {
|
||||
cwhub.HubBranch = csConfig.Cscli.HubBranch
|
||||
}
|
||||
if OutputFormat != "" {
|
||||
csConfig.Cscli.Output = OutputFormat
|
||||
if OutputFormat != "json" && OutputFormat != "raw" && OutputFormat != "human" {
|
||||
log.Fatalf("output format %s unknown", OutputFormat)
|
||||
config, merged, err := csconfig.NewConfig(ConfigFilePath, false, false, true)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
// set up directory for trace files
|
||||
if err := trace.Init(filepath.Join(config.ConfigPaths.DataDir, "trace")); err != nil {
|
||||
return nil, "", fmt.Errorf("while setting up trace directory: %w", err)
|
||||
}
|
||||
|
||||
return config, merged, nil
|
||||
}
|
||||
|
||||
return csconfig.NewDefaultConfig(), "", nil
|
||||
}
|
||||
|
||||
// initialize is called before the subcommand is executed.
|
||||
func (cli *cliRoot) initialize() {
|
||||
var err error
|
||||
|
||||
log.SetLevel(cli.wantedLogLevel())
|
||||
|
||||
csConfig, mergedConfig, err = loadConfigFor(os.Args[1])
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// recap of the enabled feature flags, because logging
|
||||
// was not enabled when we set them from envvars
|
||||
if fflist := csconfig.ListFeatureFlags(); fflist != "" {
|
||||
log.Debugf("Enabled feature flags: %s", fflist)
|
||||
}
|
||||
|
||||
if cli.flagBranch != "" {
|
||||
csConfig.Cscli.HubBranch = cli.flagBranch
|
||||
}
|
||||
|
||||
if cli.outputFormat != "" {
|
||||
csConfig.Cscli.Output = cli.outputFormat
|
||||
}
|
||||
|
||||
if csConfig.Cscli.Output == "" {
|
||||
csConfig.Cscli.Output = "human"
|
||||
}
|
||||
|
||||
if csConfig.Cscli.Output != "human" && csConfig.Cscli.Output != "json" && csConfig.Cscli.Output != "raw" {
|
||||
log.Fatalf("output format '%s' not supported: must be one of human, json, raw", csConfig.Cscli.Output)
|
||||
}
|
||||
|
||||
if csConfig.Cscli.Output == "json" {
|
||||
log.SetFormatter(&log.JSONFormatter{})
|
||||
log.SetLevel(log.ErrorLevel)
|
||||
|
@@ -81,107 +139,145 @@ func initConfig() {
|
|||
log.SetLevel(log.ErrorLevel)
|
||||
}
|
||||
|
||||
if cli.outputColor != "" {
|
||||
csConfig.Cscli.Color = cli.outputColor
|
||||
|
||||
if cli.outputColor != "yes" && cli.outputColor != "no" && cli.outputColor != "auto" {
|
||||
log.Fatalf("output color %s unknown", cli.outputColor)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// list of valid subcommands for the shell completion
|
||||
var validArgs = []string{
|
||||
"scenarios", "parsers", "collections", "capi", "lapi", "postoverflows", "machines",
|
||||
"metrics", "bouncers", "alerts", "decisions", "simulation", "hub", "dashboard",
|
||||
"config", "completion", "version", "console",
|
||||
"alerts", "appsec-configs", "appsec-rules", "bouncers", "capi", "collections",
|
||||
"completion", "config", "console", "contexts", "dashboard", "decisions", "explain",
|
||||
"hub", "hubtest", "lapi", "machines", "metrics", "notifications", "parsers",
|
||||
"postoverflows", "scenarios", "simulation", "support", "version",
|
||||
}
|
||||
|
||||
func prepender(filename string) string {
|
||||
const header = `---
|
||||
id: %s
|
||||
title: %s
|
||||
---
|
||||
`
|
||||
name := filepath.Base(filename)
|
||||
base := strings.TrimSuffix(name, path.Ext(name))
|
||||
return fmt.Sprintf(header, base, strings.Replace(base, "_", " ", -1))
|
||||
func (cli *cliRoot) colorize(cmd *cobra.Command) {
|
||||
cc.Init(&cc.Config{
|
||||
RootCmd: cmd,
|
||||
Headings: cc.Yellow,
|
||||
Commands: cc.Green + cc.Bold,
|
||||
CmdShortDescr: cc.Cyan,
|
||||
Example: cc.Italic,
|
||||
ExecName: cc.Bold,
|
||||
Aliases: cc.Bold + cc.Italic,
|
||||
FlagsDataType: cc.White,
|
||||
Flags: cc.Green,
|
||||
FlagsDescr: cc.Cyan,
|
||||
NoExtraNewlines: true,
|
||||
NoBottomNewline: true,
|
||||
})
|
||||
cmd.SetOut(color.Output)
|
||||
}
|
||||
|
||||
func linkHandler(name string) string {
|
||||
return fmt.Sprintf("/cscli/%s", name)
|
||||
}
|
||||
func (cli *cliRoot) NewCommand() *cobra.Command {
|
||||
// set the formatter asap and worry about level later
|
||||
logFormatter := &log.TextFormatter{TimestampFormat: time.RFC3339, FullTimestamp: true}
|
||||
log.SetFormatter(logFormatter)
|
||||
|
||||
func main() {
|
||||
if err := fflag.RegisterAllFeatures(); err != nil {
|
||||
log.Fatalf("failed to register features: %s", err)
|
||||
}
|
||||
|
||||
var rootCmd = &cobra.Command{
|
||||
if err := csconfig.LoadFeatureFlagsEnv(log.StandardLogger()); err != nil {
|
||||
log.Fatalf("failed to set feature flags from env: %s", err)
|
||||
}
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "cscli",
|
||||
Short: "cscli allows you to manage crowdsec",
|
||||
Long: `cscli is the main command to interact with your crowdsec service, scenarios & db.
|
||||
It is meant to allow you to manage bans, parsers/scenarios/etc, api and generally manage you crowdsec setup.`,
|
||||
ValidArgs: validArgs,
|
||||
DisableAutoGenTag: true,
|
||||
SilenceErrors: true,
|
||||
SilenceUsage: true,
|
||||
/*TBD examples*/
|
||||
}
|
||||
var cmdDocGen = &cobra.Command{
|
||||
Use: "doc",
|
||||
Short: "Generate the documentation in `./doc/`. Directory must exist.",
|
||||
Args: cobra.ExactArgs(0),
|
||||
Hidden: true,
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if err := doc.GenMarkdownTreeCustom(rootCmd, "./doc/", prepender, linkHandler); err != nil {
|
||||
log.Fatalf("Failed to generate cobra doc: %s", err.Error())
|
||||
}
|
||||
},
|
||||
}
|
||||
rootCmd.AddCommand(cmdDocGen)
|
||||
/*usage*/
|
||||
var cmdVersion = &cobra.Command{
|
||||
Use: "version",
|
||||
Short: "Display version and exit.",
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
cwversion.Show()
|
||||
},
|
||||
}
|
||||
rootCmd.AddCommand(cmdVersion)
|
||||
|
||||
rootCmd.PersistentFlags().StringVarP(&ConfigFilePath, "config", "c", csconfig.DefaultConfigPath("config.yaml"), "path to crowdsec config file")
|
||||
rootCmd.PersistentFlags().StringVarP(&OutputFormat, "output", "o", "", "Output format : human, json, raw.")
|
||||
rootCmd.PersistentFlags().BoolVar(&dbg_lvl, "debug", false, "Set logging to debug.")
|
||||
rootCmd.PersistentFlags().BoolVar(&nfo_lvl, "info", false, "Set logging to info.")
|
||||
rootCmd.PersistentFlags().BoolVar(&wrn_lvl, "warning", false, "Set logging to warning.")
|
||||
rootCmd.PersistentFlags().BoolVar(&err_lvl, "error", false, "Set logging to error.")
|
||||
rootCmd.PersistentFlags().BoolVar(&trace_lvl, "trace", false, "Set logging to trace.")
|
||||
|
||||
rootCmd.PersistentFlags().StringVar(&cwhub.HubBranch, "branch", "", "Override hub branch on github")
|
||||
if err := rootCmd.PersistentFlags().MarkHidden("branch"); err != nil {
|
||||
log.Fatalf("failed to make branch hidden : %s", err)
|
||||
}
|
||||
|
||||
if len(os.Args) > 1 && os.Args[1] != "completion" && os.Args[1] != "version" && os.Args[1] != "help" {
|
||||
cobra.OnInitialize(initConfig)
|
||||
}
|
||||
cli.colorize(cmd)
|
||||
|
||||
/*don't sort flags so we can enforce order*/
|
||||
rootCmd.Flags().SortFlags = false
|
||||
rootCmd.PersistentFlags().SortFlags = false
|
||||
cmd.Flags().SortFlags = false
|
||||
|
||||
rootCmd.AddCommand(NewConfigCmd())
|
||||
rootCmd.AddCommand(NewHubCmd())
|
||||
rootCmd.AddCommand(NewMetricsCmd())
|
||||
rootCmd.AddCommand(NewDashboardCmd())
|
||||
rootCmd.AddCommand(NewDecisionsCmd())
|
||||
rootCmd.AddCommand(NewAlertsCmd())
|
||||
// rootCmd.AddCommand(NewInspectCmd())
|
||||
rootCmd.AddCommand(NewSimulationCmds())
|
||||
rootCmd.AddCommand(NewBouncersCmd())
|
||||
rootCmd.AddCommand(NewMachinesCmd())
|
||||
rootCmd.AddCommand(NewParsersCmd())
|
||||
rootCmd.AddCommand(NewScenariosCmd())
|
||||
rootCmd.AddCommand(NewCollectionsCmd())
|
||||
rootCmd.AddCommand(NewPostOverflowsCmd())
|
||||
rootCmd.AddCommand(NewCapiCmd())
|
||||
rootCmd.AddCommand(NewLapiCmd())
|
||||
rootCmd.AddCommand(NewCompletionCmd())
|
||||
rootCmd.AddCommand(NewConsoleCmd())
|
||||
rootCmd.AddCommand(NewExplainCmd())
|
||||
rootCmd.AddCommand(NewHubTestCmd())
|
||||
if err := rootCmd.Execute(); err != nil {
|
||||
log.Fatalf("While executing root command : %s", err)
|
||||
pflags := cmd.PersistentFlags()
|
||||
pflags.SortFlags = false
|
||||
|
||||
pflags.StringVarP(&ConfigFilePath, "config", "c", csconfig.DefaultConfigPath("config.yaml"), "path to crowdsec config file")
|
||||
pflags.StringVarP(&cli.outputFormat, "output", "o", "", "Output format: human, json, raw")
|
||||
pflags.StringVarP(&cli.outputColor, "color", "", "auto", "Output color: yes, no, auto")
|
||||
pflags.BoolVar(&cli.logDebug, "debug", false, "Set logging to debug")
|
||||
pflags.BoolVar(&cli.logInfo, "info", false, "Set logging to info")
|
||||
pflags.BoolVar(&cli.logWarn, "warning", false, "Set logging to warning")
|
||||
pflags.BoolVar(&cli.logErr, "error", false, "Set logging to error")
|
||||
pflags.BoolVar(&cli.logTrace, "trace", false, "Set logging to trace")
|
||||
pflags.StringVar(&cli.flagBranch, "branch", "", "Override hub branch on github")
|
||||
|
||||
if err := pflags.MarkHidden("branch"); err != nil {
|
||||
log.Fatalf("failed to hide flag: %s", err)
|
||||
}
|
||||
|
||||
// Look for "-c /path/to/config.yaml"
|
||||
// This duplicates the logic in cobra, but we need to do it before
|
||||
// because feature flags can change which subcommands are available.
|
||||
for i, arg := range os.Args {
|
||||
if arg == "-c" || arg == "--config" {
|
||||
if len(os.Args) > i+1 {
|
||||
ConfigFilePath = os.Args[i+1]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err := csconfig.LoadFeatureFlagsFile(ConfigFilePath, log.StandardLogger()); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
if len(os.Args) > 1 {
|
||||
cobra.OnInitialize(cli.initialize)
|
||||
}
|
||||
|
||||
cmd.AddCommand(NewCLIDoc().NewCommand(cmd))
|
||||
cmd.AddCommand(NewCLIVersion().NewCommand())
|
||||
cmd.AddCommand(NewCLIConfig(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIHub(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIMetrics(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIDashboard(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIDecisions(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIAlerts(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLISimulation(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIBouncers(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIMachines(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLICapi(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLILapi(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCompletionCmd())
|
||||
cmd.AddCommand(NewCLIConsole(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIExplain(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIHubTest(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLINotifications(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLISupport().NewCommand())
|
||||
cmd.AddCommand(NewCLIPapi(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLICollection(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIParser(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIScenario(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIPostOverflow(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIContext(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIAppsecConfig(cli.cfg).NewCommand())
|
||||
cmd.AddCommand(NewCLIAppsecRule(cli.cfg).NewCommand())
|
||||
|
||||
if fflag.CscliSetup.IsEnabled() {
|
||||
cmd.AddCommand(NewSetupCmd())
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func main() {
|
||||
cmd := newCliRoot().NewCommand()
|
||||
if err := cmd.Execute(); err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
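
A note on the wiring above (illustration only; the names below are hypothetical): every subcommand constructor receives cli.cfg, a configGetter (func() *csconfig.Config), so the configuration is resolved lazily when the command actually runs, after cobra.OnInitialize has loaded it. A minimal sketch of that pattern:

```go
package main

import "fmt"

// exampleConfig stands in for *csconfig.Config; all names in this
// sketch are hypothetical, not taken from the diff.
type exampleConfig struct {
	Output string
}

type exampleGetter func() *exampleConfig

type cliExample struct {
	cfg exampleGetter
}

func newCLIExample(cfg exampleGetter) *cliExample {
	return &cliExample{cfg: cfg}
}

func (c *cliExample) run() {
	// The getter is only called at run time, so it sees the config
	// loaded by the initialization hook, not a stale copy.
	fmt.Println("output format:", c.cfg().Output)
}

func main() {
	var loaded *exampleConfig

	cli := newCLIExample(func() *exampleConfig { return loaded })

	// Simulate the initialization step that runs before the subcommand.
	loaded = &exampleConfig{Output: "human"}

	cli.run()
}
```
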
|
||||
|
|
|
@@ -1,13 +0,0 @@
//go:build testrunmain

package main

import (
"github.com/confluentinc/bincover"

"testing"
)

func TestBincoverRunMain(t *testing.T) {
bincover.RunTest(main)
}
|
@@ -5,21 +5,19 @@ import (
"runtime"
)

const (
ReloadMessageFormat = `Run '%s' for the new configuration to be effective.`
ReloadCmdLinux = `sudo systemctl reload crowdsec`
ReloadCmdFreebsd = `sudo service crowdsec reload`
)

// ReloadMessage returns a description of the task required to reload
// the crowdsec configuration, according to the operating system.
func ReloadMessage() string {
var msg string

var reloadCmd string

if runtime.GOOS == "freebsd" {
reloadCmd = ReloadCmdFreebsd
} else {
reloadCmd = ReloadCmdLinux
switch runtime.GOOS {
case "windows":
msg = "Please restart the crowdsec service"
case "freebsd":
msg = `Run 'sudo service crowdsec reload'`
default:
msg = `Run 'sudo systemctl reload crowdsec'`
}

return fmt.Sprintf(ReloadMessageFormat, reloadCmd)
return fmt.Sprintf("%s for the new configuration to be effective.", msg)
}
|
|
|
@@ -2,95 +2,80 @@ package main
|
|||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"gopkg.in/yaml.v2"
|
||||
|
||||
"github.com/olekukonko/tablewriter"
|
||||
"github.com/fatih/color"
|
||||
dto "github.com/prometheus/client_model/go"
|
||||
"github.com/prometheus/prom2json"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/crowdsecurity/go-cs-lib/maptools"
|
||||
"github.com/crowdsecurity/go-cs-lib/trace"
|
||||
)
|
||||
|
||||
func lapiMetricsToTable(table *tablewriter.Table, stats map[string]map[string]map[string]int) error {
|
||||
|
||||
//stats : machine -> route -> method -> count
|
||||
/*we want consistent display order*/
|
||||
machineKeys := []string{}
|
||||
for k := range stats {
|
||||
machineKeys = append(machineKeys, k)
|
||||
type (
|
||||
statAcquis map[string]map[string]int
|
||||
statParser map[string]map[string]int
|
||||
statBucket map[string]map[string]int
|
||||
statWhitelist map[string]map[string]map[string]int
|
||||
statLapi map[string]map[string]int
|
||||
statLapiMachine map[string]map[string]map[string]int
|
||||
statLapiBouncer map[string]map[string]map[string]int
|
||||
statLapiDecision map[string]struct {
|
||||
NonEmpty int
|
||||
Empty int
|
||||
}
|
||||
sort.Strings(machineKeys)
|
||||
|
||||
for _, machine := range machineKeys {
|
||||
//oneRow : route -> method -> count
|
||||
machineRow := stats[machine]
|
||||
for routeName, route := range machineRow {
|
||||
for methodName, count := range route {
|
||||
row := []string{}
|
||||
row = append(row, machine)
|
||||
row = append(row, routeName)
|
||||
row = append(row, methodName)
|
||||
if count != 0 {
|
||||
row = append(row, fmt.Sprintf("%d", count))
|
||||
} else {
|
||||
row = append(row, "-")
|
||||
}
|
||||
table.Append(row)
|
||||
}
|
||||
}
|
||||
statDecision map[string]map[string]map[string]int
|
||||
statAppsecEngine map[string]map[string]int
|
||||
statAppsecRule map[string]map[string]map[string]int
|
||||
statAlert map[string]int
|
||||
statStash map[string]struct {
|
||||
Type string
|
||||
Count int
|
||||
}
|
||||
return nil
|
||||
)
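
The statXxx types above replace the old pattern of checking whether a nested map entry exists before incrementing it. Their Process methods are not shown in this excerpt, so the following is only a sketch of what such a helper could look like, under that assumption:

```go
package main

import "fmt"

// statBucketSketch has the same shape as statBucket above
// (map[string]map[string]int); the Process body is an assumption,
// not the implementation from this changeset.
type statBucketSketch map[string]map[string]int

func (s statBucketSketch) Process(bucket, metric string, val int) {
	if _, ok := s[bucket]; !ok {
		s[bucket] = make(map[string]int)
	}

	s[bucket][metric] += val
}

func main() {
	s := statBucketSketch{}
	s.Process("ssh-bf", "overflow", 3)
	s.Process("ssh-bf", "overflow", 2)
	fmt.Println(s["ssh-bf"]["overflow"]) // 5
}
```
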
|
||||
|
||||
var (
|
||||
ErrMissingConfig = errors.New("prometheus section missing, can't show metrics")
|
||||
ErrMetricsDisabled = errors.New("prometheus is not enabled, can't show metrics")
|
||||
)
|
||||
|
||||
type metricSection interface {
|
||||
Table(out io.Writer, noUnit bool, showEmpty bool)
|
||||
Description() (string, string)
|
||||
}
|
||||
|
||||
func metricsToTable(table *tablewriter.Table, stats map[string]map[string]int, keys []string) error {
|
||||
type metricStore map[string]metricSection
|
||||
|
||||
var sortedKeys []string
|
||||
|
||||
if table == nil {
|
||||
return fmt.Errorf("nil table")
|
||||
func NewMetricStore() metricStore {
|
||||
return metricStore{
|
||||
"acquisition": statAcquis{},
|
||||
"scenarios": statBucket{},
|
||||
"parsers": statParser{},
|
||||
"lapi": statLapi{},
|
||||
"lapi-machine": statLapiMachine{},
|
||||
"lapi-bouncer": statLapiBouncer{},
|
||||
"lapi-decisions": statLapiDecision{},
|
||||
"decisions": statDecision{},
|
||||
"alerts": statAlert{},
|
||||
"stash": statStash{},
|
||||
"appsec-engine": statAppsecEngine{},
|
||||
"appsec-rule": statAppsecRule{},
|
||||
"whitelists": statWhitelist{},
|
||||
}
|
||||
//sort keys to keep consistent order when printing
|
||||
sortedKeys = []string{}
|
||||
for akey := range stats {
|
||||
sortedKeys = append(sortedKeys, akey)
|
||||
}
|
||||
sort.Strings(sortedKeys)
|
||||
//
|
||||
for _, alabel := range sortedKeys {
|
||||
astats, ok := stats[alabel]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
row := []string{}
|
||||
row = append(row, alabel) //name
|
||||
for _, sl := range keys {
|
||||
if v, ok := astats[sl]; ok && v != 0 {
|
||||
numberToShow := fmt.Sprintf("%d", v)
|
||||
if !noUnit {
|
||||
numberToShow = formatNumber(v)
|
||||
}
|
||||
row = append(row, numberToShow)
|
||||
} else {
|
||||
row = append(row, "-")
|
||||
}
|
||||
}
|
||||
table.Append(row)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
/*This is a complete rip from prom2json*/
|
||||
func ShowPrometheus(url string) {
|
||||
func (ms metricStore) Fetch(url string) error {
|
||||
mfChan := make(chan *dto.MetricFamily, 1024)
|
||||
errChan := make(chan error, 1)
|
||||
|
||||
// Start with the DefaultTransport for sane defaults.
|
||||
transport := http.DefaultTransport.(*http.Transport).Clone()
|
||||
|
@@ -99,43 +84,62 @@ func ShowPrometheus(url string) {
|
|||
transport.DisableKeepAlives = true
|
||||
// Timeout early if the server doesn't even return the headers.
|
||||
transport.ResponseHeaderTimeout = time.Minute
|
||||
|
||||
go func() {
|
||||
defer types.CatchPanic("crowdsec/ShowPrometheus")
|
||||
defer trace.CatchPanic("crowdsec/ShowPrometheus")
|
||||
|
||||
err := prom2json.FetchMetricFamilies(url, mfChan, transport)
|
||||
if err != nil {
|
||||
log.Fatalf("failed to fetch prometheus metrics : %v", err)
|
||||
errChan <- fmt.Errorf("failed to fetch metrics: %w", err)
|
||||
return
|
||||
}
|
||||
errChan <- nil
|
||||
}()
|
||||
|
||||
result := []*prom2json.Family{}
|
||||
for mf := range mfChan {
|
||||
result = append(result, prom2json.NewFamily(mf))
|
||||
}
|
||||
log.Debugf("Finished reading prometheus output, %d entries", len(result))
|
||||
|
||||
if err := <-errChan; err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
log.Debugf("Finished reading metrics output, %d entries", len(result))
|
||||
/*walk*/
|
||||
lapi_decisions_stats := map[string]struct {
|
||||
NonEmpty int
|
||||
Empty int
|
||||
}{}
|
||||
acquis_stats := map[string]map[string]int{}
|
||||
parsers_stats := map[string]map[string]int{}
|
||||
buckets_stats := map[string]map[string]int{}
|
||||
lapi_stats := map[string]map[string]int{}
|
||||
lapi_machine_stats := map[string]map[string]map[string]int{}
|
||||
lapi_bouncer_stats := map[string]map[string]map[string]int{}
|
||||
|
||||
mAcquis := ms["acquisition"].(statAcquis)
|
||||
mParser := ms["parsers"].(statParser)
|
||||
mBucket := ms["scenarios"].(statBucket)
|
||||
mLapi := ms["lapi"].(statLapi)
|
||||
mLapiMachine := ms["lapi-machine"].(statLapiMachine)
|
||||
mLapiBouncer := ms["lapi-bouncer"].(statLapiBouncer)
|
||||
mLapiDecision := ms["lapi-decisions"].(statLapiDecision)
|
||||
mDecision := ms["decisions"].(statDecision)
|
||||
mAppsecEngine := ms["appsec-engine"].(statAppsecEngine)
|
||||
mAppsecRule := ms["appsec-rule"].(statAppsecRule)
|
||||
mAlert := ms["alerts"].(statAlert)
|
||||
mStash := ms["stash"].(statStash)
|
||||
mWhitelist := ms["whitelists"].(statWhitelist)
|
||||
|
||||
for idx, fam := range result {
|
||||
if !strings.HasPrefix(fam.Name, "cs_") {
|
||||
continue
|
||||
}
|
||||
|
||||
log.Tracef("round %d", idx)
|
||||
|
||||
for _, m := range fam.Metrics {
|
||||
metric := m.(prom2json.Metric)
|
||||
metric, ok := m.(prom2json.Metric)
|
||||
if !ok {
|
||||
log.Debugf("failed to convert metric to prom2json.Metric")
|
||||
continue
|
||||
}
|
||||
|
||||
name, ok := metric.Labels["name"]
|
||||
if !ok {
|
||||
log.Debugf("no name in Metric %v", metric.Labels)
|
||||
}
|
||||
|
||||
source, ok := metric.Labels["source"]
|
||||
if !ok {
|
||||
log.Debugf("no source in Metric %v for %s", metric.Labels, fam.Name)
|
||||
|
@@ -152,272 +156,342 @@ func ShowPrometheus(url string) {
|
|||
route := metric.Labels["route"]
|
||||
method := metric.Labels["method"]
|
||||
|
||||
reason := metric.Labels["reason"]
|
||||
origin := metric.Labels["origin"]
|
||||
action := metric.Labels["action"]
|
||||
|
||||
appsecEngine := metric.Labels["appsec_engine"]
|
||||
appsecRule := metric.Labels["rule_name"]
|
||||
|
||||
mtype := metric.Labels["type"]
|
||||
|
||||
fval, err := strconv.ParseFloat(value, 32)
|
||||
if err != nil {
|
||||
log.Errorf("Unexpected int value %s : %s", value, err)
|
||||
}
|
||||
|
||||
ival := int(fval)
|
||||
|
||||
switch fam.Name {
|
||||
/*buckets*/
|
||||
//
|
||||
// buckets
|
||||
//
|
||||
case "cs_bucket_created_total":
|
||||
if _, ok := buckets_stats[name]; !ok {
|
||||
buckets_stats[name] = make(map[string]int)
|
||||
}
|
||||
buckets_stats[name]["instanciation"] += ival
|
||||
mBucket.Process(name, "instantiation", ival)
|
||||
case "cs_buckets":
|
||||
if _, ok := buckets_stats[name]; !ok {
|
||||
buckets_stats[name] = make(map[string]int)
|
||||
}
|
||||
buckets_stats[name]["curr_count"] += ival
|
||||
mBucket.Process(name, "curr_count", ival)
|
||||
case "cs_bucket_overflowed_total":
|
||||
if _, ok := buckets_stats[name]; !ok {
|
||||
buckets_stats[name] = make(map[string]int)
|
||||
}
|
||||
buckets_stats[name]["overflow"] += ival
|
||||
mBucket.Process(name, "overflow", ival)
|
||||
case "cs_bucket_poured_total":
|
||||
if _, ok := buckets_stats[name]; !ok {
|
||||
buckets_stats[name] = make(map[string]int)
|
||||
}
|
||||
if _, ok := acquis_stats[source]; !ok {
|
||||
acquis_stats[source] = make(map[string]int)
|
||||
}
|
||||
buckets_stats[name]["pour"] += ival
|
||||
acquis_stats[source]["pour"] += ival
|
||||
mBucket.Process(name, "pour", ival)
|
||||
mAcquis.Process(source, "pour", ival)
|
||||
case "cs_bucket_underflowed_total":
|
||||
if _, ok := buckets_stats[name]; !ok {
|
||||
buckets_stats[name] = make(map[string]int)
|
||||
}
|
||||
buckets_stats[name]["underflow"] += ival
|
||||
/*acquis*/
|
||||
mBucket.Process(name, "underflow", ival)
|
||||
//
|
||||
// parsers
|
||||
//
|
||||
case "cs_parser_hits_total":
|
||||
if _, ok := acquis_stats[source]; !ok {
|
||||
acquis_stats[source] = make(map[string]int)
|
||||
}
|
||||
acquis_stats[source]["reads"] += ival
|
||||
mAcquis.Process(source, "reads", ival)
|
||||
case "cs_parser_hits_ok_total":
|
||||
if _, ok := acquis_stats[source]; !ok {
|
||||
acquis_stats[source] = make(map[string]int)
|
||||
}
|
||||
acquis_stats[source]["parsed"] += ival
|
||||
mAcquis.Process(source, "parsed", ival)
|
||||
case "cs_parser_hits_ko_total":
|
||||
if _, ok := acquis_stats[source]; !ok {
|
||||
acquis_stats[source] = make(map[string]int)
|
||||
}
|
||||
acquis_stats[source]["unparsed"] += ival
|
||||
mAcquis.Process(source, "unparsed", ival)
|
||||
case "cs_node_hits_total":
|
||||
if _, ok := parsers_stats[name]; !ok {
|
||||
parsers_stats[name] = make(map[string]int)
|
||||
}
|
||||
parsers_stats[name]["hits"] += ival
|
||||
mParser.Process(name, "hits", ival)
|
||||
case "cs_node_hits_ok_total":
|
||||
if _, ok := parsers_stats[name]; !ok {
|
||||
parsers_stats[name] = make(map[string]int)
|
||||
}
|
||||
parsers_stats[name]["parsed"] += ival
|
||||
mParser.Process(name, "parsed", ival)
|
||||
case "cs_node_hits_ko_total":
|
||||
if _, ok := parsers_stats[name]; !ok {
|
||||
parsers_stats[name] = make(map[string]int)
|
||||
}
|
||||
parsers_stats[name]["unparsed"] += ival
|
||||
mParser.Process(name, "unparsed", ival)
|
||||
//
|
||||
// whitelists
|
||||
//
|
||||
case "cs_node_wl_hits_total":
|
||||
mWhitelist.Process(name, reason, "hits", ival)
|
||||
case "cs_node_wl_hits_ok_total":
|
||||
mWhitelist.Process(name, reason, "whitelisted", ival)
|
||||
// track as well whitelisted lines at acquis level
|
||||
mAcquis.Process(source, "whitelisted", ival)
|
||||
//
|
||||
// lapi
|
||||
//
|
||||
case "cs_lapi_route_requests_total":
|
||||
if _, ok := lapi_stats[route]; !ok {
|
||||
lapi_stats[route] = make(map[string]int)
|
||||
}
|
||||
lapi_stats[route][method] += ival
|
||||
mLapi.Process(route, method, ival)
|
||||
case "cs_lapi_machine_requests_total":
|
||||
if _, ok := lapi_machine_stats[machine]; !ok {
|
||||
lapi_machine_stats[machine] = make(map[string]map[string]int)
|
||||
}
|
||||
if _, ok := lapi_machine_stats[machine][route]; !ok {
|
||||
lapi_machine_stats[machine][route] = make(map[string]int)
|
||||
}
|
||||
lapi_machine_stats[machine][route][method] += ival
|
||||
mLapiMachine.Process(machine, route, method, ival)
|
||||
case "cs_lapi_bouncer_requests_total":
|
||||
if _, ok := lapi_bouncer_stats[bouncer]; !ok {
|
||||
lapi_bouncer_stats[bouncer] = make(map[string]map[string]int)
|
||||
}
|
||||
if _, ok := lapi_bouncer_stats[bouncer][route]; !ok {
|
||||
lapi_bouncer_stats[bouncer][route] = make(map[string]int)
|
||||
}
|
||||
lapi_bouncer_stats[bouncer][route][method] += ival
|
||||
mLapiBouncer.Process(bouncer, route, method, ival)
|
||||
case "cs_lapi_decisions_ko_total", "cs_lapi_decisions_ok_total":
|
||||
if _, ok := lapi_decisions_stats[bouncer]; !ok {
|
||||
lapi_decisions_stats[bouncer] = struct {
|
||||
NonEmpty int
|
||||
Empty int
|
||||
}{}
|
||||
}
|
||||
x := lapi_decisions_stats[bouncer]
|
||||
if fam.Name == "cs_lapi_decisions_ko_total" {
|
||||
x.Empty += ival
|
||||
} else if fam.Name == "cs_lapi_decisions_ok_total" {
|
||||
x.NonEmpty += ival
|
||||
}
|
||||
lapi_decisions_stats[bouncer] = x
|
||||
mLapiDecision.Process(bouncer, fam.Name, ival)
|
||||
//
|
||||
// decisions
|
||||
//
|
||||
case "cs_active_decisions":
|
||||
mDecision.Process(reason, origin, action, ival)
|
||||
case "cs_alerts":
|
||||
mAlert.Process(reason, ival)
|
||||
//
|
||||
// stash
|
||||
//
|
||||
case "cs_cache_size":
|
||||
mStash.Process(name, mtype, ival)
|
||||
//
|
||||
// appsec
|
||||
//
|
||||
case "cs_appsec_reqs_total":
|
||||
mAppsecEngine.Process(appsecEngine, "processed", ival)
|
||||
case "cs_appsec_block_total":
|
||||
mAppsecEngine.Process(appsecEngine, "blocked", ival)
|
||||
case "cs_appsec_rule_hits":
|
||||
mAppsecRule.Process(appsecEngine, appsecRule, "triggered", ival)
|
||||
default:
|
||||
log.Debugf("unknown: %+v", fam.Name)
|
||||
continue
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
if csConfig.Cscli.Output == "human" {
|
||||
|
||||
acquisTable := tablewriter.NewWriter(os.Stdout)
|
||||
acquisTable.SetHeader([]string{"Source", "Lines read", "Lines parsed", "Lines unparsed", "Lines poured to bucket"})
|
||||
keys := []string{"reads", "parsed", "unparsed", "pour"}
|
||||
if err := metricsToTable(acquisTable, acquis_stats, keys); err != nil {
|
||||
log.Warningf("while collecting acquis stats : %s", err)
|
||||
}
|
||||
bucketsTable := tablewriter.NewWriter(os.Stdout)
|
||||
bucketsTable.SetHeader([]string{"Bucket", "Current Count", "Overflows", "Instantiated", "Poured", "Expired"})
|
||||
keys = []string{"curr_count", "overflow", "instanciation", "pour", "underflow"}
|
||||
if err := metricsToTable(bucketsTable, buckets_stats, keys); err != nil {
|
||||
log.Warningf("while collecting acquis stats : %s", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
parsersTable := tablewriter.NewWriter(os.Stdout)
|
||||
parsersTable.SetHeader([]string{"Parsers", "Hits", "Parsed", "Unparsed"})
|
||||
keys = []string{"hits", "parsed", "unparsed"}
|
||||
if err := metricsToTable(parsersTable, parsers_stats, keys); err != nil {
|
||||
log.Warningf("while collecting acquis stats : %s", err)
|
||||
}
|
||||
type cliMetrics struct {
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
lapiMachinesTable := tablewriter.NewWriter(os.Stdout)
|
||||
lapiMachinesTable.SetHeader([]string{"Machine", "Route", "Method", "Hits"})
|
||||
if err := lapiMetricsToTable(lapiMachinesTable, lapi_machine_stats); err != nil {
|
||||
log.Warningf("while collecting machine lapi stats : %s", err)
|
||||
}
|
||||
|
||||
//lapiMetricsToTable
|
||||
lapiBouncersTable := tablewriter.NewWriter(os.Stdout)
|
||||
lapiBouncersTable.SetHeader([]string{"Bouncer", "Route", "Method", "Hits"})
|
||||
if err := lapiMetricsToTable(lapiBouncersTable, lapi_bouncer_stats); err != nil {
|
||||
log.Warningf("while collecting bouncer lapi stats : %s", err)
|
||||
}
|
||||
|
||||
lapiDecisionsTable := tablewriter.NewWriter(os.Stdout)
|
||||
lapiDecisionsTable.SetHeader([]string{"Bouncer", "Empty answers", "Non-empty answers"})
|
||||
for bouncer, hits := range lapi_decisions_stats {
|
||||
row := []string{}
|
||||
row = append(row, bouncer)
|
||||
row = append(row, fmt.Sprintf("%d", hits.Empty))
|
||||
row = append(row, fmt.Sprintf("%d", hits.NonEmpty))
|
||||
lapiDecisionsTable.Append(row)
|
||||
}
|
||||
|
||||
/*unfortunately, we can't reuse metricsToTable as the structure is too different :/*/
|
||||
lapiTable := tablewriter.NewWriter(os.Stdout)
|
||||
lapiTable.SetHeader([]string{"Route", "Method", "Hits"})
|
||||
sortedKeys := []string{}
|
||||
for akey := range lapi_stats {
|
||||
sortedKeys = append(sortedKeys, akey)
|
||||
}
|
||||
sort.Strings(sortedKeys)
|
||||
for _, alabel := range sortedKeys {
|
||||
astats := lapi_stats[alabel]
|
||||
subKeys := []string{}
|
||||
for skey := range astats {
|
||||
subKeys = append(subKeys, skey)
|
||||
}
|
||||
sort.Strings(subKeys)
|
||||
for _, sl := range subKeys {
|
||||
row := []string{}
|
||||
row = append(row, alabel)
|
||||
row = append(row, sl)
|
||||
row = append(row, fmt.Sprintf("%d", astats[sl]))
|
||||
lapiTable.Append(row)
|
||||
}
|
||||
}
|
||||
|
||||
if bucketsTable.NumLines() > 0 {
|
||||
log.Printf("Buckets Metrics:")
|
||||
bucketsTable.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
bucketsTable.Render()
|
||||
}
|
||||
if acquisTable.NumLines() > 0 {
|
||||
log.Printf("Acquisition Metrics:")
|
||||
acquisTable.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
acquisTable.Render()
|
||||
}
|
||||
if parsersTable.NumLines() > 0 {
|
||||
log.Printf("Parser Metrics:")
|
||||
parsersTable.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
parsersTable.Render()
|
||||
}
|
||||
if lapiTable.NumLines() > 0 {
|
||||
log.Printf("Local Api Metrics:")
|
||||
lapiTable.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
lapiTable.Render()
|
||||
}
|
||||
if lapiMachinesTable.NumLines() > 0 {
|
||||
log.Printf("Local Api Machines Metrics:")
|
||||
lapiMachinesTable.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
lapiMachinesTable.Render()
|
||||
}
|
||||
if lapiBouncersTable.NumLines() > 0 {
|
||||
log.Printf("Local Api Bouncers Metrics:")
|
||||
lapiBouncersTable.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
lapiBouncersTable.Render()
|
||||
}
|
||||
|
||||
if lapiDecisionsTable.NumLines() > 0 {
|
||||
log.Printf("Local Api Bouncers Decisions:")
|
||||
lapiDecisionsTable.SetAlignment(tablewriter.ALIGN_LEFT)
|
||||
lapiDecisionsTable.Render()
|
||||
}
|
||||
|
||||
} else if csConfig.Cscli.Output == "json" {
|
||||
for _, val := range []interface{}{acquis_stats, parsers_stats, buckets_stats, lapi_stats, lapi_bouncer_stats, lapi_machine_stats, lapi_decisions_stats} {
|
||||
x, err := json.MarshalIndent(val, "", " ")
|
||||
if err != nil {
|
||||
log.Fatalf("failed to unmarshal metrics : %v", err)
|
||||
}
|
||||
fmt.Printf("%s\n", string(x))
|
||||
}
|
||||
} else if csConfig.Cscli.Output == "raw" {
|
||||
for _, val := range []interface{}{acquis_stats, parsers_stats, buckets_stats, lapi_stats, lapi_bouncer_stats, lapi_machine_stats, lapi_decisions_stats} {
|
||||
x, err := yaml.Marshal(val)
|
||||
if err != nil {
|
||||
log.Fatalf("failed to unmarshal metrics : %v", err)
|
||||
}
|
||||
fmt.Printf("%s\n", string(x))
|
||||
}
|
||||
func NewCLIMetrics(cfg configGetter) *cliMetrics {
|
||||
return &cliMetrics{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
var noUnit bool
|
||||
func (ms metricStore) Format(out io.Writer, sections []string, formatType string, noUnit bool) error {
|
||||
// copy only the sections we want
|
||||
want := map[string]metricSection{}
|
||||
|
||||
func NewMetricsCmd() *cobra.Command {
|
||||
/* ---- UPDATE COMMAND */
|
||||
var cmdMetrics = &cobra.Command{
|
||||
Use: "metrics",
|
||||
Short: "Display crowdsec prometheus metrics.",
|
||||
Long: `Fetch metrics from the prometheus server and display them in a human-friendly way`,
|
||||
// if explicitly asking for sections, we want to show empty tables
|
||||
showEmpty := len(sections) > 0
|
||||
|
||||
// if no sections are specified, we want all of them
|
||||
if len(sections) == 0 {
|
||||
sections = maptools.SortedKeys(ms)
|
||||
}
|
||||
|
||||
for _, section := range sections {
|
||||
want[section] = ms[section]
|
||||
}
|
||||
|
||||
switch formatType {
|
||||
case "human":
|
||||
for _, section := range maptools.SortedKeys(want) {
|
||||
want[section].Table(out, noUnit, showEmpty)
|
||||
}
|
||||
case "json":
|
||||
x, err := json.MarshalIndent(want, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal metrics: %w", err)
|
||||
}
|
||||
out.Write(x)
|
||||
case "raw":
|
||||
x, err := yaml.Marshal(want)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal metrics: %w", err)
|
||||
}
|
||||
out.Write(x)
|
||||
default:
|
||||
return fmt.Errorf("unknown format type %s", formatType)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliMetrics) show(sections []string, url string, noUnit bool) error {
|
||||
cfg := cli.cfg()
|
||||
|
||||
if url != "" {
|
||||
cfg.Cscli.PrometheusUrl = url
|
||||
}
|
||||
|
||||
if cfg.Prometheus == nil {
|
||||
return ErrMissingConfig
|
||||
}
|
||||
|
||||
if !cfg.Prometheus.Enabled {
|
||||
return ErrMetricsDisabled
|
||||
}
|
||||
|
||||
ms := NewMetricStore()
|
||||
|
||||
if err := ms.Fetch(cfg.Cscli.PrometheusUrl); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// any section that we don't have in the store is an error
|
||||
for _, section := range sections {
|
||||
if _, ok := ms[section]; !ok {
|
||||
return fmt.Errorf("unknown metrics type: %s", section)
|
||||
}
|
||||
}
|
||||
|
||||
if err := ms.Format(color.Output, sections, cfg.Cscli.Output, noUnit); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliMetrics) NewCommand() *cobra.Command {
|
||||
var (
|
||||
url string
|
||||
noUnit bool
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "metrics",
|
||||
Short: "Display crowdsec prometheus metrics.",
|
||||
Long: `Fetch metrics from a Local API server and display them`,
|
||||
Example: `# Show all Metrics, skip empty tables (same as "cecli metrics show")
|
||||
cscli metrics
|
||||
|
||||
# Show only some metrics, connect to a different url
|
||||
cscli metrics --url http://lapi.local:6060/metrics show acquisition parsers
|
||||
|
||||
# List available metric types
|
||||
cscli metrics list`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
Run: func(cmd *cobra.Command, args []string) {
|
||||
if err := csConfig.LoadPrometheus(); err != nil {
|
||||
log.Fatalf(err.Error())
|
||||
}
|
||||
if !csConfig.Prometheus.Enabled {
|
||||
log.Warningf("Prometheus is not enabled, can't show metrics")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if prometheusURL == "" {
|
||||
prometheusURL = csConfig.Cscli.PrometheusUrl
|
||||
}
|
||||
|
||||
if prometheusURL == "" {
|
||||
log.Errorf("No prometheus url, please specify in %s or via -u", *csConfig.FilePath)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
ShowPrometheus(prometheusURL + "/metrics")
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.show(nil, url, noUnit)
|
||||
},
|
||||
}
|
||||
cmdMetrics.PersistentFlags().StringVarP(&prometheusURL, "url", "u", "", "Prometheus url (http://<ip>:<port>/metrics)")
|
||||
cmdMetrics.PersistentFlags().BoolVar(&noUnit, "no-unit", false, "Show the real number instead of formatted with units")
|
||||
|
||||
return cmdMetrics
|
||||
flags := cmd.Flags()
|
||||
flags.StringVarP(&url, "url", "u", "", "Prometheus url (http://<ip>:<port>/metrics)")
|
||||
flags.BoolVar(&noUnit, "no-unit", false, "Show the real number instead of formatted with units")
|
||||
|
||||
cmd.AddCommand(cli.newShowCmd())
|
||||
cmd.AddCommand(cli.newListCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
// expandAlias returns a list of sections. The input can be a list of sections or alias.
|
||||
func (cli *cliMetrics) expandAlias(args []string) []string {
|
||||
ret := []string{}
|
||||
|
||||
for _, section := range args {
|
||||
switch section {
|
||||
case "engine":
|
||||
ret = append(ret, "acquisition", "parsers", "scenarios", "stash", "whitelists")
|
||||
case "lapi":
|
||||
ret = append(ret, "alerts", "decisions", "lapi", "lapi-bouncer", "lapi-decisions", "lapi-machine")
|
||||
case "appsec":
|
||||
ret = append(ret, "appsec-engine", "appsec-rule")
|
||||
default:
|
||||
ret = append(ret, section)
|
||||
}
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
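For illustration, the alias expansion above behaves like this standalone sketch; the alias-to-section mapping is copied from the switch, while everything else (function layout, the sample arguments) is a hypothetical stand-in rather than part of cscli:

package main

import "fmt"

// expandAlias mirrors the switch above: known aliases fan out to their
// member sections, anything else passes through unchanged.
func expandAlias(args []string) []string {
	aliases := map[string][]string{
		"engine": {"acquisition", "parsers", "scenarios", "stash", "whitelists"},
		"lapi":   {"alerts", "decisions", "lapi", "lapi-bouncer", "lapi-decisions", "lapi-machine"},
		"appsec": {"appsec-engine", "appsec-rule"},
	}

	ret := []string{}
	for _, section := range args {
		if expanded, ok := aliases[section]; ok {
			ret = append(ret, expanded...)
			continue
		}
		ret = append(ret, section)
	}
	return ret
}

func main() {
	// "engine" expands, "lapi-decisions" is already a concrete section name.
	fmt.Println(expandAlias([]string{"engine", "lapi-decisions"}))
	// Output: [acquisition parsers scenarios stash whitelists lapi-decisions]
}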
|
||||
func (cli *cliMetrics) newShowCmd() *cobra.Command {
|
||||
var (
|
||||
url string
|
||||
noUnit bool
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "show [type]...",
|
||||
Short: "Display all or part of the available metrics.",
|
||||
Long: `Fetch metrics from a Local API server and display them, optionally filtering on specific types.`,
|
||||
Example: `# Show all Metrics, skip empty tables
|
||||
cscli metrics show
|
||||
|
||||
# Use an alias: "engine", "lapi" or "appsec" to show a group of metrics
|
||||
cscli metrics show engine
|
||||
|
||||
# Show some specific metrics, show empty tables, connect to a different url
|
||||
cscli metrics show acquisition parsers scenarios stash --url http://lapi.local:6060/metrics
|
||||
|
||||
# To list available metric types, use "cscli metrics list"
|
||||
cscli metrics list; cscli metrics list -o json
|
||||
|
||||
# Show metrics in json format
|
||||
cscli metrics show acquisition parsers scenarios stash -o json`,
|
||||
// Positional args are optional
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
args = cli.expandAlias(args)
|
||||
return cli.show(args, url, noUnit)
|
||||
},
|
||||
}
|
||||
|
||||
flags := cmd.Flags()
|
||||
flags.StringVarP(&url, "url", "u", "", "Metrics url (http://<ip>:<port>/metrics)")
|
||||
flags.BoolVar(&noUnit, "no-unit", false, "Show the real number instead of formatted with units")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliMetrics) list() error {
|
||||
type metricType struct {
|
||||
Type string `json:"type" yaml:"type"`
|
||||
Title string `json:"title" yaml:"title"`
|
||||
Description string `json:"description" yaml:"description"`
|
||||
}
|
||||
|
||||
var allMetrics []metricType
|
||||
|
||||
ms := NewMetricStore()
|
||||
for _, section := range maptools.SortedKeys(ms) {
|
||||
title, description := ms[section].Description()
|
||||
allMetrics = append(allMetrics, metricType{
|
||||
Type: section,
|
||||
Title: title,
|
||||
Description: description,
|
||||
})
|
||||
}
|
||||
|
||||
switch cli.cfg().Cscli.Output {
|
||||
case "human":
|
||||
t := newTable(color.Output)
|
||||
t.SetRowLines(true)
|
||||
t.SetHeaders("Type", "Title", "Description")
|
||||
|
||||
for _, metric := range allMetrics {
|
||||
t.AddRow(metric.Type, metric.Title, metric.Description)
|
||||
}
|
||||
|
||||
t.Render()
|
||||
case "json":
|
||||
x, err := json.MarshalIndent(allMetrics, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal metric types: %w", err)
|
||||
}
|
||||
|
||||
fmt.Println(string(x))
|
||||
case "raw":
|
||||
x, err := yaml.Marshal(allMetrics)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal metric types: %w", err)
|
||||
}
|
||||
|
||||
fmt.Println(string(x))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (cli *cliMetrics) newListCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "List available types of metrics.",
|
||||
Long: `List available types of metrics.`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
return cli.list()
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
|
cmd/crowdsec-cli/metrics_table.go (new file, 615 lines)
@@ -0,0 +1,615 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"sort"
|
||||
"strconv"
|
||||
|
||||
"github.com/aquasecurity/table"
|
||||
log "github.com/sirupsen/logrus"
|
||||
|
||||
"github.com/crowdsecurity/go-cs-lib/maptools"
|
||||
)
|
||||
|
||||
// ErrNilTable means a nil pointer was passed instead of a table instance. This is a programming error.
|
||||
var ErrNilTable = errors.New("nil table")
|
||||
|
||||
func lapiMetricsToTable(t *table.Table, stats map[string]map[string]map[string]int) int {
|
||||
// stats: machine -> route -> method -> count
|
||||
// sort keys to keep consistent order when printing
|
||||
machineKeys := []string{}
|
||||
for k := range stats {
|
||||
machineKeys = append(machineKeys, k)
|
||||
}
|
||||
|
||||
sort.Strings(machineKeys)
|
||||
|
||||
numRows := 0
|
||||
|
||||
for _, machine := range machineKeys {
|
||||
// oneRow: route -> method -> count
|
||||
machineRow := stats[machine]
|
||||
for routeName, route := range machineRow {
|
||||
for methodName, count := range route {
|
||||
row := []string{
|
||||
machine,
|
||||
routeName,
|
||||
methodName,
|
||||
}
|
||||
if count != 0 {
|
||||
row = append(row, strconv.Itoa(count))
|
||||
} else {
|
||||
row = append(row, "-")
|
||||
}
|
||||
|
||||
t.AddRow(row...)
|
||||
|
||||
numRows++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return numRows
|
||||
}
|
||||
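A minimal sketch of the data shape lapiMetricsToTable consumes, using a hypothetical scrape result (machine names, routes and hit counts are made up); only the sorted-machine iteration mirrors the real loop, and a zero count would be rendered as "-" by the function above:

package main

import (
	"fmt"
	"sort"
)

func main() {
	// Shape expected by lapiMetricsToTable: machine -> route -> method -> hit count.
	stats := map[string]map[string]map[string]int{
		"machine-2": {"/v1/decisions": {"GET": 0}},
		"machine-1": {"/v1/alerts": {"GET": 12, "POST": 3}},
	}

	// Machines are sorted for stable output; routes and methods keep map order,
	// exactly like the nested loops in lapiMetricsToTable.
	machines := make([]string, 0, len(stats))
	for m := range stats {
		machines = append(machines, m)
	}
	sort.Strings(machines)

	for _, m := range machines {
		for route, methods := range stats[m] {
			for method, count := range methods {
				fmt.Println(m, route, method, count)
			}
		}
	}
}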
|
||||
func wlMetricsToTable(t *table.Table, stats map[string]map[string]map[string]int, noUnit bool) (int, error) {
|
||||
if t == nil {
|
||||
return 0, ErrNilTable
|
||||
}
|
||||
|
||||
numRows := 0
|
||||
|
||||
for _, name := range maptools.SortedKeys(stats) {
|
||||
for _, reason := range maptools.SortedKeys(stats[name]) {
|
||||
row := []string{
|
||||
name,
|
||||
reason,
|
||||
"-",
|
||||
"-",
|
||||
}
|
||||
|
||||
for _, action := range maptools.SortedKeys(stats[name][reason]) {
|
||||
value := stats[name][reason][action]
|
||||
|
||||
switch action {
|
||||
case "whitelisted":
|
||||
row[3] = strconv.Itoa(value)
|
||||
case "hits":
|
||||
row[2] = strconv.Itoa(value)
|
||||
default:
|
||||
log.Debugf("unexpected counter '%s' for whitelists = %d", action, value)
|
||||
}
|
||||
}
|
||||
|
||||
t.AddRow(row...)
|
||||
|
||||
numRows++
|
||||
}
|
||||
}
|
||||
|
||||
return numRows, nil
|
||||
}
|
||||
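A small sketch of the fixed four-column row layout used by wlMetricsToTable; the whitelist name, reason string and counter values are made-up examples:

package main

import (
	"fmt"
	"strconv"
)

func main() {
	// Row layout: [name, reason, hits, whitelisted]. Columns 2 and 3 start as
	// "-" and are only filled if the corresponding counter is present.
	counters := map[string]int{"hits": 10, "whitelisted": 4}

	row := []string{"crowdsecurity/whitelists", "private ipv4/ipv6 ip/ranges", "-", "-"}
	for action, value := range counters {
		switch action {
		case "hits":
			row[2] = strconv.Itoa(value)
		case "whitelisted":
			row[3] = strconv.Itoa(value)
		}
	}

	fmt.Println(row) // [crowdsecurity/whitelists private ipv4/ipv6 ip/ranges 10 4]
}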
|
||||
func metricsToTable(t *table.Table, stats map[string]map[string]int, keys []string, noUnit bool) (int, error) {
|
||||
if t == nil {
|
||||
return 0, ErrNilTable
|
||||
}
|
||||
|
||||
numRows := 0
|
||||
|
||||
for _, alabel := range maptools.SortedKeys(stats) {
|
||||
astats, ok := stats[alabel]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
row := []string{
|
||||
alabel,
|
||||
}
|
||||
|
||||
for _, sl := range keys {
|
||||
if v, ok := astats[sl]; ok && v != 0 {
|
||||
numberToShow := strconv.Itoa(v)
|
||||
if !noUnit {
|
||||
numberToShow = formatNumber(v)
|
||||
}
|
||||
|
||||
row = append(row, numberToShow)
|
||||
} else {
|
||||
row = append(row, "-")
|
||||
}
|
||||
}
|
||||
|
||||
t.AddRow(row...)
|
||||
|
||||
numRows++
|
||||
}
|
||||
|
||||
return numRows, nil
|
||||
}
|
||||
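The column-selection loop in metricsToTable can be illustrated with this self-contained sketch; buildRow is a hypothetical helper that reproduces only the keys ordering and zero-value handling, and it ignores noUnit since formatNumber is defined elsewhere in the repository:

package main

import (
	"fmt"
	"strconv"
)

// buildRow mimics the inner loop of metricsToTable: the keys slice decides
// which counters become columns and in what order; missing or zero counters
// render as "-".
func buildRow(label string, stats map[string]int, keys []string) []string {
	row := []string{label}
	for _, k := range keys {
		if v, ok := stats[k]; ok && v != 0 {
			row = append(row, strconv.Itoa(v))
		} else {
			row = append(row, "-")
		}
	}
	return row
}

func main() {
	stats := map[string]int{"hits": 42, "parsed": 40} // "unparsed" absent
	fmt.Println(buildRow("crowdsecurity/syslog-logs", stats, []string{"hits", "parsed", "unparsed"}))
	// Output: [crowdsecurity/syslog-logs 42 40 -]
}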
|
||||
func (s statBucket) Description() (string, string) {
|
||||
return "Scenario Metrics",
|
||||
`Measure events in different scenarios. Current count is the number of buckets during metrics collection. ` +
|
||||
`Overflows are past event-producing buckets, while Expired are the ones that didn’t receive enough events to Overflow.`
|
||||
}
|
||||
|
||||
func (s statBucket) Process(bucket, metric string, val int) {
|
||||
if _, ok := s[bucket]; !ok {
|
||||
s[bucket] = make(map[string]int)
|
||||
}
|
||||
|
||||
s[bucket][metric] += val
|
||||
}
|
||||
|
||||
func (s statBucket) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Scenario", "Current Count", "Overflows", "Instantiated", "Poured", "Expired")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
keys := []string{"curr_count", "overflow", "instantiation", "pour", "underflow"}
|
||||
|
||||
if numRows, err := metricsToTable(t, s, keys, noUnit); err != nil {
|
||||
log.Warningf("while collecting scenario stats: %s", err)
|
||||
} else if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
|
||||
func (s statAcquis) Description() (string, string) {
|
||||
return "Acquisition Metrics",
|
||||
`Measures the lines read, parsed, and unparsed per datasource. ` +
|
||||
`Zero read lines indicate a misconfigured or inactive datasource. ` +
|
||||
`Zero parsed lines mean the parser(s) failed. ` +
|
||||
`Non-zero parsed lines are fine as crowdsec selects relevant lines.`
|
||||
}
|
||||
|
||||
func (s statAcquis) Process(source, metric string, val int) {
|
||||
if _, ok := s[source]; !ok {
|
||||
s[source] = make(map[string]int)
|
||||
}
|
||||
|
||||
s[source][metric] += val
|
||||
}
|
||||
|
||||
func (s statAcquis) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Source", "Lines read", "Lines parsed", "Lines unparsed", "Lines poured to bucket", "Lines whitelisted")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
keys := []string{"reads", "parsed", "unparsed", "pour", "whitelisted"}
|
||||
|
||||
if numRows, err := metricsToTable(t, s, keys, noUnit); err != nil {
|
||||
log.Warningf("while collecting acquis stats: %s", err)
|
||||
} else if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
|
||||
func (s statAppsecEngine) Description() (string, string) {
|
||||
return "Appsec Metrics",
|
||||
`Measures the number of parsed and blocked requests by the AppSec Component.`
|
||||
}
|
||||
|
||||
func (s statAppsecEngine) Process(appsecEngine, metric string, val int) {
|
||||
if _, ok := s[appsecEngine]; !ok {
|
||||
s[appsecEngine] = make(map[string]int)
|
||||
}
|
||||
|
||||
s[appsecEngine][metric] += val
|
||||
}
|
||||
|
||||
func (s statAppsecEngine) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Appsec Engine", "Processed", "Blocked")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft)
|
||||
|
||||
keys := []string{"processed", "blocked"}
|
||||
|
||||
if numRows, err := metricsToTable(t, s, keys, noUnit); err != nil {
|
||||
log.Warningf("while collecting appsec stats: %s", err)
|
||||
} else if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
|
||||
func (s statAppsecRule) Description() (string, string) {
|
||||
return "Appsec Rule Metrics",
|
||||
`Provides “per AppSec Component” information about the number of matches for loaded AppSec Rules.`
|
||||
}
|
||||
|
||||
func (s statAppsecRule) Process(appsecEngine, appsecRule string, metric string, val int) {
|
||||
if _, ok := s[appsecEngine]; !ok {
|
||||
s[appsecEngine] = make(map[string]map[string]int)
|
||||
}
|
||||
|
||||
if _, ok := s[appsecEngine][appsecRule]; !ok {
|
||||
s[appsecEngine][appsecRule] = make(map[string]int)
|
||||
}
|
||||
|
||||
s[appsecEngine][appsecRule][metric] += val
|
||||
}
|
||||
|
||||
func (s statAppsecRule) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
for appsecEngine, appsecEngineRulesStats := range s {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Rule ID", "Triggered")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft)
|
||||
|
||||
keys := []string{"triggered"}
|
||||
|
||||
if numRows, err := metricsToTable(t, appsecEngineRulesStats, keys, noUnit); err != nil {
|
||||
log.Warningf("while collecting appsec rules stats: %s", err)
|
||||
} else if numRows > 0 || showEmpty {
|
||||
renderTableTitle(out, fmt.Sprintf("\nAppsec '%s' Rules Metrics:", appsecEngine))
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s statWhitelist) Description() (string, string) {
|
||||
return "Whitelist Metrics",
|
||||
`Tracks the number of events processed and possibly whitelisted by each parser whitelist.`
|
||||
}
|
||||
|
||||
func (s statWhitelist) Process(whitelist, reason, metric string, val int) {
|
||||
if _, ok := s[whitelist]; !ok {
|
||||
s[whitelist] = make(map[string]map[string]int)
|
||||
}
|
||||
|
||||
if _, ok := s[whitelist][reason]; !ok {
|
||||
s[whitelist][reason] = make(map[string]int)
|
||||
}
|
||||
|
||||
s[whitelist][reason][metric] += val
|
||||
}
|
||||
|
||||
func (s statWhitelist) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Whitelist", "Reason", "Hits", "Whitelisted")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
if numRows, err := wlMetricsToTable(t, s, noUnit); err != nil {
|
||||
log.Warningf("while collecting parsers stats: %s", err)
|
||||
} else if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
|
||||
func (s statParser) Description() (string, string) {
|
||||
return "Parser Metrics",
|
||||
`Tracks the number of events processed by each parser and indicates success or failure. ` +
`Zero parsed lines mean the parser(s) failed. ` +
`Non-zero unparsed lines are fine as crowdsec selects relevant lines.`
|
||||
}
|
||||
|
||||
func (s statParser) Process(parser, metric string, val int) {
|
||||
if _, ok := s[parser]; !ok {
|
||||
s[parser] = make(map[string]int)
|
||||
}
|
||||
|
||||
s[parser][metric] += val
|
||||
}
|
||||
|
||||
func (s statParser) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Parsers", "Hits", "Parsed", "Unparsed")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
keys := []string{"hits", "parsed", "unparsed"}
|
||||
|
||||
if numRows, err := metricsToTable(t, s, keys, noUnit); err != nil {
|
||||
log.Warningf("while collecting parsers stats: %s", err)
|
||||
} else if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
|
||||
func (s statStash) Description() (string, string) {
|
||||
return "Parser Stash Metrics",
|
||||
`Tracks the status of stashes that might be created by various parsers and scenarios.`
|
||||
}
|
||||
|
||||
func (s statStash) Process(name, mtype string, val int) {
|
||||
s[name] = struct {
|
||||
Type string
|
||||
Count int
|
||||
}{
|
||||
Type: mtype,
|
||||
Count: val,
|
||||
}
|
||||
}
|
||||
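A minimal sketch of the statStash bookkeeping, assuming a hypothetical stash named user_agent_cache; note that Process assigns the entry rather than accumulating, presumably because each scrape sample already carries the current item count:

package main

import "fmt"

func main() {
	// statStash keys stash names to an anonymous {Type, Count} pair.
	s := map[string]struct {
		Type  string
		Count int
	}{}

	process := func(name, mtype string, val int) {
		s[name] = struct {
			Type  string
			Count int
		}{Type: mtype, Count: val}
	}

	process("user_agent_cache", "string", 12)
	process("user_agent_cache", "string", 15) // latest value wins

	fmt.Printf("%+v\n", s["user_agent_cache"]) // {Type:string Count:15}
}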
|
||||
func (s statStash) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Name", "Type", "Items")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
// unfortunately, we can't reuse metricsToTable as the structure is too different :/
|
||||
numRows := 0
|
||||
|
||||
for _, alabel := range maptools.SortedKeys(s) {
|
||||
astats := s[alabel]
|
||||
|
||||
row := []string{
|
||||
alabel,
|
||||
astats.Type,
|
||||
strconv.Itoa(astats.Count),
|
||||
}
|
||||
t.AddRow(row...)
|
||||
|
||||
numRows++
|
||||
}
|
||||
|
||||
if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
|
||||
func (s statLapi) Description() (string, string) {
|
||||
return "Local API Metrics",
|
||||
`Monitors the requests made to local API routes.`
|
||||
}
|
||||
|
||||
func (s statLapi) Process(route, method string, val int) {
|
||||
if _, ok := s[route]; !ok {
|
||||
s[route] = make(map[string]int)
|
||||
}
|
||||
|
||||
s[route][method] += val
|
||||
}
|
||||
|
||||
func (s statLapi) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Route", "Method", "Hits")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
// unfortunately, we can't reuse metricsToTable as the structure is too different :/
|
||||
numRows := 0
|
||||
|
||||
for _, alabel := range maptools.SortedKeys(s) {
|
||||
astats := s[alabel]
|
||||
|
||||
subKeys := []string{}
|
||||
for skey := range astats {
|
||||
subKeys = append(subKeys, skey)
|
||||
}
|
||||
|
||||
sort.Strings(subKeys)
|
||||
|
||||
for _, sl := range subKeys {
|
||||
row := []string{
|
||||
alabel,
|
||||
sl,
|
||||
strconv.Itoa(astats[sl]),
|
||||
}
|
||||
|
||||
t.AddRow(row...)
|
||||
|
||||
numRows++
|
||||
}
|
||||
}
|
||||
|
||||
if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
|
||||
func (s statLapiMachine) Description() (string, string) {
|
||||
return "Local API Machines Metrics",
|
||||
`Tracks the number of calls to the local API from each registered machine.`
|
||||
}
|
||||
|
||||
func (s statLapiMachine) Process(machine, route, method string, val int) {
|
||||
if _, ok := s[machine]; !ok {
|
||||
s[machine] = make(map[string]map[string]int)
|
||||
}
|
||||
|
||||
if _, ok := s[machine][route]; !ok {
|
||||
s[machine][route] = make(map[string]int)
|
||||
}
|
||||
|
||||
s[machine][route][method] += val
|
||||
}
|
||||
|
||||
func (s statLapiMachine) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Machine", "Route", "Method", "Hits")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
numRows := lapiMetricsToTable(t, s)
|
||||
|
||||
if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
|
||||
func (s statLapiBouncer) Description() (string, string) {
|
||||
return "Local API Bouncers Metrics",
|
||||
`Tracks total hits to remediation component related API routes.`
|
||||
}
|
||||
|
||||
func (s statLapiBouncer) Process(bouncer, route, method string, val int) {
|
||||
if _, ok := s[bouncer]; !ok {
|
||||
s[bouncer] = make(map[string]map[string]int)
|
||||
}
|
||||
|
||||
if _, ok := s[bouncer][route]; !ok {
|
||||
s[bouncer][route] = make(map[string]int)
|
||||
}
|
||||
|
||||
s[bouncer][route][method] += val
|
||||
}
|
||||
|
||||
func (s statLapiBouncer) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Bouncer", "Route", "Method", "Hits")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
numRows := lapiMetricsToTable(t, s)
|
||||
|
||||
if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
|
||||
func (s statLapiDecision) Description() (string, string) {
|
||||
return "Local API Bouncers Decisions",
|
||||
`Tracks the number of empty/non-empty answers from LAPI to bouncers that are working in "live" mode.`
|
||||
}
|
||||
|
||||
func (s statLapiDecision) Process(bouncer, fam string, val int) {
|
||||
if _, ok := s[bouncer]; !ok {
|
||||
s[bouncer] = struct {
|
||||
NonEmpty int
|
||||
Empty int
|
||||
}{}
|
||||
}
|
||||
|
||||
x := s[bouncer]
|
||||
|
||||
switch fam {
|
||||
case "cs_lapi_decisions_ko_total":
|
||||
x.Empty += val
|
||||
case "cs_lapi_decisions_ok_total":
|
||||
x.NonEmpty += val
|
||||
}
|
||||
|
||||
s[bouncer] = x
|
||||
}
|
||||
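A sketch of how statLapiDecision.Process maps the two Prometheus families onto the Empty/NonEmpty counters; the bouncer name and sample values are invented:

package main

import "fmt"

type lapiDecisionStat struct {
	NonEmpty int
	Empty    int
}

func main() {
	// "ko" answers (no decision returned) count as Empty, "ok" answers as NonEmpty.
	s := map[string]lapiDecisionStat{}

	samples := []struct {
		bouncer string
		family  string
		val     int
	}{
		{"firewall-bouncer", "cs_lapi_decisions_ko_total", 7},
		{"firewall-bouncer", "cs_lapi_decisions_ok_total", 2},
	}

	for _, sample := range samples {
		x := s[sample.bouncer] // zero value for an unseen bouncer
		switch sample.family {
		case "cs_lapi_decisions_ko_total":
			x.Empty += sample.val
		case "cs_lapi_decisions_ok_total":
			x.NonEmpty += sample.val
		}
		s[sample.bouncer] = x
	}

	fmt.Printf("%+v\n", s["firewall-bouncer"]) // {NonEmpty:2 Empty:7}
}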
|
||||
func (s statLapiDecision) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Bouncer", "Empty answers", "Non-empty answers")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
numRows := 0
|
||||
|
||||
for bouncer, hits := range s {
|
||||
t.AddRow(
|
||||
bouncer,
|
||||
strconv.Itoa(hits.Empty),
|
||||
strconv.Itoa(hits.NonEmpty),
|
||||
)
|
||||
|
||||
numRows++
|
||||
}
|
||||
|
||||
if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
|
||||
func (s statDecision) Description() (string, string) {
|
||||
return "Local API Decisions",
|
||||
`Provides information about all currently active decisions. ` +
|
||||
`Includes both local (crowdsec) and global decisions (CAPI), and lists subscriptions (lists).`
|
||||
}
|
||||
|
||||
func (s statDecision) Process(reason, origin, action string, val int) {
|
||||
if _, ok := s[reason]; !ok {
|
||||
s[reason] = make(map[string]map[string]int)
|
||||
}
|
||||
|
||||
if _, ok := s[reason][origin]; !ok {
|
||||
s[reason][origin] = make(map[string]int)
|
||||
}
|
||||
|
||||
s[reason][origin][action] += val
|
||||
}
|
||||
|
||||
func (s statDecision) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Reason", "Origin", "Action", "Count")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
numRows := 0
|
||||
|
||||
for reason, origins := range s {
|
||||
for origin, actions := range origins {
|
||||
for action, hits := range actions {
|
||||
t.AddRow(
|
||||
reason,
|
||||
origin,
|
||||
action,
|
||||
strconv.Itoa(hits),
|
||||
)
|
||||
|
||||
numRows++
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
||||
|
||||
func (s statAlert) Description() (string, string) {
|
||||
return "Local API Alerts",
|
||||
`Tracks the total number of past and present alerts for the installed scenarios.`
|
||||
}
|
||||
|
||||
func (s statAlert) Process(reason string, val int) {
|
||||
s[reason] += val
|
||||
}
|
||||
|
||||
func (s statAlert) Table(out io.Writer, noUnit bool, showEmpty bool) {
|
||||
t := newTable(out)
|
||||
t.SetRowLines(false)
|
||||
t.SetHeaders("Reason", "Count")
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft)
|
||||
|
||||
numRows := 0
|
||||
|
||||
for scenario, hits := range s {
|
||||
t.AddRow(
|
||||
scenario,
|
||||
strconv.Itoa(hits),
|
||||
)
|
||||
|
||||
numRows++
|
||||
}
|
||||
|
||||
if numRows > 0 || showEmpty {
|
||||
title, _ := s.Description()
|
||||
renderTableTitle(out, "\n"+title+":")
|
||||
t.Render()
|
||||
}
|
||||
}
|
cmd/crowdsec-cli/notifications.go (new file, 462 lines)
@@ -0,0 +1,462 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/csv"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/fatih/color"
|
||||
"github.com/go-openapi/strfmt"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/tomb.v2"
|
||||
"gopkg.in/yaml.v3"
|
||||
|
||||
"github.com/crowdsecurity/go-cs-lib/ptr"
|
||||
"github.com/crowdsecurity/go-cs-lib/version"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csplugin"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/csprofiles"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/models"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
||||
)
|
||||
|
||||
type NotificationsCfg struct {
|
||||
Config csplugin.PluginConfig `json:"plugin_config"`
|
||||
Profiles []*csconfig.ProfileCfg `json:"associated_profiles"`
|
||||
ids []uint
|
||||
}
|
||||
|
||||
type cliNotifications struct {
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
func NewCLINotifications(cfg configGetter) *cliNotifications {
|
||||
return &cliNotifications{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliNotifications) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "notifications [action]",
|
||||
Short: "Helper for notification plugin configuration",
|
||||
Long: "To list/inspect/test notification template",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
Aliases: []string{"notifications", "notification"},
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
if err := require.LAPI(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := cfg.LoadAPIClient(); err != nil {
|
||||
return fmt.Errorf("loading api client: %w", err)
|
||||
}
|
||||
if err := require.Notifications(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmd.AddCommand(cli.NewListCmd())
|
||||
cmd.AddCommand(cli.NewInspectCmd())
|
||||
cmd.AddCommand(cli.NewReinjectCmd())
|
||||
cmd.AddCommand(cli.NewTestCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliNotifications) getPluginConfigs() (map[string]csplugin.PluginConfig, error) {
|
||||
cfg := cli.cfg()
|
||||
pcfgs := map[string]csplugin.PluginConfig{}
|
||||
wf := func(path string, info fs.FileInfo, err error) error {
|
||||
if info == nil {
|
||||
return fmt.Errorf("error while traversing directory %s: %w", path, err)
|
||||
}
|
||||
|
||||
name := filepath.Join(cfg.ConfigPaths.NotificationDir, info.Name()) // Avoid calling info.Name() twice
|
||||
if (strings.HasSuffix(name, "yaml") || strings.HasSuffix(name, "yml")) && !(info.IsDir()) {
|
||||
ts, err := csplugin.ParsePluginConfigFile(name)
|
||||
if err != nil {
|
||||
return fmt.Errorf("loading notifification plugin configuration with %s: %w", name, err)
|
||||
}
|
||||
|
||||
for _, t := range ts {
|
||||
csplugin.SetRequiredFields(&t)
|
||||
pcfgs[t.Name] = t
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
if err := filepath.Walk(cfg.ConfigPaths.NotificationDir, wf); err != nil {
|
||||
return nil, fmt.Errorf("while loading notifification plugin configuration: %w", err)
|
||||
}
|
||||
|
||||
return pcfgs, nil
|
||||
}
|
||||
|
||||
func (cli *cliNotifications) getProfilesConfigs() (map[string]NotificationsCfg, error) {
|
||||
cfg := cli.cfg()
|
||||
// A bit of tricky stuff now: reconcile profiles and notification plugins
|
||||
pcfgs, err := cli.getPluginConfigs()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ncfgs := map[string]NotificationsCfg{}
|
||||
for _, pc := range pcfgs {
|
||||
ncfgs[pc.Name] = NotificationsCfg{
|
||||
Config: pc,
|
||||
}
|
||||
}
|
||||
|
||||
profiles, err := csprofiles.NewProfile(cfg.API.Server.Profiles)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("while extracting profiles from configuration: %w", err)
|
||||
}
|
||||
|
||||
for profileID, profile := range profiles {
|
||||
for _, notif := range profile.Cfg.Notifications {
|
||||
pc, ok := pcfgs[notif]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("notification plugin '%s' does not exist", notif)
|
||||
}
|
||||
|
||||
tmp, ok := ncfgs[pc.Name]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("notification plugin '%s' does not exist", pc.Name)
|
||||
}
|
||||
|
||||
tmp.Profiles = append(tmp.Profiles, profile.Cfg)
|
||||
tmp.ids = append(tmp.ids, uint(profileID))
|
||||
ncfgs[pc.Name] = tmp
|
||||
}
|
||||
}
|
||||
|
||||
return ncfgs, nil
|
||||
}
|
||||
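A simplified sketch of the reconciliation performed by getProfilesConfigs, using hypothetical stand-in types instead of csplugin.PluginConfig and csconfig.ProfileCfg; it only shows how plugins end up associated with (or orphaned from) profiles:

package main

import "fmt"

type pluginConfig struct{ Name, Type string }

type profileCfg struct {
	Name          string
	Notifications []string
}

func main() {
	pcfgs := map[string]pluginConfig{
		"slack_default": {Name: "slack_default", Type: "slack"},
		"email_default": {Name: "email_default", Type: "email"},
	}
	profiles := []profileCfg{
		{Name: "default_ip_remediation", Notifications: []string{"slack_default"}},
	}

	// plugin name -> profiles that reference it; plugins referenced by no
	// profile keep an empty list (shown as inactive in the list table).
	usedBy := map[string][]string{}
	for name := range pcfgs {
		usedBy[name] = nil
	}
	for _, p := range profiles {
		for _, notif := range p.Notifications {
			usedBy[notif] = append(usedBy[notif], p.Name)
		}
	}

	fmt.Println(usedBy["slack_default"]) // [default_ip_remediation]
	fmt.Println(usedBy["email_default"]) // []
}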
|
||||
func (cli *cliNotifications) NewListCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "list",
|
||||
Short: "list active notifications plugins",
|
||||
Long: `list active notifications plugins`,
|
||||
Example: `cscli notifications list`,
|
||||
Args: cobra.ExactArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
ncfgs, err := cli.getProfilesConfigs()
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't build profiles configuration: %w", err)
|
||||
}
|
||||
|
||||
if cfg.Cscli.Output == "human" {
|
||||
notificationListTable(color.Output, ncfgs)
|
||||
} else if cfg.Cscli.Output == "json" {
|
||||
x, err := json.MarshalIndent(ncfgs, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal notification configuration: %w", err)
|
||||
}
|
||||
fmt.Printf("%s", string(x))
|
||||
} else if cfg.Cscli.Output == "raw" {
|
||||
csvwriter := csv.NewWriter(os.Stdout)
|
||||
err := csvwriter.Write([]string{"Name", "Type", "Profile name"})
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write raw header: %w", err)
|
||||
}
|
||||
for _, b := range ncfgs {
|
||||
profilesList := []string{}
|
||||
for _, p := range b.Profiles {
|
||||
profilesList = append(profilesList, p.Name)
|
||||
}
|
||||
err := csvwriter.Write([]string{b.Config.Name, b.Config.Type, strings.Join(profilesList, ", ")})
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to write raw content: %w", err)
|
||||
}
|
||||
}
|
||||
csvwriter.Flush()
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliNotifications) NewInspectCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "inspect",
|
||||
Short: "Inspect active notifications plugin configuration",
|
||||
Long: `Inspect active notifications plugin and show configuration`,
|
||||
Example: `cscli notifications inspect <plugin_name>`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, args []string) error {
|
||||
cfg := cli.cfg()
|
||||
ncfgs, err := cli.getProfilesConfigs()
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't build profiles configuration: %w", err)
|
||||
}
|
||||
ncfg, ok := ncfgs[args[0]]
|
||||
if !ok {
|
||||
return fmt.Errorf("plugin '%s' does not exist or is not active", args[0])
|
||||
}
|
||||
if cfg.Cscli.Output == "human" || cfg.Cscli.Output == "raw" {
|
||||
fmt.Printf(" - %15s: %15s\n", "Type", ncfg.Config.Type)
|
||||
fmt.Printf(" - %15s: %15s\n", "Name", ncfg.Config.Name)
|
||||
fmt.Printf(" - %15s: %15s\n", "Timeout", ncfg.Config.TimeOut)
|
||||
fmt.Printf(" - %15s: %15s\n", "Format", ncfg.Config.Format)
|
||||
for k, v := range ncfg.Config.Config {
|
||||
fmt.Printf(" - %15s: %15v\n", k, v)
|
||||
}
|
||||
} else if cfg.Cscli.Output == "json" {
|
||||
x, err := json.MarshalIndent(ncfg, "", " ")
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to marshal notification configuration: %w", err)
|
||||
}
|
||||
fmt.Printf("%s", string(x))
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliNotifications) NewTestCmd() *cobra.Command {
|
||||
var (
|
||||
pluginBroker csplugin.PluginBroker
|
||||
pluginTomb tomb.Tomb
|
||||
alertOverride string
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "test [plugin name]",
|
||||
Short: "send a generic test alert to notification plugin",
|
||||
Long: `send a generic test alert to a notification plugin to test configuration even if it is not active`,
|
||||
Example: `cscli notifications test [plugin_name]`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
PreRunE: func(_ *cobra.Command, args []string) error {
|
||||
cfg := cli.cfg()
|
||||
pconfigs, err := cli.getPluginConfigs()
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't build profiles configuration: %w", err)
|
||||
}
|
||||
pcfg, ok := pconfigs[args[0]]
|
||||
if !ok {
|
||||
return fmt.Errorf("plugin name: '%s' does not exist", args[0])
|
||||
}
|
||||
// Create a single profile with plugin name as notification name
|
||||
return pluginBroker.Init(cfg.PluginConfig, []*csconfig.ProfileCfg{
|
||||
{
|
||||
Notifications: []string{
|
||||
pcfg.Name,
|
||||
},
|
||||
},
|
||||
}, cfg.ConfigPaths)
|
||||
},
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
pluginTomb.Go(func() error {
|
||||
pluginBroker.Run(&pluginTomb)
|
||||
return nil
|
||||
})
|
||||
alert := &models.Alert{
|
||||
Capacity: ptr.Of(int32(0)),
|
||||
Decisions: []*models.Decision{{
|
||||
Duration: ptr.Of("4h"),
|
||||
Scope: ptr.Of("Ip"),
|
||||
Value: ptr.Of("10.10.10.10"),
|
||||
Type: ptr.Of("ban"),
|
||||
Scenario: ptr.Of("test alert"),
|
||||
Origin: ptr.Of(types.CscliOrigin),
|
||||
}},
|
||||
Events: []*models.Event{},
|
||||
EventsCount: ptr.Of(int32(1)),
|
||||
Leakspeed: ptr.Of("0"),
|
||||
Message: ptr.Of("test alert"),
|
||||
ScenarioHash: ptr.Of(""),
|
||||
Scenario: ptr.Of("test alert"),
|
||||
ScenarioVersion: ptr.Of(""),
|
||||
Simulated: ptr.Of(false),
|
||||
Source: &models.Source{
|
||||
AsName: "",
|
||||
AsNumber: "",
|
||||
Cn: "",
|
||||
IP: "10.10.10.10",
|
||||
Range: "",
|
||||
Scope: ptr.Of("Ip"),
|
||||
Value: ptr.Of("10.10.10.10"),
|
||||
},
|
||||
StartAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)),
|
||||
StopAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)),
|
||||
CreatedAt: time.Now().UTC().Format(time.RFC3339),
|
||||
}
|
||||
if err := yaml.Unmarshal([]byte(alertOverride), alert); err != nil {
|
||||
return fmt.Errorf("failed to unmarshal alert override: %w", err)
|
||||
}
|
||||
|
||||
pluginBroker.PluginChannel <- csplugin.ProfileAlert{
|
||||
ProfileID: uint(0),
|
||||
Alert: alert,
|
||||
}
|
||||
|
||||
// time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
|
||||
pluginTomb.Kill(errors.New("terminating"))
|
||||
pluginTomb.Wait()
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmd.Flags().StringVarP(&alertOverride, "alert", "a", "", "JSON string used to override alert fields in the generic alert (see crowdsec/pkg/models/alert.go in the source tree for the full definition of the object)")
|
||||
|
||||
return cmd
|
||||
}
|
||||
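A sketch of the --alert override mechanism used by the test command: the flag is documented as a JSON string but decoded with yaml.Unmarshal, which works because YAML accepts JSON syntax; the alert struct below is a two-field stand-in for models.Alert, not the real type:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

type alert struct {
	Message  string `yaml:"message"`
	Scenario string `yaml:"scenario"`
}

func main() {
	// Pre-filled generic test alert, then the override is layered on top.
	a := alert{Message: "test alert", Scenario: "test alert"}

	override := `{"scenario": "notification/test"}`
	if err := yaml.Unmarshal([]byte(override), &a); err != nil {
		panic(err)
	}

	fmt.Printf("%+v\n", a) // {Message:test alert Scenario:notification/test}
}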
|
||||
func (cli *cliNotifications) NewReinjectCmd() *cobra.Command {
|
||||
var (
|
||||
alertOverride string
|
||||
alert *models.Alert
|
||||
)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "reinject",
|
||||
Short: "reinject an alert into profiles to trigger notifications",
|
||||
Long: `reinject an alert into profiles to be evaluated by the filter and sent to matched notifications plugins`,
|
||||
Example: `
|
||||
cscli notifications reinject <alert_id>
|
||||
cscli notifications reinject <alert_id> -a '{"remediation": false,"scenario":"notification/test"}'
|
||||
cscli notifications reinject <alert_id> -a '{"remediation": true,"scenario":"notification/test"}'
|
||||
`,
|
||||
Args: cobra.ExactArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
PreRunE: func(_ *cobra.Command, args []string) error {
|
||||
var err error
|
||||
alert, err = cli.fetchAlertFromArgString(args[0])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
var (
|
||||
pluginBroker csplugin.PluginBroker
|
||||
pluginTomb tomb.Tomb
|
||||
)
|
||||
|
||||
cfg := cli.cfg()
|
||||
|
||||
if alertOverride != "" {
|
||||
if err := json.Unmarshal([]byte(alertOverride), alert); err != nil {
|
||||
return fmt.Errorf("can't unmarshal data in the alert flag: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
err := pluginBroker.Init(cfg.PluginConfig, cfg.API.Server.Profiles, cfg.ConfigPaths)
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't initialize plugins: %w", err)
|
||||
}
|
||||
|
||||
pluginTomb.Go(func() error {
|
||||
pluginBroker.Run(&pluginTomb)
|
||||
return nil
|
||||
})
|
||||
|
||||
profiles, err := csprofiles.NewProfile(cfg.API.Server.Profiles)
|
||||
if err != nil {
|
||||
return fmt.Errorf("cannot extract profiles from configuration: %w", err)
|
||||
}
|
||||
|
||||
for id, profile := range profiles {
|
||||
_, matched, err := profile.EvaluateProfile(alert)
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't evaluate profile %s: %w", profile.Cfg.Name, err)
|
||||
}
|
||||
if !matched {
|
||||
log.Infof("The profile %s didn't match", profile.Cfg.Name)
|
||||
continue
|
||||
}
|
||||
log.Infof("The profile %s matched, sending to its configured notification plugins", profile.Cfg.Name)
|
||||
loop:
|
||||
for {
|
||||
select {
|
||||
case pluginBroker.PluginChannel <- csplugin.ProfileAlert{
|
||||
ProfileID: uint(id),
|
||||
Alert: alert,
|
||||
}:
|
||||
break loop
|
||||
default:
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
log.Info("sleeping\n")
|
||||
}
|
||||
}
|
||||
|
||||
if profile.Cfg.OnSuccess == "break" {
|
||||
log.Infof("The profile %s contains a 'on_success: break' so bailing out", profile.Cfg.Name)
|
||||
break
|
||||
}
|
||||
}
|
||||
// time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
|
||||
pluginTomb.Kill(errors.New("terminating"))
|
||||
pluginTomb.Wait()
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
cmd.Flags().StringVarP(&alertOverride, "alert", "a", "", "JSON string used to override alert fields in the reinjected alert (see crowdsec/pkg/models/alert.go in the source tree for the full definition of the object)")
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliNotifications) fetchAlertFromArgString(toParse string) (*models.Alert, error) {
|
||||
cfg := cli.cfg()
|
||||
|
||||
id, err := strconv.Atoi(toParse)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("bad alert id %s", toParse)
|
||||
}
|
||||
|
||||
apiURL, err := url.Parse(cfg.API.Client.Credentials.URL)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing the URL of the API: %w", err)
|
||||
}
|
||||
|
||||
client, err := apiclient.NewClient(&apiclient.Config{
|
||||
MachineID: cfg.API.Client.Credentials.Login,
|
||||
Password: strfmt.Password(cfg.API.Client.Credentials.Password),
|
||||
UserAgent: fmt.Sprintf("crowdsec/%s", version.String()),
|
||||
URL: apiURL,
|
||||
VersionPrefix: "v1",
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error creating the client for the API: %w", err)
|
||||
}
|
||||
|
||||
alert, _, err := client.Alerts.GetByID(context.Background(), id)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("can't find alert with id %d: %w", id, err)
|
||||
}
|
||||
|
||||
return alert, nil
|
||||
}
|
cmd/crowdsec-cli/notifications_table.go (new file, 45 lines)
@@ -0,0 +1,45 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"io"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/aquasecurity/table"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/pkg/emoji"
|
||||
)
|
||||
|
||||
func notificationListTable(out io.Writer, ncfgs map[string]NotificationsCfg) {
|
||||
t := newLightTable(out)
|
||||
t.SetHeaders("Active", "Name", "Type", "Profile name")
|
||||
t.SetHeaderAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
|
||||
|
||||
keys := make([]string, 0, len(ncfgs))
|
||||
for k := range ncfgs {
|
||||
keys = append(keys, k)
|
||||
}
|
||||
|
||||
sort.Slice(keys, func(i, j int) bool {
|
||||
return len(ncfgs[keys[i]].Profiles) > len(ncfgs[keys[j]].Profiles)
|
||||
})
|
||||
|
||||
for _, k := range keys {
|
||||
b := ncfgs[k]
|
||||
profilesList := []string{}
|
||||
|
||||
for _, p := range b.Profiles {
|
||||
profilesList = append(profilesList, p.Name)
|
||||
}
|
||||
|
||||
active := emoji.CheckMark
|
||||
if len(profilesList) == 0 {
|
||||
active = emoji.Prohibited
|
||||
}
|
||||
|
||||
t.AddRow(active, b.Config.Name, b.Config.Type, strings.Join(profilesList, ", "))
|
||||
}
|
||||
|
||||
t.Render()
|
||||
}
|
cmd/crowdsec-cli/papi.go (new file, 152 lines)
@@ -0,0 +1,152 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
log "github.com/sirupsen/logrus"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/tomb.v2"
|
||||
|
||||
"github.com/crowdsecurity/go-cs-lib/ptr"
|
||||
|
||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/apiserver"
|
||||
"github.com/crowdsecurity/crowdsec/pkg/database"
|
||||
)
|
||||
|
||||
type cliPapi struct {
|
||||
cfg configGetter
|
||||
}
|
||||
|
||||
func NewCLIPapi(cfg configGetter) *cliPapi {
|
||||
return &cliPapi{
|
||||
cfg: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
func (cli *cliPapi) NewCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "papi [action]",
|
||||
Short: "Manage interaction with Polling API (PAPI)",
|
||||
Args: cobra.MinimumNArgs(1),
|
||||
DisableAutoGenTag: true,
|
||||
PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
|
||||
cfg := cli.cfg()
|
||||
if err := require.LAPI(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := require.CAPI(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := require.PAPI(cfg); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
cmd.AddCommand(cli.NewStatusCmd())
|
||||
cmd.AddCommand(cli.NewSyncCmd())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliPapi) NewStatusCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "status",
|
||||
Short: "Get status of the Polling API",
|
||||
Args: cobra.MinimumNArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
var err error
|
||||
cfg := cli.cfg()
|
||||
dbClient, err = database.NewClient(cfg.DbConfig)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to initialize database client: %w", err)
|
||||
}
|
||||
|
||||
apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to initialize API client: %w", err)
|
||||
}
|
||||
|
||||
papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to initialize PAPI client: %w", err)
|
||||
}
|
||||
|
||||
perms, err := papi.GetPermissions()
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to get PAPI permissions: %w", err)
|
||||
}
|
||||
var lastTimestampStr *string
|
||||
lastTimestampStr, err = dbClient.GetConfigItem(apiserver.PapiPullKey)
|
||||
if err != nil {
|
||||
lastTimestampStr = ptr.Of("never")
|
||||
}
|
||||
log.Infof("You can successfully interact with Polling API (PAPI)")
|
||||
log.Infof("Console plan: %s", perms.Plan)
|
||||
log.Infof("Last order received: %s", *lastTimestampStr)
|
||||
|
||||
log.Infof("PAPI subscriptions:")
|
||||
for _, sub := range perms.Categories {
|
||||
log.Infof(" - %s", sub)
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func (cli *cliPapi) NewSyncCmd() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "sync",
|
||||
Short: "Sync with the Polling API, pulling all non-expired orders for the instance",
|
||||
Args: cobra.MinimumNArgs(0),
|
||||
DisableAutoGenTag: true,
|
||||
RunE: func(_ *cobra.Command, _ []string) error {
|
||||
var err error
|
||||
cfg := cli.cfg()
|
||||
t := tomb.Tomb{}
|
||||
|
||||
dbClient, err = database.NewClient(cfg.DbConfig)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to initialize database client: %w", err)
|
||||
}
|
||||
|
||||
apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to initialize API client: %w", err)
|
||||
}
|
||||
|
||||
t.Go(apic.Push)
|
||||
|
||||
papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to initialize PAPI client: %w", err)
|
||||
}
|
||||
|
||||
t.Go(papi.SyncDecisions)
|
||||
|
||||
err = papi.PullOnce(time.Time{}, true)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to sync decisions: %w", err)
|
||||
}
|
||||
|
||||
log.Infof("Sending acknowledgements to CAPI")
|
||||
|
||||
apic.Shutdown()
|
||||
papi.Shutdown()
|
||||
t.Wait()
|
||||
time.Sleep(5 * time.Second) //FIXME: the push done by apic.Push is run inside a sub goroutine, sleep to make sure it's done
|
||||
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
Some files were not shown because too many files have changed in this diff.