Compare commits
12 commits: master...cti-swagge

Commit SHA1s:
d36425e471
b210b0f233
06e9ca4e4a
f91e329efc
3cb4a4c8d1
764bef719b
5c6bddc070
75ff4e1e31
084c01ddc7
f82c5f34d3
0be5fbb07a
4ecefdd849
166 changed files with 4734 additions and 4754 deletions
.github/governance.yml (6 changes)

@@ -42,7 +42,7 @@ issue:
 3. Check [Releases](https://github.com/crowdsecurity/crowdsec/releases/latest) to make sure your agent is on the latest version.

 - prefix: kind
-list: ['feature', 'bug', 'packaging', 'enhancement', 'refactoring']
+list: ['feature', 'bug', 'packaging', 'enhancement']
 multiple: false
 author_association:
 author: true

@@ -54,7 +54,6 @@ issue:
 @$AUTHOR: There are no 'kind' label on this issue. You need a 'kind' label to start the triage process.
 * `/kind feature`
 * `/kind enhancement`
-* `/kind refactoring`
 * `/kind bug`
 * `/kind packaging`

@@ -66,13 +65,12 @@ pull_request:
 labels:
 - prefix: kind
 multiple: false
-list: [ 'feature', 'enhancement', 'fix', 'chore', 'dependencies', 'refactoring']
+list: [ 'feature', 'enhancement', 'fix', 'chore', 'dependencies']
 needs:
 comment: |
 @$AUTHOR: There are no 'kind' label on this PR. You need a 'kind' label to generate the release automatically.
 * `/kind feature`
 * `/kind enhancement`
-* `/kind refactoring`
 * `/kind fix`
 * `/kind chore`
 * `/kind dependencies`
.github/workflows/bats-hub.yml (6 changes)

@@ -33,7 +33,7 @@ jobs:
 - name: "Set up Go"
 uses: actions/setup-go@v5
 with:
-go-version: "1.22.2"
+go-version: "1.21.7"

 - name: "Install bats dependencies"
 env:

@@ -53,7 +53,7 @@ jobs:
 run: ./test/bin/collect-hub-coverage >> $GITHUB_ENV

 - name: "Create Parsers badge"
-uses: schneegans/dynamic-badges-action@v1.7.0
+uses: schneegans/dynamic-badges-action@v1.6.0
 if: ${{ github.ref == 'refs/heads/master' && github.repository_owner == 'crowdsecurity' }}
 with:
 auth: ${{ secrets.GIST_BADGES_SECRET }}

@@ -64,7 +64,7 @@ jobs:
 color: ${{ env.SCENARIO_BADGE_COLOR }}

 - name: "Create Scenarios badge"
-uses: schneegans/dynamic-badges-action@v1.7.0
+uses: schneegans/dynamic-badges-action@v1.6.0
 if: ${{ github.ref == 'refs/heads/master' && github.repository_owner == 'crowdsecurity' }}
 with:
 auth: ${{ secrets.GIST_BADGES_SECRET }}
.github/workflows/bats-mysql.yml (2 changes)

@@ -36,7 +36,7 @@ jobs:
 - name: "Set up Go"
 uses: actions/setup-go@v5
 with:
-go-version: "1.22.2"
+go-version: "1.21.7"

 - name: "Install bats dependencies"
 env:
.github/workflows/bats-postgres.yml (2 changes)

@@ -45,7 +45,7 @@ jobs:
 - name: "Set up Go"
 uses: actions/setup-go@v5
 with:
-go-version: "1.22.2"
+go-version: "1.21.7"

 - name: "Install bats dependencies"
 env:
.github/workflows/bats-sqlite-coverage.yml (5 changes)

@@ -28,7 +28,7 @@ jobs:
 - name: "Set up Go"
 uses: actions/setup-go@v5
 with:
-go-version: "1.22.2"
+go-version: "1.21.7"

 - name: "Install bats dependencies"
 env:

@@ -77,8 +77,7 @@ jobs:
 if: ${{ always() }}

 - name: Upload crowdsec coverage to codecov
-uses: codecov/codecov-action@v4
+uses: codecov/codecov-action@v3
 with:
 files: ./coverage-bats.out
 flags: bats
-token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/ci-windows-build-msi.yml (4 changes)

@@ -35,12 +35,12 @@ jobs:
 - name: "Set up Go"
 uses: actions/setup-go@v5
 with:
-go-version: "1.22.2"
+go-version: "1.21.7"

 - name: Build
 run: make windows_installer BUILD_RE2_WASM=1
 - name: Upload MSI
-uses: actions/upload-artifact@v4
+uses: actions/upload-artifact@v3
 with:
 path: crowdsec*msi
 name: crowdsec.msi
.github/workflows/ci_release-drafter.yml (2 changes)

@@ -12,7 +12,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 # Drafts your next Release notes as Pull Requests are merged into "master"
-- uses: release-drafter/release-drafter@v6
+- uses: release-drafter/release-drafter@v5
 with:
 config-name: release-drafter.yml
 # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
.github/workflows/codeql-analysis.yml (4 changes)

@@ -52,7 +52,7 @@ jobs:
 - name: "Set up Go"
 uses: actions/setup-go@v5
 with:
-go-version: "1.22.2"
+go-version: "1.21.7"
 cache-dependency-path: "**/go.sum"

 # Initializes the CodeQL tools for scanning.

@@ -68,7 +68,7 @@ jobs:
 # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
 # If this step fails, then you should remove it and run the build manually (see below)
 # - name: Autobuild
-# uses: github/codeql-action/autobuild@v3
+# uses: github/codeql-action/autobuild@v2

 # ℹ️ Command-line programs to run using the OS shell.
 # 📚 https://git.io/JvXDl
.github/workflows/docker-tests.yml (14 changes)

@@ -59,15 +59,15 @@ jobs:
 cd docker/test
 python -m pip install --upgrade pipenv wheel

-#- name: "Cache virtualenvs"
-# id: cache-pipenv
-# uses: actions/cache@v4
-# with:
-# path: ~/.local/share/virtualenvs
-# key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
+- name: "Cache virtualenvs"
+id: cache-pipenv
+uses: actions/cache@v4
+with:
+path: ~/.local/share/virtualenvs
+key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}

 - name: "Install dependencies"
-#if: steps.cache-pipenv.outputs.cache-hit != 'true'
+if: steps.cache-pipenv.outputs.cache-hit != 'true'
 run: |
 cd docker/test
 pipenv install --deploy
.github/workflows/go-tests-windows.yml (9 changes)

@@ -34,7 +34,7 @@ jobs:
 - name: "Set up Go"
 uses: actions/setup-go@v5
 with:
-go-version: "1.22.2"
+go-version: "1.21.7"

 - name: Build
 run: |

@@ -48,16 +48,15 @@ jobs:
 cat out.txt | sed 's/ *coverage:.*of statements in.*//' | richgo testfilter

 - name: Upload unit coverage to Codecov
-uses: codecov/codecov-action@v4
+uses: codecov/codecov-action@v3
 with:
 files: coverage.out
 flags: unit-windows
-token: ${{ secrets.CODECOV_TOKEN }}

 - name: golangci-lint
-uses: golangci/golangci-lint-action@v4
+uses: golangci/golangci-lint-action@v3
 with:
-version: v1.57
+version: v1.55
 args: --issues-exit-code=1 --timeout 10m
 only-new-issues: false
 # the cache is already managed above, enabling it here
.github/workflows/go-tests.yml (9 changes)

@@ -126,7 +126,7 @@ jobs:
 - name: "Set up Go"
 uses: actions/setup-go@v5
 with:
-go-version: "1.22.2"
+go-version: "1.21.7"

 - name: Create localstack streams
 run: |

@@ -149,16 +149,15 @@ jobs:
 make go-acc | sed 's/ *coverage:.*of statements in.*//' | richgo testfilter

 - name: Upload unit coverage to Codecov
-uses: codecov/codecov-action@v4
+uses: codecov/codecov-action@v3
 with:
 files: coverage.out
 flags: unit-linux
-token: ${{ secrets.CODECOV_TOKEN }}

 - name: golangci-lint
-uses: golangci/golangci-lint-action@v4
+uses: golangci/golangci-lint-action@v3
 with:
-version: v1.57
+version: v1.55
 args: --issues-exit-code=1 --timeout 10m
 only-new-issues: false
 # the cache is already managed above, enabling it here
.github/workflows/governance-bot.yaml (2 changes)

@@ -23,7 +23,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 # Semantic versioning, lock to different version: v2, v2.0 or a commit hash.
-- uses: BirthdayResearch/oss-governance-bot@v4
+- uses: BirthdayResearch/oss-governance-bot@v3
 with:
 # You can use a PAT to post a comment/label/status so that it shows up as a user instead of github-actions
 github-token: ${{secrets.GITHUB_TOKEN}} # optional, default to '${{ github.token }}'
(unnamed workflow file)

@@ -25,7 +25,7 @@ jobs:
 - name: "Set up Go"
 uses: actions/setup-go@v5
 with:
-go-version: "1.22.2"
+go-version: "1.21.7"

 - name: Build the binaries
 run: |
.gitignore (6 changes)

@@ -6,10 +6,7 @@
 *.dylib
 *~
 .pc
-
-# IDEs
 .vscode
-.idea

 # If vendor is included, allow prebuilt (wasm?) libraries.
 !vendor/**/*.so

@@ -37,7 +34,7 @@ test/coverage/*
 *.swo

 # Dependencies are not vendored by default, but a tarball is created by "make vendor"
-# and provided in the release. Used by gentoo, etc.
+# and provided in the release. Used by freebsd, gentoo, etc.
 vendor/
 vendor.tgz

@@ -45,6 +42,7 @@ vendor.tgz
 cmd/crowdsec-cli/cscli
 cmd/crowdsec/crowdsec
 cmd/notification-*/notification-*
+cmd/cscti/cscti

 # Test cache (downloaded files)
 .cache
.golangci.yml (161 changes)

@@ -1,9 +1,19 @@
 # https://github.com/golangci/golangci-lint/blob/master/.golangci.reference.yml

+run:
+skip-dirs:
+- pkg/time/rate
+skip-files:
+- pkg/cti/client.go
+- pkg/cti/types.go
+- pkg/database/ent/generate.go
+- pkg/yamlpatch/merge.go
+- pkg/yamlpatch/merge_test.go
+
 linters-settings:
 cyclop:
 # lower this after refactoring
-max-complexity: 48
+max-complexity: 53

 gci:
 sections:

@@ -12,39 +22,29 @@ linters-settings:
 - prefix(github.com/crowdsecurity)
 - prefix(github.com/crowdsecurity/crowdsec)

-gomoddirectives:
-replace-allow-list:
-- golang.org/x/time/rate
-
 gocognit:
 # lower this after refactoring
 min-complexity: 145

 gocyclo:
 # lower this after refactoring
-min-complexity: 48
+min-complexity: 49

 funlen:
 # Checks the number of lines in a function.
 # If lower than 0, disable the check.
 # Default: 60
-# lower this after refactoring
-lines: 437
+lines: -1
 # Checks the number of statements in a function.
 # If lower than 0, disable the check.
 # Default: 40
-# lower this after refactoring
-statements: 122
+statements: -1

 govet:
-enable-all: true
-disable:
-- reflectvaluecompare
-- fieldalignment
+check-shadowing: true

 lll:
-# lower this after refactoring
-line-length: 2607
+line-length: 140

 maintidx:
 # raise this after refactoring

@@ -58,7 +58,7 @@ linters-settings:
 min-complexity: 28

 nlreturn:
-block-size: 5
+block-size: 4

 nolintlint:
 allow-unused: false # report any unused nolint directives

@@ -70,18 +70,24 @@ linters-settings:

 depguard:
 rules:
-wrap:
+main:
 deny:
 - pkg: "github.com/pkg/errors"
 desc: "errors.Wrap() is deprecated in favor of fmt.Errorf()"
-files:
-- "!**/pkg/database/*.go"
-- "!**/pkg/exprhelpers/*.go"
-- "!**/pkg/acquisition/modules/appsec/appsec.go"
-- "!**/pkg/acquisition/modules/loki/internal/lokiclient/loki_client.go"
-- "!**/pkg/apiserver/controllers/v1/errors.go"
 yaml:
 files:
+- "!**/cmd/crowdsec-cli/alerts.go"
+- "!**/cmd/crowdsec-cli/capi.go"
+- "!**/cmd/crowdsec-cli/config_show.go"
+- "!**/cmd/crowdsec-cli/hubtest.go"
+- "!**/cmd/crowdsec-cli/lapi.go"
+- "!**/cmd/crowdsec-cli/simulation.go"
+- "!**/cmd/crowdsec/crowdsec.go"
+- "!**/cmd/notification-dummy/main.go"
+- "!**/cmd/notification-email/main.go"
+- "!**/cmd/notification-http/main.go"
+- "!**/cmd/notification-slack/main.go"
+- "!**/cmd/notification-splunk/main.go"
 - "!**/pkg/acquisition/acquisition.go"
 - "!**/pkg/acquisition/acquisition_test.go"
 - "!**/pkg/acquisition/modules/appsec/appsec.go"

@@ -101,6 +107,7 @@ linters-settings:
 - "!**/pkg/appsec/loader.go"
 - "!**/pkg/csplugin/broker.go"
 - "!**/pkg/csplugin/broker_test.go"
+- "!**/pkg/dumps/bucker_dump.go"
 - "!**/pkg/dumps/bucket_dump.go"
 - "!**/pkg/dumps/parser_dump.go"
 - "!**/pkg/hubtest/coverage.go"

@@ -128,30 +135,23 @@ linters:
 #
 # DEPRECATED by golangi-lint
 #
-- deadcode
-- exhaustivestruct
-- golint
-- ifshort
-- interfacer
-- maligned
-- nosnakecase
-- scopelint
-- structcheck
-- varcheck
+- deadcode # The owner seems to have abandoned the linter. Replaced by unused.
+- exhaustivestruct # The owner seems to have abandoned the linter. Replaced by exhaustruct.
+- golint # Golint differs from gofmt. Gofmt reformats Go source code, whereas golint prints out style mistakes
+- ifshort # Checks that your code uses short syntax for if-statements whenever possible
+- interfacer # Linter that suggests narrower interface types
+- maligned # Tool to detect Go structs that would take less memory if their fields were sorted
+- nosnakecase # nosnakecase is a linter that detects snake case of variable naming and function name.
+- scopelint # Scopelint checks for unpinned variables in go programs
+- structcheck # The owner seems to have abandoned the linter. Replaced by unused.
+- varcheck # The owner seems to have abandoned the linter. Replaced by unused.

-#
-# Disabled until fixed for go 1.22
-#
-
-- copyloopvar # copyloopvar is a linter detects places where loop variables are copied
-- intrange # intrange is a linter to find places where for loops could make use of an integer range.
-
 #
 # Enabled
 #

 # - asasalint # check for pass []any as any in variadic func(...any)
-# - asciicheck # checks that all code identifiers does not have non-ASCII symbols in the name
+# - asciicheck # Simple linter to check that your code does not contain non-ASCII identifiers
 # - bidichk # Checks for dangerous unicode character sequences
 # - bodyclose # checks whether HTTP response body is closed successfully
 # - cyclop # checks function and package cyclomatic complexity

@@ -159,15 +159,13 @@ linters:
 # - depguard # Go linter that checks if package imports are in a list of acceptable packages
 # - dupword # checks for duplicate words in the source code
 # - durationcheck # check for two durations multiplied together
-# - errcheck # errcheck is a program for checking for unchecked errors in Go code. These unchecked errors can be critical bugs in some cases
+# - errcheck # Errcheck is a program for checking for unchecked errors in go programs. These unchecked errors can be critical bugs in some cases
 # - errorlint # errorlint is a linter for that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.
 # - execinquery # execinquery is a linter about query string checker in Query function which reads your Go src files and warning it finds
 # - exportloopref # checks for pointers to enclosing loop variables
 # - funlen # Tool for detection of long functions
 # - ginkgolinter # enforces standards of using ginkgo and gomega
-# - gocheckcompilerdirectives # Checks that go compiler directive comments (//go:) are valid.
 # - gochecknoinits # Checks that no init functions are present in Go code
-# - gochecksumtype # Run exhaustiveness checks on Go "sum types"
 # - gocognit # Computes and checks the cognitive complexity of functions
 # - gocritic # Provides diagnostics that check for bugs, performance and style issues.
 # - gocyclo # Computes and checks the cyclomatic complexity of functions

@@ -175,63 +173,56 @@ linters:
 # - gomoddirectives # Manage the use of 'replace', 'retract', and 'excludes' directives in go.mod.
 # - gomodguard # Allow and block list linter for direct Go module dependencies. This is different from depguard where there are different block types for example version constraints and module recommendations.
 # - goprintffuncname # Checks that printf-like functions are named with `f` at the end
-# - gosimple # (megacheck): Linter for Go source code that specializes in simplifying code
-# - gosmopolitan # Report certain i18n/l10n anti-patterns in your Go codebase
-# - govet # (vet, vetshadow): Vet examines Go source code and reports suspicious constructs. It is roughly the same as 'go vet' and uses its passes.
-# - grouper # Analyze expression groups.
+# - gosimple # (megacheck): Linter for Go source code that specializes in simplifying a code
+# - govet # (vet, vetshadow): Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string
+# - grouper # An analyzer to analyze expression groups.
 # - importas # Enforces consistent import aliases
 # - ineffassign # Detects when assignments to existing variables are not used
 # - interfacebloat # A linter that checks the number of methods inside an interface.
-# - lll # Reports long lines
-# - loggercheck # (logrlint): Checks key value pairs for common logger libraries (kitlog,klog,logr,zap).
 # - logrlint # Check logr arguments.
 # - maintidx # maintidx measures the maintainability index of each function.
 # - makezero # Finds slice declarations with non-zero initial length
-# - mirror # reports wrong mirror patterns of bytes/strings usage
-# - misspell # Finds commonly misspelled English words
-# - nakedret # Checks that functions with naked returns are not longer than a maximum size (can be zero).
+# - misspell # Finds commonly misspelled English words in comments
+# - nakedret # Finds naked returns in functions greater than a specified function length
 # - nestif # Reports deeply nested if statements
 # - nilerr # Finds the code that returns nil even if it checks that the error is not nil.
 # - nolintlint # Reports ill-formed or insufficient nolint directives
 # - nonamedreturns # Reports all named returns
 # - nosprintfhostport # Checks for misuse of Sprintf to construct a host with port in a URL.
-# - perfsprint # Checks that fmt.Sprintf can be replaced with a faster alternative.
 # - predeclared # find code that shadows one of Go's predeclared identifiers
 # - reassign # Checks that package variables are not reassigned
-# - rowserrcheck # checks whether Rows.Err of rows is checked successfully
-# - sloglint # ensure consistent code style when using log/slog
-# - spancheck # Checks for mistakes with OpenTelemetry/Census spans.
-# - sqlclosecheck # Checks that sql.Rows, sql.Stmt, sqlx.NamedStmt, pgx.Query are closed.
-# - staticcheck # (megacheck): It's a set of rules from staticcheck. It's not the same thing as the staticcheck binary. The author of staticcheck doesn't support or approve the use of staticcheck as a library inside golangci-lint.
-# - tenv # tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17
+# - rowserrcheck # checks whether Err of rows is checked successfully
+# - sqlclosecheck # Checks that sql.Rows and sql.Stmt are closed.
+# - staticcheck # (megacheck): Staticcheck is a go vet on steroids, applying a ton of static analysis checks
 # - testableexamples # linter checks if examples are testable (have an expected output)
-# - testifylint # Checks usage of github.com/stretchr/testify.
+# - tenv # tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17
 # - tparallel # tparallel detects inappropriate usage of t.Parallel() method in your Go test codes
+# - typecheck # Like the front-end of a Go compiler, parses and type-checks Go code
 # - unconvert # Remove unnecessary type conversions
 # - unused # (megacheck): Checks Go code for unused constants, variables, functions and types
 # - usestdlibvars # A linter that detect the possibility to use variables/constants from the Go standard library.
-# - wastedassign # Finds wasted assignment statements
-# - zerologlint # Detects the wrong usage of `zerolog` that a user forgets to dispatch with `Send` or `Msg`
+# - wastedassign # wastedassign finds wasted assignment statements.

 #
 # Recommended? (easy)
 #

 - dogsled # Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())
-- errchkjson # Checks types passed to the json encoding functions. Reports unsupported types and reports occations, where the check for the returned error can be omitted.
+- errchkjson # Checks types passed to the json encoding functions. Reports unsupported types and optionally reports occations, where the check for the returned error can be omitted.
 - exhaustive # check exhaustiveness of enum switch statements
 - gci # Gci control golang package import order and make it always deterministic.
 - godot # Check if comments end in a period
 - gofmt # Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification
-- goimports # Check import statements are formatted according to the 'goimport' command. Reformat imports in autofix mode.
+- goimports # In addition to fixing imports, goimports also formats your code in the same style as gofmt.
 - gosec # (gas): Inspects source code for security problems
 - inamedparam # reports interfaces with unnamed method parameters
+- lll # Reports long lines
 - musttag # enforce field tags in (un)marshaled structs
 - promlinter # Check Prometheus metrics naming via promlint
 - protogetter # Reports direct reads from proto message fields when getters should be used
 - revive # Fast, configurable, extensible, flexible, and beautiful linter for Go. Drop-in replacement of golint.
 - tagalign # check that struct tags are well aligned
-- thelper # thelper detects tests helpers which is not start with t.Helper() method.
+- thelper # thelper detects golang test helpers without t.Helper() call and checks the consistency of test helpers
 - wrapcheck # Checks that errors returned from external packages are wrapped

 #

@@ -239,12 +230,12 @@ linters:
 #

 - containedctx # containedctx is a linter that detects struct contained context.Context field
-- contextcheck # check whether the function uses a non-inherited context
+- contextcheck # check the function whether use a non-inherited context
 - errname # Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`.
 - gomnd # An analyzer to detect magic numbers.
 - ireturn # Accept Interfaces, Return Concrete Types
 - nilnil # Checks that there is no simultaneous return of `nil` error and an invalid value.
-- noctx # Finds sending http request without context.Context
+- noctx # noctx finds sending http request without context.Context
 - unparam # Reports unused function parameters

 #

@@ -253,8 +244,8 @@ linters:

 - gofumpt # Gofumpt checks whether code was gofumpt-ed.
 - nlreturn # nlreturn checks for a new line before return and branch statements to increase code clarity
-- whitespace # Whitespace is a linter that checks for unnecessary newlines at the start and end of functions, if, for, etc.
-- wsl # add or remove empty lines
+- whitespace # Tool for detection of leading and trailing whitespace
+- wsl # Whitespace Linter - Forces you to use empty lines!

 #
 # Well intended, but not ready for this

@@ -262,8 +253,8 @@ linters:
 - dupl # Tool for code clone detection
 - forcetypeassert # finds forced type assertions
 - godox # Tool for detection of FIXME, TODO and other comment keywords
-- goerr113 # Go linter to check the errors handling expressions
-- paralleltest # Detects missing usage of t.Parallel() method in your Go test
+- goerr113 # Golang linter to check the errors handling expressions
+- paralleltest # paralleltest detects missing usage of t.Parallel() method in your Go test
 - testpackage # linter that makes you use a separate _test package

 #

@@ -271,7 +262,7 @@ linters:
 #
 - exhaustruct # Checks if all structure fields are initialized
 - forbidigo # Forbids identifiers
-- gochecknoglobals # Check that no global variables exist.
+- gochecknoglobals # check that no global variables exist
 - goconst # Finds repeated strings that could be replaced by a constant
 - stylecheck # Stylecheck is a replacement for golint
 - tagliatelle # Checks the struct tags.

@@ -288,21 +279,15 @@ issues:
 # “Look, that’s why there’s rules, understand? So that you think before you
 # break ‘em.” ― Terry Pratchett

-exclude-dirs:
-- pkg/time/rate
-
-exclude-files:
-- pkg/yamlpatch/merge.go
-- pkg/yamlpatch/merge_test.go
-
-exclude-generated-strict: true
-
 max-issues-per-linter: 0
 max-same-issues: 0
 exclude-rules:

 # Won't fix:

+- path: go.mod
+text: "replacement are not allowed: golang.org/x/time/rate"
+
 # `err` is often shadowed, we may continue to do it
 - linters:
 - govet

@@ -380,3 +365,13 @@ issues:
 - linters:
 - nonamedreturns
 text: "named return .* with type .* found"
+
+#
+# Will fix, might be trickier
+#
+
+# https://github.com/pkg/errors/issues/245
+- linters:
+- depguard
+text: "import 'github.com/pkg/errors' is not allowed .*"
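The two sides of the govet block above differ only in how shadow checking is configured (enable-all with a couple of disabled analyzers versus the older check-shadowing flag), and both sides keep the exclude-rule for shadowed `err` variables. As a hedged illustration only, with a hypothetical helper that is not part of the repository, this Go sketch shows the pattern that the shadow check reports and that the exclude-rule tolerates:

```go
package main

import (
	"fmt"
	"os"
	"strconv"
)

// portFromEnv is a hypothetical example: the inner ":=" creates new `port`
// and `err` variables that shadow the outer ones, which is exactly what
// govet's shadow analyzer flags.
func portFromEnv() (int, error) {
	var err error

	port := 8080

	if v := os.Getenv("PORT"); v != "" {
		port, err := strconv.Atoi(v) // shadows the outer port and err
		if err != nil {
			return 0, fmt.Errorf("parsing PORT: %w", err)
		}

		return port, nil
	}

	return port, err // the outer err is still nil here
}

func main() {
	p, err := portFromEnv()
	fmt.Println(p, err)
}
```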
Dockerfile (15 changes)

@@ -1,5 +1,5 @@
 # vim: set ft=dockerfile:
-FROM golang:1.22.2-alpine3.18 AS build
+FROM golang:1.21.7-alpine3.18 AS build

 ARG BUILD_VERSION

@@ -16,7 +16,7 @@ RUN apk add --no-cache git g++ gcc libc-dev make bash gettext binutils-gold core
 cd re2-${RE2_VERSION} && \
 make install && \
 echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \
-go install github.com/mikefarah/yq/v4@v4.43.1
+go install github.com/mikefarah/yq/v4@v4.40.4

 COPY . .

@@ -43,12 +43,11 @@ COPY --from=build /go/bin/yq /usr/local/bin/crowdsec /usr/local/bin/cscli /usr/l
 COPY --from=build /etc/crowdsec /staging/etc/crowdsec
 COPY --from=build /go/src/crowdsec/docker/docker_start.sh /
 COPY --from=build /go/src/crowdsec/docker/config.yaml /staging/etc/crowdsec/config.yaml
-COPY --from=build /var/lib/crowdsec /staging/var/lib/crowdsec
 RUN yq -n '.url="http://0.0.0.0:8080"' | install -m 0600 /dev/stdin /staging/etc/crowdsec/local_api_credentials.yaml

 ENTRYPOINT /bin/bash /docker_start.sh

-FROM slim as full
+FROM slim as plugins

 # Due to the wizard using cp -n, we have to copy the config files directly from the source as -n does not exist in busybox cp
 # The files are here for reference, as users will need to mount a new version to be actually able to use notifications

@@ -61,3 +60,11 @@ COPY --from=build \
 /staging/etc/crowdsec/notifications/

 COPY --from=build /usr/local/lib/crowdsec/plugins /usr/local/lib/crowdsec/plugins
+
+FROM slim as geoip
+
+COPY --from=build /var/lib/crowdsec /staging/var/lib/crowdsec
+
+FROM plugins as full
+
+COPY --from=build /var/lib/crowdsec /staging/var/lib/crowdsec
(unnamed Dockerfile variant)

@@ -1,5 +1,5 @@
 # vim: set ft=dockerfile:
-FROM golang:1.22.2-bookworm AS build
+FROM golang:1.21.7-bookworm AS build

 ARG BUILD_VERSION

@@ -21,7 +21,7 @@ RUN apt-get update && \
 make && \
 make install && \
 echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \
-go install github.com/mikefarah/yq/v4@v4.43.1
+go install github.com/mikefarah/yq/v4@v4.40.4

 COPY . .
Makefile (7 changes)

@@ -38,6 +38,7 @@ BUILD_CODENAME ?= alphaga

 CROWDSEC_FOLDER = ./cmd/crowdsec
 CSCLI_FOLDER = ./cmd/crowdsec-cli/
+CSCTI_FOLDER = ./cmd/cscti/
 PLUGINS_DIR_PREFIX = ./cmd/notification-

 CROWDSEC_BIN = crowdsec$(EXT)

@@ -198,6 +199,10 @@ clean: clean-debian clean-rpm testclean ## Remove build artifacts
 cscli: goversion ## Build cscli
 @$(MAKE) -C $(CSCLI_FOLDER) build $(MAKE_FLAGS)

+.PHONY: cscti
+cscti: goversion ## Build cscti
+@$(MAKE) -C $(CSCTI_FOLDER) build $(MAKE_FLAGS)
+
 .PHONY: crowdsec
 crowdsec: goversion ## Build crowdsec
 @$(MAKE) -C $(CROWDSEC_FOLDER) build $(MAKE_FLAGS)

@@ -206,6 +211,8 @@ crowdsec: goversion ## Build crowdsec
 generate: ## Generate code for the database and APIs
 $(GO) generate ./pkg/database/ent
 $(GO) generate ./pkg/models
+$(GO) generate ./pkg/cti
+@echo "Code generation complete."

 .PHONY: testclean
 testclean: bats-clean ## Remove test artifacts
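The branch side of the Makefile adds a cscti build target and a `$(GO) generate ./pkg/cti` step, and the .golangci.yml hunks list pkg/cti/client.go and pkg/cti/types.go as generated files. The generator command itself is not visible in this compare, so the following Go file is only a hypothetical sketch of how a go:generate hook in pkg/cti could be wired up; the real directive and tool may differ:

```go
// Package cti is the CrowdSec CTI client package referenced by the new
// Makefile targets. This file is a hypothetical illustration only: the actual
// generator behind `make generate` / `go generate ./pkg/cti` is not shown in
// this compare view.
package cti

// The directive below is what `go generate ./pkg/cti` would pick up; the real
// project would replace the echo with its client/types generator.
//go:generate echo "generate pkg/cti client.go and types.go here"
```

With such a hook in place, `make cscti` builds ./cmd/cscti with the usual MAKE_FLAGS, mirroring the existing cscli target shown above.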
(unnamed Azure DevOps pipeline definition)

@@ -15,13 +15,19 @@ pool:
 stages:
 - stage: Build
 jobs:
-- job: Build
+- job:
 displayName: "Build"
 steps:
+- task: DotNetCoreCLI@2
+displayName: "Install SignClient"
+inputs:
+command: 'custom'
+custom: 'tool'
+arguments: 'install --global SignClient --version 1.3.155'
 - task: GoTool@0
 displayName: "Install Go"
 inputs:
-version: '1.22.2'
+version: '1.21.7'

 - pwsh: |
 choco install -y make

@@ -33,14 +39,24 @@ stages:
 #we are not calling make windows_installer because we want to sign the binaries before they are added to the MSI
 script: |
 make build BUILD_RE2_WASM=1
+- task: AzureKeyVault@2
+inputs:
+azureSubscription: 'Azure subscription 1(8a93ab40-7e99-445e-ad47-0f6a3e2ef546)'
+KeyVaultName: 'CodeSigningSecrets'
+SecretsFilter: 'CodeSigningUser,CodeSigningPassword'
+RunAsPreJob: false
+
+- task: DownloadSecureFile@1
+inputs:
+secureFile: appsettings.json

+- pwsh: |
+SignClient.exe Sign --name "crowdsec-binaries" `
+--input "**/*.exe" --config (Join-Path -Path $(Agent.TempDirectory) -ChildPath "appsettings.json") `
+--user $(CodeSigningUser) --secret '$(CodeSigningPassword)'
+displayName: "Sign Crowdsec binaries + plugins"
 - pwsh: |
 $build_version=$env:BUILD_SOURCEBRANCHNAME
-#Override the version if it's set in the pipeline
-if ( ${env:USERBUILDVERSION} -ne "")
-{
-$build_version = ${env:USERBUILDVERSION}
-}
 if ($build_version.StartsWith("v"))
 {
 $build_version = $build_version.Substring(1)

@@ -53,112 +69,35 @@ stages:
 displayName: GetCrowdsecVersion
 name: GetCrowdsecVersion
 - pwsh: |
-Get-ChildItem -Path .\cmd -Directory | ForEach-Object {
-$dirName = $_.Name
-Get-ChildItem -Path .\cmd\$dirName -File -Filter '*.exe' | ForEach-Object {
-$fileName = $_.Name
-$destDir = Join-Path $(Build.ArtifactStagingDirectory) cmd\$dirName
-New-Item -ItemType Directory -Path $destDir -Force
-Copy-Item -Path .\cmd\$dirName\$fileName -Destination $destDir
-}
-}
-displayName: "Copy binaries to staging directory"
-- task: PublishPipelineArtifact@1
-inputs:
-targetPath: '$(Build.ArtifactStagingDirectory)'
-artifact: 'unsigned_binaries'
-displayName: "Upload binaries artifact"
-
-- stage: Sign
-dependsOn: Build
-variables:
-- group: 'FOSS Build Variables'
-- name: BuildVersion
-value: $[ stageDependencies.Build.Build.outputs['GetCrowdsecVersion.BuildVersion'] ]
-condition: succeeded()
-jobs:
-- job: Sign
-displayName: "Sign"
-steps:
-- download: current
-artifact: unsigned_binaries
-displayName: "Download binaries artifact"
-- task: CopyFiles@2
-inputs:
-SourceFolder: '$(Pipeline.Workspace)/unsigned_binaries'
-TargetFolder: '$(Build.SourcesDirectory)'
-displayName: "Copy binaries to workspace"
-- task: DotNetCoreCLI@2
-displayName: "Install SignTool tool"
-inputs:
-command: 'custom'
-custom: 'tool'
-arguments: install --global sign --version 0.9.0-beta.23127.3
-- task: AzureKeyVault@2
-displayName: "Get signing parameters"
-inputs:
-azureSubscription: "Azure subscription"
-KeyVaultName: "$(KeyVaultName)"
-SecretsFilter: "TenantId,ClientId,ClientSecret,Certificate,KeyVaultUrl"
-- pwsh: |
-sign code azure-key-vault `
-"**/*.exe" `
---base-directory "$(Build.SourcesDirectory)/cmd/" `
---publisher-name "CrowdSec" `
---description "CrowdSec" `
---description-url "https://github.com/crowdsecurity/crowdsec" `
---azure-key-vault-tenant-id "$(TenantId)" `
---azure-key-vault-client-id "$(ClientId)" `
---azure-key-vault-client-secret "$(ClientSecret)" `
---azure-key-vault-certificate "$(Certificate)" `
---azure-key-vault-url "$(KeyVaultUrl)"
-displayName: "Sign crowdsec binaries"
-- pwsh: |
-.\make_installer.ps1 -version '$(BuildVersion)'
+.\make_installer.ps1 -version '$(GetCrowdsecVersion.BuildVersion)'
 displayName: "Build Crowdsec MSI"
 name: BuildMSI

 - pwsh: |
-.\make_chocolatey.ps1 -version '$(BuildVersion)'
+.\make_chocolatey.ps1 -version '$(GetCrowdsecVersion.BuildVersion)'
 displayName: "Build Chocolatey nupkg"

 - pwsh: |
-sign code azure-key-vault `
-"*.msi" `
---base-directory "$(Build.SourcesDirectory)" `
---publisher-name "CrowdSec" `
---description "CrowdSec" `
---description-url "https://github.com/crowdsecurity/crowdsec" `
---azure-key-vault-tenant-id "$(TenantId)" `
---azure-key-vault-client-id "$(ClientId)" `
---azure-key-vault-client-secret "$(ClientSecret)" `
---azure-key-vault-certificate "$(Certificate)" `
---azure-key-vault-url "$(KeyVaultUrl)"
-displayName: "Sign MSI package"
-- pwsh: |
-sign code azure-key-vault `
-"*.nupkg" `
---base-directory "$(Build.SourcesDirectory)" `
---publisher-name "CrowdSec" `
---description "CrowdSec" `
---description-url "https://github.com/crowdsecurity/crowdsec" `
---azure-key-vault-tenant-id "$(TenantId)" `
---azure-key-vault-client-id "$(ClientId)" `
---azure-key-vault-client-secret "$(ClientSecret)" `
---azure-key-vault-certificate "$(Certificate)" `
---azure-key-vault-url "$(KeyVaultUrl)"
-displayName: "Sign nuget package"
-- task: PublishPipelineArtifact@1
+SignClient.exe Sign --name "crowdsec-msi" `
+--input "*.msi" --config (Join-Path -Path $(Agent.TempDirectory) -ChildPath "appsettings.json") `
+--user $(CodeSigningUser) --secret '$(CodeSigningPassword)'
+displayName: "Sign Crowdsec MSI"
+
+- task: PublishBuildArtifacts@1
 inputs:
-targetPath: '$(Build.SourcesDirectory)/crowdsec_$(BuildVersion).msi'
-artifact: 'signed_msi_package'
-displayName: "Upload signed MSI artifact"
-- task: PublishPipelineArtifact@1
+PathtoPublish: '$(Build.Repository.LocalPath)\\crowdsec_$(GetCrowdsecVersion.BuildVersion).msi'
+ArtifactName: 'crowdsec.msi'
+publishLocation: 'Container'
+displayName: "Upload MSI artifact"

+- task: PublishBuildArtifacts@1
 inputs:
-targetPath: '$(Build.SourcesDirectory)/crowdsec.$(BuildVersion).nupkg'
-artifact: 'signed_nuget_package'
-displayName: "Upload signed nuget artifact"
+PathtoPublish: '$(Build.Repository.LocalPath)\\windows\\Chocolatey\\crowdsec\\crowdsec.$(GetCrowdsecVersion.BuildVersion).nupkg'
+ArtifactName: 'crowdsec.nupkg'
+publishLocation: 'Container'
+displayName: "Upload nupkg artifact"
 - stage: Publish
-dependsOn: Sign
+dependsOn: Build
 jobs:
 - deployment: "Publish"
 displayName: "Publish to GitHub"

@@ -180,7 +119,8 @@ stages:
 assetUploadMode: 'replace'
 addChangeLog: false
 isPreRelease: true #we force prerelease because the pipeline is invoked on tag creation, which happens when we do a prerelease
+#the .. is an ugly hack, but I can't find the var that gives D:\a\1 ...
 assets: |
-$(Pipeline.Workspace)/signed_msi_package/*.msi
-$(Pipeline.Workspace)/signed_nuget_package/*.nupkg
+$(Build.ArtifactStagingDirectory)\..\crowdsec.msi/*.msi
+$(Build.ArtifactStagingDirectory)\..\crowdsec.nupkg/*.nupkg
 condition: ne(variables['GetLatestPrelease.LatestPreRelease'], '')
@@ -4,7 +4,6 @@ import (
 	"context"
 	"encoding/csv"
 	"encoding/json"
-	"errors"
 	"fmt"
 	"net/url"
 	"os"
@@ -17,7 +16,7 @@ import (
 	"github.com/go-openapi/strfmt"
 	log "github.com/sirupsen/logrus"
 	"github.com/spf13/cobra"
-	"gopkg.in/yaml.v3"
+	"gopkg.in/yaml.v2"

 	"github.com/crowdsecurity/go-cs-lib/version"

@@ -178,9 +177,9 @@ func (cli *cliAlerts) displayOneAlert(alert *models.Alert, withDetail bool) erro
 	return nil
 }

-type cliAlerts struct {
+type cliAlerts struct{
 	client *apiclient.ApiClient
 	cfg    configGetter
 }

 func NewCLIAlerts(getconfig configGetter) *cliAlerts {
@@ -205,7 +204,6 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
 		if err != nil {
 			return fmt.Errorf("parsing api url %s: %w", apiURL, err)
 		}
-
 		cli.client, err = apiclient.NewClient(&apiclient.Config{
 			MachineID: cfg.API.Client.Credentials.Login,
 			Password:  strfmt.Password(cfg.API.Client.Credentials.Password),
@@ -213,6 +211,7 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
 			URL:           apiURL,
 			VersionPrefix: "v1",
 		})
+
 		if err != nil {
 			return fmt.Errorf("new api client: %w", err)
 		}
@@ -230,7 +229,7 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
 }

 func (cli *cliAlerts) NewListCmd() *cobra.Command {
-	alertListFilter := apiclient.AlertsListOpts{
+	var alertListFilter = apiclient.AlertsListOpts{
 		ScopeEquals:    new(string),
 		ValueEquals:    new(string),
 		ScenarioEquals: new(string),
@@ -254,10 +253,8 @@ func (cli *cliAlerts) NewListCmd() *cobra.Command {
 		Example: `cscli alerts list
 cscli alerts list --ip 1.2.3.4
 cscli alerts list --range 1.2.3.0/24
-cscli alerts list --origin lists
 cscli alerts list -s crowdsecurity/ssh-bf
 cscli alerts list --type ban`,
-		Long:              `List alerts with optional filters`,
 		DisableAutoGenTag: true,
 		RunE: func(cmd *cobra.Command, _ []string) error {
 			if err := manageCliDecisionAlerts(alertListFilter.IPEquals, alertListFilter.RangeEquals,
@@ -361,10 +358,10 @@ func (cli *cliAlerts) NewDeleteCmd() *cobra.Command {
 	var (
 		ActiveDecision *bool
 		AlertDeleteAll bool
 		delAlertByID   string
 	)

-	alertDeleteFilter := apiclient.AlertsDeleteOpts{
+	var alertDeleteFilter = apiclient.AlertsDeleteOpts{
 		ScopeEquals:    new(string),
 		ValueEquals:    new(string),
 		ScenarioEquals: new(string),
@@ -392,7 +389,7 @@ cscli alerts delete -s crowdsecurity/ssh-bf"`,
 				*alertDeleteFilter.ScenarioEquals == "" && *alertDeleteFilter.IPEquals == "" &&
 				*alertDeleteFilter.RangeEquals == "" && delAlertByID == "" {
 				_ = cmd.Usage()
-				return errors.New("at least one filter or --all must be specified")
+				return fmt.Errorf("at least one filter or --all must be specified")
 			}

 			return nil
@@ -452,7 +449,7 @@ cscli alerts delete -s crowdsecurity/ssh-bf"`,
 			return nil
 		},
 	}

 	flags := cmd.Flags()
 	flags.SortFlags = false
 	flags.StringVar(alertDeleteFilter.ScopeEquals, "scope", "", "the scope (ie. ip,range)")
@@ -479,7 +476,7 @@ func (cli *cliAlerts) NewInspectCmd() *cobra.Command {
 		cfg := cli.cfg()
 		if len(args) == 0 {
 			printHelp(cmd)
-			return errors.New("missing alert_id")
+			return fmt.Errorf("missing alert_id")
 		}
 		for _, alertID := range args {
 			id, err := strconv.Atoi(alertID)
@@ -523,7 +520,7 @@ func (cli *cliAlerts) NewInspectCmd() *cobra.Command {
 func (cli *cliAlerts) NewFlushCmd() *cobra.Command {
 	var (
 		maxItems int
 		maxAge   string
 	)

 	cmd := &cobra.Command{
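Most of the substantive churn in this file is the swap between `errors.New` (master side) and `fmt.Errorf` (branch side) for fixed error messages. A minimal, self-contained sketch of the distinction, with illustrative names that are not taken from the repository:

```go
package main

import (
	"errors"
	"fmt"
)

// errMissingID is a fixed-message error; errors.New is enough here and
// sidesteps linter warnings about fmt.Errorf without a format verb.
var errMissingID = errors.New("missing alert_id")

func lookup(id string) error {
	if id == "" {
		return errMissingID
	}
	// fmt.Errorf with %w remains the right tool when wrapping a cause.
	if err := parse(id); err != nil {
		return fmt.Errorf("parsing alert id %s: %w", id, err)
	}
	return nil
}

func parse(id string) error { return nil }

func main() {
	fmt.Println(lookup(""))   // missing alert_id
	fmt.Println(lookup("42")) // <nil>
}
```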
@@ -259,7 +259,7 @@ func (cli *cliBouncers) prune(duration time.Duration, force bool) error {
 		}
 	}

-	bouncers, err := cli.db.QueryBouncersLastPulltimeLT(time.Now().UTC().Add(-duration))
+	bouncers, err := cli.db.QueryBouncersLastPulltimeLT(time.Now().UTC().Add(duration))
 	if err != nil {
 		return fmt.Errorf("unable to query bouncers: %w", err)
 	}
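The only functional difference in this hunk is the sign of the offset passed to `QueryBouncersLastPulltimeLT`. A standalone sketch, assuming the query selects bouncers whose last pull time is strictly before the cutoff:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	d := 60 * time.Minute
	now := time.Now().UTC()

	// Cutoff for "no pull in the last d": strictly in the past.
	staleBefore := now.Add(-d)
	// Adding a positive duration points into the future instead, so a
	// "last pull time < cutoff" query would match nearly every bouncer.
	futureCutoff := now.Add(d)

	fmt.Println("stale if last pull before:", staleBefore.Format(time.RFC3339))
	fmt.Println("future cutoff:", futureCutoff.Format(time.RFC3339))
}
```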
@@ -10,7 +10,7 @@ import (
 	"github.com/go-openapi/strfmt"
 	log "github.com/sirupsen/logrus"
 	"github.com/spf13/cobra"
-	"gopkg.in/yaml.v3"
+	"gopkg.in/yaml.v2"

 	"github.com/crowdsecurity/go-cs-lib/version"

@@ -85,6 +85,7 @@ func (cli *cliCapi) register(capiUserPrefix string, outputFile string) error {
 		URL:           apiurl,
 		VersionPrefix: CAPIURLPrefix,
 	}, nil)
+
 	if err != nil {
 		return fmt.Errorf("api client register ('%s'): %w", types.CAPIBaseURL, err)
 	}
@@ -174,7 +175,7 @@ func (cli *cliCapi) status() error {
 		return err
 	}

-	scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
+	scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
 	if err != nil {
 		return fmt.Errorf("failed to get scenarios: %w", err)
 	}
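Both sides collect installed scenario names before talking to the API; only the hub helper name differs (`GetInstalledNamesByType` on master, `GetInstalledItemNames` on the branch). A hedged sketch of the calling pattern with a stand-in interface, since the real `cwhub.Hub` type is not part of this diff:

```go
package main

import "fmt"

// hubLister is a stand-in for the slice of the hub API used above;
// the real signature lives in pkg/cwhub and may differ in detail.
type hubLister interface {
	GetInstalledNamesByType(itemType string) ([]string, error)
}

func installedScenarios(hub hubLister) ([]string, error) {
	scenarios, err := hub.GetInstalledNamesByType("scenarios")
	if err != nil {
		return nil, fmt.Errorf("failed to get scenarios: %w", err)
	}
	return scenarios, nil
}

type fakeHub struct{}

func (fakeHub) GetInstalledNamesByType(string) ([]string, error) {
	return []string{"crowdsecurity/ssh-bf"}, nil
}

func main() {
	names, _ := installedScenarios(fakeHub{})
	fmt.Println(names) // [crowdsecurity/ssh-bf]
}
```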
@@ -10,15 +10,13 @@ import (
 	"github.com/sanity-io/litter"
 	log "github.com/sirupsen/logrus"
 	"github.com/spf13/cobra"
-	"gopkg.in/yaml.v3"
+	"gopkg.in/yaml.v2"

 	"github.com/crowdsecurity/crowdsec/pkg/csconfig"
 	"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
 )

-func (cli *cliConfig) showKey(key string) error {
-	cfg := cli.cfg()
-
+func showConfigKey(key string) error {
 	type Env struct {
 		Config *csconfig.Config
 	}
@@ -32,15 +30,15 @@ func (cli *cliConfig) showKey(key string) error {
 		return err
 	}

-	output, err := expr.Run(program, Env{Config: cfg})
+	output, err := expr.Run(program, Env{Config: csConfig})
 	if err != nil {
 		return err
 	}

-	switch cfg.Cscli.Output {
+	switch csConfig.Cscli.Output {
 	case "human", "raw":
 		// Don't use litter for strings, it adds quotes
-		// that would break compatibility with previous versions
+		// that we didn't have before
 		switch output.(type) {
 		case string:
 			fmt.Println(output)
@@ -53,14 +51,13 @@ func (cli *cliConfig) showKey(key string) error {
 			return fmt.Errorf("failed to marshal configuration: %w", err)
 		}

-		fmt.Println(string(data))
+		fmt.Printf("%s\n", string(data))
 	}

 	return nil
 }

-func (cli *cliConfig) template() string {
-	return `Global:
+var configShowTemplate = `Global:

 {{- if .ConfigPaths }}
 - Configuration Folder : {{.ConfigPaths.ConfigDir}}
@@ -103,7 +100,6 @@ API Client:
 {{- if .API.Server }}
 Local API Server{{if and .API.Server.Enable (not (ValueBool .API.Server.Enable))}} (disabled){{end}}:
 - Listen URL : {{.API.Server.ListenURI}}
-- Listen Socket : {{.API.Server.ListenSocket}}
 - Profile File : {{.API.Server.ProfilesPath}}

 {{- if .API.Server.TLS }}
@@ -185,11 +181,19 @@ Central API:
 {{- end }}
 {{- end }}
 `
-}

-func (cli *cliConfig) show() error {
+func (cli *cliConfig) show(key string) error {
 	cfg := cli.cfg()

+	if err := cfg.LoadAPIClient(); err != nil {
+		log.Errorf("failed to load API client configuration: %s", err)
+		// don't return, we can still show the configuration
+	}
+
+	if key != "" {
+		return showConfigKey(key)
+	}
+
 	switch cfg.Cscli.Output {
 	case "human":
 		// The tests on .Enable look funny because the option has a true default which has
@@ -200,7 +204,7 @@ func (cli *cliConfig) show() error {
 		"ValueBool": func(b *bool) bool { return b != nil && *b },
 	}

-	tmp, err := template.New("config").Funcs(funcs).Parse(cli.template())
+	tmp, err := template.New("config").Funcs(funcs).Parse(configShowTemplate)
 	if err != nil {
 		return err
 	}
@@ -215,14 +219,14 @@ func (cli *cliConfig) show() error {
 			return fmt.Errorf("failed to marshal configuration: %w", err)
 		}

-		fmt.Println(string(data))
+		fmt.Printf("%s\n", string(data))
 	case "raw":
 		data, err := yaml.Marshal(cfg)
 		if err != nil {
 			return fmt.Errorf("failed to marshal configuration: %w", err)
 		}

-		fmt.Println(string(data))
+		fmt.Printf("%s\n", string(data))
 	}

 	return nil
@@ -238,16 +242,7 @@ func (cli *cliConfig) newShowCmd() *cobra.Command {
 		Args:              cobra.ExactArgs(0),
 		DisableAutoGenTag: true,
 		RunE: func(_ *cobra.Command, _ []string) error {
-			if err := cli.cfg().LoadAPIClient(); err != nil {
-				log.Errorf("failed to load API client configuration: %s", err)
-				// don't return, we can still show the configuration
-			}
-
-			if key != "" {
-				return cli.showKey(key)
-			}
-
-			return cli.show()
+			return cli.show(key)
 		},
 	}

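`show()` renders the configuration through `text/template` with a small `FuncMap` (the `ValueBool` helper that tolerates a nil `*bool`). A reduced sketch of that pattern; the template text here is illustrative, not the real `configShowTemplate`:

```go
package main

import (
	"os"
	"text/template"
)

type config struct {
	Enable *bool
}

func main() {
	funcs := template.FuncMap{
		// ValueBool treats a nil pointer as false, mirroring the helper above.
		"ValueBool": func(b *bool) bool { return b != nil && *b },
	}

	tmpl := template.Must(template.New("config").Funcs(funcs).Parse(
		"Server enabled: {{ ValueBool .Enable }}\n"))

	enabled := true
	_ = tmpl.Execute(os.Stdout, config{Enable: &enabled}) // Server enabled: true
	_ = tmpl.Execute(os.Stdout, config{})                 // Server enabled: false
}
```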
@@ -4,11 +4,9 @@ import (
 	"context"
 	"encoding/csv"
 	"encoding/json"
-	"errors"
 	"fmt"
 	"net/url"
 	"os"
-	"strconv"
 	"strings"

 	"github.com/fatih/color"
@@ -38,7 +36,7 @@ func NewCLIConsole(cfg configGetter) *cliConsole {
 }

 func (cli *cliConsole) NewCommand() *cobra.Command {
-	cmd := &cobra.Command{
+	var cmd = &cobra.Command{
 		Use:   "console [action]",
 		Short: "Manage interaction with Crowdsec console (https://app.crowdsec.net)",
 		Args:  cobra.MinimumNArgs(1),
@@ -103,7 +101,7 @@ After running this command your will need to validate the enrollment in the weba
 		return err
 	}

-	scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
+	scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
 	if err != nil {
 		return fmt.Errorf("failed to get installed scenarios: %w", err)
 	}
@@ -205,7 +203,7 @@ Enable given information push to the central API. Allows to empower the console`
 				log.Infof("All features have been enabled successfully")
 			} else {
 				if len(args) == 0 {
-					return errors.New("you must specify at least one feature to enable")
+					return fmt.Errorf("you must specify at least one feature to enable")
 				}
 				if err := cli.setConsoleOpts(args, true); err != nil {
 					return err
@@ -290,11 +288,11 @@ func (cli *cliConsole) newStatusCmd() *cobra.Command {
 				}

 				rows := [][]string{
-					{csconfig.SEND_MANUAL_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareManualDecisions)},
-					{csconfig.SEND_CUSTOM_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareCustomScenarios)},
-					{csconfig.SEND_TAINTED_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareTaintedScenarios)},
-					{csconfig.SEND_CONTEXT, strconv.FormatBool(*consoleCfg.ShareContext)},
-					{csconfig.CONSOLE_MANAGEMENT, strconv.FormatBool(*consoleCfg.ConsoleManagement)},
+					{csconfig.SEND_MANUAL_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareManualDecisions)},
+					{csconfig.SEND_CUSTOM_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareCustomScenarios)},
+					{csconfig.SEND_TAINTED_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareTaintedScenarios)},
+					{csconfig.SEND_CONTEXT, fmt.Sprintf("%t", *consoleCfg.ShareContext)},
+					{csconfig.CONSOLE_MANAGEMENT, fmt.Sprintf("%t", *consoleCfg.ConsoleManagement)},
 				}
 				for _, row := range rows {
 					err = csvwriter.Write(row)
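The status-table hunk swaps `fmt.Sprintf("%t", v)` for `strconv.FormatBool(v)`. Both render "true"/"false"; the latter simply avoids the format machinery, as this tiny sketch shows:

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	share := true

	viaSprintf := fmt.Sprintf("%t", share)  // branch side
	viaStrconv := strconv.FormatBool(share) // master side

	fmt.Println(viaSprintf, viaStrconv, viaSprintf == viaStrconv) // true true true
}
```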
@@ -9,6 +9,7 @@ import (
 	log "github.com/sirupsen/logrus"
 )

+
 /*help to copy the file, ioutil doesn't offer the feature*/

 func copyFileContents(src, dst string) (err error) {
@@ -68,7 +69,6 @@ func CopyFile(sourceSymLink, destinationFile string) error {
 	if !(destinationFileStat.Mode().IsRegular()) {
 		return fmt.Errorf("copyFile: non-regular destination file %s (%q)", destinationFileStat.Name(), destinationFileStat.Mode().String())
 	}
-
 	if os.SameFile(sourceFileStat, destinationFileStat) {
 		return err
 	}
@@ -80,3 +80,4 @@ func CopyFile(sourceSymLink, destinationFile string) error {

 	return err
 }
+
@@ -4,7 +4,6 @@ import (
 	"context"
 	"encoding/csv"
 	"encoding/json"
-	"errors"
 	"fmt"
 	"net/url"
 	"os"
@@ -196,7 +195,7 @@ func (cli *cliDecisions) newListCmd() *cobra.Command {
 		Example: `cscli decisions list -i 1.2.3.4
 cscli decisions list -r 1.2.3.0/24
 cscli decisions list -s crowdsecurity/ssh-bf
-cscli decisions list --origin lists --scenario list_name
+cscli decisions list -t ban
 `,
 		Args:              cobra.ExactArgs(0),
 		DisableAutoGenTag: true,
@@ -347,7 +346,7 @@ cscli decisions add --scope username --value foobar
 			addScope = types.Range
 		} else if addValue == "" {
 			printHelp(cmd)
-			return errors.New("missing arguments, a value is required (--ip, --range or --scope and --value)")
+			return fmt.Errorf("missing arguments, a value is required (--ip, --range or --scope and --value)")
 		}

 		if addReason == "" {
@@ -372,7 +371,7 @@ cscli decisions add --scope username --value foobar
 			Scenario:        &addReason,
 			ScenarioVersion: &empty,
 			Simulated:       &simulated,
-			// setting empty scope/value broke plugins, and it didn't seem to be needed anymore w/ latest papi changes
+			//setting empty scope/value broke plugins, and it didn't seem to be needed anymore w/ latest papi changes
 			Source: &models.Source{
 				AsName:   empty,
 				AsNumber: empty,
@@ -412,7 +411,7 @@ cscli decisions add --scope username --value foobar
 }

 func (cli *cliDecisions) newDeleteCmd() *cobra.Command {
-	delFilter := apiclient.DecisionsDeleteOpts{
+	var delFilter = apiclient.DecisionsDeleteOpts{
 		ScopeEquals:    new(string),
 		ValueEquals:    new(string),
 		TypeEquals:     new(string),
@@ -437,7 +436,6 @@ func (cli *cliDecisions) newDeleteCmd() *cobra.Command {
 cscli decisions delete -i 1.2.3.4
 cscli decisions delete --id 42
 cscli decisions delete --type captcha
-cscli decisions delete --origin lists --scenario list_name
 `,
 		/*TBD : refaire le Long/Example*/
 		PreRunE: func(cmd *cobra.Command, _ []string) error {
@@ -449,7 +447,7 @@ cscli decisions delete --origin lists --scenario list_name
 				*delFilter.RangeEquals == "" && *delFilter.ScenarioEquals == "" &&
 				*delFilter.OriginEquals == "" && delDecisionID == "" {
 				cmd.Usage()
-				return errors.New("at least one filter or --all must be specified")
+				return fmt.Errorf("at least one filter or --all must be specified")
 			}

 			return nil
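Several hunks here (and in the alerts file) only switch the declaration style of the filter structs, `var x = T{...}` versus `x := T{...}`. Inside a function the two forms are equivalent; a minimal illustration with a placeholder struct rather than the real `DecisionsDeleteOpts`:

```go
package main

import "fmt"

// opts stands in for the apiclient filter structs; field names are illustrative.
type opts struct {
	ScopeEquals *string
	ValueEquals *string
}

func main() {
	// Older style kept on the branch side:
	var a = opts{ScopeEquals: new(string), ValueEquals: new(string)}

	// Short declaration preferred on master; same type, same zero values.
	b := opts{ScopeEquals: new(string), ValueEquals: new(string)}

	fmt.Println(*a.ScopeEquals == *b.ScopeEquals) // true
}
```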
@@ -5,7 +5,6 @@ import (
 	"bytes"
 	"context"
 	"encoding/json"
-	"errors"
 	"fmt"
 	"io"
 	"os"
@@ -82,7 +81,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 	}

 	if defaultDuration == "" {
-		return errors.New("--duration cannot be empty")
+		return fmt.Errorf("--duration cannot be empty")
 	}

 	defaultScope, err := flags.GetString("scope")
@@ -91,7 +90,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 	}

 	if defaultScope == "" {
-		return errors.New("--scope cannot be empty")
+		return fmt.Errorf("--scope cannot be empty")
 	}

 	defaultReason, err := flags.GetString("reason")
@@ -100,7 +99,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 	}

 	if defaultReason == "" {
-		return errors.New("--reason cannot be empty")
+		return fmt.Errorf("--reason cannot be empty")
 	}

 	defaultType, err := flags.GetString("type")
@@ -109,7 +108,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 	}

 	if defaultType == "" {
-		return errors.New("--type cannot be empty")
+		return fmt.Errorf("--type cannot be empty")
 	}

 	batchSize, err := flags.GetInt("batch")
@@ -137,7 +136,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 	}

 	if format == "" {
-		return errors.New("unable to guess format from file extension, please provide a format with --format flag")
+		return fmt.Errorf("unable to guess format from file extension, please provide a format with --format flag")
 	}

 	if input == "-" {
@@ -236,6 +235,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
 	return nil
 }

+
 func (cli *cliDecisions) newImportCmd() *cobra.Command {
 	cmd := &cobra.Command{
 		Use:   "import [options]",
@@ -39,10 +39,8 @@ id: %s
 title: %s
 ---
 `
-
 	name := filepath.Base(filename)
 	base := strings.TrimSuffix(name, filepath.Ext(name))
-
 	return fmt.Sprintf(header, base, strings.ReplaceAll(base, "_", " "))
 }

@@ -83,7 +83,7 @@ tail -n 5 myfile.log | cscli explain --type nginx -f -
 		PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
 			fileInfo, _ := os.Stdin.Stat()
 			if cli.flags.logFile == "-" && ((fileInfo.Mode() & os.ModeCharDevice) == os.ModeCharDevice) {
-				return errors.New("the option -f - is intended to work with pipes")
+				return fmt.Errorf("the option -f - is intended to work with pipes")
 			}

 			return nil
@@ -160,22 +160,18 @@ func (cli *cliExplain) run() error {
 	} else if logFile == "-" {
 		reader := bufio.NewReader(os.Stdin)
 		errCount := 0
-
 		for {
 			input, err := reader.ReadBytes('\n')
 			if err != nil && errors.Is(err, io.EOF) {
 				break
 			}
-
 			if len(input) > 1 {
 				_, err = f.Write(input)
 			}
-
 			if err != nil || len(input) <= 1 {
 				errCount++
 			}
 		}
-
 		if errCount > 0 {
 			log.Warnf("Failed to write %d lines to %s", errCount, tmpFile)
 		}
@@ -211,7 +207,7 @@ func (cli *cliExplain) run() error {
 	}

 	if dsn == "" {
-		return errors.New("no acquisition (--file or --dsn) provided, can't run cscli test")
+		return fmt.Errorf("no acquisition (--file or --dsn) provided, can't run cscli test")
 	}

 	cmdArgs := []string{"-c", ConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", dir, "-no-api"}
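The `PersistentPreRunE` above rejects `-f -` when stdin is an interactive terminal rather than a pipe, by checking the `ModeCharDevice` bit on `os.Stdin.Stat()`. A standalone sketch of that check:

```go
package main

import (
	"fmt"
	"os"
)

// stdinIsPipe reports whether stdin comes from a pipe or a redirect
// rather than an interactive terminal (a character device).
func stdinIsPipe() bool {
	fileInfo, err := os.Stdin.Stat()
	if err != nil {
		return false
	}
	return fileInfo.Mode()&os.ModeCharDevice == 0
}

func main() {
	if !stdinIsPipe() {
		fmt.Println("the option -f - is intended to work with pipes")
		return
	}
	fmt.Println("reading log lines from stdin...")
}
```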
@@ -13,7 +13,7 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-type cliHub struct{
+type cliHub struct {
 	cfg configGetter
 }

@@ -137,7 +137,7 @@ func (cli *cliHub) upgrade(force bool) error {
 	}

 	for _, itemType := range cwhub.ItemTypes {
-		items, err := hub.GetInstalledItemsByType(itemType)
+		items, err := hub.GetInstalledItems(itemType)
 		if err != nil {
 			return err
 		}
@@ -13,9 +13,8 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLIAppsecConfig(cfg configGetter) *cliItem {
+func NewCLIAppsecConfig() *cliItem {
 	return &cliItem{
-		cfg:       cfg,
 		name:      cwhub.APPSEC_CONFIGS,
 		singular:  "appsec-config",
 		oneOrMore: "appsec-config(s)",
@@ -47,7 +46,7 @@ cscli appsec-configs list crowdsecurity/vpatch`,
 	}
 }

-func NewCLIAppsecRule(cfg configGetter) *cliItem {
+func NewCLIAppsecRule() *cliItem {
 	inspectDetail := func(item *cwhub.Item) error {
 		// Only show the converted rules in human mode
 		if csConfig.Cscli.Output != "human" {
@@ -58,11 +57,11 @@ func NewCLIAppsecRule(cfg configGetter) *cliItem {

 		yamlContent, err := os.ReadFile(item.State.LocalPath)
 		if err != nil {
-			return fmt.Errorf("unable to read file %s: %w", item.State.LocalPath, err)
+			return fmt.Errorf("unable to read file %s : %s", item.State.LocalPath, err)
 		}

 		if err := yaml.Unmarshal(yamlContent, &appsecRule); err != nil {
-			return fmt.Errorf("unable to unmarshal yaml file %s: %w", item.State.LocalPath, err)
+			return fmt.Errorf("unable to unmarshal yaml file %s : %s", item.State.LocalPath, err)
 		}

 		for _, ruleType := range appsec_rule.SupportedTypes() {
@@ -71,7 +70,7 @@ func NewCLIAppsecRule(cfg configGetter) *cliItem {
 			for _, rule := range appsecRule.Rules {
 				convertedRule, _, err := rule.Convert(ruleType, appsecRule.Name)
 				if err != nil {
-					return fmt.Errorf("unable to convert rule %s: %w", rule.Name, err)
+					return fmt.Errorf("unable to convert rule %s : %s", rule.Name, err)
 				}

 				fmt.Println(convertedRule)
@@ -89,7 +88,6 @@ func NewCLIAppsecRule(cfg configGetter) *cliItem {
 	}

 	return &cliItem{
-		cfg:       cfg,
 		name:      "appsec-rules",
 		singular:  "appsec-rule",
 		oneOrMore: "appsec-rule(s)",
@@ -4,9 +4,8 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLICollection(cfg configGetter) *cliItem {
+func NewCLICollection() *cliItem {
 	return &cliItem{
-		cfg:       cfg,
 		name:      cwhub.COLLECTIONS,
 		singular:  "collection",
 		oneOrMore: "collection(s)",
@@ -4,9 +4,8 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLIContext(cfg configGetter) *cliItem {
+func NewCLIContext() *cliItem {
 	return &cliItem{
-		cfg:       cfg,
 		name:      cwhub.CONTEXTS,
 		singular:  "context",
 		oneOrMore: "context(s)",
@@ -4,9 +4,8 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLIParser(cfg configGetter) *cliItem {
+func NewCLIParser() *cliItem {
 	return &cliItem{
-		cfg:       cfg,
 		name:      cwhub.PARSERS,
 		singular:  "parser",
 		oneOrMore: "parser(s)",
@@ -4,9 +4,8 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLIPostOverflow(cfg configGetter) *cliItem {
+func NewCLIPostOverflow() *cliItem {
 	return &cliItem{
-		cfg:       cfg,
 		name:      cwhub.POSTOVERFLOWS,
 		singular:  "postoverflow",
 		oneOrMore: "postoverflow(s)",
@@ -4,9 +4,8 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLIScenario(cfg configGetter) *cliItem {
+func NewCLIScenario() *cliItem {
 	return &cliItem{
-		cfg:       cfg,
 		name:      cwhub.SCENARIOS,
 		singular:  "scenario",
 		oneOrMore: "scenario(s)",
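The five constructors above differ only in whether they accept a `configGetter` and store it on the returned `cliItem` (master side) or not (branch side). A reduced sketch of that injection pattern; the types here are simplified stand-ins for the real ones:

```go
package main

import "fmt"

type config struct{ Output string }

// configGetter defers config loading until the command actually runs,
// mirroring the cfg field added to cliItem on the master side.
type configGetter func() *config

type cliItem struct {
	cfg  configGetter
	name string
}

func newCLICollection(cfg configGetter) *cliItem {
	return &cliItem{cfg: cfg, name: "collections"}
}

func main() {
	cli := newCLICollection(func() *config { return &config{Output: "human"} })
	fmt.Println(cli.name, cli.cfg().Output) // collections human
}
```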
@@ -14,7 +14,7 @@ import (
 	"github.com/fatih/color"
 	log "github.com/sirupsen/logrus"
 	"github.com/spf13/cobra"
-	"gopkg.in/yaml.v3"
+	"gopkg.in/yaml.v2"

 	"github.com/crowdsecurity/crowdsec/pkg/dumps"
 	"github.com/crowdsecurity/crowdsec/pkg/emoji"
@@ -135,8 +135,7 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
 			// create empty nuclei template file
 			nucleiFileName := fmt.Sprintf("%s.yaml", testName)
 			nucleiFilePath := filepath.Join(testPath, nucleiFileName)
-
-			nucleiFile, err := os.OpenFile(nucleiFilePath, os.O_RDWR|os.O_CREATE, 0o755)
+			nucleiFile, err := os.OpenFile(nucleiFilePath, os.O_RDWR|os.O_CREATE, 0755)
 			if err != nil {
 				return err
 			}
@@ -406,7 +405,7 @@ func (cli *cliHubTest) NewRunCmd() *cobra.Command {
 }

 func (cli *cliHubTest) NewCleanCmd() *cobra.Command {
-	cmd := &cobra.Command{
+	var cmd = &cobra.Command{
 		Use:   "clean",
 		Short: "clean [test_name]",
 		Args:  cobra.MinimumNArgs(1),
@@ -37,7 +37,6 @@ func ShowMetrics(hubItem *cwhub.Item) error {
 		appsecMetricsTable(color.Output, hubItem.Name, metrics)
 	default: // no metrics for this item type
 	}
-
 	return nil
 }

@@ -50,27 +49,21 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
 		if !strings.HasPrefix(fam.Name, "cs_") {
 			continue
 		}
-
 		log.Tracef("round %d", idx)
-
 		for _, m := range fam.Metrics {
 			metric, ok := m.(prom2json.Metric)
 			if !ok {
 				log.Debugf("failed to convert metric to prom2json.Metric")
 				continue
 			}
-
 			name, ok := metric.Labels["name"]
 			if !ok {
 				log.Debugf("no name in Metric %v", metric.Labels)
 			}
-
 			if name != itemName {
 				continue
 			}
-
 			source, ok := metric.Labels["source"]
-
 			if !ok {
 				log.Debugf("no source in Metric %v", metric.Labels)
 			} else {
@@ -78,15 +71,12 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
 					source = srctype + ":" + source
 				}
 			}

 			value := m.(prom2json.Metric).Value
-
 			fval, err := strconv.ParseFloat(value, 32)
 			if err != nil {
 				log.Errorf("Unexpected int value %s : %s", value, err)
 				continue
 			}
-
 			ival := int(fval)
-
 			switch fam.Name {
@@ -129,7 +119,6 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
 			}
 		}
 	}
-
 	return stats
 }

@@ -147,34 +136,26 @@ func GetScenarioMetric(url string, itemName string) map[string]int {
 		if !strings.HasPrefix(fam.Name, "cs_") {
 			continue
 		}

 		log.Tracef("round %d", idx)
-
 		for _, m := range fam.Metrics {
 			metric, ok := m.(prom2json.Metric)
 			if !ok {
 				log.Debugf("failed to convert metric to prom2json.Metric")
 				continue
 			}
-
 			name, ok := metric.Labels["name"]
-
 			if !ok {
 				log.Debugf("no name in Metric %v", metric.Labels)
 			}
-
 			if name != itemName {
 				continue
 			}
-
 			value := m.(prom2json.Metric).Value
-
 			fval, err := strconv.ParseFloat(value, 32)
 			if err != nil {
 				log.Errorf("Unexpected int value %s : %s", value, err)
 				continue
 			}
-
 			ival := int(fval)
-
 			switch fam.Name {
@@ -193,7 +174,6 @@ func GetScenarioMetric(url string, itemName string) map[string]int {
 			}
 		}
 	}
-
 	return stats
 }

@@ -208,22 +188,17 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
 		if !strings.HasPrefix(fam.Name, "cs_") {
 			continue
 		}
-
 		log.Tracef("round %d", idx)
-
 		for _, m := range fam.Metrics {
 			metric, ok := m.(prom2json.Metric)
 			if !ok {
 				log.Debugf("failed to convert metric to prom2json.Metric")
 				continue
 			}
-
 			name, ok := metric.Labels["rule_name"]
-
 			if !ok {
 				log.Debugf("no rule_name in Metric %v", metric.Labels)
 			}
-
 			if name != itemName {
 				continue
 			}
@@ -234,13 +209,11 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
 			}

 			value := m.(prom2json.Metric).Value

 			fval, err := strconv.ParseFloat(value, 32)
 			if err != nil {
 				log.Errorf("Unexpected int value %s : %s", value, err)
 				continue
 			}
-
 			ival := int(fval)
-
 			switch fam.Name {
@@ -258,7 +231,6 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
 			}
 		}
 	}
-
 	return stats
 }

@@ -275,7 +247,6 @@ func GetPrometheusMetric(url string) []*prom2json.Family {

 	go func() {
 		defer trace.CatchPanic("crowdsec/GetPrometheusMetric")
-
 		err := prom2json.FetchMetricFamilies(url, mfChan, transport)
 		if err != nil {
 			log.Fatalf("failed to fetch prometheus metrics : %v", err)
@@ -286,7 +257,6 @@ func GetPrometheusMetric(url string) []*prom2json.Family {
 	for mf := range mfChan {
 		result = append(result, prom2json.NewFamily(mf))
 	}

 	log.Debugf("Finished reading prometheus output, %d entries", len(result))
-
 	return result
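All the metric helpers follow the same shape: stream families from the Prometheus endpoint with `prom2json.FetchMetricFamilies`, wrap each in a `prom2json.Family`, then filter on labels and sum values. A trimmed sketch of that loop; the channel element type, transport type, and import paths are assumptions based on prom2json's published API, not shown in this diff:

```go
package main

import (
	"fmt"
	"net/http"
	"strconv"
	"strings"

	dto "github.com/prometheus/client_model/go"
	"github.com/prometheus/prom2json"
)

func fetchFamilies(url string) []*prom2json.Family {
	mfChan := make(chan *dto.MetricFamily, 1024)
	transport := &http.Transport{}

	go func() {
		// FetchMetricFamilies closes mfChan when it is done.
		if err := prom2json.FetchMetricFamilies(url, mfChan, transport); err != nil {
			fmt.Println("fetch error:", err)
		}
	}()

	result := []*prom2json.Family{}
	for mf := range mfChan {
		result = append(result, prom2json.NewFamily(mf))
	}
	return result
}

// sumFor adds up the values of all crowdsec ("cs_") metrics whose label matches.
func sumFor(families []*prom2json.Family, label, want string) int {
	total := 0
	for _, fam := range families {
		if !strings.HasPrefix(fam.Name, "cs_") {
			continue
		}
		for _, m := range fam.Metrics {
			metric, ok := m.(prom2json.Metric)
			if !ok || metric.Labels[label] != want {
				continue
			}
			if v, err := strconv.ParseFloat(metric.Value, 32); err == nil {
				total += int(v)
			}
		}
	}
	return total
}

func main() {
	fams := fetchFamilies("http://127.0.0.1:6060/metrics")
	fmt.Println(sumFor(fams, "name", "crowdsecurity/ssh-bf"))
}
```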
@@ -61,7 +61,7 @@ func compInstalledItems(itemType string, args []string, toComplete string) ([]st
 		return nil, cobra.ShellCompDirectiveDefault
 	}

-	items, err := hub.GetInstalledNamesByType(itemType)
+	items, err := hub.GetInstalledItemNames(itemType)
 	if err != nil {
 		cobra.CompDebugln(fmt.Sprintf("list installed %s err: %s", itemType, err), true)
 		return nil, cobra.ShellCompDirectiveDefault
@@ -1,7 +1,6 @@
 package main

 import (
-	"errors"
 	"fmt"
 	"os"
 	"strings"
@@ -29,7 +28,6 @@ type cliHelp struct {
 }

 type cliItem struct {
-	cfg       configGetter
 	name      string // plural, as used in the hub index
 	singular  string
 	oneOrMore string // parenthetical pluralizaion: "parser(s)"
@@ -63,9 +61,7 @@ func (cli cliItem) NewCommand() *cobra.Command {
 }

 func (cli cliItem) install(args []string, downloadOnly bool, force bool, ignoreError bool) error {
-	cfg := cli.cfg()
-
-	hub, err := require.Hub(cfg, require.RemoteHub(cfg), log.StandardLogger())
+	hub, err := require.Hub(csConfig, require.RemoteHub(csConfig), log.StandardLogger())
 	if err != nil {
 		return err
 	}
@@ -75,7 +71,7 @@ func (cli cliItem) install(args []string, downloadOnly bool, force bool, ignoreE
 		if item == nil {
 			msg := suggestNearestMessage(hub, cli.name, name)
 			if !ignoreError {
-				return errors.New(msg)
+				return fmt.Errorf(msg)
 			}

 			log.Errorf(msg)
@@ -111,10 +107,10 @@ func (cli cliItem) newInstallCmd() *cobra.Command {
 		Example:           cli.installHelp.example,
 		Args:              cobra.MinimumNArgs(1),
 		DisableAutoGenTag: true,
-		ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+		ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
 			return compAllItems(cli.name, args, toComplete)
 		},
-		RunE: func(_ *cobra.Command, args []string) error {
+		RunE: func(cmd *cobra.Command, args []string) error {
 			return cli.install(args, downloadOnly, force, ignoreError)
 		},
 	}
@@ -141,15 +137,15 @@ func istalledParentNames(item *cwhub.Item) []string {
 }

 func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error {
-	hub, err := require.Hub(cli.cfg(), nil, log.StandardLogger())
+	hub, err := require.Hub(csConfig, nil, log.StandardLogger())
 	if err != nil {
 		return err
 	}

 	if all {
-		getter := hub.GetInstalledItemsByType
+		getter := hub.GetInstalledItems
 		if purge {
-			getter = hub.GetItemsByType
+			getter = hub.GetAllItems
 		}

 		items, err := getter(cli.name)
@@ -167,7 +163,6 @@ func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error

 			if didRemove {
 				log.Infof("Removed %s", item.Name)
-
 				removed++
 			}
 		}
@@ -209,7 +204,6 @@ func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error

 		if didRemove {
 			log.Infof("Removed %s", item.Name)
-
 			removed++
 		}
 	}
@@ -237,10 +231,10 @@ func (cli cliItem) newRemoveCmd() *cobra.Command {
 		Example:           cli.removeHelp.example,
 		Aliases:           []string{"delete"},
 		DisableAutoGenTag: true,
-		ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+		ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
 			return compInstalledItems(cli.name, args, toComplete)
 		},
-		RunE: func(_ *cobra.Command, args []string) error {
+		RunE: func(cmd *cobra.Command, args []string) error {
 			return cli.remove(args, purge, force, all)
 		},
 	}
@@ -254,15 +248,13 @@ func (cli cliItem) newRemoveCmd() *cobra.Command {
 }

 func (cli cliItem) upgrade(args []string, force bool, all bool) error {
-	cfg := cli.cfg()
-
-	hub, err := require.Hub(cfg, require.RemoteHub(cfg), log.StandardLogger())
+	hub, err := require.Hub(csConfig, require.RemoteHub(csConfig), log.StandardLogger())
 	if err != nil {
 		return err
 	}

 	if all {
-		items, err := hub.GetInstalledItemsByType(cli.name)
+		items, err := hub.GetInstalledItems(cli.name)
 		if err != nil {
 			return err
 		}
@@ -308,7 +300,6 @@ func (cli cliItem) upgrade(args []string, force bool, all bool) error {

 		if didUpdate {
 			log.Infof("Updated %s", item.Name)
-
 			updated++
 		}
 	}
@@ -332,10 +323,10 @@ func (cli cliItem) newUpgradeCmd() *cobra.Command {
 		Long:              coalesce.String(cli.upgradeHelp.long, fmt.Sprintf("Fetch and upgrade one or more %s from the hub", cli.name)),
 		Example:           cli.upgradeHelp.example,
 		DisableAutoGenTag: true,
-		ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+		ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
 			return compInstalledItems(cli.name, args, toComplete)
 		},
-		RunE: func(_ *cobra.Command, args []string) error {
+		RunE: func(cmd *cobra.Command, args []string) error {
 			return cli.upgrade(args, force, all)
 		},
 	}
@@ -348,23 +339,21 @@ func (cli cliItem) newUpgradeCmd() *cobra.Command {
 }

 func (cli cliItem) inspect(args []string, url string, diff bool, rev bool, noMetrics bool) error {
-	cfg := cli.cfg()
-
 	if rev && !diff {
-		return errors.New("--rev can only be used with --diff")
+		return fmt.Errorf("--rev can only be used with --diff")
 	}

 	if url != "" {
-		cfg.Cscli.PrometheusUrl = url
+		csConfig.Cscli.PrometheusUrl = url
 	}

 	remote := (*cwhub.RemoteHubCfg)(nil)

 	if diff {
-		remote = require.RemoteHub(cfg)
+		remote = require.RemoteHub(csConfig)
 	}

-	hub, err := require.Hub(cfg, remote, log.StandardLogger())
+	hub, err := require.Hub(csConfig, remote, log.StandardLogger())
 	if err != nil {
 		return err
 	}
@@ -410,10 +399,10 @@ func (cli cliItem) newInspectCmd() *cobra.Command {
 		Example:           cli.inspectHelp.example,
 		Args:              cobra.MinimumNArgs(1),
 		DisableAutoGenTag: true,
-		ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+		ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
 			return compInstalledItems(cli.name, args, toComplete)
 		},
-		RunE: func(_ *cobra.Command, args []string) error {
+		RunE: func(cmd *cobra.Command, args []string) error {
 			return cli.inspect(args, url, diff, rev, noMetrics)
 		},
 	}
@@ -428,7 +417,7 @@ func (cli cliItem) newInspectCmd() *cobra.Command {
 }

 func (cli cliItem) list(args []string, all bool) error {
-	hub, err := require.Hub(cli.cfg(), nil, log.StandardLogger())
+	hub, err := require.Hub(csConfig, nil, log.StandardLogger())
 	if err != nil {
 		return err
 	}
@@ -537,7 +526,6 @@ func (cli cliItem) whyTainted(hub *cwhub.Hub, item *cwhub.Item, reverse bool) st
 			// hack: avoid message "item is tainted by itself"
 			continue
 		}
-
 		ret = append(ret, fmt.Sprintf("# %s is tainted by %s", sub.FQName(), taintList))
 	}
 }
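In `remove`, both sides pick the item source by assigning a method value to a local `getter` and swapping it when purge is requested. A small self-contained example of the method-value idiom; the hub type here is a stand-in, not the real `cwhub.Hub`:

```go
package main

import "fmt"

type hub struct{}

func (hub) GetInstalledItemsByType(itemType string) ([]string, error) {
	return []string{"crowdsecurity/ssh-bf"}, nil
}

func (hub) GetItemsByType(itemType string) ([]string, error) {
	return []string{"crowdsecurity/ssh-bf", "crowdsecurity/http-probing"}, nil
}

func list(h hub, purge bool) ([]string, error) {
	// Method values: getter is bound to h, so it can be called like a function.
	getter := h.GetInstalledItemsByType
	if purge {
		getter = h.GetItemsByType
	}
	return getter("scenarios")
}

func main() {
	installed, _ := list(hub{}, false)
	all, _ := list(hub{}, true)
	fmt.Println(installed)
	fmt.Println(all)
}
```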
@@ -17,7 +17,7 @@ import (

 // selectItems returns a slice of items of a given type, selected by name and sorted by case-insensitive name
 func selectItems(hub *cwhub.Hub, itemType string, args []string, installedOnly bool) ([]*cwhub.Item, error) {
-	itemNames := hub.GetNamesByType(itemType)
+	itemNames := hub.GetItemNames(itemType)

 	notExist := []string{}

@@ -116,7 +116,7 @@ func listItems(out io.Writer, itemTypes []string, items map[string][]*cwhub.Item
 		}

 		if err := csvwriter.Write(header); err != nil {
-			return fmt.Errorf("failed to write header: %w", err)
+			return fmt.Errorf("failed to write header: %s", err)
 		}

 		for _, itemType := range itemTypes {
@@ -132,7 +132,7 @@ func listItems(out io.Writer, itemTypes []string, items map[string][]*cwhub.Item
 			}

 			if err := csvwriter.Write(row); err != nil {
-				return fmt.Errorf("failed to write raw output: %w", err)
+				return fmt.Errorf("failed to write raw output: %s", err)
 			}
 		}
 	}
@@ -150,12 +150,12 @@ func inspectItem(item *cwhub.Item, showMetrics bool) error {
 		enc.SetIndent(2)

 		if err := enc.Encode(item); err != nil {
-			return fmt.Errorf("unable to encode item: %w", err)
+			return fmt.Errorf("unable to encode item: %s", err)
 		}
 	case "json":
 		b, err := json.MarshalIndent(*item, "", " ")
 		if err != nil {
-			return fmt.Errorf("unable to marshal item: %w", err)
+			return fmt.Errorf("unable to marshal item: %s", err)
 		}

 		fmt.Print(string(b))
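`inspectItem` pretty-prints items with a `yaml.v3` encoder and `SetIndent(2)`; `yaml.v2`, still used on the branch side, does not expose a per-encoder indent setting, which is part of why the import swap recurs across these files. A hedged sketch of the v3 pattern with an illustrative struct:

```go
package main

import (
	"os"

	"gopkg.in/yaml.v3"
)

type item struct {
	Name    string   `yaml:"name"`
	Authors []string `yaml:"authors"`
}

func main() {
	enc := yaml.NewEncoder(os.Stdout)
	enc.SetIndent(2) // available in yaml.v3 only
	defer enc.Close()

	// Emits the struct as YAML with two-space indentation.
	if err := enc.Encode(item{Name: "crowdsecurity/ssh-bf", Authors: []string{"crowdsec"}}); err != nil {
		panic(err)
	}
}
```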
@@ -13,7 +13,7 @@ import (
 	"github.com/go-openapi/strfmt"
 	log "github.com/sirupsen/logrus"
 	"github.com/spf13/cobra"
-	"gopkg.in/yaml.v3"
+	"gopkg.in/yaml.v2"

 	"github.com/crowdsecurity/go-cs-lib/version"

@@ -44,9 +44,7 @@ func (cli *cliLapi) status() error {
 	password := strfmt.Password(cfg.API.Client.Credentials.Password)
 	login := cfg.API.Client.Credentials.Login

-	origURL := cfg.API.Client.Credentials.URL
-
-	apiURL, err := url.Parse(origURL)
+	apiurl, err := url.Parse(cfg.API.Client.Credentials.URL)
 	if err != nil {
 		return fmt.Errorf("parsing api url: %w", err)
 	}
@@ -56,12 +54,12 @@ func (cli *cliLapi) status() error {
 		return err
 	}

-	scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
+	scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
 	if err != nil {
 		return fmt.Errorf("failed to get scenarios: %w", err)
 	}

-	Client, err = apiclient.NewDefaultClient(apiURL,
+	Client, err = apiclient.NewDefaultClient(apiurl,
 		LAPIURLPrefix,
 		fmt.Sprintf("crowdsec/%s", version.String()),
 		nil)
@@ -76,8 +74,7 @@ func (cli *cliLapi) status() error {
 	}

 	log.Infof("Loaded credentials from %s", cfg.API.Client.CredentialsFilePath)
-	// use the original string because apiURL would print 'http://unix/'
-	log.Infof("Trying to authenticate with username %s on %s", login, origURL)
+	log.Infof("Trying to authenticate with username %s on %s", login, apiurl)

 	_, _, err = Client.Auth.AuthenticateWatcher(context.Background(), t)
 	if err != nil {
@@ -104,7 +101,23 @@ func (cli *cliLapi) register(apiURL string, outputFile string, machine string) e

 	password := strfmt.Password(generatePassword(passwordLength))

-	apiurl, err := prepareAPIURL(cfg.API.Client, apiURL)
+	if apiURL == "" {
+		if cfg.API.Client == nil || cfg.API.Client.Credentials == nil || cfg.API.Client.Credentials.URL == "" {
+			return fmt.Errorf("no Local API URL. Please provide it in your configuration or with the -u parameter")
+		}
+
+		apiURL = cfg.API.Client.Credentials.URL
+	}
+	/*URL needs to end with /, but user doesn't care*/
+	if !strings.HasSuffix(apiURL, "/") {
+		apiURL += "/"
+	}
+	/*URL needs to start with http://, but user doesn't care*/
+	if !strings.HasPrefix(apiURL, "http://") && !strings.HasPrefix(apiURL, "https://") {
+		apiURL = "http://" + apiURL
+	}
+
+	apiurl, err := url.Parse(apiURL)
 	if err != nil {
 		return fmt.Errorf("parsing api url: %w", err)
 	}
@@ -116,6 +129,7 @@ func (cli *cliLapi) register(apiURL string, outputFile string, machine string) e
 		URL:           apiurl,
 		VersionPrefix: LAPIURLPrefix,
 	}, nil)
+
 	if err != nil {
 		return fmt.Errorf("api client register: %w", err)
 	}
@@ -159,36 +173,13 @@ func (cli *cliLapi) register(apiURL string, outputFile string, machine string) e
 	return nil
 }

-// prepareAPIURL checks/fixes a LAPI connection url (http, https or socket) and returns an URL struct
-func prepareAPIURL(clientCfg *csconfig.LocalApiClientCfg, apiURL string) (*url.URL, error) {
-	if apiURL == "" {
-		if clientCfg == nil || clientCfg.Credentials == nil || clientCfg.Credentials.URL == "" {
-			return nil, errors.New("no Local API URL. Please provide it in your configuration or with the -u parameter")
-		}
-
-		apiURL = clientCfg.Credentials.URL
-	}
-
-	// URL needs to end with /, but user doesn't care
-	if !strings.HasSuffix(apiURL, "/") {
-		apiURL += "/"
-	}
-
-	// URL needs to start with http://, but user doesn't care
-	if !strings.HasPrefix(apiURL, "http://") && !strings.HasPrefix(apiURL, "https://") && !strings.HasPrefix(apiURL, "/") {
-		apiURL = "http://" + apiURL
-	}
-
-	return url.Parse(apiURL)
-}
-
 func (cli *cliLapi) newStatusCmd() *cobra.Command {
 	cmdLapiStatus := &cobra.Command{
 		Use:               "status",
 		Short:             "Check authentication to Local API (LAPI)",
 		Args:              cobra.MinimumNArgs(0),
 		DisableAutoGenTag: true,
-		RunE: func(_ *cobra.Command, _ []string) error {
+		RunE: func(cmd *cobra.Command, args []string) error {
 			return cli.status()
 		},
 	}
@ -584,7 +575,7 @@ func detectNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
|
||||||
}
|
}
|
||||||
|
|
||||||
func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
|
func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
|
||||||
ret := make([]string, 0)
|
var ret = make([]string, 0)
|
||||||
|
|
||||||
for _, subnode := range node.LeavesNodes {
|
for _, subnode := range node.LeavesNodes {
|
||||||
if subnode.Grok.RunTimeRegexp != nil {
|
if subnode.Grok.RunTimeRegexp != nil {
|
||||||
|
|
|
@ -1,49 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestPrepareAPIURL_NoProtocol(t *testing.T) {
|
|
||||||
url, err := prepareAPIURL(nil, "localhost:81")
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, "http://localhost:81/", url.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPrepareAPIURL_Http(t *testing.T) {
|
|
||||||
url, err := prepareAPIURL(nil, "http://localhost:81")
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, "http://localhost:81/", url.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPrepareAPIURL_Https(t *testing.T) {
|
|
||||||
url, err := prepareAPIURL(nil, "https://localhost:81")
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, "https://localhost:81/", url.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPrepareAPIURL_UnixSocket(t *testing.T) {
|
|
||||||
url, err := prepareAPIURL(nil, "/path/socket")
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, "/path/socket/", url.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPrepareAPIURL_Empty(t *testing.T) {
|
|
||||||
_, err := prepareAPIURL(nil, "")
|
|
||||||
require.Error(t, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPrepareAPIURL_Empty_ConfigOverride(t *testing.T) {
|
|
||||||
url, err := prepareAPIURL(&csconfig.LocalApiClientCfg{
|
|
||||||
Credentials: &csconfig.ApiCredentialsCfg{
|
|
||||||
URL: "localhost:80",
|
|
||||||
},
|
|
||||||
}, "")
|
|
||||||
require.NoError(t, err)
|
|
||||||
assert.Equal(t, "http://localhost:80/", url.String())
|
|
||||||
}
|
|
|
@ -4,7 +4,6 @@ import (
|
||||||
saferand "crypto/rand"
|
saferand "crypto/rand"
|
||||||
"encoding/csv"
|
"encoding/csv"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"math/big"
|
"math/big"
|
||||||
"os"
|
"os"
|
||||||
|
@ -135,7 +134,7 @@ Note: This command requires database direct access, so is intended to be run on
|
||||||
}
|
}
|
||||||
cli.db, err = database.NewClient(cli.cfg().DbConfig)
|
cli.db, err = database.NewClient(cli.cfg().DbConfig)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to create new database client: %w", err)
|
return fmt.Errorf("unable to create new database client: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -156,7 +155,7 @@ func (cli *cliMachines) list() error {
|
||||||
|
|
||||||
machines, err := cli.db.ListMachines()
|
machines, err := cli.db.ListMachines()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to list machines: %w", err)
|
return fmt.Errorf("unable to list machines: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
switch cli.cfg().Cscli.Output {
|
switch cli.cfg().Cscli.Output {
|
||||||
|
@ -167,7 +166,7 @@ func (cli *cliMachines) list() error {
|
||||||
enc.SetIndent("", " ")
|
enc.SetIndent("", " ")
|
||||||
|
|
||||||
if err := enc.Encode(machines); err != nil {
|
if err := enc.Encode(machines); err != nil {
|
||||||
return errors.New("failed to marshal")
|
return fmt.Errorf("failed to marshal")
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -176,7 +175,7 @@ func (cli *cliMachines) list() error {
|
||||||
|
|
||||||
err := csvwriter.Write([]string{"machine_id", "ip_address", "updated_at", "validated", "version", "auth_type", "last_heartbeat"})
|
err := csvwriter.Write([]string{"machine_id", "ip_address", "updated_at", "validated", "version", "auth_type", "last_heartbeat"})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed to write header: %w", err)
|
return fmt.Errorf("failed to write header: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, m := range machines {
|
for _, m := range machines {
|
||||||
|
@ -258,12 +257,12 @@ func (cli *cliMachines) add(args []string, machinePassword string, dumpFile stri
|
||||||
// create machineID if not specified by user
|
// create machineID if not specified by user
|
||||||
if len(args) == 0 {
|
if len(args) == 0 {
|
||||||
if !autoAdd {
|
if !autoAdd {
|
||||||
return errors.New("please specify a machine name to add, or use --auto")
|
return fmt.Errorf("please specify a machine name to add, or use --auto")
|
||||||
}
|
}
|
||||||
|
|
||||||
machineID, err = generateID("")
|
machineID, err = generateID("")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to generate machine id: %w", err)
|
return fmt.Errorf("unable to generate machine id: %s", err)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
machineID = args[0]
|
machineID = args[0]
|
||||||
|
@ -282,20 +281,20 @@ func (cli *cliMachines) add(args []string, machinePassword string, dumpFile stri
|
||||||
case os.IsNotExist(err) || force:
|
case os.IsNotExist(err) || force:
|
||||||
dumpFile = credFile
|
dumpFile = credFile
|
||||||
case err != nil:
|
case err != nil:
|
||||||
return fmt.Errorf("unable to stat '%s': %w", credFile, err)
|
return fmt.Errorf("unable to stat '%s': %s", credFile, err)
|
||||||
default:
|
default:
|
||||||
return fmt.Errorf(`credentials file '%s' already exists: please remove it, use "--force" or specify a different file with "-f" ("-f -" for standard output)`, credFile)
|
return fmt.Errorf(`credentials file '%s' already exists: please remove it, use "--force" or specify a different file with "-f" ("-f -" for standard output)`, credFile)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if dumpFile == "" {
|
if dumpFile == "" {
|
||||||
return errors.New(`please specify a file to dump credentials to, with -f ("-f -" for standard output)`)
|
return fmt.Errorf(`please specify a file to dump credentials to, with -f ("-f -" for standard output)`)
|
||||||
}
|
}
|
||||||
|
|
||||||
// create a password if it's not specified by user
|
// create a password if it's not specified by user
|
||||||
if machinePassword == "" && !interactive {
|
if machinePassword == "" && !interactive {
|
||||||
if !autoAdd {
|
if !autoAdd {
|
||||||
return errors.New("please specify a password with --password or use --auto")
|
return fmt.Errorf("please specify a password with --password or use --auto")
|
||||||
}
|
}
|
||||||
|
|
||||||
machinePassword = generatePassword(passwordLength)
|
machinePassword = generatePassword(passwordLength)
|
||||||
|
@ -310,7 +309,7 @@ func (cli *cliMachines) add(args []string, machinePassword string, dumpFile stri
|
||||||
|
|
||||||
_, err = cli.db.CreateMachine(&machineID, &password, "", true, force, types.PasswordAuthType)
|
_, err = cli.db.CreateMachine(&machineID, &password, "", true, force, types.PasswordAuthType)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to create machine: %w", err)
|
return fmt.Errorf("unable to create machine: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Fprintf(os.Stderr, "Machine '%s' successfully added to the local API.\n", machineID)
|
fmt.Fprintf(os.Stderr, "Machine '%s' successfully added to the local API.\n", machineID)
|
||||||
|
@ -318,10 +317,10 @@ func (cli *cliMachines) add(args []string, machinePassword string, dumpFile stri
|
||||||
if apiURL == "" {
|
if apiURL == "" {
|
||||||
if clientCfg != nil && clientCfg.Credentials != nil && clientCfg.Credentials.URL != "" {
|
if clientCfg != nil && clientCfg.Credentials != nil && clientCfg.Credentials.URL != "" {
|
||||||
apiURL = clientCfg.Credentials.URL
|
apiURL = clientCfg.Credentials.URL
|
||||||
} else if serverCfg.ClientURL() != "" {
|
} else if serverCfg != nil && serverCfg.ListenURI != "" {
|
||||||
apiURL = serverCfg.ClientURL()
|
apiURL = "http://" + serverCfg.ListenURI
|
||||||
} else {
|
} else {
|
||||||
return errors.New("unable to dump an api URL. Please provide it in your configuration or with the -u parameter")
|
return fmt.Errorf("unable to dump an api URL. Please provide it in your configuration or with the -u parameter")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -333,12 +332,12 @@ func (cli *cliMachines) add(args []string, machinePassword string, dumpFile stri
|
||||||
|
|
||||||
apiConfigDump, err := yaml.Marshal(apiCfg)
|
apiConfigDump, err := yaml.Marshal(apiCfg)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to marshal api credentials: %w", err)
|
return fmt.Errorf("unable to marshal api credentials: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if dumpFile != "" && dumpFile != "-" {
|
if dumpFile != "" && dumpFile != "-" {
|
||||||
if err = os.WriteFile(dumpFile, apiConfigDump, 0o600); err != nil {
|
if err = os.WriteFile(dumpFile, apiConfigDump, 0o600); err != nil {
|
||||||
return fmt.Errorf("write api credentials in '%s' failed: %w", dumpFile, err)
|
return fmt.Errorf("write api credentials in '%s' failed: %s", dumpFile, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Fprintf(os.Stderr, "API credentials written to '%s'.\n", dumpFile)
|
fmt.Fprintf(os.Stderr, "API credentials written to '%s'.\n", dumpFile)
|
||||||
|
@ -414,13 +413,13 @@ func (cli *cliMachines) prune(duration time.Duration, notValidOnly bool, force b
|
||||||
}
|
}
|
||||||
|
|
||||||
if !notValidOnly {
|
if !notValidOnly {
|
||||||
if pending, err := cli.db.QueryLastValidatedHeartbeatLT(time.Now().UTC().Add(-duration)); err == nil {
|
if pending, err := cli.db.QueryLastValidatedHeartbeatLT(time.Now().UTC().Add(duration)); err == nil {
|
||||||
machines = append(machines, pending...)
|
machines = append(machines, pending...)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(machines) == 0 {
|
if len(machines) == 0 {
|
||||||
fmt.Println("No machines to prune.")
|
fmt.Println("no machines to prune")
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -439,7 +438,7 @@ func (cli *cliMachines) prune(duration time.Duration, notValidOnly bool, force b
|
||||||
|
|
||||||
deleted, err := cli.db.BulkDeleteWatchers(machines)
|
deleted, err := cli.db.BulkDeleteWatchers(machines)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to prune machines: %w", err)
|
return fmt.Errorf("unable to prune machines: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Fprintf(os.Stderr, "successfully delete %d machines\n", deleted)
|
fmt.Fprintf(os.Stderr, "successfully delete %d machines\n", deleted)
|
||||||
|
@ -480,7 +479,7 @@ cscli machines prune --not-validated-only --force`,
|
||||||
|
|
||||||
func (cli *cliMachines) validate(machineID string) error {
|
func (cli *cliMachines) validate(machineID string) error {
|
||||||
if err := cli.db.ValidateMachine(machineID); err != nil {
|
if err := cli.db.ValidateMachine(machineID); err != nil {
|
||||||
return fmt.Errorf("unable to validate machine '%s': %w", machineID, err)
|
return fmt.Errorf("unable to validate machine '%s': %s", machineID, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Infof("machine '%s' validated successfully", machineID)
|
log.Infof("machine '%s' validated successfully", machineID)
|
||||||
|
@ -496,7 +495,7 @@ func (cli *cliMachines) newValidateCmd() *cobra.Command {
|
||||||
Example: `cscli machines validate "machine_name"`,
|
Example: `cscli machines validate "machine_name"`,
|
||||||
Args: cobra.ExactArgs(1),
|
Args: cobra.ExactArgs(1),
|
||||||
DisableAutoGenTag: true,
|
DisableAutoGenTag: true,
|
||||||
RunE: func(_ *cobra.Command, args []string) error {
|
RunE: func(cmd *cobra.Command, args []string) error {
|
||||||
return cli.validate(args[0])
|
return cli.validate(args[0])
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,9 +1,7 @@
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
|
||||||
"slices"
|
"slices"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
@ -12,18 +10,14 @@ import (
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
"github.com/crowdsecurity/go-cs-lib/trace"
|
|
||||||
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/database"
|
"github.com/crowdsecurity/crowdsec/pkg/database"
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/fflag"
|
"github.com/crowdsecurity/crowdsec/pkg/fflag"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var ConfigFilePath string
|
||||||
ConfigFilePath string
|
var csConfig *csconfig.Config
|
||||||
csConfig *csconfig.Config
|
var dbClient *database.Client
|
||||||
dbClient *database.Client
|
|
||||||
)
|
|
||||||
|
|
||||||
type configGetter func() *csconfig.Config
|
type configGetter func() *csconfig.Config
|
||||||
|
|
||||||
|
@ -88,11 +82,6 @@ func loadConfigFor(command string) (*csconfig.Config, string, error) {
|
||||||
return nil, "", err
|
return nil, "", err
|
||||||
}
|
}
|
||||||
|
|
||||||
// set up directory for trace files
|
|
||||||
if err := trace.Init(filepath.Join(config.ConfigPaths.DataDir, "trace")); err != nil {
|
|
||||||
return nil, "", fmt.Errorf("while setting up trace directory: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return config, merged, nil
|
return config, merged, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -260,13 +249,13 @@ It is meant to allow you to manage bans, parsers/scenarios/etc, api and generall
|
||||||
cmd.AddCommand(NewCLINotifications(cli.cfg).NewCommand())
|
cmd.AddCommand(NewCLINotifications(cli.cfg).NewCommand())
|
||||||
cmd.AddCommand(NewCLISupport().NewCommand())
|
cmd.AddCommand(NewCLISupport().NewCommand())
|
||||||
cmd.AddCommand(NewCLIPapi(cli.cfg).NewCommand())
|
cmd.AddCommand(NewCLIPapi(cli.cfg).NewCommand())
|
||||||
cmd.AddCommand(NewCLICollection(cli.cfg).NewCommand())
|
cmd.AddCommand(NewCLICollection().NewCommand())
|
||||||
cmd.AddCommand(NewCLIParser(cli.cfg).NewCommand())
|
cmd.AddCommand(NewCLIParser().NewCommand())
|
||||||
cmd.AddCommand(NewCLIScenario(cli.cfg).NewCommand())
|
cmd.AddCommand(NewCLIScenario().NewCommand())
|
||||||
cmd.AddCommand(NewCLIPostOverflow(cli.cfg).NewCommand())
|
cmd.AddCommand(NewCLIPostOverflow().NewCommand())
|
||||||
cmd.AddCommand(NewCLIContext(cli.cfg).NewCommand())
|
cmd.AddCommand(NewCLIContext().NewCommand())
|
||||||
cmd.AddCommand(NewCLIAppsecConfig(cli.cfg).NewCommand())
|
cmd.AddCommand(NewCLIAppsecConfig().NewCommand())
|
||||||
cmd.AddCommand(NewCLIAppsecRule(cli.cfg).NewCommand())
|
cmd.AddCommand(NewCLIAppsecRule().NewCommand())
|
||||||
|
|
||||||
if fflag.CscliSetup.IsEnabled() {
|
if fflag.CscliSetup.IsEnabled() {
|
||||||
cmd.AddCommand(NewSetupCmd())
|
cmd.AddCommand(NewSetupCmd())
|
||||||
|
|
|
@ -272,7 +272,9 @@ func (ms metricStore) Format(out io.Writer, sections []string, formatType string
|
||||||
|
|
||||||
// if no sections are specified, we want all of them
|
// if no sections are specified, we want all of them
|
||||||
if len(sections) == 0 {
|
if len(sections) == 0 {
|
||||||
sections = maptools.SortedKeys(ms)
|
for section := range ms {
|
||||||
|
sections = append(sections, section)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, section := range sections {
|
for _, section := range sections {
|
||||||
|
@ -281,7 +283,7 @@ func (ms metricStore) Format(out io.Writer, sections []string, formatType string
|
||||||
|
|
||||||
switch formatType {
|
switch formatType {
|
||||||
case "human":
|
case "human":
|
||||||
for _, section := range maptools.SortedKeys(want) {
|
for section := range want {
|
||||||
want[section].Table(out, noUnit, showEmpty)
|
want[section].Table(out, noUnit, showEmpty)
|
||||||
}
|
}
|
||||||
case "json":
|
case "json":
|
||||||
|
@ -374,7 +376,7 @@ cscli metrics list`,
|
||||||
}
|
}
|
||||||
|
|
||||||
// expandAlias returns a list of sections. The input can be a list of sections or alias.
|
// expandAlias returns a list of sections. The input can be a list of sections or alias.
|
||||||
func (cli *cliMetrics) expandAlias(args []string) []string {
|
func (cli *cliMetrics) expandSectionGroups(args []string) []string {
|
||||||
ret := []string{}
|
ret := []string{}
|
||||||
|
|
||||||
for _, section := range args {
|
for _, section := range args {
|
||||||
|
@ -420,7 +422,7 @@ cscli metrics show acquisition parsers scenarios stash -o json`,
|
||||||
// Positional args are optional
|
// Positional args are optional
|
||||||
DisableAutoGenTag: true,
|
DisableAutoGenTag: true,
|
||||||
RunE: func(_ *cobra.Command, args []string) error {
|
RunE: func(_ *cobra.Command, args []string) error {
|
||||||
args = cli.expandAlias(args)
|
args = cli.expandSectionGroups(args)
|
||||||
return cli.show(args, url, noUnit)
|
return cli.show(args, url, noUnit)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
|
@ -4,7 +4,6 @@ import (
|
||||||
"context"
|
"context"
|
||||||
"encoding/csv"
|
"encoding/csv"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"io/fs"
|
"io/fs"
|
||||||
"net/url"
|
"net/url"
|
||||||
|
@ -89,7 +88,7 @@ func (cli *cliNotifications) getPluginConfigs() (map[string]csplugin.PluginConfi
|
||||||
return fmt.Errorf("error while traversing directory %s: %w", path, err)
|
return fmt.Errorf("error while traversing directory %s: %w", path, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
name := filepath.Join(cfg.ConfigPaths.NotificationDir, info.Name()) // Avoid calling info.Name() twice
|
name := filepath.Join(cfg.ConfigPaths.NotificationDir, info.Name()) //Avoid calling info.Name() twice
|
||||||
if (strings.HasSuffix(name, "yaml") || strings.HasSuffix(name, "yml")) && !(info.IsDir()) {
|
if (strings.HasSuffix(name, "yaml") || strings.HasSuffix(name, "yml")) && !(info.IsDir()) {
|
||||||
ts, err := csplugin.ParsePluginConfigFile(name)
|
ts, err := csplugin.ParsePluginConfigFile(name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -267,7 +266,7 @@ func (cli *cliNotifications) NewTestCmd() *cobra.Command {
|
||||||
if !ok {
|
if !ok {
|
||||||
return fmt.Errorf("plugin name: '%s' does not exist", args[0])
|
return fmt.Errorf("plugin name: '%s' does not exist", args[0])
|
||||||
}
|
}
|
||||||
// Create a single profile with plugin name as notification name
|
//Create a single profile with plugin name as notification name
|
||||||
return pluginBroker.Init(cfg.PluginConfig, []*csconfig.ProfileCfg{
|
return pluginBroker.Init(cfg.PluginConfig, []*csconfig.ProfileCfg{
|
||||||
{
|
{
|
||||||
Notifications: []string{
|
Notifications: []string{
|
||||||
|
@ -321,8 +320,8 @@ func (cli *cliNotifications) NewTestCmd() *cobra.Command {
|
||||||
Alert: alert,
|
Alert: alert,
|
||||||
}
|
}
|
||||||
|
|
||||||
// time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
|
//time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
|
||||||
pluginTomb.Kill(errors.New("terminating"))
|
pluginTomb.Kill(fmt.Errorf("terminating"))
|
||||||
pluginTomb.Wait()
|
pluginTomb.Wait()
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -417,8 +416,8 @@ cscli notifications reinject <alert_id> -a '{"remediation": true,"scenario":"not
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
|
//time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
|
||||||
pluginTomb.Kill(errors.New("terminating"))
|
pluginTomb.Kill(fmt.Errorf("terminating"))
|
||||||
pluginTomb.Wait()
|
pluginTomb.Wait()
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
|
|
@ -64,22 +64,25 @@ func (cli *cliPapi) NewStatusCmd() *cobra.Command {
|
||||||
cfg := cli.cfg()
|
cfg := cli.cfg()
|
||||||
dbClient, err = database.NewClient(cfg.DbConfig)
|
dbClient, err = database.NewClient(cfg.DbConfig)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to initialize database client: %w", err)
|
return fmt.Errorf("unable to initialize database client: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)
|
apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to initialize API client: %w", err)
|
return fmt.Errorf("unable to initialize API client: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())
|
papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to initialize PAPI client: %w", err)
|
return fmt.Errorf("unable to initialize PAPI client: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
perms, err := papi.GetPermissions()
|
perms, err := papi.GetPermissions()
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to get PAPI permissions: %w", err)
|
return fmt.Errorf("unable to get PAPI permissions: %s", err)
|
||||||
}
|
}
|
||||||
var lastTimestampStr *string
|
var lastTimestampStr *string
|
||||||
lastTimestampStr, err = dbClient.GetConfigItem(apiserver.PapiPullKey)
|
lastTimestampStr, err = dbClient.GetConfigItem(apiserver.PapiPullKey)
|
||||||
|
@ -115,26 +118,27 @@ func (cli *cliPapi) NewSyncCmd() *cobra.Command {
|
||||||
|
|
||||||
dbClient, err = database.NewClient(cfg.DbConfig)
|
dbClient, err = database.NewClient(cfg.DbConfig)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to initialize database client: %w", err)
|
return fmt.Errorf("unable to initialize database client: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)
|
apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to initialize API client: %w", err)
|
return fmt.Errorf("unable to initialize API client: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Go(apic.Push)
|
t.Go(apic.Push)
|
||||||
|
|
||||||
papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())
|
papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to initialize PAPI client: %w", err)
|
return fmt.Errorf("unable to initialize PAPI client: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Go(papi.SyncDecisions)
|
t.Go(papi.SyncDecisions)
|
||||||
|
|
||||||
err = papi.PullOnce(time.Time{}, true)
|
err = papi.PullOnce(time.Time{}, true)
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to sync decisions: %w", err)
|
return fmt.Errorf("unable to sync decisions: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Infof("Sending acknowledgements to CAPI")
|
log.Infof("Sending acknowledgements to CAPI")
|
||||||
|
|
|
@ -1,7 +1,6 @@
|
||||||
package require
|
package require
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
|
|
||||||
|
@ -17,7 +16,7 @@ func LAPI(c *csconfig.Config) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
if c.DisableAPI {
|
if c.DisableAPI {
|
||||||
return errors.New("local API is disabled -- this command must be run on the local API machine")
|
return fmt.Errorf("local API is disabled -- this command must be run on the local API machine")
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -33,7 +32,7 @@ func CAPI(c *csconfig.Config) error {
|
||||||
|
|
||||||
func PAPI(c *csconfig.Config) error {
|
func PAPI(c *csconfig.Config) error {
|
||||||
if c.API.Server.OnlineClient.Credentials.PapiURL == "" {
|
if c.API.Server.OnlineClient.Credentials.PapiURL == "" {
|
||||||
return errors.New("no PAPI URL in configuration")
|
return fmt.Errorf("no PAPI URL in configuration")
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -41,7 +40,7 @@ func PAPI(c *csconfig.Config) error {
|
||||||
|
|
||||||
func CAPIRegistered(c *csconfig.Config) error {
|
func CAPIRegistered(c *csconfig.Config) error {
|
||||||
if c.API.Server.OnlineClient.Credentials == nil {
|
if c.API.Server.OnlineClient.Credentials == nil {
|
||||||
return errors.New("the Central API (CAPI) must be configured with 'cscli capi register'")
|
return fmt.Errorf("the Central API (CAPI) must be configured with 'cscli capi register'")
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -57,7 +56,7 @@ func DB(c *csconfig.Config) error {
|
||||||
|
|
||||||
func Notifications(c *csconfig.Config) error {
|
func Notifications(c *csconfig.Config) error {
|
||||||
if c.ConfigPaths.NotificationDir == "" {
|
if c.ConfigPaths.NotificationDir == "" {
|
||||||
return errors.New("config_paths.notification_dir is not set in crowdsec config")
|
return fmt.Errorf("config_paths.notification_dir is not set in crowdsec config")
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -83,7 +82,7 @@ func Hub(c *csconfig.Config, remote *cwhub.RemoteHubCfg, logger *logrus.Logger)
|
||||||
local := c.Hub
|
local := c.Hub
|
||||||
|
|
||||||
if local == nil {
|
if local == nil {
|
||||||
return nil, errors.New("you must configure cli before interacting with hub")
|
return nil, fmt.Errorf("you must configure cli before interacting with hub")
|
||||||
}
|
}
|
||||||
|
|
||||||
if logger == nil {
|
if logger == nil {
|
||||||
|
|
|
@ -2,7 +2,6 @@ package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
"os/exec"
|
"os/exec"
|
||||||
|
@ -119,11 +118,9 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
|
||||||
switch detectConfigFile {
|
switch detectConfigFile {
|
||||||
case "-":
|
case "-":
|
||||||
log.Tracef("Reading detection rules from stdin")
|
log.Tracef("Reading detection rules from stdin")
|
||||||
|
|
||||||
detectReader = os.Stdin
|
detectReader = os.Stdin
|
||||||
default:
|
default:
|
||||||
log.Tracef("Reading detection rules: %s", detectConfigFile)
|
log.Tracef("Reading detection rules: %s", detectConfigFile)
|
||||||
|
|
||||||
detectReader, err = os.Open(detectConfigFile)
|
detectReader, err = os.Open(detectConfigFile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
@ -174,7 +171,6 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
|
||||||
_, err := exec.LookPath("systemctl")
|
_, err := exec.LookPath("systemctl")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Debug("systemctl not available: snubbing systemd")
|
log.Debug("systemctl not available: snubbing systemd")
|
||||||
|
|
||||||
snubSystemd = true
|
snubSystemd = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -186,7 +182,6 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
|
||||||
|
|
||||||
if forcedOSFamily == "" && forcedOSID != "" {
|
if forcedOSFamily == "" && forcedOSID != "" {
|
||||||
log.Debug("force-os-id is set: force-os-family defaults to 'linux'")
|
log.Debug("force-os-id is set: force-os-family defaults to 'linux'")
|
||||||
|
|
||||||
forcedOSFamily = "linux"
|
forcedOSFamily = "linux"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -224,7 +219,6 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
fmt.Println(setup)
|
fmt.Println(setup)
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -324,7 +318,6 @@ func runSetupInstallHub(cmd *cobra.Command, args []string) error {
|
||||||
|
|
||||||
func runSetupValidate(cmd *cobra.Command, args []string) error {
|
func runSetupValidate(cmd *cobra.Command, args []string) error {
|
||||||
fromFile := args[0]
|
fromFile := args[0]
|
||||||
|
|
||||||
input, err := os.ReadFile(fromFile)
|
input, err := os.ReadFile(fromFile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("while reading stdin: %w", err)
|
return fmt.Errorf("while reading stdin: %w", err)
|
||||||
|
@ -332,7 +325,7 @@ func runSetupValidate(cmd *cobra.Command, args []string) error {
|
||||||
|
|
||||||
if err = setup.Validate(input); err != nil {
|
if err = setup.Validate(input); err != nil {
|
||||||
fmt.Printf("%v\n", err)
|
fmt.Printf("%v\n", err)
|
||||||
return errors.New("invalid setup file")
|
return fmt.Errorf("invalid setup file")
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
|
|
@ -1,14 +1,13 @@
|
||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
"slices"
|
"slices"
|
||||||
|
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
"gopkg.in/yaml.v3"
|
"gopkg.in/yaml.v2"
|
||||||
|
|
||||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
||||||
|
@ -37,7 +36,7 @@ cscli simulation disable crowdsecurity/ssh-bf`,
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if cli.cfg().Cscli.SimulationConfig == nil {
|
if cli.cfg().Cscli.SimulationConfig == nil {
|
||||||
return errors.New("no simulation configured")
|
return fmt.Errorf("no simulation configured")
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
|
@ -74,7 +73,7 @@ func (cli *cliSimulation) NewEnableCmd() *cobra.Command {
|
||||||
|
|
||||||
if len(args) > 0 {
|
if len(args) > 0 {
|
||||||
for _, scenario := range args {
|
for _, scenario := range args {
|
||||||
item := hub.GetItem(cwhub.SCENARIOS, scenario)
|
var item = hub.GetItem(cwhub.SCENARIOS, scenario)
|
||||||
if item == nil {
|
if item == nil {
|
||||||
log.Errorf("'%s' doesn't exist or is not a scenario", scenario)
|
log.Errorf("'%s' doesn't exist or is not a scenario", scenario)
|
||||||
continue
|
continue
|
||||||
|
@ -100,11 +99,11 @@ func (cli *cliSimulation) NewEnableCmd() *cobra.Command {
|
||||||
log.Printf("simulation mode for '%s' enabled", scenario)
|
log.Printf("simulation mode for '%s' enabled", scenario)
|
||||||
}
|
}
|
||||||
if err := cli.dumpSimulationFile(); err != nil {
|
if err := cli.dumpSimulationFile(); err != nil {
|
||||||
return fmt.Errorf("simulation enable: %w", err)
|
return fmt.Errorf("simulation enable: %s", err)
|
||||||
}
|
}
|
||||||
} else if forceGlobalSimulation {
|
} else if forceGlobalSimulation {
|
||||||
if err := cli.enableGlobalSimulation(); err != nil {
|
if err := cli.enableGlobalSimulation(); err != nil {
|
||||||
return fmt.Errorf("unable to enable global simulation mode: %w", err)
|
return fmt.Errorf("unable to enable global simulation mode: %s", err)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
printHelp(cmd)
|
printHelp(cmd)
|
||||||
|
@ -147,11 +146,11 @@ func (cli *cliSimulation) NewDisableCmd() *cobra.Command {
|
||||||
log.Printf("simulation mode for '%s' disabled", scenario)
|
log.Printf("simulation mode for '%s' disabled", scenario)
|
||||||
}
|
}
|
||||||
if err := cli.dumpSimulationFile(); err != nil {
|
if err := cli.dumpSimulationFile(); err != nil {
|
||||||
return fmt.Errorf("simulation disable: %w", err)
|
return fmt.Errorf("simulation disable: %s", err)
|
||||||
}
|
}
|
||||||
} else if forceGlobalSimulation {
|
} else if forceGlobalSimulation {
|
||||||
if err := cli.disableGlobalSimulation(); err != nil {
|
if err := cli.disableGlobalSimulation(); err != nil {
|
||||||
return fmt.Errorf("unable to disable global simulation mode: %w", err)
|
return fmt.Errorf("unable to disable global simulation mode: %s", err)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
printHelp(cmd)
|
printHelp(cmd)
|
||||||
|
@ -203,7 +202,7 @@ func (cli *cliSimulation) enableGlobalSimulation() error {
|
||||||
cfg.Cscli.SimulationConfig.Exclusions = []string{}
|
cfg.Cscli.SimulationConfig.Exclusions = []string{}
|
||||||
|
|
||||||
if err := cli.dumpSimulationFile(); err != nil {
|
if err := cli.dumpSimulationFile(); err != nil {
|
||||||
return fmt.Errorf("unable to dump simulation file: %w", err)
|
return fmt.Errorf("unable to dump simulation file: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Printf("global simulation: enabled")
|
log.Printf("global simulation: enabled")
|
||||||
|
@ -216,12 +215,12 @@ func (cli *cliSimulation) dumpSimulationFile() error {
|
||||||
|
|
||||||
newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig)
|
newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to marshal simulation configuration: %w", err)
|
return fmt.Errorf("unable to marshal simulation configuration: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644)
|
err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("write simulation config in '%s' failed: %w", cfg.ConfigPaths.SimulationFilePath, err)
|
return fmt.Errorf("write simulation config in '%s' failed: %s", cfg.ConfigPaths.SimulationFilePath, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Debugf("updated simulation file %s", cfg.ConfigPaths.SimulationFilePath)
|
log.Debugf("updated simulation file %s", cfg.ConfigPaths.SimulationFilePath)
|
||||||
|
@ -238,12 +237,12 @@ func (cli *cliSimulation) disableGlobalSimulation() error {
|
||||||
|
|
||||||
newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig)
|
newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to marshal new simulation configuration: %w", err)
|
return fmt.Errorf("unable to marshal new simulation configuration: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644)
|
err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unable to write new simulation config in '%s': %w", cfg.ConfigPaths.SimulationFilePath, err)
|
return fmt.Errorf("unable to write new simulation config in '%s': %s", cfg.ConfigPaths.SimulationFilePath, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Printf("global simulation: disabled")
|
log.Printf("global simulation: disabled")
|
||||||
|
@ -270,10 +269,8 @@ func (cli *cliSimulation) status() {
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
log.Println("global simulation: disabled")
|
log.Println("global simulation: disabled")
|
||||||
|
|
||||||
if len(cfg.Cscli.SimulationConfig.Exclusions) > 0 {
|
if len(cfg.Cscli.SimulationConfig.Exclusions) > 0 {
|
||||||
log.Println("Scenarios in simulation mode :")
|
log.Println("Scenarios in simulation mode :")
|
||||||
|
|
||||||
for _, scenario := range cfg.Cscli.SimulationConfig.Exclusions {
|
for _, scenario := range cfg.Cscli.SimulationConfig.Exclusions {
|
||||||
log.Printf(" - %s", scenario)
|
log.Printf(" - %s", scenario)
|
||||||
}
|
}
|
||||||
|
|
|
@ -4,7 +4,6 @@ import (
|
||||||
"archive/zip"
|
"archive/zip"
|
||||||
"bytes"
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
@ -13,14 +12,12 @@ import (
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/blackfireio/osinfo"
|
"github.com/blackfireio/osinfo"
|
||||||
"github.com/go-openapi/strfmt"
|
"github.com/go-openapi/strfmt"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
|
|
||||||
"github.com/crowdsecurity/go-cs-lib/trace"
|
|
||||||
"github.com/crowdsecurity/go-cs-lib/version"
|
"github.com/crowdsecurity/go-cs-lib/version"
|
||||||
|
|
||||||
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
||||||
|
@ -50,7 +47,6 @@ const (
|
||||||
SUPPORT_CAPI_STATUS_PATH = "capi_status.txt"
|
SUPPORT_CAPI_STATUS_PATH = "capi_status.txt"
|
||||||
SUPPORT_ACQUISITION_CONFIG_BASE_PATH = "config/acquis/"
|
SUPPORT_ACQUISITION_CONFIG_BASE_PATH = "config/acquis/"
|
||||||
SUPPORT_CROWDSEC_PROFILE_PATH = "config/profiles.yaml"
|
SUPPORT_CROWDSEC_PROFILE_PATH = "config/profiles.yaml"
|
||||||
SUPPORT_CRASH_PATH = "crash/"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// from https://github.com/acarl005/stripansi
|
// from https://github.com/acarl005/stripansi
|
||||||
|
@ -66,7 +62,7 @@ func collectMetrics() ([]byte, []byte, error) {
|
||||||
|
|
||||||
if csConfig.Cscli.PrometheusUrl == "" {
|
if csConfig.Cscli.PrometheusUrl == "" {
|
||||||
log.Warn("No Prometheus URL configured, metrics will not be collected")
|
log.Warn("No Prometheus URL configured, metrics will not be collected")
|
||||||
return nil, nil, errors.New("prometheus_uri is not set")
|
return nil, nil, fmt.Errorf("prometheus_uri is not set")
|
||||||
}
|
}
|
||||||
|
|
||||||
humanMetrics := bytes.NewBuffer(nil)
|
humanMetrics := bytes.NewBuffer(nil)
|
||||||
|
@ -74,7 +70,7 @@ func collectMetrics() ([]byte, []byte, error) {
|
||||||
ms := NewMetricStore()
|
ms := NewMetricStore()
|
||||||
|
|
||||||
if err := ms.Fetch(csConfig.Cscli.PrometheusUrl); err != nil {
|
if err := ms.Fetch(csConfig.Cscli.PrometheusUrl); err != nil {
|
||||||
return nil, nil, fmt.Errorf("could not fetch prometheus metrics: %w", err)
|
return nil, nil, fmt.Errorf("could not fetch prometheus metrics: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := ms.Format(humanMetrics, nil, "human", false); err != nil {
|
if err := ms.Format(humanMetrics, nil, "human", false); err != nil {
|
||||||
|
@ -83,21 +79,21 @@ func collectMetrics() ([]byte, []byte, error) {
|
||||||
|
|
||||||
req, err := http.NewRequest(http.MethodGet, csConfig.Cscli.PrometheusUrl, nil)
|
req, err := http.NewRequest(http.MethodGet, csConfig.Cscli.PrometheusUrl, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, fmt.Errorf("could not create requests to prometheus endpoint: %w", err)
|
return nil, nil, fmt.Errorf("could not create requests to prometheus endpoint: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
client := &http.Client{}
|
client := &http.Client{}
|
||||||
|
|
||||||
resp, err := client.Do(req)
|
resp, err := client.Do(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, fmt.Errorf("could not get metrics from prometheus endpoint: %w", err)
|
return nil, nil, fmt.Errorf("could not get metrics from prometheus endpoint: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
defer resp.Body.Close()
|
defer resp.Body.Close()
|
||||||
|
|
||||||
body, err := io.ReadAll(resp.Body)
|
body, err := io.ReadAll(resp.Body)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, fmt.Errorf("could not read metrics from prometheus endpoint: %w", err)
|
return nil, nil, fmt.Errorf("could not read metrics from prometheus endpoint: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return humanMetrics.Bytes(), body, nil
|
return humanMetrics.Bytes(), body, nil
|
||||||
|
@ -125,18 +121,19 @@ func collectOSInfo() ([]byte, error) {
|
||||||
log.Info("Collecting OS info")
|
log.Info("Collecting OS info")
|
||||||
|
|
||||||
info, err := osinfo.GetOSInfo()
|
info, err := osinfo.GetOSInfo()
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
w := bytes.NewBuffer(nil)
|
w := bytes.NewBuffer(nil)
|
||||||
fmt.Fprintf(w, "Architecture: %s\n", info.Architecture)
|
w.WriteString(fmt.Sprintf("Architecture: %s\n", info.Architecture))
|
||||||
fmt.Fprintf(w, "Family: %s\n", info.Family)
|
w.WriteString(fmt.Sprintf("Family: %s\n", info.Family))
|
||||||
fmt.Fprintf(w, "ID: %s\n", info.ID)
|
w.WriteString(fmt.Sprintf("ID: %s\n", info.ID))
|
||||||
fmt.Fprintf(w, "Name: %s\n", info.Name)
|
w.WriteString(fmt.Sprintf("Name: %s\n", info.Name))
|
||||||
fmt.Fprintf(w, "Codename: %s\n", info.Codename)
|
w.WriteString(fmt.Sprintf("Codename: %s\n", info.Codename))
|
||||||
fmt.Fprintf(w, "Version: %s\n", info.Version)
|
w.WriteString(fmt.Sprintf("Version: %s\n", info.Version))
|
||||||
fmt.Fprintf(w, "Build: %s\n", info.Build)
|
w.WriteString(fmt.Sprintf("Build: %s\n", info.Build))
|
||||||
|
|
||||||
return w.Bytes(), nil
|
return w.Bytes(), nil
|
||||||
}
|
}
|
||||||
|
@ -166,7 +163,7 @@ func collectBouncers(dbClient *database.Client) ([]byte, error) {
|
||||||
|
|
||||||
bouncers, err := dbClient.ListBouncers()
|
bouncers, err := dbClient.ListBouncers()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("unable to list bouncers: %w", err)
|
return nil, fmt.Errorf("unable to list bouncers: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
getBouncersTable(out, bouncers)
|
getBouncersTable(out, bouncers)
|
||||||
|
@ -179,7 +176,7 @@ func collectAgents(dbClient *database.Client) ([]byte, error) {
|
||||||
|
|
||||||
machines, err := dbClient.ListMachines()
|
machines, err := dbClient.ListMachines()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("unable to list machines: %w", err)
|
return nil, fmt.Errorf("unable to list machines: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
getAgentsTable(out, machines)
|
getAgentsTable(out, machines)
|
||||||
|
@ -199,7 +196,7 @@ func collectAPIStatus(login string, password string, endpoint string, prefix str
|
||||||
return []byte(fmt.Sprintf("cannot parse API URL: %s", err))
|
return []byte(fmt.Sprintf("cannot parse API URL: %s", err))
|
||||||
}
|
}
|
||||||
|
|
||||||
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
|
scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return []byte(fmt.Sprintf("could not collect scenarios: %s", err))
|
return []byte(fmt.Sprintf("could not collect scenarios: %s", err))
|
||||||
}
|
}
|
||||||
|
@ -267,11 +264,6 @@ func collectAcquisitionConfig() map[string][]byte {
|
||||||
return ret
|
return ret
|
||||||
}
|
}
|
||||||
|
|
||||||
func collectCrash() ([]string, error) {
|
|
||||||
log.Info("Collecting crash dumps")
|
|
||||||
return trace.List()
|
|
||||||
}
|
|
||||||
|
|
||||||
type cliSupport struct{}
|
type cliSupport struct{}
|
||||||
|
|
||||||
func NewCLISupport() *cliSupport {
|
func NewCLISupport() *cliSupport {
|
||||||
|
@ -319,7 +311,7 @@ cscli support dump -f /tmp/crowdsec-support.zip
|
||||||
`,
|
`,
|
||||||
Args: cobra.NoArgs,
|
Args: cobra.NoArgs,
|
||||||
DisableAutoGenTag: true,
|
DisableAutoGenTag: true,
|
||||||
RunE: func(_ *cobra.Command, _ []string) error {
|
Run: func(_ *cobra.Command, _ []string) {
|
||||||
var err error
|
var err error
|
||||||
var skipHub, skipDB, skipCAPI, skipLAPI, skipAgent bool
|
var skipHub, skipDB, skipCAPI, skipLAPI, skipAgent bool
|
||||||
infos := map[string][]byte{
|
infos := map[string][]byte{
|
||||||
|
@ -439,31 +431,11 @@ cscli support dump -f /tmp/crowdsec-support.zip
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
crash, err := collectCrash()
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf("could not collect crash dumps: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, filename := range crash {
|
|
||||||
content, err := os.ReadFile(filename)
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf("could not read crash dump %s: %s", filename, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
infos[SUPPORT_CRASH_PATH+filepath.Base(filename)] = content
|
|
||||||
}
|
|
||||||
|
|
||||||
w := bytes.NewBuffer(nil)
|
w := bytes.NewBuffer(nil)
|
||||||
zipWriter := zip.NewWriter(w)
|
zipWriter := zip.NewWriter(w)
|
||||||
|
|
||||||
for filename, data := range infos {
|
for filename, data := range infos {
|
||||||
header := &zip.FileHeader{
|
fw, err := zipWriter.Create(filename)
|
||||||
Name: filename,
|
|
||||||
Method: zip.Deflate,
|
|
||||||
// TODO: retain mtime where possible (esp. trace)
|
|
||||||
Modified: time.Now(),
|
|
||||||
}
|
|
||||||
fw, err := zipWriter.CreateHeader(header)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Errorf("Could not add zip entry for %s: %s", filename, err)
|
log.Errorf("Could not add zip entry for %s: %s", filename, err)
|
||||||
continue
|
continue
|
||||||
|
@ -473,19 +445,15 @@ cscli support dump -f /tmp/crowdsec-support.zip
|
||||||
|
|
||||||
err = zipWriter.Close()
|
err = zipWriter.Close()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("could not finalize zip file: %s", err)
|
log.Fatalf("could not finalize zip file: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if outFile == "-" {
|
|
||||||
_, err = os.Stdout.Write(w.Bytes())
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
err = os.WriteFile(outFile, w.Bytes(), 0o600)
|
err = os.WriteFile(outFile, w.Bytes(), 0o600)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("could not write zip file to %s: %s", outFile, err)
|
log.Fatalf("could not write zip file to %s: %s", outFile, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Infof("Written zip file to %s", outFile)
|
log.Infof("Written zip file to %s", outFile)
|
||||||
return nil
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -9,12 +9,11 @@ import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"gopkg.in/yaml.v3"
|
"gopkg.in/yaml.v2"
|
||||||
|
|
||||||
"github.com/crowdsecurity/go-cs-lib/trace"
|
"github.com/crowdsecurity/go-cs-lib/trace"
|
||||||
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/acquisition"
|
"github.com/crowdsecurity/crowdsec/pkg/acquisition"
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/alertcontext"
|
"github.com/crowdsecurity/crowdsec/pkg/alertcontext"
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/appsec"
|
"github.com/crowdsecurity/crowdsec/pkg/appsec"
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
|
||||||
|
@ -148,7 +147,7 @@ func runCrowdsec(cConfig *csconfig.Config, parsers *parser.Parsers, hub *cwhub.H
|
||||||
|
|
||||||
if cConfig.Prometheus != nil && cConfig.Prometheus.Enabled {
|
if cConfig.Prometheus != nil && cConfig.Prometheus.Enabled {
|
||||||
aggregated := false
|
aggregated := false
|
||||||
if cConfig.Prometheus.Level == configuration.CFG_METRICS_AGGREGATE {
|
if cConfig.Prometheus.Level == "aggregated" {
|
||||||
aggregated = true
|
aggregated = true
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -207,7 +206,7 @@ func serveCrowdsec(parsers *parser.Parsers, cConfig *csconfig.Config, hub *cwhub
|
||||||
}
|
}
|
||||||
|
|
||||||
func dumpBucketsPour() {
|
func dumpBucketsPour() {
|
||||||
fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucketpour-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o666)
|
fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucketpour-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("open: %s", err)
|
log.Fatalf("open: %s", err)
|
||||||
}
|
}
|
||||||
|
@ -230,7 +229,7 @@ func dumpBucketsPour() {
|
||||||
}
|
}
|
||||||
|
|
||||||
func dumpParserState() {
|
func dumpParserState() {
|
||||||
fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "parser-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o666)
|
fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "parser-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("open: %s", err)
|
log.Fatalf("open: %s", err)
|
||||||
}
|
}
|
||||||
|
@ -253,7 +252,7 @@ func dumpParserState() {
|
||||||
}
|
}
|
||||||
|
|
||||||
func dumpOverflowState() {
|
func dumpOverflowState() {
|
||||||
fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucket-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o666)
|
fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucket-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("open: %s", err)
|
log.Fatalf("open: %s", err)
|
||||||
}
|
}
|
||||||
|
|
|
@ -17,12 +17,12 @@ import (
|
||||||
)
|
)
|
||||||
|
|
||||||
func AuthenticatedLAPIClient(credentials csconfig.ApiCredentialsCfg, hub *cwhub.Hub) (*apiclient.ApiClient, error) {
|
func AuthenticatedLAPIClient(credentials csconfig.ApiCredentialsCfg, hub *cwhub.Hub) (*apiclient.ApiClient, error) {
|
||||||
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
|
scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("loading list of installed hub scenarios: %w", err)
|
return nil, fmt.Errorf("loading list of installed hub scenarios: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
appsecRules, err := hub.GetInstalledNamesByType(cwhub.APPSEC_RULES)
|
appsecRules, err := hub.GetInstalledItemNames(cwhub.APPSEC_RULES)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, fmt.Errorf("loading list of installed hub appsec rules: %w", err)
|
return nil, fmt.Errorf("loading list of installed hub appsec rules: %w", err)
|
||||||
}
|
}
|
||||||
|
@ -52,11 +52,11 @@ func AuthenticatedLAPIClient(credentials csconfig.ApiCredentialsCfg, hub *cwhub.
|
||||||
PapiURL: papiURL,
|
PapiURL: papiURL,
|
||||||
VersionPrefix: "v1",
|
VersionPrefix: "v1",
|
||||||
UpdateScenario: func() ([]string, error) {
|
UpdateScenario: func() ([]string, error) {
|
||||||
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
|
scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
appsecRules, err := hub.GetInstalledNamesByType(cwhub.APPSEC_RULES)
|
appsecRules, err := hub.GetInstalledItemNames(cwhub.APPSEC_RULES)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
|
@@ -6,7 +6,6 @@ import (
 "fmt"
 _ "net/http/pprof"
 "os"
-"path/filepath"
 "runtime"
 "runtime/pprof"
 "strings"
@@ -15,8 +14,6 @@ import (
 log "github.com/sirupsen/logrus"
 "gopkg.in/tomb.v2"

-"github.com/crowdsecurity/go-cs-lib/trace"
-
 "github.com/crowdsecurity/crowdsec/pkg/acquisition"
 "github.com/crowdsecurity/crowdsec/pkg/csconfig"
 "github.com/crowdsecurity/crowdsec/pkg/csplugin"
@@ -99,8 +96,8 @@ func LoadBuckets(cConfig *csconfig.Config, hub *cwhub.Hub) error {
 buckets = leakybucket.NewBuckets()

 log.Infof("Loading %d scenario files", len(files))
-
 holders, outputEventChan, err = leakybucket.LoadBuckets(cConfig.Crowdsec, hub, files, &bucketsTomb, buckets, flags.OrderEvent)
+
 if err != nil {
 return fmt.Errorf("scenario loading failed: %w", err)
 }
@@ -126,7 +123,7 @@ func LoadAcquisition(cConfig *csconfig.Config) ([]acquisition.DataSource, error)
 return nil, fmt.Errorf("failed to configure datasource for %s: %w", flags.OneShotDSN, err)
 }
 } else {
-dataSources, err = acquisition.LoadAcquisitionFromFile(cConfig.Crowdsec, cConfig.Prometheus)
+dataSources, err = acquisition.LoadAcquisitionFromFile(cConfig.Crowdsec)
 if err != nil {
 return nil, err
 }
@@ -233,10 +230,6 @@ func LoadConfig(configFile string, disableAgent bool, disableAPI bool, quiet boo
 return nil, fmt.Errorf("while loading configuration file: %w", err)
 }

-if err := trace.Init(filepath.Join(cConfig.ConfigPaths.DataDir, "trace")); err != nil {
-return nil, fmt.Errorf("while setting up trace directory: %w", err)
-}
-
 cConfig.Common.LogLevel = newLogLevel(cConfig.Common.LogLevel, flags)

 if dumpFolder != "" {
@@ -3,6 +3,7 @@ package main
 import (
 "fmt"
 "net/http"
+"time"

 "github.com/prometheus/client_golang/prometheus"
 "github.com/prometheus/client_golang/prometheus/promhttp"
@@ -11,7 +12,6 @@ import (
 "github.com/crowdsecurity/go-cs-lib/trace"
 "github.com/crowdsecurity/go-cs-lib/version"

-"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
 v1 "github.com/crowdsecurity/crowdsec/pkg/apiserver/controllers/v1"
 "github.com/crowdsecurity/crowdsec/pkg/cache"
 "github.com/crowdsecurity/crowdsec/pkg/csconfig"
@@ -21,8 +21,7 @@ import (
 "github.com/crowdsecurity/crowdsec/pkg/parser"
 )

-// Prometheus
+/*prometheus*/
-
 var globalParserHits = prometheus.NewCounterVec(
 prometheus.CounterOpts{
 Name: "cs_parser_hits_total",
@@ -30,7 +29,6 @@ var globalParserHits = prometheus.NewCounterVec(
 },
 []string{"source", "type"},
 )
-
 var globalParserHitsOk = prometheus.NewCounterVec(
 prometheus.CounterOpts{
 Name: "cs_parser_hits_ok_total",
@@ -38,7 +36,6 @@ var globalParserHitsOk = prometheus.NewCounterVec(
 },
 []string{"source", "type"},
 )
-
 var globalParserHitsKo = prometheus.NewCounterVec(
 prometheus.CounterOpts{
 Name: "cs_parser_hits_ko_total",
@@ -118,7 +115,9 @@ func computeDynamicMetrics(next http.Handler, dbClient *database.Client) http.Ha
 return
 }

-decisions, err := dbClient.QueryDecisionCountByScenario()
+decisionsFilters := make(map[string][]string, 0)
+
+decisions, err := dbClient.QueryDecisionCountByScenario(decisionsFilters)
 if err != nil {
 log.Errorf("Error querying decisions for metrics: %v", err)
 next.ServeHTTP(w, r)
@@ -139,6 +138,7 @@ func computeDynamicMetrics(next http.Handler, dbClient *database.Client) http.Ha
 }

 alerts, err := dbClient.AlertsCountPerScenario(alertsFilter)
+
 if err != nil {
 log.Errorf("Error querying alerts for metrics: %v", err)
 next.ServeHTTP(w, r)
@@ -161,7 +161,7 @@ func registerPrometheus(config *csconfig.PrometheusCfg) {

 // Registering prometheus
 // If in aggregated mode, do not register events associated with a source, to keep the cardinality low
-if config.Level == configuration.CFG_METRICS_AGGREGATE {
+if config.Level == "aggregated" {
 log.Infof("Loading aggregated prometheus collectors")
 prometheus.MustRegister(globalParserHits, globalParserHitsOk, globalParserHitsKo,
 globalCsInfo, globalParsingHistogram, globalPourHistogram,
@@ -193,6 +193,7 @@ func servePrometheus(config *csconfig.PrometheusCfg, dbClient *database.Client,
 defer trace.CatchPanic("crowdsec/servePrometheus")

 http.Handle("/metrics", computeDynamicMetrics(promhttp.Handler(), dbClient))
+log.Debugf("serving metrics after %s ms", time.Since(crowdsecT0))

 if err := http.ListenAndServe(fmt.Sprintf("%s:%d", config.ListenAddr, config.ListenPort), nil); err != nil {
 // in time machine, we most likely have the LAPI using the port
@@ -334,7 +334,8 @@ func Serve(cConfig *csconfig.Config, agentReady chan bool) error {
 log.Warningln("Exprhelpers loaded without database client.")
 }

-if cConfig.API.CTI != nil && *cConfig.API.CTI.Enabled {
+// XXX: just pass the CTICfg
+if cConfig.API.CTI != nil && cConfig.API.CTI.Enabled != nil && *cConfig.API.CTI.Enabled {
 log.Infof("Crowdsec CTI helper enabled")

 if err := exprhelpers.InitCrowdsecCTI(cConfig.API.CTI.Key, cConfig.API.CTI.CacheTimeout, cConfig.API.CTI.CacheSize, cConfig.API.CTI.LogLevel); err != nil {
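Editor's note: the hunk above adds an explicit nil check before dereferencing the Enabled pointer. A minimal sketch of that pattern, where the CTICfg type is a hypothetical stand-in and not taken from this diff:

package main

import "fmt"

// CTICfg is a hypothetical stand-in for the CTI section of the crowdsec
// configuration; only the Enabled field matters for this sketch.
type CTICfg struct {
	Enabled *bool
}

// ctiIsEnabled dereferences Enabled only after checking both the struct
// pointer and the field pointer, mirroring the guard added in the hunk above.
func ctiIsEnabled(cfg *CTICfg) bool {
	return cfg != nil && cfg.Enabled != nil && *cfg.Enabled
}

func main() {
	var cfg *CTICfg
	fmt.Println(ctiIsEnabled(cfg)) // false, no panic even though cfg is nil

	on := true
	fmt.Println(ctiIsEnabled(&CTICfg{Enabled: &on})) // true
}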
@@ -391,7 +392,7 @@ func Serve(cConfig *csconfig.Config, agentReady chan bool) error {
 }

 if cConfig.Common != nil && cConfig.Common.Daemonize {
-csdaemon.Notify(csdaemon.Ready, log.StandardLogger())
+csdaemon.NotifySystemd(log.StandardLogger())
 // wait for signals
 return HandleSignals(cConfig)
 }
32  cmd/cscti/Makefile  Normal file
@@ -0,0 +1,32 @@
+ifeq ($(OS), Windows_NT)
+SHELL := pwsh.exe
+.SHELLFLAGS := -NoProfile -Command
+EXT = .exe
+endif
+
+GO = go
+GOBUILD = $(GO) build
+
+BINARY_NAME = cscti$(EXT)
+PREFIX ?= "/"
+BIN_PREFIX = $(PREFIX)"/usr/local/bin/"
+
+.PHONY: all
+all: clean build
+
+build: clean
+$(GOBUILD) $(LD_OPTS) -o $(BINARY_NAME)
+
+.PHONY: install
+install: install-conf install-bin
+
+install-conf:
+
+install-bin:
+@install -v -m 755 -D "$(BINARY_NAME)" "$(BIN_PREFIX)/$(BINARY_NAME)" || exit
+
+uninstall:
+@$(RM) $(BIN_PREFIX)$(BINARY_NAME) $(WIN_IGNORE_ERR)
+
+clean:
+@$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR)
58  cmd/cscti/main.go  Normal file
@@ -0,0 +1,58 @@
+package main
+
+import (
+"errors"
+"os"
+
+"github.com/fatih/color"
+cc "github.com/ivanpirog/coloredcobra"
+"github.com/spf13/cobra"
+)
+
+var (
+ErrorNoAPIKey = errors.New("CTI_API_KEY is not set")
+)
+
+type Config struct {
+API struct {
+CTI struct {
+Key string `yaml:"key"`
+} `yaml:"cti"`
+} `yaml:"api"`
+}
+
+func main() {
+var configPath string
+
+cmd := &cobra.Command{
+Use: "cscti",
+Short: "cscti is a tool to query the CrowdSec CTI",
+ValidArgs: []string{"smoke-ip"},
+DisableAutoGenTag: true,
+}
+
+cc.Init(&cc.Config{
+RootCmd: cmd,
+Headings: cc.Yellow,
+Commands: cc.Green + cc.Bold,
+CmdShortDescr: cc.Cyan,
+Example: cc.Italic,
+ExecName: cc.Bold,
+Aliases: cc.Bold + cc.Italic,
+FlagsDataType: cc.White,
+Flags: cc.Green,
+FlagsDescr: cc.Cyan,
+})
+cmd.SetOut(color.Output)
+
+pflags := cmd.PersistentFlags()
+
+pflags.StringVarP(&configPath, "config", "c", "", "Path to the configuration file")
+
+cmd.AddCommand(NewCLISmokeIP().NewCommand())
+
+if err := cmd.Execute(); err != nil {
+color.Red(err.Error())
+os.Exit(1)
+}
+}
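Editor's note: the listing above declares a Config struct with yaml tags and a --config flag, but the flag is never consumed in the diff shown here. A minimal sketch of how the two could be wired together, assuming gopkg.in/yaml.v2 (already a dependency of the branch); the loadConfig helper itself is not part of the branch:

package main

import (
	"fmt"
	"os"

	"gopkg.in/yaml.v2"
)

// loadConfig is a hypothetical helper: it reads the file passed via --config
// and unmarshals it into the Config struct declared in main.go, so the CTI
// key could come from a config file instead of the CTI_API_KEY variable.
func loadConfig(path string) (*Config, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, fmt.Errorf("reading %s: %w", path, err)
	}

	cfg := &Config{}
	if err := yaml.Unmarshal(data, cfg); err != nil {
		return nil, fmt.Errorf("parsing %s: %w", path, err)
	}

	return cfg, nil
}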
90  cmd/cscti/smokeip.go  Normal file
@@ -0,0 +1,90 @@
+package main
+
+import (
+"context"
+"encoding/json"
+"errors"
+"fmt"
+"os"
+"time"
+
+"github.com/spf13/cobra"
+
+"github.com/crowdsecurity/crowdsec/pkg/cti"
+)
+
+type cliSmokeIP struct {}
+
+func NewCLISmokeIP() *cliSmokeIP {
+return &cliSmokeIP{}
+}
+
+func (cli *cliSmokeIP) smokeip(ip string) error {
+// check if CTI_API_KEY is set
+apiKey := os.Getenv("CTI_API_KEY")
+if apiKey == "" {
+return ErrorNoAPIKey
+}
+
+provider, err := cti.NewAPIKeyProvider(apiKey)
+if err != nil {
+return err
+}
+
+// create a new CTI client
+client, err := cti.NewClientWithResponses("https://cti.api.crowdsec.net/v2/", cti.WithRequestEditorFn(provider.Intercept))
+if err != nil {
+return err
+}
+
+ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+defer cancel()
+
+resp, err := client.GetSmokeIpWithResponse(ctx, ip)
+if err != nil {
+return err
+}
+
+switch {
+case resp.JSON404 != nil:
+return errors.New("ip not found")
+case resp.JSON403 != nil:
+return errors.New("forbidden")
+case resp.JSON500 != nil:
+return errors.New("internal server error")
+case resp.JSON429 != nil:
+return errors.New("too many requests")
+case resp.JSON400 != nil:
+return errors.New("bad request")
+case resp.JSON200 == nil:
+return fmt.Errorf("unexpected error %d", resp.StatusCode())
+}
+
+ctiObj := resp.JSON200
+
+var out []byte
+
+// re-encode (todo: yaml, human)
+
+out, err = json.MarshalIndent(ctiObj, "", " ")
+if err != nil {
+return err
+}
+
+fmt.Println(string(out))
+
+return nil
+}
+
+func (cli *cliSmokeIP) NewCommand() *cobra.Command {
+cmd := &cobra.Command{
+Use: "smoke-ip",
+Short: "Query the smoke data with a given IP",
+Args: cobra.ExactArgs(1),
+RunE: func(cmd *cobra.Command, args []string) error {
+return cli.smokeip(args[0])
+},
+}
+
+return cmd
+}
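Editor's note: a hedged example of how this new command path could be exercised end to end. The test is not part of the branch, assumes a valid key in CTI_API_KEY plus network access, and only uses the NewCLISmokeIP and smokeip identifiers defined above:

package main

import (
	"os"
	"testing"
)

// TestSmokeIP is a hypothetical integration test for the smoke-ip command.
func TestSmokeIP(t *testing.T) {
	if os.Getenv("CTI_API_KEY") == "" {
		t.Skip("CTI_API_KEY not set; skipping live CTI query")
	}

	// 1.1.1.1 is only an illustrative address; any routable IP works.
	if err := NewCLISmokeIP().smokeip("1.1.1.1"); err != nil {
		t.Fatalf("smoke-ip failed: %v", err)
	}
}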
@@ -5,11 +5,10 @@ import (
 "fmt"
 "os"

+"github.com/crowdsecurity/crowdsec/pkg/protobufs"
 "github.com/hashicorp/go-hclog"
 plugin "github.com/hashicorp/go-plugin"
-"gopkg.in/yaml.v3"
+"gopkg.in/yaml.v2"
-
-"github.com/crowdsecurity/crowdsec/pkg/protobufs"
 )

 type PluginConfig struct {
@@ -33,7 +32,6 @@ func (s *DummyPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
 if _, ok := s.PluginConfigByName[notification.Name]; !ok {
 return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
 }

 cfg := s.PluginConfigByName[notification.Name]
-
 if cfg.LogLevel != nil && *cfg.LogLevel != "" {
@@ -44,22 +42,19 @@ func (s *DummyPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
 logger.Debug(notification.Text)

 if cfg.OutputFile != nil && *cfg.OutputFile != "" {
-f, err := os.OpenFile(*cfg.OutputFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644)
+f, err := os.OpenFile(*cfg.OutputFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
 if err != nil {
 logger.Error(fmt.Sprintf("Cannot open notification file: %s", err))
 }
-
 if _, err := f.WriteString(notification.Text + "\n"); err != nil {
 f.Close()
 logger.Error(fmt.Sprintf("Cannot write notification to file: %s", err))
 }
-
 err = f.Close()
 if err != nil {
 logger.Error(fmt.Sprintf("Cannot close notification file: %s", err))
 }
 }
-
 fmt.Println(notification.Text)

 return &protobufs.Empty{}, nil
@@ -69,12 +64,11 @@ func (s *DummyPlugin) Configure(ctx context.Context, config *protobufs.Config) (
 d := PluginConfig{}
 err := yaml.Unmarshal(config.Config, &d)
 s.PluginConfigByName[d.Name] = d
-
 return &protobufs.Empty{}, err
 }

 func main() {
-handshake := plugin.HandshakeConfig{
+var handshake = plugin.HandshakeConfig{
 ProtocolVersion: 1,
 MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
 MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),
@@ -2,17 +2,15 @@ package main

 import (
 "context"
-"errors"
 "fmt"
 "os"
 "time"

+"github.com/crowdsecurity/crowdsec/pkg/protobufs"
 "github.com/hashicorp/go-hclog"
 plugin "github.com/hashicorp/go-plugin"
 mail "github.com/xhit/go-simple-mail/v2"
-"gopkg.in/yaml.v3"
+"gopkg.in/yaml.v2"
-
-"github.com/crowdsecurity/crowdsec/pkg/protobufs"
 )

 var baseLogger hclog.Logger = hclog.New(&hclog.LoggerOptions{
@@ -74,20 +72,19 @@ func (n *EmailPlugin) Configure(ctx context.Context, config *protobufs.Config) (
 }

 if d.Name == "" {
-return nil, errors.New("name is required")
+return nil, fmt.Errorf("name is required")
 }

 if d.SMTPHost == "" {
-return nil, errors.New("SMTP host is not set")
+return nil, fmt.Errorf("SMTP host is not set")
 }

 if d.ReceiverEmails == nil || len(d.ReceiverEmails) == 0 {
-return nil, errors.New("receiver emails are not set")
+return nil, fmt.Errorf("receiver emails are not set")
 }

 n.ConfigByName[d.Name] = d
 baseLogger.Debug(fmt.Sprintf("Email plugin '%s' use SMTP host '%s:%d'", d.Name, d.SMTPHost, d.SMTPPort))
-
 return &protobufs.Empty{}, nil
 }

@@ -95,7 +92,6 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
 if _, ok := n.ConfigByName[notification.Name]; !ok {
 return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
 }

 cfg := n.ConfigByName[notification.Name]
-
 logger := baseLogger.Named(cfg.Name)
@@ -121,7 +117,6 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
 server.ConnectTimeout, err = time.ParseDuration(cfg.ConnectTimeout)
 if err != nil {
 logger.Warn(fmt.Sprintf("invalid connect timeout '%s', using default '10s'", cfg.ConnectTimeout))
-
 server.ConnectTimeout = 10 * time.Second
 }
 }
@@ -130,18 +125,15 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
 server.SendTimeout, err = time.ParseDuration(cfg.SendTimeout)
 if err != nil {
 logger.Warn(fmt.Sprintf("invalid send timeout '%s', using default '10s'", cfg.SendTimeout))
-
 server.SendTimeout = 10 * time.Second
 }
 }

 logger.Debug("making smtp connection")
-
 smtpClient, err := server.Connect()
 if err != nil {
 return &protobufs.Empty{}, err
 }
-
 logger.Debug("smtp connection done")

 email := mail.NewMSG()
@@ -154,14 +146,12 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
 if err != nil {
 return &protobufs.Empty{}, err
 }
-
 logger.Info(fmt.Sprintf("sent email to %v", cfg.ReceiverEmails))
-
 return &protobufs.Empty{}, nil
 }

 func main() {
-handshake := plugin.HandshakeConfig{
+var handshake = plugin.HandshakeConfig{
 ProtocolVersion: 1,
 MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
 MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),
@@ -12,11 +12,10 @@ import (
 "os"
 "strings"

+"github.com/crowdsecurity/crowdsec/pkg/protobufs"
 "github.com/hashicorp/go-hclog"
 plugin "github.com/hashicorp/go-plugin"
-"gopkg.in/yaml.v3"
+"gopkg.in/yaml.v2"
-
-"github.com/crowdsecurity/crowdsec/pkg/protobufs"
 )

 type PluginConfig struct {
@@ -91,23 +90,18 @@ func getTLSClient(c *PluginConfig) error {

 tlsConfig.Certificates = []tls.Certificate{cert}
 }
-
 transport := &http.Transport{
 TLSClientConfig: tlsConfig,
 }
-
 if c.UnixSocket != "" {
 logger.Info(fmt.Sprintf("Using socket '%s'", c.UnixSocket))
-
 transport.DialContext = func(_ context.Context, _, _ string) (net.Conn, error) {
 return net.Dial("unix", strings.TrimSuffix(c.UnixSocket, "/"))
 }
 }
-
 c.Client = &http.Client{
 Transport: transport,
 }
-
 return nil
 }

@@ -115,7 +109,6 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific
 if _, ok := s.PluginConfigByName[notification.Name]; !ok {
 return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
 }

 cfg := s.PluginConfigByName[notification.Name]
-
 if cfg.LogLevel != nil && *cfg.LogLevel != "" {
@@ -128,14 +121,11 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific
 if err != nil {
 return nil, err
 }
-
 for headerName, headerValue := range cfg.Headers {
 logger.Debug(fmt.Sprintf("adding header %s: %s", headerName, headerValue))
 request.Header.Add(headerName, headerValue)
 }
-
 logger.Debug(fmt.Sprintf("making HTTP %s call to %s with body %s", cfg.Method, cfg.URL, notification.Text))
-
 resp, err := cfg.Client.Do(request.WithContext(ctx))
 if err != nil {
 logger.Error(fmt.Sprintf("Failed to make HTTP request : %s", err))
@@ -145,7 +135,7 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific

 respData, err := io.ReadAll(resp.Body)
 if err != nil {
-return nil, fmt.Errorf("failed to read response body got error %w", err)
+return nil, fmt.Errorf("failed to read response body got error %s", err)
 }

 logger.Debug(fmt.Sprintf("got response %s", string(respData)))
@@ -153,7 +143,6 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific
 if resp.StatusCode < 200 || resp.StatusCode >= 300 {
 logger.Warn(fmt.Sprintf("HTTP server returned non 200 status code: %d", resp.StatusCode))
 logger.Debug(fmt.Sprintf("HTTP server returned body: %s", string(respData)))
-
 return &protobufs.Empty{}, nil
 }

@@ -162,25 +151,21 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific

 func (s *HTTPPlugin) Configure(ctx context.Context, config *protobufs.Config) (*protobufs.Empty, error) {
 d := PluginConfig{}
-
 err := yaml.Unmarshal(config.Config, &d)
 if err != nil {
 return nil, err
 }
-
 err = getTLSClient(&d)
 if err != nil {
 return nil, err
 }
-
 s.PluginConfigByName[d.Name] = d
 logger.Debug(fmt.Sprintf("HTTP plugin '%s' use URL '%s'", d.Name, d.URL))
-
 return &protobufs.Empty{}, err
 }

 func main() {
-handshake := plugin.HandshakeConfig{
+var handshake = plugin.HandshakeConfig{
 ProtocolVersion: 1,
 MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
 MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),
@@ -5,12 +5,12 @@ import (
 "fmt"
 "os"

+"github.com/crowdsecurity/crowdsec/pkg/protobufs"
 "github.com/hashicorp/go-hclog"
 plugin "github.com/hashicorp/go-plugin"
-"github.com/slack-go/slack"
-"gopkg.in/yaml.v3"

-"github.com/crowdsecurity/crowdsec/pkg/protobufs"
+"github.com/slack-go/slack"
+"gopkg.in/yaml.v2"
 )

 type PluginConfig struct {
@@ -33,16 +33,13 @@ func (n *Notify) Notify(ctx context.Context, notification *protobufs.Notificatio
 if _, ok := n.ConfigByName[notification.Name]; !ok {
 return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
 }

 cfg := n.ConfigByName[notification.Name]
-
 if cfg.LogLevel != nil && *cfg.LogLevel != "" {
 logger.SetLevel(hclog.LevelFromString(*cfg.LogLevel))
 }
-
 logger.Info(fmt.Sprintf("found notify signal for %s config", notification.Name))
 logger.Debug(fmt.Sprintf("posting to %s webhook, message %s", cfg.Webhook, notification.Text))
-
 err := slack.PostWebhookContext(ctx, n.ConfigByName[notification.Name].Webhook, &slack.WebhookMessage{
 Text: notification.Text,
 })
@@ -55,19 +52,16 @@ func (n *Notify) Notify(ctx context.Context, notification *protobufs.Notificatio

 func (n *Notify) Configure(ctx context.Context, config *protobufs.Config) (*protobufs.Empty, error) {
 d := PluginConfig{}
-
 if err := yaml.Unmarshal(config.Config, &d); err != nil {
 return nil, err
 }
-
 n.ConfigByName[d.Name] = d
 logger.Debug(fmt.Sprintf("Slack plugin '%s' use URL '%s'", d.Name, d.Webhook))
-
 return &protobufs.Empty{}, nil
 }

 func main() {
-handshake := plugin.HandshakeConfig{
+var handshake = plugin.HandshakeConfig{
 ProtocolVersion: 1,
 MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
 MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),
@@ -10,11 +10,11 @@ import (
 "os"
 "strings"

+"github.com/crowdsecurity/crowdsec/pkg/protobufs"
 "github.com/hashicorp/go-hclog"
 plugin "github.com/hashicorp/go-plugin"
-"gopkg.in/yaml.v3"

-"github.com/crowdsecurity/crowdsec/pkg/protobufs"
+"gopkg.in/yaml.v2"
 )

 var logger hclog.Logger = hclog.New(&hclog.LoggerOptions{
@@ -44,7 +44,6 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio
 if _, ok := s.PluginConfigByName[notification.Name]; !ok {
 return &protobufs.Empty{}, fmt.Errorf("splunk invalid config name %s", notification.Name)
 }

 cfg := s.PluginConfigByName[notification.Name]
-
 if cfg.LogLevel != nil && *cfg.LogLevel != "" {
@@ -54,7 +53,6 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio
 logger.Info(fmt.Sprintf("received notify signal for %s config", notification.Name))

 p := Payload{Event: notification.Text}
-
 data, err := json.Marshal(p)
 if err != nil {
 return &protobufs.Empty{}, err
@@ -67,7 +65,6 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio

 req.Header.Add("Authorization", fmt.Sprintf("Splunk %s", cfg.Token))
 logger.Debug(fmt.Sprintf("posting event %s to %s", string(data), req.URL))
-
 resp, err := s.Client.Do(req.WithContext(ctx))
 if err != nil {
 return &protobufs.Empty{}, err
@@ -76,19 +73,15 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio
 if resp.StatusCode != http.StatusOK {
 content, err := io.ReadAll(resp.Body)
 if err != nil {
-return &protobufs.Empty{}, fmt.Errorf("got non 200 response and failed to read error %w", err)
+return &protobufs.Empty{}, fmt.Errorf("got non 200 response and failed to read error %s", err)
 }
-
 return &protobufs.Empty{}, fmt.Errorf("got non 200 response %s", string(content))
 }
-
 respData, err := io.ReadAll(resp.Body)
 if err != nil {
-return &protobufs.Empty{}, fmt.Errorf("failed to read response body got error %w", err)
+return &protobufs.Empty{}, fmt.Errorf("failed to read response body got error %s", err)
 }
-
 logger.Debug(fmt.Sprintf("got response %s", string(respData)))
-
 return &protobufs.Empty{}, nil
 }

@@ -97,12 +90,11 @@ func (s *Splunk) Configure(ctx context.Context, config *protobufs.Config) (*prot
 err := yaml.Unmarshal(config.Config, &d)
 s.PluginConfigByName[d.Name] = d
 logger.Debug(fmt.Sprintf("Splunk plugin '%s' use URL '%s'", d.Name, d.URL))
-
 return &protobufs.Empty{}, err
 }

 func main() {
-handshake := plugin.HandshakeConfig{
+var handshake = plugin.HandshakeConfig{
 ProtocolVersion: 1,
 MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
 MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),
1  debian/control  vendored
@@ -8,4 +8,3 @@ Package: crowdsec
 Architecture: any
 Description: Crowdsec - An open-source, lightweight agent to detect and respond to bad behaviors. It also automatically benefits from our global community-wide IP reputation database
 Depends: coreutils
-Suggests: cron
@@ -6,7 +6,7 @@ CROWDSEC_TEST_VERSION="dev"
 # All of the following flavors will be tested when using the "flavor" fixture
 CROWDSEC_TEST_FLAVORS="full"
 # CROWDSEC_TEST_FLAVORS="full,slim,debian"
-# CROWDSEC_TEST_FLAVORS="full,slim,debian,debian-slim"
+# CROWDSEC_TEST_FLAVORS="full,slim,debian,geoip,plugins-debian-slim,debian-geoip,debian-plugins"

 # network to use
 CROWDSEC_TEST_NETWORK="net-test"
@@ -42,7 +42,7 @@ def test_flavor_content(crowdsec, flavor):
 x = cs.cont.exec_run(
 'ls -1 /usr/local/lib/crowdsec/plugins/')
 stdout = x.output.decode()
-if 'slim' in flavor:
+if 'slim' in flavor or 'geoip' in flavor:
 # the exact return code and full message depend
 # on the 'ls' implementation (busybox vs coreutils)
 assert x.exit_code != 0
@@ -22,7 +22,8 @@ def test_missing_key_file(crowdsec, flavor):
 }

 with crowdsec(flavor=flavor, environment=env, wait_status=Status.EXITED) as cs:
-cs.wait_for_log("*local API server stopped with error: missing TLS key file*")
+# XXX: this message appears twice, is that normal?
+cs.wait_for_log("*while starting API server: missing TLS key file*")


 def test_missing_cert_file(crowdsec, flavor):
@@ -34,7 +35,7 @@ def test_missing_cert_file(crowdsec, flavor):
 }

 with crowdsec(flavor=flavor, environment=env, wait_status=Status.EXITED) as cs:
-cs.wait_for_log("*local API server stopped with error: missing TLS cert file*")
+cs.wait_for_log("*while starting API server: missing TLS cert file*")


 def test_tls_missing_ca(crowdsec, flavor, certs_dir):
62  go.mod
@@ -1,6 +1,6 @@
 module github.com/crowdsecurity/crowdsec

-go 1.22
+go 1.21

 // Don't use the toolchain directive to avoid uncontrolled downloads during
 // a build, especially in sandboxed environments (freebsd, gentoo...).
@@ -24,15 +24,15 @@ require (
 github.com/buger/jsonparser v1.1.1
 github.com/c-robinson/iplib v1.0.3
 github.com/cespare/xxhash/v2 v2.2.0
-github.com/corazawaf/libinjection-go v0.1.2
 github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607
 github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26
-github.com/crowdsecurity/go-cs-lib v0.0.10
+github.com/crowdsecurity/go-cs-lib v0.0.6
 github.com/crowdsecurity/grokky v0.2.1
 github.com/crowdsecurity/machineid v1.0.2
 github.com/davecgh/go-spew v1.1.1
+github.com/deepmap/oapi-codegen v1.16.2
 github.com/dghubble/sling v1.3.0
-github.com/docker/docker v24.0.9+incompatible
+github.com/docker/docker v24.0.7+incompatible
 github.com/docker/go-connections v0.4.0
 github.com/fatih/color v1.15.0
 github.com/fsnotify/fsnotify v1.6.0
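Editor's note: the branch pulls in github.com/deepmap/oapi-codegen and github.com/oapi-codegen/runtime, which, together with the new pkg/cti import in cmd/cscti/smokeip.go, suggests the CTI client is generated from the CTI swagger/OpenAPI document. The diff does not show how generation is invoked; a hedged sketch of one plausible wiring, where the spec path, package comment and output file name are all assumptions:

// Package cti would hold the generated CrowdSec CTI client.
//
// This file is NOT part of the branch; it only illustrates how oapi-codegen
// could be hooked up. The spec path (swagger.yaml) and output file name are
// hypothetical.
package cti

//go:generate go run github.com/deepmap/oapi-codegen/cmd/oapi-codegen -generate types,client -package cti -o client.gen.go swagger.yaml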
@@ -47,7 +47,7 @@ require (
 github.com/gofrs/uuid v4.0.0+incompatible
 github.com/golang-jwt/jwt/v4 v4.5.0
 github.com/google/go-querystring v1.0.0
-github.com/google/uuid v1.3.0
+github.com/google/uuid v1.5.0
 github.com/google/winops v0.0.0-20230712152054-af9b550d0601
 github.com/goombaio/namegenerator v0.0.0-20181006234301-989e774b106e
 github.com/gorilla/websocket v1.5.0
@@ -56,16 +56,17 @@ require (
 github.com/hashicorp/go-version v1.2.1
 github.com/hexops/gotextdiff v1.0.3
 github.com/ivanpirog/coloredcobra v1.0.1
-github.com/jackc/pgx/v4 v4.18.2
+github.com/jackc/pgx/v4 v4.14.1
 github.com/jarcoal/httpmock v1.1.0
 github.com/jszwec/csvutil v1.5.1
 github.com/lithammer/dedent v1.1.0
-github.com/mattn/go-isatty v0.0.19
+github.com/mattn/go-isatty v0.0.20
 github.com/mattn/go-sqlite3 v1.14.16
 github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826
 github.com/nxadm/tail v1.4.8
-github.com/oschwald/geoip2-golang v1.9.0
-github.com/oschwald/maxminddb-golang v1.12.0
+github.com/oapi-codegen/runtime v1.1.1
+github.com/oschwald/geoip2-golang v1.4.0
+github.com/oschwald/maxminddb-golang v1.8.0
 github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58
 github.com/pkg/errors v0.9.1
 github.com/prometheus/client_golang v1.16.0
@@ -82,12 +83,12 @@ require (
 github.com/umahmood/haversine v0.0.0-20151105152445-808ab04add26
 github.com/wasilibs/go-re2 v1.3.0
 github.com/xhit/go-simple-mail/v2 v2.16.0
-golang.org/x/crypto v0.22.0
+golang.org/x/crypto v0.17.0
-golang.org/x/mod v0.11.0
+golang.org/x/mod v0.12.0
-golang.org/x/sys v0.19.0
+golang.org/x/sys v0.15.0
 golang.org/x/text v0.14.0
 google.golang.org/grpc v1.56.3
-google.golang.org/protobuf v1.33.0
+google.golang.org/protobuf v1.31.0
 gopkg.in/natefinch/lumberjack.v2 v2.2.1
 gopkg.in/tomb.v2 v2.0.0-20161208151619-d5d1b5820637
 gopkg.in/yaml.v2 v2.4.0
@@ -100,11 +101,14 @@ require (
 github.com/Masterminds/goutils v1.1.1 // indirect
 github.com/Microsoft/go-winio v0.6.1 // indirect
 github.com/ahmetalpbalkan/dlog v0.0.0-20170105205344-4fb5f8204f26 // indirect
+github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect
 github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
 github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef // indirect
 github.com/beorn7/perks v1.0.1 // indirect
-github.com/bytedance/sonic v1.9.1 // indirect
+github.com/bytedance/sonic v1.10.0-rc3 // indirect
-github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect
+github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
+github.com/chenzhuoyu/iasm v0.9.0 // indirect
+github.com/corazawaf/libinjection-go v0.1.2 // indirect
 github.com/coreos/go-systemd/v22 v22.5.0 // indirect
 github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect
 github.com/creack/pty v1.1.18 // indirect
@@ -123,7 +127,7 @@ require (
 github.com/go-openapi/spec v0.20.0 // indirect
 github.com/go-playground/locales v0.14.1 // indirect
 github.com/go-playground/universal-translator v0.18.1 // indirect
-github.com/go-playground/validator/v10 v10.14.0 // indirect
+github.com/go-playground/validator/v10 v10.14.1 // indirect
 github.com/go-stack/stack v1.8.0 // indirect
 github.com/goccy/go-json v0.10.2 // indirect
 github.com/gogo/protobuf v1.3.2 // indirect
@@ -137,24 +141,24 @@ require (
 github.com/imdario/mergo v0.3.12 // indirect
 github.com/inconshreveable/mousetrap v1.1.0 // indirect
 github.com/jackc/chunkreader/v2 v2.0.1 // indirect
-github.com/jackc/pgconn v1.14.3 // indirect
+github.com/jackc/pgconn v1.10.1 // indirect
 github.com/jackc/pgio v1.0.0 // indirect
 github.com/jackc/pgpassfile v1.0.0 // indirect
-github.com/jackc/pgproto3/v2 v2.3.3 // indirect
+github.com/jackc/pgproto3/v2 v2.2.0 // indirect
-github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect
+github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
-github.com/jackc/pgtype v1.14.0 // indirect
+github.com/jackc/pgtype v1.9.1 // indirect
 github.com/jmespath/go-jmespath v0.4.0 // indirect
 github.com/josharian/intern v1.0.0 // indirect
 github.com/json-iterator/go v1.1.12 // indirect
 github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 // indirect
 github.com/klauspost/compress v1.17.3 // indirect
-github.com/klauspost/cpuid/v2 v2.2.4 // indirect
+github.com/klauspost/cpuid/v2 v2.2.5 // indirect
 github.com/leodido/go-urn v1.2.4 // indirect
 github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
 github.com/magefile/mage v1.15.0 // indirect
 github.com/mailru/easyjson v0.7.7 // indirect
 github.com/mattn/go-colorable v0.1.13 // indirect
-github.com/mattn/go-runewidth v0.0.13 // indirect
+github.com/mattn/go-runewidth v0.0.15 // indirect
 github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
 github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
 github.com/mitchellh/copystructure v1.2.0 // indirect
@@ -169,7 +173,7 @@ require (
 github.com/oklog/run v1.0.0 // indirect
 github.com/opencontainers/go-digest v1.0.0 // indirect
 github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799 // indirect
-github.com/pelletier/go-toml/v2 v2.0.8 // indirect
+github.com/pelletier/go-toml/v2 v2.0.9 // indirect
 github.com/petar-dambovaliev/aho-corasick v0.0.0-20230725210150-fb29fc3c913e // indirect
 github.com/pierrec/lz4/v4 v4.1.18 // indirect
 github.com/pmezard/go-difflib v1.0.0 // indirect
@@ -197,13 +201,13 @@ require (
 github.com/yusufpapurcu/wmi v1.2.3 // indirect
 github.com/zclconf/go-cty v1.8.0 // indirect
 go.mongodb.org/mongo-driver v1.9.4 // indirect
-golang.org/x/arch v0.3.0 // indirect
+golang.org/x/arch v0.4.0 // indirect
-golang.org/x/net v0.24.0 // indirect
+golang.org/x/net v0.19.0 // indirect
 golang.org/x/sync v0.6.0 // indirect
-golang.org/x/term v0.19.0 // indirect
+golang.org/x/term v0.15.0 // indirect
-golang.org/x/time v0.3.0 // indirect
+golang.org/x/time v0.5.0 // indirect
-golang.org/x/tools v0.8.1-0.20230428195545-5283a0178901 // indirect
+golang.org/x/tools v0.12.0 // indirect
-golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
+golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect
 google.golang.org/appengine v1.6.7 // indirect
 google.golang.org/genproto/googleapis/rpc v0.0.0-20230525234030-28d5490b6b19 // indirect
 gopkg.in/inf.v0 v0.9.1 // indirect
124  go.sum
@@ -26,6 +26,7 @@ github.com/Netflix/go-expect v0.0.0-20220104043353-73e0943537d2/go.mod h1:HBCaDe
 github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
 github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
 github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
+github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk=
 github.com/agext/levenshtein v1.2.1 h1:QmvMAjj2aEICytGiWzmxoE0x2KZvE0fvmqMOfy2tjT8=
 github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
 github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
@@ -41,6 +42,8 @@ github.com/alexliesenfeld/health v0.8.0/go.mod h1:TfNP0f+9WQVWMQRzvMUjlws4ceXKEL
 github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
 github.com/antonmedv/expr v1.15.3 h1:q3hOJZNvLvhqE8OHBs1cFRdbXFNKuA+bHmRaI+AmRmI=
 github.com/antonmedv/expr v1.15.3/go.mod h1:0E/6TxnOlRNp81GMzX9QfDPAmHo2Phg00y4JUv1ihsE=
+github.com/apapsch/go-jsonmerge/v2 v2.0.0 h1:axGnT1gRIfimI7gJifB699GoE/oq+F2MU7Dml6nw9rQ=
+github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP2+08jFMw88y4klk=
 github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw=
 github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo=
 github.com/appleboy/gin-jwt/v2 v2.8.0 h1:Glo7cb9eBR+hj8Y7WzgfkOlqCaNLjP+RV4dNO3fpdps=
@@ -70,18 +73,23 @@ github.com/blackfireio/osinfo v1.0.3 h1:Yk2t2GTPjBcESv6nDSWZKO87bGMQgO+Hi9OoXPpx
 github.com/blackfireio/osinfo v1.0.3/go.mod h1:Pd987poVNmd5Wsx6PRPw4+w7kLlf9iJxoRKPtPAjOrA=
 github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=
 github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0=
+github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w=
 github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
 github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
 github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
-github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s=
+github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
-github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
+github.com/bytedance/sonic v1.10.0-rc3 h1:uNSnscRapXTwUgTyOF0GVljYD08p9X/Lbr9MweSV3V0=
+github.com/bytedance/sonic v1.10.0-rc3/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
 github.com/c-robinson/iplib v1.0.3 h1:NG0UF0GoEsrC1/vyfX1Lx2Ss7CySWl3KqqXh3q4DdPU=
 github.com/c-robinson/iplib v1.0.3/go.mod h1:i3LuuFL1hRT5gFpBRnEydzw8R6yhGkF4szNDIbF8pgo=
 github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
 github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
-github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
 github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
+github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
+github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
+github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo=
+github.com/chenzhuoyu/iasm v0.9.0/go.mod h1:Xjy2NpN3h7aUqeqM+woSuuvxmIe6+DDsiNLIrkAmYog=
 github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
 github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
 github.com/corazawaf/libinjection-go v0.1.2 h1:oeiV9pc5rvJ+2oqOqXEAMJousPpGiup6f7Y3nZj5GoM=
@@ -102,8 +110,8 @@ github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607 h1:hyrYw3h
 github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607/go.mod h1:br36fEqurGYZQGit+iDYsIzW0FF6VufMbDzyyLxEuPA=
 github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26 h1:r97WNVC30Uen+7WnLs4xDScS/Ex988+id2k6mDf8psU=
 github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26/go.mod h1:zpv7r+7KXwgVUZnUNjyP22zc/D7LKjyoY02weH2RBbk=
-github.com/crowdsecurity/go-cs-lib v0.0.10 h1:Twt/y/rYCUspGY1zxDnGurL2svRSREAz+2+puLepd9c=
+github.com/crowdsecurity/go-cs-lib v0.0.6 h1:Ef6MylXe0GaJE9vrfvxEdbHb31+JUP1os+murPz7Pos=
-github.com/crowdsecurity/go-cs-lib v0.0.10/go.mod h1:8FMKNGsh3hMZi2SEv6P15PURhEJnZV431XjzzBSuf0k=
+github.com/crowdsecurity/go-cs-lib v0.0.6/go.mod h1:8FMKNGsh3hMZi2SEv6P15PURhEJnZV431XjzzBSuf0k=
 github.com/crowdsecurity/grokky v0.2.1 h1:t4VYnDlAd0RjDM2SlILalbwfCrQxtJSMGdQOR0zwkE4=
 github.com/crowdsecurity/grokky v0.2.1/go.mod h1:33usDIYzGDsgX1kHAThCbseso6JuWNJXOzRQDGXHtWM=
 github.com/crowdsecurity/machineid v1.0.2 h1:wpkpsUghJF8Khtmn/tg6GxgdhLA1Xflerh5lirI+bdc=
|
github.com/crowdsecurity/machineid v1.0.2 h1:wpkpsUghJF8Khtmn/tg6GxgdhLA1Xflerh5lirI+bdc=
|
||||||
|
@ -112,12 +120,14 @@ github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2
|
||||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||||
|
github.com/deepmap/oapi-codegen v1.16.2 h1:xGHx0dNqYfy9gE8a7AVgVM8Sd5oF9SEgePzP+UPAUXI=
|
||||||
|
github.com/deepmap/oapi-codegen v1.16.2/go.mod h1:rdYoEA2GE+riuZ91DvpmBX9hJbQpuY9wchXpfQ3n+ho=
|
||||||
github.com/dghubble/sling v1.3.0 h1:pZHjCJq4zJvc6qVQ5wN1jo5oNZlNE0+8T/h0XeXBUKU=
|
github.com/dghubble/sling v1.3.0 h1:pZHjCJq4zJvc6qVQ5wN1jo5oNZlNE0+8T/h0XeXBUKU=
|
||||||
github.com/dghubble/sling v1.3.0/go.mod h1:XXShWaBWKzNLhu2OxikSNFrlsvowtz4kyRuXUG7oQKY=
|
github.com/dghubble/sling v1.3.0/go.mod h1:XXShWaBWKzNLhu2OxikSNFrlsvowtz4kyRuXUG7oQKY=
|
||||||
github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8=
|
github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8=
|
||||||
github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
|
github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
|
||||||
github.com/docker/docker v24.0.9+incompatible h1:HPGzNmwfLZWdxHqK9/II92pyi1EpYKsAqcl4G0Of9v0=
|
github.com/docker/docker v24.0.7+incompatible h1:Wo6l37AuwP3JaMnZa226lzVXGA3F9Ig1seQen0cKYlM=
|
||||||
github.com/docker/docker v24.0.9+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
github.com/docker/docker v24.0.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||||
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
|
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
|
||||||
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
|
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
|
||||||
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
|
||||||
|
@ -251,8 +261,8 @@ github.com/go-playground/universal-translator v0.17.0/go.mod h1:UkSxE5sNxxRwHyU+
|
||||||
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
|
||||||
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
|
||||||
github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
|
github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4=
|
||||||
github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js=
|
github.com/go-playground/validator/v10 v10.14.1 h1:9c50NUPC30zyuKprjL3vNZ0m5oG+jU0zvx4AqHGnv4k=
|
||||||
github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
|
github.com/go-playground/validator/v10 v10.14.1/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
|
||||||
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||||
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
|
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
|
||||||
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||||
|
@ -325,8 +335,8 @@ github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/
|
||||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||||
github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU=
|
||||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||||
github.com/google/winops v0.0.0-20230712152054-af9b550d0601 h1:XvlrmqZIuwxuRE88S9mkxX+FkV+YakqbiAC5Z4OzDnM=
|
github.com/google/winops v0.0.0-20230712152054-af9b550d0601 h1:XvlrmqZIuwxuRE88S9mkxX+FkV+YakqbiAC5Z4OzDnM=
|
||||||
github.com/google/winops v0.0.0-20230712152054-af9b550d0601/go.mod h1:rT1mcjzuvcDDbRmUTsoH6kV0DG91AkFe9UCjASraK5I=
|
github.com/google/winops v0.0.0-20230712152054-af9b550d0601/go.mod h1:rT1mcjzuvcDDbRmUTsoH6kV0DG91AkFe9UCjASraK5I=
|
||||||
github.com/goombaio/namegenerator v0.0.0-20181006234301-989e774b106e h1:XmA6L9IPRdUr28a+SK/oMchGgQy159wvzXA5tJ7l+40=
|
github.com/goombaio/namegenerator v0.0.0-20181006234301-989e774b106e h1:XmA6L9IPRdUr28a+SK/oMchGgQy159wvzXA5tJ7l+40=
|
||||||
|
@ -368,8 +378,8 @@ github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsU
|
||||||
github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o=
|
github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o=
|
||||||
github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY=
|
github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY=
|
||||||
github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=
|
github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=
|
||||||
github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w=
|
github.com/jackc/pgconn v1.10.1 h1:DzdIHIjG1AxGwoEEqS+mGsURyjt4enSmqzACXvVzOT8=
|
||||||
github.com/jackc/pgconn v1.14.3/go.mod h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM=
|
github.com/jackc/pgconn v1.10.1/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=
|
||||||
github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE=
|
github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE=
|
||||||
github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=
|
github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=
|
||||||
github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE=
|
github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE=
|
||||||
|
@ -385,26 +395,26 @@ github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvW
|
||||||
github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
|
github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
|
||||||
github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
|
github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
|
||||||
github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
|
github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
|
||||||
github.com/jackc/pgproto3/v2 v2.3.3 h1:1HLSx5H+tXR9pW3in3zaztoEwQYRC9SQaYUHjTSUOag=
|
github.com/jackc/pgproto3/v2 v2.2.0 h1:r7JypeP2D3onoQTCxWdTpCtJ4D+qpKr0TxvoyMhZ5ns=
|
||||||
github.com/jackc/pgproto3/v2 v2.3.3/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
|
github.com/jackc/pgproto3/v2 v2.2.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg=
|
||||||
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
|
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
|
||||||
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
|
|
||||||
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
|
|
||||||
github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
|
github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
|
||||||
github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
|
github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
|
||||||
github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
|
github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
|
||||||
github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM=
|
github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM=
|
||||||
github.com/jackc/pgtype v1.14.0 h1:y+xUdabmyMkJLyApYuPj38mW+aAIqCe5uuBB51rH3Vw=
|
github.com/jackc/pgtype v1.9.1 h1:MJc2s0MFS8C3ok1wQTdQxWuXQcB6+HwAm5x1CzW7mf0=
|
||||||
github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=
|
github.com/jackc/pgtype v1.9.1/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=
|
||||||
github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
|
github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
|
||||||
github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
|
github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
|
||||||
github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=
|
github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=
|
||||||
github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs=
|
github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs=
|
||||||
github.com/jackc/pgx/v4 v4.18.2 h1:xVpYkNR5pk5bMCZGfClbO962UIqVABcAGt7ha1s/FeU=
|
github.com/jackc/pgx/v4 v4.14.1 h1:71oo1KAGI6mXhLiTMn6iDFcp3e7+zon/capWjl2OEFU=
|
||||||
github.com/jackc/pgx/v4 v4.18.2/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw=
|
github.com/jackc/pgx/v4 v4.14.1/go.mod h1:RgDuE4Z34o7XE92RpLsvFiOEfrAUT0Xt2KxvX73W06M=
|
||||||
github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||||
github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||||
github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||||
|
github.com/jackc/puddle v1.2.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||||
github.com/jarcoal/httpmock v1.1.0 h1:F47ChZj1Y2zFsCXxNkBPwNNKnAyOATcdQibk0qEdVCE=
|
github.com/jarcoal/httpmock v1.1.0 h1:F47ChZj1Y2zFsCXxNkBPwNNKnAyOATcdQibk0qEdVCE=
|
||||||
github.com/jarcoal/httpmock v1.1.0/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik=
|
github.com/jarcoal/httpmock v1.1.0/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik=
|
||||||
github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE=
|
github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE=
|
||||||
|
@ -422,6 +432,7 @@ github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnr
|
||||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||||
github.com/jszwec/csvutil v1.5.1 h1:c3GFBhj6DFMUl4dMK3+B6rz2+LWWS/e9VJiVJ9t9kfQ=
|
github.com/jszwec/csvutil v1.5.1 h1:c3GFBhj6DFMUl4dMK3+B6rz2+LWWS/e9VJiVJ9t9kfQ=
|
||||||
github.com/jszwec/csvutil v1.5.1/go.mod h1:Rpu7Uu9giO9subDyMCIQfHVDuLrcaC36UA4YcJjGBkg=
|
github.com/jszwec/csvutil v1.5.1/go.mod h1:Rpu7Uu9giO9subDyMCIQfHVDuLrcaC36UA4YcJjGBkg=
|
||||||
|
github.com/juju/gnuflag v0.0.0-20171113085948-2ce1bb71843d/go.mod h1:2PavIy+JPciBPrBUjwbNvtwB6RQlve+hkpll6QSNmOE=
|
||||||
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
|
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
|
||||||
github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4=
|
github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4=
|
||||||
github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA=
|
github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA=
|
||||||
|
@ -436,8 +447,9 @@ github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHU
|
||||||
github.com/klauspost/compress v1.17.3 h1:qkRjuerhUU1EmXLYGkSH6EZL+vPSxIrYjLNAK4slzwA=
|
github.com/klauspost/compress v1.17.3 h1:qkRjuerhUU1EmXLYGkSH6EZL+vPSxIrYjLNAK4slzwA=
|
||||||
github.com/klauspost/compress v1.17.3/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
|
github.com/klauspost/compress v1.17.3/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM=
|
||||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||||
github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk=
|
github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg=
|
||||||
github.com/klauspost/cpuid/v2 v2.2.4/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY=
|
github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
|
||||||
|
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
|
||||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||||
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
|
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
|
||||||
|
@ -490,10 +502,10 @@ github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd
|
||||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||||
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
||||||
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||||
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
|
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||||
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||||
github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU=
|
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
|
||||||
github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||||
github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
|
github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
|
||||||
github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
|
github.com/mattn/go-sqlite3 v1.14.16/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
|
||||||
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
||||||
|
@ -538,23 +550,26 @@ github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRW
|
||||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||||
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
|
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
|
||||||
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
||||||
|
github.com/oapi-codegen/runtime v1.1.1 h1:EXLHh0DXIJnWhdRPN2w4MXAzFyE4CskzhNLUmtpMYro=
|
||||||
|
github.com/oapi-codegen/runtime v1.1.1/go.mod h1:SK9X900oXmPWilYR5/WKPzt3Kqxn/uS/+lbpREv+eCg=
|
||||||
github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw=
|
github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw=
|
||||||
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
|
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
|
||||||
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
|
||||||
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
|
||||||
github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799 h1:rc3tiVYb5z54aKaDfakKn0dDjIyPpTtszkjuMzyt7ec=
|
github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799 h1:rc3tiVYb5z54aKaDfakKn0dDjIyPpTtszkjuMzyt7ec=
|
||||||
github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
|
github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
|
||||||
github.com/oschwald/geoip2-golang v1.9.0 h1:uvD3O6fXAXs+usU+UGExshpdP13GAqp4GBrzN7IgKZc=
|
github.com/oschwald/geoip2-golang v1.4.0 h1:5RlrjCgRyIGDz/mBmPfnAF4h8k0IAcRv9PvrpOfz+Ug=
|
||||||
github.com/oschwald/geoip2-golang v1.9.0/go.mod h1:BHK6TvDyATVQhKNbQBdrj9eAvuwOMi2zSFXizL3K81Y=
|
github.com/oschwald/geoip2-golang v1.4.0/go.mod h1:8QwxJvRImBH+Zl6Aa6MaIcs5YdlZSTKtzmPGzQqi9ng=
|
||||||
github.com/oschwald/maxminddb-golang v1.12.0 h1:9FnTOD0YOhP7DGxGsq4glzpGy5+w7pq50AS6wALUMYs=
|
github.com/oschwald/maxminddb-golang v1.6.0/go.mod h1:DUJFucBg2cvqx42YmDa/+xHvb0elJtOm3o4aFQ/nb/w=
|
||||||
github.com/oschwald/maxminddb-golang v1.12.0/go.mod h1:q0Nob5lTCqyQ8WT6FYgS1L7PXKVVbgiymefNwIjPzgY=
|
github.com/oschwald/maxminddb-golang v1.8.0 h1:Uh/DSnGoxsyp/KYbY1AuP0tYEwfs0sCph9p/UMXK/Hk=
|
||||||
|
github.com/oschwald/maxminddb-golang v1.8.0/go.mod h1:RXZtst0N6+FY/3qCNmZMBApR19cdQj43/NM9VkrNAis=
|
||||||
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0=
|
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0=
|
||||||
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y=
|
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y=
|
||||||
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
|
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
|
||||||
github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
|
github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
|
||||||
github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE=
|
github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE=
|
||||||
github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
|
github.com/pelletier/go-toml/v2 v2.0.9 h1:uH2qQXheeefCCkuBBSLi7jCiSmj3VRh2+Goq2N7Xxu0=
|
||||||
github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
|
github.com/pelletier/go-toml/v2 v2.0.9/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
|
||||||
github.com/petar-dambovaliev/aho-corasick v0.0.0-20230725210150-fb29fc3c913e h1:POJco99aNgosh92lGqmx7L1ei+kCymivB/419SD15PQ=
|
github.com/petar-dambovaliev/aho-corasick v0.0.0-20230725210150-fb29fc3c913e h1:POJco99aNgosh92lGqmx7L1ei+kCymivB/419SD15PQ=
|
||||||
github.com/petar-dambovaliev/aho-corasick v0.0.0-20230725210150-fb29fc3c913e/go.mod h1:EHPiTAKtiFmrMldLUNswFwfZ2eJIYBHktdaUTZxYWRw=
|
github.com/petar-dambovaliev/aho-corasick v0.0.0-20230725210150-fb29fc3c913e/go.mod h1:EHPiTAKtiFmrMldLUNswFwfZ2eJIYBHktdaUTZxYWRw=
|
||||||
github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
github.com/pierrec/lz4/v4 v4.1.15/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||||
|
@ -638,6 +653,7 @@ github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyh
|
||||||
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
|
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
|
||||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||||
|
github.com/spkg/bom v0.0.0-20160624110644-59b7046e48ad/go.mod h1:qLr4V1qq6nMqFKkMo8ZTx3f+BZEkzsRUY10Xsm2mwU0=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||||
|
@ -736,8 +752,8 @@ go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
|
||||||
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
|
go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
|
||||||
go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM=
|
go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM=
|
||||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||||
golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k=
|
golang.org/x/arch v0.4.0 h1:A8WCeEWhLwPBKNbFi5Wv5UTCBx5zzubnXDlMOFAzFMc=
|
||||||
golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
golang.org/x/arch v0.4.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
|
@ -757,8 +773,8 @@ golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5y
|
||||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
|
golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
|
||||||
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
|
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
|
||||||
golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30=
|
golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
|
||||||
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M=
|
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
|
||||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||||
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
|
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
|
||||||
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||||
|
@ -766,8 +782,8 @@ golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||||
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
golang.org/x/mod v0.11.0 h1:bUO06HqtnRcc/7l71XBe4WcqTZ+3AH1J59zWDDwLKgU=
|
golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
|
||||||
golang.org/x/mod v0.11.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||||
|
@ -791,8 +807,8 @@ golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
|
||||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
||||||
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
|
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
|
||||||
golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w=
|
golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
|
||||||
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
|
golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
|
||||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
@ -820,6 +836,7 @@ golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||||
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20191224085550-c709ea063b76/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
@ -831,7 +848,6 @@ golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||||
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
|
||||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
|
@ -841,8 +857,8 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o=
|
golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc=
|
||||||
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
|
@ -850,8 +866,8 @@ golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
|
||||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||||
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
|
||||||
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
|
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
|
||||||
golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q=
|
golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4=
|
||||||
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
|
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
|
@ -866,8 +882,8 @@ golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||||
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||||
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
|
||||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||||
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
|
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
|
||||||
golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
|
||||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
|
@ -889,15 +905,16 @@ golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roY
|
||||||
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||||
golang.org/x/tools v0.8.1-0.20230428195545-5283a0178901 h1:0wxTF6pSjIIhNt7mo9GvjDfzyCOiWhmICgtO/Ah948s=
|
golang.org/x/tools v0.12.0 h1:YW6HUoUmYBpwSgyaGaZq1fHjrBjX1rlpZ54T6mu2kss=
|
||||||
golang.org/x/tools v0.8.1-0.20230428195545-5283a0178901/go.mod h1:JxBZ99ISMI5ViVkT1tr6tdNmXeTrcpVSD3vZ1RsRdN4=
|
golang.org/x/tools v0.12.0/go.mod h1:Sc0INKfu04TlqNoRA1hgpFZbhYXHPr4V5DzpSBTPqQM=
|
||||||
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
|
|
||||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U=
|
||||||
|
golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||||
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||||
google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
|
google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
|
||||||
|
@ -908,8 +925,8 @@ google.golang.org/grpc v1.56.3 h1:8I4C0Yq1EjstUzUJzpcRVbuYA2mODtEmpWiQoN/b2nc=
|
||||||
google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s=
|
google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s=
|
||||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||||
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
|
google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
|
||||||
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
|
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
@ -952,6 +969,7 @@ k8s.io/klog/v2 v2.100.1 h1:7WCHKK6K8fNhTqfBhISHQ97KrnJNFZMcQvKp7gP/tmg=
|
||||||
k8s.io/klog/v2 v2.100.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0=
|
k8s.io/klog/v2 v2.100.1/go.mod h1:y1WjHnz7Dj687irZUWR/WLkLc5N1YHtjLdmgWjndZn0=
|
||||||
k8s.io/utils v0.0.0-20230406110748-d93618cff8a2 h1:qY1Ad8PODbnymg2pRbkyMT/ylpTrCM8P2RJ0yroCyIk=
|
k8s.io/utils v0.0.0-20230406110748-d93618cff8a2 h1:qY1Ad8PODbnymg2pRbkyMT/ylpTrCM8P2RJ0yroCyIk=
|
||||||
k8s.io/utils v0.0.0-20230406110748-d93618cff8a2/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
|
k8s.io/utils v0.0.0-20230406110748-d93618cff8a2/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0=
|
||||||
|
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
|
||||||
rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE=
|
rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE=
|
||||||
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
|
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
|
||||||
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
|
rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4=
|
||||||
@@ -15,6 +15,4 @@ if ($version.Contains("-"))
 Set-Location .\windows\Chocolatey\crowdsec
 Copy-Item ..\..\..\crowdsec_$version.msi tools\crowdsec.msi

 choco pack --version $version
-
-Copy-Item crowdsec.$version.nupkg ..\..\..\
@@ -1,7 +1,7 @@
 param (
     $version
 )
-$env:Path += ";C:\Program Files (x86)\WiX Toolset v3.14\bin"
+$env:Path += ";C:\Program Files (x86)\WiX Toolset v3.11\bin"
 if ($version.StartsWith("v"))
 {
     $version = $version.Substring(1)
@@ -54,7 +54,7 @@ type DataSource interface {
 	GetMetrics() []prometheus.Collector // Returns pointers to metrics that are managed by the module
 	GetAggregMetrics() []prometheus.Collector // Returns pointers to metrics that are managed by the module (aggregated mode, limits cardinality)
 	UnmarshalConfig([]byte) error // Decode and pre-validate the YAML datasource - anything that can be checked before runtime
-	Configure([]byte, *log.Entry, int) error // Complete the YAML datasource configuration and perform runtime checks.
+	Configure([]byte, *log.Entry) error // Complete the YAML datasource configuration and perform runtime checks.
 	ConfigureByDSN(string, map[string]string, *log.Entry, string) error // Configure the datasource
 	GetMode() string // Get the mode (TAIL, CAT or SERVER)
 	GetName() string // Get the name of the module
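For orientation only: the hunk above drops the metrics-level argument from the interface's Configure method on this branch. The following is a rough, hypothetical sketch of a datasource stub written against that two-argument signature; the dummySource type and its YAML fields are invented for illustration and are not part of this diff.

// Hypothetical stub against the two-argument Configure signature shown above.
// dummySource and its YAML fields are illustrative only, not crowdsec code.
package dummyacquisition

import (
	log "github.com/sirupsen/logrus"
	"gopkg.in/yaml.v2"
)

type dummySource struct {
	Mode   string `yaml:"mode"`
	Path   string `yaml:"path"`
	logger *log.Entry
}

// UnmarshalConfig decodes and pre-validates the YAML blob, as the interface comment describes.
func (d *dummySource) UnmarshalConfig(cfg []byte) error {
	return yaml.Unmarshal(cfg, d)
}

// Configure completes the configuration and keeps the sub-logger; note that on
// this branch there is no metrics-level parameter.
func (d *dummySource) Configure(cfg []byte, logger *log.Entry) error {
	d.logger = logger
	return d.UnmarshalConfig(cfg)
}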
@@ -94,7 +94,7 @@ func GetDataSourceIface(dataSourceType string) DataSource {
 // if the configuration is not valid it returns an error.
 // If the datasource can't be run (eg. journalctl not available), it still returns an error which
 // can be checked for the appropriate action.
-func DataSourceConfigure(commonConfig configuration.DataSourceCommonCfg, metricsLevel int) (*DataSource, error) {
+func DataSourceConfigure(commonConfig configuration.DataSourceCommonCfg) (*DataSource, error) {
 	// we dump it back to []byte, because we want to decode the yaml blob twice:
 	// once to DataSourceCommonCfg, and then later to the dedicated type of the datasource
 	yamlConfig, err := yaml.Marshal(commonConfig)
@@ -122,7 +122,7 @@ func DataSourceConfigure(commonConfig configuration.DataSourceCommonCfg, metrics
 		return nil, &DataSourceUnavailableError{Name: commonConfig.Source, Err: err}
 	}
 	/* configure the actual datasource */
-	if err := dataSrc.Configure(yamlConfig, subLogger, metricsLevel); err != nil {
+	if err := dataSrc.Configure(yamlConfig, subLogger); err != nil {
 		return nil, fmt.Errorf("failed to configure datasource %s: %w", commonConfig.Source, err)

 	}
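As a side note, the comments in DataSourceConfigure above describe decoding the same YAML blob twice: once into DataSourceCommonCfg to pick the module, then re-marshalling it so the chosen module can decode it into its own configuration type. Below is a compressed, hypothetical sketch of that two-pass pattern; the fileCfg type and the sample blob are invented for illustration and only DataSourceCommonCfg mirrors a name from the diff.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// Stand-ins for illustration; not the real crowdsec types.
type DataSourceCommonCfg struct {
	Source string `yaml:"source"`
	Mode   string `yaml:"mode"`
}

type fileCfg struct {
	DataSourceCommonCfg `yaml:",inline"`
	Filename            string `yaml:"filename"`
}

func main() {
	blob := []byte("source: file\nmode: tail\nfilename: /var/log/nginx/access.log\n")

	// First pass: just enough to know which datasource module to instantiate.
	var common DataSourceCommonCfg
	if err := yaml.Unmarshal(blob, &common); err != nil {
		panic(err)
	}

	// The function in the diff re-marshals the common struct so the module's
	// Configure receives a []byte it can decode again.
	yamlConfig, err := yaml.Marshal(common)
	if err != nil {
		panic(err)
	}
	_ = yamlConfig

	// Second pass: in this sketch we simply decode the original blob into the
	// concrete (hypothetical) type.
	var cfg fileCfg
	if err := yaml.Unmarshal(blob, &cfg); err != nil {
		panic(err)
	}
	fmt.Println(common.Source, cfg.Filename) // file /var/log/nginx/access.log
}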
@@ -180,30 +180,10 @@ func LoadAcquisitionFromDSN(dsn string, labels map[string]string, transformExpr
 	return sources, nil
 }

-func GetMetricsLevelFromPromCfg(prom *csconfig.PrometheusCfg) int {
-	if prom == nil {
-		return configuration.METRICS_FULL
-
-	}
-	if !prom.Enabled {
-		return configuration.METRICS_NONE
-	}
-	if prom.Level == configuration.CFG_METRICS_AGGREGATE {
-		return configuration.METRICS_AGGREGATE
-	}
-
-	if prom.Level == configuration.CFG_METRICS_FULL {
-		return configuration.METRICS_FULL
-	}
-	return configuration.METRICS_FULL
-
-}
-
 // LoadAcquisitionFromFile unmarshals the configuration item and checks its availability
-func LoadAcquisitionFromFile(config *csconfig.CrowdsecServiceCfg, prom *csconfig.PrometheusCfg) ([]DataSource, error) {
+func LoadAcquisitionFromFile(config *csconfig.CrowdsecServiceCfg) ([]DataSource, error) {
 	var sources []DataSource

-	metrics_level := GetMetricsLevelFromPromCfg(prom)
 	for _, acquisFile := range config.AcquisitionFiles {
 		log.Infof("loading acquisition file : %s", acquisFile)
 		yamlFile, err := os.Open(acquisFile)
@@ -245,7 +225,7 @@ func LoadAcquisitionFromFile(config *csconfig.CrowdsecServiceCfg, prom *csconfig
 			}
 			uniqueId := uuid.NewString()
 			sub.UniqueId = uniqueId
-			src, err := DataSourceConfigure(sub, metrics_level)
+			src, err := DataSourceConfigure(sub)
 			if err != nil {
 				var dserr *DataSourceUnavailableError
 				if errors.As(err, &dserr) {
@@ -35,7 +35,7 @@ func (f *MockSource) UnmarshalConfig(cfg []byte) error {
 	return nil
 }

-func (f *MockSource) Configure(cfg []byte, logger *log.Entry, metricsLevel int) error {
+func (f *MockSource) Configure(cfg []byte, logger *log.Entry) error {
 	f.logger = logger
 	if err := f.UnmarshalConfig(cfg); err != nil {
 		return err

@@ -182,7 +182,7 @@ wowo: ajsajasjas
 		t.Run(tc.TestName, func(t *testing.T) {
 			common := configuration.DataSourceCommonCfg{}
 			yaml.Unmarshal([]byte(tc.String), &common)
-			ds, err := DataSourceConfigure(common, configuration.METRICS_NONE)
+			ds, err := DataSourceConfigure(common)
 			cstest.RequireErrorContains(t, err, tc.ExpectedError)
 			if tc.ExpectedError != "" {
 				return

@@ -283,7 +283,7 @@ func TestLoadAcquisitionFromFile(t *testing.T) {
 	for _, tc := range tests {
 		tc := tc
 		t.Run(tc.TestName, func(t *testing.T) {
-			dss, err := LoadAcquisitionFromFile(&tc.Config, nil)
+			dss, err := LoadAcquisitionFromFile(&tc.Config)
 			cstest.RequireErrorContains(t, err, tc.ExpectedError)
 			if tc.ExpectedError != "" {
 				return

@@ -305,7 +305,7 @@ type MockCat struct {
 	logger *log.Entry
 }

-func (f *MockCat) Configure(cfg []byte, logger *log.Entry, metricsLevel int) error {
+func (f *MockCat) Configure(cfg []byte, logger *log.Entry) error {
 	f.logger = logger
 	if f.Mode == "" {
 		f.Mode = configuration.CAT_MODE

@@ -349,7 +349,7 @@ type MockTail struct {
 	logger *log.Entry
 }

-func (f *MockTail) Configure(cfg []byte, logger *log.Entry, metricsLevel int) error {
+func (f *MockTail) Configure(cfg []byte, logger *log.Entry) error {
 	f.logger = logger
 	if f.Mode == "" {
 		f.Mode = configuration.TAIL_MODE

@@ -497,10 +497,8 @@ type MockSourceByDSN struct {
 	logger *log.Entry //nolint: unused
 }

 func (f *MockSourceByDSN) UnmarshalConfig(cfg []byte) error { return nil }
-func (f *MockSourceByDSN) Configure(cfg []byte, logger *log.Entry, metricsLevel int) error {
-	return nil
-}
+func (f *MockSourceByDSN) Configure(cfg []byte, logger *log.Entry) error { return nil }
 func (f *MockSourceByDSN) GetMode() string { return f.Mode }
 func (f *MockSourceByDSN) OneShotAcquisition(chan types.Event, *tomb.Tomb) error { return nil }
 func (f *MockSourceByDSN) StreamingAcquisition(chan types.Event, *tomb.Tomb) error { return nil }

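The hunks above adjust mock datasources inside table-driven tests (a slice of cases, each run via t.Run, with an expected-error substring). As a reminder of that pattern in isolation, here is a minimal sketch; parseMode, the case struct and the error texts are made up for illustration and are not taken from the repository.

// table_driven_sketch.go — illustrative only
package main

import (
	"fmt"
	"strings"
	"testing"
)

func parseMode(s string) (string, error) {
	switch s {
	case "tail", "cat":
		return s, nil
	default:
		return "", fmt.Errorf("unknown mode %q", s)
	}
}

func TestParseMode(t *testing.T) {
	tests := []struct {
		name        string
		input       string
		expectedErr string
	}{
		{name: "tail mode", input: "tail"},
		{name: "bad mode", input: "wowo", expectedErr: "unknown mode"},
	}

	for _, tc := range tests {
		tc := tc // capture the range variable, as the original tests do
		t.Run(tc.name, func(t *testing.T) {
			_, err := parseMode(tc.input)
			if tc.expectedErr != "" {
				if err == nil || !strings.Contains(err.Error(), tc.expectedErr) {
					t.Fatalf("expected error containing %q, got %v", tc.expectedErr, err)
				}
				return
			}
			if err != nil {
				t.Fatalf("unexpected error: %v", err)
			}
		})
	}
}
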
@@ -19,14 +19,3 @@ type DataSourceCommonCfg struct {
 var TAIL_MODE = "tail"
 var CAT_MODE = "cat"
 var SERVER_MODE = "server" // No difference with tail, just a bit more verbose
-
-const (
-	METRICS_NONE = iota
-	METRICS_AGGREGATE
-	METRICS_FULL
-)
-
-const (
-	CFG_METRICS_AGGREGATE = "aggregated"
-	CFG_METRICS_FULL      = "full"
-)

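The constants removed above (together with GetMetricsLevelFromPromCfg earlier) express a three-step metrics verbosity: none, aggregated, full. The sketch below only shows how such a level is typically consumed when deciding which Prometheus collectors to expose; collectorsForLevel and the metric names are invented for the example, not part of the diff.

// metrics_level_sketch.go — illustrative only
package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

const (
	metricsNone = iota
	metricsAggregate
	metricsFull
)

// collectorsForLevel returns which collectors a datasource would register for a level.
func collectorsForLevel(level int, aggregated, detailed prometheus.Collector) []prometheus.Collector {
	switch level {
	case metricsNone:
		return nil
	case metricsAggregate:
		return []prometheus.Collector{aggregated}
	default: // metricsFull
		return []prometheus.Collector{aggregated, detailed}
	}
}

func main() {
	agg := prometheus.NewCounter(prometheus.CounterOpts{Name: "events_total", Help: "all events"})
	det := prometheus.NewCounterVec(prometheus.CounterOpts{Name: "events_by_source_total", Help: "events per source"}, []string{"source"})

	fmt.Println(len(collectorsForLevel(metricsAggregate, agg, det))) // 1
	fmt.Println(len(collectorsForLevel(metricsFull, agg, det)))      // 2
}
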
@@ -49,7 +49,6 @@ type AppsecSourceConfig struct {

 // runtime structure of AppsecSourceConfig
 type AppsecSource struct {
-	metricsLevel int
 	config       AppsecSourceConfig
 	logger       *log.Entry
 	mux          *http.ServeMux

@@ -150,13 +149,13 @@ func (w *AppsecSource) GetAggregMetrics() []prometheus.Collector {
 	return []prometheus.Collector{AppsecReqCounter, AppsecBlockCounter, AppsecRuleHits, AppsecOutbandParsingHistogram, AppsecInbandParsingHistogram, AppsecGlobalParsingHistogram}
 }

-func (w *AppsecSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error {
+func (w *AppsecSource) Configure(yamlConfig []byte, logger *log.Entry) error {
 	err := w.UnmarshalConfig(yamlConfig)
 	if err != nil {
 		return errors.Wrap(err, "unable to parse appsec configuration")
 	}
 	w.logger = logger
-	w.metricsLevel = MetricsLevel
 	w.logger.Tracef("Appsec configuration: %+v", w.config)

 	if w.config.AuthCacheDuration == nil {

@@ -1,714 +0,0 @@
package appsecacquisition
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/appsec"
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/appsec/appsec_rule"
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
|
||||||
"github.com/davecgh/go-spew/spew"
|
|
||||||
log "github.com/sirupsen/logrus"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestAppsecOnMatchHooks(t *testing.T) {
|
|
||||||
tests := []appsecRuleTest{
|
|
||||||
{
|
|
||||||
name: "no rule : check return code",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 2)
|
|
||||||
require.Equal(t, types.APPSEC, events[0].Type)
|
|
||||||
require.Equal(t, types.LOG, events[1].Type)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.Equal(t, 403, responses[0].BouncerHTTPResponseCode)
|
|
||||||
require.Equal(t, 403, responses[0].UserHTTPResponseCode)
|
|
||||||
require.Equal(t, appsec.BanRemediation, responses[0].Action)
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_match: change return code",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
on_match: []appsec.Hook{
|
|
||||||
{Filter: "IsInBand == true", Apply: []string{"SetReturnCode(413)"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 2)
|
|
||||||
require.Equal(t, types.APPSEC, events[0].Type)
|
|
||||||
require.Equal(t, types.LOG, events[1].Type)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.Equal(t, 403, responses[0].BouncerHTTPResponseCode)
|
|
||||||
require.Equal(t, 413, responses[0].UserHTTPResponseCode)
|
|
||||||
require.Equal(t, appsec.BanRemediation, responses[0].Action)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_match: change action to a non standard one (log)",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
on_match: []appsec.Hook{
|
|
||||||
{Filter: "IsInBand == true", Apply: []string{"SetRemediation('log')"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 2)
|
|
||||||
require.Equal(t, types.APPSEC, events[0].Type)
|
|
||||||
require.Equal(t, types.LOG, events[1].Type)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.Equal(t, "log", responses[0].Action)
|
|
||||||
require.Equal(t, 403, responses[0].BouncerHTTPResponseCode)
|
|
||||||
require.Equal(t, 403, responses[0].UserHTTPResponseCode)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_match: change action to another standard one (allow)",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
on_match: []appsec.Hook{
|
|
||||||
{Filter: "IsInBand == true", Apply: []string{"SetRemediation('allow')"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 2)
|
|
||||||
require.Equal(t, types.APPSEC, events[0].Type)
|
|
||||||
require.Equal(t, types.LOG, events[1].Type)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.Equal(t, appsec.AllowRemediation, responses[0].Action)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_match: change action to another standard one (ban)",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
on_match: []appsec.Hook{
|
|
||||||
{Filter: "IsInBand == true", Apply: []string{"SetRemediation('ban')"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
//note: SetAction normalizes deny, ban and block to ban
|
|
||||||
require.Equal(t, appsec.BanRemediation, responses[0].Action)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_match: change action to another standard one (captcha)",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
on_match: []appsec.Hook{
|
|
||||||
{Filter: "IsInBand == true", Apply: []string{"SetRemediation('captcha')"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
//note: SetAction normalizes deny, ban and block to ban
|
|
||||||
require.Equal(t, appsec.CaptchaRemediation, responses[0].Action)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_match: change action to a non standard one",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
on_match: []appsec.Hook{
|
|
||||||
{Filter: "IsInBand == true", Apply: []string{"SetRemediation('foobar')"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 2)
|
|
||||||
require.Equal(t, types.APPSEC, events[0].Type)
|
|
||||||
require.Equal(t, types.LOG, events[1].Type)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.Equal(t, "foobar", responses[0].Action)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_match: cancel alert",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule42",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
on_match: []appsec.Hook{
|
|
||||||
{Filter: "IsInBand == true && LogInfo('XX -> %s', evt.Appsec.MatchedRules.GetName())", Apply: []string{"CancelAlert()"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 1)
|
|
||||||
require.Equal(t, types.LOG, events[0].Type)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.Equal(t, appsec.BanRemediation, responses[0].Action)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_match: cancel event",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule42",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
on_match: []appsec.Hook{
|
|
||||||
{Filter: "IsInBand == true", Apply: []string{"CancelEvent()"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 1)
|
|
||||||
require.Equal(t, types.APPSEC, events[0].Type)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.Equal(t, appsec.BanRemediation, responses[0].Action)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
t.Run(test.name, func(t *testing.T) {
|
|
||||||
loadAppSecEngine(test, t)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestAppsecPreEvalHooks(t *testing.T) {
|
|
||||||
|
|
||||||
tests := []appsecRuleTest{
|
|
||||||
{
|
|
||||||
name: "Basic on_load hook to disable inband rule",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
pre_eval: []appsec.Hook{
|
|
||||||
{Filter: "1 == 1", Apply: []string{"RemoveInBandRuleByName('rule1')"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Empty(t, events)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.False(t, responses[0].InBandInterrupt)
|
|
||||||
require.False(t, responses[0].OutOfBandInterrupt)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Basic on_load fails to disable rule",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
pre_eval: []appsec.Hook{
|
|
||||||
{Filter: "1 ==2", Apply: []string{"RemoveInBandRuleByName('rule1')"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 2)
|
|
||||||
require.Equal(t, types.APPSEC, events[0].Type)
|
|
||||||
|
|
||||||
require.Equal(t, types.LOG, events[1].Type)
|
|
||||||
require.True(t, events[1].Appsec.HasInBandMatches)
|
|
||||||
require.Len(t, events[1].Appsec.MatchedRules, 1)
|
|
||||||
require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
|
|
||||||
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.True(t, responses[0].InBandInterrupt)
|
|
||||||
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_load : disable inband by tag",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rulez",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
pre_eval: []appsec.Hook{
|
|
||||||
{Apply: []string{"RemoveInBandRuleByTag('crowdsec-rulez')"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Empty(t, events)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.False(t, responses[0].InBandInterrupt)
|
|
||||||
require.False(t, responses[0].OutOfBandInterrupt)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_load : disable inband by ID",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rulez",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
pre_eval: []appsec.Hook{
|
|
||||||
{Apply: []string{"RemoveInBandRuleByID(1516470898)"}}, //rule ID is generated at runtime. If you change rule, it will break the test (:
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Empty(t, events)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.False(t, responses[0].InBandInterrupt)
|
|
||||||
require.False(t, responses[0].OutOfBandInterrupt)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_load : disable inband by name",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rulez",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
pre_eval: []appsec.Hook{
|
|
||||||
{Apply: []string{"RemoveInBandRuleByName('rulez')"}},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Empty(t, events)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.False(t, responses[0].InBandInterrupt)
|
|
||||||
require.False(t, responses[0].OutOfBandInterrupt)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_load : outofband default behavior",
|
|
||||||
expected_load_ok: true,
|
|
||||||
outofband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rulez",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 1)
|
|
||||||
require.Equal(t, types.LOG, events[0].Type)
|
|
||||||
require.True(t, events[0].Appsec.HasOutBandMatches)
|
|
||||||
require.False(t, events[0].Appsec.HasInBandMatches)
|
|
||||||
require.Len(t, events[0].Appsec.MatchedRules, 1)
|
|
||||||
require.Equal(t, "rulez", events[0].Appsec.MatchedRules[0]["msg"])
|
|
||||||
//maybe surprising, but response won't mention OOB event, as it's sent as soon as the inband phase is over.
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.False(t, responses[0].InBandInterrupt)
|
|
||||||
require.False(t, responses[0].OutOfBandInterrupt)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_load : set remediation by tag",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rulez",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
pre_eval: []appsec.Hook{
|
|
||||||
{Apply: []string{"SetRemediationByTag('crowdsec-rulez', 'foobar')"}}, //rule ID is generated at runtime. If you change rule, it will break the test (:
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 2)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.Equal(t, "foobar", responses[0].Action)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_load : set remediation by name",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rulez",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
pre_eval: []appsec.Hook{
|
|
||||||
{Apply: []string{"SetRemediationByName('rulez', 'foobar')"}}, //rule ID is generated at runtime. If you change rule, it will break the test (:
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 2)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.Equal(t, "foobar", responses[0].Action)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "on_load : set remediation by ID",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rulez",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
pre_eval: []appsec.Hook{
|
|
||||||
{Apply: []string{"SetRemediationByID(1516470898, 'foobar')"}}, //rule ID is generated at runtime. If you change rule, it will break the test (:
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 2)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.Equal(t, "foobar", responses[0].Action)
|
|
||||||
require.Equal(t, "foobar", appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, test := range tests {
|
|
||||||
t.Run(test.name, func(t *testing.T) {
|
|
||||||
loadAppSecEngine(test, t)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestAppsecRemediationConfigHooks(t *testing.T) {
|
|
||||||
|
|
||||||
tests := []appsecRuleTest{
|
|
||||||
{
|
|
||||||
name: "Basic matching rule",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.BanRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, statusCode)
|
|
||||||
require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "SetRemediation",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
on_match: []appsec.Hook{{Apply: []string{"SetRemediation('captcha')"}}}, //rule ID is generated at runtime. If you change rule, it will break the test (:
|
|
||||||
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.CaptchaRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, statusCode)
|
|
||||||
require.Equal(t, appsec.CaptchaRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "SetRemediation",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
on_match: []appsec.Hook{{Apply: []string{"SetReturnCode(418)"}}}, //rule ID is generated at runtime. If you change rule, it will break the test (:
|
|
||||||
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.BanRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, statusCode)
|
|
||||||
require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, test := range tests {
|
|
||||||
t.Run(test.name, func(t *testing.T) {
|
|
||||||
loadAppSecEngine(test, t)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
func TestOnMatchRemediationHooks(t *testing.T) {
|
|
||||||
tests := []appsecRuleTest{
|
|
||||||
{
|
|
||||||
name: "set remediation to allow with on_match hook",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule42",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
on_match: []appsec.Hook{
|
|
||||||
{Filter: "IsInBand == true", Apply: []string{"SetRemediation('allow')"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "set remediation to captcha + custom user code with on_match hook",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule42",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
DefaultRemediation: appsec.AllowRemediation,
|
|
||||||
on_match: []appsec.Hook{
|
|
||||||
{Filter: "IsInBand == true", Apply: []string{"SetRemediation('captcha')", "SetReturnCode(418)"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
spew.Dump(responses)
|
|
||||||
spew.Dump(appsecResponse)
|
|
||||||
|
|
||||||
log.Errorf("http status : %d", statusCode)
|
|
||||||
require.Equal(t, appsec.CaptchaRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
|
|
||||||
require.Equal(t, http.StatusForbidden, statusCode)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
t.Run(test.name, func(t *testing.T) {
|
|
||||||
loadAppSecEngine(test, t)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
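The deleted test file above exercises on_match and pre_eval hooks of the form {Filter: <expression>, Apply: [<actions>]}, evaluated by the project's expression engine against the request and the pending response. The toy sketch below only mirrors that shape with plain Go closures; every name in it is invented and it is not the project's hook implementation.

// hook_shape_sketch.go — conceptual only
package main

import "fmt"

type response struct {
	Action         string
	UserHTTPStatus int
}

type hook struct {
	filter func(inBand bool) bool // nil filter means "always applies"
	apply  []func(*response)
}

// runHooks applies every hook whose filter accepts the current phase.
func runHooks(hooks []hook, inBand bool, r *response) {
	for _, h := range hooks {
		if h.filter != nil && !h.filter(inBand) {
			continue
		}
		for _, a := range h.apply {
			a(r)
		}
	}
}

func main() {
	r := &response{Action: "ban", UserHTTPStatus: 403}
	hooks := []hook{{
		filter: func(inBand bool) bool { return inBand }, // ~ "IsInBand == true"
		apply: []func(*response){
			func(r *response) { r.UserHTTPStatus = 413 }, // ~ SetReturnCode(413)
		},
	}}
	runHooks(hooks, true, r)
	fmt.Println(r.Action, r.UserHTTPStatus) // ban 413
}
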
@@ -1,74 +0,0 @@
-//go:build !windows
-// +build !windows
-
-package appsecacquisition
-
-import (
-	"testing"
-
-	"github.com/crowdsecurity/crowdsec/pkg/appsec"
-	"github.com/crowdsecurity/crowdsec/pkg/appsec/appsec_rule"
-	"github.com/crowdsecurity/crowdsec/pkg/types"
-	log "github.com/sirupsen/logrus"
-	"github.com/stretchr/testify/require"
-)
-
-func TestAppsecRuleTransformsOthers(t *testing.T) {
-
-	log.SetLevel(log.TraceLevel)
-	tests := []appsecRuleTest{
-		{
-			name:             "normalizepath",
-			expected_load_ok: true,
-			inband_rules: []appsec_rule.CustomRule{
-				{
-					Name:      "rule1",
-					Zones:     []string{"ARGS"},
-					Variables: []string{"foo"},
-					Match:     appsec_rule.Match{Type: "equals", Value: "b/c"},
-					Transform: []string{"normalizepath"},
-				},
-			},
-			input_request: appsec.ParsedRequest{
-				RemoteAddr: "1.2.3.4",
-				Method:     "GET",
-				URI:        "/?foo=a/../b/c",
-			},
-			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
-				require.Len(t, events, 2)
-				require.Equal(t, types.APPSEC, events[0].Type)
-				require.Equal(t, types.LOG, events[1].Type)
-				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
-			},
-		},
-		{
-			name:             "normalizepath #2",
-			expected_load_ok: true,
-			inband_rules: []appsec_rule.CustomRule{
-				{
-					Name:      "rule1",
-					Zones:     []string{"ARGS"},
-					Variables: []string{"foo"},
-					Match:     appsec_rule.Match{Type: "equals", Value: "b/c/"},
-					Transform: []string{"normalizepath"},
-				},
-			},
-			input_request: appsec.ParsedRequest{
-				RemoteAddr: "1.2.3.4",
-				Method:     "GET",
-				URI:        "/?foo=a/../b/c/////././././",
-			},
-			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
-				require.Len(t, events, 2)
-				require.Equal(t, types.APPSEC, events[0].Type)
-				require.Equal(t, types.LOG, events[1].Type)
-				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
-			},
-		},
-	}
-	for _, test := range tests {
-		t.Run(test.name, func(t *testing.T) {
-			loadAppSecEngine(test, t)
-		})
-	}
-}
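The removed tests above check that the "normalizepath" transform collapses "a/../b/c" (plus trailing "/./" noise) down to "b/c" before the rule matches. Go's standard library performs the same kind of lexical normalization, so the snippet below is offered only as an analogy, not as the transform's actual implementation.

// normalize_path_sketch.go — analogy only
package main

import (
	"fmt"
	"path"
)

func main() {
	// path.Clean resolves ".." and "." segments and strips redundant slashes.
	fmt.Println(path.Clean("a/../b/c"))              // b/c
	fmt.Println(path.Clean("a/../b/c/////./././.")) // b/c
}
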

@@ -1,320 +0,0 @@
package appsecacquisition
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/appsec"
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/appsec/appsec_rule"
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestAppsecDefaultPassRemediation(t *testing.T) {
|
|
||||||
|
|
||||||
tests := []appsecRuleTest{
|
|
||||||
{
|
|
||||||
name: "Basic non-matching rule",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/",
|
|
||||||
Args: url.Values{"foo": []string{"tutu"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.AllowRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusOK, statusCode)
|
|
||||||
require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "DefaultPassAction: pass",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/",
|
|
||||||
Args: url.Values{"foo": []string{"tutu"}},
|
|
||||||
},
|
|
||||||
DefaultPassAction: "allow",
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.AllowRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusOK, statusCode)
|
|
||||||
require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "DefaultPassAction: captcha",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/",
|
|
||||||
Args: url.Values{"foo": []string{"tutu"}},
|
|
||||||
},
|
|
||||||
DefaultPassAction: "captcha",
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.CaptchaRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusOK, statusCode) //@tko: body is captcha, but as it's 200, captcha won't be showed to user
|
|
||||||
require.Equal(t, appsec.CaptchaRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "DefaultPassHTTPCode: 200",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/",
|
|
||||||
Args: url.Values{"foo": []string{"tutu"}},
|
|
||||||
},
|
|
||||||
UserPassedHTTPCode: 200,
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.AllowRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusOK, statusCode)
|
|
||||||
require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "DefaultPassHTTPCode: 200",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/",
|
|
||||||
Args: url.Values{"foo": []string{"tutu"}},
|
|
||||||
},
|
|
||||||
UserPassedHTTPCode: 418,
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.AllowRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusOK, statusCode)
|
|
||||||
require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
t.Run(test.name, func(t *testing.T) {
|
|
||||||
loadAppSecEngine(test, t)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestAppsecDefaultRemediation(t *testing.T) {
|
|
||||||
|
|
||||||
tests := []appsecRuleTest{
|
|
||||||
{
|
|
||||||
name: "Basic matching rule",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.BanRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, statusCode)
|
|
||||||
require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "default remediation to ban (default)",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule42",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
DefaultRemediation: "ban",
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.BanRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, statusCode)
|
|
||||||
require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "default remediation to allow",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule42",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
DefaultRemediation: "allow",
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.AllowRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusOK, statusCode)
|
|
||||||
require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "default remediation to captcha",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule42",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
DefaultRemediation: "captcha",
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.CaptchaRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, statusCode)
|
|
||||||
require.Equal(t, appsec.CaptchaRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "custom user HTTP code",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule42",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
UserBlockedHTTPCode: 418,
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, appsec.BanRemediation, responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, statusCode)
|
|
||||||
require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "custom remediation + HTTP code",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule42",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
UserBlockedHTTPCode: 418,
|
|
||||||
DefaultRemediation: "foobar",
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Equal(t, "foobar", responses[0].Action)
|
|
||||||
require.Equal(t, http.StatusForbidden, statusCode)
|
|
||||||
require.Equal(t, "foobar", appsecResponse.Action)
|
|
||||||
require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, test := range tests {
|
|
||||||
t.Run(test.name, func(t *testing.T) {
|
|
||||||
loadAppSecEngine(test, t)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
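The deleted tests above repeatedly assert how a remediation maps to HTTP status codes: ban and captcha are blocked with 403 by default, allow returns 200, and a user-configured code such as 418 can override what the end user sees. The sketch below restates that mapping in isolation; the function and constants are illustrative assumptions, not the project's API.

// remediation_status_sketch.go — illustrative only
package main

import (
	"fmt"
	"net/http"
)

const (
	allowRemediation   = "allow"
	banRemediation     = "ban"
	captchaRemediation = "captcha"
)

// statusFor returns the user-facing status, honouring an optional override
// for blocked requests (0 means "no override configured").
func statusFor(remediation string, userBlockedCode int) int {
	if remediation == allowRemediation {
		return http.StatusOK
	}
	if userBlockedCode != 0 {
		return userBlockedCode
	}
	return http.StatusForbidden
}

func main() {
	fmt.Println(statusFor(banRemediation, 0))                  // 403
	fmt.Println(statusFor(allowRemediation, 0))                // 200
	fmt.Println(statusFor(captchaRemediation, 0))              // 403
	fmt.Println(statusFor(banRemediation, http.StatusTeapot))  // 418
}
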

@@ -1,733 +0,0 @@
package appsecacquisition
|
|
||||||
|
|
||||||
import (
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/appsec"
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/appsec/appsec_rule"
|
|
||||||
"github.com/crowdsecurity/crowdsec/pkg/types"
|
|
||||||
log "github.com/sirupsen/logrus"
|
|
||||||
"github.com/stretchr/testify/require"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestAppsecRuleMatches(t *testing.T) {
|
|
||||||
|
|
||||||
tests := []appsecRuleTest{
|
|
||||||
{
|
|
||||||
name: "Basic matching rule",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"toto"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Len(t, events, 2)
|
|
||||||
require.Equal(t, types.APPSEC, events[0].Type)
|
|
||||||
|
|
||||||
require.Equal(t, types.LOG, events[1].Type)
|
|
||||||
require.True(t, events[1].Appsec.HasInBandMatches)
|
|
||||||
require.Len(t, events[1].Appsec.MatchedRules, 1)
|
|
||||||
require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
|
|
||||||
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
require.True(t, responses[0].InBandInterrupt)
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "Basic non-matching rule",
|
|
||||||
expected_load_ok: true,
|
|
||||||
inband_rules: []appsec_rule.CustomRule{
|
|
||||||
{
|
|
||||||
Name: "rule1",
|
|
||||||
Zones: []string{"ARGS"},
|
|
||||||
Variables: []string{"foo"},
|
|
||||||
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
|
|
||||||
Transform: []string{"lowercase"},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
input_request: appsec.ParsedRequest{
|
|
||||||
RemoteAddr: "1.2.3.4",
|
|
||||||
Method: "GET",
|
|
||||||
URI: "/urllll",
|
|
||||||
Args: url.Values{"foo": []string{"tutu"}},
|
|
||||||
},
|
|
||||||
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
|
|
||||||
require.Empty(t, events)
|
|
||||||
require.Len(t, responses, 1)
|
|
||||||
				require.False(t, responses[0].InBandInterrupt)
				require.False(t, responses[0].OutOfBandInterrupt)
			},
		},
		{
			name: "default remediation to allow",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule42",
					Zones: []string{"ARGS"},
					Variables: []string{"foo"},
					Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/urllll",
				Args: url.Values{"foo": []string{"toto"}},
			},
			DefaultRemediation: "allow",
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Equal(t, appsec.AllowRemediation, responses[0].Action)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
			},
		},
		{
			name: "default remediation to captcha",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule42",
					Zones: []string{"ARGS"},
					Variables: []string{"foo"},
					Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/urllll",
				Args: url.Values{"foo": []string{"toto"}},
			},
			DefaultRemediation: "captcha",
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Equal(t, appsec.CaptchaRemediation, responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, appsec.CaptchaRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
			},
		},
		{
			name: "no default remediation / custom user HTTP code",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule42",
					Zones: []string{"ARGS"},
					Variables: []string{"foo"},
					Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/urllll",
				Args: url.Values{"foo": []string{"toto"}},
			},
			UserBlockedHTTPCode: 418,
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
			},
		},
		{
			name: "no match but try to set remediation to captcha with on_match hook",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule42",
					Zones: []string{"ARGS"},
					Variables: []string{"foo"},
					Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetRemediation('captcha')"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/urllll",
				Args: url.Values{"foo": []string{"bla"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Empty(t, events)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
			},
		},
		{
			name: "no match but try to set user HTTP code with on_match hook",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule42",
					Zones: []string{"ARGS"},
					Variables: []string{"foo"},
					Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetReturnCode(418)"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/urllll",
				Args: url.Values{"foo": []string{"bla"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Empty(t, events)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
			},
		},
		{
			name: "no match but try to set remediation with pre_eval hook",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule42",
					Zones: []string{"ARGS"},
					Variables: []string{"foo"},
					Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			pre_eval: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetRemediationByName('rule42', 'captcha')"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/urllll",
				Args: url.Values{"foo": []string{"bla"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Empty(t, events)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			loadAppSecEngine(test, t)
		})
	}
}

func TestAppsecRuleTransforms(t *testing.T) {

	log.SetLevel(log.TraceLevel)
	tests := []appsecRuleTest{
		{
			name: "Basic matching rule",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"URI"},
					Match: appsec_rule.Match{Type: "equals", Value: "/toto"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/toto",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "lowercase",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"URI"},
					Match: appsec_rule.Match{Type: "equals", Value: "/toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/TOTO",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "uppercase",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"URI"},
					Match: appsec_rule.Match{Type: "equals", Value: "/TOTO"},
					Transform: []string{"uppercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/toto",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "b64decode",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"ARGS"},
					Variables: []string{"foo"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
					Transform: []string{"b64decode"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/?foo=dG90bw",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "b64decode with extra padding",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"ARGS"},
					Variables: []string{"foo"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
					Transform: []string{"b64decode"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/?foo=dG90bw===",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "length",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"ARGS"},
					Variables: []string{"foo"},
					Match: appsec_rule.Match{Type: "gte", Value: "3"},
					Transform: []string{"length"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/?foo=toto",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "urldecode",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"ARGS"},
					Variables: []string{"foo"},
					Match: appsec_rule.Match{Type: "equals", Value: "BB/A"},
					Transform: []string{"urldecode"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/?foo=%42%42%2F%41",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "trim",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"ARGS"},
					Variables: []string{"foo"},
					Match: appsec_rule.Match{Type: "equals", Value: "BB/A"},
					Transform: []string{"urldecode", "trim"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/?foo=%20%20%42%42%2F%41%20%20",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			loadAppSecEngine(test, t)
		})
	}
}

func TestAppsecRuleZones(t *testing.T) {

	log.SetLevel(log.TraceLevel)
	tests := []appsecRuleTest{
		{
			name: "rule: ARGS",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"ARGS"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name: "rule2",
					Zones: []string{"ARGS"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/foobar?something=toto&foobar=smth",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "rule: ARGS_NAMES",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"ARGS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name: "rule2",
					Zones: []string{"ARGS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/foobar?something=toto&foobar=smth",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule2", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "rule: BODY_ARGS",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"BODY_ARGS"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name: "rule2",
					Zones: []string{"BODY_ARGS"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/",
				Body: []byte("smth=toto&foobar=other"),
				Headers: http.Header{"Content-Type": []string{"application/x-www-form-urlencoded"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "rule: BODY_ARGS_NAMES",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"BODY_ARGS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name: "rule2",
					Zones: []string{"BODY_ARGS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/",
				Body: []byte("smth=toto&foobar=other"),
				Headers: http.Header{"Content-Type": []string{"application/x-www-form-urlencoded"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule2", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "rule: HEADERS",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"HEADERS"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name: "rule2",
					Zones: []string{"HEADERS"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/",
				Headers: http.Header{"foobar": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "rule: HEADERS_NAMES",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"HEADERS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name: "rule2",
					Zones: []string{"HEADERS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/",
				Headers: http.Header{"foobar": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule2", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "rule: METHOD",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"METHOD"},
					Match: appsec_rule.Match{Type: "equals", Value: "GET"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "rule: PROTOCOL",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"PROTOCOL"},
					Match: appsec_rule.Match{Type: "contains", Value: "3.1"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/",
				Proto: "HTTP/3.1",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "rule: URI",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"URI"},
					Match: appsec_rule.Match{Type: "equals", Value: "/foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/foobar",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "rule: URI_FULL",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"URI_FULL"},
					Match: appsec_rule.Match{Type: "equals", Value: "/foobar?a=b"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/foobar?a=b",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name: "rule: RAW_BODY",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name: "rule1",
					Zones: []string{"RAW_BODY"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar=42421"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method: "GET",
				URI: "/",
				Body: []byte("foobar=42421"),
				Headers: http.Header{"Content-Type": []string{"application/x-www-form-urlencoded"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			loadAppSecEngine(test, t)
		})
	}
}

File diff suppressed because it is too large

@@ -1,46 +0,0 @@
-//go:build windows
-// +build windows
-
-package appsecacquisition
-
-import (
-	"testing"
-
-	log "github.com/sirupsen/logrus"
-)
-
-func TestAppsecRuleTransformsWindows(t *testing.T) {
-
-	log.SetLevel(log.TraceLevel)
-	tests := []appsecRuleTest{
-		// {
-		// 	name: "normalizepath",
-		// 	expected_load_ok: true,
-		// 	inband_rules: []appsec_rule.CustomRule{
-		// 		{
-		// 			Name: "rule1",
-		// 			Zones: []string{"ARGS"},
-		// 			Variables: []string{"foo"},
-		// 			Match: appsec_rule.Match{Type: "equals", Value: "b/c"},
-		// 			Transform: []string{"normalizepath"},
-		// 		},
-		// 	},
-		// 	input_request: appsec.ParsedRequest{
-		// 		RemoteAddr: "1.2.3.4",
-		// 		Method: "GET",
-		// 		URI: "/?foo=a/../b/c",
-		// 	},
-		// 	output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
-		// 		require.Len(t, events, 2)
-		// 		require.Equal(t, types.APPSEC, events[0].Type)
-		// 		require.Equal(t, types.LOG, events[1].Type)
-		// 		require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
-		// 	},
-		// },
-	}
-	for _, test := range tests {
-		t.Run(test.name, func(t *testing.T) {
-			loadAppSecEngine(test, t)
-		})
-	}
-}
@@ -43,8 +43,7 @@ var linesRead = prometheus.NewCounterVec(
 
 // CloudwatchSource is the runtime instance keeping track of N streams within 1 cloudwatch group
 type CloudwatchSource struct {
-	metricsLevel int
 	Config CloudwatchSourceConfiguration
 	/*runtime stuff*/
 	logger *log.Entry
 	t *tomb.Tomb
@@ -153,12 +152,11 @@ func (cw *CloudwatchSource) UnmarshalConfig(yamlConfig []byte) error {
 	return nil
 }
 
-func (cw *CloudwatchSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error {
+func (cw *CloudwatchSource) Configure(yamlConfig []byte, logger *log.Entry) error {
 	err := cw.UnmarshalConfig(yamlConfig)
 	if err != nil {
 		return err
 	}
-	cw.metricsLevel = MetricsLevel
 
 	cw.logger = logger.WithField("group", cw.Config.GroupName)
 
@@ -387,9 +385,7 @@ func (cw *CloudwatchSource) LogStreamManager(in chan LogStreamTailConfig, outCha
 			if !stream.t.Alive() {
 				cw.logger.Debugf("stream %s already exists, but is dead", newStream.StreamName)
 				cw.monitoredStreams = append(cw.monitoredStreams[:idx], cw.monitoredStreams[idx+1:]...)
-				if cw.metricsLevel != configuration.METRICS_NONE {
-					openedStreams.With(prometheus.Labels{"group": newStream.GroupName}).Dec()
-				}
+				openedStreams.With(prometheus.Labels{"group": newStream.GroupName}).Dec()
 				break
 			}
 			shouldCreate = false
@@ -399,9 +395,7 @@ func (cw *CloudwatchSource) LogStreamManager(in chan LogStreamTailConfig, outCha
 
 		//let's start watching this stream
 		if shouldCreate {
-			if cw.metricsLevel != configuration.METRICS_NONE {
-				openedStreams.With(prometheus.Labels{"group": newStream.GroupName}).Inc()
-			}
+			openedStreams.With(prometheus.Labels{"group": newStream.GroupName}).Inc()
 			newStream.t = tomb.Tomb{}
 			newStream.logger = cw.logger.WithFields(log.Fields{"stream": newStream.StreamName})
 			cw.logger.Debugf("starting tail of stream %s", newStream.StreamName)
@@ -415,9 +409,7 @@ func (cw *CloudwatchSource) LogStreamManager(in chan LogStreamTailConfig, outCha
 		for idx, stream := range cw.monitoredStreams {
 			if !cw.monitoredStreams[idx].t.Alive() {
 				cw.logger.Debugf("remove dead stream %s", stream.StreamName)
-				if cw.metricsLevel != configuration.METRICS_NONE {
-					openedStreams.With(prometheus.Labels{"group": cw.monitoredStreams[idx].GroupName}).Dec()
-				}
+				openedStreams.With(prometheus.Labels{"group": cw.monitoredStreams[idx].GroupName}).Dec()
 			} else {
 				newMonitoredStreams = append(newMonitoredStreams, stream)
 			}
@@ -493,9 +485,7 @@ func (cw *CloudwatchSource) TailLogStream(cfg *LogStreamTailConfig, outChan chan
 				cfg.logger.Warningf("cwLogToEvent error, discarded event : %s", err)
 			} else {
 				cfg.logger.Debugf("pushing message : %s", evt.Line.Raw)
-				if cw.metricsLevel != configuration.METRICS_NONE {
-					linesRead.With(prometheus.Labels{"group": cfg.GroupName, "stream": cfg.StreamName}).Inc()
-				}
+				linesRead.With(prometheus.Labels{"group": cfg.GroupName, "stream": cfg.StreamName}).Inc()
 				outChan <- evt
 			}
 		}
@@ -13,7 +13,6 @@ import (
 
 	"github.com/aws/aws-sdk-go/aws"
 	"github.com/aws/aws-sdk-go/service/cloudwatchlogs"
-	"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
 	"github.com/crowdsecurity/crowdsec/pkg/types"
 	log "github.com/sirupsen/logrus"
 	"github.com/stretchr/testify/require"
@@ -428,7 +427,7 @@ stream_name: test_stream`),
 			dbgLogger.Logger.SetLevel(log.DebugLevel)
 			dbgLogger.Infof("starting test")
 			cw := CloudwatchSource{}
-			err := cw.Configure(tc.config, dbgLogger, configuration.METRICS_NONE)
+			err := cw.Configure(tc.config, dbgLogger)
 			cstest.RequireErrorContains(t, err, tc.expectedCfgErr)
 
 			if tc.expectedCfgErr != "" {
@@ -560,7 +559,7 @@ stream_name: test_stream`),
 			dbgLogger := log.New().WithField("test", tc.name)
 			dbgLogger.Logger.SetLevel(log.DebugLevel)
 			cw := CloudwatchSource{}
-			err := cw.Configure(tc.config, dbgLogger, configuration.METRICS_NONE)
+			err := cw.Configure(tc.config, dbgLogger)
 			cstest.RequireErrorContains(t, err, tc.expectedCfgErr)
 			if tc.expectedCfgErr != "" {
 				return
@@ -46,7 +46,6 @@ type DockerConfiguration struct {
 }
 
 type DockerSource struct {
-	metricsLevel int
 	Config DockerConfiguration
 	runningContainerState map[string]*ContainerConfig
 	compiledContainerName []*regexp.Regexp
@@ -129,9 +128,9 @@ func (d *DockerSource) UnmarshalConfig(yamlConfig []byte) error {
 	return nil
 }
 
-func (d *DockerSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error {
+func (d *DockerSource) Configure(yamlConfig []byte, logger *log.Entry) error {
 	d.logger = logger
-	d.metricsLevel = MetricsLevel
+
 	err := d.UnmarshalConfig(yamlConfig)
 	if err != nil {
 		return err
@@ -326,9 +325,7 @@ func (d *DockerSource) OneShotAcquisition(out chan types.Event, t *tomb.Tomb) er
 		l.Src = containerConfig.Name
 		l.Process = true
 		l.Module = d.GetName()
-		if d.metricsLevel != configuration.METRICS_NONE {
-			linesRead.With(prometheus.Labels{"source": containerConfig.Name}).Inc()
-		}
+		linesRead.With(prometheus.Labels{"source": containerConfig.Name}).Inc()
 		evt := types.Event{Line: l, Process: true, Type: types.LOG, ExpectMode: types.TIMEMACHINE}
 		out <- evt
 		d.logger.Debugf("Sent line to parsing: %+v", evt.Line.Raw)
@@ -13,7 +13,6 @@ import (
 
 	"github.com/crowdsecurity/go-cs-lib/cstest"
 
-	"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
 	"github.com/crowdsecurity/crowdsec/pkg/types"
 	dockerTypes "github.com/docker/docker/api/types"
 	dockerContainer "github.com/docker/docker/api/types/container"
@@ -61,7 +60,7 @@ container_name:
 
 	for _, test := range tests {
 		f := DockerSource{}
-		err := f.Configure([]byte(test.config), subLogger, configuration.METRICS_NONE)
+		err := f.Configure([]byte(test.config), subLogger)
 		cstest.AssertErrorContains(t, err, test.expectedErr)
 	}
 }
@@ -163,7 +162,7 @@ container_name_regexp:
 
 	for _, ts := range tests {
 		var (
 			logger *log.Logger
 			subLogger *log.Entry
 		)
 
@@ -183,7 +182,7 @@ container_name_regexp:
 		out := make(chan types.Event)
 		dockerSource := DockerSource{}
 
-		err := dockerSource.Configure([]byte(ts.config), subLogger, configuration.METRICS_NONE)
+		err := dockerSource.Configure([]byte(ts.config), subLogger)
 		if err != nil {
 			t.Fatalf("Unexpected error : %s", err)
 		}
@@ -305,7 +304,7 @@ func TestOneShot(t *testing.T) {
 	for _, ts := range tests {
 		var (
 			subLogger *log.Entry
 			logger *log.Logger
 		)
 
 		if ts.expectedOutput != "" {
@ -3,7 +3,6 @@ package fileacquisition
|
||||||
import (
|
import (
|
||||||
"bufio"
|
"bufio"
|
||||||
"compress/gzip"
|
"compress/gzip"
|
||||||
"errors"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"net/url"
|
"net/url"
|
||||||
|
@ -12,11 +11,11 @@ import (
|
||||||
"regexp"
|
"regexp"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/fsnotify/fsnotify"
|
"github.com/fsnotify/fsnotify"
|
||||||
"github.com/nxadm/tail"
|
"github.com/nxadm/tail"
|
||||||
|
"github.com/pkg/errors"
|
||||||
"github.com/prometheus/client_golang/prometheus"
|
"github.com/prometheus/client_golang/prometheus"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"gopkg.in/tomb.v2"
|
"gopkg.in/tomb.v2"
|
||||||
|
@ -46,7 +45,6 @@ type FileConfiguration struct {
|
||||||
}
|
}
|
||||||
|
|
||||||
type FileSource struct {
|
type FileSource struct {
|
||||||
metricsLevel int
|
|
||||||
config FileConfiguration
|
config FileConfiguration
|
||||||
watcher *fsnotify.Watcher
|
watcher *fsnotify.Watcher
|
||||||
watchedDirectories map[string]bool
|
watchedDirectories map[string]bool
|
||||||
|
@ -54,7 +52,6 @@ type FileSource struct {
|
||||||
logger *log.Entry
|
logger *log.Entry
|
||||||
files []string
|
files []string
|
||||||
exclude_regexps []*regexp.Regexp
|
exclude_regexps []*regexp.Regexp
|
||||||
tailMapMutex *sync.RWMutex
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (f *FileSource) GetUuid() string {
|
func (f *FileSource) GetUuid() string {
|
||||||
|
@ -63,7 +60,6 @@ func (f *FileSource) GetUuid() string {
|
||||||
|
|
||||||
func (f *FileSource) UnmarshalConfig(yamlConfig []byte) error {
|
func (f *FileSource) UnmarshalConfig(yamlConfig []byte) error {
|
||||||
f.config = FileConfiguration{}
|
f.config = FileConfiguration{}
|
||||||
|
|
||||||
err := yaml.UnmarshalStrict(yamlConfig, &f.config)
|
err := yaml.UnmarshalStrict(yamlConfig, &f.config)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("cannot parse FileAcquisition configuration: %w", err)
|
return fmt.Errorf("cannot parse FileAcquisition configuration: %w", err)
|
||||||
|
@ -78,7 +74,7 @@ func (f *FileSource) UnmarshalConfig(yamlConfig []byte) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(f.config.Filenames) == 0 {
|
if len(f.config.Filenames) == 0 {
|
||||||
return errors.New("no filename or filenames configuration provided")
|
return fmt.Errorf("no filename or filenames configuration provided")
|
||||||
}
|
}
|
||||||
|
|
||||||
if f.config.Mode == "" {
|
if f.config.Mode == "" {
|
||||||
|
@ -94,16 +90,14 @@ func (f *FileSource) UnmarshalConfig(yamlConfig []byte) error {
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("could not compile regexp %s: %w", exclude, err)
|
return fmt.Errorf("could not compile regexp %s: %w", exclude, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
f.exclude_regexps = append(f.exclude_regexps, re)
|
f.exclude_regexps = append(f.exclude_regexps, re)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (f *FileSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error {
|
func (f *FileSource) Configure(yamlConfig []byte, logger *log.Entry) error {
|
||||||
f.logger = logger
|
f.logger = logger
|
||||||
f.metricsLevel = MetricsLevel
|
|
||||||
|
|
||||||
err := f.UnmarshalConfig(yamlConfig)
|
err := f.UnmarshalConfig(yamlConfig)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -111,7 +105,6 @@ func (f *FileSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLeve
|
||||||
}
|
}
|
||||||
|
|
||||||
f.watchedDirectories = make(map[string]bool)
|
f.watchedDirectories = make(map[string]bool)
|
||||||
f.tailMapMutex = &sync.RWMutex{}
|
|
||||||
f.tails = make(map[string]bool)
|
f.tails = make(map[string]bool)
|
||||||
|
|
||||||
f.watcher, err = fsnotify.NewWatcher()
|
f.watcher, err = fsnotify.NewWatcher()
|
||||||
|
@ -125,68 +118,56 @@ func (f *FileSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLeve
|
||||||
if f.config.ForceInotify {
|
if f.config.ForceInotify {
|
||||||
directory := filepath.Dir(pattern)
|
directory := filepath.Dir(pattern)
|
||||||
f.logger.Infof("Force add watch on %s", directory)
|
f.logger.Infof("Force add watch on %s", directory)
|
||||||
|
|
||||||
if !f.watchedDirectories[directory] {
|
if !f.watchedDirectories[directory] {
|
||||||
err = f.watcher.Add(directory)
|
err = f.watcher.Add(directory)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
f.logger.Errorf("Could not create watch on directory %s : %s", directory, err)
|
f.logger.Errorf("Could not create watch on directory %s : %s", directory, err)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
f.watchedDirectories[directory] = true
|
f.watchedDirectories[directory] = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
files, err := filepath.Glob(pattern)
|
files, err := filepath.Glob(pattern)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("glob failure: %w", err)
|
return fmt.Errorf("glob failure: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(files) == 0 {
|
if len(files) == 0 {
|
||||||
f.logger.Warnf("No matching files for pattern %s", pattern)
|
f.logger.Warnf("No matching files for pattern %s", pattern)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, file := range files {
|
for _, file := range files {
|
||||||
// check if file is excluded
|
|
||||||
excluded := false
|
|
||||||
|
|
||||||
|
//check if file is excluded
|
||||||
|
excluded := false
|
||||||
for _, pattern := range f.exclude_regexps {
|
for _, pattern := range f.exclude_regexps {
|
||||||
if pattern.MatchString(file) {
|
if pattern.MatchString(file) {
|
||||||
excluded = true
|
excluded = true
|
||||||
|
|
||||||
f.logger.Infof("Skipping file %s as it matches exclude pattern %s", file, pattern)
|
f.logger.Infof("Skipping file %s as it matches exclude pattern %s", file, pattern)
|
||||||
|
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if excluded {
|
if excluded {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
if files[0] != pattern && f.config.Mode == configuration.TAIL_MODE { //we have a glob pattern
|
||||||
if files[0] != pattern && f.config.Mode == configuration.TAIL_MODE { // we have a glob pattern
|
|
||||||
directory := filepath.Dir(file)
|
directory := filepath.Dir(file)
|
||||||
f.logger.Debugf("Will add watch to directory: %s", directory)
|
f.logger.Debugf("Will add watch to directory: %s", directory)
|
||||||
|
|
||||||
if !f.watchedDirectories[directory] {
|
if !f.watchedDirectories[directory] {
|
||||||
|
|
||||||
err = f.watcher.Add(directory)
|
err = f.watcher.Add(directory)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
f.logger.Errorf("Could not create watch on directory %s : %s", directory, err)
|
f.logger.Errorf("Could not create watch on directory %s : %s", directory, err)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
f.watchedDirectories[directory] = true
|
f.watchedDirectories[directory] = true
|
||||||
} else {
|
} else {
|
||||||
f.logger.Debugf("Watch for directory %s already exists", directory)
|
f.logger.Debugf("Watch for directory %s already exists", directory)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
f.logger.Infof("Adding file %s to datasources", file)
|
f.logger.Infof("Adding file %s to datasources", file)
|
||||||
f.files = append(f.files, file)
|
f.files = append(f.files, file)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -203,7 +184,7 @@ func (f *FileSource) ConfigureByDSN(dsn string, labels map[string]string, logger
|
||||||
args := strings.Split(dsn, "?")
|
args := strings.Split(dsn, "?")
|
||||||
|
|
||||||
if len(args[0]) == 0 {
|
if len(args[0]) == 0 {
|
||||||
return errors.New("empty file:// DSN")
|
return fmt.Errorf("empty file:// DSN")
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(args) == 2 && len(args[1]) != 0 {
|
if len(args) == 2 && len(args[1]) != 0 {
|
||||||
|
@ -211,30 +192,25 @@ func (f *FileSource) ConfigureByDSN(dsn string, labels map[string]string, logger
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("could not parse file args: %w", err)
|
return fmt.Errorf("could not parse file args: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
for key, value := range params {
|
for key, value := range params {
|
||||||
switch key {
|
switch key {
|
||||||
case "log_level":
|
case "log_level":
|
||||||
if len(value) != 1 {
|
if len(value) != 1 {
|
||||||
return errors.New("expected zero or one value for 'log_level'")
|
return errors.New("expected zero or one value for 'log_level'")
|
||||||
}
|
}
|
||||||
|
|
||||||
lvl, err := log.ParseLevel(value[0])
|
lvl, err := log.ParseLevel(value[0])
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("unknown level %s: %w", value[0], err)
|
return fmt.Errorf("unknown level %s: %w", value[0], err)
|
||||||
}
|
}
|
||||||
|
|
||||||
f.logger.Logger.SetLevel(lvl)
|
f.logger.Logger.SetLevel(lvl)
|
||||||
case "max_buffer_size":
|
case "max_buffer_size":
|
||||||
if len(value) != 1 {
|
if len(value) != 1 {
|
||||||
return errors.New("expected zero or one value for 'max_buffer_size'")
|
return errors.New("expected zero or one value for 'max_buffer_size'")
|
||||||
}
|
}
|
||||||
|
|
||||||
maxBufferSize, err := strconv.Atoi(value[0])
|
maxBufferSize, err := strconv.Atoi(value[0])
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("could not parse max_buffer_size %s: %w", value[0], err)
|
return fmt.Errorf("could not parse max_buffer_size %s: %w", value[0], err)
|
||||||
}
|
}
|
||||||
|
|
||||||
f.config.MaxBufferSize = maxBufferSize
|
f.config.MaxBufferSize = maxBufferSize
|
||||||
default:
|
default:
|
||||||
return fmt.Errorf("unknown parameter %s", key)
|
return fmt.Errorf("unknown parameter %s", key)
|
||||||
|
@ -247,7 +223,6 @@ func (f *FileSource) ConfigureByDSN(dsn string, labels map[string]string, logger
|
||||||
f.config.UniqueId = uuid
|
f.config.UniqueId = uuid
|
||||||
|
|
||||||
f.logger.Debugf("Will try pattern %s", args[0])
|
f.logger.Debugf("Will try pattern %s", args[0])
|
||||||
|
|
||||||
files, err := filepath.Glob(args[0])
|
files, err := filepath.Glob(args[0])
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("glob failure: %w", err)
|
return fmt.Errorf("glob failure: %w", err)
|
||||||
|
@ -265,7 +240,6 @@ func (f *FileSource) ConfigureByDSN(dsn string, labels map[string]string, logger
|
||||||
f.logger.Infof("Adding file %s to filelist", file)
|
f.logger.Infof("Adding file %s to filelist", file)
|
||||||
f.files = append(f.files, file)
|
f.files = append(f.files, file)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -281,26 +255,22 @@ func (f *FileSource) SupportedModes() []string {
|
||||||
// OneShotAcquisition reads a set of file and returns when done
|
// OneShotAcquisition reads a set of file and returns when done
|
||||||
func (f *FileSource) OneShotAcquisition(out chan types.Event, t *tomb.Tomb) error {
|
func (f *FileSource) OneShotAcquisition(out chan types.Event, t *tomb.Tomb) error {
|
||||||
f.logger.Debug("In oneshot")
|
f.logger.Debug("In oneshot")
|
||||||
|
|
||||||
for _, file := range f.files {
|
for _, file := range f.files {
|
||||||
fi, err := os.Stat(file)
|
fi, err := os.Stat(file)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("could not stat file %s : %w", file, err)
|
return fmt.Errorf("could not stat file %s : %w", file, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if fi.IsDir() {
|
if fi.IsDir() {
|
||||||
f.logger.Warnf("%s is a directory, ignoring it.", file)
|
f.logger.Warnf("%s is a directory, ignoring it.", file)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
f.logger.Infof("reading %s at once", file)
|
f.logger.Infof("reading %s at once", file)
|
||||||
|
|
||||||
err = f.readFile(file, out, t)
|
err = f.readFile(file, out, t)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -325,33 +295,27 @@ func (f *FileSource) StreamingAcquisition(out chan types.Event, t *tomb.Tomb) er
|
||||||
t.Go(func() error {
|
t.Go(func() error {
|
||||||
return f.monitorNewFiles(out, t)
|
return f.monitorNewFiles(out, t)
|
||||||
})
|
})
|
||||||
|
|
||||||
for _, file := range f.files {
|
for _, file := range f.files {
|
||||||
// before opening the file, check if we need to specifically avoid it. (XXX)
|
//before opening the file, check if we need to specifically avoid it. (XXX)
|
||||||
skip := false
|
skip := false
|
||||||
|
|
||||||
for _, pattern := range f.exclude_regexps {
|
for _, pattern := range f.exclude_regexps {
|
||||||
if pattern.MatchString(file) {
|
if pattern.MatchString(file) {
|
||||||
f.logger.Infof("file %s matches exclusion pattern %s, skipping", file, pattern.String())
|
f.logger.Infof("file %s matches exclusion pattern %s, skipping", file, pattern.String())
|
||||||
|
|
||||||
skip = true
|
skip = true
|
||||||
|
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if skip {
|
if skip {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
// cf. https://github.com/crowdsecurity/crowdsec/issues/1168
|
//cf. https://github.com/crowdsecurity/crowdsec/issues/1168
|
||||||
// do not rely on stat, reclose file immediately as it's opened by Tail
|
//do not rely on stat, reclose file immediately as it's opened by Tail
|
||||||
fd, err := os.Open(file)
|
fd, err := os.Open(file)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
f.logger.Errorf("unable to read %s : %s", file, err)
|
f.logger.Errorf("unable to read %s : %s", file, err)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := fd.Close(); err != nil {
|
if err := fd.Close(); err != nil {
|
||||||
f.logger.Errorf("unable to close %s : %s", file, err)
|
f.logger.Errorf("unable to close %s : %s", file, err)
|
||||||
continue
|
continue
|
||||||
|
@ -361,55 +325,37 @@ func (f *FileSource) StreamingAcquisition(out chan types.Event, t *tomb.Tomb) er
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("could not stat file %s : %w", file, err)
|
return fmt.Errorf("could not stat file %s : %w", file, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if fi.IsDir() {
|
if fi.IsDir() {
|
||||||
f.logger.Warnf("%s is a directory, ignoring it.", file)
|
f.logger.Warnf("%s is a directory, ignoring it.", file)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
pollFile := false
|
inotifyPoll := true
|
||||||
if f.config.PollWithoutInotify != nil {
|
if f.config.PollWithoutInotify != nil {
|
||||||
pollFile = *f.config.PollWithoutInotify
|
inotifyPoll = *f.config.PollWithoutInotify
|
||||||
} else {
|
} else {
|
||||||
networkFS, fsType, err := types.IsNetworkFS(file)
|
networkFS, fsType, err := types.IsNetworkFS(file)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
f.logger.Warningf("Could not get fs type for %s : %s", file, err)
|
f.logger.Warningf("Could not get fs type for %s : %s", file, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
f.logger.Debugf("fs for %s is network: %t (%s)", file, networkFS, fsType)
|
f.logger.Debugf("fs for %s is network: %t (%s)", file, networkFS, fsType)
|
||||||
|
|
||||||
if networkFS {
|
if networkFS {
|
||||||
f.logger.Warnf("Disabling inotify polling on %s as it is on a network share. You can manually set poll_without_inotify to true to make this message disappear, or to false to enforce inotify poll", file)
|
f.logger.Warnf("Disabling inotify poll on %s as it is on a network share. You can manually set poll_without_inotify to true to make this message disappear, or to false to enforce inotify poll", file)
|
||||||
pollFile = true
|
inotifyPoll = false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
filink, err := os.Lstat(file)
|
tail, err := tail.TailFile(file, tail.Config{ReOpen: true, Follow: true, Poll: inotifyPoll, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekEnd}, Logger: log.NewEntry(log.StandardLogger())})
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
f.logger.Errorf("Could not lstat() new file %s, ignoring it : %s", file, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if filink.Mode()&os.ModeSymlink == os.ModeSymlink && !pollFile {
|
|
||||||
f.logger.Warnf("File %s is a symlink, but inotify polling is enabled. Crowdsec will not be able to detect rotation. Consider setting poll_without_inotify to true in your configuration", file)
|
|
||||||
}
|
|
||||||
|
|
||||||
tail, err := tail.TailFile(file, tail.Config{ReOpen: true, Follow: true, Poll: pollFile, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekEnd}, Logger: log.NewEntry(log.StandardLogger())})
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
f.logger.Errorf("Could not start tailing file %s : %s", file, err)
|
f.logger.Errorf("Could not start tailing file %s : %s", file, err)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
f.tailMapMutex.Lock()
|
|
||||||
f.tails[file] = true
|
f.tails[file] = true
|
||||||
f.tailMapMutex.Unlock()
|
|
||||||
t.Go(func() error {
|
t.Go(func() error {
|
||||||
defer trace.CatchPanic("crowdsec/acquis/file/live/fsnotify")
|
defer trace.CatchPanic("crowdsec/acquis/file/live/fsnotify")
|
||||||
return f.tailFile(out, t, tail)
|
return f.tailFile(out, t, tail)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -419,7 +365,6 @@ func (f *FileSource) Dump() interface{} {
|
||||||
|
|
||||||
func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error {
|
func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error {
|
||||||
logger := f.logger.WithField("goroutine", "inotify")
|
logger := f.logger.WithField("goroutine", "inotify")
|
||||||
|
|
||||||
for {
|
for {
|
||||||
select {
|
select {
|
||||||
case event, ok := <-f.watcher.Events:
|
case event, ok := <-f.watcher.Events:
|
||||||
|
@ -433,62 +378,47 @@ func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error {
|
||||||
logger.Errorf("Could not stat() new file %s, ignoring it : %s", event.Name, err)
|
logger.Errorf("Could not stat() new file %s, ignoring it : %s", event.Name, err)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
if fi.IsDir() {
|
if fi.IsDir() {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.Debugf("Detected new file %s", event.Name)
|
logger.Debugf("Detected new file %s", event.Name)
|
||||||
|
|
||||||
matched := false
|
matched := false
|
||||||
|
|
||||||
for _, pattern := range f.config.Filenames {
|
for _, pattern := range f.config.Filenames {
|
||||||
logger.Debugf("Matching %s with %s", pattern, event.Name)
|
logger.Debugf("Matching %s with %s", pattern, event.Name)
|
||||||
|
|
||||||
matched, err = filepath.Match(pattern, event.Name)
|
matched, err = filepath.Match(pattern, event.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Errorf("Could not match pattern : %s", err)
|
logger.Errorf("Could not match pattern : %s", err)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
if matched {
|
if matched {
|
||||||
logger.Debugf("Matched %s with %s", pattern, event.Name)
|
logger.Debugf("Matched %s with %s", pattern, event.Name)
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if !matched {
|
if !matched {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
// before opening the file, check if we need to specifically avoid it. (XXX)
|
//before opening the file, check if we need to specifically avoid it. (XXX)
|
||||||
skip := false
|
skip := false
|
||||||
|
|
||||||
for _, pattern := range f.exclude_regexps {
|
for _, pattern := range f.exclude_regexps {
|
||||||
if pattern.MatchString(event.Name) {
|
if pattern.MatchString(event.Name) {
|
||||||
f.logger.Infof("file %s matches exclusion pattern %s, skipping", event.Name, pattern.String())
|
f.logger.Infof("file %s matches exclusion pattern %s, skipping", event.Name, pattern.String())
|
||||||
|
|
||||||
skip = true
|
skip = true
|
||||||
|
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if skip {
|
if skip {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
f.tailMapMutex.RLock()
|
|
||||||
if f.tails[event.Name] {
|
if f.tails[event.Name] {
|
||||||
f.tailMapMutex.RUnlock()
|
//we already have a tail on it, do not start a new one
|
||||||
// we already have a tail on it, do not start a new one
|
|
||||||
logger.Debugf("Already tailing file %s, not creating a new tail", event.Name)
|
logger.Debugf("Already tailing file %s, not creating a new tail", event.Name)
|
||||||
|
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
f.tailMapMutex.RUnlock()
|
//cf. https://github.com/crowdsecurity/crowdsec/issues/1168
|
||||||
// cf. https://github.com/crowdsecurity/crowdsec/issues/1168
|
//do not rely on stat, reclose file immediately as it's opened by Tail
|
||||||
// do not rely on stat, reclose file immediately as it's opened by Tail
|
|
||||||
fd, err := os.Open(event.Name)
|
fd, err := os.Open(event.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
f.logger.Errorf("unable to read %s : %s", event.Name, err)
|
f.logger.Errorf("unable to read %s : %s", event.Name, err)
|
||||||
|
@ -499,41 +429,31 @@ func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
pollFile := false
|
inotifyPoll := true
|
||||||
if f.config.PollWithoutInotify != nil {
|
if f.config.PollWithoutInotify != nil {
|
||||||
pollFile = *f.config.PollWithoutInotify
|
inotifyPoll = *f.config.PollWithoutInotify
|
||||||
} else {
|
} else {
|
||||||
networkFS, fsType, err := types.IsNetworkFS(event.Name)
|
if f.config.PollWithoutInotify != nil {
|
||||||
if err != nil {
|
inotifyPoll = *f.config.PollWithoutInotify
|
||||||
f.logger.Warningf("Could not get fs type for %s : %s", event.Name, err)
|
} else {
|
||||||
|
networkFS, fsType, err := types.IsNetworkFS(event.Name)
|
||||||
|
if err != nil {
|
||||||
|
f.logger.Warningf("Could not get fs type for %s : %s", event.Name, err)
|
||||||
|
}
|
||||||
|
f.logger.Debugf("fs for %s is network: %t (%s)", event.Name, networkFS, fsType)
|
||||||
|
if networkFS {
|
||||||
|
inotifyPoll = false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
f.logger.Debugf("fs for %s is network: %t (%s)", event.Name, networkFS, fsType)
|
|
||||||
if networkFS {
|
|
||||||
pollFile = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
filink, err := os.Lstat(event.Name)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
logger.Errorf("Could not lstat() new file %s, ignoring it : %s", event.Name, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if filink.Mode()&os.ModeSymlink == os.ModeSymlink && !pollFile {
|
|
||||||
logger.Warnf("File %s is a symlink, but inotify polling is enabled. Crowdsec will not be able to detect rotation. Consider setting poll_without_inotify to true in your configuration", event.Name)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
//Slightly different parameters for Location, as we want to read the first lines of the newly created file
|
//Slightly different parameters for Location, as we want to read the first lines of the newly created file
|
||||||
tail, err := tail.TailFile(event.Name, tail.Config{ReOpen: true, Follow: true, Poll: pollFile, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekStart}})
|
tail, err := tail.TailFile(event.Name, tail.Config{ReOpen: true, Follow: true, Poll: inotifyPoll, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekStart}})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
logger.Errorf("Could not start tailing file %s : %s", event.Name, err)
|
logger.Errorf("Could not start tailing file %s : %s", event.Name, err)
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
|
|
||||||
f.tailMapMutex.Lock()
|
|
||||||
f.tails[event.Name] = true
|
f.tails[event.Name] = true
|
||||||
f.tailMapMutex.Unlock()
|
|
||||||
t.Go(func() error {
|
t.Go(func() error {
|
||||||
defer trace.CatchPanic("crowdsec/acquis/tailfile")
|
defer trace.CatchPanic("crowdsec/acquis/tailfile")
|
||||||
return f.tailFile(out, t, tail)
|
return f.tailFile(out, t, tail)
|
||||||
|
@ -543,14 +463,12 @@ func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error {
|
||||||
if !ok {
|
if !ok {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.Errorf("Error while monitoring folder: %s", err)
|
logger.Errorf("Error while monitoring folder: %s", err)
|
||||||
case <-t.Dying():
|
case <-t.Dying():
|
||||||
err := f.watcher.Close()
|
err := f.watcher.Close()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("could not remove all inotify watches: %w", err)
|
return fmt.Errorf("could not remove all inotify watches: %w", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
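Note on the hunks above: the master side computes a pollFile flag (default false, forced on for network filesystems, with an extra Lstat-based symlink warning), while the cti-swagge side keeps an inotifyPoll flag (default true, switched off for network filesystems); both values end up in tail.Config.Poll. Below is a minimal, self-contained sketch of the master-side decision only, with isNetworkFS standing in for the types.IsNetworkFS helper referenced in the diff.

package main

import "fmt"

// Sketch only: decidePoll reproduces the master-side logic from the hunk
// above. An explicit poll_without_inotify setting wins; otherwise polling is
// enabled only when the file sits on a network filesystem.
// isNetworkFS stands in for types.IsNetworkFS used in the diff.
func decidePoll(pollWithoutInotify *bool, isNetworkFS func(path string) (bool, string, error), path string) bool {
	if pollWithoutInotify != nil {
		return *pollWithoutInotify
	}
	networkFS, fsType, err := isNetworkFS(path)
	if err != nil {
		fmt.Printf("could not get fs type for %s: %s\n", path, err)
	}
	fmt.Printf("fs for %s is network: %t (%s)\n", path, networkFS, fsType)
	return networkFS
}

func main() {
	// Hypothetical network-filesystem probe used for illustration.
	nfs := func(string) (bool, string, error) { return true, "nfs", nil }
	fmt.Println("poll:", decidePoll(nil, nfs, "/mnt/logs/app.log"))
}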
@@ -559,61 +477,46 @@ func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error {
 func (f *FileSource) tailFile(out chan types.Event, t *tomb.Tomb, tail *tail.Tail) error {
 logger := f.logger.WithField("tail", tail.Filename)
 logger.Debugf("-> Starting tail of %s", tail.Filename)

 for {
 select {
 case <-t.Dying():
 logger.Infof("File datasource %s stopping", tail.Filename)

 if err := tail.Stop(); err != nil {
 f.logger.Errorf("error in stop : %s", err)
 return err
 }

 return nil
-case <-tail.Dying(): // our tailer is dying
+case <-tail.Dying(): //our tailer is dying
 err := tail.Err()
 errMsg := fmt.Sprintf("file reader of %s died", tail.Filename)
 if err != nil {
 errMsg = fmt.Sprintf(errMsg+" : %s", err)
 }

 logger.Warningf(errMsg)
-return nil
+t.Kill(fmt.Errorf(errMsg))
+return fmt.Errorf(errMsg)
 case line := <-tail.Lines:
 if line == nil {
 logger.Warningf("tail for %s is empty", tail.Filename)
 continue
 }

 if line.Err != nil {
 logger.Warningf("fetch error : %v", line.Err)
 return line.Err
 }

-if line.Text == "" { // skip empty lines
+if line.Text == "" { //skip empty lines
 continue
 }

-if f.metricsLevel != configuration.METRICS_NONE {
 linesRead.With(prometheus.Labels{"source": tail.Filename}).Inc()
-}
-
-src := tail.Filename
-if f.metricsLevel == configuration.METRICS_AGGREGATE {
-src = filepath.Base(tail.Filename)
-}

 l := types.Line{
 Raw: trimLine(line.Text),
 Labels: f.config.Labels,
 Time: line.Time,
-Src: src,
+Src: tail.Filename,
 Process: true,
 Module: f.GetName(),
 }
-// we're tailing, it must be real time logs
+//we're tailing, it must be real time logs
 logger.Debugf("pushing %+v", l)

 expectMode := types.LIVE
@@ -627,14 +530,12 @@ func (f *FileSource) tailFile(out chan types.Event, t *tomb.Tomb, tail *tail.Tai

 func (f *FileSource) readFile(filename string, out chan types.Event, t *tomb.Tomb) error {
 var scanner *bufio.Scanner

 logger := f.logger.WithField("oneshot", filename)
 fd, err := os.Open(filename)

 if err != nil {
 return fmt.Errorf("failed opening %s: %w", filename, err)
 }

 defer fd.Close()

 if strings.HasSuffix(filename, ".gz") {
@@ -643,20 +544,17 @@ func (f *FileSource) readFile(filename string, out chan types.Event, t *tomb.Tom
 logger.Errorf("Failed to read gz file: %s", err)
 return fmt.Errorf("failed to read gz %s: %w", filename, err)
 }

 defer gz.Close()
 scanner = bufio.NewScanner(gz)

 } else {
 scanner = bufio.NewScanner(fd)
 }

 scanner.Split(bufio.ScanLines)

 if f.config.MaxBufferSize > 0 {
 buf := make([]byte, 0, 64*1024)
 scanner.Buffer(buf, f.config.MaxBufferSize)
 }

 for scanner.Scan() {
 select {
 case <-t.Dying():
@@ -666,7 +564,6 @@ func (f *FileSource) readFile(filename string, out chan types.Event, t *tomb.Tom
 if scanner.Text() == "" {
 continue
 }

 l := types.Line{
 Raw: scanner.Text(),
 Time: time.Now().UTC(),
@@ -678,19 +575,15 @@ func (f *FileSource) readFile(filename string, out chan types.Event, t *tomb.Tom
 logger.Debugf("line %s", l.Raw)
 linesRead.With(prometheus.Labels{"source": filename}).Inc()

-// we're reading logs at once, it must be time-machine buckets
+//we're reading logs at once, it must be time-machine buckets
 out <- types.Event{Line: l, Process: true, Type: types.LOG, ExpectMode: types.TIMEMACHINE}
 }
 }

 if err := scanner.Err(); err != nil {
 logger.Errorf("Error while reading file: %s", err)
 t.Kill(err)

 return err
 }

 t.Kill(nil)

 return nil
 }
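Across these file-datasource hunks, the master side threads a metrics level through Configure and gates its Prometheus counters on it, while the cti-swagge side always increments linesRead and always uses the full filename as Src. The following is a small, runnable sketch of that gating only, with placeholder values standing in for the configuration.METRICS_* constants referenced in the diff.

package main

import (
	"fmt"
	"path/filepath"
)

// Sketch only: placeholder values for the metrics levels that the diff
// references as configuration.METRICS_NONE / METRICS_AGGREGATE / METRICS_FULL.
const (
	METRICS_NONE = iota
	METRICS_AGGREGATE
	METRICS_FULL
)

// lineSource mirrors the master-side tailFile hunk: the linesRead counter is
// only incremented when metrics are enabled, and at METRICS_AGGREGATE the
// event's Src is reduced to the file's base name.
func lineSource(metricsLevel int, filename string) (countLine bool, src string) {
	countLine = metricsLevel != METRICS_NONE
	src = filename
	if metricsLevel == METRICS_AGGREGATE {
		src = filepath.Base(filename)
	}
	return countLine, src
}

func main() {
	for _, lvl := range []int{METRICS_NONE, METRICS_AGGREGATE, METRICS_FULL} {
		count, src := lineSource(lvl, "/var/log/nginx/access.log")
		fmt.Printf("level=%d count=%v src=%s\n", lvl, count, src)
	}
}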
@@ -15,7 +15,6 @@ import (

 "github.com/crowdsecurity/go-cs-lib/cstest"

-"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
 fileacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/file"
 "github.com/crowdsecurity/crowdsec/pkg/types"
 )
@@ -57,7 +56,7 @@ exclude_regexps: ["as[a-$d"]`,
 tc := tc
 t.Run(tc.name, func(t *testing.T) {
 f := fileacquisition.FileSource{}
-err := f.Configure([]byte(tc.config), subLogger, configuration.METRICS_NONE)
+err := f.Configure([]byte(tc.config), subLogger)
 cstest.RequireErrorContains(t, err, tc.expectedErr)
 })
 }
@@ -223,7 +222,7 @@ filename: test_files/test_delete.log`,
 tc.setup()
 }

-err := f.Configure([]byte(tc.config), subLogger, configuration.METRICS_NONE)
+err := f.Configure([]byte(tc.config), subLogger)
 cstest.RequireErrorContains(t, err, tc.expectedConfigErr)
 if tc.expectedConfigErr != "" {
 return
@@ -385,7 +384,7 @@ force_inotify: true`, testPattern),
 tc.setup()
 }

-err := f.Configure([]byte(tc.config), subLogger, configuration.METRICS_NONE)
+err := f.Configure([]byte(tc.config), subLogger)
 require.NoError(t, err)

 if tc.afterConfigure != nil {
@@ -456,7 +455,7 @@ exclude_regexps: ["\\.gz$"]`
 })

 f := fileacquisition.FileSource{}
-if err := f.Configure([]byte(config), subLogger, configuration.METRICS_NONE); err != nil {
+if err := f.Configure([]byte(config), subLogger); err != nil {
 subLogger.Fatalf("unexpected error: %s", err)
 }
@@ -26,11 +26,10 @@ type JournalCtlConfiguration struct {
 }

 type JournalCtlSource struct {
-metricsLevel int
 config JournalCtlConfiguration
 logger *log.Entry
 src string
 args []string
 }

 const journalctlCmd string = "journalctl"
@@ -132,9 +131,7 @@ func (j *JournalCtlSource) runJournalCtl(out chan types.Event, t *tomb.Tomb) err
 l.Src = j.src
 l.Process = true
 l.Module = j.GetName()
-if j.metricsLevel != configuration.METRICS_NONE {
 linesRead.With(prometheus.Labels{"source": j.src}).Inc()
-}
 var evt types.Event
 if !j.config.UseTimeMachine {
 evt = types.Event{Line: l, Process: true, Type: types.LOG, ExpectMode: types.LIVE}
@@ -197,9 +194,8 @@ func (j *JournalCtlSource) UnmarshalConfig(yamlConfig []byte) error {
 return nil
 }

-func (j *JournalCtlSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error {
+func (j *JournalCtlSource) Configure(yamlConfig []byte, logger *log.Entry) error {
 j.logger = logger
-j.metricsLevel = MetricsLevel

 err := j.UnmarshalConfig(yamlConfig)
 if err != nil {
@@ -10,7 +10,6 @@ import (

 "github.com/crowdsecurity/go-cs-lib/cstest"

-"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
 "github.com/crowdsecurity/crowdsec/pkg/types"
 log "github.com/sirupsen/logrus"
 "github.com/sirupsen/logrus/hooks/test"
@@ -53,7 +52,7 @@ journalctl_filter:

 for _, test := range tests {
 f := JournalCtlSource{}
-err := f.Configure([]byte(test.config), subLogger, configuration.METRICS_NONE)
+err := f.Configure([]byte(test.config), subLogger)
 cstest.AssertErrorContains(t, err, test.expectedErr)
 }
 }
@@ -145,9 +144,9 @@ journalctl_filter:
 }
 for _, ts := range tests {
 var (
 logger *log.Logger
 subLogger *log.Entry
 hook *test.Hook
 )

 if ts.expectedOutput != "" {
@@ -166,7 +165,7 @@ journalctl_filter:
 out := make(chan types.Event, 100)
 j := JournalCtlSource{}

-err := j.Configure([]byte(ts.config), subLogger, configuration.METRICS_NONE)
+err := j.Configure([]byte(ts.config), subLogger)
 if err != nil {
 t.Fatalf("Unexpected error : %s", err)
 }
@@ -219,9 +218,9 @@ journalctl_filter:
 }
 for _, ts := range tests {
 var (
 logger *log.Logger
 subLogger *log.Entry
 hook *test.Hook
 )

 if ts.expectedOutput != "" {
@@ -240,7 +239,7 @@ journalctl_filter:
 out := make(chan types.Event)
 j := JournalCtlSource{}

-err := j.Configure([]byte(ts.config), subLogger, configuration.METRICS_NONE)
+err := j.Configure([]byte(ts.config), subLogger)
 if err != nil {
 t.Fatalf("Unexpected error : %s", err)
 }
@@ -52,10 +52,9 @@ type TLSConfig struct {
 }

 type KafkaSource struct {
-metricsLevel int
 Config KafkaConfiguration
 logger *log.Entry
 Reader *kafka.Reader
 }

 func (k *KafkaSource) GetUuid() string {
@@ -87,9 +86,8 @@ func (k *KafkaSource) UnmarshalConfig(yamlConfig []byte) error {
 return err
 }

-func (k *KafkaSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error {
+func (k *KafkaSource) Configure(yamlConfig []byte, logger *log.Entry) error {
 k.logger = logger
-k.metricsLevel = MetricsLevel

 k.logger.Debugf("start configuring %s source", dataSourceName)

@@ -172,9 +170,7 @@ func (k *KafkaSource) ReadMessage(out chan types.Event) error {
 Module: k.GetName(),
 }
 k.logger.Tracef("line with message read from topic '%s': %+v", k.Config.Topic, l)
-if k.metricsLevel != configuration.METRICS_NONE {
 linesRead.With(prometheus.Labels{"topic": k.Config.Topic}).Inc()
-}
 var evt types.Event

 if !k.Config.UseTimeMachine {
@@ -15,7 +15,6 @@ import (

 "github.com/crowdsecurity/go-cs-lib/cstest"

-"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
 "github.com/crowdsecurity/crowdsec/pkg/types"
 )

@@ -76,7 +75,7 @@ group_id: crowdsec`,
 })
 for _, test := range tests {
 k := KafkaSource{}
-err := k.Configure([]byte(test.config), subLogger, configuration.METRICS_NONE)
+err := k.Configure([]byte(test.config), subLogger)
 cstest.AssertErrorContains(t, err, test.expectedErr)
 }
 }
@@ -170,7 +169,7 @@ func TestStreamingAcquisition(t *testing.T) {
 source: kafka
 brokers:
 - localhost:9092
-topic: crowdsecplaintext`), subLogger, configuration.METRICS_NONE)
+topic: crowdsecplaintext`), subLogger)
 if err != nil {
 t.Fatalf("could not configure kafka source : %s", err)
 }
@@ -246,7 +245,7 @@ tls:
 client_cert: ./testdata/kafkaClient.certificate.pem
 client_key: ./testdata/kafkaClient.key
 ca_cert: ./testdata/snakeoil-ca-1.crt
-`), subLogger, configuration.METRICS_NONE)
+`), subLogger)
 if err != nil {
 t.Fatalf("could not configure kafka source : %s", err)
 }
@@ -38,7 +38,6 @@ type KinesisConfiguration struct {
 }

 type KinesisSource struct {
-metricsLevel int
 Config KinesisConfiguration
 logger *log.Entry
 kClient *kinesis.Kinesis
@@ -150,9 +149,8 @@ func (k *KinesisSource) UnmarshalConfig(yamlConfig []byte) error {
 return nil
 }

-func (k *KinesisSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error {
+func (k *KinesisSource) Configure(yamlConfig []byte, logger *log.Entry) error {
 k.logger = logger
-k.metricsLevel = MetricsLevel

 err := k.UnmarshalConfig(yamlConfig)
 if err != nil {
@@ -285,15 +283,11 @@ func (k *KinesisSource) RegisterConsumer() (*kinesis.RegisterStreamConsumerOutpu
 func (k *KinesisSource) ParseAndPushRecords(records []*kinesis.Record, out chan types.Event, logger *log.Entry, shardId string) {
 for _, record := range records {
 if k.Config.StreamARN != "" {
-if k.metricsLevel != configuration.METRICS_NONE {
 linesReadShards.With(prometheus.Labels{"stream": k.Config.StreamARN, "shard": shardId}).Inc()
 linesRead.With(prometheus.Labels{"stream": k.Config.StreamARN}).Inc()
-}
 } else {
-if k.metricsLevel != configuration.METRICS_NONE {
 linesReadShards.With(prometheus.Labels{"stream": k.Config.StreamName, "shard": shardId}).Inc()
 linesRead.With(prometheus.Labels{"stream": k.Config.StreamName}).Inc()
-}
 }
 var data []CloudwatchSubscriptionLogEvent
 var err error
@@ -17,7 +17,6 @@ import (
 "github.com/aws/aws-sdk-go/aws"
 "github.com/aws/aws-sdk-go/aws/session"
 "github.com/aws/aws-sdk-go/service/kinesis"
-"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
 "github.com/crowdsecurity/crowdsec/pkg/types"
 log "github.com/sirupsen/logrus"
 "github.com/stretchr/testify/assert"
@@ -144,7 +143,7 @@ stream_arn: arn:aws:kinesis:eu-west-1:123456789012:stream/my-stream`,
 })
 for _, test := range tests {
 f := KinesisSource{}
-err := f.Configure([]byte(test.config), subLogger, configuration.METRICS_NONE)
+err := f.Configure([]byte(test.config), subLogger)
 cstest.AssertErrorContains(t, err, test.expectedErr)
 }
 }
@@ -173,7 +172,7 @@ stream_name: stream-1-shard`,
 config := fmt.Sprintf(test.config, endpoint)
 err := f.Configure([]byte(config), log.WithFields(log.Fields{
 "type": "kinesis",
-}), configuration.METRICS_NONE)
+}))
 if err != nil {
 t.Fatalf("Error configuring source: %s", err)
 }
@@ -219,7 +218,7 @@ stream_name: stream-2-shards`,
 config := fmt.Sprintf(test.config, endpoint)
 err := f.Configure([]byte(config), log.WithFields(log.Fields{
 "type": "kinesis",
-}), configuration.METRICS_NONE)
+}))
 if err != nil {
 t.Fatalf("Error configuring source: %s", err)
 }
@@ -268,7 +267,7 @@ from_subscription: true`,
 config := fmt.Sprintf(test.config, endpoint)
 err := f.Configure([]byte(config), log.WithFields(log.Fields{
 "type": "kinesis",
-}), configuration.METRICS_NONE)
+}))
 if err != nil {
 t.Fatalf("Error configuring source: %s", err)
 }
@@ -28,13 +28,12 @@ type KubernetesAuditConfiguration struct {
 }

 type KubernetesAuditSource struct {
-metricsLevel int
 config KubernetesAuditConfiguration
 logger *log.Entry
 mux *http.ServeMux
 server *http.Server
 outChan chan types.Event
 addr string
 }

 var eventCount = prometheus.NewCounterVec(
@@ -94,9 +93,8 @@ func (ka *KubernetesAuditSource) UnmarshalConfig(yamlConfig []byte) error {
 return nil
 }

-func (ka *KubernetesAuditSource) Configure(config []byte, logger *log.Entry, MetricsLevel int) error {
+func (ka *KubernetesAuditSource) Configure(config []byte, logger *log.Entry) error {
 ka.logger = logger
-ka.metricsLevel = MetricsLevel

 err := ka.UnmarshalConfig(config)
 if err != nil {
@@ -163,10 +161,7 @@ func (ka *KubernetesAuditSource) Dump() interface{} {
 }

 func (ka *KubernetesAuditSource) webhookHandler(w http.ResponseWriter, r *http.Request) {
-if ka.metricsLevel != configuration.METRICS_NONE {
 requestCount.WithLabelValues(ka.addr).Inc()
-}
 if r.Method != http.MethodPost {
 w.WriteHeader(http.StatusMethodNotAllowed)
 return
@@ -190,9 +185,7 @@ func (ka *KubernetesAuditSource) webhookHandler(w http.ResponseWriter, r *http.R

 remoteIP := strings.Split(r.RemoteAddr, ":")[0]
 for _, auditEvent := range auditEvents.Items {
-if ka.metricsLevel != configuration.METRICS_NONE {
 eventCount.WithLabelValues(ka.addr).Inc()
-}
 bytesEvent, err := json.Marshal(auditEvent)
 if err != nil {
 ka.logger.Errorf("Error marshaling audit event: %s", err)
@@ -6,7 +6,6 @@ import (
 "testing"
 "time"

-"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
 "github.com/crowdsecurity/crowdsec/pkg/types"
 log "github.com/sirupsen/logrus"
 "github.com/stretchr/testify/assert"
@@ -82,7 +81,7 @@ webhook_path: /k8s-audit`,

 require.NoError(t, err)

-err = f.Configure([]byte(test.config), subLogger, configuration.METRICS_NONE)
+err = f.Configure([]byte(test.config), subLogger)

 require.NoError(t, err)
 f.StreamingAcquisition(out, tb)
@@ -254,7 +253,7 @@ webhook_path: /k8s-audit`,
 f := KubernetesAuditSource{}
 err := f.UnmarshalConfig([]byte(test.config))
 require.NoError(t, err)
-err = f.Configure([]byte(test.config), subLogger, configuration.METRICS_NONE)
+err = f.Configure([]byte(test.config), subLogger)

 require.NoError(t, err)
@@ -6,13 +6,13 @@ https://grafana.com/docs/loki/latest/api/#get-lokiapiv1tail

 import (
 "context"
-"errors"
 "fmt"
 "net/url"
 "strconv"
 "strings"
 "time"

+"github.com/pkg/errors"
 "github.com/prometheus/client_golang/prometheus"
 log "github.com/sirupsen/logrus"
 tomb "gopkg.in/tomb.v2"
@@ -57,8 +57,7 @@ type LokiConfiguration struct {
 }

 type LokiSource struct {
-metricsLevel int
 Config LokiConfiguration

 Client *lokiclient.LokiClient

@@ -119,10 +118,9 @@ func (l *LokiSource) UnmarshalConfig(yamlConfig []byte) error {
 return nil
 }

-func (l *LokiSource) Configure(config []byte, logger *log.Entry, MetricsLevel int) error {
+func (l *LokiSource) Configure(config []byte, logger *log.Entry) error {
 l.Config = LokiConfiguration{}
 l.logger = logger
-l.metricsLevel = MetricsLevel
 err := l.UnmarshalConfig(config)
 if err != nil {
 return err
@@ -304,9 +302,7 @@ func (l *LokiSource) readOneEntry(entry lokiclient.Entry, labels map[string]stri
 ll.Process = true
 ll.Module = l.GetName()

-if l.metricsLevel != configuration.METRICS_NONE {
 linesRead.With(prometheus.Labels{"source": l.Config.URL}).Inc()
-}
 expectMode := types.LIVE
 if l.Config.UseTimeMachine {
 expectMode = types.TIMEMACHINE
@@ -20,7 +20,6 @@ import (

 "github.com/crowdsecurity/go-cs-lib/cstest"

-"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
 "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/loki"
 "github.com/crowdsecurity/crowdsec/pkg/types"
 )
@@ -131,7 +130,7 @@ query: >
 for _, test := range tests {
 t.Run(test.testName, func(t *testing.T) {
 lokiSource := loki.LokiSource{}
-err := lokiSource.Configure([]byte(test.config), subLogger, configuration.METRICS_NONE)
+err := lokiSource.Configure([]byte(test.config), subLogger)
 cstest.AssertErrorContains(t, err, test.expectedErr)

 if test.password != "" {
@@ -347,7 +346,7 @@ since: 1h
 "type": "loki",
 })
 lokiSource := loki.LokiSource{}
-err := lokiSource.Configure([]byte(ts.config), subLogger, configuration.METRICS_NONE)
+err := lokiSource.Configure([]byte(ts.config), subLogger)

 if err != nil {
 t.Fatalf("Unexpected error : %s", err)
@@ -437,7 +436,7 @@ query: >
 lokiTomb := tomb.Tomb{}
 lokiSource := loki.LokiSource{}

-err := lokiSource.Configure([]byte(ts.config), subLogger, configuration.METRICS_NONE)
+err := lokiSource.Configure([]byte(ts.config), subLogger)
 if err != nil {
 t.Fatalf("Unexpected error : %s", err)
 }
@@ -515,7 +514,7 @@ query: >
 title := time.Now().String()
 lokiSource := loki.LokiSource{}

-err := lokiSource.Configure([]byte(config), subLogger, configuration.METRICS_NONE)
+err := lokiSource.Configure([]byte(config), subLogger)
 if err != nil {
 t.Fatalf("Unexpected error : %s", err)
 }
@@ -47,16 +47,15 @@ type S3Configuration struct {
 }

 type S3Source struct {
-MetricsLevel int
 Config S3Configuration
 logger *log.Entry
 s3Client s3iface.S3API
 sqsClient sqsiface.SQSAPI
 readerChan chan S3Object
 t *tomb.Tomb
 out chan types.Event
 ctx aws.Context
 cancel context.CancelFunc
 }

 type S3Object struct {
@@ -346,9 +345,7 @@ func (s *S3Source) sqsPoll() error {
 logger.Tracef("SQS output: %v", out)
 logger.Debugf("Received %d messages from SQS", len(out.Messages))
 for _, message := range out.Messages {
-if s.MetricsLevel != configuration.METRICS_NONE {
 sqsMessagesReceived.WithLabelValues(s.Config.SQSName).Inc()
-}
 bucket, key, err := s.extractBucketAndPrefix(message.Body)
 if err != nil {
 logger.Errorf("Error while parsing SQS message: %s", err)
@@ -429,20 +426,14 @@ func (s *S3Source) readFile(bucket string, key string) error {
 default:
 text := scanner.Text()
 logger.Tracef("Read line %s", text)
-if s.MetricsLevel != configuration.METRICS_NONE {
 linesRead.WithLabelValues(bucket).Inc()
-}
 l := types.Line{}
 l.Raw = text
 l.Labels = s.Config.Labels
 l.Time = time.Now().UTC()
 l.Process = true
 l.Module = s.GetName()
-if s.MetricsLevel == configuration.METRICS_FULL {
 l.Src = bucket + "/" + key
-} else if s.MetricsLevel == configuration.METRICS_AGGREGATE {
-l.Src = bucket
-}
 var evt types.Event
 if !s.Config.UseTimeMachine {
 evt = types.Event{Line: l, Process: true, Type: types.LOG, ExpectMode: types.LIVE}
@@ -455,9 +446,7 @@ func (s *S3Source) readFile(bucket string, key string) error {
 if err := scanner.Err(); err != nil {
 return fmt.Errorf("failed to read object %s/%s: %s", bucket, key, err)
 }
-if s.MetricsLevel != configuration.METRICS_NONE {
 objectsRead.WithLabelValues(bucket).Inc()
-}
 return nil
 }

@@ -516,7 +505,7 @@ func (s *S3Source) UnmarshalConfig(yamlConfig []byte) error {
 return nil
 }

-func (s *S3Source) Configure(yamlConfig []byte, logger *log.Entry, metricsLevel int) error {
+func (s *S3Source) Configure(yamlConfig []byte, logger *log.Entry) error {
 err := s.UnmarshalConfig(yamlConfig)
 if err != nil {
 return err
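The S3 hunks apply the same metrics-level pattern to object reads on the master side: counters are skipped at METRICS_NONE, and the per-line Src is the full bucket/key only at METRICS_FULL, falling back to the bucket alone at METRICS_AGGREGATE. A self-contained sketch of that Src decision follows; the constant values are placeholders for configuration.METRICS_*.

package main

import "fmt"

// Sketch only: placeholder values for configuration.METRICS_* from the diff.
const (
	METRICS_NONE = iota
	METRICS_AGGREGATE
	METRICS_FULL
)

// s3LineSrc mirrors the Src assignment in the master-side readFile hunk:
// the full object path at METRICS_FULL, just the bucket at METRICS_AGGREGATE,
// and no Src at all otherwise.
func s3LineSrc(metricsLevel int, bucket, key string) string {
	switch metricsLevel {
	case METRICS_FULL:
		return bucket + "/" + key
	case METRICS_AGGREGATE:
		return bucket
	default:
		return ""
	}
}

func main() {
	fmt.Println(s3LineSrc(METRICS_FULL, "my-bucket", "logs/2024/01/app.log"))
	fmt.Println(s3LineSrc(METRICS_AGGREGATE, "my-bucket", "logs/2024/01/app.log"))
}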
Some files were not shown because too many files have changed in this diff.