Compare commits

..

6 commits

Author SHA1 Message Date
marco
fc1aa395fc wip 2024-02-27 11:14:50 +01:00
marco
7a0bb6d7dc windows CSI 2024-02-27 11:14:50 +01:00
marco
7428ec351b cleanup 2024-02-27 11:14:46 +01:00
marco
c755a0015e Fix time machine output with progress bar (linux only)
This has a log formatter that clears each line before printing overflow information.

TODO: fix for windows as well
2024-02-27 11:14:13 +01:00
marco
ad34b57419 Customize progress bar, display on stderr, disable if non-terminal 2024-02-27 11:14:13 +01:00
marco
28f2714cdc Progress bar for one shot acquisition (time machine -- cold logs)
This also disables prometheus metrics in one shot mode because
in most cases the port is already taken by LAPI.
2024-02-27 11:14:13 +01:00
198 changed files with 2590 additions and 5583 deletions

View file

@ -42,7 +42,7 @@ issue:
3. Check [Releases](https://github.com/crowdsecurity/crowdsec/releases/latest) to make sure your agent is on the latest version. 3. Check [Releases](https://github.com/crowdsecurity/crowdsec/releases/latest) to make sure your agent is on the latest version.
- prefix: kind - prefix: kind
list: ['feature', 'bug', 'packaging', 'enhancement', 'refactoring'] list: ['feature', 'bug', 'packaging', 'enhancement']
multiple: false multiple: false
author_association: author_association:
author: true author: true
@ -54,7 +54,6 @@ issue:
@$AUTHOR: There is no 'kind' label on this issue. You need a 'kind' label to start the triage process. @$AUTHOR: There is no 'kind' label on this issue. You need a 'kind' label to start the triage process.
* `/kind feature` * `/kind feature`
* `/kind enhancement` * `/kind enhancement`
* `/kind refactoring`
* `/kind bug` * `/kind bug`
* `/kind packaging` * `/kind packaging`
@ -66,13 +65,12 @@ pull_request:
labels: labels:
- prefix: kind - prefix: kind
multiple: false multiple: false
list: [ 'feature', 'enhancement', 'fix', 'chore', 'dependencies', 'refactoring'] list: [ 'feature', 'enhancement', 'fix', 'chore', 'dependencies']
needs: needs:
comment: | comment: |
@$AUTHOR: There is no 'kind' label on this PR. You need a 'kind' label to generate the release automatically. @$AUTHOR: There is no 'kind' label on this PR. You need a 'kind' label to generate the release automatically.
* `/kind feature` * `/kind feature`
* `/kind enhancement` * `/kind enhancement`
* `/kind refactoring`
* `/kind fix` * `/kind fix`
* `/kind chore` * `/kind chore`
* `/kind dependencies` * `/kind dependencies`

View file

@ -33,7 +33,7 @@ jobs:
- name: "Set up Go" - name: "Set up Go"
uses: actions/setup-go@v5 uses: actions/setup-go@v5
with: with:
go-version: "1.22.2" go-version: "1.21.7"
- name: "Install bats dependencies" - name: "Install bats dependencies"
env: env:
@ -53,7 +53,7 @@ jobs:
run: ./test/bin/collect-hub-coverage >> $GITHUB_ENV run: ./test/bin/collect-hub-coverage >> $GITHUB_ENV
- name: "Create Parsers badge" - name: "Create Parsers badge"
uses: schneegans/dynamic-badges-action@v1.7.0 uses: schneegans/dynamic-badges-action@v1.6.0
if: ${{ github.ref == 'refs/heads/master' && github.repository_owner == 'crowdsecurity' }} if: ${{ github.ref == 'refs/heads/master' && github.repository_owner == 'crowdsecurity' }}
with: with:
auth: ${{ secrets.GIST_BADGES_SECRET }} auth: ${{ secrets.GIST_BADGES_SECRET }}
@ -64,7 +64,7 @@ jobs:
color: ${{ env.SCENARIO_BADGE_COLOR }} color: ${{ env.SCENARIO_BADGE_COLOR }}
- name: "Create Scenarios badge" - name: "Create Scenarios badge"
uses: schneegans/dynamic-badges-action@v1.7.0 uses: schneegans/dynamic-badges-action@v1.6.0
if: ${{ github.ref == 'refs/heads/master' && github.repository_owner == 'crowdsecurity' }} if: ${{ github.ref == 'refs/heads/master' && github.repository_owner == 'crowdsecurity' }}
with: with:
auth: ${{ secrets.GIST_BADGES_SECRET }} auth: ${{ secrets.GIST_BADGES_SECRET }}

View file

@ -36,7 +36,7 @@ jobs:
- name: "Set up Go" - name: "Set up Go"
uses: actions/setup-go@v5 uses: actions/setup-go@v5
with: with:
go-version: "1.22.2" go-version: "1.21.7"
- name: "Install bats dependencies" - name: "Install bats dependencies"
env: env:

View file

@ -45,7 +45,7 @@ jobs:
- name: "Set up Go" - name: "Set up Go"
uses: actions/setup-go@v5 uses: actions/setup-go@v5
with: with:
go-version: "1.22.2" go-version: "1.21.7"
- name: "Install bats dependencies" - name: "Install bats dependencies"
env: env:

View file

@ -28,7 +28,7 @@ jobs:
- name: "Set up Go" - name: "Set up Go"
uses: actions/setup-go@v5 uses: actions/setup-go@v5
with: with:
go-version: "1.22.2" go-version: "1.21.7"
- name: "Install bats dependencies" - name: "Install bats dependencies"
env: env:
@ -77,8 +77,7 @@ jobs:
if: ${{ always() }} if: ${{ always() }}
- name: Upload crowdsec coverage to codecov - name: Upload crowdsec coverage to codecov
uses: codecov/codecov-action@v4 uses: codecov/codecov-action@v3
with: with:
files: ./coverage-bats.out files: ./coverage-bats.out
flags: bats flags: bats
token: ${{ secrets.CODECOV_TOKEN }}

View file

@ -35,12 +35,12 @@ jobs:
- name: "Set up Go" - name: "Set up Go"
uses: actions/setup-go@v5 uses: actions/setup-go@v5
with: with:
go-version: "1.22.2" go-version: "1.21.7"
- name: Build - name: Build
run: make windows_installer BUILD_RE2_WASM=1 run: make windows_installer BUILD_RE2_WASM=1
- name: Upload MSI - name: Upload MSI
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v3
with: with:
path: crowdsec*msi path: crowdsec*msi
name: crowdsec.msi name: crowdsec.msi

View file

@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
# Drafts your next Release notes as Pull Requests are merged into "master" # Drafts your next Release notes as Pull Requests are merged into "master"
- uses: release-drafter/release-drafter@v6 - uses: release-drafter/release-drafter@v5
with: with:
config-name: release-drafter.yml config-name: release-drafter.yml
# (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml # (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml

View file

@ -52,7 +52,7 @@ jobs:
- name: "Set up Go" - name: "Set up Go"
uses: actions/setup-go@v5 uses: actions/setup-go@v5
with: with:
go-version: "1.22.2" go-version: "1.21.7"
cache-dependency-path: "**/go.sum" cache-dependency-path: "**/go.sum"
# Initializes the CodeQL tools for scanning. # Initializes the CodeQL tools for scanning.
@ -68,7 +68,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below) # If this step fails, then you should remove it and run the build manually (see below)
# - name: Autobuild # - name: Autobuild
# uses: github/codeql-action/autobuild@v3 # uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell. # Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl # 📚 https://git.io/JvXDl

View file

@ -59,15 +59,15 @@ jobs:
cd docker/test cd docker/test
python -m pip install --upgrade pipenv wheel python -m pip install --upgrade pipenv wheel
#- name: "Cache virtualenvs" - name: "Cache virtualenvs"
# id: cache-pipenv id: cache-pipenv
# uses: actions/cache@v4 uses: actions/cache@v4
# with: with:
# path: ~/.local/share/virtualenvs path: ~/.local/share/virtualenvs
# key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }} key: ${{ runner.os }}-pipenv-${{ hashFiles('**/Pipfile.lock') }}
- name: "Install dependencies" - name: "Install dependencies"
#if: steps.cache-pipenv.outputs.cache-hit != 'true' if: steps.cache-pipenv.outputs.cache-hit != 'true'
run: | run: |
cd docker/test cd docker/test
pipenv install --deploy pipenv install --deploy

View file

@ -34,7 +34,7 @@ jobs:
- name: "Set up Go" - name: "Set up Go"
uses: actions/setup-go@v5 uses: actions/setup-go@v5
with: with:
go-version: "1.22.2" go-version: "1.21.7"
- name: Build - name: Build
run: | run: |
@ -48,16 +48,15 @@ jobs:
cat out.txt | sed 's/ *coverage:.*of statements in.*//' | richgo testfilter cat out.txt | sed 's/ *coverage:.*of statements in.*//' | richgo testfilter
- name: Upload unit coverage to Codecov - name: Upload unit coverage to Codecov
uses: codecov/codecov-action@v4 uses: codecov/codecov-action@v3
with: with:
files: coverage.out files: coverage.out
flags: unit-windows flags: unit-windows
token: ${{ secrets.CODECOV_TOKEN }}
- name: golangci-lint - name: golangci-lint
uses: golangci/golangci-lint-action@v4 uses: golangci/golangci-lint-action@v3
with: with:
version: v1.57 version: v1.55
args: --issues-exit-code=1 --timeout 10m args: --issues-exit-code=1 --timeout 10m
only-new-issues: false only-new-issues: false
# the cache is already managed above, enabling it here # the cache is already managed above, enabling it here

View file

@ -126,7 +126,7 @@ jobs:
- name: "Set up Go" - name: "Set up Go"
uses: actions/setup-go@v5 uses: actions/setup-go@v5
with: with:
go-version: "1.22.2" go-version: "1.21.7"
- name: Create localstack streams - name: Create localstack streams
run: | run: |
@ -149,16 +149,15 @@ jobs:
make go-acc | sed 's/ *coverage:.*of statements in.*//' | richgo testfilter make go-acc | sed 's/ *coverage:.*of statements in.*//' | richgo testfilter
- name: Upload unit coverage to Codecov - name: Upload unit coverage to Codecov
uses: codecov/codecov-action@v4 uses: codecov/codecov-action@v3
with: with:
files: coverage.out files: coverage.out
flags: unit-linux flags: unit-linux
token: ${{ secrets.CODECOV_TOKEN }}
- name: golangci-lint - name: golangci-lint
uses: golangci/golangci-lint-action@v4 uses: golangci/golangci-lint-action@v3
with: with:
version: v1.57 version: v1.55
args: --issues-exit-code=1 --timeout 10m args: --issues-exit-code=1 --timeout 10m
only-new-issues: false only-new-issues: false
# the cache is already managed above, enabling it here # the cache is already managed above, enabling it here

View file

@ -23,7 +23,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
# Semantic versioning, lock to different version: v2, v2.0 or a commit hash. # Semantic versioning, lock to different version: v2, v2.0 or a commit hash.
- uses: BirthdayResearch/oss-governance-bot@v4 - uses: BirthdayResearch/oss-governance-bot@v3
with: with:
# You can use a PAT to post a comment/label/status so that it shows up as a user instead of github-actions # You can use a PAT to post a comment/label/status so that it shows up as a user instead of github-actions
github-token: ${{secrets.GITHUB_TOKEN}} # optional, default to '${{ github.token }}' github-token: ${{secrets.GITHUB_TOKEN}} # optional, default to '${{ github.token }}'

View file

@ -25,7 +25,7 @@ jobs:
- name: "Set up Go" - name: "Set up Go"
uses: actions/setup-go@v5 uses: actions/setup-go@v5
with: with:
go-version: "1.22.2" go-version: "1.21.7"
- name: Build the binaries - name: Build the binaries
run: | run: |

5
.gitignore vendored
View file

@ -6,10 +6,7 @@
*.dylib *.dylib
*~ *~
.pc .pc
# IDEs
.vscode .vscode
.idea
# If vendor is included, allow prebuilt (wasm?) libraries. # If vendor is included, allow prebuilt (wasm?) libraries.
!vendor/**/*.so !vendor/**/*.so
@ -37,7 +34,7 @@ test/coverage/*
*.swo *.swo
# Dependencies are not vendored by default, but a tarball is created by "make vendor" # Dependencies are not vendored by default, but a tarball is created by "make vendor"
# and provided in the release. Used by gentoo, etc. # and provided in the release. Used by freebsd, gentoo, etc.
vendor/ vendor/
vendor.tgz vendor.tgz

View file

@ -1,9 +1,17 @@
# https://github.com/golangci/golangci-lint/blob/master/.golangci.reference.yml # https://github.com/golangci/golangci-lint/blob/master/.golangci.reference.yml
run:
skip-dirs:
- pkg/time/rate
skip-files:
- pkg/database/ent/generate.go
- pkg/yamlpatch/merge.go
- pkg/yamlpatch/merge_test.go
linters-settings: linters-settings:
cyclop: cyclop:
# lower this after refactoring # lower this after refactoring
max-complexity: 48 max-complexity: 53
gci: gci:
sections: sections:
@ -12,39 +20,29 @@ linters-settings:
- prefix(github.com/crowdsecurity) - prefix(github.com/crowdsecurity)
- prefix(github.com/crowdsecurity/crowdsec) - prefix(github.com/crowdsecurity/crowdsec)
gomoddirectives:
replace-allow-list:
- golang.org/x/time/rate
gocognit: gocognit:
# lower this after refactoring # lower this after refactoring
min-complexity: 145 min-complexity: 145
gocyclo: gocyclo:
# lower this after refactoring # lower this after refactoring
min-complexity: 48 min-complexity: 49
funlen: funlen:
# Checks the number of lines in a function. # Checks the number of lines in a function.
# If lower than 0, disable the check. # If lower than 0, disable the check.
# Default: 60 # Default: 60
# lower this after refactoring lines: -1
lines: 437
# Checks the number of statements in a function. # Checks the number of statements in a function.
# If lower than 0, disable the check. # If lower than 0, disable the check.
# Default: 40 # Default: 40
# lower this after refactoring statements: -1
statements: 122
govet: govet:
enable-all: true check-shadowing: true
disable:
- reflectvaluecompare
- fieldalignment
lll: lll:
# lower this after refactoring line-length: 140
line-length: 2607
maintidx: maintidx:
# raise this after refactoring # raise this after refactoring
@ -58,7 +56,7 @@ linters-settings:
min-complexity: 28 min-complexity: 28
nlreturn: nlreturn:
block-size: 5 block-size: 4
nolintlint: nolintlint:
allow-unused: false # report any unused nolint directives allow-unused: false # report any unused nolint directives
@ -70,53 +68,10 @@ linters-settings:
depguard: depguard:
rules: rules:
wrap: main:
deny: deny:
- pkg: "github.com/pkg/errors" - pkg: "github.com/pkg/errors"
desc: "errors.Wrap() is deprecated in favor of fmt.Errorf()" desc: "errors.Wrap() is deprecated in favor of fmt.Errorf()"
files:
- "!**/pkg/database/*.go"
- "!**/pkg/exprhelpers/*.go"
- "!**/pkg/acquisition/modules/appsec/appsec.go"
- "!**/pkg/acquisition/modules/loki/internal/lokiclient/loki_client.go"
- "!**/pkg/apiserver/controllers/v1/errors.go"
yaml:
files:
- "!**/pkg/acquisition/acquisition.go"
- "!**/pkg/acquisition/acquisition_test.go"
- "!**/pkg/acquisition/modules/appsec/appsec.go"
- "!**/pkg/acquisition/modules/cloudwatch/cloudwatch.go"
- "!**/pkg/acquisition/modules/docker/docker.go"
- "!**/pkg/acquisition/modules/file/file.go"
- "!**/pkg/acquisition/modules/journalctl/journalctl.go"
- "!**/pkg/acquisition/modules/kafka/kafka.go"
- "!**/pkg/acquisition/modules/kinesis/kinesis.go"
- "!**/pkg/acquisition/modules/kubernetesaudit/k8s_audit.go"
- "!**/pkg/acquisition/modules/loki/loki.go"
- "!**/pkg/acquisition/modules/loki/timestamp_test.go"
- "!**/pkg/acquisition/modules/s3/s3.go"
- "!**/pkg/acquisition/modules/syslog/syslog.go"
- "!**/pkg/acquisition/modules/wineventlog/wineventlog_windows.go"
- "!**/pkg/appsec/appsec.go"
- "!**/pkg/appsec/loader.go"
- "!**/pkg/csplugin/broker.go"
- "!**/pkg/csplugin/broker_test.go"
- "!**/pkg/dumps/bucket_dump.go"
- "!**/pkg/dumps/parser_dump.go"
- "!**/pkg/hubtest/coverage.go"
- "!**/pkg/hubtest/hubtest_item.go"
- "!**/pkg/hubtest/parser_assert.go"
- "!**/pkg/hubtest/scenario_assert.go"
- "!**/pkg/leakybucket/buckets_test.go"
- "!**/pkg/leakybucket/manager_load.go"
- "!**/pkg/metabase/metabase.go"
- "!**/pkg/parser/node.go"
- "!**/pkg/parser/node_test.go"
- "!**/pkg/parser/parsing_test.go"
- "!**/pkg/parser/stage.go"
deny:
- pkg: "gopkg.in/yaml.v2"
desc: "yaml.v2 is deprecated for new code in favor of yaml.v3"
wsl: wsl:
# Allow blocks to end with comments # Allow blocks to end with comments
@ -128,30 +83,23 @@ linters:
# #
# DEPRECATED by golangi-lint # DEPRECATED by golangi-lint
# #
- deadcode - deadcode # The owner seems to have abandoned the linter. Replaced by unused.
- exhaustivestruct - exhaustivestruct # The owner seems to have abandoned the linter. Replaced by exhaustruct.
- golint - golint # Golint differs from gofmt. Gofmt reformats Go source code, whereas golint prints out style mistakes
- ifshort - ifshort # Checks that your code uses short syntax for if-statements whenever possible
- interfacer - interfacer # Linter that suggests narrower interface types
- maligned - maligned # Tool to detect Go structs that would take less memory if their fields were sorted
- nosnakecase - nosnakecase # nosnakecase is a linter that detects snake case of variable naming and function name.
- scopelint - scopelint # Scopelint checks for unpinned variables in go programs
- structcheck - structcheck # The owner seems to have abandoned the linter. Replaced by unused.
- varcheck - varcheck # The owner seems to have abandoned the linter. Replaced by unused.
#
# Disabled until fixed for go 1.22
#
- copyloopvar # copyloopvar is a linter detects places where loop variables are copied
- intrange # intrange is a linter to find places where for loops could make use of an integer range.
# #
# Enabled # Enabled
# #
# - asasalint # check for pass []any as any in variadic func(...any) # - asasalint # check for pass []any as any in variadic func(...any)
# - asciicheck # checks that all code identifiers does not have non-ASCII symbols in the name # - asciicheck # Simple linter to check that your code does not contain non-ASCII identifiers
# - bidichk # Checks for dangerous unicode character sequences # - bidichk # Checks for dangerous unicode character sequences
# - bodyclose # checks whether HTTP response body is closed successfully # - bodyclose # checks whether HTTP response body is closed successfully
# - cyclop # checks function and package cyclomatic complexity # - cyclop # checks function and package cyclomatic complexity
@ -159,15 +107,13 @@ linters:
# - depguard # Go linter that checks if package imports are in a list of acceptable packages # - depguard # Go linter that checks if package imports are in a list of acceptable packages
# - dupword # checks for duplicate words in the source code # - dupword # checks for duplicate words in the source code
# - durationcheck # check for two durations multiplied together # - durationcheck # check for two durations multiplied together
# - errcheck # errcheck is a program for checking for unchecked errors in Go code. These unchecked errors can be critical bugs in some cases # - errcheck # Errcheck is a program for checking for unchecked errors in go programs. These unchecked errors can be critical bugs in some cases
# - errorlint # errorlint is a linter for that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13. # - errorlint # errorlint is a linter for that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.
# - execinquery # execinquery is a linter about query string checker in Query function which reads your Go src files and warning it finds # - execinquery # execinquery is a linter about query string checker in Query function which reads your Go src files and warning it finds
# - exportloopref # checks for pointers to enclosing loop variables # - exportloopref # checks for pointers to enclosing loop variables
# - funlen # Tool for detection of long functions # - funlen # Tool for detection of long functions
# - ginkgolinter # enforces standards of using ginkgo and gomega # - ginkgolinter # enforces standards of using ginkgo and gomega
# - gocheckcompilerdirectives # Checks that go compiler directive comments (//go:) are valid.
# - gochecknoinits # Checks that no init functions are present in Go code # - gochecknoinits # Checks that no init functions are present in Go code
# - gochecksumtype # Run exhaustiveness checks on Go "sum types"
# - gocognit # Computes and checks the cognitive complexity of functions # - gocognit # Computes and checks the cognitive complexity of functions
# - gocritic # Provides diagnostics that check for bugs, performance and style issues. # - gocritic # Provides diagnostics that check for bugs, performance and style issues.
# - gocyclo # Computes and checks the cyclomatic complexity of functions # - gocyclo # Computes and checks the cyclomatic complexity of functions
@ -175,63 +121,56 @@ linters:
# - gomoddirectives # Manage the use of 'replace', 'retract', and 'excludes' directives in go.mod. # - gomoddirectives # Manage the use of 'replace', 'retract', and 'excludes' directives in go.mod.
# - gomodguard # Allow and block list linter for direct Go module dependencies. This is different from depguard where there are different block types for example version constraints and module recommendations. # - gomodguard # Allow and block list linter for direct Go module dependencies. This is different from depguard where there are different block types for example version constraints and module recommendations.
# - goprintffuncname # Checks that printf-like functions are named with `f` at the end # - goprintffuncname # Checks that printf-like functions are named with `f` at the end
# - gosimple # (megacheck): Linter for Go source code that specializes in simplifying code # - gosimple # (megacheck): Linter for Go source code that specializes in simplifying a code
# - gosmopolitan # Report certain i18n/l10n anti-patterns in your Go codebase # - govet # (vet, vetshadow): Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string
# - govet # (vet, vetshadow): Vet examines Go source code and reports suspicious constructs. It is roughly the same as 'go vet' and uses its passes. # - grouper # An analyzer to analyze expression groups.
# - grouper # Analyze expression groups.
# - importas # Enforces consistent import aliases # - importas # Enforces consistent import aliases
# - ineffassign # Detects when assignments to existing variables are not used # - ineffassign # Detects when assignments to existing variables are not used
# - interfacebloat # A linter that checks the number of methods inside an interface. # - interfacebloat # A linter that checks the number of methods inside an interface.
# - lll # Reports long lines
# - loggercheck # (logrlint): Checks key value pairs for common logger libraries (kitlog,klog,logr,zap).
# - logrlint # Check logr arguments. # - logrlint # Check logr arguments.
# - maintidx # maintidx measures the maintainability index of each function. # - maintidx # maintidx measures the maintainability index of each function.
# - makezero # Finds slice declarations with non-zero initial length # - makezero # Finds slice declarations with non-zero initial length
# - mirror # reports wrong mirror patterns of bytes/strings usage # - misspell # Finds commonly misspelled English words in comments
# - misspell # Finds commonly misspelled English words # - nakedret # Finds naked returns in functions greater than a specified function length
# - nakedret # Checks that functions with naked returns are not longer than a maximum size (can be zero).
# - nestif # Reports deeply nested if statements # - nestif # Reports deeply nested if statements
# - nilerr # Finds the code that returns nil even if it checks that the error is not nil. # - nilerr # Finds the code that returns nil even if it checks that the error is not nil.
# - nolintlint # Reports ill-formed or insufficient nolint directives # - nolintlint # Reports ill-formed or insufficient nolint directives
# - nonamedreturns # Reports all named returns # - nonamedreturns # Reports all named returns
# - nosprintfhostport # Checks for misuse of Sprintf to construct a host with port in a URL. # - nosprintfhostport # Checks for misuse of Sprintf to construct a host with port in a URL.
# - perfsprint # Checks that fmt.Sprintf can be replaced with a faster alternative.
# - predeclared # find code that shadows one of Go's predeclared identifiers # - predeclared # find code that shadows one of Go's predeclared identifiers
# - reassign # Checks that package variables are not reassigned # - reassign # Checks that package variables are not reassigned
# - rowserrcheck # checks whether Rows.Err of rows is checked successfully # - rowserrcheck # checks whether Err of rows is checked successfully
# - sloglint # ensure consistent code style when using log/slog # - sqlclosecheck # Checks that sql.Rows and sql.Stmt are closed.
# - spancheck # Checks for mistakes with OpenTelemetry/Census spans. # - staticcheck # (megacheck): Staticcheck is a go vet on steroids, applying a ton of static analysis checks
# - sqlclosecheck # Checks that sql.Rows, sql.Stmt, sqlx.NamedStmt, pgx.Query are closed.
# - staticcheck # (megacheck): It's a set of rules from staticcheck. It's not the same thing as the staticcheck binary. The author of staticcheck doesn't support or approve the use of staticcheck as a library inside golangci-lint.
# - tenv # tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17
# - testableexamples # linter checks if examples are testable (have an expected output) # - testableexamples # linter checks if examples are testable (have an expected output)
# - testifylint # Checks usage of github.com/stretchr/testify. # - tenv # tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17
# - tparallel # tparallel detects inappropriate usage of t.Parallel() method in your Go test codes # - tparallel # tparallel detects inappropriate usage of t.Parallel() method in your Go test codes
# - typecheck # Like the front-end of a Go compiler, parses and type-checks Go code
# - unconvert # Remove unnecessary type conversions # - unconvert # Remove unnecessary type conversions
# - unused # (megacheck): Checks Go code for unused constants, variables, functions and types # - unused # (megacheck): Checks Go code for unused constants, variables, functions and types
# - usestdlibvars # A linter that detect the possibility to use variables/constants from the Go standard library. # - usestdlibvars # A linter that detect the possibility to use variables/constants from the Go standard library.
# - wastedassign # Finds wasted assignment statements # - wastedassign # wastedassign finds wasted assignment statements.
# - zerologlint # Detects the wrong usage of `zerolog` that a user forgets to dispatch with `Send` or `Msg`
# #
# Recommended? (easy) # Recommended? (easy)
# #
- dogsled # Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f()) - dogsled # Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())
- errchkjson # Checks types passed to the json encoding functions. Reports unsupported types and reports occasions, where the check for the returned error can be omitted. - errchkjson # Checks types passed to the json encoding functions. Reports unsupported types and optionally reports occasions, where the check for the returned error can be omitted.
- exhaustive # check exhaustiveness of enum switch statements - exhaustive # check exhaustiveness of enum switch statements
- gci # Gci control golang package import order and make it always deterministic. - gci # Gci control golang package import order and make it always deterministic.
- godot # Check if comments end in a period - godot # Check if comments end in a period
- gofmt # Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification - gofmt # Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification
- goimports # Check import statements are formatted according to the 'goimport' command. Reformat imports in autofix mode. - goimports # In addition to fixing imports, goimports also formats your code in the same style as gofmt.
- gosec # (gas): Inspects source code for security problems - gosec # (gas): Inspects source code for security problems
- inamedparam # reports interfaces with unnamed method parameters - inamedparam # reports interfaces with unnamed method parameters
- lll # Reports long lines
- musttag # enforce field tags in (un)marshaled structs - musttag # enforce field tags in (un)marshaled structs
- promlinter # Check Prometheus metrics naming via promlint - promlinter # Check Prometheus metrics naming via promlint
- protogetter # Reports direct reads from proto message fields when getters should be used - protogetter # Reports direct reads from proto message fields when getters should be used
- revive # Fast, configurable, extensible, flexible, and beautiful linter for Go. Drop-in replacement of golint. - revive # Fast, configurable, extensible, flexible, and beautiful linter for Go. Drop-in replacement of golint.
- tagalign # check that struct tags are well aligned - tagalign # check that struct tags are well aligned
- thelper # thelper detects tests helpers which is not start with t.Helper() method. - thelper # thelper detects golang test helpers without t.Helper() call and checks the consistency of test helpers
- wrapcheck # Checks that errors returned from external packages are wrapped - wrapcheck # Checks that errors returned from external packages are wrapped
# #
@ -239,12 +178,12 @@ linters:
# #
- containedctx # containedctx is a linter that detects struct contained context.Context field - containedctx # containedctx is a linter that detects struct contained context.Context field
- contextcheck # check whether the function uses a non-inherited context - contextcheck # check the function whether use a non-inherited context
- errname # Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`. - errname # Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`.
- gomnd # An analyzer to detect magic numbers. - gomnd # An analyzer to detect magic numbers.
- ireturn # Accept Interfaces, Return Concrete Types - ireturn # Accept Interfaces, Return Concrete Types
- nilnil # Checks that there is no simultaneous return of `nil` error and an invalid value. - nilnil # Checks that there is no simultaneous return of `nil` error and an invalid value.
- noctx # Finds sending http request without context.Context - noctx # noctx finds sending http request without context.Context
- unparam # Reports unused function parameters - unparam # Reports unused function parameters
# #
@ -253,8 +192,8 @@ linters:
- gofumpt # Gofumpt checks whether code was gofumpt-ed. - gofumpt # Gofumpt checks whether code was gofumpt-ed.
- nlreturn # nlreturn checks for a new line before return and branch statements to increase code clarity - nlreturn # nlreturn checks for a new line before return and branch statements to increase code clarity
- whitespace # Whitespace is a linter that checks for unnecessary newlines at the start and end of functions, if, for, etc. - whitespace # Tool for detection of leading and trailing whitespace
- wsl # add or remove empty lines - wsl # Whitespace Linter - Forces you to use empty lines!
# #
# Well intended, but not ready for this # Well intended, but not ready for this
@ -262,8 +201,8 @@ linters:
- dupl # Tool for code clone detection - dupl # Tool for code clone detection
- forcetypeassert # finds forced type assertions - forcetypeassert # finds forced type assertions
- godox # Tool for detection of FIXME, TODO and other comment keywords - godox # Tool for detection of FIXME, TODO and other comment keywords
- goerr113 # Go linter to check the errors handling expressions - goerr113 # Golang linter to check the errors handling expressions
- paralleltest # Detects missing usage of t.Parallel() method in your Go test - paralleltest # paralleltest detects missing usage of t.Parallel() method in your Go test
- testpackage # linter that makes you use a separate _test package - testpackage # linter that makes you use a separate _test package
# #
@ -271,7 +210,7 @@ linters:
# #
- exhaustruct # Checks if all structure fields are initialized - exhaustruct # Checks if all structure fields are initialized
- forbidigo # Forbids identifiers - forbidigo # Forbids identifiers
- gochecknoglobals # Check that no global variables exist. - gochecknoglobals # check that no global variables exist
- goconst # Finds repeated strings that could be replaced by a constant - goconst # Finds repeated strings that could be replaced by a constant
- stylecheck # Stylecheck is a replacement for golint - stylecheck # Stylecheck is a replacement for golint
- tagliatelle # Checks the struct tags. - tagliatelle # Checks the struct tags.
@ -288,21 +227,15 @@ issues:
# “Look, thats why theres rules, understand? So that you think before you # “Look, thats why theres rules, understand? So that you think before you
# break em.” ― Terry Pratchett # break em.” ― Terry Pratchett
exclude-dirs:
- pkg/time/rate
exclude-files:
- pkg/yamlpatch/merge.go
- pkg/yamlpatch/merge_test.go
exclude-generated-strict: true
max-issues-per-linter: 0 max-issues-per-linter: 0
max-same-issues: 0 max-same-issues: 0
exclude-rules: exclude-rules:
# Won't fix: # Won't fix:
- path: go.mod
text: "replacement are not allowed: golang.org/x/time/rate"
# `err` is often shadowed, we may continue to do it # `err` is often shadowed, we may continue to do it
- linters: - linters:
- govet - govet
@ -380,3 +313,13 @@ issues:
- linters: - linters:
- nonamedreturns - nonamedreturns
text: "named return .* with type .* found" text: "named return .* with type .* found"
#
# Will fix, might be trickier
#
# https://github.com/pkg/errors/issues/245
- linters:
- depguard
text: "import 'github.com/pkg/errors' is not allowed .*"

View file

@ -1,5 +1,5 @@
# vim: set ft=dockerfile: # vim: set ft=dockerfile:
FROM golang:1.22.2-alpine3.18 AS build FROM golang:1.21.7-alpine3.18 AS build
ARG BUILD_VERSION ARG BUILD_VERSION
@ -16,7 +16,7 @@ RUN apk add --no-cache git g++ gcc libc-dev make bash gettext binutils-gold core
cd re2-${RE2_VERSION} && \ cd re2-${RE2_VERSION} && \
make install && \ make install && \
echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \ echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \
go install github.com/mikefarah/yq/v4@v4.43.1 go install github.com/mikefarah/yq/v4@v4.40.4
COPY . . COPY . .
@ -43,12 +43,11 @@ COPY --from=build /go/bin/yq /usr/local/bin/crowdsec /usr/local/bin/cscli /usr/l
COPY --from=build /etc/crowdsec /staging/etc/crowdsec COPY --from=build /etc/crowdsec /staging/etc/crowdsec
COPY --from=build /go/src/crowdsec/docker/docker_start.sh / COPY --from=build /go/src/crowdsec/docker/docker_start.sh /
COPY --from=build /go/src/crowdsec/docker/config.yaml /staging/etc/crowdsec/config.yaml COPY --from=build /go/src/crowdsec/docker/config.yaml /staging/etc/crowdsec/config.yaml
COPY --from=build /var/lib/crowdsec /staging/var/lib/crowdsec
RUN yq -n '.url="http://0.0.0.0:8080"' | install -m 0600 /dev/stdin /staging/etc/crowdsec/local_api_credentials.yaml RUN yq -n '.url="http://0.0.0.0:8080"' | install -m 0600 /dev/stdin /staging/etc/crowdsec/local_api_credentials.yaml
ENTRYPOINT /bin/bash /docker_start.sh ENTRYPOINT /bin/bash /docker_start.sh
FROM slim as full FROM slim as plugins
# Due to the wizard using cp -n, we have to copy the config files directly from the source as -n does not exist in busybox cp # Due to the wizard using cp -n, we have to copy the config files directly from the source as -n does not exist in busybox cp
# The files are here for reference, as users will need to mount a new version to be actually able to use notifications # The files are here for reference, as users will need to mount a new version to be actually able to use notifications
@ -61,3 +60,11 @@ COPY --from=build \
/staging/etc/crowdsec/notifications/ /staging/etc/crowdsec/notifications/
COPY --from=build /usr/local/lib/crowdsec/plugins /usr/local/lib/crowdsec/plugins COPY --from=build /usr/local/lib/crowdsec/plugins /usr/local/lib/crowdsec/plugins
FROM slim as geoip
COPY --from=build /var/lib/crowdsec /staging/var/lib/crowdsec
FROM plugins as full
COPY --from=build /var/lib/crowdsec /staging/var/lib/crowdsec

View file

@ -1,5 +1,5 @@
# vim: set ft=dockerfile: # vim: set ft=dockerfile:
FROM golang:1.22.2-bookworm AS build FROM golang:1.21.7-bookworm AS build
ARG BUILD_VERSION ARG BUILD_VERSION
@ -21,7 +21,7 @@ RUN apt-get update && \
make && \ make && \
make install && \ make install && \
echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \ echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \
go install github.com/mikefarah/yq/v4@v4.43.1 go install github.com/mikefarah/yq/v4@v4.40.4
COPY . . COPY . .

View file

@ -202,10 +202,6 @@ cscli: goversion ## Build cscli
crowdsec: goversion ## Build crowdsec crowdsec: goversion ## Build crowdsec
@$(MAKE) -C $(CROWDSEC_FOLDER) build $(MAKE_FLAGS) @$(MAKE) -C $(CROWDSEC_FOLDER) build $(MAKE_FLAGS)
.PHONY: generate
generate: ## Generate code for the database and APIs
$(GO) generate ./pkg/database/ent
$(GO) generate ./pkg/models
.PHONY: testclean .PHONY: testclean
testclean: bats-clean ## Remove test artifacts testclean: bats-clean ## Remove test artifacts

View file

@ -15,13 +15,19 @@ pool:
stages: stages:
- stage: Build - stage: Build
jobs: jobs:
- job: Build - job:
displayName: "Build" displayName: "Build"
steps: steps:
- task: DotNetCoreCLI@2
displayName: "Install SignClient"
inputs:
command: 'custom'
custom: 'tool'
arguments: 'install --global SignClient --version 1.3.155'
- task: GoTool@0 - task: GoTool@0
displayName: "Install Go" displayName: "Install Go"
inputs: inputs:
version: '1.22.2' version: '1.21.7'
- pwsh: | - pwsh: |
choco install -y make choco install -y make
@ -33,14 +39,24 @@ stages:
#we are not calling make windows_installer because we want to sign the binaries before they are added to the MSI #we are not calling make windows_installer because we want to sign the binaries before they are added to the MSI
script: | script: |
make build BUILD_RE2_WASM=1 make build BUILD_RE2_WASM=1
- task: AzureKeyVault@2
inputs:
azureSubscription: 'Azure subscription 1(8a93ab40-7e99-445e-ad47-0f6a3e2ef546)'
KeyVaultName: 'CodeSigningSecrets'
SecretsFilter: 'CodeSigningUser,CodeSigningPassword'
RunAsPreJob: false
- task: DownloadSecureFile@1
inputs:
secureFile: appsettings.json
- pwsh: |
SignClient.exe Sign --name "crowdsec-binaries" `
--input "**/*.exe" --config (Join-Path -Path $(Agent.TempDirectory) -ChildPath "appsettings.json") `
--user $(CodeSigningUser) --secret '$(CodeSigningPassword)'
displayName: "Sign Crowdsec binaries + plugins"
- pwsh: | - pwsh: |
$build_version=$env:BUILD_SOURCEBRANCHNAME $build_version=$env:BUILD_SOURCEBRANCHNAME
#Override the version if it's set in the pipeline
if ( ${env:USERBUILDVERSION} -ne "")
{
$build_version = ${env:USERBUILDVERSION}
}
if ($build_version.StartsWith("v")) if ($build_version.StartsWith("v"))
{ {
$build_version = $build_version.Substring(1) $build_version = $build_version.Substring(1)
@ -53,112 +69,35 @@ stages:
displayName: GetCrowdsecVersion displayName: GetCrowdsecVersion
name: GetCrowdsecVersion name: GetCrowdsecVersion
- pwsh: | - pwsh: |
Get-ChildItem -Path .\cmd -Directory | ForEach-Object { .\make_installer.ps1 -version '$(GetCrowdsecVersion.BuildVersion)'
$dirName = $_.Name
Get-ChildItem -Path .\cmd\$dirName -File -Filter '*.exe' | ForEach-Object {
$fileName = $_.Name
$destDir = Join-Path $(Build.ArtifactStagingDirectory) cmd\$dirName
New-Item -ItemType Directory -Path $destDir -Force
Copy-Item -Path .\cmd\$dirName\$fileName -Destination $destDir
}
}
displayName: "Copy binaries to staging directory"
- task: PublishPipelineArtifact@1
inputs:
targetPath: '$(Build.ArtifactStagingDirectory)'
artifact: 'unsigned_binaries'
displayName: "Upload binaries artifact"
- stage: Sign
dependsOn: Build
variables:
- group: 'FOSS Build Variables'
- name: BuildVersion
value: $[ stageDependencies.Build.Build.outputs['GetCrowdsecVersion.BuildVersion'] ]
condition: succeeded()
jobs:
- job: Sign
displayName: "Sign"
steps:
- download: current
artifact: unsigned_binaries
displayName: "Download binaries artifact"
- task: CopyFiles@2
inputs:
SourceFolder: '$(Pipeline.Workspace)/unsigned_binaries'
TargetFolder: '$(Build.SourcesDirectory)'
displayName: "Copy binaries to workspace"
- task: DotNetCoreCLI@2
displayName: "Install SignTool tool"
inputs:
command: 'custom'
custom: 'tool'
arguments: install --global sign --version 0.9.0-beta.23127.3
- task: AzureKeyVault@2
displayName: "Get signing parameters"
inputs:
azureSubscription: "Azure subscription"
KeyVaultName: "$(KeyVaultName)"
SecretsFilter: "TenantId,ClientId,ClientSecret,Certificate,KeyVaultUrl"
- pwsh: |
sign code azure-key-vault `
"**/*.exe" `
--base-directory "$(Build.SourcesDirectory)/cmd/" `
--publisher-name "CrowdSec" `
--description "CrowdSec" `
--description-url "https://github.com/crowdsecurity/crowdsec" `
--azure-key-vault-tenant-id "$(TenantId)" `
--azure-key-vault-client-id "$(ClientId)" `
--azure-key-vault-client-secret "$(ClientSecret)" `
--azure-key-vault-certificate "$(Certificate)" `
--azure-key-vault-url "$(KeyVaultUrl)"
displayName: "Sign crowdsec binaries"
- pwsh: |
.\make_installer.ps1 -version '$(BuildVersion)'
displayName: "Build Crowdsec MSI" displayName: "Build Crowdsec MSI"
name: BuildMSI name: BuildMSI
- pwsh: | - pwsh: |
.\make_chocolatey.ps1 -version '$(BuildVersion)' .\make_chocolatey.ps1 -version '$(GetCrowdsecVersion.BuildVersion)'
displayName: "Build Chocolatey nupkg" displayName: "Build Chocolatey nupkg"
- pwsh: | - pwsh: |
sign code azure-key-vault ` SignClient.exe Sign --name "crowdsec-msi" `
"*.msi" ` --input "*.msi" --config (Join-Path -Path $(Agent.TempDirectory) -ChildPath "appsettings.json") `
--base-directory "$(Build.SourcesDirectory)" ` --user $(CodeSigningUser) --secret '$(CodeSigningPassword)'
--publisher-name "CrowdSec" ` displayName: "Sign Crowdsec MSI"
--description "CrowdSec" `
--description-url "https://github.com/crowdsecurity/crowdsec" ` - task: PublishBuildArtifacts@1
--azure-key-vault-tenant-id "$(TenantId)" `
--azure-key-vault-client-id "$(ClientId)" `
--azure-key-vault-client-secret "$(ClientSecret)" `
--azure-key-vault-certificate "$(Certificate)" `
--azure-key-vault-url "$(KeyVaultUrl)"
displayName: "Sign MSI package"
- pwsh: |
sign code azure-key-vault `
"*.nupkg" `
--base-directory "$(Build.SourcesDirectory)" `
--publisher-name "CrowdSec" `
--description "CrowdSec" `
--description-url "https://github.com/crowdsecurity/crowdsec" `
--azure-key-vault-tenant-id "$(TenantId)" `
--azure-key-vault-client-id "$(ClientId)" `
--azure-key-vault-client-secret "$(ClientSecret)" `
--azure-key-vault-certificate "$(Certificate)" `
--azure-key-vault-url "$(KeyVaultUrl)"
displayName: "Sign nuget package"
- task: PublishPipelineArtifact@1
inputs: inputs:
targetPath: '$(Build.SourcesDirectory)/crowdsec_$(BuildVersion).msi' PathtoPublish: '$(Build.Repository.LocalPath)\\crowdsec_$(GetCrowdsecVersion.BuildVersion).msi'
artifact: 'signed_msi_package' ArtifactName: 'crowdsec.msi'
displayName: "Upload signed MSI artifact" publishLocation: 'Container'
- task: PublishPipelineArtifact@1 displayName: "Upload MSI artifact"
- task: PublishBuildArtifacts@1
inputs: inputs:
targetPath: '$(Build.SourcesDirectory)/crowdsec.$(BuildVersion).nupkg' PathtoPublish: '$(Build.Repository.LocalPath)\\windows\\Chocolatey\\crowdsec\\crowdsec.$(GetCrowdsecVersion.BuildVersion).nupkg'
artifact: 'signed_nuget_package' ArtifactName: 'crowdsec.nupkg'
displayName: "Upload signed nuget artifact" publishLocation: 'Container'
displayName: "Upload nupkg artifact"
- stage: Publish - stage: Publish
dependsOn: Sign dependsOn: Build
jobs: jobs:
- deployment: "Publish" - deployment: "Publish"
displayName: "Publish to GitHub" displayName: "Publish to GitHub"
@ -180,7 +119,8 @@ stages:
assetUploadMode: 'replace' assetUploadMode: 'replace'
addChangeLog: false addChangeLog: false
isPreRelease: true #we force prerelease because the pipeline is invoked on tag creation, which happens when we do a prerelease isPreRelease: true #we force prerelease because the pipeline is invoked on tag creation, which happens when we do a prerelease
#the .. is an ugly hack, but I can't find the var that gives D:\a\1 ...
assets: | assets: |
$(Pipeline.Workspace)/signed_msi_package/*.msi $(Build.ArtifactStagingDirectory)\..\crowdsec.msi/*.msi
$(Pipeline.Workspace)/signed_nuget_package/*.nupkg $(Build.ArtifactStagingDirectory)\..\crowdsec.nupkg/*.nupkg
condition: ne(variables['GetLatestPrelease.LatestPreRelease'], '') condition: ne(variables['GetLatestPrelease.LatestPreRelease'], '')

View file

@ -4,7 +4,6 @@ import (
"context" "context"
"encoding/csv" "encoding/csv"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"net/url" "net/url"
"os" "os"
@ -17,7 +16,7 @@ import (
"github.com/go-openapi/strfmt" "github.com/go-openapi/strfmt"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v2"
"github.com/crowdsecurity/go-cs-lib/version" "github.com/crowdsecurity/go-cs-lib/version"
@ -178,9 +177,9 @@ func (cli *cliAlerts) displayOneAlert(alert *models.Alert, withDetail bool) erro
return nil return nil
} }
type cliAlerts struct { type cliAlerts struct{
client *apiclient.ApiClient client *apiclient.ApiClient
cfg configGetter cfg configGetter
} }
func NewCLIAlerts(getconfig configGetter) *cliAlerts { func NewCLIAlerts(getconfig configGetter) *cliAlerts {
@ -205,7 +204,6 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
if err != nil { if err != nil {
return fmt.Errorf("parsing api url %s: %w", apiURL, err) return fmt.Errorf("parsing api url %s: %w", apiURL, err)
} }
cli.client, err = apiclient.NewClient(&apiclient.Config{ cli.client, err = apiclient.NewClient(&apiclient.Config{
MachineID: cfg.API.Client.Credentials.Login, MachineID: cfg.API.Client.Credentials.Login,
Password: strfmt.Password(cfg.API.Client.Credentials.Password), Password: strfmt.Password(cfg.API.Client.Credentials.Password),
@ -213,6 +211,7 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
URL: apiURL, URL: apiURL,
VersionPrefix: "v1", VersionPrefix: "v1",
}) })
if err != nil { if err != nil {
return fmt.Errorf("new api client: %w", err) return fmt.Errorf("new api client: %w", err)
} }
@ -230,7 +229,7 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
} }
func (cli *cliAlerts) NewListCmd() *cobra.Command { func (cli *cliAlerts) NewListCmd() *cobra.Command {
alertListFilter := apiclient.AlertsListOpts{ var alertListFilter = apiclient.AlertsListOpts{
ScopeEquals: new(string), ScopeEquals: new(string),
ValueEquals: new(string), ValueEquals: new(string),
ScenarioEquals: new(string), ScenarioEquals: new(string),
@ -254,10 +253,8 @@ func (cli *cliAlerts) NewListCmd() *cobra.Command {
Example: `cscli alerts list Example: `cscli alerts list
cscli alerts list --ip 1.2.3.4 cscli alerts list --ip 1.2.3.4
cscli alerts list --range 1.2.3.0/24 cscli alerts list --range 1.2.3.0/24
cscli alerts list --origin lists
cscli alerts list -s crowdsecurity/ssh-bf cscli alerts list -s crowdsecurity/ssh-bf
cscli alerts list --type ban`, cscli alerts list --type ban`,
Long: `List alerts with optional filters`,
DisableAutoGenTag: true, DisableAutoGenTag: true,
RunE: func(cmd *cobra.Command, _ []string) error { RunE: func(cmd *cobra.Command, _ []string) error {
if err := manageCliDecisionAlerts(alertListFilter.IPEquals, alertListFilter.RangeEquals, if err := manageCliDecisionAlerts(alertListFilter.IPEquals, alertListFilter.RangeEquals,
@ -361,10 +358,10 @@ func (cli *cliAlerts) NewDeleteCmd() *cobra.Command {
var ( var (
ActiveDecision *bool ActiveDecision *bool
AlertDeleteAll bool AlertDeleteAll bool
delAlertByID string delAlertByID string
) )
alertDeleteFilter := apiclient.AlertsDeleteOpts{ var alertDeleteFilter = apiclient.AlertsDeleteOpts{
ScopeEquals: new(string), ScopeEquals: new(string),
ValueEquals: new(string), ValueEquals: new(string),
ScenarioEquals: new(string), ScenarioEquals: new(string),
@ -392,7 +389,7 @@ cscli alerts delete -s crowdsecurity/ssh-bf"`,
*alertDeleteFilter.ScenarioEquals == "" && *alertDeleteFilter.IPEquals == "" && *alertDeleteFilter.ScenarioEquals == "" && *alertDeleteFilter.IPEquals == "" &&
*alertDeleteFilter.RangeEquals == "" && delAlertByID == "" { *alertDeleteFilter.RangeEquals == "" && delAlertByID == "" {
_ = cmd.Usage() _ = cmd.Usage()
return errors.New("at least one filter or --all must be specified") return fmt.Errorf("at least one filter or --all must be specified")
} }
return nil return nil
@ -452,7 +449,7 @@ cscli alerts delete -s crowdsecurity/ssh-bf"`,
return nil return nil
}, },
} }
flags := cmd.Flags() flags := cmd.Flags()
flags.SortFlags = false flags.SortFlags = false
flags.StringVar(alertDeleteFilter.ScopeEquals, "scope", "", "the scope (ie. ip,range)") flags.StringVar(alertDeleteFilter.ScopeEquals, "scope", "", "the scope (ie. ip,range)")
@ -479,7 +476,7 @@ func (cli *cliAlerts) NewInspectCmd() *cobra.Command {
cfg := cli.cfg() cfg := cli.cfg()
if len(args) == 0 { if len(args) == 0 {
printHelp(cmd) printHelp(cmd)
return errors.New("missing alert_id") return fmt.Errorf("missing alert_id")
} }
for _, alertID := range args { for _, alertID := range args {
id, err := strconv.Atoi(alertID) id, err := strconv.Atoi(alertID)
@ -523,7 +520,7 @@ func (cli *cliAlerts) NewInspectCmd() *cobra.Command {
func (cli *cliAlerts) NewFlushCmd() *cobra.Command { func (cli *cliAlerts) NewFlushCmd() *cobra.Command {
var ( var (
maxItems int maxItems int
maxAge string maxAge string
) )
cmd := &cobra.Command{ cmd := &cobra.Command{

View file

@ -259,7 +259,7 @@ func (cli *cliBouncers) prune(duration time.Duration, force bool) error {
} }
} }
bouncers, err := cli.db.QueryBouncersLastPulltimeLT(time.Now().UTC().Add(-duration)) bouncers, err := cli.db.QueryBouncersLastPulltimeLT(time.Now().UTC().Add(duration))
if err != nil { if err != nil {
return fmt.Errorf("unable to query bouncers: %w", err) return fmt.Errorf("unable to query bouncers: %w", err)
} }

View file

@ -10,7 +10,7 @@ import (
"github.com/go-openapi/strfmt" "github.com/go-openapi/strfmt"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v2"
"github.com/crowdsecurity/go-cs-lib/version" "github.com/crowdsecurity/go-cs-lib/version"
@ -85,6 +85,7 @@ func (cli *cliCapi) register(capiUserPrefix string, outputFile string) error {
URL: apiurl, URL: apiurl,
VersionPrefix: CAPIURLPrefix, VersionPrefix: CAPIURLPrefix,
}, nil) }, nil)
if err != nil { if err != nil {
return fmt.Errorf("api client register ('%s'): %w", types.CAPIBaseURL, err) return fmt.Errorf("api client register ('%s'): %w", types.CAPIBaseURL, err)
} }
@ -174,7 +175,7 @@ func (cli *cliCapi) status() error {
return err return err
} }
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
if err != nil { if err != nil {
return fmt.Errorf("failed to get scenarios: %w", err) return fmt.Errorf("failed to get scenarios: %w", err)
} }

View file

@ -10,15 +10,13 @@ import (
"github.com/sanity-io/litter" "github.com/sanity-io/litter"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v2"
"github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/exprhelpers" "github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
) )
func (cli *cliConfig) showKey(key string) error { func showConfigKey(key string) error {
cfg := cli.cfg()
type Env struct { type Env struct {
Config *csconfig.Config Config *csconfig.Config
} }
@ -32,15 +30,15 @@ func (cli *cliConfig) showKey(key string) error {
return err return err
} }
output, err := expr.Run(program, Env{Config: cfg}) output, err := expr.Run(program, Env{Config: csConfig})
if err != nil { if err != nil {
return err return err
} }
switch cfg.Cscli.Output { switch csConfig.Cscli.Output {
case "human", "raw": case "human", "raw":
// Don't use litter for strings, it adds quotes // Don't use litter for strings, it adds quotes
// that would break compatibility with previous versions // that we didn't have before
switch output.(type) { switch output.(type) {
case string: case string:
fmt.Println(output) fmt.Println(output)
@ -53,14 +51,13 @@ func (cli *cliConfig) showKey(key string) error {
return fmt.Errorf("failed to marshal configuration: %w", err) return fmt.Errorf("failed to marshal configuration: %w", err)
} }
fmt.Println(string(data)) fmt.Printf("%s\n", string(data))
} }
return nil return nil
} }
func (cli *cliConfig) template() string { var configShowTemplate = `Global:
return `Global:
{{- if .ConfigPaths }} {{- if .ConfigPaths }}
- Configuration Folder : {{.ConfigPaths.ConfigDir}} - Configuration Folder : {{.ConfigPaths.ConfigDir}}
@ -103,7 +100,6 @@ API Client:
{{- if .API.Server }} {{- if .API.Server }}
Local API Server{{if and .API.Server.Enable (not (ValueBool .API.Server.Enable))}} (disabled){{end}}: Local API Server{{if and .API.Server.Enable (not (ValueBool .API.Server.Enable))}} (disabled){{end}}:
- Listen URL : {{.API.Server.ListenURI}} - Listen URL : {{.API.Server.ListenURI}}
- Listen Socket : {{.API.Server.ListenSocket}}
- Profile File : {{.API.Server.ProfilesPath}} - Profile File : {{.API.Server.ProfilesPath}}
{{- if .API.Server.TLS }} {{- if .API.Server.TLS }}
@ -185,11 +181,19 @@ Central API:
{{- end }} {{- end }}
{{- end }} {{- end }}
` `
}
func (cli *cliConfig) show() error { func (cli *cliConfig) show(key string) error {
cfg := cli.cfg() cfg := cli.cfg()
if err := cfg.LoadAPIClient(); err != nil {
log.Errorf("failed to load API client configuration: %s", err)
// don't return, we can still show the configuration
}
if key != "" {
return showConfigKey(key)
}
switch cfg.Cscli.Output { switch cfg.Cscli.Output {
case "human": case "human":
// The tests on .Enable look funny because the option has a true default which has // The tests on .Enable look funny because the option has a true default which has
@ -200,7 +204,7 @@ func (cli *cliConfig) show() error {
"ValueBool": func(b *bool) bool { return b != nil && *b }, "ValueBool": func(b *bool) bool { return b != nil && *b },
} }
tmp, err := template.New("config").Funcs(funcs).Parse(cli.template()) tmp, err := template.New("config").Funcs(funcs).Parse(configShowTemplate)
if err != nil { if err != nil {
return err return err
} }
@ -215,14 +219,14 @@ func (cli *cliConfig) show() error {
return fmt.Errorf("failed to marshal configuration: %w", err) return fmt.Errorf("failed to marshal configuration: %w", err)
} }
fmt.Println(string(data)) fmt.Printf("%s\n", string(data))
case "raw": case "raw":
data, err := yaml.Marshal(cfg) data, err := yaml.Marshal(cfg)
if err != nil { if err != nil {
return fmt.Errorf("failed to marshal configuration: %w", err) return fmt.Errorf("failed to marshal configuration: %w", err)
} }
fmt.Println(string(data)) fmt.Printf("%s\n", string(data))
} }
return nil return nil
@ -238,16 +242,7 @@ func (cli *cliConfig) newShowCmd() *cobra.Command {
Args: cobra.ExactArgs(0), Args: cobra.ExactArgs(0),
DisableAutoGenTag: true, DisableAutoGenTag: true,
RunE: func(_ *cobra.Command, _ []string) error { RunE: func(_ *cobra.Command, _ []string) error {
if err := cli.cfg().LoadAPIClient(); err != nil { return cli.show(key)
log.Errorf("failed to load API client configuration: %s", err)
// don't return, we can still show the configuration
}
if key != "" {
return cli.showKey(key)
}
return cli.show()
}, },
} }

View file

@ -4,11 +4,9 @@ import (
"context" "context"
"encoding/csv" "encoding/csv"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"net/url" "net/url"
"os" "os"
"strconv"
"strings" "strings"
"github.com/fatih/color" "github.com/fatih/color"
@ -38,7 +36,7 @@ func NewCLIConsole(cfg configGetter) *cliConsole {
} }
func (cli *cliConsole) NewCommand() *cobra.Command { func (cli *cliConsole) NewCommand() *cobra.Command {
cmd := &cobra.Command{ var cmd = &cobra.Command{
Use: "console [action]", Use: "console [action]",
Short: "Manage interaction with Crowdsec console (https://app.crowdsec.net)", Short: "Manage interaction with Crowdsec console (https://app.crowdsec.net)",
Args: cobra.MinimumNArgs(1), Args: cobra.MinimumNArgs(1),
@ -103,7 +101,7 @@ After running this command your will need to validate the enrollment in the weba
return err return err
} }
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
if err != nil { if err != nil {
return fmt.Errorf("failed to get installed scenarios: %w", err) return fmt.Errorf("failed to get installed scenarios: %w", err)
} }
@ -205,7 +203,7 @@ Enable given information push to the central API. Allows to empower the console`
log.Infof("All features have been enabled successfully") log.Infof("All features have been enabled successfully")
} else { } else {
if len(args) == 0 { if len(args) == 0 {
return errors.New("you must specify at least one feature to enable") return fmt.Errorf("you must specify at least one feature to enable")
} }
if err := cli.setConsoleOpts(args, true); err != nil { if err := cli.setConsoleOpts(args, true); err != nil {
return err return err
@ -290,11 +288,11 @@ func (cli *cliConsole) newStatusCmd() *cobra.Command {
} }
rows := [][]string{ rows := [][]string{
{csconfig.SEND_MANUAL_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareManualDecisions)}, {csconfig.SEND_MANUAL_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareManualDecisions)},
{csconfig.SEND_CUSTOM_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareCustomScenarios)}, {csconfig.SEND_CUSTOM_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareCustomScenarios)},
{csconfig.SEND_TAINTED_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareTaintedScenarios)}, {csconfig.SEND_TAINTED_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareTaintedScenarios)},
{csconfig.SEND_CONTEXT, strconv.FormatBool(*consoleCfg.ShareContext)}, {csconfig.SEND_CONTEXT, fmt.Sprintf("%t", *consoleCfg.ShareContext)},
{csconfig.CONSOLE_MANAGEMENT, strconv.FormatBool(*consoleCfg.ConsoleManagement)}, {csconfig.CONSOLE_MANAGEMENT, fmt.Sprintf("%t", *consoleCfg.ConsoleManagement)},
} }
for _, row := range rows { for _, row := range rows {
err = csvwriter.Write(row) err = csvwriter.Write(row)

View file

@ -9,6 +9,7 @@ import (
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
) )
/*help to copy the file, ioutil doesn't offer the feature*/ /*help to copy the file, ioutil doesn't offer the feature*/
func copyFileContents(src, dst string) (err error) { func copyFileContents(src, dst string) (err error) {
@ -68,7 +69,6 @@ func CopyFile(sourceSymLink, destinationFile string) error {
if !(destinationFileStat.Mode().IsRegular()) { if !(destinationFileStat.Mode().IsRegular()) {
return fmt.Errorf("copyFile: non-regular destination file %s (%q)", destinationFileStat.Name(), destinationFileStat.Mode().String()) return fmt.Errorf("copyFile: non-regular destination file %s (%q)", destinationFileStat.Name(), destinationFileStat.Mode().String())
} }
if os.SameFile(sourceFileStat, destinationFileStat) { if os.SameFile(sourceFileStat, destinationFileStat) {
return err return err
} }
@ -80,3 +80,4 @@ func CopyFile(sourceSymLink, destinationFile string) error {
return err return err
} }

View file

@ -4,7 +4,6 @@ import (
"context" "context"
"encoding/csv" "encoding/csv"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"net/url" "net/url"
"os" "os"
@ -196,7 +195,7 @@ func (cli *cliDecisions) newListCmd() *cobra.Command {
Example: `cscli decisions list -i 1.2.3.4 Example: `cscli decisions list -i 1.2.3.4
cscli decisions list -r 1.2.3.0/24 cscli decisions list -r 1.2.3.0/24
cscli decisions list -s crowdsecurity/ssh-bf cscli decisions list -s crowdsecurity/ssh-bf
cscli decisions list --origin lists --scenario list_name cscli decisions list -t ban
`, `,
Args: cobra.ExactArgs(0), Args: cobra.ExactArgs(0),
DisableAutoGenTag: true, DisableAutoGenTag: true,
@ -347,7 +346,7 @@ cscli decisions add --scope username --value foobar
addScope = types.Range addScope = types.Range
} else if addValue == "" { } else if addValue == "" {
printHelp(cmd) printHelp(cmd)
return errors.New("missing arguments, a value is required (--ip, --range or --scope and --value)") return fmt.Errorf("missing arguments, a value is required (--ip, --range or --scope and --value)")
} }
if addReason == "" { if addReason == "" {
@ -372,7 +371,7 @@ cscli decisions add --scope username --value foobar
Scenario: &addReason, Scenario: &addReason,
ScenarioVersion: &empty, ScenarioVersion: &empty,
Simulated: &simulated, Simulated: &simulated,
// setting empty scope/value broke plugins, and it didn't seem to be needed anymore w/ latest papi changes //setting empty scope/value broke plugins, and it didn't seem to be needed anymore w/ latest papi changes
Source: &models.Source{ Source: &models.Source{
AsName: empty, AsName: empty,
AsNumber: empty, AsNumber: empty,
@ -412,7 +411,7 @@ cscli decisions add --scope username --value foobar
} }
func (cli *cliDecisions) newDeleteCmd() *cobra.Command { func (cli *cliDecisions) newDeleteCmd() *cobra.Command {
delFilter := apiclient.DecisionsDeleteOpts{ var delFilter = apiclient.DecisionsDeleteOpts{
ScopeEquals: new(string), ScopeEquals: new(string),
ValueEquals: new(string), ValueEquals: new(string),
TypeEquals: new(string), TypeEquals: new(string),
@ -437,7 +436,6 @@ func (cli *cliDecisions) newDeleteCmd() *cobra.Command {
cscli decisions delete -i 1.2.3.4 cscli decisions delete -i 1.2.3.4
cscli decisions delete --id 42 cscli decisions delete --id 42
cscli decisions delete --type captcha cscli decisions delete --type captcha
cscli decisions delete --origin lists --scenario list_name
`, `,
/*TBD : refaire le Long/Example*/ /*TBD : refaire le Long/Example*/
PreRunE: func(cmd *cobra.Command, _ []string) error { PreRunE: func(cmd *cobra.Command, _ []string) error {
@ -449,7 +447,7 @@ cscli decisions delete --origin lists --scenario list_name
*delFilter.RangeEquals == "" && *delFilter.ScenarioEquals == "" && *delFilter.RangeEquals == "" && *delFilter.ScenarioEquals == "" &&
*delFilter.OriginEquals == "" && delDecisionID == "" { *delFilter.OriginEquals == "" && delDecisionID == "" {
cmd.Usage() cmd.Usage()
return errors.New("at least one filter or --all must be specified") return fmt.Errorf("at least one filter or --all must be specified")
} }
return nil return nil

View file

@ -5,7 +5,6 @@ import (
"bytes" "bytes"
"context" "context"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"io" "io"
"os" "os"
@ -82,7 +81,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
} }
if defaultDuration == "" { if defaultDuration == "" {
return errors.New("--duration cannot be empty") return fmt.Errorf("--duration cannot be empty")
} }
defaultScope, err := flags.GetString("scope") defaultScope, err := flags.GetString("scope")
@ -91,7 +90,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
} }
if defaultScope == "" { if defaultScope == "" {
return errors.New("--scope cannot be empty") return fmt.Errorf("--scope cannot be empty")
} }
defaultReason, err := flags.GetString("reason") defaultReason, err := flags.GetString("reason")
@ -100,7 +99,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
} }
if defaultReason == "" { if defaultReason == "" {
return errors.New("--reason cannot be empty") return fmt.Errorf("--reason cannot be empty")
} }
defaultType, err := flags.GetString("type") defaultType, err := flags.GetString("type")
@ -109,7 +108,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
} }
if defaultType == "" { if defaultType == "" {
return errors.New("--type cannot be empty") return fmt.Errorf("--type cannot be empty")
} }
batchSize, err := flags.GetInt("batch") batchSize, err := flags.GetInt("batch")
@ -137,7 +136,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
} }
if format == "" { if format == "" {
return errors.New("unable to guess format from file extension, please provide a format with --format flag") return fmt.Errorf("unable to guess format from file extension, please provide a format with --format flag")
} }
if input == "-" { if input == "-" {
@ -236,6 +235,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
return nil return nil
} }
func (cli *cliDecisions) newImportCmd() *cobra.Command { func (cli *cliDecisions) newImportCmd() *cobra.Command {
cmd := &cobra.Command{ cmd := &cobra.Command{
Use: "import [options]", Use: "import [options]",

View file

@ -39,10 +39,8 @@ id: %s
title: %s title: %s
--- ---
` `
name := filepath.Base(filename) name := filepath.Base(filename)
base := strings.TrimSuffix(name, filepath.Ext(name)) base := strings.TrimSuffix(name, filepath.Ext(name))
return fmt.Sprintf(header, base, strings.ReplaceAll(base, "_", " ")) return fmt.Sprintf(header, base, strings.ReplaceAll(base, "_", " "))
} }

View file

@ -83,7 +83,7 @@ tail -n 5 myfile.log | cscli explain --type nginx -f -
PersistentPreRunE: func(_ *cobra.Command, _ []string) error { PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
fileInfo, _ := os.Stdin.Stat() fileInfo, _ := os.Stdin.Stat()
if cli.flags.logFile == "-" && ((fileInfo.Mode() & os.ModeCharDevice) == os.ModeCharDevice) { if cli.flags.logFile == "-" && ((fileInfo.Mode() & os.ModeCharDevice) == os.ModeCharDevice) {
return errors.New("the option -f - is intended to work with pipes") return fmt.Errorf("the option -f - is intended to work with pipes")
} }
return nil return nil
@ -160,22 +160,18 @@ func (cli *cliExplain) run() error {
} else if logFile == "-" { } else if logFile == "-" {
reader := bufio.NewReader(os.Stdin) reader := bufio.NewReader(os.Stdin)
errCount := 0 errCount := 0
for { for {
input, err := reader.ReadBytes('\n') input, err := reader.ReadBytes('\n')
if err != nil && errors.Is(err, io.EOF) { if err != nil && errors.Is(err, io.EOF) {
break break
} }
if len(input) > 1 { if len(input) > 1 {
_, err = f.Write(input) _, err = f.Write(input)
} }
if err != nil || len(input) <= 1 { if err != nil || len(input) <= 1 {
errCount++ errCount++
} }
} }
if errCount > 0 { if errCount > 0 {
log.Warnf("Failed to write %d lines to %s", errCount, tmpFile) log.Warnf("Failed to write %d lines to %s", errCount, tmpFile)
} }
@ -211,7 +207,7 @@ func (cli *cliExplain) run() error {
} }
if dsn == "" { if dsn == "" {
return errors.New("no acquisition (--file or --dsn) provided, can't run cscli test") return fmt.Errorf("no acquisition (--file or --dsn) provided, can't run cscli test")
} }
cmdArgs := []string{"-c", ConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", dir, "-no-api"} cmdArgs := []string{"-c", ConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", dir, "-no-api"}

View file

@ -13,7 +13,7 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
type cliHub struct{ type cliHub struct {
cfg configGetter cfg configGetter
} }
@ -137,7 +137,7 @@ func (cli *cliHub) upgrade(force bool) error {
} }
for _, itemType := range cwhub.ItemTypes { for _, itemType := range cwhub.ItemTypes {
items, err := hub.GetInstalledItemsByType(itemType) items, err := hub.GetInstalledItems(itemType)
if err != nil { if err != nil {
return err return err
} }

View file

@ -13,9 +13,8 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLIAppsecConfig(cfg configGetter) *cliItem { func NewCLIAppsecConfig() *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.APPSEC_CONFIGS, name: cwhub.APPSEC_CONFIGS,
singular: "appsec-config", singular: "appsec-config",
oneOrMore: "appsec-config(s)", oneOrMore: "appsec-config(s)",
@ -47,7 +46,7 @@ cscli appsec-configs list crowdsecurity/vpatch`,
} }
} }
func NewCLIAppsecRule(cfg configGetter) *cliItem { func NewCLIAppsecRule() *cliItem {
inspectDetail := func(item *cwhub.Item) error { inspectDetail := func(item *cwhub.Item) error {
// Only show the converted rules in human mode // Only show the converted rules in human mode
if csConfig.Cscli.Output != "human" { if csConfig.Cscli.Output != "human" {
@ -58,11 +57,11 @@ func NewCLIAppsecRule(cfg configGetter) *cliItem {
yamlContent, err := os.ReadFile(item.State.LocalPath) yamlContent, err := os.ReadFile(item.State.LocalPath)
if err != nil { if err != nil {
return fmt.Errorf("unable to read file %s: %w", item.State.LocalPath, err) return fmt.Errorf("unable to read file %s : %s", item.State.LocalPath, err)
} }
if err := yaml.Unmarshal(yamlContent, &appsecRule); err != nil { if err := yaml.Unmarshal(yamlContent, &appsecRule); err != nil {
return fmt.Errorf("unable to unmarshal yaml file %s: %w", item.State.LocalPath, err) return fmt.Errorf("unable to unmarshal yaml file %s : %s", item.State.LocalPath, err)
} }
for _, ruleType := range appsec_rule.SupportedTypes() { for _, ruleType := range appsec_rule.SupportedTypes() {
@ -71,7 +70,7 @@ func NewCLIAppsecRule(cfg configGetter) *cliItem {
for _, rule := range appsecRule.Rules { for _, rule := range appsecRule.Rules {
convertedRule, _, err := rule.Convert(ruleType, appsecRule.Name) convertedRule, _, err := rule.Convert(ruleType, appsecRule.Name)
if err != nil { if err != nil {
return fmt.Errorf("unable to convert rule %s: %w", rule.Name, err) return fmt.Errorf("unable to convert rule %s : %s", rule.Name, err)
} }
fmt.Println(convertedRule) fmt.Println(convertedRule)
@ -89,7 +88,6 @@ func NewCLIAppsecRule(cfg configGetter) *cliItem {
} }
return &cliItem{ return &cliItem{
cfg: cfg,
name: "appsec-rules", name: "appsec-rules",
singular: "appsec-rule", singular: "appsec-rule",
oneOrMore: "appsec-rule(s)", oneOrMore: "appsec-rule(s)",

View file

@ -4,9 +4,8 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLICollection(cfg configGetter) *cliItem { func NewCLICollection() *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.COLLECTIONS, name: cwhub.COLLECTIONS,
singular: "collection", singular: "collection",
oneOrMore: "collection(s)", oneOrMore: "collection(s)",

View file

@ -4,9 +4,8 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLIContext(cfg configGetter) *cliItem { func NewCLIContext() *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.CONTEXTS, name: cwhub.CONTEXTS,
singular: "context", singular: "context",
oneOrMore: "context(s)", oneOrMore: "context(s)",

View file

@ -4,9 +4,8 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLIParser(cfg configGetter) *cliItem { func NewCLIParser() *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.PARSERS, name: cwhub.PARSERS,
singular: "parser", singular: "parser",
oneOrMore: "parser(s)", oneOrMore: "parser(s)",

View file

@ -4,9 +4,8 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLIPostOverflow(cfg configGetter) *cliItem { func NewCLIPostOverflow() *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.POSTOVERFLOWS, name: cwhub.POSTOVERFLOWS,
singular: "postoverflow", singular: "postoverflow",
oneOrMore: "postoverflow(s)", oneOrMore: "postoverflow(s)",

View file

@ -4,9 +4,8 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
) )
func NewCLIScenario(cfg configGetter) *cliItem { func NewCLIScenario() *cliItem {
return &cliItem{ return &cliItem{
cfg: cfg,
name: cwhub.SCENARIOS, name: cwhub.SCENARIOS,
singular: "scenario", singular: "scenario",
oneOrMore: "scenario(s)", oneOrMore: "scenario(s)",

View file

@ -14,7 +14,7 @@ import (
"github.com/fatih/color" "github.com/fatih/color"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v2"
"github.com/crowdsecurity/crowdsec/pkg/dumps" "github.com/crowdsecurity/crowdsec/pkg/dumps"
"github.com/crowdsecurity/crowdsec/pkg/emoji" "github.com/crowdsecurity/crowdsec/pkg/emoji"
@ -135,8 +135,7 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
// create empty nuclei template file // create empty nuclei template file
nucleiFileName := fmt.Sprintf("%s.yaml", testName) nucleiFileName := fmt.Sprintf("%s.yaml", testName)
nucleiFilePath := filepath.Join(testPath, nucleiFileName) nucleiFilePath := filepath.Join(testPath, nucleiFileName)
nucleiFile, err := os.OpenFile(nucleiFilePath, os.O_RDWR|os.O_CREATE, 0755)
nucleiFile, err := os.OpenFile(nucleiFilePath, os.O_RDWR|os.O_CREATE, 0o755)
if err != nil { if err != nil {
return err return err
} }
@ -406,7 +405,7 @@ func (cli *cliHubTest) NewRunCmd() *cobra.Command {
} }
func (cli *cliHubTest) NewCleanCmd() *cobra.Command { func (cli *cliHubTest) NewCleanCmd() *cobra.Command {
cmd := &cobra.Command{ var cmd = &cobra.Command{
Use: "clean", Use: "clean",
Short: "clean [test_name]", Short: "clean [test_name]",
Args: cobra.MinimumNArgs(1), Args: cobra.MinimumNArgs(1),

View file

@ -37,7 +37,6 @@ func ShowMetrics(hubItem *cwhub.Item) error {
appsecMetricsTable(color.Output, hubItem.Name, metrics) appsecMetricsTable(color.Output, hubItem.Name, metrics)
default: // no metrics for this item type default: // no metrics for this item type
} }
return nil return nil
} }
@ -50,27 +49,21 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
if !strings.HasPrefix(fam.Name, "cs_") { if !strings.HasPrefix(fam.Name, "cs_") {
continue continue
} }
log.Tracef("round %d", idx) log.Tracef("round %d", idx)
for _, m := range fam.Metrics { for _, m := range fam.Metrics {
metric, ok := m.(prom2json.Metric) metric, ok := m.(prom2json.Metric)
if !ok { if !ok {
log.Debugf("failed to convert metric to prom2json.Metric") log.Debugf("failed to convert metric to prom2json.Metric")
continue continue
} }
name, ok := metric.Labels["name"] name, ok := metric.Labels["name"]
if !ok { if !ok {
log.Debugf("no name in Metric %v", metric.Labels) log.Debugf("no name in Metric %v", metric.Labels)
} }
if name != itemName { if name != itemName {
continue continue
} }
source, ok := metric.Labels["source"] source, ok := metric.Labels["source"]
if !ok { if !ok {
log.Debugf("no source in Metric %v", metric.Labels) log.Debugf("no source in Metric %v", metric.Labels)
} else { } else {
@ -78,15 +71,12 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
source = srctype + ":" + source source = srctype + ":" + source
} }
} }
value := m.(prom2json.Metric).Value value := m.(prom2json.Metric).Value
fval, err := strconv.ParseFloat(value, 32) fval, err := strconv.ParseFloat(value, 32)
if err != nil { if err != nil {
log.Errorf("Unexpected int value %s : %s", value, err) log.Errorf("Unexpected int value %s : %s", value, err)
continue continue
} }
ival := int(fval) ival := int(fval)
switch fam.Name { switch fam.Name {
@ -129,7 +119,6 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
} }
} }
} }
return stats return stats
} }
@ -147,34 +136,26 @@ func GetScenarioMetric(url string, itemName string) map[string]int {
if !strings.HasPrefix(fam.Name, "cs_") { if !strings.HasPrefix(fam.Name, "cs_") {
continue continue
} }
log.Tracef("round %d", idx) log.Tracef("round %d", idx)
for _, m := range fam.Metrics { for _, m := range fam.Metrics {
metric, ok := m.(prom2json.Metric) metric, ok := m.(prom2json.Metric)
if !ok { if !ok {
log.Debugf("failed to convert metric to prom2json.Metric") log.Debugf("failed to convert metric to prom2json.Metric")
continue continue
} }
name, ok := metric.Labels["name"] name, ok := metric.Labels["name"]
if !ok { if !ok {
log.Debugf("no name in Metric %v", metric.Labels) log.Debugf("no name in Metric %v", metric.Labels)
} }
if name != itemName { if name != itemName {
continue continue
} }
value := m.(prom2json.Metric).Value value := m.(prom2json.Metric).Value
fval, err := strconv.ParseFloat(value, 32) fval, err := strconv.ParseFloat(value, 32)
if err != nil { if err != nil {
log.Errorf("Unexpected int value %s : %s", value, err) log.Errorf("Unexpected int value %s : %s", value, err)
continue continue
} }
ival := int(fval) ival := int(fval)
switch fam.Name { switch fam.Name {
@ -193,7 +174,6 @@ func GetScenarioMetric(url string, itemName string) map[string]int {
} }
} }
} }
return stats return stats
} }
@ -208,22 +188,17 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
if !strings.HasPrefix(fam.Name, "cs_") { if !strings.HasPrefix(fam.Name, "cs_") {
continue continue
} }
log.Tracef("round %d", idx) log.Tracef("round %d", idx)
for _, m := range fam.Metrics { for _, m := range fam.Metrics {
metric, ok := m.(prom2json.Metric) metric, ok := m.(prom2json.Metric)
if !ok { if !ok {
log.Debugf("failed to convert metric to prom2json.Metric") log.Debugf("failed to convert metric to prom2json.Metric")
continue continue
} }
name, ok := metric.Labels["rule_name"] name, ok := metric.Labels["rule_name"]
if !ok { if !ok {
log.Debugf("no rule_name in Metric %v", metric.Labels) log.Debugf("no rule_name in Metric %v", metric.Labels)
} }
if name != itemName { if name != itemName {
continue continue
} }
@ -234,13 +209,11 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
} }
value := m.(prom2json.Metric).Value value := m.(prom2json.Metric).Value
fval, err := strconv.ParseFloat(value, 32) fval, err := strconv.ParseFloat(value, 32)
if err != nil { if err != nil {
log.Errorf("Unexpected int value %s : %s", value, err) log.Errorf("Unexpected int value %s : %s", value, err)
continue continue
} }
ival := int(fval) ival := int(fval)
switch fam.Name { switch fam.Name {
@ -258,7 +231,6 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
} }
} }
} }
return stats return stats
} }
@ -275,7 +247,6 @@ func GetPrometheusMetric(url string) []*prom2json.Family {
go func() { go func() {
defer trace.CatchPanic("crowdsec/GetPrometheusMetric") defer trace.CatchPanic("crowdsec/GetPrometheusMetric")
err := prom2json.FetchMetricFamilies(url, mfChan, transport) err := prom2json.FetchMetricFamilies(url, mfChan, transport)
if err != nil { if err != nil {
log.Fatalf("failed to fetch prometheus metrics : %v", err) log.Fatalf("failed to fetch prometheus metrics : %v", err)
@ -286,7 +257,6 @@ func GetPrometheusMetric(url string) []*prom2json.Family {
for mf := range mfChan { for mf := range mfChan {
result = append(result, prom2json.NewFamily(mf)) result = append(result, prom2json.NewFamily(mf))
} }
log.Debugf("Finished reading prometheus output, %d entries", len(result)) log.Debugf("Finished reading prometheus output, %d entries", len(result))
return result return result

View file

@ -61,7 +61,7 @@ func compInstalledItems(itemType string, args []string, toComplete string) ([]st
return nil, cobra.ShellCompDirectiveDefault return nil, cobra.ShellCompDirectiveDefault
} }
items, err := hub.GetInstalledNamesByType(itemType) items, err := hub.GetInstalledItemNames(itemType)
if err != nil { if err != nil {
cobra.CompDebugln(fmt.Sprintf("list installed %s err: %s", itemType, err), true) cobra.CompDebugln(fmt.Sprintf("list installed %s err: %s", itemType, err), true)
return nil, cobra.ShellCompDirectiveDefault return nil, cobra.ShellCompDirectiveDefault

View file

@ -1,7 +1,6 @@
package main package main
import ( import (
"errors"
"fmt" "fmt"
"os" "os"
"strings" "strings"
@ -29,7 +28,6 @@ type cliHelp struct {
} }
type cliItem struct { type cliItem struct {
cfg configGetter
name string // plural, as used in the hub index name string // plural, as used in the hub index
singular string singular string
oneOrMore string // parenthetical pluralizaion: "parser(s)" oneOrMore string // parenthetical pluralizaion: "parser(s)"
@ -63,9 +61,7 @@ func (cli cliItem) NewCommand() *cobra.Command {
} }
func (cli cliItem) install(args []string, downloadOnly bool, force bool, ignoreError bool) error { func (cli cliItem) install(args []string, downloadOnly bool, force bool, ignoreError bool) error {
cfg := cli.cfg() hub, err := require.Hub(csConfig, require.RemoteHub(csConfig), log.StandardLogger())
hub, err := require.Hub(cfg, require.RemoteHub(cfg), log.StandardLogger())
if err != nil { if err != nil {
return err return err
} }
@ -75,7 +71,7 @@ func (cli cliItem) install(args []string, downloadOnly bool, force bool, ignoreE
if item == nil { if item == nil {
msg := suggestNearestMessage(hub, cli.name, name) msg := suggestNearestMessage(hub, cli.name, name)
if !ignoreError { if !ignoreError {
return errors.New(msg) return fmt.Errorf(msg)
} }
log.Errorf(msg) log.Errorf(msg)
@ -111,10 +107,10 @@ func (cli cliItem) newInstallCmd() *cobra.Command {
Example: cli.installHelp.example, Example: cli.installHelp.example,
Args: cobra.MinimumNArgs(1), Args: cobra.MinimumNArgs(1),
DisableAutoGenTag: true, DisableAutoGenTag: true,
ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
return compAllItems(cli.name, args, toComplete) return compAllItems(cli.name, args, toComplete)
}, },
RunE: func(_ *cobra.Command, args []string) error { RunE: func(cmd *cobra.Command, args []string) error {
return cli.install(args, downloadOnly, force, ignoreError) return cli.install(args, downloadOnly, force, ignoreError)
}, },
} }
@ -141,15 +137,15 @@ func istalledParentNames(item *cwhub.Item) []string {
} }
func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error { func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error {
hub, err := require.Hub(cli.cfg(), nil, log.StandardLogger()) hub, err := require.Hub(csConfig, nil, log.StandardLogger())
if err != nil { if err != nil {
return err return err
} }
if all { if all {
getter := hub.GetInstalledItemsByType getter := hub.GetInstalledItems
if purge { if purge {
getter = hub.GetItemsByType getter = hub.GetAllItems
} }
items, err := getter(cli.name) items, err := getter(cli.name)
@ -167,7 +163,6 @@ func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error
if didRemove { if didRemove {
log.Infof("Removed %s", item.Name) log.Infof("Removed %s", item.Name)
removed++ removed++
} }
} }
@ -209,7 +204,6 @@ func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error
if didRemove { if didRemove {
log.Infof("Removed %s", item.Name) log.Infof("Removed %s", item.Name)
removed++ removed++
} }
} }
@ -237,10 +231,10 @@ func (cli cliItem) newRemoveCmd() *cobra.Command {
Example: cli.removeHelp.example, Example: cli.removeHelp.example,
Aliases: []string{"delete"}, Aliases: []string{"delete"},
DisableAutoGenTag: true, DisableAutoGenTag: true,
ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
return compInstalledItems(cli.name, args, toComplete) return compInstalledItems(cli.name, args, toComplete)
}, },
RunE: func(_ *cobra.Command, args []string) error { RunE: func(cmd *cobra.Command, args []string) error {
return cli.remove(args, purge, force, all) return cli.remove(args, purge, force, all)
}, },
} }
@ -254,15 +248,13 @@ func (cli cliItem) newRemoveCmd() *cobra.Command {
} }
func (cli cliItem) upgrade(args []string, force bool, all bool) error { func (cli cliItem) upgrade(args []string, force bool, all bool) error {
cfg := cli.cfg() hub, err := require.Hub(csConfig, require.RemoteHub(csConfig), log.StandardLogger())
hub, err := require.Hub(cfg, require.RemoteHub(cfg), log.StandardLogger())
if err != nil { if err != nil {
return err return err
} }
if all { if all {
items, err := hub.GetInstalledItemsByType(cli.name) items, err := hub.GetInstalledItems(cli.name)
if err != nil { if err != nil {
return err return err
} }
@ -308,7 +300,6 @@ func (cli cliItem) upgrade(args []string, force bool, all bool) error {
if didUpdate { if didUpdate {
log.Infof("Updated %s", item.Name) log.Infof("Updated %s", item.Name)
updated++ updated++
} }
} }
@ -332,10 +323,10 @@ func (cli cliItem) newUpgradeCmd() *cobra.Command {
Long: coalesce.String(cli.upgradeHelp.long, fmt.Sprintf("Fetch and upgrade one or more %s from the hub", cli.name)), Long: coalesce.String(cli.upgradeHelp.long, fmt.Sprintf("Fetch and upgrade one or more %s from the hub", cli.name)),
Example: cli.upgradeHelp.example, Example: cli.upgradeHelp.example,
DisableAutoGenTag: true, DisableAutoGenTag: true,
ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
return compInstalledItems(cli.name, args, toComplete) return compInstalledItems(cli.name, args, toComplete)
}, },
RunE: func(_ *cobra.Command, args []string) error { RunE: func(cmd *cobra.Command, args []string) error {
return cli.upgrade(args, force, all) return cli.upgrade(args, force, all)
}, },
} }
@ -348,23 +339,21 @@ func (cli cliItem) newUpgradeCmd() *cobra.Command {
} }
func (cli cliItem) inspect(args []string, url string, diff bool, rev bool, noMetrics bool) error { func (cli cliItem) inspect(args []string, url string, diff bool, rev bool, noMetrics bool) error {
cfg := cli.cfg()
if rev && !diff { if rev && !diff {
return errors.New("--rev can only be used with --diff") return fmt.Errorf("--rev can only be used with --diff")
} }
if url != "" { if url != "" {
cfg.Cscli.PrometheusUrl = url csConfig.Cscli.PrometheusUrl = url
} }
remote := (*cwhub.RemoteHubCfg)(nil) remote := (*cwhub.RemoteHubCfg)(nil)
if diff { if diff {
remote = require.RemoteHub(cfg) remote = require.RemoteHub(csConfig)
} }
hub, err := require.Hub(cfg, remote, log.StandardLogger()) hub, err := require.Hub(csConfig, remote, log.StandardLogger())
if err != nil { if err != nil {
return err return err
} }
@ -410,10 +399,10 @@ func (cli cliItem) newInspectCmd() *cobra.Command {
Example: cli.inspectHelp.example, Example: cli.inspectHelp.example,
Args: cobra.MinimumNArgs(1), Args: cobra.MinimumNArgs(1),
DisableAutoGenTag: true, DisableAutoGenTag: true,
ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
return compInstalledItems(cli.name, args, toComplete) return compInstalledItems(cli.name, args, toComplete)
}, },
RunE: func(_ *cobra.Command, args []string) error { RunE: func(cmd *cobra.Command, args []string) error {
return cli.inspect(args, url, diff, rev, noMetrics) return cli.inspect(args, url, diff, rev, noMetrics)
}, },
} }
@ -428,7 +417,7 @@ func (cli cliItem) newInspectCmd() *cobra.Command {
} }
func (cli cliItem) list(args []string, all bool) error { func (cli cliItem) list(args []string, all bool) error {
hub, err := require.Hub(cli.cfg(), nil, log.StandardLogger()) hub, err := require.Hub(csConfig, nil, log.StandardLogger())
if err != nil { if err != nil {
return err return err
} }
@ -537,7 +526,6 @@ func (cli cliItem) whyTainted(hub *cwhub.Hub, item *cwhub.Item, reverse bool) st
// hack: avoid message "item is tainted by itself" // hack: avoid message "item is tainted by itself"
continue continue
} }
ret = append(ret, fmt.Sprintf("# %s is tainted by %s", sub.FQName(), taintList)) ret = append(ret, fmt.Sprintf("# %s is tainted by %s", sub.FQName(), taintList))
} }
} }

View file

@ -17,7 +17,7 @@ import (
// selectItems returns a slice of items of a given type, selected by name and sorted by case-insensitive name // selectItems returns a slice of items of a given type, selected by name and sorted by case-insensitive name
func selectItems(hub *cwhub.Hub, itemType string, args []string, installedOnly bool) ([]*cwhub.Item, error) { func selectItems(hub *cwhub.Hub, itemType string, args []string, installedOnly bool) ([]*cwhub.Item, error) {
itemNames := hub.GetNamesByType(itemType) itemNames := hub.GetItemNames(itemType)
notExist := []string{} notExist := []string{}
@ -116,7 +116,7 @@ func listItems(out io.Writer, itemTypes []string, items map[string][]*cwhub.Item
} }
if err := csvwriter.Write(header); err != nil { if err := csvwriter.Write(header); err != nil {
return fmt.Errorf("failed to write header: %w", err) return fmt.Errorf("failed to write header: %s", err)
} }
for _, itemType := range itemTypes { for _, itemType := range itemTypes {
@ -132,7 +132,7 @@ func listItems(out io.Writer, itemTypes []string, items map[string][]*cwhub.Item
} }
if err := csvwriter.Write(row); err != nil { if err := csvwriter.Write(row); err != nil {
return fmt.Errorf("failed to write raw output: %w", err) return fmt.Errorf("failed to write raw output: %s", err)
} }
} }
} }
@ -150,12 +150,12 @@ func inspectItem(item *cwhub.Item, showMetrics bool) error {
enc.SetIndent(2) enc.SetIndent(2)
if err := enc.Encode(item); err != nil { if err := enc.Encode(item); err != nil {
return fmt.Errorf("unable to encode item: %w", err) return fmt.Errorf("unable to encode item: %s", err)
} }
case "json": case "json":
b, err := json.MarshalIndent(*item, "", " ") b, err := json.MarshalIndent(*item, "", " ")
if err != nil { if err != nil {
return fmt.Errorf("unable to marshal item: %w", err) return fmt.Errorf("unable to marshal item: %s", err)
} }
fmt.Print(string(b)) fmt.Print(string(b))

View file

@ -13,7 +13,7 @@ import (
"github.com/go-openapi/strfmt" "github.com/go-openapi/strfmt"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v2"
"github.com/crowdsecurity/go-cs-lib/version" "github.com/crowdsecurity/go-cs-lib/version"
@ -44,9 +44,7 @@ func (cli *cliLapi) status() error {
password := strfmt.Password(cfg.API.Client.Credentials.Password) password := strfmt.Password(cfg.API.Client.Credentials.Password)
login := cfg.API.Client.Credentials.Login login := cfg.API.Client.Credentials.Login
origURL := cfg.API.Client.Credentials.URL apiurl, err := url.Parse(cfg.API.Client.Credentials.URL)
apiURL, err := url.Parse(origURL)
if err != nil { if err != nil {
return fmt.Errorf("parsing api url: %w", err) return fmt.Errorf("parsing api url: %w", err)
} }
@ -56,12 +54,12 @@ func (cli *cliLapi) status() error {
return err return err
} }
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
if err != nil { if err != nil {
return fmt.Errorf("failed to get scenarios: %w", err) return fmt.Errorf("failed to get scenarios: %w", err)
} }
Client, err = apiclient.NewDefaultClient(apiURL, Client, err = apiclient.NewDefaultClient(apiurl,
LAPIURLPrefix, LAPIURLPrefix,
fmt.Sprintf("crowdsec/%s", version.String()), fmt.Sprintf("crowdsec/%s", version.String()),
nil) nil)
@ -76,8 +74,7 @@ func (cli *cliLapi) status() error {
} }
log.Infof("Loaded credentials from %s", cfg.API.Client.CredentialsFilePath) log.Infof("Loaded credentials from %s", cfg.API.Client.CredentialsFilePath)
// use the original string because apiURL would print 'http://unix/' log.Infof("Trying to authenticate with username %s on %s", login, apiurl)
log.Infof("Trying to authenticate with username %s on %s", login, origURL)
_, _, err = Client.Auth.AuthenticateWatcher(context.Background(), t) _, _, err = Client.Auth.AuthenticateWatcher(context.Background(), t)
if err != nil { if err != nil {
@ -104,7 +101,23 @@ func (cli *cliLapi) register(apiURL string, outputFile string, machine string) e
password := strfmt.Password(generatePassword(passwordLength)) password := strfmt.Password(generatePassword(passwordLength))
apiurl, err := prepareAPIURL(cfg.API.Client, apiURL) if apiURL == "" {
if cfg.API.Client == nil || cfg.API.Client.Credentials == nil || cfg.API.Client.Credentials.URL == "" {
return fmt.Errorf("no Local API URL. Please provide it in your configuration or with the -u parameter")
}
apiURL = cfg.API.Client.Credentials.URL
}
/*URL needs to end with /, but user doesn't care*/
if !strings.HasSuffix(apiURL, "/") {
apiURL += "/"
}
/*URL needs to start with http://, but user doesn't care*/
if !strings.HasPrefix(apiURL, "http://") && !strings.HasPrefix(apiURL, "https://") {
apiURL = "http://" + apiURL
}
apiurl, err := url.Parse(apiURL)
if err != nil { if err != nil {
return fmt.Errorf("parsing api url: %w", err) return fmt.Errorf("parsing api url: %w", err)
} }
@ -116,6 +129,7 @@ func (cli *cliLapi) register(apiURL string, outputFile string, machine string) e
URL: apiurl, URL: apiurl,
VersionPrefix: LAPIURLPrefix, VersionPrefix: LAPIURLPrefix,
}, nil) }, nil)
if err != nil { if err != nil {
return fmt.Errorf("api client register: %w", err) return fmt.Errorf("api client register: %w", err)
} }
@ -159,36 +173,13 @@ func (cli *cliLapi) register(apiURL string, outputFile string, machine string) e
return nil return nil
} }
// prepareAPIURL checks/fixes a LAPI connection url (http, https or socket) and returns an URL struct
func prepareAPIURL(clientCfg *csconfig.LocalApiClientCfg, apiURL string) (*url.URL, error) {
if apiURL == "" {
if clientCfg == nil || clientCfg.Credentials == nil || clientCfg.Credentials.URL == "" {
return nil, errors.New("no Local API URL. Please provide it in your configuration or with the -u parameter")
}
apiURL = clientCfg.Credentials.URL
}
// URL needs to end with /, but user doesn't care
if !strings.HasSuffix(apiURL, "/") {
apiURL += "/"
}
// URL needs to start with http://, but user doesn't care
if !strings.HasPrefix(apiURL, "http://") && !strings.HasPrefix(apiURL, "https://") && !strings.HasPrefix(apiURL, "/") {
apiURL = "http://" + apiURL
}
return url.Parse(apiURL)
}
func (cli *cliLapi) newStatusCmd() *cobra.Command { func (cli *cliLapi) newStatusCmd() *cobra.Command {
cmdLapiStatus := &cobra.Command{ cmdLapiStatus := &cobra.Command{
Use: "status", Use: "status",
Short: "Check authentication to Local API (LAPI)", Short: "Check authentication to Local API (LAPI)",
Args: cobra.MinimumNArgs(0), Args: cobra.MinimumNArgs(0),
DisableAutoGenTag: true, DisableAutoGenTag: true,
RunE: func(_ *cobra.Command, _ []string) error { RunE: func(cmd *cobra.Command, args []string) error {
return cli.status() return cli.status()
}, },
} }
@ -584,7 +575,7 @@ func detectNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
} }
func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string { func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
ret := make([]string, 0) var ret = make([]string, 0)
for _, subnode := range node.LeavesNodes { for _, subnode := range node.LeavesNodes {
if subnode.Grok.RunTimeRegexp != nil { if subnode.Grok.RunTimeRegexp != nil {

View file

@ -1,49 +0,0 @@
package main
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
)
func TestPrepareAPIURL_NoProtocol(t *testing.T) {
url, err := prepareAPIURL(nil, "localhost:81")
require.NoError(t, err)
assert.Equal(t, "http://localhost:81/", url.String())
}
func TestPrepareAPIURL_Http(t *testing.T) {
url, err := prepareAPIURL(nil, "http://localhost:81")
require.NoError(t, err)
assert.Equal(t, "http://localhost:81/", url.String())
}
func TestPrepareAPIURL_Https(t *testing.T) {
url, err := prepareAPIURL(nil, "https://localhost:81")
require.NoError(t, err)
assert.Equal(t, "https://localhost:81/", url.String())
}
func TestPrepareAPIURL_UnixSocket(t *testing.T) {
url, err := prepareAPIURL(nil, "/path/socket")
require.NoError(t, err)
assert.Equal(t, "/path/socket/", url.String())
}
func TestPrepareAPIURL_Empty(t *testing.T) {
_, err := prepareAPIURL(nil, "")
require.Error(t, err)
}
func TestPrepareAPIURL_Empty_ConfigOverride(t *testing.T) {
url, err := prepareAPIURL(&csconfig.LocalApiClientCfg{
Credentials: &csconfig.ApiCredentialsCfg{
URL: "localhost:80",
},
}, "")
require.NoError(t, err)
assert.Equal(t, "http://localhost:80/", url.String())
}

View file

@ -4,7 +4,6 @@ import (
saferand "crypto/rand" saferand "crypto/rand"
"encoding/csv" "encoding/csv"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"math/big" "math/big"
"os" "os"
@ -135,7 +134,7 @@ Note: This command requires database direct access, so is intended to be run on
} }
cli.db, err = database.NewClient(cli.cfg().DbConfig) cli.db, err = database.NewClient(cli.cfg().DbConfig)
if err != nil { if err != nil {
return fmt.Errorf("unable to create new database client: %w", err) return fmt.Errorf("unable to create new database client: %s", err)
} }
return nil return nil
@ -156,7 +155,7 @@ func (cli *cliMachines) list() error {
machines, err := cli.db.ListMachines() machines, err := cli.db.ListMachines()
if err != nil { if err != nil {
return fmt.Errorf("unable to list machines: %w", err) return fmt.Errorf("unable to list machines: %s", err)
} }
switch cli.cfg().Cscli.Output { switch cli.cfg().Cscli.Output {
@ -167,7 +166,7 @@ func (cli *cliMachines) list() error {
enc.SetIndent("", " ") enc.SetIndent("", " ")
if err := enc.Encode(machines); err != nil { if err := enc.Encode(machines); err != nil {
return errors.New("failed to marshal") return fmt.Errorf("failed to marshal")
} }
return nil return nil
@ -176,7 +175,7 @@ func (cli *cliMachines) list() error {
err := csvwriter.Write([]string{"machine_id", "ip_address", "updated_at", "validated", "version", "auth_type", "last_heartbeat"}) err := csvwriter.Write([]string{"machine_id", "ip_address", "updated_at", "validated", "version", "auth_type", "last_heartbeat"})
if err != nil { if err != nil {
return fmt.Errorf("failed to write header: %w", err) return fmt.Errorf("failed to write header: %s", err)
} }
for _, m := range machines { for _, m := range machines {
@ -258,12 +257,12 @@ func (cli *cliMachines) add(args []string, machinePassword string, dumpFile stri
// create machineID if not specified by user // create machineID if not specified by user
if len(args) == 0 { if len(args) == 0 {
if !autoAdd { if !autoAdd {
return errors.New("please specify a machine name to add, or use --auto") return fmt.Errorf("please specify a machine name to add, or use --auto")
} }
machineID, err = generateID("") machineID, err = generateID("")
if err != nil { if err != nil {
return fmt.Errorf("unable to generate machine id: %w", err) return fmt.Errorf("unable to generate machine id: %s", err)
} }
} else { } else {
machineID = args[0] machineID = args[0]
@ -282,20 +281,20 @@ func (cli *cliMachines) add(args []string, machinePassword string, dumpFile stri
case os.IsNotExist(err) || force: case os.IsNotExist(err) || force:
dumpFile = credFile dumpFile = credFile
case err != nil: case err != nil:
return fmt.Errorf("unable to stat '%s': %w", credFile, err) return fmt.Errorf("unable to stat '%s': %s", credFile, err)
default: default:
return fmt.Errorf(`credentials file '%s' already exists: please remove it, use "--force" or specify a different file with "-f" ("-f -" for standard output)`, credFile) return fmt.Errorf(`credentials file '%s' already exists: please remove it, use "--force" or specify a different file with "-f" ("-f -" for standard output)`, credFile)
} }
} }
if dumpFile == "" { if dumpFile == "" {
return errors.New(`please specify a file to dump credentials to, with -f ("-f -" for standard output)`) return fmt.Errorf(`please specify a file to dump credentials to, with -f ("-f -" for standard output)`)
} }
// create a password if it's not specified by user // create a password if it's not specified by user
if machinePassword == "" && !interactive { if machinePassword == "" && !interactive {
if !autoAdd { if !autoAdd {
return errors.New("please specify a password with --password or use --auto") return fmt.Errorf("please specify a password with --password or use --auto")
} }
machinePassword = generatePassword(passwordLength) machinePassword = generatePassword(passwordLength)
@ -310,7 +309,7 @@ func (cli *cliMachines) add(args []string, machinePassword string, dumpFile stri
_, err = cli.db.CreateMachine(&machineID, &password, "", true, force, types.PasswordAuthType) _, err = cli.db.CreateMachine(&machineID, &password, "", true, force, types.PasswordAuthType)
if err != nil { if err != nil {
return fmt.Errorf("unable to create machine: %w", err) return fmt.Errorf("unable to create machine: %s", err)
} }
fmt.Fprintf(os.Stderr, "Machine '%s' successfully added to the local API.\n", machineID) fmt.Fprintf(os.Stderr, "Machine '%s' successfully added to the local API.\n", machineID)
@ -318,10 +317,10 @@ func (cli *cliMachines) add(args []string, machinePassword string, dumpFile stri
if apiURL == "" { if apiURL == "" {
if clientCfg != nil && clientCfg.Credentials != nil && clientCfg.Credentials.URL != "" { if clientCfg != nil && clientCfg.Credentials != nil && clientCfg.Credentials.URL != "" {
apiURL = clientCfg.Credentials.URL apiURL = clientCfg.Credentials.URL
} else if serverCfg.ClientURL() != "" { } else if serverCfg != nil && serverCfg.ListenURI != "" {
apiURL = serverCfg.ClientURL() apiURL = "http://" + serverCfg.ListenURI
} else { } else {
return errors.New("unable to dump an api URL. Please provide it in your configuration or with the -u parameter") return fmt.Errorf("unable to dump an api URL. Please provide it in your configuration or with the -u parameter")
} }
} }
@ -333,12 +332,12 @@ func (cli *cliMachines) add(args []string, machinePassword string, dumpFile stri
apiConfigDump, err := yaml.Marshal(apiCfg) apiConfigDump, err := yaml.Marshal(apiCfg)
if err != nil { if err != nil {
return fmt.Errorf("unable to marshal api credentials: %w", err) return fmt.Errorf("unable to marshal api credentials: %s", err)
} }
if dumpFile != "" && dumpFile != "-" { if dumpFile != "" && dumpFile != "-" {
if err = os.WriteFile(dumpFile, apiConfigDump, 0o600); err != nil { if err = os.WriteFile(dumpFile, apiConfigDump, 0o600); err != nil {
return fmt.Errorf("write api credentials in '%s' failed: %w", dumpFile, err) return fmt.Errorf("write api credentials in '%s' failed: %s", dumpFile, err)
} }
fmt.Fprintf(os.Stderr, "API credentials written to '%s'.\n", dumpFile) fmt.Fprintf(os.Stderr, "API credentials written to '%s'.\n", dumpFile)
@ -414,13 +413,13 @@ func (cli *cliMachines) prune(duration time.Duration, notValidOnly bool, force b
} }
if !notValidOnly { if !notValidOnly {
if pending, err := cli.db.QueryLastValidatedHeartbeatLT(time.Now().UTC().Add(-duration)); err == nil { if pending, err := cli.db.QueryLastValidatedHeartbeatLT(time.Now().UTC().Add(duration)); err == nil {
machines = append(machines, pending...) machines = append(machines, pending...)
} }
} }
if len(machines) == 0 { if len(machines) == 0 {
fmt.Println("No machines to prune.") fmt.Println("no machines to prune")
return nil return nil
} }
@ -439,7 +438,7 @@ func (cli *cliMachines) prune(duration time.Duration, notValidOnly bool, force b
deleted, err := cli.db.BulkDeleteWatchers(machines) deleted, err := cli.db.BulkDeleteWatchers(machines)
if err != nil { if err != nil {
return fmt.Errorf("unable to prune machines: %w", err) return fmt.Errorf("unable to prune machines: %s", err)
} }
fmt.Fprintf(os.Stderr, "successfully delete %d machines\n", deleted) fmt.Fprintf(os.Stderr, "successfully delete %d machines\n", deleted)
@ -480,7 +479,7 @@ cscli machines prune --not-validated-only --force`,
func (cli *cliMachines) validate(machineID string) error { func (cli *cliMachines) validate(machineID string) error {
if err := cli.db.ValidateMachine(machineID); err != nil { if err := cli.db.ValidateMachine(machineID); err != nil {
return fmt.Errorf("unable to validate machine '%s': %w", machineID, err) return fmt.Errorf("unable to validate machine '%s': %s", machineID, err)
} }
log.Infof("machine '%s' validated successfully", machineID) log.Infof("machine '%s' validated successfully", machineID)
@ -496,7 +495,7 @@ func (cli *cliMachines) newValidateCmd() *cobra.Command {
Example: `cscli machines validate "machine_name"`, Example: `cscli machines validate "machine_name"`,
Args: cobra.ExactArgs(1), Args: cobra.ExactArgs(1),
DisableAutoGenTag: true, DisableAutoGenTag: true,
RunE: func(_ *cobra.Command, args []string) error { RunE: func(cmd *cobra.Command, args []string) error {
return cli.validate(args[0]) return cli.validate(args[0])
}, },
} }

View file

@ -1,9 +1,7 @@
package main package main
import ( import (
"fmt"
"os" "os"
"path/filepath"
"slices" "slices"
"time" "time"
@ -12,18 +10,14 @@ import (
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/database" "github.com/crowdsecurity/crowdsec/pkg/database"
"github.com/crowdsecurity/crowdsec/pkg/fflag" "github.com/crowdsecurity/crowdsec/pkg/fflag"
) )
var ( var ConfigFilePath string
ConfigFilePath string var csConfig *csconfig.Config
csConfig *csconfig.Config var dbClient *database.Client
dbClient *database.Client
)
type configGetter func() *csconfig.Config type configGetter func() *csconfig.Config
@ -88,11 +82,6 @@ func loadConfigFor(command string) (*csconfig.Config, string, error) {
return nil, "", err return nil, "", err
} }
// set up directory for trace files
if err := trace.Init(filepath.Join(config.ConfigPaths.DataDir, "trace")); err != nil {
return nil, "", fmt.Errorf("while setting up trace directory: %w", err)
}
return config, merged, nil return config, merged, nil
} }
@ -260,13 +249,13 @@ It is meant to allow you to manage bans, parsers/scenarios/etc, api and generall
cmd.AddCommand(NewCLINotifications(cli.cfg).NewCommand()) cmd.AddCommand(NewCLINotifications(cli.cfg).NewCommand())
cmd.AddCommand(NewCLISupport().NewCommand()) cmd.AddCommand(NewCLISupport().NewCommand())
cmd.AddCommand(NewCLIPapi(cli.cfg).NewCommand()) cmd.AddCommand(NewCLIPapi(cli.cfg).NewCommand())
cmd.AddCommand(NewCLICollection(cli.cfg).NewCommand()) cmd.AddCommand(NewCLICollection().NewCommand())
cmd.AddCommand(NewCLIParser(cli.cfg).NewCommand()) cmd.AddCommand(NewCLIParser().NewCommand())
cmd.AddCommand(NewCLIScenario(cli.cfg).NewCommand()) cmd.AddCommand(NewCLIScenario().NewCommand())
cmd.AddCommand(NewCLIPostOverflow(cli.cfg).NewCommand()) cmd.AddCommand(NewCLIPostOverflow().NewCommand())
cmd.AddCommand(NewCLIContext(cli.cfg).NewCommand()) cmd.AddCommand(NewCLIContext().NewCommand())
cmd.AddCommand(NewCLIAppsecConfig(cli.cfg).NewCommand()) cmd.AddCommand(NewCLIAppsecConfig().NewCommand())
cmd.AddCommand(NewCLIAppsecRule(cli.cfg).NewCommand()) cmd.AddCommand(NewCLIAppsecRule().NewCommand())
if fflag.CscliSetup.IsEnabled() { if fflag.CscliSetup.IsEnabled() {
cmd.AddCommand(NewSetupCmd()) cmd.AddCommand(NewSetupCmd())

View file

@ -272,7 +272,9 @@ func (ms metricStore) Format(out io.Writer, sections []string, formatType string
// if no sections are specified, we want all of them // if no sections are specified, we want all of them
if len(sections) == 0 { if len(sections) == 0 {
sections = maptools.SortedKeys(ms) for section := range ms {
sections = append(sections, section)
}
} }
for _, section := range sections { for _, section := range sections {
@ -281,7 +283,7 @@ func (ms metricStore) Format(out io.Writer, sections []string, formatType string
switch formatType { switch formatType {
case "human": case "human":
for _, section := range maptools.SortedKeys(want) { for section := range want {
want[section].Table(out, noUnit, showEmpty) want[section].Table(out, noUnit, showEmpty)
} }
case "json": case "json":
@ -374,7 +376,7 @@ cscli metrics list`,
} }
// expandAlias returns a list of sections. The input can be a list of sections or alias. // expandAlias returns a list of sections. The input can be a list of sections or alias.
func (cli *cliMetrics) expandAlias(args []string) []string { func (cli *cliMetrics) expandSectionGroups(args []string) []string {
ret := []string{} ret := []string{}
for _, section := range args { for _, section := range args {
@ -420,7 +422,7 @@ cscli metrics show acquisition parsers scenarios stash -o json`,
// Positional args are optional // Positional args are optional
DisableAutoGenTag: true, DisableAutoGenTag: true,
RunE: func(_ *cobra.Command, args []string) error { RunE: func(_ *cobra.Command, args []string) error {
args = cli.expandAlias(args) args = cli.expandSectionGroups(args)
return cli.show(args, url, noUnit) return cli.show(args, url, noUnit)
}, },
} }

View file

@ -4,7 +4,6 @@ import (
"context" "context"
"encoding/csv" "encoding/csv"
"encoding/json" "encoding/json"
"errors"
"fmt" "fmt"
"io/fs" "io/fs"
"net/url" "net/url"
@ -89,7 +88,7 @@ func (cli *cliNotifications) getPluginConfigs() (map[string]csplugin.PluginConfi
return fmt.Errorf("error while traversing directory %s: %w", path, err) return fmt.Errorf("error while traversing directory %s: %w", path, err)
} }
name := filepath.Join(cfg.ConfigPaths.NotificationDir, info.Name()) // Avoid calling info.Name() twice name := filepath.Join(cfg.ConfigPaths.NotificationDir, info.Name()) //Avoid calling info.Name() twice
if (strings.HasSuffix(name, "yaml") || strings.HasSuffix(name, "yml")) && !(info.IsDir()) { if (strings.HasSuffix(name, "yaml") || strings.HasSuffix(name, "yml")) && !(info.IsDir()) {
ts, err := csplugin.ParsePluginConfigFile(name) ts, err := csplugin.ParsePluginConfigFile(name)
if err != nil { if err != nil {
@ -267,7 +266,7 @@ func (cli *cliNotifications) NewTestCmd() *cobra.Command {
if !ok { if !ok {
return fmt.Errorf("plugin name: '%s' does not exist", args[0]) return fmt.Errorf("plugin name: '%s' does not exist", args[0])
} }
// Create a single profile with plugin name as notification name //Create a single profile with plugin name as notification name
return pluginBroker.Init(cfg.PluginConfig, []*csconfig.ProfileCfg{ return pluginBroker.Init(cfg.PluginConfig, []*csconfig.ProfileCfg{
{ {
Notifications: []string{ Notifications: []string{
@ -321,8 +320,8 @@ func (cli *cliNotifications) NewTestCmd() *cobra.Command {
Alert: alert, Alert: alert,
} }
// time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent //time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
pluginTomb.Kill(errors.New("terminating")) pluginTomb.Kill(fmt.Errorf("terminating"))
pluginTomb.Wait() pluginTomb.Wait()
return nil return nil
@ -417,8 +416,8 @@ cscli notifications reinject <alert_id> -a '{"remediation": true,"scenario":"not
break break
} }
} }
// time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent //time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
pluginTomb.Kill(errors.New("terminating")) pluginTomb.Kill(fmt.Errorf("terminating"))
pluginTomb.Wait() pluginTomb.Wait()
return nil return nil

View file

@ -64,22 +64,25 @@ func (cli *cliPapi) NewStatusCmd() *cobra.Command {
cfg := cli.cfg() cfg := cli.cfg()
dbClient, err = database.NewClient(cfg.DbConfig) dbClient, err = database.NewClient(cfg.DbConfig)
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize database client: %w", err) return fmt.Errorf("unable to initialize database client: %s", err)
} }
apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists) apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize API client: %w", err) return fmt.Errorf("unable to initialize API client: %s", err)
} }
papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel()) papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize PAPI client: %w", err) return fmt.Errorf("unable to initialize PAPI client: %s", err)
} }
perms, err := papi.GetPermissions() perms, err := papi.GetPermissions()
if err != nil { if err != nil {
return fmt.Errorf("unable to get PAPI permissions: %w", err) return fmt.Errorf("unable to get PAPI permissions: %s", err)
} }
var lastTimestampStr *string var lastTimestampStr *string
lastTimestampStr, err = dbClient.GetConfigItem(apiserver.PapiPullKey) lastTimestampStr, err = dbClient.GetConfigItem(apiserver.PapiPullKey)
@ -115,26 +118,27 @@ func (cli *cliPapi) NewSyncCmd() *cobra.Command {
dbClient, err = database.NewClient(cfg.DbConfig) dbClient, err = database.NewClient(cfg.DbConfig)
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize database client: %w", err) return fmt.Errorf("unable to initialize database client: %s", err)
} }
apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists) apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize API client: %w", err) return fmt.Errorf("unable to initialize API client: %s", err)
} }
t.Go(apic.Push) t.Go(apic.Push)
papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel()) papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())
if err != nil { if err != nil {
return fmt.Errorf("unable to initialize PAPI client: %w", err) return fmt.Errorf("unable to initialize PAPI client: %s", err)
} }
t.Go(papi.SyncDecisions) t.Go(papi.SyncDecisions)
err = papi.PullOnce(time.Time{}, true) err = papi.PullOnce(time.Time{}, true)
if err != nil { if err != nil {
return fmt.Errorf("unable to sync decisions: %w", err) return fmt.Errorf("unable to sync decisions: %s", err)
} }
log.Infof("Sending acknowledgements to CAPI") log.Infof("Sending acknowledgements to CAPI")

View file

@ -1,7 +1,6 @@
package require package require
import ( import (
"errors"
"fmt" "fmt"
"io" "io"
@ -17,7 +16,7 @@ func LAPI(c *csconfig.Config) error {
} }
if c.DisableAPI { if c.DisableAPI {
return errors.New("local API is disabled -- this command must be run on the local API machine") return fmt.Errorf("local API is disabled -- this command must be run on the local API machine")
} }
return nil return nil
@ -33,7 +32,7 @@ func CAPI(c *csconfig.Config) error {
func PAPI(c *csconfig.Config) error { func PAPI(c *csconfig.Config) error {
if c.API.Server.OnlineClient.Credentials.PapiURL == "" { if c.API.Server.OnlineClient.Credentials.PapiURL == "" {
return errors.New("no PAPI URL in configuration") return fmt.Errorf("no PAPI URL in configuration")
} }
return nil return nil
@ -41,7 +40,7 @@ func PAPI(c *csconfig.Config) error {
func CAPIRegistered(c *csconfig.Config) error { func CAPIRegistered(c *csconfig.Config) error {
if c.API.Server.OnlineClient.Credentials == nil { if c.API.Server.OnlineClient.Credentials == nil {
return errors.New("the Central API (CAPI) must be configured with 'cscli capi register'") return fmt.Errorf("the Central API (CAPI) must be configured with 'cscli capi register'")
} }
return nil return nil
@ -57,7 +56,7 @@ func DB(c *csconfig.Config) error {
func Notifications(c *csconfig.Config) error { func Notifications(c *csconfig.Config) error {
if c.ConfigPaths.NotificationDir == "" { if c.ConfigPaths.NotificationDir == "" {
return errors.New("config_paths.notification_dir is not set in crowdsec config") return fmt.Errorf("config_paths.notification_dir is not set in crowdsec config")
} }
return nil return nil
@ -83,7 +82,7 @@ func Hub(c *csconfig.Config, remote *cwhub.RemoteHubCfg, logger *logrus.Logger)
local := c.Hub local := c.Hub
if local == nil { if local == nil {
return nil, errors.New("you must configure cli before interacting with hub") return nil, fmt.Errorf("you must configure cli before interacting with hub")
} }
if logger == nil { if logger == nil {

View file

@ -2,7 +2,6 @@ package main
import ( import (
"bytes" "bytes"
"errors"
"fmt" "fmt"
"os" "os"
"os/exec" "os/exec"
@ -119,11 +118,9 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
switch detectConfigFile { switch detectConfigFile {
case "-": case "-":
log.Tracef("Reading detection rules from stdin") log.Tracef("Reading detection rules from stdin")
detectReader = os.Stdin detectReader = os.Stdin
default: default:
log.Tracef("Reading detection rules: %s", detectConfigFile) log.Tracef("Reading detection rules: %s", detectConfigFile)
detectReader, err = os.Open(detectConfigFile) detectReader, err = os.Open(detectConfigFile)
if err != nil { if err != nil {
return err return err
@ -174,7 +171,6 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
_, err := exec.LookPath("systemctl") _, err := exec.LookPath("systemctl")
if err != nil { if err != nil {
log.Debug("systemctl not available: snubbing systemd") log.Debug("systemctl not available: snubbing systemd")
snubSystemd = true snubSystemd = true
} }
} }
@ -186,7 +182,6 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
if forcedOSFamily == "" && forcedOSID != "" { if forcedOSFamily == "" && forcedOSID != "" {
log.Debug("force-os-id is set: force-os-family defaults to 'linux'") log.Debug("force-os-id is set: force-os-family defaults to 'linux'")
forcedOSFamily = "linux" forcedOSFamily = "linux"
} }
@ -224,7 +219,6 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
if err != nil { if err != nil {
return err return err
} }
fmt.Println(setup) fmt.Println(setup)
return nil return nil
@ -324,7 +318,6 @@ func runSetupInstallHub(cmd *cobra.Command, args []string) error {
func runSetupValidate(cmd *cobra.Command, args []string) error { func runSetupValidate(cmd *cobra.Command, args []string) error {
fromFile := args[0] fromFile := args[0]
input, err := os.ReadFile(fromFile) input, err := os.ReadFile(fromFile)
if err != nil { if err != nil {
return fmt.Errorf("while reading stdin: %w", err) return fmt.Errorf("while reading stdin: %w", err)
@ -332,7 +325,7 @@ func runSetupValidate(cmd *cobra.Command, args []string) error {
if err = setup.Validate(input); err != nil { if err = setup.Validate(input); err != nil {
fmt.Printf("%v\n", err) fmt.Printf("%v\n", err)
return errors.New("invalid setup file") return fmt.Errorf("invalid setup file")
} }
return nil return nil

View file

@ -1,14 +1,13 @@
package main package main
import ( import (
"errors"
"fmt" "fmt"
"os" "os"
"slices" "slices"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v2"
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
"github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwhub"
@ -37,7 +36,7 @@ cscli simulation disable crowdsecurity/ssh-bf`,
return err return err
} }
if cli.cfg().Cscli.SimulationConfig == nil { if cli.cfg().Cscli.SimulationConfig == nil {
return errors.New("no simulation configured") return fmt.Errorf("no simulation configured")
} }
return nil return nil
@ -74,7 +73,7 @@ func (cli *cliSimulation) NewEnableCmd() *cobra.Command {
if len(args) > 0 { if len(args) > 0 {
for _, scenario := range args { for _, scenario := range args {
item := hub.GetItem(cwhub.SCENARIOS, scenario) var item = hub.GetItem(cwhub.SCENARIOS, scenario)
if item == nil { if item == nil {
log.Errorf("'%s' doesn't exist or is not a scenario", scenario) log.Errorf("'%s' doesn't exist or is not a scenario", scenario)
continue continue
@ -100,11 +99,11 @@ func (cli *cliSimulation) NewEnableCmd() *cobra.Command {
log.Printf("simulation mode for '%s' enabled", scenario) log.Printf("simulation mode for '%s' enabled", scenario)
} }
if err := cli.dumpSimulationFile(); err != nil { if err := cli.dumpSimulationFile(); err != nil {
return fmt.Errorf("simulation enable: %w", err) return fmt.Errorf("simulation enable: %s", err)
} }
} else if forceGlobalSimulation { } else if forceGlobalSimulation {
if err := cli.enableGlobalSimulation(); err != nil { if err := cli.enableGlobalSimulation(); err != nil {
return fmt.Errorf("unable to enable global simulation mode: %w", err) return fmt.Errorf("unable to enable global simulation mode: %s", err)
} }
} else { } else {
printHelp(cmd) printHelp(cmd)
@ -147,11 +146,11 @@ func (cli *cliSimulation) NewDisableCmd() *cobra.Command {
log.Printf("simulation mode for '%s' disabled", scenario) log.Printf("simulation mode for '%s' disabled", scenario)
} }
if err := cli.dumpSimulationFile(); err != nil { if err := cli.dumpSimulationFile(); err != nil {
return fmt.Errorf("simulation disable: %w", err) return fmt.Errorf("simulation disable: %s", err)
} }
} else if forceGlobalSimulation { } else if forceGlobalSimulation {
if err := cli.disableGlobalSimulation(); err != nil { if err := cli.disableGlobalSimulation(); err != nil {
return fmt.Errorf("unable to disable global simulation mode: %w", err) return fmt.Errorf("unable to disable global simulation mode: %s", err)
} }
} else { } else {
printHelp(cmd) printHelp(cmd)
@ -203,7 +202,7 @@ func (cli *cliSimulation) enableGlobalSimulation() error {
cfg.Cscli.SimulationConfig.Exclusions = []string{} cfg.Cscli.SimulationConfig.Exclusions = []string{}
if err := cli.dumpSimulationFile(); err != nil { if err := cli.dumpSimulationFile(); err != nil {
return fmt.Errorf("unable to dump simulation file: %w", err) return fmt.Errorf("unable to dump simulation file: %s", err)
} }
log.Printf("global simulation: enabled") log.Printf("global simulation: enabled")
@ -216,12 +215,12 @@ func (cli *cliSimulation) dumpSimulationFile() error {
newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig) newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig)
if err != nil { if err != nil {
return fmt.Errorf("unable to marshal simulation configuration: %w", err) return fmt.Errorf("unable to marshal simulation configuration: %s", err)
} }
err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644) err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644)
if err != nil { if err != nil {
return fmt.Errorf("write simulation config in '%s' failed: %w", cfg.ConfigPaths.SimulationFilePath, err) return fmt.Errorf("write simulation config in '%s' failed: %s", cfg.ConfigPaths.SimulationFilePath, err)
} }
log.Debugf("updated simulation file %s", cfg.ConfigPaths.SimulationFilePath) log.Debugf("updated simulation file %s", cfg.ConfigPaths.SimulationFilePath)
@ -238,12 +237,12 @@ func (cli *cliSimulation) disableGlobalSimulation() error {
newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig) newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig)
if err != nil { if err != nil {
return fmt.Errorf("unable to marshal new simulation configuration: %w", err) return fmt.Errorf("unable to marshal new simulation configuration: %s", err)
} }
err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644) err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644)
if err != nil { if err != nil {
return fmt.Errorf("unable to write new simulation config in '%s': %w", cfg.ConfigPaths.SimulationFilePath, err) return fmt.Errorf("unable to write new simulation config in '%s': %s", cfg.ConfigPaths.SimulationFilePath, err)
} }
log.Printf("global simulation: disabled") log.Printf("global simulation: disabled")
@ -270,10 +269,8 @@ func (cli *cliSimulation) status() {
} }
} else { } else {
log.Println("global simulation: disabled") log.Println("global simulation: disabled")
if len(cfg.Cscli.SimulationConfig.Exclusions) > 0 { if len(cfg.Cscli.SimulationConfig.Exclusions) > 0 {
log.Println("Scenarios in simulation mode :") log.Println("Scenarios in simulation mode :")
for _, scenario := range cfg.Cscli.SimulationConfig.Exclusions { for _, scenario := range cfg.Cscli.SimulationConfig.Exclusions {
log.Printf(" - %s", scenario) log.Printf(" - %s", scenario)
} }

View file

@ -4,7 +4,6 @@ import (
"archive/zip" "archive/zip"
"bytes" "bytes"
"context" "context"
"errors"
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
@ -13,14 +12,12 @@ import (
"path/filepath" "path/filepath"
"regexp" "regexp"
"strings" "strings"
"time"
"github.com/blackfireio/osinfo" "github.com/blackfireio/osinfo"
"github.com/go-openapi/strfmt" "github.com/go-openapi/strfmt"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/go-cs-lib/version" "github.com/crowdsecurity/go-cs-lib/version"
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require" "github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
@ -50,7 +47,6 @@ const (
SUPPORT_CAPI_STATUS_PATH = "capi_status.txt" SUPPORT_CAPI_STATUS_PATH = "capi_status.txt"
SUPPORT_ACQUISITION_CONFIG_BASE_PATH = "config/acquis/" SUPPORT_ACQUISITION_CONFIG_BASE_PATH = "config/acquis/"
SUPPORT_CROWDSEC_PROFILE_PATH = "config/profiles.yaml" SUPPORT_CROWDSEC_PROFILE_PATH = "config/profiles.yaml"
SUPPORT_CRASH_PATH = "crash/"
) )
// from https://github.com/acarl005/stripansi // from https://github.com/acarl005/stripansi
@ -66,7 +62,7 @@ func collectMetrics() ([]byte, []byte, error) {
if csConfig.Cscli.PrometheusUrl == "" { if csConfig.Cscli.PrometheusUrl == "" {
log.Warn("No Prometheus URL configured, metrics will not be collected") log.Warn("No Prometheus URL configured, metrics will not be collected")
return nil, nil, errors.New("prometheus_uri is not set") return nil, nil, fmt.Errorf("prometheus_uri is not set")
} }
humanMetrics := bytes.NewBuffer(nil) humanMetrics := bytes.NewBuffer(nil)
@ -74,7 +70,7 @@ func collectMetrics() ([]byte, []byte, error) {
ms := NewMetricStore() ms := NewMetricStore()
if err := ms.Fetch(csConfig.Cscli.PrometheusUrl); err != nil { if err := ms.Fetch(csConfig.Cscli.PrometheusUrl); err != nil {
return nil, nil, fmt.Errorf("could not fetch prometheus metrics: %w", err) return nil, nil, fmt.Errorf("could not fetch prometheus metrics: %s", err)
} }
if err := ms.Format(humanMetrics, nil, "human", false); err != nil { if err := ms.Format(humanMetrics, nil, "human", false); err != nil {
@ -83,21 +79,21 @@ func collectMetrics() ([]byte, []byte, error) {
req, err := http.NewRequest(http.MethodGet, csConfig.Cscli.PrometheusUrl, nil) req, err := http.NewRequest(http.MethodGet, csConfig.Cscli.PrometheusUrl, nil)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("could not create requests to prometheus endpoint: %w", err) return nil, nil, fmt.Errorf("could not create requests to prometheus endpoint: %s", err)
} }
client := &http.Client{} client := &http.Client{}
resp, err := client.Do(req) resp, err := client.Do(req)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("could not get metrics from prometheus endpoint: %w", err) return nil, nil, fmt.Errorf("could not get metrics from prometheus endpoint: %s", err)
} }
defer resp.Body.Close() defer resp.Body.Close()
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return nil, nil, fmt.Errorf("could not read metrics from prometheus endpoint: %w", err) return nil, nil, fmt.Errorf("could not read metrics from prometheus endpoint: %s", err)
} }
return humanMetrics.Bytes(), body, nil return humanMetrics.Bytes(), body, nil
@ -125,18 +121,19 @@ func collectOSInfo() ([]byte, error) {
log.Info("Collecting OS info") log.Info("Collecting OS info")
info, err := osinfo.GetOSInfo() info, err := osinfo.GetOSInfo()
if err != nil { if err != nil {
return nil, err return nil, err
} }
w := bytes.NewBuffer(nil) w := bytes.NewBuffer(nil)
fmt.Fprintf(w, "Architecture: %s\n", info.Architecture) w.WriteString(fmt.Sprintf("Architecture: %s\n", info.Architecture))
fmt.Fprintf(w, "Family: %s\n", info.Family) w.WriteString(fmt.Sprintf("Family: %s\n", info.Family))
fmt.Fprintf(w, "ID: %s\n", info.ID) w.WriteString(fmt.Sprintf("ID: %s\n", info.ID))
fmt.Fprintf(w, "Name: %s\n", info.Name) w.WriteString(fmt.Sprintf("Name: %s\n", info.Name))
fmt.Fprintf(w, "Codename: %s\n", info.Codename) w.WriteString(fmt.Sprintf("Codename: %s\n", info.Codename))
fmt.Fprintf(w, "Version: %s\n", info.Version) w.WriteString(fmt.Sprintf("Version: %s\n", info.Version))
fmt.Fprintf(w, "Build: %s\n", info.Build) w.WriteString(fmt.Sprintf("Build: %s\n", info.Build))
return w.Bytes(), nil return w.Bytes(), nil
} }
@ -166,7 +163,7 @@ func collectBouncers(dbClient *database.Client) ([]byte, error) {
bouncers, err := dbClient.ListBouncers() bouncers, err := dbClient.ListBouncers()
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to list bouncers: %w", err) return nil, fmt.Errorf("unable to list bouncers: %s", err)
} }
getBouncersTable(out, bouncers) getBouncersTable(out, bouncers)
@ -179,7 +176,7 @@ func collectAgents(dbClient *database.Client) ([]byte, error) {
machines, err := dbClient.ListMachines() machines, err := dbClient.ListMachines()
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to list machines: %w", err) return nil, fmt.Errorf("unable to list machines: %s", err)
} }
getAgentsTable(out, machines) getAgentsTable(out, machines)
@ -199,7 +196,7 @@ func collectAPIStatus(login string, password string, endpoint string, prefix str
return []byte(fmt.Sprintf("cannot parse API URL: %s", err)) return []byte(fmt.Sprintf("cannot parse API URL: %s", err))
} }
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
if err != nil { if err != nil {
return []byte(fmt.Sprintf("could not collect scenarios: %s", err)) return []byte(fmt.Sprintf("could not collect scenarios: %s", err))
} }
@ -267,11 +264,6 @@ func collectAcquisitionConfig() map[string][]byte {
return ret return ret
} }
func collectCrash() ([]string, error) {
log.Info("Collecting crash dumps")
return trace.List()
}
type cliSupport struct{} type cliSupport struct{}
func NewCLISupport() *cliSupport { func NewCLISupport() *cliSupport {
@ -319,7 +311,7 @@ cscli support dump -f /tmp/crowdsec-support.zip
`, `,
Args: cobra.NoArgs, Args: cobra.NoArgs,
DisableAutoGenTag: true, DisableAutoGenTag: true,
RunE: func(_ *cobra.Command, _ []string) error { Run: func(_ *cobra.Command, _ []string) {
var err error var err error
var skipHub, skipDB, skipCAPI, skipLAPI, skipAgent bool var skipHub, skipDB, skipCAPI, skipLAPI, skipAgent bool
infos := map[string][]byte{ infos := map[string][]byte{
@ -439,31 +431,11 @@ cscli support dump -f /tmp/crowdsec-support.zip
} }
} }
crash, err := collectCrash()
if err != nil {
log.Errorf("could not collect crash dumps: %s", err)
}
for _, filename := range crash {
content, err := os.ReadFile(filename)
if err != nil {
log.Errorf("could not read crash dump %s: %s", filename, err)
}
infos[SUPPORT_CRASH_PATH+filepath.Base(filename)] = content
}
w := bytes.NewBuffer(nil) w := bytes.NewBuffer(nil)
zipWriter := zip.NewWriter(w) zipWriter := zip.NewWriter(w)
for filename, data := range infos { for filename, data := range infos {
header := &zip.FileHeader{ fw, err := zipWriter.Create(filename)
Name: filename,
Method: zip.Deflate,
// TODO: retain mtime where possible (esp. trace)
Modified: time.Now(),
}
fw, err := zipWriter.CreateHeader(header)
if err != nil { if err != nil {
log.Errorf("Could not add zip entry for %s: %s", filename, err) log.Errorf("Could not add zip entry for %s: %s", filename, err)
continue continue
@ -473,19 +445,15 @@ cscli support dump -f /tmp/crowdsec-support.zip
err = zipWriter.Close() err = zipWriter.Close()
if err != nil { if err != nil {
return fmt.Errorf("could not finalize zip file: %s", err) log.Fatalf("could not finalize zip file: %s", err)
} }
if outFile == "-" {
_, err = os.Stdout.Write(w.Bytes())
return err
}
err = os.WriteFile(outFile, w.Bytes(), 0o600) err = os.WriteFile(outFile, w.Bytes(), 0o600)
if err != nil { if err != nil {
return fmt.Errorf("could not write zip file to %s: %s", outFile, err) log.Fatalf("could not write zip file to %s: %s", outFile, err)
} }
log.Infof("Written zip file to %s", outFile) log.Infof("Written zip file to %s", outFile)
return nil
}, },
} }

View file

@ -6,7 +6,8 @@ import (
"os" "os"
"github.com/aquasecurity/table" "github.com/aquasecurity/table"
isatty "github.com/mattn/go-isatty"
"github.com/crowdsecurity/go-cs-lib/cstty"
) )
func shouldWeColorize() bool { func shouldWeColorize() bool {
@ -16,7 +17,7 @@ func shouldWeColorize() bool {
if csConfig.Cscli.Color == "no" { if csConfig.Cscli.Color == "no" {
return false return false
} }
return isatty.IsTerminal(os.Stdout.Fd()) || isatty.IsCygwinTerminal(os.Stdout.Fd()) return cstty.IsTTY(os.Stdout.Fd())
} }
func newTable(out io.Writer) *table.Table { func newTable(out io.Writer) *table.Table {

View file

@ -1,11 +1,11 @@
package main package main
import ( import (
"errors"
"fmt" "fmt"
"runtime" "runtime"
"time" "time"
"github.com/pkg/errors"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/crowdsecurity/go-cs-lib/trace" "github.com/crowdsecurity/go-cs-lib/trace"

View file

@ -9,12 +9,11 @@ import (
"time" "time"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v2"
"github.com/crowdsecurity/go-cs-lib/trace" "github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/crowdsec/pkg/acquisition" "github.com/crowdsecurity/crowdsec/pkg/acquisition"
"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
"github.com/crowdsecurity/crowdsec/pkg/alertcontext" "github.com/crowdsecurity/crowdsec/pkg/alertcontext"
"github.com/crowdsecurity/crowdsec/pkg/appsec" "github.com/crowdsecurity/crowdsec/pkg/appsec"
"github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/csconfig"
@ -24,42 +23,39 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/types" "github.com/crowdsecurity/crowdsec/pkg/types"
) )
// initCrowdsec prepares the log processor service func initCrowdsec(cConfig *csconfig.Config, hub *cwhub.Hub) (*parser.Parsers, error) {
func initCrowdsec(cConfig *csconfig.Config, hub *cwhub.Hub) (*parser.Parsers, []acquisition.DataSource, error) {
var err error var err error
if err = alertcontext.LoadConsoleContext(cConfig, hub); err != nil { if err = alertcontext.LoadConsoleContext(cConfig, hub); err != nil {
return nil, nil, fmt.Errorf("while loading context: %w", err) return nil, fmt.Errorf("while loading context: %w", err)
} }
// Start loading configs // Start loading configs
csParsers := parser.NewParsers(hub) csParsers := parser.NewParsers(hub)
if csParsers, err = parser.LoadParsers(cConfig, csParsers); err != nil { if csParsers, err = parser.LoadParsers(cConfig, csParsers); err != nil {
return nil, nil, fmt.Errorf("while loading parsers: %w", err) return nil, fmt.Errorf("while loading parsers: %w", err)
} }
if err := LoadBuckets(cConfig, hub); err != nil { if err := LoadBuckets(cConfig, hub); err != nil {
return nil, nil, fmt.Errorf("while loading scenarios: %w", err) return nil, fmt.Errorf("while loading scenarios: %w", err)
} }
if err := appsec.LoadAppsecRules(hub); err != nil { if err := appsec.LoadAppsecRules(hub); err != nil {
return nil, nil, fmt.Errorf("while loading appsec rules: %w", err) return nil, fmt.Errorf("while loading appsec rules: %w", err)
} }
datasources, err := LoadAcquisition(cConfig) if err := LoadAcquisition(cConfig); err != nil {
if err != nil { return nil, fmt.Errorf("while loading acquisition config: %w", err)
return nil, nil, fmt.Errorf("while loading acquisition config: %w", err)
} }
return csParsers, datasources, nil return csParsers, nil
} }
// runCrowdsec starts the log processor service func runCrowdsec(cConfig *csconfig.Config, parsers *parser.Parsers, hub *cwhub.Hub) error {
func runCrowdsec(cConfig *csconfig.Config, parsers *parser.Parsers, hub *cwhub.Hub, datasources []acquisition.DataSource) error {
inputEventChan = make(chan types.Event) inputEventChan = make(chan types.Event)
inputLineChan = make(chan types.Event) inputLineChan = make(chan types.Event)
// start go-routines for parsing, buckets pour and outputs. //start go-routines for parsing, buckets pour and outputs.
parserWg := &sync.WaitGroup{} parserWg := &sync.WaitGroup{}
parsersTomb.Go(func() error { parsersTomb.Go(func() error {
@ -69,8 +65,7 @@ func runCrowdsec(cConfig *csconfig.Config, parsers *parser.Parsers, hub *cwhub.H
parsersTomb.Go(func() error { parsersTomb.Go(func() error {
defer trace.CatchPanic("crowdsec/runParse") defer trace.CatchPanic("crowdsec/runParse")
if err := runParse(inputLineChan, inputEventChan, *parsers.Ctx, parsers.Nodes); err != nil { if err := runParse(inputLineChan, inputEventChan, *parsers.Ctx, parsers.Nodes); err != nil { //this error will never happen as parser.Parse is not able to return errors
// this error will never happen as parser.Parse is not able to return errors
log.Fatalf("starting parse error : %s", err) log.Fatalf("starting parse error : %s", err)
return err return err
} }
@ -148,7 +143,7 @@ func runCrowdsec(cConfig *csconfig.Config, parsers *parser.Parsers, hub *cwhub.H
if cConfig.Prometheus != nil && cConfig.Prometheus.Enabled { if cConfig.Prometheus != nil && cConfig.Prometheus.Enabled {
aggregated := false aggregated := false
if cConfig.Prometheus.Level == configuration.CFG_METRICS_AGGREGATE { if cConfig.Prometheus.Level == "aggregated" {
aggregated = true aggregated = true
} }
@ -166,8 +161,7 @@ func runCrowdsec(cConfig *csconfig.Config, parsers *parser.Parsers, hub *cwhub.H
return nil return nil
} }
// serveCrowdsec wraps the log processor service func serveCrowdsec(parsers *parser.Parsers, cConfig *csconfig.Config, hub *cwhub.Hub, agentReady chan bool) {
func serveCrowdsec(parsers *parser.Parsers, cConfig *csconfig.Config, hub *cwhub.Hub, datasources []acquisition.DataSource, agentReady chan bool) {
crowdsecTomb.Go(func() error { crowdsecTomb.Go(func() error {
defer trace.CatchPanic("crowdsec/serveCrowdsec") defer trace.CatchPanic("crowdsec/serveCrowdsec")
@ -177,7 +171,7 @@ func serveCrowdsec(parsers *parser.Parsers, cConfig *csconfig.Config, hub *cwhub
log.Debugf("running agent after %s ms", time.Since(crowdsecT0)) log.Debugf("running agent after %s ms", time.Since(crowdsecT0))
agentReady <- true agentReady <- true
if err := runCrowdsec(cConfig, parsers, hub, datasources); err != nil { if err := runCrowdsec(cConfig, parsers, hub); err != nil {
log.Fatalf("unable to start crowdsec routines: %s", err) log.Fatalf("unable to start crowdsec routines: %s", err)
} }
}() }()
@ -207,7 +201,7 @@ func serveCrowdsec(parsers *parser.Parsers, cConfig *csconfig.Config, hub *cwhub
} }
func dumpBucketsPour() { func dumpBucketsPour() {
fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucketpour-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o666) fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucketpour-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
if err != nil { if err != nil {
log.Fatalf("open: %s", err) log.Fatalf("open: %s", err)
} }
@ -230,7 +224,7 @@ func dumpBucketsPour() {
} }
func dumpParserState() { func dumpParserState() {
fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "parser-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o666) fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "parser-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
if err != nil { if err != nil {
log.Fatalf("open: %s", err) log.Fatalf("open: %s", err)
} }
@ -253,7 +247,7 @@ func dumpParserState() {
} }
func dumpOverflowState() { func dumpOverflowState() {
fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucket-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o666) fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucket-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
if err != nil { if err != nil {
log.Fatalf("open: %s", err) log.Fatalf("open: %s", err)
} }

View file

@ -1,28 +0,0 @@
package main
import (
"io"
log "github.com/sirupsen/logrus"
)
// FatalHook is used to log fatal messages to stderr when the rest goes to a file
type FatalHook struct {
Writer io.Writer
LogLevels []log.Level
}
func (hook *FatalHook) Fire(entry *log.Entry) error {
line, err := entry.String()
if err != nil {
return err
}
_, err = hook.Writer.Write([]byte(line))
return err
}
func (hook *FatalHook) Levels() []log.Level {
return hook.LogLevels
}

43
cmd/crowdsec/hook.go Normal file
View file

@ -0,0 +1,43 @@
package main
import (
"io"
"os"
log "github.com/sirupsen/logrus"
)
type ConditionalHook struct {
Writer io.Writer
LogLevels []log.Level
Enabled bool
}
func (hook *ConditionalHook) Fire(entry *log.Entry) error {
if hook.Enabled {
line, err := entry.String()
if err != nil {
return err
}
_, err = hook.Writer.Write([]byte(line))
return err
}
return nil
}
func (hook *ConditionalHook) Levels() []log.Level {
return hook.LogLevels
}
// The primal logging hook is set up before parsing config.yaml.
// Once config.yaml is parsed, the primal hook is disabled if the
// configured logger is writing to stderr. Otherwise it's used to
// report fatal errors and panics to stderr in addition to the log file.
var primalHook = &ConditionalHook{
Writer: os.Stderr,
LogLevels: []log.Level{log.FatalLevel, log.PanicLevel},
Enabled: true,
}

View file

@ -17,12 +17,12 @@ import (
) )
func AuthenticatedLAPIClient(credentials csconfig.ApiCredentialsCfg, hub *cwhub.Hub) (*apiclient.ApiClient, error) { func AuthenticatedLAPIClient(credentials csconfig.ApiCredentialsCfg, hub *cwhub.Hub) (*apiclient.ApiClient, error) {
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
if err != nil { if err != nil {
return nil, fmt.Errorf("loading list of installed hub scenarios: %w", err) return nil, fmt.Errorf("loading list of installed hub scenarios: %w", err)
} }
appsecRules, err := hub.GetInstalledNamesByType(cwhub.APPSEC_RULES) appsecRules, err := hub.GetInstalledItemNames(cwhub.APPSEC_RULES)
if err != nil { if err != nil {
return nil, fmt.Errorf("loading list of installed hub appsec rules: %w", err) return nil, fmt.Errorf("loading list of installed hub appsec rules: %w", err)
} }
@ -52,11 +52,11 @@ func AuthenticatedLAPIClient(credentials csconfig.ApiCredentialsCfg, hub *cwhub.
PapiURL: papiURL, PapiURL: papiURL,
VersionPrefix: "v1", VersionPrefix: "v1",
UpdateScenario: func() ([]string, error) { UpdateScenario: func() ([]string, error) {
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS) scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
if err != nil { if err != nil {
return nil, err return nil, err
} }
appsecRules, err := hub.GetInstalledNamesByType(cwhub.APPSEC_RULES) appsecRules, err := hub.GetInstalledItemNames(cwhub.APPSEC_RULES)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View file

@ -1,22 +1,19 @@
package main package main
import ( import (
"errors"
"flag" "flag"
"fmt" "fmt"
_ "net/http/pprof" _ "net/http/pprof"
"os" "os"
"path/filepath"
"runtime" "runtime"
"runtime/pprof" "runtime/pprof"
"strings" "strings"
"time" "time"
"github.com/pkg/errors"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"gopkg.in/tomb.v2" "gopkg.in/tomb.v2"
"github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/crowdsec/pkg/acquisition" "github.com/crowdsecurity/crowdsec/pkg/acquisition"
"github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/csplugin" "github.com/crowdsecurity/crowdsec/pkg/csplugin"
@ -75,7 +72,7 @@ type Flags struct {
DisableCAPI bool DisableCAPI bool
Transform string Transform string
OrderEvent bool OrderEvent bool
CPUProfile string CpuProfile string
} }
func (f *Flags) haveTimeMachine() bool { func (f *Flags) haveTimeMachine() bool {
@ -99,10 +96,10 @@ func LoadBuckets(cConfig *csconfig.Config, hub *cwhub.Hub) error {
buckets = leakybucket.NewBuckets() buckets = leakybucket.NewBuckets()
log.Infof("Loading %d scenario files", len(files)) log.Infof("Loading %d scenario files", len(files))
holders, outputEventChan, err = leakybucket.LoadBuckets(cConfig.Crowdsec, hub, files, &bucketsTomb, buckets, flags.OrderEvent) holders, outputEventChan, err = leakybucket.LoadBuckets(cConfig.Crowdsec, hub, files, &bucketsTomb, buckets, flags.OrderEvent)
if err != nil { if err != nil {
return fmt.Errorf("scenario loading failed: %w", err) return fmt.Errorf("scenario loading failed: %v", err)
} }
if cConfig.Prometheus != nil && cConfig.Prometheus.Enabled { if cConfig.Prometheus != nil && cConfig.Prometheus.Enabled {
@ -114,7 +111,7 @@ func LoadBuckets(cConfig *csconfig.Config, hub *cwhub.Hub) error {
return nil return nil
} }
func LoadAcquisition(cConfig *csconfig.Config) ([]acquisition.DataSource, error) { func LoadAcquisition(cConfig *csconfig.Config) error {
var err error var err error
if flags.SingleFileType != "" && flags.OneShotDSN != "" { if flags.SingleFileType != "" && flags.OneShotDSN != "" {
@ -123,20 +120,20 @@ func LoadAcquisition(cConfig *csconfig.Config) ([]acquisition.DataSource, error)
dataSources, err = acquisition.LoadAcquisitionFromDSN(flags.OneShotDSN, flags.Labels, flags.Transform) dataSources, err = acquisition.LoadAcquisitionFromDSN(flags.OneShotDSN, flags.Labels, flags.Transform)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to configure datasource for %s: %w", flags.OneShotDSN, err) return errors.Wrapf(err, "failed to configure datasource for %s", flags.OneShotDSN)
} }
} else { } else {
dataSources, err = acquisition.LoadAcquisitionFromFile(cConfig.Crowdsec, cConfig.Prometheus) dataSources, err = acquisition.LoadAcquisitionFromFile(cConfig.Crowdsec)
if err != nil { if err != nil {
return nil, err return err
} }
} }
if len(dataSources) == 0 { if len(dataSources) == 0 {
return nil, errors.New("no datasource enabled") return fmt.Errorf("no datasource enabled")
} }
return dataSources, nil return nil
} }
var ( var (
@ -188,7 +185,7 @@ func (f *Flags) Parse() {
} }
flag.StringVar(&dumpFolder, "dump-data", "", "dump parsers/buckets raw outputs") flag.StringVar(&dumpFolder, "dump-data", "", "dump parsers/buckets raw outputs")
flag.StringVar(&f.CPUProfile, "cpu-profile", "", "write cpu profile to file") flag.StringVar(&f.CpuProfile, "cpu-profile", "", "write cpu profile to file")
flag.Parse() flag.Parse()
} }
@ -233,10 +230,6 @@ func LoadConfig(configFile string, disableAgent bool, disableAPI bool, quiet boo
return nil, fmt.Errorf("while loading configuration file: %w", err) return nil, fmt.Errorf("while loading configuration file: %w", err)
} }
if err := trace.Init(filepath.Join(cConfig.ConfigPaths.DataDir, "trace")); err != nil {
return nil, fmt.Errorf("while setting up trace directory: %w", err)
}
cConfig.Common.LogLevel = newLogLevel(cConfig.Common.LogLevel, flags) cConfig.Common.LogLevel = newLogLevel(cConfig.Common.LogLevel, flags)
if dumpFolder != "" { if dumpFolder != "" {
@ -260,12 +253,7 @@ func LoadConfig(configFile string, disableAgent bool, disableAPI bool, quiet boo
return nil, err return nil, err
} }
if cConfig.Common.LogMedia != "stdout" { primalHook.Enabled = (cConfig.Common.LogMedia != "stdout")
log.AddHook(&FatalHook{
Writer: os.Stderr,
LogLevels: []log.Level{log.FatalLevel, log.PanicLevel},
})
}
if err := csconfig.LoadFeatureFlagsFile(configFile, log.StandardLogger()); err != nil { if err := csconfig.LoadFeatureFlagsFile(configFile, log.StandardLogger()); err != nil {
return nil, err return nil, err
@ -288,7 +276,7 @@ func LoadConfig(configFile string, disableAgent bool, disableAPI bool, quiet boo
} }
if cConfig.DisableAPI && cConfig.DisableAgent { if cConfig.DisableAPI && cConfig.DisableAgent {
return nil, errors.New("you must run at least the API Server or crowdsec") return nil, errors.New("You must run at least the API Server or crowdsec")
} }
if flags.OneShotDSN != "" && flags.SingleFileType == "" { if flags.OneShotDSN != "" && flags.SingleFileType == "" {
@ -339,9 +327,7 @@ func LoadConfig(configFile string, disableAgent bool, disableAPI bool, quiet boo
var crowdsecT0 time.Time var crowdsecT0 time.Time
func main() { func main() {
// The initial log level is INFO, even if the user provided an -error or -warning flag log.AddHook(primalHook)
// because we need feature flags before parsing cli flags
log.SetFormatter(&log.TextFormatter{TimestampFormat: time.RFC3339, FullTimestamp: true})
if err := fflag.RegisterAllFeatures(); err != nil { if err := fflag.RegisterAllFeatures(); err != nil {
log.Fatalf("failed to register features: %s", err) log.Fatalf("failed to register features: %s", err)
@ -373,19 +359,16 @@ func main() {
os.Exit(0) os.Exit(0)
} }
if flags.CPUProfile != "" { if flags.CpuProfile != "" {
f, err := os.Create(flags.CPUProfile) f, err := os.Create(flags.CpuProfile)
if err != nil { if err != nil {
log.Fatalf("could not create CPU profile: %s", err) log.Fatalf("could not create CPU profile: %s", err)
} }
log.Infof("CPU profile will be written to %s", flags.CpuProfile)
log.Infof("CPU profile will be written to %s", flags.CPUProfile)
if err := pprof.StartCPUProfile(f); err != nil { if err := pprof.StartCPUProfile(f); err != nil {
f.Close() f.Close()
log.Fatalf("could not start CPU profile: %s", err) log.Fatalf("could not start CPU profile: %s", err)
} }
defer f.Close() defer f.Close()
defer pprof.StopCPUProfile() defer pprof.StopCPUProfile()
} }

View file

@ -3,6 +3,7 @@ package main
import ( import (
"fmt" "fmt"
"net/http" "net/http"
"time"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp" "github.com/prometheus/client_golang/prometheus/promhttp"
@ -11,7 +12,6 @@ import (
"github.com/crowdsecurity/go-cs-lib/trace" "github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/go-cs-lib/version" "github.com/crowdsecurity/go-cs-lib/version"
"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
v1 "github.com/crowdsecurity/crowdsec/pkg/apiserver/controllers/v1" v1 "github.com/crowdsecurity/crowdsec/pkg/apiserver/controllers/v1"
"github.com/crowdsecurity/crowdsec/pkg/cache" "github.com/crowdsecurity/crowdsec/pkg/cache"
"github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/csconfig"
@ -21,8 +21,7 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/parser" "github.com/crowdsecurity/crowdsec/pkg/parser"
) )
// Prometheus /*prometheus*/
var globalParserHits = prometheus.NewCounterVec( var globalParserHits = prometheus.NewCounterVec(
prometheus.CounterOpts{ prometheus.CounterOpts{
Name: "cs_parser_hits_total", Name: "cs_parser_hits_total",
@ -30,7 +29,6 @@ var globalParserHits = prometheus.NewCounterVec(
}, },
[]string{"source", "type"}, []string{"source", "type"},
) )
var globalParserHitsOk = prometheus.NewCounterVec( var globalParserHitsOk = prometheus.NewCounterVec(
prometheus.CounterOpts{ prometheus.CounterOpts{
Name: "cs_parser_hits_ok_total", Name: "cs_parser_hits_ok_total",
@ -38,7 +36,6 @@ var globalParserHitsOk = prometheus.NewCounterVec(
}, },
[]string{"source", "type"}, []string{"source", "type"},
) )
var globalParserHitsKo = prometheus.NewCounterVec( var globalParserHitsKo = prometheus.NewCounterVec(
prometheus.CounterOpts{ prometheus.CounterOpts{
Name: "cs_parser_hits_ko_total", Name: "cs_parser_hits_ko_total",
@ -105,20 +102,20 @@ var globalPourHistogram = prometheus.NewHistogramVec(
func computeDynamicMetrics(next http.Handler, dbClient *database.Client) http.HandlerFunc { func computeDynamicMetrics(next http.Handler, dbClient *database.Client) http.HandlerFunc {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// catch panics here because they are not handled by servePrometheus //update cache metrics (stash)
defer trace.CatchPanic("crowdsec/computeDynamicMetrics")
// update cache metrics (stash)
cache.UpdateCacheMetrics() cache.UpdateCacheMetrics()
// update cache metrics (regexp) //update cache metrics (regexp)
exprhelpers.UpdateRegexpCacheMetrics() exprhelpers.UpdateRegexpCacheMetrics()
// decision metrics are only relevant for LAPI //decision metrics are only relevant for LAPI
if dbClient == nil { if dbClient == nil {
next.ServeHTTP(w, r) next.ServeHTTP(w, r)
return return
} }
decisions, err := dbClient.QueryDecisionCountByScenario() decisionsFilters := make(map[string][]string, 0)
decisions, err := dbClient.QueryDecisionCountByScenario(decisionsFilters)
if err != nil { if err != nil {
log.Errorf("Error querying decisions for metrics: %v", err) log.Errorf("Error querying decisions for metrics: %v", err)
next.ServeHTTP(w, r) next.ServeHTTP(w, r)
@ -139,6 +136,7 @@ func computeDynamicMetrics(next http.Handler, dbClient *database.Client) http.Ha
} }
alerts, err := dbClient.AlertsCountPerScenario(alertsFilter) alerts, err := dbClient.AlertsCountPerScenario(alertsFilter)
if err != nil { if err != nil {
log.Errorf("Error querying alerts for metrics: %v", err) log.Errorf("Error querying alerts for metrics: %v", err)
next.ServeHTTP(w, r) next.ServeHTTP(w, r)
@ -161,7 +159,7 @@ func registerPrometheus(config *csconfig.PrometheusCfg) {
// Registering prometheus // Registering prometheus
// If in aggregated mode, do not register events associated with a source, to keep the cardinality low // If in aggregated mode, do not register events associated with a source, to keep the cardinality low
if config.Level == configuration.CFG_METRICS_AGGREGATE { if config.Level == "aggregated" {
log.Infof("Loading aggregated prometheus collectors") log.Infof("Loading aggregated prometheus collectors")
prometheus.MustRegister(globalParserHits, globalParserHitsOk, globalParserHitsKo, prometheus.MustRegister(globalParserHits, globalParserHitsOk, globalParserHitsKo,
globalCsInfo, globalParsingHistogram, globalPourHistogram, globalCsInfo, globalParsingHistogram, globalPourHistogram,
@ -193,11 +191,9 @@ func servePrometheus(config *csconfig.PrometheusCfg, dbClient *database.Client,
defer trace.CatchPanic("crowdsec/servePrometheus") defer trace.CatchPanic("crowdsec/servePrometheus")
http.Handle("/metrics", computeDynamicMetrics(promhttp.Handler(), dbClient)) http.Handle("/metrics", computeDynamicMetrics(promhttp.Handler(), dbClient))
log.Debugf("serving metrics after %s ms", time.Since(crowdsecT0))
if err := http.ListenAndServe(fmt.Sprintf("%s:%d", config.ListenAddr, config.ListenPort), nil); err != nil { if err := http.ListenAndServe(fmt.Sprintf("%s:%d", config.ListenAddr, config.ListenPort), nil); err != nil {
// in time machine, we most likely have the LAPI using the port log.Warningf("prometheus: %s", err)
if !flags.haveTimeMachine() {
log.Warningf("prometheus: %s", err)
}
} }
} }

View file

@ -27,7 +27,7 @@ func dedupAlerts(alerts []types.RuntimeAlert) ([]*models.Alert, error) {
} }
for k, src := range alert.Sources { for k, src := range alert.Sources {
refsrc := *alert.Alert // copy refsrc := *alert.Alert //copy
log.Tracef("source[%s]", k) log.Tracef("source[%s]", k)
@ -81,7 +81,7 @@ LOOP:
cacheMutex.Unlock() cacheMutex.Unlock()
if err := PushAlerts(cachecopy, client); err != nil { if err := PushAlerts(cachecopy, client); err != nil {
log.Errorf("while pushing to api : %s", err) log.Errorf("while pushing to api : %s", err)
// just push back the events to the queue //just push back the events to the queue
cacheMutex.Lock() cacheMutex.Lock()
cache = append(cache, cachecopy...) cache = append(cache, cachecopy...)
cacheMutex.Unlock() cacheMutex.Unlock()
@ -110,8 +110,8 @@ LOOP:
return fmt.Errorf("postoverflow failed: %w", err) return fmt.Errorf("postoverflow failed: %w", err)
} }
log.Printf("%s", *event.Overflow.Alert.Message) log.Printf("%s", *event.Overflow.Alert.Message)
// if the Alert is nil, it's to signal bucket is ready for GC, don't track this //if the Alert is nil, it's to signal bucket is ready for GC, don't track this
// dump after postoveflow processing to avoid missing whitelist info //dump after postoveflow processing to avoid missing whitelist info
if dumpStates && event.Overflow.Alert != nil { if dumpStates && event.Overflow.Alert != nil {
if bucketOverflows == nil { if bucketOverflows == nil {
bucketOverflows = make([]types.Event, 0) bucketOverflows = make([]types.Event, 0)

View file

@ -11,6 +11,7 @@ import (
) )
func runParse(input chan types.Event, output chan types.Event, parserCTX parser.UnixParserCtx, nodes []parser.Node) error { func runParse(input chan types.Event, output chan types.Event, parserCTX parser.UnixParserCtx, nodes []parser.Node) error {
LOOP: LOOP:
for { for {
select { select {
@ -55,6 +56,5 @@ LOOP:
output <- parsed output <- parsed
} }
} }
return nil return nil
} }

View file

@ -4,30 +4,27 @@ import (
"fmt" "fmt"
"time" "time"
"github.com/prometheus/client_golang/prometheus"
log "github.com/sirupsen/logrus"
"github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/csconfig"
leaky "github.com/crowdsecurity/crowdsec/pkg/leakybucket" leaky "github.com/crowdsecurity/crowdsec/pkg/leakybucket"
"github.com/crowdsecurity/crowdsec/pkg/types" "github.com/crowdsecurity/crowdsec/pkg/types"
"github.com/prometheus/client_golang/prometheus"
log "github.com/sirupsen/logrus"
) )
func runPour(input chan types.Event, holders []leaky.BucketFactory, buckets *leaky.Buckets, cConfig *csconfig.Config) error { func runPour(input chan types.Event, holders []leaky.BucketFactory, buckets *leaky.Buckets, cConfig *csconfig.Config) error {
count := 0 count := 0
for { for {
// bucket is now ready //bucket is now ready
select { select {
case <-bucketsTomb.Dying(): case <-bucketsTomb.Dying():
log.Infof("Bucket routine exiting") log.Infof("Bucket routine exiting")
return nil return nil
case parsed := <-input: case parsed := <-input:
startTime := time.Now() startTime := time.Now()
count++ count++
if count%5000 == 0 { if count%5000 == 0 {
log.Infof("%d existing buckets", leaky.LeakyRoutineCount) log.Infof("%d existing buckets", leaky.LeakyRoutineCount)
// when in forensics mode, garbage collect buckets //when in forensics mode, garbage collect buckets
if cConfig.Crowdsec.BucketsGCEnabled { if cConfig.Crowdsec.BucketsGCEnabled {
if parsed.MarshaledTime != "" { if parsed.MarshaledTime != "" {
z := &time.Time{} z := &time.Time{}
@ -35,30 +32,26 @@ func runPour(input chan types.Event, holders []leaky.BucketFactory, buckets *lea
log.Warningf("Failed to unmarshal time from event '%s' : %s", parsed.MarshaledTime, err) log.Warningf("Failed to unmarshal time from event '%s' : %s", parsed.MarshaledTime, err)
} else { } else {
log.Warning("Starting buckets garbage collection ...") log.Warning("Starting buckets garbage collection ...")
if err = leaky.GarbageCollectBuckets(*z, buckets); err != nil { if err = leaky.GarbageCollectBuckets(*z, buckets); err != nil {
return fmt.Errorf("failed to start bucket GC : %w", err) return fmt.Errorf("failed to start bucket GC : %s", err)
} }
} }
} }
} }
} }
// here we can bucketify with parsed //here we can bucketify with parsed
poured, err := leaky.PourItemToHolders(parsed, holders, buckets) poured, err := leaky.PourItemToHolders(parsed, holders, buckets)
if err != nil { if err != nil {
log.Errorf("bucketify failed for: %v", parsed) log.Errorf("bucketify failed for: %v", parsed)
continue continue
} }
elapsed := time.Since(startTime) elapsed := time.Since(startTime)
globalPourHistogram.With(prometheus.Labels{"type": parsed.Line.Module, "source": parsed.Line.Src}).Observe(elapsed.Seconds()) globalPourHistogram.With(prometheus.Labels{"type": parsed.Line.Module, "source": parsed.Line.Src}).Observe(elapsed.Seconds())
if poured { if poured {
globalBucketPourOk.Inc() globalBucketPourOk.Inc()
} else { } else {
globalBucketPourKo.Inc() globalBucketPourKo.Inc()
} }
if len(parsed.MarshaledTime) != 0 { if len(parsed.MarshaledTime) != 0 {
if err := lastProcessedItem.UnmarshalText([]byte(parsed.MarshaledTime)); err != nil { if err := lastProcessedItem.UnmarshalText([]byte(parsed.MarshaledTime)); err != nil {
log.Warningf("failed to unmarshal time from event : %s", err) log.Warningf("failed to unmarshal time from event : %s", err)

View file

@ -23,8 +23,8 @@ func StartRunSvc() error {
defer trace.CatchPanic("crowdsec/StartRunSvc") defer trace.CatchPanic("crowdsec/StartRunSvc")
// Always try to stop CPU profiling to avoid passing flags around //Always try to stop CPU profiling to avoid passing flags around
// It's a noop if profiling is not enabled //It's a noop if profiling is not enabled
defer pprof.StopCPUProfile() defer pprof.StopCPUProfile()
if cConfig, err = LoadConfig(flags.ConfigFile, flags.DisableAgent, flags.DisableAPI, false); err != nil { if cConfig, err = LoadConfig(flags.ConfigFile, flags.DisableAgent, flags.DisableAPI, false); err != nil {
@ -36,7 +36,7 @@ func StartRunSvc() error {
agentReady := make(chan bool, 1) agentReady := make(chan bool, 1)
// Enable profiling early // Enable profiling early
if cConfig.Prometheus != nil { if cConfig.Prometheus != nil && !flags.haveTimeMachine() {
var dbClient *database.Client var dbClient *database.Client
var err error var err error

View file

@ -20,8 +20,8 @@ func StartRunSvc() error {
defer trace.CatchPanic("crowdsec/StartRunSvc") defer trace.CatchPanic("crowdsec/StartRunSvc")
// Always try to stop CPU profiling to avoid passing flags around //Always try to stop CPU profiling to avoid passing flags around
// It's a noop if profiling is not enabled //It's a noop if profiling is not enabled
defer pprof.StopCPUProfile() defer pprof.StopCPUProfile()
isRunninginService, err := svc.IsWindowsService() isRunninginService, err := svc.IsWindowsService()
@ -76,7 +76,7 @@ func WindowsRun() error {
agentReady := make(chan bool, 1) agentReady := make(chan bool, 1)
// Enable profiling early // Enable profiling early
if cConfig.Prometheus != nil { if cConfig.Prometheus != nil && !flags.haveTimeMachine() {
var dbClient *database.Client var dbClient *database.Client
var err error var err error

View file

@ -86,7 +86,7 @@ func reloadHandler(sig os.Signal) (*csconfig.Config, error) {
return nil, fmt.Errorf("while loading hub index: %w", err) return nil, fmt.Errorf("while loading hub index: %w", err)
} }
csParsers, datasources, err := initCrowdsec(cConfig, hub) csParsers, err := initCrowdsec(cConfig, hub)
if err != nil { if err != nil {
return nil, fmt.Errorf("unable to init crowdsec: %w", err) return nil, fmt.Errorf("unable to init crowdsec: %w", err)
} }
@ -103,7 +103,7 @@ func reloadHandler(sig os.Signal) (*csconfig.Config, error) {
} }
agentReady := make(chan bool, 1) agentReady := make(chan bool, 1)
serveCrowdsec(csParsers, cConfig, hub, datasources, agentReady) serveCrowdsec(csParsers, cConfig, hub, agentReady)
} }
log.Printf("Reload is finished") log.Printf("Reload is finished")
@ -230,7 +230,7 @@ func drainChan(c chan types.Event) {
for { for {
select { select {
case _, ok := <-c: case _, ok := <-c:
if !ok { // closed if !ok { //closed
return return
} }
default: default:
@ -256,8 +256,8 @@ func HandleSignals(cConfig *csconfig.Config) error {
exitChan := make(chan error) exitChan := make(chan error)
// Always try to stop CPU profiling to avoid passing flags around //Always try to stop CPU profiling to avoid passing flags around
// It's a noop if profiling is not enabled //It's a noop if profiling is not enabled
defer pprof.StopCPUProfile() defer pprof.StopCPUProfile()
go func() { go func() {
@ -369,14 +369,14 @@ func Serve(cConfig *csconfig.Config, agentReady chan bool) error {
return fmt.Errorf("while loading hub index: %w", err) return fmt.Errorf("while loading hub index: %w", err)
} }
csParsers, datasources, err := initCrowdsec(cConfig, hub) csParsers, err := initCrowdsec(cConfig, hub)
if err != nil { if err != nil {
return fmt.Errorf("crowdsec init: %w", err) return fmt.Errorf("crowdsec init: %w", err)
} }
// if it's just linting, we're done // if it's just linting, we're done
if !flags.TestMode { if !flags.TestMode {
serveCrowdsec(csParsers, cConfig, hub, datasources, agentReady) serveCrowdsec(csParsers, cConfig, hub, agentReady)
} else { } else {
agentReady <- true agentReady <- true
} }
@ -391,7 +391,7 @@ func Serve(cConfig *csconfig.Config, agentReady chan bool) error {
} }
if cConfig.Common != nil && cConfig.Common.Daemonize { if cConfig.Common != nil && cConfig.Common.Daemonize {
csdaemon.Notify(csdaemon.Ready, log.StandardLogger()) csdaemon.NotifySystemd(log.StandardLogger())
// wait for signals // wait for signals
return HandleSignals(cConfig) return HandleSignals(cConfig)
} }

View file

@ -5,11 +5,10 @@ import (
"fmt" "fmt"
"os" "os"
"github.com/crowdsecurity/crowdsec/pkg/protobufs"
"github.com/hashicorp/go-hclog" "github.com/hashicorp/go-hclog"
plugin "github.com/hashicorp/go-plugin" plugin "github.com/hashicorp/go-plugin"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v2"
"github.com/crowdsecurity/crowdsec/pkg/protobufs"
) )
type PluginConfig struct { type PluginConfig struct {
@ -33,7 +32,6 @@ func (s *DummyPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
if _, ok := s.PluginConfigByName[notification.Name]; !ok { if _, ok := s.PluginConfigByName[notification.Name]; !ok {
return nil, fmt.Errorf("invalid plugin config name %s", notification.Name) return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
} }
cfg := s.PluginConfigByName[notification.Name] cfg := s.PluginConfigByName[notification.Name]
if cfg.LogLevel != nil && *cfg.LogLevel != "" { if cfg.LogLevel != nil && *cfg.LogLevel != "" {
@ -44,22 +42,19 @@ func (s *DummyPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
logger.Debug(notification.Text) logger.Debug(notification.Text)
if cfg.OutputFile != nil && *cfg.OutputFile != "" { if cfg.OutputFile != nil && *cfg.OutputFile != "" {
f, err := os.OpenFile(*cfg.OutputFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644) f, err := os.OpenFile(*cfg.OutputFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil { if err != nil {
logger.Error(fmt.Sprintf("Cannot open notification file: %s", err)) logger.Error(fmt.Sprintf("Cannot open notification file: %s", err))
} }
if _, err := f.WriteString(notification.Text + "\n"); err != nil { if _, err := f.WriteString(notification.Text + "\n"); err != nil {
f.Close() f.Close()
logger.Error(fmt.Sprintf("Cannot write notification to file: %s", err)) logger.Error(fmt.Sprintf("Cannot write notification to file: %s", err))
} }
err = f.Close() err = f.Close()
if err != nil { if err != nil {
logger.Error(fmt.Sprintf("Cannot close notification file: %s", err)) logger.Error(fmt.Sprintf("Cannot close notification file: %s", err))
} }
} }
fmt.Println(notification.Text) fmt.Println(notification.Text)
return &protobufs.Empty{}, nil return &protobufs.Empty{}, nil
@ -69,12 +64,11 @@ func (s *DummyPlugin) Configure(ctx context.Context, config *protobufs.Config) (
d := PluginConfig{} d := PluginConfig{}
err := yaml.Unmarshal(config.Config, &d) err := yaml.Unmarshal(config.Config, &d)
s.PluginConfigByName[d.Name] = d s.PluginConfigByName[d.Name] = d
return &protobufs.Empty{}, err return &protobufs.Empty{}, err
} }
func main() { func main() {
handshake := plugin.HandshakeConfig{ var handshake = plugin.HandshakeConfig{
ProtocolVersion: 1, ProtocolVersion: 1,
MagicCookieKey: "CROWDSEC_PLUGIN_KEY", MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"), MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),

View file

@ -2,17 +2,15 @@ package main
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"os" "os"
"time" "time"
"github.com/crowdsecurity/crowdsec/pkg/protobufs"
"github.com/hashicorp/go-hclog" "github.com/hashicorp/go-hclog"
plugin "github.com/hashicorp/go-plugin" plugin "github.com/hashicorp/go-plugin"
mail "github.com/xhit/go-simple-mail/v2" mail "github.com/xhit/go-simple-mail/v2"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v2"
"github.com/crowdsecurity/crowdsec/pkg/protobufs"
) )
var baseLogger hclog.Logger = hclog.New(&hclog.LoggerOptions{ var baseLogger hclog.Logger = hclog.New(&hclog.LoggerOptions{
@ -74,20 +72,19 @@ func (n *EmailPlugin) Configure(ctx context.Context, config *protobufs.Config) (
} }
if d.Name == "" { if d.Name == "" {
return nil, errors.New("name is required") return nil, fmt.Errorf("name is required")
} }
if d.SMTPHost == "" { if d.SMTPHost == "" {
return nil, errors.New("SMTP host is not set") return nil, fmt.Errorf("SMTP host is not set")
} }
if d.ReceiverEmails == nil || len(d.ReceiverEmails) == 0 { if d.ReceiverEmails == nil || len(d.ReceiverEmails) == 0 {
return nil, errors.New("receiver emails are not set") return nil, fmt.Errorf("receiver emails are not set")
} }
n.ConfigByName[d.Name] = d n.ConfigByName[d.Name] = d
baseLogger.Debug(fmt.Sprintf("Email plugin '%s' use SMTP host '%s:%d'", d.Name, d.SMTPHost, d.SMTPPort)) baseLogger.Debug(fmt.Sprintf("Email plugin '%s' use SMTP host '%s:%d'", d.Name, d.SMTPHost, d.SMTPPort))
return &protobufs.Empty{}, nil return &protobufs.Empty{}, nil
} }
@ -95,7 +92,6 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
if _, ok := n.ConfigByName[notification.Name]; !ok { if _, ok := n.ConfigByName[notification.Name]; !ok {
return nil, fmt.Errorf("invalid plugin config name %s", notification.Name) return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
} }
cfg := n.ConfigByName[notification.Name] cfg := n.ConfigByName[notification.Name]
logger := baseLogger.Named(cfg.Name) logger := baseLogger.Named(cfg.Name)
@ -121,7 +117,6 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
server.ConnectTimeout, err = time.ParseDuration(cfg.ConnectTimeout) server.ConnectTimeout, err = time.ParseDuration(cfg.ConnectTimeout)
if err != nil { if err != nil {
logger.Warn(fmt.Sprintf("invalid connect timeout '%s', using default '10s'", cfg.ConnectTimeout)) logger.Warn(fmt.Sprintf("invalid connect timeout '%s', using default '10s'", cfg.ConnectTimeout))
server.ConnectTimeout = 10 * time.Second server.ConnectTimeout = 10 * time.Second
} }
} }
@ -130,18 +125,15 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
server.SendTimeout, err = time.ParseDuration(cfg.SendTimeout) server.SendTimeout, err = time.ParseDuration(cfg.SendTimeout)
if err != nil { if err != nil {
logger.Warn(fmt.Sprintf("invalid send timeout '%s', using default '10s'", cfg.SendTimeout)) logger.Warn(fmt.Sprintf("invalid send timeout '%s', using default '10s'", cfg.SendTimeout))
server.SendTimeout = 10 * time.Second server.SendTimeout = 10 * time.Second
} }
} }
logger.Debug("making smtp connection") logger.Debug("making smtp connection")
smtpClient, err := server.Connect() smtpClient, err := server.Connect()
if err != nil { if err != nil {
return &protobufs.Empty{}, err return &protobufs.Empty{}, err
} }
logger.Debug("smtp connection done") logger.Debug("smtp connection done")
email := mail.NewMSG() email := mail.NewMSG()
@ -154,14 +146,12 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
if err != nil { if err != nil {
return &protobufs.Empty{}, err return &protobufs.Empty{}, err
} }
logger.Info(fmt.Sprintf("sent email to %v", cfg.ReceiverEmails)) logger.Info(fmt.Sprintf("sent email to %v", cfg.ReceiverEmails))
return &protobufs.Empty{}, nil return &protobufs.Empty{}, nil
} }
func main() { func main() {
handshake := plugin.HandshakeConfig{ var handshake = plugin.HandshakeConfig{
ProtocolVersion: 1, ProtocolVersion: 1,
MagicCookieKey: "CROWDSEC_PLUGIN_KEY", MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"), MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),

View file

@ -12,11 +12,10 @@ import (
"os" "os"
"strings" "strings"
"github.com/crowdsecurity/crowdsec/pkg/protobufs"
"github.com/hashicorp/go-hclog" "github.com/hashicorp/go-hclog"
plugin "github.com/hashicorp/go-plugin" plugin "github.com/hashicorp/go-plugin"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v2"
"github.com/crowdsecurity/crowdsec/pkg/protobufs"
) )
type PluginConfig struct { type PluginConfig struct {
@ -91,23 +90,18 @@ func getTLSClient(c *PluginConfig) error {
tlsConfig.Certificates = []tls.Certificate{cert} tlsConfig.Certificates = []tls.Certificate{cert}
} }
transport := &http.Transport{ transport := &http.Transport{
TLSClientConfig: tlsConfig, TLSClientConfig: tlsConfig,
} }
if c.UnixSocket != "" { if c.UnixSocket != "" {
logger.Info(fmt.Sprintf("Using socket '%s'", c.UnixSocket)) logger.Info(fmt.Sprintf("Using socket '%s'", c.UnixSocket))
transport.DialContext = func(_ context.Context, _, _ string) (net.Conn, error) { transport.DialContext = func(_ context.Context, _, _ string) (net.Conn, error) {
return net.Dial("unix", strings.TrimSuffix(c.UnixSocket, "/")) return net.Dial("unix", strings.TrimSuffix(c.UnixSocket, "/"))
} }
} }
c.Client = &http.Client{ c.Client = &http.Client{
Transport: transport, Transport: transport,
} }
return nil return nil
} }
@ -115,7 +109,6 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific
if _, ok := s.PluginConfigByName[notification.Name]; !ok { if _, ok := s.PluginConfigByName[notification.Name]; !ok {
return nil, fmt.Errorf("invalid plugin config name %s", notification.Name) return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
} }
cfg := s.PluginConfigByName[notification.Name] cfg := s.PluginConfigByName[notification.Name]
if cfg.LogLevel != nil && *cfg.LogLevel != "" { if cfg.LogLevel != nil && *cfg.LogLevel != "" {
@ -128,14 +121,11 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific
if err != nil { if err != nil {
return nil, err return nil, err
} }
for headerName, headerValue := range cfg.Headers { for headerName, headerValue := range cfg.Headers {
logger.Debug(fmt.Sprintf("adding header %s: %s", headerName, headerValue)) logger.Debug(fmt.Sprintf("adding header %s: %s", headerName, headerValue))
request.Header.Add(headerName, headerValue) request.Header.Add(headerName, headerValue)
} }
logger.Debug(fmt.Sprintf("making HTTP %s call to %s with body %s", cfg.Method, cfg.URL, notification.Text)) logger.Debug(fmt.Sprintf("making HTTP %s call to %s with body %s", cfg.Method, cfg.URL, notification.Text))
resp, err := cfg.Client.Do(request.WithContext(ctx)) resp, err := cfg.Client.Do(request.WithContext(ctx))
if err != nil { if err != nil {
logger.Error(fmt.Sprintf("Failed to make HTTP request : %s", err)) logger.Error(fmt.Sprintf("Failed to make HTTP request : %s", err))
@ -145,7 +135,7 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific
respData, err := io.ReadAll(resp.Body) respData, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to read response body got error %w", err) return nil, fmt.Errorf("failed to read response body got error %s", err)
} }
logger.Debug(fmt.Sprintf("got response %s", string(respData))) logger.Debug(fmt.Sprintf("got response %s", string(respData)))
@ -153,7 +143,6 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific
if resp.StatusCode < 200 || resp.StatusCode >= 300 { if resp.StatusCode < 200 || resp.StatusCode >= 300 {
logger.Warn(fmt.Sprintf("HTTP server returned non 200 status code: %d", resp.StatusCode)) logger.Warn(fmt.Sprintf("HTTP server returned non 200 status code: %d", resp.StatusCode))
logger.Debug(fmt.Sprintf("HTTP server returned body: %s", string(respData))) logger.Debug(fmt.Sprintf("HTTP server returned body: %s", string(respData)))
return &protobufs.Empty{}, nil return &protobufs.Empty{}, nil
} }
@ -162,25 +151,21 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific
func (s *HTTPPlugin) Configure(ctx context.Context, config *protobufs.Config) (*protobufs.Empty, error) { func (s *HTTPPlugin) Configure(ctx context.Context, config *protobufs.Config) (*protobufs.Empty, error) {
d := PluginConfig{} d := PluginConfig{}
err := yaml.Unmarshal(config.Config, &d) err := yaml.Unmarshal(config.Config, &d)
if err != nil { if err != nil {
return nil, err return nil, err
} }
err = getTLSClient(&d) err = getTLSClient(&d)
if err != nil { if err != nil {
return nil, err return nil, err
} }
s.PluginConfigByName[d.Name] = d s.PluginConfigByName[d.Name] = d
logger.Debug(fmt.Sprintf("HTTP plugin '%s' use URL '%s'", d.Name, d.URL)) logger.Debug(fmt.Sprintf("HTTP plugin '%s' use URL '%s'", d.Name, d.URL))
return &protobufs.Empty{}, err return &protobufs.Empty{}, err
} }
func main() { func main() {
handshake := plugin.HandshakeConfig{ var handshake = plugin.HandshakeConfig{
ProtocolVersion: 1, ProtocolVersion: 1,
MagicCookieKey: "CROWDSEC_PLUGIN_KEY", MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"), MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),

View file

@ -5,12 +5,12 @@ import (
"fmt" "fmt"
"os" "os"
"github.com/crowdsecurity/crowdsec/pkg/protobufs"
"github.com/hashicorp/go-hclog" "github.com/hashicorp/go-hclog"
plugin "github.com/hashicorp/go-plugin" plugin "github.com/hashicorp/go-plugin"
"github.com/slack-go/slack"
"gopkg.in/yaml.v3"
"github.com/crowdsecurity/crowdsec/pkg/protobufs" "github.com/slack-go/slack"
"gopkg.in/yaml.v2"
) )
type PluginConfig struct { type PluginConfig struct {
@ -33,16 +33,13 @@ func (n *Notify) Notify(ctx context.Context, notification *protobufs.Notificatio
if _, ok := n.ConfigByName[notification.Name]; !ok { if _, ok := n.ConfigByName[notification.Name]; !ok {
return nil, fmt.Errorf("invalid plugin config name %s", notification.Name) return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
} }
cfg := n.ConfigByName[notification.Name] cfg := n.ConfigByName[notification.Name]
if cfg.LogLevel != nil && *cfg.LogLevel != "" { if cfg.LogLevel != nil && *cfg.LogLevel != "" {
logger.SetLevel(hclog.LevelFromString(*cfg.LogLevel)) logger.SetLevel(hclog.LevelFromString(*cfg.LogLevel))
} }
logger.Info(fmt.Sprintf("found notify signal for %s config", notification.Name)) logger.Info(fmt.Sprintf("found notify signal for %s config", notification.Name))
logger.Debug(fmt.Sprintf("posting to %s webhook, message %s", cfg.Webhook, notification.Text)) logger.Debug(fmt.Sprintf("posting to %s webhook, message %s", cfg.Webhook, notification.Text))
err := slack.PostWebhookContext(ctx, n.ConfigByName[notification.Name].Webhook, &slack.WebhookMessage{ err := slack.PostWebhookContext(ctx, n.ConfigByName[notification.Name].Webhook, &slack.WebhookMessage{
Text: notification.Text, Text: notification.Text,
}) })
@ -55,19 +52,16 @@ func (n *Notify) Notify(ctx context.Context, notification *protobufs.Notificatio
func (n *Notify) Configure(ctx context.Context, config *protobufs.Config) (*protobufs.Empty, error) { func (n *Notify) Configure(ctx context.Context, config *protobufs.Config) (*protobufs.Empty, error) {
d := PluginConfig{} d := PluginConfig{}
if err := yaml.Unmarshal(config.Config, &d); err != nil { if err := yaml.Unmarshal(config.Config, &d); err != nil {
return nil, err return nil, err
} }
n.ConfigByName[d.Name] = d n.ConfigByName[d.Name] = d
logger.Debug(fmt.Sprintf("Slack plugin '%s' use URL '%s'", d.Name, d.Webhook)) logger.Debug(fmt.Sprintf("Slack plugin '%s' use URL '%s'", d.Name, d.Webhook))
return &protobufs.Empty{}, nil return &protobufs.Empty{}, nil
} }
func main() { func main() {
handshake := plugin.HandshakeConfig{ var handshake = plugin.HandshakeConfig{
ProtocolVersion: 1, ProtocolVersion: 1,
MagicCookieKey: "CROWDSEC_PLUGIN_KEY", MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"), MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),

View file

@ -10,11 +10,11 @@ import (
"os" "os"
"strings" "strings"
"github.com/crowdsecurity/crowdsec/pkg/protobufs"
"github.com/hashicorp/go-hclog" "github.com/hashicorp/go-hclog"
plugin "github.com/hashicorp/go-plugin" plugin "github.com/hashicorp/go-plugin"
"gopkg.in/yaml.v3"
"github.com/crowdsecurity/crowdsec/pkg/protobufs" "gopkg.in/yaml.v2"
) )
var logger hclog.Logger = hclog.New(&hclog.LoggerOptions{ var logger hclog.Logger = hclog.New(&hclog.LoggerOptions{
@ -44,7 +44,6 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio
if _, ok := s.PluginConfigByName[notification.Name]; !ok { if _, ok := s.PluginConfigByName[notification.Name]; !ok {
return &protobufs.Empty{}, fmt.Errorf("splunk invalid config name %s", notification.Name) return &protobufs.Empty{}, fmt.Errorf("splunk invalid config name %s", notification.Name)
} }
cfg := s.PluginConfigByName[notification.Name] cfg := s.PluginConfigByName[notification.Name]
if cfg.LogLevel != nil && *cfg.LogLevel != "" { if cfg.LogLevel != nil && *cfg.LogLevel != "" {
@ -54,7 +53,6 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio
logger.Info(fmt.Sprintf("received notify signal for %s config", notification.Name)) logger.Info(fmt.Sprintf("received notify signal for %s config", notification.Name))
p := Payload{Event: notification.Text} p := Payload{Event: notification.Text}
data, err := json.Marshal(p) data, err := json.Marshal(p)
if err != nil { if err != nil {
return &protobufs.Empty{}, err return &protobufs.Empty{}, err
@ -67,7 +65,6 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio
req.Header.Add("Authorization", fmt.Sprintf("Splunk %s", cfg.Token)) req.Header.Add("Authorization", fmt.Sprintf("Splunk %s", cfg.Token))
logger.Debug(fmt.Sprintf("posting event %s to %s", string(data), req.URL)) logger.Debug(fmt.Sprintf("posting event %s to %s", string(data), req.URL))
resp, err := s.Client.Do(req.WithContext(ctx)) resp, err := s.Client.Do(req.WithContext(ctx))
if err != nil { if err != nil {
return &protobufs.Empty{}, err return &protobufs.Empty{}, err
@ -76,19 +73,15 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio
if resp.StatusCode != http.StatusOK { if resp.StatusCode != http.StatusOK {
content, err := io.ReadAll(resp.Body) content, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return &protobufs.Empty{}, fmt.Errorf("got non 200 response and failed to read error %w", err) return &protobufs.Empty{}, fmt.Errorf("got non 200 response and failed to read error %s", err)
} }
return &protobufs.Empty{}, fmt.Errorf("got non 200 response %s", string(content)) return &protobufs.Empty{}, fmt.Errorf("got non 200 response %s", string(content))
} }
respData, err := io.ReadAll(resp.Body) respData, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
return &protobufs.Empty{}, fmt.Errorf("failed to read response body got error %w", err) return &protobufs.Empty{}, fmt.Errorf("failed to read response body got error %s", err)
} }
logger.Debug(fmt.Sprintf("got response %s", string(respData))) logger.Debug(fmt.Sprintf("got response %s", string(respData)))
return &protobufs.Empty{}, nil return &protobufs.Empty{}, nil
} }
@ -97,12 +90,11 @@ func (s *Splunk) Configure(ctx context.Context, config *protobufs.Config) (*prot
err := yaml.Unmarshal(config.Config, &d) err := yaml.Unmarshal(config.Config, &d)
s.PluginConfigByName[d.Name] = d s.PluginConfigByName[d.Name] = d
logger.Debug(fmt.Sprintf("Splunk plugin '%s' use URL '%s'", d.Name, d.URL)) logger.Debug(fmt.Sprintf("Splunk plugin '%s' use URL '%s'", d.Name, d.URL))
return &protobufs.Empty{}, err return &protobufs.Empty{}, err
} }
func main() { func main() {
handshake := plugin.HandshakeConfig{ var handshake = plugin.HandshakeConfig{
ProtocolVersion: 1, ProtocolVersion: 1,
MagicCookieKey: "CROWDSEC_PLUGIN_KEY", MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"), MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),

View file

@ -2,13 +2,12 @@
test -x /usr/bin/cscli || exit 0 test -x /usr/bin/cscli || exit 0
# splay hub upgrade and crowdsec reload
sleep "$(seq 1 300 | shuf -n 1)"
/usr/bin/cscli --error hub update /usr/bin/cscli --error hub update
upgraded=$(/usr/bin/cscli --error hub upgrade) upgraded=$(/usr/bin/cscli --error hub upgrade)
if [ -n "$upgraded" ]; then if [ -n "$upgraded" ]; then
# splay initial metrics push
sleep $(seq 1 90 | shuf -n 1)
systemctl reload crowdsec systemctl reload crowdsec
fi fi

1
debian/control vendored
View file

@ -8,4 +8,3 @@ Package: crowdsec
Architecture: any Architecture: any
Description: Crowdsec - An open-source, lightweight agent to detect and respond to bad behaviors. It also automatically benefits from our global community-wide IP reputation database Description: Crowdsec - An open-source, lightweight agent to detect and respond to bad behaviors. It also automatically benefits from our global community-wide IP reputation database
Depends: coreutils Depends: coreutils
Suggests: cron

View file

@ -6,7 +6,7 @@ CROWDSEC_TEST_VERSION="dev"
# All of the following flavors will be tested when using the "flavor" fixture # All of the following flavors will be tested when using the "flavor" fixture
CROWDSEC_TEST_FLAVORS="full" CROWDSEC_TEST_FLAVORS="full"
# CROWDSEC_TEST_FLAVORS="full,slim,debian" # CROWDSEC_TEST_FLAVORS="full,slim,debian"
# CROWDSEC_TEST_FLAVORS="full,slim,debian,debian-slim" # CROWDSEC_TEST_FLAVORS="full,slim,debian,geoip,plugins-debian-slim,debian-geoip,debian-plugins"
# network to use # network to use
CROWDSEC_TEST_NETWORK="net-test" CROWDSEC_TEST_NETWORK="net-test"

View file

@ -36,6 +36,8 @@ def test_register_bouncer_env(crowdsec, flavor):
bouncer1, bouncer2 = j bouncer1, bouncer2 = j
assert bouncer1['name'] == 'bouncer1name' assert bouncer1['name'] == 'bouncer1name'
assert bouncer2['name'] == 'bouncer2name' assert bouncer2['name'] == 'bouncer2name'
assert bouncer1['api_key'] == hex512('bouncer1key')
assert bouncer2['api_key'] == hex512('bouncer2key')
# add a second bouncer at runtime # add a second bouncer at runtime
res = cs.cont.exec_run('cscli bouncers add bouncer3name -k bouncer3key') res = cs.cont.exec_run('cscli bouncers add bouncer3name -k bouncer3key')
@ -46,6 +48,7 @@ def test_register_bouncer_env(crowdsec, flavor):
assert len(j) == 3 assert len(j) == 3
bouncer3 = j[2] bouncer3 = j[2]
assert bouncer3['name'] == 'bouncer3name' assert bouncer3['name'] == 'bouncer3name'
assert bouncer3['api_key'] == hex512('bouncer3key')
# remove all bouncers # remove all bouncers
res = cs.cont.exec_run('cscli bouncers delete bouncer1name bouncer2name bouncer3name') res = cs.cont.exec_run('cscli bouncers delete bouncer1name bouncer2name bouncer3name')

View file

@ -42,7 +42,7 @@ def test_flavor_content(crowdsec, flavor):
x = cs.cont.exec_run( x = cs.cont.exec_run(
'ls -1 /usr/local/lib/crowdsec/plugins/') 'ls -1 /usr/local/lib/crowdsec/plugins/')
stdout = x.output.decode() stdout = x.output.decode()
if 'slim' in flavor: if 'slim' in flavor or 'geoip' in flavor:
# the exact return code and full message depend # the exact return code and full message depend
# on the 'ls' implementation (busybox vs coreutils) # on the 'ls' implementation (busybox vs coreutils)
assert x.exit_code != 0 assert x.exit_code != 0

View file

@ -22,7 +22,8 @@ def test_missing_key_file(crowdsec, flavor):
} }
with crowdsec(flavor=flavor, environment=env, wait_status=Status.EXITED) as cs: with crowdsec(flavor=flavor, environment=env, wait_status=Status.EXITED) as cs:
cs.wait_for_log("*local API server stopped with error: missing TLS key file*") # XXX: this message appears twice, is that normal?
cs.wait_for_log("*while starting API server: missing TLS key file*")
def test_missing_cert_file(crowdsec, flavor): def test_missing_cert_file(crowdsec, flavor):
@ -34,7 +35,7 @@ def test_missing_cert_file(crowdsec, flavor):
} }
with crowdsec(flavor=flavor, environment=env, wait_status=Status.EXITED) as cs: with crowdsec(flavor=flavor, environment=env, wait_status=Status.EXITED) as cs:
cs.wait_for_log("*local API server stopped with error: missing TLS cert file*") cs.wait_for_log("*while starting API server: missing TLS cert file*")
def test_tls_missing_ca(crowdsec, flavor, certs_dir): def test_tls_missing_ca(crowdsec, flavor, certs_dir):

42
go.mod
View file

@ -1,13 +1,13 @@
module github.com/crowdsecurity/crowdsec module github.com/crowdsecurity/crowdsec
go 1.22 go 1.21
// Don't use the toolchain directive to avoid uncontrolled downloads during // Don't use the toolchain directive to avoid uncontrolled downloads during
// a build, especially in sandboxed environments (freebsd, gentoo...). // a build, especially in sandboxed environments (freebsd, gentoo...).
// toolchain go1.21.3 // toolchain go1.21.3
require ( require (
entgo.io/ent v0.12.5 entgo.io/ent v0.12.4
github.com/AlecAivazis/survey/v2 v2.3.7 github.com/AlecAivazis/survey/v2 v2.3.7
github.com/Masterminds/semver/v3 v3.2.1 github.com/Masterminds/semver/v3 v3.2.1
github.com/Masterminds/sprig/v3 v3.2.3 github.com/Masterminds/sprig/v3 v3.2.3
@ -24,17 +24,16 @@ require (
github.com/buger/jsonparser v1.1.1 github.com/buger/jsonparser v1.1.1
github.com/c-robinson/iplib v1.0.3 github.com/c-robinson/iplib v1.0.3
github.com/cespare/xxhash/v2 v2.2.0 github.com/cespare/xxhash/v2 v2.2.0
github.com/corazawaf/libinjection-go v0.1.2
github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607 github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607
github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26 github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26
github.com/crowdsecurity/go-cs-lib v0.0.10 github.com/crowdsecurity/go-cs-lib v0.0.7-0.20240130143103-452318e69ef9
github.com/crowdsecurity/grokky v0.2.1 github.com/crowdsecurity/grokky v0.2.1
github.com/crowdsecurity/machineid v1.0.2 github.com/crowdsecurity/machineid v1.0.2
github.com/davecgh/go-spew v1.1.1 github.com/davecgh/go-spew v1.1.1
github.com/dghubble/sling v1.3.0 github.com/dghubble/sling v1.3.0
github.com/docker/docker v24.0.9+incompatible github.com/docker/docker v24.0.7+incompatible
github.com/docker/go-connections v0.4.0 github.com/docker/go-connections v0.4.0
github.com/fatih/color v1.15.0 github.com/fatih/color v1.16.0
github.com/fsnotify/fsnotify v1.6.0 github.com/fsnotify/fsnotify v1.6.0
github.com/gin-gonic/gin v1.9.1 github.com/gin-gonic/gin v1.9.1
github.com/go-co-op/gocron v1.17.0 github.com/go-co-op/gocron v1.17.0
@ -56,16 +55,15 @@ require (
github.com/hashicorp/go-version v1.2.1 github.com/hashicorp/go-version v1.2.1
github.com/hexops/gotextdiff v1.0.3 github.com/hexops/gotextdiff v1.0.3
github.com/ivanpirog/coloredcobra v1.0.1 github.com/ivanpirog/coloredcobra v1.0.1
github.com/jackc/pgx/v4 v4.18.2 github.com/jackc/pgx/v4 v4.14.1
github.com/jarcoal/httpmock v1.1.0 github.com/jarcoal/httpmock v1.1.0
github.com/jszwec/csvutil v1.5.1 github.com/jszwec/csvutil v1.5.1
github.com/lithammer/dedent v1.1.0 github.com/lithammer/dedent v1.1.0
github.com/mattn/go-isatty v0.0.19
github.com/mattn/go-sqlite3 v1.14.16 github.com/mattn/go-sqlite3 v1.14.16
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826
github.com/nxadm/tail v1.4.8 github.com/nxadm/tail v1.4.8
github.com/oschwald/geoip2-golang v1.9.0 github.com/oschwald/geoip2-golang v1.4.0
github.com/oschwald/maxminddb-golang v1.12.0 github.com/oschwald/maxminddb-golang v1.8.0
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58
github.com/pkg/errors v0.9.1 github.com/pkg/errors v0.9.1
github.com/prometheus/client_golang v1.16.0 github.com/prometheus/client_golang v1.16.0
@ -73,6 +71,7 @@ require (
github.com/prometheus/prom2json v1.3.0 github.com/prometheus/prom2json v1.3.0
github.com/r3labs/diff/v2 v2.14.1 github.com/r3labs/diff/v2 v2.14.1
github.com/sanity-io/litter v1.5.5 github.com/sanity-io/litter v1.5.5
github.com/schollz/progressbar/v3 v3.14.1
github.com/segmentio/kafka-go v0.4.45 github.com/segmentio/kafka-go v0.4.45
github.com/shirou/gopsutil/v3 v3.23.5 github.com/shirou/gopsutil/v3 v3.23.5
github.com/sirupsen/logrus v1.9.3 github.com/sirupsen/logrus v1.9.3
@ -82,12 +81,12 @@ require (
github.com/umahmood/haversine v0.0.0-20151105152445-808ab04add26 github.com/umahmood/haversine v0.0.0-20151105152445-808ab04add26
github.com/wasilibs/go-re2 v1.3.0 github.com/wasilibs/go-re2 v1.3.0
github.com/xhit/go-simple-mail/v2 v2.16.0 github.com/xhit/go-simple-mail/v2 v2.16.0
golang.org/x/crypto v0.22.0 golang.org/x/crypto v0.17.0
golang.org/x/mod v0.11.0 golang.org/x/mod v0.11.0
golang.org/x/sys v0.19.0 golang.org/x/sys v0.15.0
golang.org/x/text v0.14.0 golang.org/x/text v0.14.0
google.golang.org/grpc v1.56.3 google.golang.org/grpc v1.56.3
google.golang.org/protobuf v1.33.0 google.golang.org/protobuf v1.31.0
gopkg.in/natefinch/lumberjack.v2 v2.2.1 gopkg.in/natefinch/lumberjack.v2 v2.2.1
gopkg.in/tomb.v2 v2.0.0-20161208151619-d5d1b5820637 gopkg.in/tomb.v2 v2.0.0-20161208151619-d5d1b5820637
gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v2 v2.4.0
@ -105,6 +104,7 @@ require (
github.com/beorn7/perks v1.0.1 // indirect github.com/beorn7/perks v1.0.1 // indirect
github.com/bytedance/sonic v1.9.1 // indirect github.com/bytedance/sonic v1.9.1 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect
github.com/corazawaf/libinjection-go v0.1.2 // indirect
github.com/coreos/go-systemd/v22 v22.5.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect
github.com/creack/pty v1.1.18 // indirect github.com/creack/pty v1.1.18 // indirect
@ -137,12 +137,12 @@ require (
github.com/imdario/mergo v0.3.12 // indirect github.com/imdario/mergo v0.3.12 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/jackc/chunkreader/v2 v2.0.1 // indirect github.com/jackc/chunkreader/v2 v2.0.1 // indirect
github.com/jackc/pgconn v1.14.3 // indirect github.com/jackc/pgconn v1.10.1 // indirect
github.com/jackc/pgio v1.0.0 // indirect github.com/jackc/pgio v1.0.0 // indirect
github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect
github.com/jackc/pgproto3/v2 v2.3.3 // indirect github.com/jackc/pgproto3/v2 v2.2.0 // indirect
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a // indirect github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect
github.com/jackc/pgtype v1.14.0 // indirect github.com/jackc/pgtype v1.9.1 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/josharian/intern v1.0.0 // indirect github.com/josharian/intern v1.0.0 // indirect
github.com/json-iterator/go v1.1.12 // indirect github.com/json-iterator/go v1.1.12 // indirect
@ -154,9 +154,11 @@ require (
github.com/magefile/mage v1.15.0 // indirect github.com/magefile/mage v1.15.0 // indirect
github.com/mailru/easyjson v0.7.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.13 // indirect github.com/mattn/go-runewidth v0.0.13 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d // indirect
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect
github.com/mitchellh/go-testing-interface v1.0.0 // indirect github.com/mitchellh/go-testing-interface v1.0.0 // indirect
github.com/mitchellh/go-wordwrap v1.0.1 // indirect github.com/mitchellh/go-wordwrap v1.0.1 // indirect
@ -176,7 +178,7 @@ require (
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
github.com/prometheus/common v0.44.0 // indirect github.com/prometheus/common v0.44.0 // indirect
github.com/prometheus/procfs v0.10.1 // indirect github.com/prometheus/procfs v0.10.1 // indirect
github.com/rivo/uniseg v0.2.0 // indirect github.com/rivo/uniseg v0.4.4 // indirect
github.com/robfig/cron/v3 v3.0.1 // indirect github.com/robfig/cron/v3 v3.0.1 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/sergi/go-diff v1.3.1 // indirect github.com/sergi/go-diff v1.3.1 // indirect
@ -198,9 +200,9 @@ require (
github.com/zclconf/go-cty v1.8.0 // indirect github.com/zclconf/go-cty v1.8.0 // indirect
go.mongodb.org/mongo-driver v1.9.4 // indirect go.mongodb.org/mongo-driver v1.9.4 // indirect
golang.org/x/arch v0.3.0 // indirect golang.org/x/arch v0.3.0 // indirect
golang.org/x/net v0.24.0 // indirect golang.org/x/net v0.19.0 // indirect
golang.org/x/sync v0.6.0 // indirect golang.org/x/sync v0.6.0 // indirect
golang.org/x/term v0.19.0 // indirect golang.org/x/term v0.15.0 // indirect
golang.org/x/time v0.3.0 // indirect golang.org/x/time v0.3.0 // indirect
golang.org/x/tools v0.8.1-0.20230428195545-5283a0178901 // indirect golang.org/x/tools v0.8.1-0.20230428195545-5283a0178901 // indirect
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect

80
go.sum
View file

@ -2,8 +2,8 @@ ariga.io/atlas v0.14.1-0.20230918065911-83ad451a4935 h1:JnYs/y8RJ3+MiIUp+3RgyyeO
ariga.io/atlas v0.14.1-0.20230918065911-83ad451a4935/go.mod h1:isZrlzJ5cpoCoKFoY9knZug7Lq4pP1cm8g3XciLZ0Pw= ariga.io/atlas v0.14.1-0.20230918065911-83ad451a4935/go.mod h1:isZrlzJ5cpoCoKFoY9knZug7Lq4pP1cm8g3XciLZ0Pw=
bitbucket.org/creachadair/stringset v0.0.9 h1:L4vld9nzPt90UZNrXjNelTshD74ps4P5NGs3Iq6yN3o= bitbucket.org/creachadair/stringset v0.0.9 h1:L4vld9nzPt90UZNrXjNelTshD74ps4P5NGs3Iq6yN3o=
bitbucket.org/creachadair/stringset v0.0.9/go.mod h1:t+4WcQ4+PXTa8aQdNKe40ZP6iwesoMFWAxPGd3UGjyY= bitbucket.org/creachadair/stringset v0.0.9/go.mod h1:t+4WcQ4+PXTa8aQdNKe40ZP6iwesoMFWAxPGd3UGjyY=
entgo.io/ent v0.12.5 h1:KREM5E4CSoej4zeGa88Ou/gfturAnpUv0mzAjch1sj4= entgo.io/ent v0.12.4 h1:LddPnAyxls/O7DTXZvUGDj0NZIdGSu317+aoNLJWbD8=
entgo.io/ent v0.12.5/go.mod h1:Y3JVAjtlIk8xVZYSn3t3mf8xlZIn5SAOXZQxD6kKI+Q= entgo.io/ent v0.12.4/go.mod h1:Y3JVAjtlIk8xVZYSn3t3mf8xlZIn5SAOXZQxD6kKI+Q=
github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ= github.com/AlecAivazis/survey/v2 v2.3.7 h1:6I/u8FvytdGsgonrYsVn2t8t4QiRnh6QSTqkkhIiSjQ=
github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo= github.com/AlecAivazis/survey/v2 v2.3.7/go.mod h1:xUTIdE4KCOIjsBAE1JYsUPoCqYdZ1reCfTwbto0Fduo=
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8=
@ -102,8 +102,8 @@ github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607 h1:hyrYw3h
github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607/go.mod h1:br36fEqurGYZQGit+iDYsIzW0FF6VufMbDzyyLxEuPA= github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607/go.mod h1:br36fEqurGYZQGit+iDYsIzW0FF6VufMbDzyyLxEuPA=
github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26 h1:r97WNVC30Uen+7WnLs4xDScS/Ex988+id2k6mDf8psU= github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26 h1:r97WNVC30Uen+7WnLs4xDScS/Ex988+id2k6mDf8psU=
github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26/go.mod h1:zpv7r+7KXwgVUZnUNjyP22zc/D7LKjyoY02weH2RBbk= github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26/go.mod h1:zpv7r+7KXwgVUZnUNjyP22zc/D7LKjyoY02weH2RBbk=
github.com/crowdsecurity/go-cs-lib v0.0.10 h1:Twt/y/rYCUspGY1zxDnGurL2svRSREAz+2+puLepd9c= github.com/crowdsecurity/go-cs-lib v0.0.7-0.20240130143103-452318e69ef9 h1:xHCBAGYm34lFc/brTurSYUn6o4yxvOBX8L8HeUuwydA=
github.com/crowdsecurity/go-cs-lib v0.0.10/go.mod h1:8FMKNGsh3hMZi2SEv6P15PURhEJnZV431XjzzBSuf0k= github.com/crowdsecurity/go-cs-lib v0.0.7-0.20240130143103-452318e69ef9/go.mod h1:xA3j72N5Vd+pXKAshebmwNiJn53jOi7CfZhEDrMTJsI=
github.com/crowdsecurity/grokky v0.2.1 h1:t4VYnDlAd0RjDM2SlILalbwfCrQxtJSMGdQOR0zwkE4= github.com/crowdsecurity/grokky v0.2.1 h1:t4VYnDlAd0RjDM2SlILalbwfCrQxtJSMGdQOR0zwkE4=
github.com/crowdsecurity/grokky v0.2.1/go.mod h1:33usDIYzGDsgX1kHAThCbseso6JuWNJXOzRQDGXHtWM= github.com/crowdsecurity/grokky v0.2.1/go.mod h1:33usDIYzGDsgX1kHAThCbseso6JuWNJXOzRQDGXHtWM=
github.com/crowdsecurity/machineid v1.0.2 h1:wpkpsUghJF8Khtmn/tg6GxgdhLA1Xflerh5lirI+bdc= github.com/crowdsecurity/machineid v1.0.2 h1:wpkpsUghJF8Khtmn/tg6GxgdhLA1Xflerh5lirI+bdc=
@ -116,8 +116,8 @@ github.com/dghubble/sling v1.3.0 h1:pZHjCJq4zJvc6qVQ5wN1jo5oNZlNE0+8T/h0XeXBUKU=
github.com/dghubble/sling v1.3.0/go.mod h1:XXShWaBWKzNLhu2OxikSNFrlsvowtz4kyRuXUG7oQKY= github.com/dghubble/sling v1.3.0/go.mod h1:XXShWaBWKzNLhu2OxikSNFrlsvowtz4kyRuXUG7oQKY=
github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= github.com/docker/distribution v2.8.2+incompatible h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8=
github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
github.com/docker/docker v24.0.9+incompatible h1:HPGzNmwfLZWdxHqK9/II92pyi1EpYKsAqcl4G0Of9v0= github.com/docker/docker v24.0.7+incompatible h1:Wo6l37AuwP3JaMnZa226lzVXGA3F9Ig1seQen0cKYlM=
github.com/docker/docker v24.0.9+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker v24.0.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docker/go-units v0.3.3/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
@ -125,8 +125,8 @@ github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDD
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM=
github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE=
github.com/foxcpp/go-mockdns v1.0.0 h1:7jBqxd3WDWwi/6WhDvacvH1XsN3rOLXyHM1uhvIx6FI= github.com/foxcpp/go-mockdns v1.0.0 h1:7jBqxd3WDWwi/6WhDvacvH1XsN3rOLXyHM1uhvIx6FI=
github.com/foxcpp/go-mockdns v1.0.0/go.mod h1:lgRN6+KxQBawyIghpnl5CezHFGS9VLzvtVlwxvzXTQ4= github.com/foxcpp/go-mockdns v1.0.0/go.mod h1:lgRN6+KxQBawyIghpnl5CezHFGS9VLzvtVlwxvzXTQ4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
@ -368,8 +368,8 @@ github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsU
github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o= github.com/jackc/pgconn v1.8.0/go.mod h1:1C2Pb36bGIP9QHGBYCjnyhqu7Rv3sGshaQUvmfGIB/o=
github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY= github.com/jackc/pgconn v1.9.0/go.mod h1:YctiPyvzfU11JFxoXokUOOKQXQmDMoJL9vJzHH8/2JY=
github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI= github.com/jackc/pgconn v1.9.1-0.20210724152538-d89c8390a530/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=
github.com/jackc/pgconn v1.14.3 h1:bVoTr12EGANZz66nZPkMInAV/KHD2TxH9npjXXgiB3w= github.com/jackc/pgconn v1.10.1 h1:DzdIHIjG1AxGwoEEqS+mGsURyjt4enSmqzACXvVzOT8=
github.com/jackc/pgconn v1.14.3/go.mod h1:RZbme4uasqzybK2RK5c65VsHxoyaml09lx3tXOcO/VM= github.com/jackc/pgconn v1.10.1/go.mod h1:4z2w8XhRbP1hYxkpTuBjTS3ne3J48K83+u0zoyvg2pI=
github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE= github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE=
github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8= github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=
github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE= github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2/go.mod h1:fGZlG77KXmcq05nJLRkk0+p82V8B8Dw8KN2/V9c/OAE=
@ -385,26 +385,26 @@ github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvW
github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM= github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= github.com/jackc/pgproto3/v2 v2.0.6/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= github.com/jackc/pgproto3/v2 v2.1.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
github.com/jackc/pgproto3/v2 v2.3.3 h1:1HLSx5H+tXR9pW3in3zaztoEwQYRC9SQaYUHjTSUOag= github.com/jackc/pgproto3/v2 v2.2.0 h1:r7JypeP2D3onoQTCxWdTpCtJ4D+qpKr0TxvoyMhZ5ns=
github.com/jackc/pgproto3/v2 v2.3.3/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA= github.com/jackc/pgproto3/v2 v2.2.0/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b h1:C8S2+VttkHFdOOCXJe+YGfa4vHYwlt4Zx+IVXQ97jYg=
github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E= github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a h1:bbPeKD0xmW/Y25WS6cokEszi5g+S0QxI/d45PkRi7Nk=
github.com/jackc/pgservicefile v0.0.0-20221227161230-091c0ba34f0a/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg= github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc= github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw= github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM= github.com/jackc/pgtype v1.8.1-0.20210724151600-32e20a603178/go.mod h1:C516IlIV9NKqfsMCXTdChteoXmwgUceqaLfjg2e3NlM=
github.com/jackc/pgtype v1.14.0 h1:y+xUdabmyMkJLyApYuPj38mW+aAIqCe5uuBB51rH3Vw= github.com/jackc/pgtype v1.9.1 h1:MJc2s0MFS8C3ok1wQTdQxWuXQcB6+HwAm5x1CzW7mf0=
github.com/jackc/pgtype v1.14.0/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4= github.com/jackc/pgtype v1.9.1/go.mod h1:LUMuVrfsFfdKGLw+AFFVv6KtHOFMwRgDDzBt76IqCA4=
github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y= github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM= github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc= github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=
github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs= github.com/jackc/pgx/v4 v4.12.1-0.20210724153913-640aa07df17c/go.mod h1:1QD0+tgSXP7iUjYm9C1NxKhny7lq6ee99u/z+IHFcgs=
github.com/jackc/pgx/v4 v4.18.2 h1:xVpYkNR5pk5bMCZGfClbO962UIqVABcAGt7ha1s/FeU= github.com/jackc/pgx/v4 v4.14.1 h1:71oo1KAGI6mXhLiTMn6iDFcp3e7+zon/capWjl2OEFU=
github.com/jackc/pgx/v4 v4.18.2/go.mod h1:Ey4Oru5tH5sB6tV7hDmfWFahwF15Eb7DNXlRKx2CkVw= github.com/jackc/pgx/v4 v4.14.1/go.mod h1:RgDuE4Z34o7XE92RpLsvFiOEfrAUT0Xt2KxvX73W06M=
github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.2.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jarcoal/httpmock v1.1.0 h1:F47ChZj1Y2zFsCXxNkBPwNNKnAyOATcdQibk0qEdVCE= github.com/jarcoal/httpmock v1.1.0 h1:F47ChZj1Y2zFsCXxNkBPwNNKnAyOATcdQibk0qEdVCE=
github.com/jarcoal/httpmock v1.1.0/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik= github.com/jarcoal/httpmock v1.1.0/go.mod h1:ATjnClrvW/3tijVmpL/va5Z3aAyGvqU3gCT8nX0Txik=
github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE= github.com/jhump/protoreflect v1.6.0 h1:h5jfMVslIg6l29nsMs0D8Wj17RDVdNYti0vDN/PZZoE=
@ -423,6 +423,7 @@ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHm
github.com/jszwec/csvutil v1.5.1 h1:c3GFBhj6DFMUl4dMK3+B6rz2+LWWS/e9VJiVJ9t9kfQ= github.com/jszwec/csvutil v1.5.1 h1:c3GFBhj6DFMUl4dMK3+B6rz2+LWWS/e9VJiVJ9t9kfQ=
github.com/jszwec/csvutil v1.5.1/go.mod h1:Rpu7Uu9giO9subDyMCIQfHVDuLrcaC36UA4YcJjGBkg= github.com/jszwec/csvutil v1.5.1/go.mod h1:Rpu7Uu9giO9subDyMCIQfHVDuLrcaC36UA4YcJjGBkg=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/k0kubun/go-ansi v0.0.0-20180517002512-3bf9e2903213/go.mod h1:vNUNkEQ1e29fT/6vq2aBdFsgNPmy8qMdSay1npru+Sw=
github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4= github.com/karrick/godirwalk v1.8.0/go.mod h1:H5KPZjojv4lE+QYImBI8xVtrBRgYrIVsaRPx4tDPEn4=
github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA= github.com/karrick/godirwalk v1.10.3/go.mod h1:RoGL9dQei4vP9ilrpETWE8CLOZ1kiN0LhBygSwrAsHA=
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=
@ -490,8 +491,8 @@ github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU=
github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y= github.com/mattn/go-sqlite3 v1.14.16 h1:yOQRA0RpS5PFz/oikGwBEqvAWhWg5ufRz4ETLjwpU1Y=
@ -504,6 +505,8 @@ github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d h1:5PJl274Y63IEHC+7izoQ
github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE= github.com/mgutz/ansi v0.0.0-20200706080929-d51e80ef957d/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
github.com/miekg/dns v1.1.50 h1:DQUfb9uc6smULcREF09Uc+/Gd46YWqJd5DbpPE9xkcA= github.com/miekg/dns v1.1.50 h1:DQUfb9uc6smULcREF09Uc+/Gd46YWqJd5DbpPE9xkcA=
github.com/miekg/dns v1.1.50/go.mod h1:e3IlAVfNqAllflbibAZEWOXOQ+Ynzk/dDozDxY7XnME= github.com/miekg/dns v1.1.50/go.mod h1:e3IlAVfNqAllflbibAZEWOXOQ+Ynzk/dDozDxY7XnME=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db h1:62I3jR2EmQ4l5rM/4FEfDWcRD+abF5XlKShorW5LRoQ=
github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw=
github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw=
github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s=
@ -544,10 +547,11 @@ github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799 h1:rc3tiVYb5z54aKaDfakKn0dDjIyPpTtszkjuMzyt7ec= github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799 h1:rc3tiVYb5z54aKaDfakKn0dDjIyPpTtszkjuMzyt7ec=
github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0=
github.com/oschwald/geoip2-golang v1.9.0 h1:uvD3O6fXAXs+usU+UGExshpdP13GAqp4GBrzN7IgKZc= github.com/oschwald/geoip2-golang v1.4.0 h1:5RlrjCgRyIGDz/mBmPfnAF4h8k0IAcRv9PvrpOfz+Ug=
github.com/oschwald/geoip2-golang v1.9.0/go.mod h1:BHK6TvDyATVQhKNbQBdrj9eAvuwOMi2zSFXizL3K81Y= github.com/oschwald/geoip2-golang v1.4.0/go.mod h1:8QwxJvRImBH+Zl6Aa6MaIcs5YdlZSTKtzmPGzQqi9ng=
github.com/oschwald/maxminddb-golang v1.12.0 h1:9FnTOD0YOhP7DGxGsq4glzpGy5+w7pq50AS6wALUMYs= github.com/oschwald/maxminddb-golang v1.6.0/go.mod h1:DUJFucBg2cvqx42YmDa/+xHvb0elJtOm3o4aFQ/nb/w=
github.com/oschwald/maxminddb-golang v1.12.0/go.mod h1:q0Nob5lTCqyQ8WT6FYgS1L7PXKVVbgiymefNwIjPzgY= github.com/oschwald/maxminddb-golang v1.8.0 h1:Uh/DSnGoxsyp/KYbY1AuP0tYEwfs0sCph9p/UMXK/Hk=
github.com/oschwald/maxminddb-golang v1.8.0/go.mod h1:RXZtst0N6+FY/3qCNmZMBApR19cdQj43/NM9VkrNAis=
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 h1:onHthvaw9LFnH4t2DcNVpwGmV9E1BkGknEliJkfwQj0=
github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y= github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58/go.mod h1:DXv8WO4yhMYhSNPKjeNKa5WY9YCIEBRbNzFFPJbWO6Y=
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
@ -590,8 +594,9 @@ github.com/prometheus/prom2json v1.3.0 h1:BlqrtbT9lLH3ZsOVhXPsHzFrApCTKRifB7gjJu
github.com/prometheus/prom2json v1.3.0/go.mod h1:rMN7m0ApCowcoDlypBHlkNbp5eJQf/+1isKykIP5ZnM= github.com/prometheus/prom2json v1.3.0/go.mod h1:rMN7m0ApCowcoDlypBHlkNbp5eJQf/+1isKykIP5ZnM=
github.com/r3labs/diff/v2 v2.14.1 h1:wRZ3jB44Ny50DSXsoIcFQ27l2x+n5P31K/Pk+b9B0Ic= github.com/r3labs/diff/v2 v2.14.1 h1:wRZ3jB44Ny50DSXsoIcFQ27l2x+n5P31K/Pk+b9B0Ic=
github.com/r3labs/diff/v2 v2.14.1/go.mod h1:I8noH9Fc2fjSaMxqF3G2lhDdC0b+JXCfyx85tWFM9kc= github.com/r3labs/diff/v2 v2.14.1/go.mod h1:I8noH9Fc2fjSaMxqF3G2lhDdC0b+JXCfyx85tWFM9kc=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis=
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs= github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro= github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
@ -607,6 +612,8 @@ github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD
github.com/sanity-io/litter v1.5.5 h1:iE+sBxPBzoK6uaEP5Lt3fHNgpKcHXc/A2HGETy0uJQo= github.com/sanity-io/litter v1.5.5 h1:iE+sBxPBzoK6uaEP5Lt3fHNgpKcHXc/A2HGETy0uJQo=
github.com/sanity-io/litter v1.5.5/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U= github.com/sanity-io/litter v1.5.5/go.mod h1:9gzJgR2i4ZpjZHsKvUXIRQVk7P+yM3e+jAF7bU2UI5U=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/schollz/progressbar/v3 v3.14.1 h1:VD+MJPCr4s3wdhTc7OEJ/Z3dAeBzJ7yKH/P4lC5yRTI=
github.com/schollz/progressbar/v3 v3.14.1/go.mod h1:Zc9xXneTzWXF81TGoqL71u0sBPjULtEHYtj/WVgVy8E=
github.com/segmentio/kafka-go v0.4.45 h1:prqrZp1mMId4kI6pyPolkLsH6sWOUmDxmmucbL4WS6E= github.com/segmentio/kafka-go v0.4.45 h1:prqrZp1mMId4kI6pyPolkLsH6sWOUmDxmmucbL4WS6E=
github.com/segmentio/kafka-go v0.4.45/go.mod h1:HjF6XbOKh0Pjlkr5GVZxt6CsjjwnmhVOfURM5KMd8qg= github.com/segmentio/kafka-go v0.4.45/go.mod h1:HjF6XbOKh0Pjlkr5GVZxt6CsjjwnmhVOfURM5KMd8qg=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
@ -757,8 +764,8 @@ golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
golang.org/x/crypto v0.22.0 h1:g1v0xeRhjcugydODzvb3mEM9SQ0HGp9s/nh3COQ/C30= golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
golang.org/x/crypto v0.22.0/go.mod h1:vr6Su+7cTlO45qkww3VDJlzDn0ctJvRgYbC2NvXHt+M= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
@ -791,8 +798,8 @@ golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.24.0 h1:1PcaxkF854Fu3+lvBIx5SYn9wRlBzzcnHZSiaFFAb0w= golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c=
golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8= golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -820,6 +827,7 @@ golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191220142924-d4481acd189f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191224085550-c709ea063b76/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -841,8 +849,9 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.19.0 h1:q5f1RH2jigJ1MoAWp2KTp3gm5zAGFUTarQZ5U386+4o= golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc=
golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@ -850,8 +859,9 @@ golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
golang.org/x/term v0.19.0 h1:+ThwsDv+tYfnJFhF4L8jITxu1tdTWRTZpdsWgEgjL6Q= golang.org/x/term v0.14.0/go.mod h1:TySc+nGkYR6qt8km8wUhuFRTVSMIX3XPR58y2lC8vww=
golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk= golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4=
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
@ -908,8 +918,8 @@ google.golang.org/grpc v1.56.3 h1:8I4C0Yq1EjstUzUJzpcRVbuYA2mODtEmpWiQoN/b2nc=
google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= google.golang.org/grpc v1.56.3/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=

View file

@ -15,6 +15,4 @@ if ($version.Contains("-"))
Set-Location .\windows\Chocolatey\crowdsec Set-Location .\windows\Chocolatey\crowdsec
Copy-Item ..\..\..\crowdsec_$version.msi tools\crowdsec.msi Copy-Item ..\..\..\crowdsec_$version.msi tools\crowdsec.msi
choco pack --version $version choco pack --version $version
Copy-Item crowdsec.$version.nupkg ..\..\..\

View file

@ -1,7 +1,7 @@
param ( param (
$version $version
) )
$env:Path += ";C:\Program Files (x86)\WiX Toolset v3.14\bin" $env:Path += ";C:\Program Files (x86)\WiX Toolset v3.11\bin"
if ($version.StartsWith("v")) if ($version.StartsWith("v"))
{ {
$version = $version.Substring(1) $version = $version.Substring(1)

View file

@ -54,7 +54,7 @@ type DataSource interface {
GetMetrics() []prometheus.Collector // Returns pointers to metrics that are managed by the module GetMetrics() []prometheus.Collector // Returns pointers to metrics that are managed by the module
GetAggregMetrics() []prometheus.Collector // Returns pointers to metrics that are managed by the module (aggregated mode, limits cardinality) GetAggregMetrics() []prometheus.Collector // Returns pointers to metrics that are managed by the module (aggregated mode, limits cardinality)
UnmarshalConfig([]byte) error // Decode and pre-validate the YAML datasource - anything that can be checked before runtime UnmarshalConfig([]byte) error // Decode and pre-validate the YAML datasource - anything that can be checked before runtime
Configure([]byte, *log.Entry, int) error // Complete the YAML datasource configuration and perform runtime checks. Configure([]byte, *log.Entry) error // Complete the YAML datasource configuration and perform runtime checks.
ConfigureByDSN(string, map[string]string, *log.Entry, string) error // Configure the datasource ConfigureByDSN(string, map[string]string, *log.Entry, string) error // Configure the datasource
GetMode() string // Get the mode (TAIL, CAT or SERVER) GetMode() string // Get the mode (TAIL, CAT or SERVER)
GetName() string // Get the name of the module GetName() string // Get the name of the module
@ -94,7 +94,7 @@ func GetDataSourceIface(dataSourceType string) DataSource {
// if the configuration is not valid it returns an error. // if the configuration is not valid it returns an error.
// If the datasource can't be run (eg. journalctl not available), it still returns an error which // If the datasource can't be run (eg. journalctl not available), it still returns an error which
// can be checked for the appropriate action. // can be checked for the appropriate action.
func DataSourceConfigure(commonConfig configuration.DataSourceCommonCfg, metricsLevel int) (*DataSource, error) { func DataSourceConfigure(commonConfig configuration.DataSourceCommonCfg) (*DataSource, error) {
// we dump it back to []byte, because we want to decode the yaml blob twice: // we dump it back to []byte, because we want to decode the yaml blob twice:
// once to DataSourceCommonCfg, and then later to the dedicated type of the datasource // once to DataSourceCommonCfg, and then later to the dedicated type of the datasource
yamlConfig, err := yaml.Marshal(commonConfig) yamlConfig, err := yaml.Marshal(commonConfig)
@ -122,7 +122,7 @@ func DataSourceConfigure(commonConfig configuration.DataSourceCommonCfg, metrics
return nil, &DataSourceUnavailableError{Name: commonConfig.Source, Err: err} return nil, &DataSourceUnavailableError{Name: commonConfig.Source, Err: err}
} }
/* configure the actual datasource */ /* configure the actual datasource */
if err := dataSrc.Configure(yamlConfig, subLogger, metricsLevel); err != nil { if err := dataSrc.Configure(yamlConfig, subLogger); err != nil {
return nil, fmt.Errorf("failed to configure datasource %s: %w", commonConfig.Source, err) return nil, fmt.Errorf("failed to configure datasource %s: %w", commonConfig.Source, err)
} }
@ -180,30 +180,10 @@ func LoadAcquisitionFromDSN(dsn string, labels map[string]string, transformExpr
return sources, nil return sources, nil
} }
func GetMetricsLevelFromPromCfg(prom *csconfig.PrometheusCfg) int {
if prom == nil {
return configuration.METRICS_FULL
}
if !prom.Enabled {
return configuration.METRICS_NONE
}
if prom.Level == configuration.CFG_METRICS_AGGREGATE {
return configuration.METRICS_AGGREGATE
}
if prom.Level == configuration.CFG_METRICS_FULL {
return configuration.METRICS_FULL
}
return configuration.METRICS_FULL
}
// LoadAcquisitionFromFile unmarshals the configuration item and checks its availability // LoadAcquisitionFromFile unmarshals the configuration item and checks its availability
func LoadAcquisitionFromFile(config *csconfig.CrowdsecServiceCfg, prom *csconfig.PrometheusCfg) ([]DataSource, error) { func LoadAcquisitionFromFile(config *csconfig.CrowdsecServiceCfg) ([]DataSource, error) {
var sources []DataSource var sources []DataSource
metrics_level := GetMetricsLevelFromPromCfg(prom)
for _, acquisFile := range config.AcquisitionFiles { for _, acquisFile := range config.AcquisitionFiles {
log.Infof("loading acquisition file : %s", acquisFile) log.Infof("loading acquisition file : %s", acquisFile)
yamlFile, err := os.Open(acquisFile) yamlFile, err := os.Open(acquisFile)
@ -245,7 +225,7 @@ func LoadAcquisitionFromFile(config *csconfig.CrowdsecServiceCfg, prom *csconfig
} }
uniqueId := uuid.NewString() uniqueId := uuid.NewString()
sub.UniqueId = uniqueId sub.UniqueId = uniqueId
src, err := DataSourceConfigure(sub, metrics_level) src, err := DataSourceConfigure(sub)
if err != nil { if err != nil {
var dserr *DataSourceUnavailableError var dserr *DataSourceUnavailableError
if errors.As(err, &dserr) { if errors.As(err, &dserr) {
@ -369,7 +349,7 @@ func StartAcquisition(sources []DataSource, output chan types.Event, AcquisTomb
if subsrc.GetMode() == configuration.TAIL_MODE { if subsrc.GetMode() == configuration.TAIL_MODE {
err = subsrc.StreamingAcquisition(outChan, AcquisTomb) err = subsrc.StreamingAcquisition(outChan, AcquisTomb)
} else { } else {
err = subsrc.OneShotAcquisition(outChan, AcquisTomb) err = subsrc.OneShotAcquisition(injectProgressBar(outChan, AcquisTomb), AcquisTomb)
} }
if err != nil { if err != nil {
//if one of the acqusition returns an error, we kill the others to properly shutdown //if one of the acqusition returns an error, we kill the others to properly shutdown

View file

@ -35,7 +35,7 @@ func (f *MockSource) UnmarshalConfig(cfg []byte) error {
return nil return nil
} }
func (f *MockSource) Configure(cfg []byte, logger *log.Entry, metricsLevel int) error { func (f *MockSource) Configure(cfg []byte, logger *log.Entry) error {
f.logger = logger f.logger = logger
if err := f.UnmarshalConfig(cfg); err != nil { if err := f.UnmarshalConfig(cfg); err != nil {
return err return err
@ -182,7 +182,7 @@ wowo: ajsajasjas
t.Run(tc.TestName, func(t *testing.T) { t.Run(tc.TestName, func(t *testing.T) {
common := configuration.DataSourceCommonCfg{} common := configuration.DataSourceCommonCfg{}
yaml.Unmarshal([]byte(tc.String), &common) yaml.Unmarshal([]byte(tc.String), &common)
ds, err := DataSourceConfigure(common, configuration.METRICS_NONE) ds, err := DataSourceConfigure(common)
cstest.RequireErrorContains(t, err, tc.ExpectedError) cstest.RequireErrorContains(t, err, tc.ExpectedError)
if tc.ExpectedError != "" { if tc.ExpectedError != "" {
return return
@ -283,7 +283,7 @@ func TestLoadAcquisitionFromFile(t *testing.T) {
for _, tc := range tests { for _, tc := range tests {
tc := tc tc := tc
t.Run(tc.TestName, func(t *testing.T) { t.Run(tc.TestName, func(t *testing.T) {
dss, err := LoadAcquisitionFromFile(&tc.Config, nil) dss, err := LoadAcquisitionFromFile(&tc.Config)
cstest.RequireErrorContains(t, err, tc.ExpectedError) cstest.RequireErrorContains(t, err, tc.ExpectedError)
if tc.ExpectedError != "" { if tc.ExpectedError != "" {
return return
@ -305,7 +305,7 @@ type MockCat struct {
logger *log.Entry logger *log.Entry
} }
func (f *MockCat) Configure(cfg []byte, logger *log.Entry, metricsLevel int) error { func (f *MockCat) Configure(cfg []byte, logger *log.Entry) error {
f.logger = logger f.logger = logger
if f.Mode == "" { if f.Mode == "" {
f.Mode = configuration.CAT_MODE f.Mode = configuration.CAT_MODE
@ -349,7 +349,7 @@ type MockTail struct {
logger *log.Entry logger *log.Entry
} }
func (f *MockTail) Configure(cfg []byte, logger *log.Entry, metricsLevel int) error { func (f *MockTail) Configure(cfg []byte, logger *log.Entry) error {
f.logger = logger f.logger = logger
if f.Mode == "" { if f.Mode == "" {
f.Mode = configuration.TAIL_MODE f.Mode = configuration.TAIL_MODE
@ -497,10 +497,8 @@ type MockSourceByDSN struct {
logger *log.Entry //nolint: unused logger *log.Entry //nolint: unused
} }
func (f *MockSourceByDSN) UnmarshalConfig(cfg []byte) error { return nil } func (f *MockSourceByDSN) UnmarshalConfig(cfg []byte) error { return nil }
func (f *MockSourceByDSN) Configure(cfg []byte, logger *log.Entry, metricsLevel int) error { func (f *MockSourceByDSN) Configure(cfg []byte, logger *log.Entry) error { return nil }
return nil
}
func (f *MockSourceByDSN) GetMode() string { return f.Mode } func (f *MockSourceByDSN) GetMode() string { return f.Mode }
func (f *MockSourceByDSN) OneShotAcquisition(chan types.Event, *tomb.Tomb) error { return nil } func (f *MockSourceByDSN) OneShotAcquisition(chan types.Event, *tomb.Tomb) error { return nil }
func (f *MockSourceByDSN) StreamingAcquisition(chan types.Event, *tomb.Tomb) error { return nil } func (f *MockSourceByDSN) StreamingAcquisition(chan types.Event, *tomb.Tomb) error { return nil }

View file

@ -19,14 +19,3 @@ type DataSourceCommonCfg struct {
var TAIL_MODE = "tail" var TAIL_MODE = "tail"
var CAT_MODE = "cat" var CAT_MODE = "cat"
var SERVER_MODE = "server" // No difference with tail, just a bit more verbose var SERVER_MODE = "server" // No difference with tail, just a bit more verbose
const (
METRICS_NONE = iota
METRICS_AGGREGATE
METRICS_FULL
)
const (
CFG_METRICS_AGGREGATE = "aggregated"
CFG_METRICS_FULL = "full"
)

View file

@ -49,7 +49,6 @@ type AppsecSourceConfig struct {
// runtime structure of AppsecSourceConfig // runtime structure of AppsecSourceConfig
type AppsecSource struct { type AppsecSource struct {
metricsLevel int
config AppsecSourceConfig config AppsecSourceConfig
logger *log.Entry logger *log.Entry
mux *http.ServeMux mux *http.ServeMux
@ -150,13 +149,13 @@ func (w *AppsecSource) GetAggregMetrics() []prometheus.Collector {
return []prometheus.Collector{AppsecReqCounter, AppsecBlockCounter, AppsecRuleHits, AppsecOutbandParsingHistogram, AppsecInbandParsingHistogram, AppsecGlobalParsingHistogram} return []prometheus.Collector{AppsecReqCounter, AppsecBlockCounter, AppsecRuleHits, AppsecOutbandParsingHistogram, AppsecInbandParsingHistogram, AppsecGlobalParsingHistogram}
} }
func (w *AppsecSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error { func (w *AppsecSource) Configure(yamlConfig []byte, logger *log.Entry) error {
err := w.UnmarshalConfig(yamlConfig) err := w.UnmarshalConfig(yamlConfig)
if err != nil { if err != nil {
return errors.Wrap(err, "unable to parse appsec configuration") return errors.Wrap(err, "unable to parse appsec configuration")
} }
w.logger = logger w.logger = logger
w.metricsLevel = MetricsLevel
w.logger.Tracef("Appsec configuration: %+v", w.config) w.logger.Tracef("Appsec configuration: %+v", w.config)
if w.config.AuthCacheDuration == nil { if w.config.AuthCacheDuration == nil {

View file

@ -1,714 +0,0 @@
package appsecacquisition
import (
"net/http"
"net/url"
"testing"
"github.com/crowdsecurity/crowdsec/pkg/appsec"
"github.com/crowdsecurity/crowdsec/pkg/appsec/appsec_rule"
"github.com/crowdsecurity/crowdsec/pkg/types"
"github.com/davecgh/go-spew/spew"
log "github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
)
// TestAppsecOnMatchHooks exercises the on_match hook pipeline of the appsec
// datasource. Each table entry loads one in-band rule matching "^toto" on
// ARGS["foo"], replays a single request that triggers it, and asserts how the
// configured on_match expressions alter the emitted events and the responses
// (HTTP return codes, remediation action, alert/event cancellation).
// Execution is delegated to loadAppSecEngine (defined elsewhere in this
// package), which invokes output_asserts with the collected results.
func TestAppsecOnMatchHooks(t *testing.T) {
	tests := []appsecRuleTest{
		{
			// Baseline: no on_match hook configured. Expect the default
			// remediation (ban) and 403 for both bouncer- and user-facing codes.
			name:             "no rule : check return code",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// One APPSEC event plus one LOG event per matched request.
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Len(t, responses, 1)
				require.Equal(t, 403, responses[0].BouncerHTTPResponseCode)
				require.Equal(t, 403, responses[0].UserHTTPResponseCode)
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
			},
		},
		{
			// SetReturnCode only changes the user-facing code; the bouncer
			// code stays at the 403 default.
			name:             "on_match: change return code",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetReturnCode(413)"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Len(t, responses, 1)
				require.Equal(t, 403, responses[0].BouncerHTTPResponseCode)
				require.Equal(t, 413, responses[0].UserHTTPResponseCode)
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
			},
		},
		{
			// A non-standard remediation string is passed through verbatim
			// as the action; the response codes keep their 403 defaults.
			name:             "on_match: change action to a non standard one (log)",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetRemediation('log')"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Len(t, responses, 1)
				require.Equal(t, "log", responses[0].Action)
				require.Equal(t, 403, responses[0].BouncerHTTPResponseCode)
				require.Equal(t, 403, responses[0].UserHTTPResponseCode)
			},
		},
		{
			// Standard remediation override: allow.
			name:             "on_match: change action to another standard one (allow)",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetRemediation('allow')"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Len(t, responses, 1)
				require.Equal(t, appsec.AllowRemediation, responses[0].Action)
			},
		},
		{
			// Standard remediation override: ban.
			name:             "on_match: change action to another standard one (ban)",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetRemediation('ban')"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, responses, 1)
				//note: SetAction normalizes deny, ban and block to ban
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
			},
		},
		{
			// Standard remediation override: captcha.
			name:             "on_match: change action to another standard one (captcha)",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetRemediation('captcha')"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, responses, 1)
				//note: SetAction normalizes deny, ban and block to ban
				require.Equal(t, appsec.CaptchaRemediation, responses[0].Action)
			},
		},
		{
			// Arbitrary (non-standard) remediation strings are accepted as-is.
			name:             "on_match: change action to a non standard one",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetRemediation('foobar')"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Len(t, responses, 1)
				require.Equal(t, "foobar", responses[0].Action)
			},
		},
		{
			// CancelAlert() suppresses the APPSEC event but keeps the LOG
			// event and the (ban) response.
			name:             "on_match: cancel alert",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule42",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true && LogInfo('XX -> %s', evt.Appsec.MatchedRules.GetName())", Apply: []string{"CancelAlert()"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 1)
				require.Equal(t, types.LOG, events[0].Type)
				require.Len(t, responses, 1)
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
			},
		},
		{
			// CancelEvent() suppresses the LOG event but keeps the APPSEC
			// event and the (ban) response.
			name:             "on_match: cancel event",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule42",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"CancelEvent()"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 1)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Len(t, responses, 1)
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
			},
		},
	}

	// Run each scenario as its own subtest, named after the table entry.
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			loadAppSecEngine(test, t)
		})
	}
}
func TestAppsecPreEvalHooks(t *testing.T) {
tests := []appsecRuleTest{
{
name: "Basic on_load hook to disable inband rule",
expected_load_ok: true,
inband_rules: []appsec_rule.CustomRule{
{
Name: "rule1",
Zones: []string{"ARGS"},
Variables: []string{"foo"},
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
Transform: []string{"lowercase"},
},
},
pre_eval: []appsec.Hook{
{Filter: "1 == 1", Apply: []string{"RemoveInBandRuleByName('rule1')"}},
},
input_request: appsec.ParsedRequest{
RemoteAddr: "1.2.3.4",
Method: "GET",
URI: "/urllll",
Args: url.Values{"foo": []string{"toto"}},
},
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
require.Empty(t, events)
require.Len(t, responses, 1)
require.False(t, responses[0].InBandInterrupt)
require.False(t, responses[0].OutOfBandInterrupt)
},
},
{
name: "Basic on_load fails to disable rule",
expected_load_ok: true,
inband_rules: []appsec_rule.CustomRule{
{
Name: "rule1",
Zones: []string{"ARGS"},
Variables: []string{"foo"},
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
Transform: []string{"lowercase"},
},
},
pre_eval: []appsec.Hook{
{Filter: "1 ==2", Apply: []string{"RemoveInBandRuleByName('rule1')"}},
},
input_request: appsec.ParsedRequest{
RemoteAddr: "1.2.3.4",
Method: "GET",
URI: "/urllll",
Args: url.Values{"foo": []string{"toto"}},
},
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
require.Len(t, events, 2)
require.Equal(t, types.APPSEC, events[0].Type)
require.Equal(t, types.LOG, events[1].Type)
require.True(t, events[1].Appsec.HasInBandMatches)
require.Len(t, events[1].Appsec.MatchedRules, 1)
require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
require.Len(t, responses, 1)
require.True(t, responses[0].InBandInterrupt)
},
},
{
name: "on_load : disable inband by tag",
expected_load_ok: true,
inband_rules: []appsec_rule.CustomRule{
{
Name: "rulez",
Zones: []string{"ARGS"},
Variables: []string{"foo"},
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
Transform: []string{"lowercase"},
},
},
pre_eval: []appsec.Hook{
{Apply: []string{"RemoveInBandRuleByTag('crowdsec-rulez')"}},
},
input_request: appsec.ParsedRequest{
RemoteAddr: "1.2.3.4",
Method: "GET",
URI: "/urllll",
Args: url.Values{"foo": []string{"toto"}},
},
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
require.Empty(t, events)
require.Len(t, responses, 1)
require.False(t, responses[0].InBandInterrupt)
require.False(t, responses[0].OutOfBandInterrupt)
},
},
{
name: "on_load : disable inband by ID",
expected_load_ok: true,
inband_rules: []appsec_rule.CustomRule{
{
Name: "rulez",
Zones: []string{"ARGS"},
Variables: []string{"foo"},
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
Transform: []string{"lowercase"},
},
},
pre_eval: []appsec.Hook{
{Apply: []string{"RemoveInBandRuleByID(1516470898)"}}, //rule ID is generated at runtime. If you change rule, it will break the test (:
},
input_request: appsec.ParsedRequest{
RemoteAddr: "1.2.3.4",
Method: "GET",
URI: "/urllll",
Args: url.Values{"foo": []string{"toto"}},
},
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
require.Empty(t, events)
require.Len(t, responses, 1)
require.False(t, responses[0].InBandInterrupt)
require.False(t, responses[0].OutOfBandInterrupt)
},
},
{
name: "on_load : disable inband by name",
expected_load_ok: true,
inband_rules: []appsec_rule.CustomRule{
{
Name: "rulez",
Zones: []string{"ARGS"},
Variables: []string{"foo"},
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
Transform: []string{"lowercase"},
},
},
pre_eval: []appsec.Hook{
{Apply: []string{"RemoveInBandRuleByName('rulez')"}},
},
input_request: appsec.ParsedRequest{
RemoteAddr: "1.2.3.4",
Method: "GET",
URI: "/urllll",
Args: url.Values{"foo": []string{"toto"}},
},
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
require.Empty(t, events)
require.Len(t, responses, 1)
require.False(t, responses[0].InBandInterrupt)
require.False(t, responses[0].OutOfBandInterrupt)
},
},
{
name: "on_load : outofband default behavior",
expected_load_ok: true,
outofband_rules: []appsec_rule.CustomRule{
{
Name: "rulez",
Zones: []string{"ARGS"},
Variables: []string{"foo"},
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
Transform: []string{"lowercase"},
},
},
input_request: appsec.ParsedRequest{
RemoteAddr: "1.2.3.4",
Method: "GET",
URI: "/urllll",
Args: url.Values{"foo": []string{"toto"}},
},
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
require.Len(t, events, 1)
require.Equal(t, types.LOG, events[0].Type)
require.True(t, events[0].Appsec.HasOutBandMatches)
require.False(t, events[0].Appsec.HasInBandMatches)
require.Len(t, events[0].Appsec.MatchedRules, 1)
require.Equal(t, "rulez", events[0].Appsec.MatchedRules[0]["msg"])
//maybe surprising, but response won't mention OOB event, as it's sent as soon as the inband phase is over.
require.Len(t, responses, 1)
require.False(t, responses[0].InBandInterrupt)
require.False(t, responses[0].OutOfBandInterrupt)
},
},
{
name: "on_load : set remediation by tag",
expected_load_ok: true,
inband_rules: []appsec_rule.CustomRule{
{
Name: "rulez",
Zones: []string{"ARGS"},
Variables: []string{"foo"},
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
Transform: []string{"lowercase"},
},
},
pre_eval: []appsec.Hook{
{Apply: []string{"SetRemediationByTag('crowdsec-rulez', 'foobar')"}}, //rule ID is generated at runtime. If you change rule, it will break the test (:
},
input_request: appsec.ParsedRequest{
RemoteAddr: "1.2.3.4",
Method: "GET",
URI: "/urllll",
Args: url.Values{"foo": []string{"toto"}},
},
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
require.Len(t, events, 2)
require.Len(t, responses, 1)
require.Equal(t, "foobar", responses[0].Action)
},
},
{
name: "on_load : set remediation by name",
expected_load_ok: true,
inband_rules: []appsec_rule.CustomRule{
{
Name: "rulez",
Zones: []string{"ARGS"},
Variables: []string{"foo"},
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
Transform: []string{"lowercase"},
},
},
pre_eval: []appsec.Hook{
{Apply: []string{"SetRemediationByName('rulez', 'foobar')"}}, //rule ID is generated at runtime. If you change rule, it will break the test (:
},
input_request: appsec.ParsedRequest{
RemoteAddr: "1.2.3.4",
Method: "GET",
URI: "/urllll",
Args: url.Values{"foo": []string{"toto"}},
},
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
require.Len(t, events, 2)
require.Len(t, responses, 1)
require.Equal(t, "foobar", responses[0].Action)
},
},
{
name: "on_load : set remediation by ID",
expected_load_ok: true,
inband_rules: []appsec_rule.CustomRule{
{
Name: "rulez",
Zones: []string{"ARGS"},
Variables: []string{"foo"},
Match: appsec_rule.Match{Type: "regex", Value: "^toto"},
Transform: []string{"lowercase"},
},
},
pre_eval: []appsec.Hook{
{Apply: []string{"SetRemediationByID(1516470898, 'foobar')"}}, //rule ID is generated at runtime. If you change rule, it will break the test (:
},
input_request: appsec.ParsedRequest{
RemoteAddr: "1.2.3.4",
Method: "GET",
URI: "/urllll",
Args: url.Values{"foo": []string{"toto"}},
},
output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
require.Len(t, events, 2)
require.Len(t, responses, 1)
require.Equal(t, "foobar", responses[0].Action)
require.Equal(t, "foobar", appsecResponse.Action)
require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
},
},
}
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
loadAppSecEngine(test, t)
})
}
}
// TestAppsecRemediationConfigHooks verifies that on_match hooks can override
// the remediation decision: without a hook the default ban/403 applies,
// SetRemediation swaps the action, and SetReturnCode swaps the user-facing
// HTTP status while keeping the ban action.
func TestAppsecRemediationConfigHooks(t *testing.T) {
	tests := []appsecRuleTest{
		{
			name:             "Basic matching rule",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// No hook configured: default remediation is ban with a 403.
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "SetRemediation",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			on_match: []appsec.Hook{{Apply: []string{"SetRemediation('captcha')"}}},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// The hook rewrites the action to captcha; the status code stays 403.
				require.Equal(t, appsec.CaptchaRemediation, responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, appsec.CaptchaRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
			},
		},
		{
			// Was a duplicate "SetRemediation"; this case actually exercises SetReturnCode.
			name:             "SetReturnCode",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			on_match: []appsec.Hook{{Apply: []string{"SetReturnCode(418)"}}},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// The action remains ban, but the user-facing status becomes 418.
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			loadAppSecEngine(test, t)
		})
	}
}
// TestOnMatchRemediationHooks checks that on_match hooks gated on the
// "IsInBand" filter can rewrite both the remediation action and the
// user-facing HTTP status code after an in-band rule has matched.
func TestOnMatchRemediationHooks(t *testing.T) {
	tests := []appsecRuleTest{
		{
			name:             "set remediation to allow with on_match hook",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetRemediation('allow')"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// The hook turns the matched request into an allow with a 200.
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "set remediation to captcha + custom user code with on_match hook",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			DefaultRemediation: appsec.AllowRemediation,
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetRemediation('captcha')", "SetReturnCode(418)"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// NOTE(review): the dumps below look like leftover debug output;
				// kept as-is to preserve behavior — consider removing them.
				spew.Dump(responses)
				spew.Dump(appsecResponse)
				log.Errorf("http status : %d", statusCode)
				require.Equal(t, appsec.CaptchaRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
				require.Equal(t, http.StatusForbidden, statusCode)
			},
		},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			loadAppSecEngine(tc, t)
		})
	}
}

View file

@ -1,74 +0,0 @@
//go:build !windows
// +build !windows
package appsecacquisition
import (
"testing"
"github.com/crowdsecurity/crowdsec/pkg/appsec"
"github.com/crowdsecurity/crowdsec/pkg/appsec/appsec_rule"
"github.com/crowdsecurity/crowdsec/pkg/types"
log "github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
)
// TestAppsecRuleTransformsOthers exercises the "normalizepath" transform:
// path traversal sequences and redundant separators in an argument value are
// normalized before the rule match is evaluated.
func TestAppsecRuleTransformsOthers(t *testing.T) {
	log.SetLevel(log.TraceLevel)

	tests := []appsecRuleTest{
		{
			name:             "normalizepath",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "equals", Value: "b/c"}, Transform: []string{"normalizepath"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/?foo=a/../b/c",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// "a/../b/c" normalizes to "b/c" and matches rule1.
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name:             "normalizepath #2",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "equals", Value: "b/c/"}, Transform: []string{"normalizepath"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/?foo=a/../b/c/////././././",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// Repeated slashes and "./" segments collapse to "b/c/".
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			loadAppSecEngine(tc, t)
		})
	}
}

View file

@ -1,320 +0,0 @@
package appsecacquisition
import (
"net/http"
"net/url"
"testing"
"github.com/crowdsecurity/crowdsec/pkg/appsec"
"github.com/crowdsecurity/crowdsec/pkg/appsec/appsec_rule"
"github.com/crowdsecurity/crowdsec/pkg/types"
"github.com/stretchr/testify/require"
)
// TestAppsecDefaultPassRemediation verifies the behavior when no in-band rule
// matches: the default pass action (allow/captcha) and the pass HTTP code
// (DefaultPassAction / UserPassedHTTPCode) are applied to the response.
func TestAppsecDefaultPassRemediation(t *testing.T) {
	tests := []appsecRuleTest{
		{
			name:             "Basic non-matching rule",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
				Args:       url.Values{"foo": []string{"tutu"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// No match: the request passes with allow/200 by default.
				require.Equal(t, appsec.AllowRemediation, responses[0].Action)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "DefaultPassAction: pass",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
				Args:       url.Values{"foo": []string{"tutu"}},
			},
			DefaultPassAction: "allow",
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Equal(t, appsec.AllowRemediation, responses[0].Action)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "DefaultPassAction: captcha",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
				Args:       url.Values{"foo": []string{"tutu"}},
			},
			DefaultPassAction: "captcha",
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Equal(t, appsec.CaptchaRemediation, responses[0].Action)
				require.Equal(t, http.StatusOK, statusCode) //@tko: body is captcha, but as it's 200, captcha won't be showed to user
				require.Equal(t, appsec.CaptchaRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "DefaultPassHTTPCode: 200",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
				Args:       url.Values{"foo": []string{"tutu"}},
			},
			UserPassedHTTPCode: 200,
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Equal(t, appsec.AllowRemediation, responses[0].Action)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
			},
		},
		{
			// Was a duplicate "DefaultPassHTTPCode: 200"; this case sets 418.
			name:             "DefaultPassHTTPCode: 418",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:      "rule1",
					Zones:     []string{"ARGS"},
					Variables: []string{"foo"},
					Match:     appsec_rule.Match{Type: "regex", Value: "^toto"},
					Transform: []string{"lowercase"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
				Args:       url.Values{"foo": []string{"tutu"}},
			},
			UserPassedHTTPCode: 418,
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// The custom pass code only affects the user-facing HTTP status.
				require.Equal(t, appsec.AllowRemediation, responses[0].Action)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			loadAppSecEngine(test, t)
		})
	}
}
// TestAppsecDefaultRemediation verifies the behavior when an in-band rule
// matches: the configured DefaultRemediation (ban/allow/captcha or a custom
// string) and UserBlockedHTTPCode drive both the temp response and the
// user-facing body response.
func TestAppsecDefaultRemediation(t *testing.T) {
	tests := []appsecRuleTest{
		{
			name:             "Basic matching rule",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// Nothing configured: ban + 403 everywhere.
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "default remediation to ban (default)",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			DefaultRemediation: "ban",
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// Explicit "ban" matches the implicit default.
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "default remediation to allow",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			DefaultRemediation: "allow",
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// "allow" lets the matched request through with a 200.
				require.Equal(t, appsec.AllowRemediation, responses[0].Action)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "default remediation to captcha",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			DefaultRemediation: "captcha",
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Equal(t, appsec.CaptchaRemediation, responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, appsec.CaptchaRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "custom user HTTP code",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			UserBlockedHTTPCode: 418,
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// The custom code only changes the user-facing status, not the action.
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "custom remediation + HTTP code",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			UserBlockedHTTPCode: 418,
			DefaultRemediation:  "foobar",
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// An arbitrary remediation string is propagated verbatim.
				require.Equal(t, "foobar", responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, "foobar", appsecResponse.Action)
				require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
			},
		},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			loadAppSecEngine(tc, t)
		})
	}
}

View file

@ -1,733 +0,0 @@
package appsecacquisition
import (
"net/http"
"net/url"
"testing"
"github.com/crowdsecurity/crowdsec/pkg/appsec"
"github.com/crowdsecurity/crowdsec/pkg/appsec/appsec_rule"
"github.com/crowdsecurity/crowdsec/pkg/types"
log "github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
)
// TestAppsecRuleMatches covers the basic match/no-match paths of the in-band
// engine: event emission and interrupt flags on a match, default remediation
// overrides, custom blocked HTTP codes, and hooks that must have no effect
// when no rule matched.
func TestAppsecRuleMatches(t *testing.T) {
	tests := []appsecRuleTest{
		{
			name:             "Basic matching rule",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// A match yields an APPSEC event plus a LOG event carrying the rule.
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.True(t, events[1].Appsec.HasInBandMatches)
				require.Len(t, events[1].Appsec.MatchedRules, 1)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
				require.Len(t, responses, 1)
				require.True(t, responses[0].InBandInterrupt)
			},
		},
		{
			name:             "Basic non-matching rule",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"tutu"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// No match: no events and no interrupts of either kind.
				require.Empty(t, events)
				require.Len(t, responses, 1)
				require.False(t, responses[0].InBandInterrupt)
				require.False(t, responses[0].OutOfBandInterrupt)
			},
		},
		{
			name:             "default remediation to allow",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			DefaultRemediation: "allow",
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Equal(t, appsec.AllowRemediation, responses[0].Action)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusOK, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "default remediation to captcha",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			DefaultRemediation: "captcha",
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Equal(t, appsec.CaptchaRemediation, responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, appsec.CaptchaRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusForbidden, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "no default remediation / custom user HTTP code",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"toto"}},
			},
			UserBlockedHTTPCode: 418,
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Equal(t, appsec.BanRemediation, responses[0].Action)
				require.Equal(t, http.StatusForbidden, statusCode)
				require.Equal(t, appsec.BanRemediation, appsecResponse.Action)
				require.Equal(t, http.StatusTeapot, appsecResponse.HTTPStatus)
			},
		},
		{
			name:             "no match but try to set remediation to captcha with on_match hook",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetRemediation('captcha')"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"bla"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// The hook never fires because nothing matched in-band.
				require.Empty(t, events)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
			},
		},
		{
			name:             "no match but try to set user HTTP code with on_match hook",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			on_match: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetReturnCode(418)"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"bla"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Empty(t, events)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
			},
		},
		{
			name:             "no match but try to set remediation with pre_eval hook",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule42", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "regex", Value: "^toto"}, Transform: []string{"lowercase"}},
			},
			pre_eval: []appsec.Hook{
				{Filter: "IsInBand == true", Apply: []string{"SetRemediationByName('rule42', 'captcha')"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/urllll",
				Args:       url.Values{"foo": []string{"bla"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Empty(t, events)
				require.Equal(t, http.StatusOK, statusCode)
				require.Equal(t, appsec.AllowRemediation, appsecResponse.Action)
			},
		},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			loadAppSecEngine(tc, t)
		})
	}
}
// TestAppsecRuleTransforms exercises the value transforms applied before
// matching: lowercase, uppercase, b64decode (including lax extra padding),
// length, urldecode and trim.
func TestAppsecRuleTransforms(t *testing.T) {
	log.SetLevel(log.TraceLevel)

	tests := []appsecRuleTest{
		{
			name:             "Basic matching rule",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"URI"}, Match: appsec_rule.Match{Type: "equals", Value: "/toto"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/toto",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// Untransformed URI match as a baseline for the transform cases.
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name:             "lowercase",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"URI"}, Match: appsec_rule.Match{Type: "equals", Value: "/toto"}, Transform: []string{"lowercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/TOTO",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name:             "uppercase",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"URI"}, Match: appsec_rule.Match{Type: "equals", Value: "/TOTO"}, Transform: []string{"uppercase"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/toto",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name:             "b64decode",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "equals", Value: "toto"}, Transform: []string{"b64decode"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/?foo=dG90bw",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// "dG90bw" decodes to "toto" (unpadded base64 is accepted).
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name:             "b64decode with extra padding",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "equals", Value: "toto"}, Transform: []string{"b64decode"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/?foo=dG90bw===",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// Excess "=" padding is tolerated by the decoder.
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name:             "length",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "gte", Value: "3"}, Transform: []string{"length"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/?foo=toto",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// len("toto") == 4 >= 3, so the rule fires.
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name:             "urldecode",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "equals", Value: "BB/A"}, Transform: []string{"urldecode"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/?foo=%42%42%2F%41",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// "%42%42%2F%41" decodes to "BB/A".
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			name:             "trim",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{Name: "rule1", Zones: []string{"ARGS"}, Variables: []string{"foo"}, Match: appsec_rule.Match{Type: "equals", Value: "BB/A"}, Transform: []string{"urldecode", "trim"}},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/?foo=%20%20%42%42%2F%41%20%20",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				// Transforms chain: urldecode first, then trim strips the spaces.
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			loadAppSecEngine(tc, t)
		})
	}
}
// TestAppsecRuleZones exercises every request "zone" a custom appsec rule can
// target (ARGS, ARGS_NAMES, BODY_ARGS, BODY_ARGS_NAMES, HEADERS,
// HEADERS_NAMES, METHOD, PROTOCOL, URI, URI_FULL, RAW_BODY).
// Each case loads one or two in-band rules, replays a crafted request through
// loadAppSecEngine, and asserts which rule fired by checking the "msg"
// attribute of the first matched rule on the resulting LOG event
// (events[0] is the APPSEC event, events[1] the LOG event).
func TestAppsecRuleZones(t *testing.T) {
	log.SetLevel(log.TraceLevel)
	tests := []appsecRuleTest{
		{
			// ARGS matches query-string *values*: "toto" appears as a value
			// (something=toto) so rule1 fires; "foobar" only appears as a
			// parameter name, so rule2 does not.
			name:             "rule: ARGS",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:  "rule1",
					Zones: []string{"ARGS"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name:  "rule2",
					Zones: []string{"ARGS"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/foobar?something=toto&foobar=smth",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			// ARGS_NAMES is the converse: "foobar" is a parameter *name*,
			// so rule2 fires instead of rule1.
			name:             "rule: ARGS_NAMES",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:  "rule1",
					Zones: []string{"ARGS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name:  "rule2",
					Zones: []string{"ARGS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/foobar?something=toto&foobar=smth",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule2", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			// BODY_ARGS matches form-encoded body *values* ("toto" in
			// smth=toto), hence rule1. Content-Type is required so the body
			// is parsed as urlencoded form data.
			name:             "rule: BODY_ARGS",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:  "rule1",
					Zones: []string{"BODY_ARGS"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name:  "rule2",
					Zones: []string{"BODY_ARGS"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
				Body:       []byte("smth=toto&foobar=other"),
				Headers:    http.Header{"Content-Type": []string{"application/x-www-form-urlencoded"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			// BODY_ARGS_NAMES matches form-encoded body parameter *names*
			// ("foobar" in foobar=other), hence rule2.
			name:             "rule: BODY_ARGS_NAMES",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:  "rule1",
					Zones: []string{"BODY_ARGS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name:  "rule2",
					Zones: []string{"BODY_ARGS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
				Body:       []byte("smth=toto&foobar=other"),
				Headers:    http.Header{"Content-Type": []string{"application/x-www-form-urlencoded"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule2", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			// HEADERS matches header *values*: header foobar: toto carries
			// value "toto", so rule1 fires.
			// NOTE(review): the header key is built via a map literal, so it
			// stays lowercase "foobar" (http.Header.Set would canonicalize to
			// "Foobar") — presumably intentional; confirm the engine matches
			// header names case-insensitively.
			name:             "rule: HEADERS",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:  "rule1",
					Zones: []string{"HEADERS"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name:  "rule2",
					Zones: []string{"HEADERS"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
				Headers:    http.Header{"foobar": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			// HEADERS_NAMES matches header *names*: "foobar" is the header
			// name, so rule2 fires.
			name:             "rule: HEADERS_NAMES",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:  "rule1",
					Zones: []string{"HEADERS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "toto"},
				},
				{
					Name:  "rule2",
					Zones: []string{"HEADERS_NAMES"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
				Headers:    http.Header{"foobar": []string{"toto"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule2", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			// METHOD matches the HTTP verb of the request.
			name:             "rule: METHOD",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:  "rule1",
					Zones: []string{"METHOD"},
					Match: appsec_rule.Match{Type: "equals", Value: "GET"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			// PROTOCOL matches against the request protocol string
			// ("HTTP/3.1" contains "3.1").
			name:             "rule: PROTOCOL",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:  "rule1",
					Zones: []string{"PROTOCOL"},
					Match: appsec_rule.Match{Type: "contains", Value: "3.1"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
				Proto:      "HTTP/3.1",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			// URI matches the path only (no query string).
			name:             "rule: URI",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:  "rule1",
					Zones: []string{"URI"},
					Match: appsec_rule.Match{Type: "equals", Value: "/foobar"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/foobar",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			// URI_FULL matches the path including the query string.
			name:             "rule: URI_FULL",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:  "rule1",
					Zones: []string{"URI_FULL"},
					Match: appsec_rule.Match{Type: "equals", Value: "/foobar?a=b"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/foobar?a=b",
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
		{
			// RAW_BODY matches the unparsed request body as a whole.
			name:             "rule: RAW_BODY",
			expected_load_ok: true,
			inband_rules: []appsec_rule.CustomRule{
				{
					Name:  "rule1",
					Zones: []string{"RAW_BODY"},
					Match: appsec_rule.Match{Type: "equals", Value: "foobar=42421"},
				},
			},
			input_request: appsec.ParsedRequest{
				RemoteAddr: "1.2.3.4",
				Method:     "GET",
				URI:        "/",
				Body:       []byte("foobar=42421"),
				Headers:    http.Header{"Content-Type": []string{"application/x-www-form-urlencoded"}},
			},
			output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
				require.Len(t, events, 2)
				require.Equal(t, types.APPSEC, events[0].Type)
				require.Equal(t, types.LOG, events[1].Type)
				require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			loadAppSecEngine(test, t)
		})
	}
}

File diff suppressed because it is too large Load diff

View file

@ -1,46 +0,0 @@
//go:build windows
// +build windows
package appsecacquisition
import (
"testing"
log "github.com/sirupsen/logrus"
)
// TestAppsecRuleTransformsWindows is the Windows-only counterpart of the
// transform tests. The table is currently empty: the single "normalizepath"
// case is commented out below.
// NOTE(review): presumably the normalizepath transform behaves differently
// with Windows path separators — confirm and re-enable the case once the
// transform is fixed for Windows.
func TestAppsecRuleTransformsWindows(t *testing.T) {
	log.SetLevel(log.TraceLevel)
	tests := []appsecRuleTest{
		// {
		// 	name:             "normalizepath",
		// 	expected_load_ok: true,
		// 	inband_rules: []appsec_rule.CustomRule{
		// 		{
		// 			Name:      "rule1",
		// 			Zones:     []string{"ARGS"},
		// 			Variables: []string{"foo"},
		// 			Match:     appsec_rule.Match{Type: "equals", Value: "b/c"},
		// 			Transform: []string{"normalizepath"},
		// 		},
		// 	},
		// 	input_request: appsec.ParsedRequest{
		// 		RemoteAddr: "1.2.3.4",
		// 		Method:     "GET",
		// 		URI:        "/?foo=a/../b/c",
		// 	},
		// 	output_asserts: func(events []types.Event, responses []appsec.AppsecTempResponse, appsecResponse appsec.BodyResponse, statusCode int) {
		// 		require.Len(t, events, 2)
		// 		require.Equal(t, types.APPSEC, events[0].Type)
		// 		require.Equal(t, types.LOG, events[1].Type)
		// 		require.Equal(t, "rule1", events[1].Appsec.MatchedRules[0]["msg"])
		// 	},
		// },
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			loadAppSecEngine(test, t)
		})
	}
}

View file

@ -43,8 +43,7 @@ var linesRead = prometheus.NewCounterVec(
// CloudwatchSource is the runtime instance keeping track of N streams within 1 cloudwatch group // CloudwatchSource is the runtime instance keeping track of N streams within 1 cloudwatch group
type CloudwatchSource struct { type CloudwatchSource struct {
metricsLevel int Config CloudwatchSourceConfiguration
Config CloudwatchSourceConfiguration
/*runtime stuff*/ /*runtime stuff*/
logger *log.Entry logger *log.Entry
t *tomb.Tomb t *tomb.Tomb
@ -153,12 +152,11 @@ func (cw *CloudwatchSource) UnmarshalConfig(yamlConfig []byte) error {
return nil return nil
} }
func (cw *CloudwatchSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error { func (cw *CloudwatchSource) Configure(yamlConfig []byte, logger *log.Entry) error {
err := cw.UnmarshalConfig(yamlConfig) err := cw.UnmarshalConfig(yamlConfig)
if err != nil { if err != nil {
return err return err
} }
cw.metricsLevel = MetricsLevel
cw.logger = logger.WithField("group", cw.Config.GroupName) cw.logger = logger.WithField("group", cw.Config.GroupName)
@ -387,9 +385,7 @@ func (cw *CloudwatchSource) LogStreamManager(in chan LogStreamTailConfig, outCha
if !stream.t.Alive() { if !stream.t.Alive() {
cw.logger.Debugf("stream %s already exists, but is dead", newStream.StreamName) cw.logger.Debugf("stream %s already exists, but is dead", newStream.StreamName)
cw.monitoredStreams = append(cw.monitoredStreams[:idx], cw.monitoredStreams[idx+1:]...) cw.monitoredStreams = append(cw.monitoredStreams[:idx], cw.monitoredStreams[idx+1:]...)
if cw.metricsLevel != configuration.METRICS_NONE { openedStreams.With(prometheus.Labels{"group": newStream.GroupName}).Dec()
openedStreams.With(prometheus.Labels{"group": newStream.GroupName}).Dec()
}
break break
} }
shouldCreate = false shouldCreate = false
@ -399,9 +395,7 @@ func (cw *CloudwatchSource) LogStreamManager(in chan LogStreamTailConfig, outCha
//let's start watching this stream //let's start watching this stream
if shouldCreate { if shouldCreate {
if cw.metricsLevel != configuration.METRICS_NONE { openedStreams.With(prometheus.Labels{"group": newStream.GroupName}).Inc()
openedStreams.With(prometheus.Labels{"group": newStream.GroupName}).Inc()
}
newStream.t = tomb.Tomb{} newStream.t = tomb.Tomb{}
newStream.logger = cw.logger.WithFields(log.Fields{"stream": newStream.StreamName}) newStream.logger = cw.logger.WithFields(log.Fields{"stream": newStream.StreamName})
cw.logger.Debugf("starting tail of stream %s", newStream.StreamName) cw.logger.Debugf("starting tail of stream %s", newStream.StreamName)
@ -415,9 +409,7 @@ func (cw *CloudwatchSource) LogStreamManager(in chan LogStreamTailConfig, outCha
for idx, stream := range cw.monitoredStreams { for idx, stream := range cw.monitoredStreams {
if !cw.monitoredStreams[idx].t.Alive() { if !cw.monitoredStreams[idx].t.Alive() {
cw.logger.Debugf("remove dead stream %s", stream.StreamName) cw.logger.Debugf("remove dead stream %s", stream.StreamName)
if cw.metricsLevel != configuration.METRICS_NONE { openedStreams.With(prometheus.Labels{"group": cw.monitoredStreams[idx].GroupName}).Dec()
openedStreams.With(prometheus.Labels{"group": cw.monitoredStreams[idx].GroupName}).Dec()
}
} else { } else {
newMonitoredStreams = append(newMonitoredStreams, stream) newMonitoredStreams = append(newMonitoredStreams, stream)
} }
@ -493,9 +485,7 @@ func (cw *CloudwatchSource) TailLogStream(cfg *LogStreamTailConfig, outChan chan
cfg.logger.Warningf("cwLogToEvent error, discarded event : %s", err) cfg.logger.Warningf("cwLogToEvent error, discarded event : %s", err)
} else { } else {
cfg.logger.Debugf("pushing message : %s", evt.Line.Raw) cfg.logger.Debugf("pushing message : %s", evt.Line.Raw)
if cw.metricsLevel != configuration.METRICS_NONE { linesRead.With(prometheus.Labels{"group": cfg.GroupName, "stream": cfg.StreamName}).Inc()
linesRead.With(prometheus.Labels{"group": cfg.GroupName, "stream": cfg.StreamName}).Inc()
}
outChan <- evt outChan <- evt
} }
} }

View file

@ -13,7 +13,6 @@ import (
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs" "github.com/aws/aws-sdk-go/service/cloudwatchlogs"
"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
"github.com/crowdsecurity/crowdsec/pkg/types" "github.com/crowdsecurity/crowdsec/pkg/types"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
@ -428,7 +427,7 @@ stream_name: test_stream`),
dbgLogger.Logger.SetLevel(log.DebugLevel) dbgLogger.Logger.SetLevel(log.DebugLevel)
dbgLogger.Infof("starting test") dbgLogger.Infof("starting test")
cw := CloudwatchSource{} cw := CloudwatchSource{}
err := cw.Configure(tc.config, dbgLogger, configuration.METRICS_NONE) err := cw.Configure(tc.config, dbgLogger)
cstest.RequireErrorContains(t, err, tc.expectedCfgErr) cstest.RequireErrorContains(t, err, tc.expectedCfgErr)
if tc.expectedCfgErr != "" { if tc.expectedCfgErr != "" {
@ -560,7 +559,7 @@ stream_name: test_stream`),
dbgLogger := log.New().WithField("test", tc.name) dbgLogger := log.New().WithField("test", tc.name)
dbgLogger.Logger.SetLevel(log.DebugLevel) dbgLogger.Logger.SetLevel(log.DebugLevel)
cw := CloudwatchSource{} cw := CloudwatchSource{}
err := cw.Configure(tc.config, dbgLogger, configuration.METRICS_NONE) err := cw.Configure(tc.config, dbgLogger)
cstest.RequireErrorContains(t, err, tc.expectedCfgErr) cstest.RequireErrorContains(t, err, tc.expectedCfgErr)
if tc.expectedCfgErr != "" { if tc.expectedCfgErr != "" {
return return

View file

@ -46,7 +46,6 @@ type DockerConfiguration struct {
} }
type DockerSource struct { type DockerSource struct {
metricsLevel int
Config DockerConfiguration Config DockerConfiguration
runningContainerState map[string]*ContainerConfig runningContainerState map[string]*ContainerConfig
compiledContainerName []*regexp.Regexp compiledContainerName []*regexp.Regexp
@ -129,9 +128,9 @@ func (d *DockerSource) UnmarshalConfig(yamlConfig []byte) error {
return nil return nil
} }
func (d *DockerSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error { func (d *DockerSource) Configure(yamlConfig []byte, logger *log.Entry) error {
d.logger = logger d.logger = logger
d.metricsLevel = MetricsLevel
err := d.UnmarshalConfig(yamlConfig) err := d.UnmarshalConfig(yamlConfig)
if err != nil { if err != nil {
return err return err
@ -326,9 +325,7 @@ func (d *DockerSource) OneShotAcquisition(out chan types.Event, t *tomb.Tomb) er
l.Src = containerConfig.Name l.Src = containerConfig.Name
l.Process = true l.Process = true
l.Module = d.GetName() l.Module = d.GetName()
if d.metricsLevel != configuration.METRICS_NONE { linesRead.With(prometheus.Labels{"source": containerConfig.Name}).Inc()
linesRead.With(prometheus.Labels{"source": containerConfig.Name}).Inc()
}
evt := types.Event{Line: l, Process: true, Type: types.LOG, ExpectMode: types.TIMEMACHINE} evt := types.Event{Line: l, Process: true, Type: types.LOG, ExpectMode: types.TIMEMACHINE}
out <- evt out <- evt
d.logger.Debugf("Sent line to parsing: %+v", evt.Line.Raw) d.logger.Debugf("Sent line to parsing: %+v", evt.Line.Raw)

View file

@ -13,7 +13,6 @@ import (
"github.com/crowdsecurity/go-cs-lib/cstest" "github.com/crowdsecurity/go-cs-lib/cstest"
"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
"github.com/crowdsecurity/crowdsec/pkg/types" "github.com/crowdsecurity/crowdsec/pkg/types"
dockerTypes "github.com/docker/docker/api/types" dockerTypes "github.com/docker/docker/api/types"
dockerContainer "github.com/docker/docker/api/types/container" dockerContainer "github.com/docker/docker/api/types/container"
@ -61,7 +60,7 @@ container_name:
for _, test := range tests { for _, test := range tests {
f := DockerSource{} f := DockerSource{}
err := f.Configure([]byte(test.config), subLogger, configuration.METRICS_NONE) err := f.Configure([]byte(test.config), subLogger)
cstest.AssertErrorContains(t, err, test.expectedErr) cstest.AssertErrorContains(t, err, test.expectedErr)
} }
} }
@ -163,7 +162,7 @@ container_name_regexp:
for _, ts := range tests { for _, ts := range tests {
var ( var (
logger *log.Logger logger *log.Logger
subLogger *log.Entry subLogger *log.Entry
) )
@ -183,7 +182,7 @@ container_name_regexp:
out := make(chan types.Event) out := make(chan types.Event)
dockerSource := DockerSource{} dockerSource := DockerSource{}
err := dockerSource.Configure([]byte(ts.config), subLogger, configuration.METRICS_NONE) err := dockerSource.Configure([]byte(ts.config), subLogger)
if err != nil { if err != nil {
t.Fatalf("Unexpected error : %s", err) t.Fatalf("Unexpected error : %s", err)
} }
@ -305,7 +304,7 @@ func TestOneShot(t *testing.T) {
for _, ts := range tests { for _, ts := range tests {
var ( var (
subLogger *log.Entry subLogger *log.Entry
logger *log.Logger logger *log.Logger
) )
if ts.expectedOutput != "" { if ts.expectedOutput != "" {

View file

@ -3,7 +3,6 @@ package fileacquisition
import ( import (
"bufio" "bufio"
"compress/gzip" "compress/gzip"
"errors"
"fmt" "fmt"
"io" "io"
"net/url" "net/url"
@ -12,11 +11,11 @@ import (
"regexp" "regexp"
"strconv" "strconv"
"strings" "strings"
"sync"
"time" "time"
"github.com/fsnotify/fsnotify" "github.com/fsnotify/fsnotify"
"github.com/nxadm/tail" "github.com/nxadm/tail"
"github.com/pkg/errors"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"gopkg.in/tomb.v2" "gopkg.in/tomb.v2"
@ -39,14 +38,13 @@ type FileConfiguration struct {
Filenames []string Filenames []string
ExcludeRegexps []string `yaml:"exclude_regexps"` ExcludeRegexps []string `yaml:"exclude_regexps"`
Filename string Filename string
ForceInotify bool `yaml:"force_inotify"` ForceInotify bool `yaml:"force_inotify"`
MaxBufferSize int `yaml:"max_buffer_size"` MaxBufferSize int `yaml:"max_buffer_size"`
PollWithoutInotify *bool `yaml:"poll_without_inotify"` PollWithoutInotify bool `yaml:"poll_without_inotify"`
configuration.DataSourceCommonCfg `yaml:",inline"` configuration.DataSourceCommonCfg `yaml:",inline"`
} }
type FileSource struct { type FileSource struct {
metricsLevel int
config FileConfiguration config FileConfiguration
watcher *fsnotify.Watcher watcher *fsnotify.Watcher
watchedDirectories map[string]bool watchedDirectories map[string]bool
@ -54,7 +52,6 @@ type FileSource struct {
logger *log.Entry logger *log.Entry
files []string files []string
exclude_regexps []*regexp.Regexp exclude_regexps []*regexp.Regexp
tailMapMutex *sync.RWMutex
} }
func (f *FileSource) GetUuid() string { func (f *FileSource) GetUuid() string {
@ -63,7 +60,6 @@ func (f *FileSource) GetUuid() string {
func (f *FileSource) UnmarshalConfig(yamlConfig []byte) error { func (f *FileSource) UnmarshalConfig(yamlConfig []byte) error {
f.config = FileConfiguration{} f.config = FileConfiguration{}
err := yaml.UnmarshalStrict(yamlConfig, &f.config) err := yaml.UnmarshalStrict(yamlConfig, &f.config)
if err != nil { if err != nil {
return fmt.Errorf("cannot parse FileAcquisition configuration: %w", err) return fmt.Errorf("cannot parse FileAcquisition configuration: %w", err)
@ -78,7 +74,7 @@ func (f *FileSource) UnmarshalConfig(yamlConfig []byte) error {
} }
if len(f.config.Filenames) == 0 { if len(f.config.Filenames) == 0 {
return errors.New("no filename or filenames configuration provided") return fmt.Errorf("no filename or filenames configuration provided")
} }
if f.config.Mode == "" { if f.config.Mode == "" {
@ -94,16 +90,14 @@ func (f *FileSource) UnmarshalConfig(yamlConfig []byte) error {
if err != nil { if err != nil {
return fmt.Errorf("could not compile regexp %s: %w", exclude, err) return fmt.Errorf("could not compile regexp %s: %w", exclude, err)
} }
f.exclude_regexps = append(f.exclude_regexps, re) f.exclude_regexps = append(f.exclude_regexps, re)
} }
return nil return nil
} }
func (f *FileSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error { func (f *FileSource) Configure(yamlConfig []byte, logger *log.Entry) error {
f.logger = logger f.logger = logger
f.metricsLevel = MetricsLevel
err := f.UnmarshalConfig(yamlConfig) err := f.UnmarshalConfig(yamlConfig)
if err != nil { if err != nil {
@ -111,7 +105,6 @@ func (f *FileSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLeve
} }
f.watchedDirectories = make(map[string]bool) f.watchedDirectories = make(map[string]bool)
f.tailMapMutex = &sync.RWMutex{}
f.tails = make(map[string]bool) f.tails = make(map[string]bool)
f.watcher, err = fsnotify.NewWatcher() f.watcher, err = fsnotify.NewWatcher()
@ -125,68 +118,56 @@ func (f *FileSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLeve
if f.config.ForceInotify { if f.config.ForceInotify {
directory := filepath.Dir(pattern) directory := filepath.Dir(pattern)
f.logger.Infof("Force add watch on %s", directory) f.logger.Infof("Force add watch on %s", directory)
if !f.watchedDirectories[directory] { if !f.watchedDirectories[directory] {
err = f.watcher.Add(directory) err = f.watcher.Add(directory)
if err != nil { if err != nil {
f.logger.Errorf("Could not create watch on directory %s : %s", directory, err) f.logger.Errorf("Could not create watch on directory %s : %s", directory, err)
continue continue
} }
f.watchedDirectories[directory] = true f.watchedDirectories[directory] = true
} }
} }
files, err := filepath.Glob(pattern) files, err := filepath.Glob(pattern)
if err != nil { if err != nil {
return fmt.Errorf("glob failure: %w", err) return fmt.Errorf("glob failure: %w", err)
} }
if len(files) == 0 { if len(files) == 0 {
f.logger.Warnf("No matching files for pattern %s", pattern) f.logger.Warnf("No matching files for pattern %s", pattern)
continue continue
} }
for _, file := range files { for _, file := range files {
// check if file is excluded
excluded := false
//check if file is excluded
excluded := false
for _, pattern := range f.exclude_regexps { for _, pattern := range f.exclude_regexps {
if pattern.MatchString(file) { if pattern.MatchString(file) {
excluded = true excluded = true
f.logger.Infof("Skipping file %s as it matches exclude pattern %s", file, pattern) f.logger.Infof("Skipping file %s as it matches exclude pattern %s", file, pattern)
break break
} }
} }
if excluded { if excluded {
continue continue
} }
if files[0] != pattern && f.config.Mode == configuration.TAIL_MODE { //we have a glob pattern
if files[0] != pattern && f.config.Mode == configuration.TAIL_MODE { // we have a glob pattern
directory := filepath.Dir(file) directory := filepath.Dir(file)
f.logger.Debugf("Will add watch to directory: %s", directory) f.logger.Debugf("Will add watch to directory: %s", directory)
if !f.watchedDirectories[directory] { if !f.watchedDirectories[directory] {
err = f.watcher.Add(directory) err = f.watcher.Add(directory)
if err != nil { if err != nil {
f.logger.Errorf("Could not create watch on directory %s : %s", directory, err) f.logger.Errorf("Could not create watch on directory %s : %s", directory, err)
continue continue
} }
f.watchedDirectories[directory] = true f.watchedDirectories[directory] = true
} else { } else {
f.logger.Debugf("Watch for directory %s already exists", directory) f.logger.Debugf("Watch for directory %s already exists", directory)
} }
} }
f.logger.Infof("Adding file %s to datasources", file) f.logger.Infof("Adding file %s to datasources", file)
f.files = append(f.files, file) f.files = append(f.files, file)
} }
} }
return nil return nil
} }
@ -203,7 +184,7 @@ func (f *FileSource) ConfigureByDSN(dsn string, labels map[string]string, logger
args := strings.Split(dsn, "?") args := strings.Split(dsn, "?")
if len(args[0]) == 0 { if len(args[0]) == 0 {
return errors.New("empty file:// DSN") return fmt.Errorf("empty file:// DSN")
} }
if len(args) == 2 && len(args[1]) != 0 { if len(args) == 2 && len(args[1]) != 0 {
@ -211,30 +192,25 @@ func (f *FileSource) ConfigureByDSN(dsn string, labels map[string]string, logger
if err != nil { if err != nil {
return fmt.Errorf("could not parse file args: %w", err) return fmt.Errorf("could not parse file args: %w", err)
} }
for key, value := range params { for key, value := range params {
switch key { switch key {
case "log_level": case "log_level":
if len(value) != 1 { if len(value) != 1 {
return errors.New("expected zero or one value for 'log_level'") return errors.New("expected zero or one value for 'log_level'")
} }
lvl, err := log.ParseLevel(value[0]) lvl, err := log.ParseLevel(value[0])
if err != nil { if err != nil {
return fmt.Errorf("unknown level %s: %w", value[0], err) return fmt.Errorf("unknown level %s: %w", value[0], err)
} }
f.logger.Logger.SetLevel(lvl) f.logger.Logger.SetLevel(lvl)
case "max_buffer_size": case "max_buffer_size":
if len(value) != 1 { if len(value) != 1 {
return errors.New("expected zero or one value for 'max_buffer_size'") return errors.New("expected zero or one value for 'max_buffer_size'")
} }
maxBufferSize, err := strconv.Atoi(value[0]) maxBufferSize, err := strconv.Atoi(value[0])
if err != nil { if err != nil {
return fmt.Errorf("could not parse max_buffer_size %s: %w", value[0], err) return fmt.Errorf("could not parse max_buffer_size %s: %w", value[0], err)
} }
f.config.MaxBufferSize = maxBufferSize f.config.MaxBufferSize = maxBufferSize
default: default:
return fmt.Errorf("unknown parameter %s", key) return fmt.Errorf("unknown parameter %s", key)
@ -247,7 +223,6 @@ func (f *FileSource) ConfigureByDSN(dsn string, labels map[string]string, logger
f.config.UniqueId = uuid f.config.UniqueId = uuid
f.logger.Debugf("Will try pattern %s", args[0]) f.logger.Debugf("Will try pattern %s", args[0])
files, err := filepath.Glob(args[0]) files, err := filepath.Glob(args[0])
if err != nil { if err != nil {
return fmt.Errorf("glob failure: %w", err) return fmt.Errorf("glob failure: %w", err)
@ -265,7 +240,6 @@ func (f *FileSource) ConfigureByDSN(dsn string, labels map[string]string, logger
f.logger.Infof("Adding file %s to filelist", file) f.logger.Infof("Adding file %s to filelist", file)
f.files = append(f.files, file) f.files = append(f.files, file)
} }
return nil return nil
} }
@ -281,26 +255,22 @@ func (f *FileSource) SupportedModes() []string {
// OneShotAcquisition reads a set of file and returns when done // OneShotAcquisition reads a set of file and returns when done
func (f *FileSource) OneShotAcquisition(out chan types.Event, t *tomb.Tomb) error { func (f *FileSource) OneShotAcquisition(out chan types.Event, t *tomb.Tomb) error {
f.logger.Debug("In oneshot") f.logger.Debug("In oneshot")
for _, file := range f.files { for _, file := range f.files {
fi, err := os.Stat(file) fi, err := os.Stat(file)
if err != nil { if err != nil {
return fmt.Errorf("could not stat file %s : %w", file, err) return fmt.Errorf("could not stat file %s : %w", file, err)
} }
if fi.IsDir() { if fi.IsDir() {
f.logger.Warnf("%s is a directory, ignoring it.", file) f.logger.Warnf("%s is a directory, ignoring it.", file)
continue continue
} }
f.logger.Infof("reading %s at once", file) f.logger.Infof("reading %s at once", file)
err = f.readFile(file, out, t) err = f.readFile(file, out, t)
if err != nil { if err != nil {
return err return err
} }
}
}
return nil return nil
} }
@ -325,33 +295,27 @@ func (f *FileSource) StreamingAcquisition(out chan types.Event, t *tomb.Tomb) er
t.Go(func() error { t.Go(func() error {
return f.monitorNewFiles(out, t) return f.monitorNewFiles(out, t)
}) })
for _, file := range f.files { for _, file := range f.files {
// before opening the file, check if we need to specifically avoid it. (XXX) //before opening the file, check if we need to specifically avoid it. (XXX)
skip := false skip := false
for _, pattern := range f.exclude_regexps { for _, pattern := range f.exclude_regexps {
if pattern.MatchString(file) { if pattern.MatchString(file) {
f.logger.Infof("file %s matches exclusion pattern %s, skipping", file, pattern.String()) f.logger.Infof("file %s matches exclusion pattern %s, skipping", file, pattern.String())
skip = true skip = true
break break
} }
} }
if skip { if skip {
continue continue
} }
// cf. https://github.com/crowdsecurity/crowdsec/issues/1168 //cf. https://github.com/crowdsecurity/crowdsec/issues/1168
// do not rely on stat, reclose file immediately as it's opened by Tail //do not rely on stat, reclose file immediately as it's opened by Tail
fd, err := os.Open(file) fd, err := os.Open(file)
if err != nil { if err != nil {
f.logger.Errorf("unable to read %s : %s", file, err) f.logger.Errorf("unable to read %s : %s", file, err)
continue continue
} }
if err := fd.Close(); err != nil { if err := fd.Close(); err != nil {
f.logger.Errorf("unable to close %s : %s", file, err) f.logger.Errorf("unable to close %s : %s", file, err)
continue continue
@ -361,55 +325,22 @@ func (f *FileSource) StreamingAcquisition(out chan types.Event, t *tomb.Tomb) er
if err != nil { if err != nil {
return fmt.Errorf("could not stat file %s : %w", file, err) return fmt.Errorf("could not stat file %s : %w", file, err)
} }
if fi.IsDir() { if fi.IsDir() {
f.logger.Warnf("%s is a directory, ignoring it.", file) f.logger.Warnf("%s is a directory, ignoring it.", file)
continue continue
} }
pollFile := false tail, err := tail.TailFile(file, tail.Config{ReOpen: true, Follow: true, Poll: f.config.PollWithoutInotify, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekEnd}, Logger: log.NewEntry(log.StandardLogger())})
if f.config.PollWithoutInotify != nil {
pollFile = *f.config.PollWithoutInotify
} else {
networkFS, fsType, err := types.IsNetworkFS(file)
if err != nil {
f.logger.Warningf("Could not get fs type for %s : %s", file, err)
}
f.logger.Debugf("fs for %s is network: %t (%s)", file, networkFS, fsType)
if networkFS {
f.logger.Warnf("Disabling inotify polling on %s as it is on a network share. You can manually set poll_without_inotify to true to make this message disappear, or to false to enforce inotify poll", file)
pollFile = true
}
}
filink, err := os.Lstat(file)
if err != nil {
f.logger.Errorf("Could not lstat() new file %s, ignoring it : %s", file, err)
continue
}
if filink.Mode()&os.ModeSymlink == os.ModeSymlink && !pollFile {
f.logger.Warnf("File %s is a symlink, but inotify polling is enabled. Crowdsec will not be able to detect rotation. Consider setting poll_without_inotify to true in your configuration", file)
}
tail, err := tail.TailFile(file, tail.Config{ReOpen: true, Follow: true, Poll: pollFile, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekEnd}, Logger: log.NewEntry(log.StandardLogger())})
if err != nil { if err != nil {
f.logger.Errorf("Could not start tailing file %s : %s", file, err) f.logger.Errorf("Could not start tailing file %s : %s", file, err)
continue continue
} }
f.tailMapMutex.Lock()
f.tails[file] = true f.tails[file] = true
f.tailMapMutex.Unlock()
t.Go(func() error { t.Go(func() error {
defer trace.CatchPanic("crowdsec/acquis/file/live/fsnotify") defer trace.CatchPanic("crowdsec/acquis/file/live/fsnotify")
return f.tailFile(out, t, tail) return f.tailFile(out, t, tail)
}) })
} }
return nil return nil
} }
@ -419,7 +350,6 @@ func (f *FileSource) Dump() interface{} {
func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error { func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error {
logger := f.logger.WithField("goroutine", "inotify") logger := f.logger.WithField("goroutine", "inotify")
for { for {
select { select {
case event, ok := <-f.watcher.Events: case event, ok := <-f.watcher.Events:
@ -433,62 +363,47 @@ func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error {
logger.Errorf("Could not stat() new file %s, ignoring it : %s", event.Name, err) logger.Errorf("Could not stat() new file %s, ignoring it : %s", event.Name, err)
continue continue
} }
if fi.IsDir() { if fi.IsDir() {
continue continue
} }
logger.Debugf("Detected new file %s", event.Name) logger.Debugf("Detected new file %s", event.Name)
matched := false matched := false
for _, pattern := range f.config.Filenames { for _, pattern := range f.config.Filenames {
logger.Debugf("Matching %s with %s", pattern, event.Name) logger.Debugf("Matching %s with %s", pattern, event.Name)
matched, err = filepath.Match(pattern, event.Name) matched, err = filepath.Match(pattern, event.Name)
if err != nil { if err != nil {
logger.Errorf("Could not match pattern : %s", err) logger.Errorf("Could not match pattern : %s", err)
continue continue
} }
if matched { if matched {
logger.Debugf("Matched %s with %s", pattern, event.Name) logger.Debugf("Matched %s with %s", pattern, event.Name)
break break
} }
} }
if !matched { if !matched {
continue continue
} }
// before opening the file, check if we need to specifically avoid it. (XXX) //before opening the file, check if we need to specifically avoid it. (XXX)
skip := false skip := false
for _, pattern := range f.exclude_regexps { for _, pattern := range f.exclude_regexps {
if pattern.MatchString(event.Name) { if pattern.MatchString(event.Name) {
f.logger.Infof("file %s matches exclusion pattern %s, skipping", event.Name, pattern.String()) f.logger.Infof("file %s matches exclusion pattern %s, skipping", event.Name, pattern.String())
skip = true skip = true
break break
} }
} }
if skip { if skip {
continue continue
} }
f.tailMapMutex.RLock()
if f.tails[event.Name] { if f.tails[event.Name] {
f.tailMapMutex.RUnlock() //we already have a tail on it, do not start a new one
// we already have a tail on it, do not start a new one
logger.Debugf("Already tailing file %s, not creating a new tail", event.Name) logger.Debugf("Already tailing file %s, not creating a new tail", event.Name)
break break
} }
f.tailMapMutex.RUnlock() //cf. https://github.com/crowdsecurity/crowdsec/issues/1168
// cf. https://github.com/crowdsecurity/crowdsec/issues/1168 //do not rely on stat, reclose file immediately as it's opened by Tail
// do not rely on stat, reclose file immediately as it's opened by Tail
fd, err := os.Open(event.Name) fd, err := os.Open(event.Name)
if err != nil { if err != nil {
f.logger.Errorf("unable to read %s : %s", event.Name, err) f.logger.Errorf("unable to read %s : %s", event.Name, err)
@ -498,42 +413,13 @@ func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error {
f.logger.Errorf("unable to close %s : %s", event.Name, err) f.logger.Errorf("unable to close %s : %s", event.Name, err)
continue continue
} }
pollFile := false
if f.config.PollWithoutInotify != nil {
pollFile = *f.config.PollWithoutInotify
} else {
networkFS, fsType, err := types.IsNetworkFS(event.Name)
if err != nil {
f.logger.Warningf("Could not get fs type for %s : %s", event.Name, err)
}
f.logger.Debugf("fs for %s is network: %t (%s)", event.Name, networkFS, fsType)
if networkFS {
pollFile = true
}
}
filink, err := os.Lstat(event.Name)
if err != nil {
logger.Errorf("Could not lstat() new file %s, ignoring it : %s", event.Name, err)
continue
}
if filink.Mode()&os.ModeSymlink == os.ModeSymlink && !pollFile {
logger.Warnf("File %s is a symlink, but inotify polling is enabled. Crowdsec will not be able to detect rotation. Consider setting poll_without_inotify to true in your configuration", event.Name)
}
//Slightly different parameters for Location, as we want to read the first lines of the newly created file //Slightly different parameters for Location, as we want to read the first lines of the newly created file
tail, err := tail.TailFile(event.Name, tail.Config{ReOpen: true, Follow: true, Poll: pollFile, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekStart}}) tail, err := tail.TailFile(event.Name, tail.Config{ReOpen: true, Follow: true, Poll: f.config.PollWithoutInotify, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekStart}})
if err != nil { if err != nil {
logger.Errorf("Could not start tailing file %s : %s", event.Name, err) logger.Errorf("Could not start tailing file %s : %s", event.Name, err)
break break
} }
f.tailMapMutex.Lock()
f.tails[event.Name] = true f.tails[event.Name] = true
f.tailMapMutex.Unlock()
t.Go(func() error { t.Go(func() error {
defer trace.CatchPanic("crowdsec/acquis/tailfile") defer trace.CatchPanic("crowdsec/acquis/tailfile")
return f.tailFile(out, t, tail) return f.tailFile(out, t, tail)
@ -543,14 +429,12 @@ func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error {
if !ok { if !ok {
return nil return nil
} }
logger.Errorf("Error while monitoring folder: %s", err) logger.Errorf("Error while monitoring folder: %s", err)
case <-t.Dying(): case <-t.Dying():
err := f.watcher.Close() err := f.watcher.Close()
if err != nil { if err != nil {
return fmt.Errorf("could not remove all inotify watches: %w", err) return fmt.Errorf("could not remove all inotify watches: %w", err)
} }
return nil return nil
} }
} }
@ -559,61 +443,46 @@ func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error {
func (f *FileSource) tailFile(out chan types.Event, t *tomb.Tomb, tail *tail.Tail) error { func (f *FileSource) tailFile(out chan types.Event, t *tomb.Tomb, tail *tail.Tail) error {
logger := f.logger.WithField("tail", tail.Filename) logger := f.logger.WithField("tail", tail.Filename)
logger.Debugf("-> Starting tail of %s", tail.Filename) logger.Debugf("-> Starting tail of %s", tail.Filename)
for { for {
select { select {
case <-t.Dying(): case <-t.Dying():
logger.Infof("File datasource %s stopping", tail.Filename) logger.Infof("File datasource %s stopping", tail.Filename)
if err := tail.Stop(); err != nil { if err := tail.Stop(); err != nil {
f.logger.Errorf("error in stop : %s", err) f.logger.Errorf("error in stop : %s", err)
return err return err
} }
return nil return nil
case <-tail.Dying(): // our tailer is dying case <-tail.Dying(): //our tailer is dying
err := tail.Err() err := tail.Err()
errMsg := fmt.Sprintf("file reader of %s died", tail.Filename) errMsg := fmt.Sprintf("file reader of %s died", tail.Filename)
if err != nil { if err != nil {
errMsg = fmt.Sprintf(errMsg+" : %s", err) errMsg = fmt.Sprintf(errMsg+" : %s", err)
} }
logger.Warningf(errMsg) logger.Warningf(errMsg)
t.Kill(fmt.Errorf(errMsg))
return nil return fmt.Errorf(errMsg)
case line := <-tail.Lines: case line := <-tail.Lines:
if line == nil { if line == nil {
logger.Warningf("tail for %s is empty", tail.Filename) logger.Warningf("tail for %s is empty", tail.Filename)
continue continue
} }
if line.Err != nil { if line.Err != nil {
logger.Warningf("fetch error : %v", line.Err) logger.Warningf("fetch error : %v", line.Err)
return line.Err return line.Err
} }
if line.Text == "" { //skip empty lines
if line.Text == "" { // skip empty lines
continue continue
} }
linesRead.With(prometheus.Labels{"source": tail.Filename}).Inc()
if f.metricsLevel != configuration.METRICS_NONE {
linesRead.With(prometheus.Labels{"source": tail.Filename}).Inc()
}
src := tail.Filename
if f.metricsLevel == configuration.METRICS_AGGREGATE {
src = filepath.Base(tail.Filename)
}
l := types.Line{ l := types.Line{
Raw: trimLine(line.Text), Raw: trimLine(line.Text),
Labels: f.config.Labels, Labels: f.config.Labels,
Time: line.Time, Time: line.Time,
Src: src, Src: tail.Filename,
Process: true, Process: true,
Module: f.GetName(), Module: f.GetName(),
} }
// we're tailing, it must be real time logs //we're tailing, it must be real time logs
logger.Debugf("pushing %+v", l) logger.Debugf("pushing %+v", l)
expectMode := types.LIVE expectMode := types.LIVE
@ -627,14 +496,12 @@ func (f *FileSource) tailFile(out chan types.Event, t *tomb.Tomb, tail *tail.Tai
func (f *FileSource) readFile(filename string, out chan types.Event, t *tomb.Tomb) error { func (f *FileSource) readFile(filename string, out chan types.Event, t *tomb.Tomb) error {
var scanner *bufio.Scanner var scanner *bufio.Scanner
logger := f.logger.WithField("oneshot", filename) logger := f.logger.WithField("oneshot", filename)
fd, err := os.Open(filename) fd, err := os.Open(filename)
if err != nil { if err != nil {
return fmt.Errorf("failed opening %s: %w", filename, err) return fmt.Errorf("failed opening %s: %w", filename, err)
} }
defer fd.Close() defer fd.Close()
if strings.HasSuffix(filename, ".gz") { if strings.HasSuffix(filename, ".gz") {
@ -643,20 +510,17 @@ func (f *FileSource) readFile(filename string, out chan types.Event, t *tomb.Tom
logger.Errorf("Failed to read gz file: %s", err) logger.Errorf("Failed to read gz file: %s", err)
return fmt.Errorf("failed to read gz %s: %w", filename, err) return fmt.Errorf("failed to read gz %s: %w", filename, err)
} }
defer gz.Close() defer gz.Close()
scanner = bufio.NewScanner(gz) scanner = bufio.NewScanner(gz)
} else { } else {
scanner = bufio.NewScanner(fd) scanner = bufio.NewScanner(fd)
} }
scanner.Split(bufio.ScanLines) scanner.Split(bufio.ScanLines)
if f.config.MaxBufferSize > 0 { if f.config.MaxBufferSize > 0 {
buf := make([]byte, 0, 64*1024) buf := make([]byte, 0, 64*1024)
scanner.Buffer(buf, f.config.MaxBufferSize) scanner.Buffer(buf, f.config.MaxBufferSize)
} }
for scanner.Scan() { for scanner.Scan() {
select { select {
case <-t.Dying(): case <-t.Dying():
@ -666,7 +530,6 @@ func (f *FileSource) readFile(filename string, out chan types.Event, t *tomb.Tom
if scanner.Text() == "" { if scanner.Text() == "" {
continue continue
} }
l := types.Line{ l := types.Line{
Raw: scanner.Text(), Raw: scanner.Text(),
Time: time.Now().UTC(), Time: time.Now().UTC(),
@ -678,19 +541,15 @@ func (f *FileSource) readFile(filename string, out chan types.Event, t *tomb.Tom
logger.Debugf("line %s", l.Raw) logger.Debugf("line %s", l.Raw)
linesRead.With(prometheus.Labels{"source": filename}).Inc() linesRead.With(prometheus.Labels{"source": filename}).Inc()
// we're reading logs at once, it must be time-machine buckets //we're reading logs at once, it must be time-machine buckets
out <- types.Event{Line: l, Process: true, Type: types.LOG, ExpectMode: types.TIMEMACHINE} out <- types.Event{Line: l, Process: true, Type: types.LOG, ExpectMode: types.TIMEMACHINE}
} }
} }
if err := scanner.Err(); err != nil { if err := scanner.Err(); err != nil {
logger.Errorf("Error while reading file: %s", err) logger.Errorf("Error while reading file: %s", err)
t.Kill(err) t.Kill(err)
return err return err
} }
t.Kill(nil) t.Kill(nil)
return nil return nil
} }

View file

@ -15,7 +15,6 @@ import (
"github.com/crowdsecurity/go-cs-lib/cstest" "github.com/crowdsecurity/go-cs-lib/cstest"
"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
fileacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/file" fileacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/file"
"github.com/crowdsecurity/crowdsec/pkg/types" "github.com/crowdsecurity/crowdsec/pkg/types"
) )
@ -57,7 +56,7 @@ exclude_regexps: ["as[a-$d"]`,
tc := tc tc := tc
t.Run(tc.name, func(t *testing.T) { t.Run(tc.name, func(t *testing.T) {
f := fileacquisition.FileSource{} f := fileacquisition.FileSource{}
err := f.Configure([]byte(tc.config), subLogger, configuration.METRICS_NONE) err := f.Configure([]byte(tc.config), subLogger)
cstest.RequireErrorContains(t, err, tc.expectedErr) cstest.RequireErrorContains(t, err, tc.expectedErr)
}) })
} }
@ -223,7 +222,7 @@ filename: test_files/test_delete.log`,
tc.setup() tc.setup()
} }
err := f.Configure([]byte(tc.config), subLogger, configuration.METRICS_NONE) err := f.Configure([]byte(tc.config), subLogger)
cstest.RequireErrorContains(t, err, tc.expectedConfigErr) cstest.RequireErrorContains(t, err, tc.expectedConfigErr)
if tc.expectedConfigErr != "" { if tc.expectedConfigErr != "" {
return return
@ -385,7 +384,7 @@ force_inotify: true`, testPattern),
tc.setup() tc.setup()
} }
err := f.Configure([]byte(tc.config), subLogger, configuration.METRICS_NONE) err := f.Configure([]byte(tc.config), subLogger)
require.NoError(t, err) require.NoError(t, err)
if tc.afterConfigure != nil { if tc.afterConfigure != nil {
@ -456,7 +455,7 @@ exclude_regexps: ["\\.gz$"]`
}) })
f := fileacquisition.FileSource{} f := fileacquisition.FileSource{}
if err := f.Configure([]byte(config), subLogger, configuration.METRICS_NONE); err != nil { if err := f.Configure([]byte(config), subLogger); err != nil {
subLogger.Fatalf("unexpected error: %s", err) subLogger.Fatalf("unexpected error: %s", err)
} }

View file

@ -26,11 +26,10 @@ type JournalCtlConfiguration struct {
} }
type JournalCtlSource struct { type JournalCtlSource struct {
metricsLevel int config JournalCtlConfiguration
config JournalCtlConfiguration logger *log.Entry
logger *log.Entry src string
src string args []string
args []string
} }
const journalctlCmd string = "journalctl" const journalctlCmd string = "journalctl"
@ -132,9 +131,7 @@ func (j *JournalCtlSource) runJournalCtl(out chan types.Event, t *tomb.Tomb) err
l.Src = j.src l.Src = j.src
l.Process = true l.Process = true
l.Module = j.GetName() l.Module = j.GetName()
if j.metricsLevel != configuration.METRICS_NONE { linesRead.With(prometheus.Labels{"source": j.src}).Inc()
linesRead.With(prometheus.Labels{"source": j.src}).Inc()
}
var evt types.Event var evt types.Event
if !j.config.UseTimeMachine { if !j.config.UseTimeMachine {
evt = types.Event{Line: l, Process: true, Type: types.LOG, ExpectMode: types.LIVE} evt = types.Event{Line: l, Process: true, Type: types.LOG, ExpectMode: types.LIVE}
@ -197,9 +194,8 @@ func (j *JournalCtlSource) UnmarshalConfig(yamlConfig []byte) error {
return nil return nil
} }
func (j *JournalCtlSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error { func (j *JournalCtlSource) Configure(yamlConfig []byte, logger *log.Entry) error {
j.logger = logger j.logger = logger
j.metricsLevel = MetricsLevel
err := j.UnmarshalConfig(yamlConfig) err := j.UnmarshalConfig(yamlConfig)
if err != nil { if err != nil {

View file

@ -10,7 +10,6 @@ import (
"github.com/crowdsecurity/go-cs-lib/cstest" "github.com/crowdsecurity/go-cs-lib/cstest"
"github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration"
"github.com/crowdsecurity/crowdsec/pkg/types" "github.com/crowdsecurity/crowdsec/pkg/types"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/sirupsen/logrus/hooks/test" "github.com/sirupsen/logrus/hooks/test"
@ -53,7 +52,7 @@ journalctl_filter:
for _, test := range tests { for _, test := range tests {
f := JournalCtlSource{} f := JournalCtlSource{}
err := f.Configure([]byte(test.config), subLogger, configuration.METRICS_NONE) err := f.Configure([]byte(test.config), subLogger)
cstest.AssertErrorContains(t, err, test.expectedErr) cstest.AssertErrorContains(t, err, test.expectedErr)
} }
} }
@ -145,9 +144,9 @@ journalctl_filter:
} }
for _, ts := range tests { for _, ts := range tests {
var ( var (
logger *log.Logger logger *log.Logger
subLogger *log.Entry subLogger *log.Entry
hook *test.Hook hook *test.Hook
) )
if ts.expectedOutput != "" { if ts.expectedOutput != "" {
@ -166,7 +165,7 @@ journalctl_filter:
out := make(chan types.Event, 100) out := make(chan types.Event, 100)
j := JournalCtlSource{} j := JournalCtlSource{}
err := j.Configure([]byte(ts.config), subLogger, configuration.METRICS_NONE) err := j.Configure([]byte(ts.config), subLogger)
if err != nil { if err != nil {
t.Fatalf("Unexpected error : %s", err) t.Fatalf("Unexpected error : %s", err)
} }
@ -219,9 +218,9 @@ journalctl_filter:
} }
for _, ts := range tests { for _, ts := range tests {
var ( var (
logger *log.Logger logger *log.Logger
subLogger *log.Entry subLogger *log.Entry
hook *test.Hook hook *test.Hook
) )
if ts.expectedOutput != "" { if ts.expectedOutput != "" {
@ -240,7 +239,7 @@ journalctl_filter:
out := make(chan types.Event) out := make(chan types.Event)
j := JournalCtlSource{} j := JournalCtlSource{}
err := j.Configure([]byte(ts.config), subLogger, configuration.METRICS_NONE) err := j.Configure([]byte(ts.config), subLogger)
if err != nil { if err != nil {
t.Fatalf("Unexpected error : %s", err) t.Fatalf("Unexpected error : %s", err)
} }

View file

@ -52,10 +52,9 @@ type TLSConfig struct {
} }
type KafkaSource struct { type KafkaSource struct {
metricsLevel int Config KafkaConfiguration
Config KafkaConfiguration logger *log.Entry
logger *log.Entry Reader *kafka.Reader
Reader *kafka.Reader
} }
func (k *KafkaSource) GetUuid() string { func (k *KafkaSource) GetUuid() string {
@ -87,9 +86,8 @@ func (k *KafkaSource) UnmarshalConfig(yamlConfig []byte) error {
return err return err
} }
func (k *KafkaSource) Configure(yamlConfig []byte, logger *log.Entry, MetricsLevel int) error { func (k *KafkaSource) Configure(yamlConfig []byte, logger *log.Entry) error {
k.logger = logger k.logger = logger
k.metricsLevel = MetricsLevel
k.logger.Debugf("start configuring %s source", dataSourceName) k.logger.Debugf("start configuring %s source", dataSourceName)
@ -172,9 +170,7 @@ func (k *KafkaSource) ReadMessage(out chan types.Event) error {
Module: k.GetName(), Module: k.GetName(),
} }
k.logger.Tracef("line with message read from topic '%s': %+v", k.Config.Topic, l) k.logger.Tracef("line with message read from topic '%s': %+v", k.Config.Topic, l)
if k.metricsLevel != configuration.METRICS_NONE { linesRead.With(prometheus.Labels{"topic": k.Config.Topic}).Inc()
linesRead.With(prometheus.Labels{"topic": k.Config.Topic}).Inc()
}
var evt types.Event var evt types.Event
if !k.Config.UseTimeMachine { if !k.Config.UseTimeMachine {

Some files were not shown because too many files have changed in this diff Show more