diff --git a/.dockerignore b/.dockerignore index fd38a48d9..d5275d4e5 100644 --- a/.dockerignore +++ b/.dockerignore @@ -3,3 +3,4 @@ #.git /tests +/crowdsec-v* diff --git a/.github/codecov.yml b/.github/codecov.yml new file mode 100644 index 000000000..5f721427d --- /dev/null +++ b/.github/codecov.yml @@ -0,0 +1,10 @@ +# we measure coverage but don't enforce it +# https://docs.codecov.com/docs/codecov-yaml +coverage: + status: + patch: + default: + target: 0% + project: + default: + target: 0% diff --git a/.github/workflows/bats-hub.yml b/.github/workflows/bats-hub.yml index d3ad66b81..026e8feaa 100644 --- a/.github/workflows/bats-hub.yml +++ b/.github/workflows/bats-hub.yml @@ -15,7 +15,7 @@ jobs: build: strategy: matrix: - go-version: ["1.20.3"] + go-version: ["1.20.4"] name: "Build + tests" runs-on: ubuntu-latest diff --git a/.github/workflows/bats-mysql.yml b/.github/workflows/bats-mysql.yml index 7c47dd085..3c8825a47 100644 --- a/.github/workflows/bats-mysql.yml +++ b/.github/workflows/bats-mysql.yml @@ -14,7 +14,7 @@ jobs: build: strategy: matrix: - go-version: ["1.20.3"] + go-version: ["1.20.4"] name: "Build + tests" runs-on: ubuntu-latest diff --git a/.github/workflows/bats-postgres.yml b/.github/workflows/bats-postgres.yml index 5ce3cec4b..de99a8e22 100644 --- a/.github/workflows/bats-postgres.yml +++ b/.github/workflows/bats-postgres.yml @@ -10,7 +10,7 @@ jobs: build: strategy: matrix: - go-version: ["1.20.3"] + go-version: ["1.20.4"] name: "Build + tests" runs-on: ubuntu-latest @@ -86,24 +86,6 @@ jobs: PGPASSWORD: "secret" PGUSER: postgres -# - name: "Build crowdsec and fixture (DB_BACKEND: postgres)" -# run: make clean bats-build bats-fixture -# env: -# DB_BACKEND: postgres -# PGHOST: 127.0.0.1 -# PGPORT: 5432 -# PGPASSWORD: "secret" -# PGUSER: postgres -# -# - name: "Run tests (DB_BACKEND: postgres)" -# run: make bats-test -# env: -# DB_BACKEND: postgres -# PGHOST: 127.0.0.1 -# PGPORT: 5432 -# PGPASSWORD: "secret" -# PGUSER: postgres - - name: "Show 
stack traces" run: for file in $(find /tmp/crowdsec-crash.*.txt); do echo ">>>>> $file"; cat $file; echo; done if: ${{ always() }} diff --git a/.github/workflows/bats-sqlite-coverage.yml b/.github/workflows/bats-sqlite-coverage.yml index 6d928e86e..7b2e763b3 100644 --- a/.github/workflows/bats-sqlite-coverage.yml +++ b/.github/workflows/bats-sqlite-coverage.yml @@ -11,7 +11,7 @@ jobs: build: strategy: matrix: - go-version: ["1.20.3"] + go-version: ["1.20.4"] name: "Build + tests" runs-on: ubuntu-latest @@ -72,7 +72,6 @@ jobs: -e '/plugins/notifications' \ -e '/pkg/protobufs' \ -e '/pkg/cwversions' \ - -e '/pkg/cstest' \ -e '/pkg/models' \ < coverage-bats-raw.out \ > coverage-bats.out diff --git a/.github/workflows/ci-windows-build-msi.yml b/.github/workflows/ci-windows-build-msi.yml index 6b9da460d..b9d9722f3 100644 --- a/.github/workflows/ci-windows-build-msi.yml +++ b/.github/workflows/ci-windows-build-msi.yml @@ -23,7 +23,7 @@ jobs: build: strategy: matrix: - go-version: ["1.20.3"] + go-version: ["1.20.4"] name: Build runs-on: windows-2019 diff --git a/.github/workflows/go-tests-windows.yml b/.github/workflows/go-tests-windows.yml index ecc402ce1..b8e81bbe0 100644 --- a/.github/workflows/go-tests-windows.yml +++ b/.github/workflows/go-tests-windows.yml @@ -22,7 +22,7 @@ jobs: build: strategy: matrix: - go-version: ["1.20.3"] + go-version: ["1.20.4"] name: "Build + tests" runs-on: windows-2022 diff --git a/.github/workflows/go-tests.yml b/.github/workflows/go-tests.yml index e9f3f407c..9b4adb4ce 100644 --- a/.github/workflows/go-tests.yml +++ b/.github/workflows/go-tests.yml @@ -34,7 +34,7 @@ jobs: build: strategy: matrix: - go-version: ["1.20.3"] + go-version: ["1.20.4"] name: "Build + tests" runs-on: ubuntu-latest diff --git a/.github/workflows/publish_docker-image_on_master-debian.yml b/.github/workflows/publish_docker-image_on_master-debian.yml new file mode 100644 index 000000000..88076157c --- /dev/null +++ 
b/.github/workflows/publish_docker-image_on_master-debian.yml @@ -0,0 +1,70 @@ +name: Publish Debian Docker image on Push to Master + +on: + push: + branches: [ master ] + paths: + - 'pkg/**' + - 'cmd/**' + - 'plugins/**' + - 'docker/docker_start.sh' + - 'docker/config.yaml' + - '.github/workflows/publish_docker-image_on_master-debian.yml' + - 'Dockerfile.debian' + - 'go.mod' + - 'go.sum' + - 'Makefile' + +jobs: + push_to_registry: + name: Push Debian Docker image to Docker Hub + runs-on: ubuntu-latest + steps: + + - name: Check out the repo + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Prepare + id: prep + run: | + DOCKER_IMAGE=crowdsecurity/crowdsec + GHCR_IMAGE=ghcr.io/${{ github.repository_owner }}/crowdsec + VERSION=dev-debian + TAGS="${DOCKER_IMAGE}:${VERSION},${GHCR_IMAGE}:${VERSION}" + echo "tags=${TAGS}" >> $GITHUB_OUTPUT + echo "created=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + with: + config: .github/buildkit.toml + + - name: Login to DockerHub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: Login to GitHub Container Registry + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push full image + uses: docker/build-push-action@v4 + with: + context: . 
+ file: ./Dockerfile.debian + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.prep.outputs.tags }} + platforms: linux/amd64 + labels: | + org.opencontainers.image.source=${{ github.event.repository.html_url }} + org.opencontainers.image.created=${{ steps.prep.outputs.created }} + org.opencontainers.image.revision=${{ github.sha }} + cache-from: type=gha + cache-to: type=gha,mode=min diff --git a/.github/workflows/release_publish-package.yml b/.github/workflows/release_publish-package.yml index 6853f9dc3..600355739 100644 --- a/.github/workflows/release_publish-package.yml +++ b/.github/workflows/release_publish-package.yml @@ -6,11 +6,15 @@ on: types: - prereleased +permissions: + # Use write for: hub release edit + contents: write + jobs: build: strategy: matrix: - go-version: ["1.20.3"] + go-version: ["1.20.4"] name: Build and upload binary package runs-on: ubuntu-latest @@ -43,8 +47,8 @@ jobs: run: make release - name: Upload to release - uses: JasonEtco/upload-to-release@master - with: - args: crowdsec-release.tgz application/x-gzip env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tag_name="${GITHUB_REF##*/}" + hub release edit -a crowdsec-release.tgz -m "" "$tag_name" diff --git a/.github/workflows/release_publish_docker-image-debian.yml b/.github/workflows/release_publish_docker-image-debian.yml index 98d394a40..e766dae09 100644 --- a/.github/workflows/release_publish_docker-image-debian.yml +++ b/.github/workflows/release_publish_docker-image-debian.yml @@ -29,7 +29,7 @@ jobs: VERSION=pr-${{ github.event.number }} fi TAGS="${DOCKER_IMAGE}:${VERSION}-debian" - if [[ ${{ github.event.action }} == released ]]; then + if [[ "${{ github.event.action }}" == "released" ]]; then TAGS=$TAGS,${DOCKER_IMAGE}:latest-debian fi echo "version=${VERSION}" >> $GITHUB_OUTPUT @@ -54,7 +54,7 @@ jobs: file: ./Dockerfile.debian push: ${{ github.event_name != 'pull_request' }} tags: ${{ steps.prep.outputs.tags }} - platforms: 
linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6,linux/386 + platforms: linux/amd64,linux/arm64,linux/386 labels: | org.opencontainers.image.source=${{ github.event.repository.html_url }} org.opencontainers.image.created=${{ steps.prep.outputs.created }} diff --git a/.gitignore b/.gitignore index 46340b9cc..4c5cb0a1f 100644 --- a/.gitignore +++ b/.gitignore @@ -8,10 +8,17 @@ .pc .vscode +# If vendor is included, allow prebuilt (wasm?) libraries. +!vendor/**/*.so + # Test binaries, built with `go test -c` *.test *.cover +# VMs used for dev/test + +.vagrant + # Test binaries, built from *_test.go pkg/csplugin/tests/cs_plugin_test* diff --git a/Dockerfile b/Dockerfile index 18cffa50a..da1c3ab06 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # vim: set ft=dockerfile: -ARG GOVERSION=1.20.3 +ARG GOVERSION=1.20.4 FROM golang:${GOVERSION}-alpine AS build @@ -7,9 +7,19 @@ WORKDIR /go/src/crowdsec COPY . . +# Alpine does not ship a static version of re2, we can build it ourselves +# Later versions require 'abseil', which is likewise not available in its static form +ENV RE2_VERSION=2023-03-01 + # wizard.sh requires GNU coreutils -RUN apk add --no-cache git gcc libc-dev make bash gettext binutils-gold coreutils && \ +RUN apk add --no-cache git g++ gcc libc-dev make bash gettext binutils-gold coreutils icu-static re2-dev pkgconfig && \ + wget https://github.com/google/re2/archive/refs/tags/${RE2_VERSION}.tar.gz && \ + tar -xzf ${RE2_VERSION}.tar.gz && \ + cd re2-${RE2_VERSION} && \ + make && \ + make install && \ echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \ + cd - && \ make clean release DOCKER_BUILD=1 && \ cd crowdsec-v* && \ ./wizard.sh --docker-mode && \ diff --git a/Dockerfile.debian b/Dockerfile.debian index 842adb854..10b06befd 100644 --- a/Dockerfile.debian +++ b/Dockerfile.debian @@ -1,5 +1,5 @@ # vim: set ft=dockerfile: -ARG GOVERSION=1.20.3 +ARG GOVERSION=1.20.4 FROM golang:${GOVERSION}-bullseye AS build @@ -12,7 +12,7 @@ ENV 
DEBCONF_NOWARNINGS="yes" # wizard.sh requires GNU coreutils RUN apt-get update && \ - apt-get install -y -q git gcc libc-dev make bash gettext binutils-gold coreutils tzdata && \ + apt-get install -y -q git gcc libc-dev make bash gettext binutils-gold coreutils tzdata libre2-dev && \ echo "githubciXXXXXXXXXXXXXXXXXXXXXXXX" > /etc/machine-id && \ make clean release DOCKER_BUILD=1 && \ cd crowdsec-v* && \ @@ -44,6 +44,9 @@ RUN apt-get update && \ mkdir -p /staging/var/lib/crowdsec && \ mkdir -p /var/lib/crowdsec/data +RUN echo "deb http://deb.debian.org/debian bullseye-backports main" >> /etc/apt/sources.list \ + && apt-get update && apt-get install -t bullseye-backports -y libsystemd0 + COPY --from=build /go/bin/yq /usr/local/bin/yq COPY --from=build /etc/crowdsec /staging/etc/crowdsec COPY --from=build /usr/local/bin/crowdsec /usr/local/bin/crowdsec diff --git a/Makefile b/Makefile index 05eb0586b..d6f1b95f2 100644 --- a/Makefile +++ b/Makefile @@ -1,36 +1,42 @@ include mk/platform.mk +BUILD_REQUIRE_GO_MAJOR ?= 1 +BUILD_REQUIRE_GO_MINOR ?= 20 + +GOCMD = go +GOTEST = $(GOCMD) test + +BUILD_CODENAME ?= alphaga + CROWDSEC_FOLDER = ./cmd/crowdsec CSCLI_FOLDER = ./cmd/crowdsec-cli/ -HTTP_PLUGIN_FOLDER = ./plugins/notifications/http -SLACK_PLUGIN_FOLDER = ./plugins/notifications/slack -SPLUNK_PLUGIN_FOLDER = ./plugins/notifications/splunk -EMAIL_PLUGIN_FOLDER = ./plugins/notifications/email -DUMMY_PLUGIN_FOLDER = ./plugins/notifications/dummy - -HTTP_PLUGIN_BIN = notification-http$(EXT) -SLACK_PLUGIN_BIN = notification-slack$(EXT) -SPLUNK_PLUGIN_BIN = notification-splunk$(EXT) -EMAIL_PLUGIN_BIN = notification-email$(EXT) -DUMMY_PLUGIN_BIN= notification-dummy$(EXT) - -HTTP_PLUGIN_CONFIG = http.yaml -SLACK_PLUGIN_CONFIG = slack.yaml -SPLUNK_PLUGIN_CONFIG = splunk.yaml -EMAIL_PLUGIN_CONFIG = email.yaml +PLUGINS ?= $(patsubst ./plugins/notifications/%,%,$(wildcard ./plugins/notifications/*)) +PLUGINS_DIR = ./plugins/notifications CROWDSEC_BIN = crowdsec$(EXT) CSCLI_BIN = 
cscli$(EXT) -BUILD_CMD = build + +# Directory for the release files +RELDIR = crowdsec-$(BUILD_VERSION) GO_MODULE_NAME = github.com/crowdsecurity/crowdsec +# see if we have libre2-dev installed for C++ optimizations +RE2_CHECK := $(shell pkg-config --libs re2 2>/dev/null) + +#-------------------------------------- +# +# Define MAKE_FLAGS and LD_OPTS for the sub-makefiles in cmd/ and plugins/ +# + +MAKE_FLAGS = --no-print-directory GOARCH=$(GOARCH) GOOS=$(GOOS) RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)" + LD_OPTS_VARS= \ --X '$(GO_MODULE_NAME)/pkg/cwversion.Version=$(BUILD_VERSION)' \ --X '$(GO_MODULE_NAME)/pkg/cwversion.BuildDate=$(BUILD_TIMESTAMP)' \ +-X 'github.com/crowdsecurity/go-cs-lib/pkg/version.Version=$(BUILD_VERSION)' \ +-X 'github.com/crowdsecurity/go-cs-lib/pkg/version.BuildDate=$(BUILD_TIMESTAMP)' \ +-X 'github.com/crowdsecurity/go-cs-lib/pkg/version.Tag=$(BUILD_TAG)' \ -X '$(GO_MODULE_NAME)/pkg/cwversion.Codename=$(BUILD_CODENAME)' \ --X '$(GO_MODULE_NAME)/pkg/cwversion.Tag=$(BUILD_TAG)' \ -X '$(GO_MODULE_NAME)/pkg/csconfig.defaultConfigDir=$(DEFAULT_CONFIGDIR)' \ -X '$(GO_MODULE_NAME)/pkg/csconfig.defaultDataDir=$(DEFAULT_DATADIR)' @@ -38,66 +44,67 @@ ifneq (,$(DOCKER_BUILD)) LD_OPTS_VARS += -X '$(GO_MODULE_NAME)/pkg/cwversion.System=docker' endif -ifdef BUILD_STATIC -$(warning WARNING: The BUILD_STATIC variable is deprecated and has no effect. Builds are static by default since v1.5.0.) 
+GO_TAGS := netgo,osusergo,sqlite_omit_load_extension + +ifneq (,$(RE2_CHECK)) +# += adds a space that we don't want +GO_TAGS := $(GO_TAGS),re2_cgo +LD_OPTS_VARS += -X '$(GO_MODULE_NAME)/pkg/cwversion.Libre2=C++' endif export LD_OPTS=-ldflags "-s -w -extldflags '-static' $(LD_OPTS_VARS)" \ - -trimpath -tags netgo,osusergo,sqlite_omit_load_extension + -trimpath -tags $(GO_TAGS) ifneq (,$(TEST_COVERAGE)) LD_OPTS += -cover endif -GOCMD = go -GOTEST = $(GOCMD) test - -RELDIR = crowdsec-$(BUILD_VERSION) +#-------------------------------------- .PHONY: build -build: goversion crowdsec cscli plugins +build: pre-build goversion crowdsec cscli plugins + +.PHONY: pre-build +pre-build: +ifdef BUILD_STATIC + $(warning WARNING: The BUILD_STATIC variable is deprecated and has no effect. Builds are static by default since v1.5.0.) +endif + $(info Building $(BUILD_VERSION) ($(BUILD_TAG)) for $(GOOS)/$(GOARCH)) +ifneq (,$(RE2_CHECK)) + $(info Using C++ regexp library) +else + $(info Fallback to WebAssembly regexp library. To use the C++ version, make sure you have installed libre2-dev and pkg-config.) 
+endif + $(info ) .PHONY: all all: clean test build .PHONY: plugins -plugins: http-plugin slack-plugin splunk-plugin email-plugin dummy-plugin +plugins: + @$(foreach plugin,$(PLUGINS), \ + $(MAKE) -C $(PLUGINS_DIR)/$(plugin) build $(MAKE_FLAGS); \ + ) .PHONY: clean clean: testclean - @$(MAKE) -C $(CROWDSEC_FOLDER) clean --no-print-directory RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)" - @$(MAKE) -C $(CSCLI_FOLDER) clean --no-print-directory RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)" + @$(MAKE) -C $(CROWDSEC_FOLDER) clean $(MAKE_FLAGS) + @$(MAKE) -C $(CSCLI_FOLDER) clean $(MAKE_FLAGS) @$(RM) $(CROWDSEC_BIN) $(WIN_IGNORE_ERR) @$(RM) $(CSCLI_BIN) $(WIN_IGNORE_ERR) @$(RM) *.log $(WIN_IGNORE_ERR) @$(RM) crowdsec-release.tgz $(WIN_IGNORE_ERR) - @$(RM) $(HTTP_PLUGIN_FOLDER)/$(HTTP_PLUGIN_BIN) $(WIN_IGNORE_ERR) - @$(RM) $(SLACK_PLUGIN_FOLDER)/$(SLACK_PLUGIN_BIN) $(WIN_IGNORE_ERR) - @$(RM) $(SPLUNK_PLUGIN_FOLDER)/$(SPLUNK_PLUGIN_BIN) $(WIN_IGNORE_ERR) - @$(RM) $(EMAIL_PLUGIN_FOLDER)/$(EMAIL_PLUGIN_BIN) $(WIN_IGNORE_ERR) - @$(RM) $(DUMMY_PLUGIN_FOLDER)/$(DUMMY_PLUGIN_BIN) $(WIN_IGNORE_ERR) - + @$(foreach plugin,$(PLUGINS), \ + $(MAKE) -C $(PLUGINS_DIR)/$(plugin) clean $(MAKE_FLAGS); \ + ) +.PHONY: cscli cscli: goversion - @$(MAKE) -C $(CSCLI_FOLDER) build --no-print-directory GOARCH=$(GOARCH) GOOS=$(GOOS) RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)" + @$(MAKE) -C $(CSCLI_FOLDER) build $(MAKE_FLAGS) +.PHONY: crowdsec crowdsec: goversion - @$(MAKE) -C $(CROWDSEC_FOLDER) build --no-print-directory GOARCH=$(GOARCH) GOOS=$(GOOS) RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)" - -http-plugin: goversion - @$(MAKE) -C $(HTTP_PLUGIN_FOLDER) build --no-print-directory GOARCH=$(GOARCH) GOOS=$(GOOS) RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)" - -slack-plugin: goversion - @$(MAKE) 
-C $(SLACK_PLUGIN_FOLDER) build --no-print-directory GOARCH=$(GOARCH) GOOS=$(GOOS) RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)" - -splunk-plugin: goversion - @$(MAKE) -C $(SPLUNK_PLUGIN_FOLDER) build --no-print-directory GOARCH=$(GOARCH) GOOS=$(GOOS) RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)" - -email-plugin: goversion - @$(MAKE) -C $(EMAIL_PLUGIN_FOLDER) build --no-print-directory GOARCH=$(GOARCH) GOOS=$(GOOS) RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)" - -dummy-plugin: goversion - $(MAKE) -C $(DUMMY_PLUGIN_FOLDER) build --no-print-directory GOARCH=$(GOARCH) GOOS=$(GOOS) RM="$(RM)" WIN_IGNORE_ERR="$(WIN_IGNORE_ERR)" CP="$(CP)" CPR="$(CPR)" MKDIR="$(MKDIR)" + @$(MAKE) -C $(CROWDSEC_FOLDER) build $(MAKE_FLAGS) .PHONY: testclean testclean: bats-clean @@ -130,35 +137,33 @@ localstack: localstack-stop: docker-compose -f test/localstack/docker-compose.yml down -package-common: +.PHONY: vendor +vendor: + @echo "Vendoring dependencies" + @$(GOCMD) mod vendor + @$(foreach plugin,$(PLUGINS), \ + $(MAKE) -C $(PLUGINS_DIR)/$(plugin) vendor $(MAKE_FLAGS); \ + ) + +.PHONY: package +package: @echo "Building Release to dir $(RELDIR)" @$(MKDIR) $(RELDIR)/cmd/crowdsec @$(MKDIR) $(RELDIR)/cmd/crowdsec-cli - @$(MKDIR) $(RELDIR)/$(subst ./,,$(HTTP_PLUGIN_FOLDER)) - @$(MKDIR) $(RELDIR)/$(subst ./,,$(SLACK_PLUGIN_FOLDER)) - @$(MKDIR) $(RELDIR)/$(subst ./,,$(SPLUNK_PLUGIN_FOLDER)) - @$(MKDIR) $(RELDIR)/$(subst ./,,$(EMAIL_PLUGIN_FOLDER)) - @$(CP) $(CROWDSEC_FOLDER)/$(CROWDSEC_BIN) $(RELDIR)/cmd/crowdsec @$(CP) $(CSCLI_FOLDER)/$(CSCLI_BIN) $(RELDIR)/cmd/crowdsec-cli - @$(CP) $(HTTP_PLUGIN_FOLDER)/$(HTTP_PLUGIN_BIN) $(RELDIR)/$(subst ./,,$(HTTP_PLUGIN_FOLDER)) - @$(CP) $(SLACK_PLUGIN_FOLDER)/$(SLACK_PLUGIN_BIN) $(RELDIR)/$(subst ./,,$(SLACK_PLUGIN_FOLDER)) - @$(CP) $(SPLUNK_PLUGIN_FOLDER)/$(SPLUNK_PLUGIN_BIN) $(RELDIR)/$(subst 
./,,$(SPLUNK_PLUGIN_FOLDER)) - @$(CP) $(EMAIL_PLUGIN_FOLDER)/$(EMAIL_PLUGIN_BIN) $(RELDIR)/$(subst ./,,$(EMAIL_PLUGIN_FOLDER)) - - @$(CP) $(HTTP_PLUGIN_FOLDER)/$(HTTP_PLUGIN_CONFIG) $(RELDIR)/$(subst ./,,$(HTTP_PLUGIN_FOLDER)) - @$(CP) $(SLACK_PLUGIN_FOLDER)/$(SLACK_PLUGIN_CONFIG) $(RELDIR)/$(subst ./,,$(SLACK_PLUGIN_FOLDER)) - @$(CP) $(SPLUNK_PLUGIN_FOLDER)/$(SPLUNK_PLUGIN_CONFIG) $(RELDIR)/$(subst ./,,$(SPLUNK_PLUGIN_FOLDER)) - @$(CP) $(EMAIL_PLUGIN_FOLDER)/$(EMAIL_PLUGIN_CONFIG) $(RELDIR)/$(subst ./,,$(EMAIL_PLUGIN_FOLDER)) + @$(foreach plugin,$(PLUGINS), \ + $(MKDIR) $(RELDIR)/$(PLUGINS_DIR)/$(plugin); \ + $(CP) $(PLUGINS_DIR)/$(plugin)/notification-$(plugin)$(EXT) $(RELDIR)/$(PLUGINS_DIR)/$(plugin); \ + $(CP) $(PLUGINS_DIR)/$(plugin)/$(plugin).yaml $(RELDIR)/$(PLUGINS_DIR)/$(plugin)/; \ + ) @$(CPR) ./config $(RELDIR) @$(CP) wizard.sh $(RELDIR) @$(CP) scripts/test_env.sh $(RELDIR) @$(CP) scripts/test_env.ps1 $(RELDIR) -.PHONY: package -package: package-common @tar cvzf crowdsec-release.tgz $(RELDIR) .PHONY: check_release diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 278d019fd..c529ee2fa 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -27,7 +27,7 @@ stages: - task: GoTool@0 displayName: "Install Go 1.20" inputs: - version: '1.20.3' + version: '1.20.4' - pwsh: | choco install -y make diff --git a/cmd/crowdsec-cli/Makefile b/cmd/crowdsec-cli/Makefile index 27ca90ee8..f4d66157f 100644 --- a/cmd/crowdsec-cli/Makefile +++ b/cmd/crowdsec-cli/Makefile @@ -7,9 +7,7 @@ endif # Go parameters GOCMD = go GOBUILD = $(GOCMD) build -GOCLEAN = $(GOCMD) clean GOTEST = $(GOCMD) test -GOGET = $(GOCMD) get BINARY_NAME = cscli$(EXT) PREFIX ?= "/" diff --git a/cmd/crowdsec-cli/alerts.go b/cmd/crowdsec-cli/alerts.go index 0c55e89c6..25cb26515 100644 --- a/cmd/crowdsec-cli/alerts.go +++ b/cmd/crowdsec-cli/alerts.go @@ -20,8 +20,9 @@ import ( "github.com/spf13/cobra" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + 
"github.com/crowdsecurity/crowdsec/pkg/apiclient" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" "github.com/crowdsecurity/crowdsec/pkg/database" "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/types" @@ -219,7 +220,7 @@ func NewAlertsCmd() *cobra.Command { Client, err = apiclient.NewClient(&apiclient.Config{ MachineID: csConfig.API.Client.Credentials.Login, Password: strfmt.Password(csConfig.API.Client.Credentials.Password), - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }) diff --git a/cmd/crowdsec-cli/bouncers.go b/cmd/crowdsec-cli/bouncers.go index 3d751ebf5..839cf84eb 100644 --- a/cmd/crowdsec-cli/bouncers.go +++ b/cmd/crowdsec-cli/bouncers.go @@ -11,6 +11,7 @@ import ( "github.com/fatih/color" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "golang.org/x/exp/slices" middlewares "github.com/crowdsecurity/crowdsec/pkg/apiserver/middlewares/v1" "github.com/crowdsecurity/crowdsec/pkg/database" @@ -122,7 +123,6 @@ func runBouncersAdd(cmd *cobra.Command, args []string) error { return nil } - func NewBouncersAddCmd() *cobra.Command { cmdBouncersAdd := &cobra.Command{ Use: "add MyBouncerName [--length 16]", @@ -133,7 +133,7 @@ cscli bouncers add MyBouncerName -l 24 cscli bouncers add MyBouncerName -k `, Args: cobra.ExactArgs(1), DisableAutoGenTag: true, - RunE: runBouncersAdd, + RunE: runBouncersAdd, } flags := cmdBouncersAdd.Flags() @@ -144,7 +144,6 @@ cscli bouncers add MyBouncerName -k `, return cmdBouncersAdd } - func runBouncersDelete(cmd *cobra.Command, args []string) error { for _, bouncerID := range args { err := dbClient.DeleteBouncer(bouncerID) @@ -157,7 +156,6 @@ func runBouncersDelete(cmd *cobra.Command, args []string) error { return nil } - func NewBouncersDeleteCmd() *cobra.Command { cmdBouncersDelete := &cobra.Command{ Use: "delete MyBouncerName", @@ -178,7 +176,7 @@ func 
NewBouncersDeleteCmd() *cobra.Command { } ret := make([]string, 0) for _, bouncer := range bouncers { - if strings.Contains(bouncer.Name, toComplete) && !inSlice(bouncer.Name, args) { + if strings.Contains(bouncer.Name, toComplete) && !slices.Contains(args, bouncer.Name) { ret = append(ret, bouncer.Name) } } diff --git a/cmd/crowdsec-cli/capi.go b/cmd/crowdsec-cli/capi.go index 0cecad0fa..e67d33ce4 100644 --- a/cmd/crowdsec-cli/capi.go +++ b/cmd/crowdsec-cli/capi.go @@ -6,10 +6,11 @@ import ( "net/url" "os" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/cwhub" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" "github.com/crowdsecurity/crowdsec/pkg/fflag" "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/types" @@ -70,7 +71,7 @@ func NewCapiRegisterCmd() *cobra.Command { _, err = apiclient.RegisterClient(&apiclient.Config{ MachineID: capiUser, Password: password, - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiurl, VersionPrefix: CAPIURLPrefix, }, nil) @@ -164,7 +165,7 @@ func NewCapiStatusCmd() *cobra.Command { log.Fatalf("no scenarios installed, abort") } - Client, err = apiclient.NewDefaultClient(apiurl, CAPIURLPrefix, fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), nil) + Client, err = apiclient.NewDefaultClient(apiurl, CAPIURLPrefix, fmt.Sprintf("crowdsec/%s", version.String()), nil) if err != nil { log.Fatalf("init default client: %s", err) } diff --git a/cmd/crowdsec-cli/config.go b/cmd/crowdsec-cli/config.go index 4f30e8726..e60246db7 100644 --- a/cmd/crowdsec-cli/config.go +++ b/cmd/crowdsec-cli/config.go @@ -13,6 +13,7 @@ func NewConfigCmd() *cobra.Command { } cmdConfig.AddCommand(NewConfigShowCmd()) + cmdConfig.AddCommand(NewConfigShowYAMLCmd()) cmdConfig.AddCommand(NewConfigBackupCmd()) 
cmdConfig.AddCommand(NewConfigRestoreCmd()) cmdConfig.AddCommand(NewConfigFeatureFlagsCmd()) diff --git a/cmd/crowdsec-cli/config_showyaml.go b/cmd/crowdsec-cli/config_showyaml.go new file mode 100644 index 000000000..82bc67ffc --- /dev/null +++ b/cmd/crowdsec-cli/config_showyaml.go @@ -0,0 +1,24 @@ +package main + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +func runConfigShowYAML(cmd *cobra.Command, args []string) error { + fmt.Println(mergedConfig) + return nil +} + +func NewConfigShowYAMLCmd() *cobra.Command { + cmdConfigShow := &cobra.Command{ + Use: "show-yaml", + Short: "Displays merged config.yaml + config.yaml.local", + Args: cobra.ExactArgs(0), + DisableAutoGenTag: true, + RunE: runConfigShowYAML, + } + + return cmdConfigShow +} diff --git a/cmd/crowdsec-cli/console.go b/cmd/crowdsec-cli/console.go index 6bd4ef88b..83886267d 100644 --- a/cmd/crowdsec-cli/console.go +++ b/cmd/crowdsec-cli/console.go @@ -16,10 +16,12 @@ import ( "github.com/spf13/cobra" "gopkg.in/yaml.v3" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/cwhub" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" "github.com/crowdsecurity/crowdsec/pkg/fflag" "github.com/crowdsecurity/crowdsec/pkg/types" ) @@ -101,7 +103,7 @@ After running this command your will need to validate the enrollment in the weba MachineID: csConfig.API.Server.OnlineClient.Credentials.Login, Password: password, Scenarios: scenarios, - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v3", }) @@ -249,7 +251,7 @@ func SetConsoleOpts(args []string, wanted bool) { } } else { log.Infof("%s set to %t", csconfig.CONSOLE_MANAGEMENT, wanted) - csConfig.API.Server.ConsoleConfig.ConsoleManagement = types.BoolPtr(wanted) + 
csConfig.API.Server.ConsoleConfig.ConsoleManagement = ptr.Of(wanted) } if csConfig.API.Server.OnlineClient.Credentials != nil { changed := false @@ -283,7 +285,7 @@ func SetConsoleOpts(args []string, wanted bool) { } } else { log.Infof("%s set to %t", csconfig.SEND_CUSTOM_SCENARIOS, wanted) - csConfig.API.Server.ConsoleConfig.ShareCustomScenarios = types.BoolPtr(wanted) + csConfig.API.Server.ConsoleConfig.ShareCustomScenarios = ptr.Of(wanted) } case csconfig.SEND_TAINTED_SCENARIOS: /*for each flag check if it's already set before setting it*/ @@ -296,7 +298,7 @@ func SetConsoleOpts(args []string, wanted bool) { } } else { log.Infof("%s set to %t", csconfig.SEND_TAINTED_SCENARIOS, wanted) - csConfig.API.Server.ConsoleConfig.ShareTaintedScenarios = types.BoolPtr(wanted) + csConfig.API.Server.ConsoleConfig.ShareTaintedScenarios = ptr.Of(wanted) } case csconfig.SEND_MANUAL_SCENARIOS: /*for each flag check if it's already set before setting it*/ @@ -309,7 +311,7 @@ func SetConsoleOpts(args []string, wanted bool) { } } else { log.Infof("%s set to %t", csconfig.SEND_MANUAL_SCENARIOS, wanted) - csConfig.API.Server.ConsoleConfig.ShareManualDecisions = types.BoolPtr(wanted) + csConfig.API.Server.ConsoleConfig.ShareManualDecisions = ptr.Of(wanted) } case csconfig.SEND_CONTEXT: /*for each flag check if it's already set before setting it*/ @@ -322,7 +324,7 @@ func SetConsoleOpts(args []string, wanted bool) { } } else { log.Infof("%s set to %t", csconfig.SEND_CONTEXT, wanted) - csConfig.API.Server.ConsoleConfig.ShareContext = types.BoolPtr(wanted) + csConfig.API.Server.ConsoleConfig.ShareContext = ptr.Of(wanted) } default: log.Fatalf("unknown flag %s", arg) diff --git a/cmd/crowdsec-cli/decisions.go b/cmd/crowdsec-cli/decisions.go index 8f3ba4a4d..f2f3efcf8 100644 --- a/cmd/crowdsec-cli/decisions.go +++ b/cmd/crowdsec-cli/decisions.go @@ -19,8 +19,10 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + 
"github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/types" ) @@ -120,7 +122,7 @@ func NewDecisionsCmd() *cobra.Command { Client, err = apiclient.NewClient(&apiclient.Config{ MachineID: csConfig.API.Client.Credentials.Login, Password: password, - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiurl, VersionPrefix: "v1", }) @@ -488,6 +490,7 @@ func NewDecisionsImportCmd() *cobra.Command { importReason string importType string importFile string + batchSize int ) var cmdDecisionImport = &cobra.Command{ @@ -578,37 +581,69 @@ decisions.json : log.Debugf("'scope' line %d, using supplied value: '%s'", line, importScope) } decision := models.Decision{ - Value: types.StrPtr(decisionLine.Value), - Duration: types.StrPtr(decisionLine.Duration), - Origin: types.StrPtr(decisionLine.Origin), - Scenario: types.StrPtr(decisionLine.Scenario), - Type: types.StrPtr(decisionLine.Type), - Scope: types.StrPtr(decisionLine.Scope), + Value: ptr.Of(decisionLine.Value), + Duration: ptr.Of(decisionLine.Duration), + Origin: ptr.Of(decisionLine.Origin), + Scenario: ptr.Of(decisionLine.Scenario), + Type: ptr.Of(decisionLine.Type), + Scope: ptr.Of(decisionLine.Scope), Simulated: new(bool), } decisionsList = append(decisionsList, &decision) } alerts := models.AddAlertsRequest{} - importAlert := models.Alert{ - CreatedAt: time.Now().UTC().Format(time.RFC3339), - Scenario: types.StrPtr(fmt.Sprintf("import %s : %d IPs", importFile, len(decisionsList))), - Message: types.StrPtr(""), - Events: []*models.Event{}, - Source: &models.Source{ - Scope: types.StrPtr(""), - Value: types.StrPtr(""), - }, - StartAt: types.StrPtr(time.Now().UTC().Format(time.RFC3339)), - StopAt: types.StrPtr(time.Now().UTC().Format(time.RFC3339)), - Capacity: 
types.Int32Ptr(0), - Simulated: types.BoolPtr(false), - EventsCount: types.Int32Ptr(int32(len(decisionsList))), - Leakspeed: types.StrPtr(""), - ScenarioHash: types.StrPtr(""), - ScenarioVersion: types.StrPtr(""), - Decisions: decisionsList, + + if batchSize > 0 { + for i := 0; i < len(decisionsList); i += batchSize { + end := i + batchSize + if end > len(decisionsList) { + end = len(decisionsList) + } + decisionBatch := decisionsList[i:end] + importAlert := models.Alert{ + CreatedAt: time.Now().UTC().Format(time.RFC3339), + Scenario: ptr.Of(fmt.Sprintf("import %s : %d IPs", importFile, len(decisionBatch))), + + Message: ptr.Of(""), + Events: []*models.Event{}, + Source: &models.Source{ + Scope: ptr.Of(""), + Value: ptr.Of(""), + }, + StartAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)), + StopAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)), + Capacity: ptr.Of(int32(0)), + Simulated: ptr.Of(false), + EventsCount: ptr.Of(int32(len(decisionBatch))), + Leakspeed: ptr.Of(""), + ScenarioHash: ptr.Of(""), + ScenarioVersion: ptr.Of(""), + Decisions: decisionBatch, + } + alerts = append(alerts, &importAlert) + } + } else { + importAlert := models.Alert{ + CreatedAt: time.Now().UTC().Format(time.RFC3339), + Scenario: ptr.Of(fmt.Sprintf("import %s : %d IPs", importFile, len(decisionsList))), + Message: ptr.Of(""), + Events: []*models.Event{}, + Source: &models.Source{ + Scope: ptr.Of(""), + Value: ptr.Of(""), + }, + StartAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)), + StopAt: ptr.Of(time.Now().UTC().Format(time.RFC3339)), + Capacity: ptr.Of(int32(0)), + Simulated: ptr.Of(false), + EventsCount: ptr.Of(int32(len(decisionsList))), + Leakspeed: ptr.Of(""), + ScenarioHash: ptr.Of(""), + ScenarioVersion: ptr.Of(""), + Decisions: decisionsList, + } + alerts = append(alerts, &importAlert) } - alerts = append(alerts, &importAlert) if len(decisionsList) > 1000 { log.Infof("You are about to add %d decisions, this may take a while", len(decisionsList)) @@ -628,6 +663,7 @@ 
decisions.json : cmdDecisionImport.Flags().StringVar(&importScope, "scope", types.Ip, "Decision scope (ie. ip,range,username)") cmdDecisionImport.Flags().StringVarP(&importReason, "reason", "R", "", "Decision reason (ie. scenario-name)") cmdDecisionImport.Flags().StringVarP(&importType, "type", "t", "", "Decision type (ie. ban,captcha,throttle)") + cmdDecisionImport.Flags().IntVar(&batchSize, "batch", 0, "Split import in batches of N decisions") return cmdDecisionImport } diff --git a/cmd/crowdsec-cli/hub.go b/cmd/crowdsec-cli/hub.go index 5a97b9039..4fec8fc8d 100644 --- a/cmd/crowdsec-cli/hub.go +++ b/cmd/crowdsec-cli/hub.go @@ -1,6 +1,7 @@ package main import ( + "errors" "fmt" "github.com/fatih/color" @@ -98,7 +99,15 @@ Fetches the [.index.json](https://github.com/crowdsecurity/hub/blob/master/.inde log.Fatal(err) } if err := cwhub.UpdateHubIdx(csConfig.Hub); err != nil { - log.Fatalf("Failed to get Hub index : %v", err) + if errors.Is(err, cwhub.ErrIndexNotFound) { + log.Warnf("Could not find index file for branch '%s', using 'master'", cwhub.HubBranch) + cwhub.HubBranch = "master" + if err := cwhub.UpdateHubIdx(csConfig.Hub); err != nil { + log.Fatalf("Failed to get Hub index after retry : %v", err) + } + } else { + log.Fatalf("Failed to get Hub index : %v", err) + } } //use LocalSync to get warnings about tainted / outdated items _, warn := cwhub.LocalSync(csConfig.Hub) diff --git a/cmd/crowdsec-cli/lapi.go b/cmd/crowdsec-cli/lapi.go index ae6d3f33f..e8a29d9cd 100644 --- a/cmd/crowdsec-cli/lapi.go +++ b/cmd/crowdsec-cli/lapi.go @@ -12,13 +12,15 @@ import ( "github.com/pkg/errors" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "golang.org/x/exp/slices" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/alertcontext" "github.com/crowdsecurity/crowdsec/pkg/apiclient" "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/cwhub" - 
"github.com/crowdsecurity/crowdsec/pkg/cwversion" "github.com/crowdsecurity/crowdsec/pkg/exprhelpers" "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/parser" @@ -51,7 +53,7 @@ func runLapiStatus(cmd *cobra.Command, args []string) error { Client, err = apiclient.NewDefaultClient(apiurl, LAPIURLPrefix, - fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + fmt.Sprintf("crowdsec/%s", version.String()), nil) if err != nil { log.Fatalf("init default client: %s", err) @@ -122,7 +124,7 @@ func runLapiRegister(cmd *cobra.Command, args []string) error { _, err = apiclient.RegisterClient(&apiclient.Config{ MachineID: lapiUser, Password: password, - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiurl, VersionPrefix: LAPIURLPrefix, }, nil) @@ -258,7 +260,7 @@ cscli lapi context add --key file_source --value evt.Line.Src } data := csConfig.Crowdsec.ContextToSend[keyToAdd] for _, val := range valuesToAdd { - if !inSlice(val, data) { + if !slices.Contains(data, val) { log.Infof("value '%s' added to key '%s'", val, keyToAdd) data = append(data, val) } @@ -332,7 +334,7 @@ cscli lapi context detect crowdsecurity/sshd-logs fieldByParsers := make(map[string][]string) for _, node := range csParsers.Nodes { - if !detectAll && !inSlice(node.Name, args) { + if !detectAll && !slices.Contains(args, node.Name) { continue } if !detectAll { @@ -343,7 +345,7 @@ cscli lapi context detect crowdsecurity/sshd-logs subNodeFields := detectSubNode(node, *csParsers.Ctx) for _, field := range subNodeFields { - if !inSlice(field, fieldByParsers[node.Name]) { + if !slices.Contains(fieldByParsers[node.Name], field) { fieldByParsers[node.Name] = append(fieldByParsers[node.Name], field) } } @@ -411,7 +413,7 @@ cscli lapi context delete --value evt.Line.Src for _, value := range valuesToDelete { valueFound := false for key, context := range csConfig.Crowdsec.ContextToSend { - if 
inSlice(value, context) { + if slices.Contains(context, value) { valueFound = true csConfig.Crowdsec.ContextToSend[key] = removeFromSlice(value, context) log.Infof("value '%s' has been removed from key '%s'", value, key) @@ -443,13 +445,13 @@ func detectStaticField(GrokStatics []types.ExtraField) []string { for _, static := range GrokStatics { if static.Parsed != "" { fieldName := fmt.Sprintf("evt.Parsed.%s", static.Parsed) - if !inSlice(fieldName, ret) { + if !slices.Contains(ret, fieldName) { ret = append(ret, fieldName) } } if static.Meta != "" { fieldName := fmt.Sprintf("evt.Meta.%s", static.Meta) - if !inSlice(fieldName, ret) { + if !slices.Contains(ret, fieldName) { ret = append(ret, fieldName) } } @@ -458,7 +460,7 @@ func detectStaticField(GrokStatics []types.ExtraField) []string { if !strings.HasPrefix(fieldName, "evt.") { fieldName = "evt." + fieldName } - if !inSlice(fieldName, ret) { + if !slices.Contains(ret, fieldName) { ret = append(ret, fieldName) } } @@ -472,7 +474,7 @@ func detectNode(node parser.Node, parserCTX parser.UnixParserCtx) []string { if node.Grok.RunTimeRegexp != nil { for _, capturedField := range node.Grok.RunTimeRegexp.Names() { fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField) - if !inSlice(fieldName, ret) { + if !slices.Contains(ret, fieldName) { ret = append(ret, fieldName) } } @@ -485,7 +487,7 @@ func detectNode(node parser.Node, parserCTX parser.UnixParserCtx) []string { } for _, capturedField := range grokCompiled.Names() { fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField) - if !inSlice(fieldName, ret) { + if !slices.Contains(ret, fieldName) { ret = append(ret, fieldName) } } @@ -494,7 +496,7 @@ func detectNode(node parser.Node, parserCTX parser.UnixParserCtx) []string { if len(node.Grok.Statics) > 0 { staticsField := detectStaticField(node.Grok.Statics) for _, staticField := range staticsField { - if !inSlice(staticField, ret) { + if !slices.Contains(ret, staticField) { ret = append(ret, staticField) } } @@ -503,7 
+505,7 @@ func detectNode(node parser.Node, parserCTX parser.UnixParserCtx) []string { if len(node.Statics) > 0 { staticsField := detectStaticField(node.Statics) for _, staticField := range staticsField { - if !inSlice(staticField, ret) { + if !slices.Contains(ret, staticField) { ret = append(ret, staticField) } } @@ -519,7 +521,7 @@ func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string { if subnode.Grok.RunTimeRegexp != nil { for _, capturedField := range subnode.Grok.RunTimeRegexp.Names() { fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField) - if !inSlice(fieldName, ret) { + if !slices.Contains(ret, fieldName) { ret = append(ret, fieldName) } } @@ -531,7 +533,7 @@ func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string { } for _, capturedField := range grokCompiled.Names() { fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField) - if !inSlice(fieldName, ret) { + if !slices.Contains(ret, fieldName) { ret = append(ret, fieldName) } } @@ -540,7 +542,7 @@ func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string { if len(subnode.Grok.Statics) > 0 { staticsField := detectStaticField(subnode.Grok.Statics) for _, staticField := range staticsField { - if !inSlice(staticField, ret) { + if !slices.Contains(ret, staticField) { ret = append(ret, staticField) } } @@ -549,7 +551,7 @@ func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string { if len(subnode.Statics) > 0 { staticsField := detectStaticField(subnode.Statics) for _, staticField := range staticsField { - if !inSlice(staticField, ret) { + if !slices.Contains(ret, staticField) { ret = append(ret, staticField) } } diff --git a/cmd/crowdsec-cli/machines.go b/cmd/crowdsec-cli/machines.go index 95189252b..25bd5acec 100644 --- a/cmd/crowdsec-cli/machines.go +++ b/cmd/crowdsec-cli/machines.go @@ -18,6 +18,7 @@ import ( "github.com/google/uuid" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "golang.org/x/exp/slices" 
"gopkg.in/yaml.v2" "github.com/crowdsecurity/machineid" @@ -33,8 +34,8 @@ var ( ) func generatePassword(length int) string { - upper := "ABCDEFGHIJKLMNOPQRSTUVWXY" - lower := "abcdefghijklmnopqrstuvwxyz" + upper := "ABCDEFGHIJKLMNOPQRSTUVWXY" + lower := "abcdefghijklmnopqrstuvwxyz" digits := "0123456789" charset := upper + lower + digits @@ -344,7 +345,7 @@ func NewMachinesDeleteCmd() *cobra.Command { } ret := make([]string, 0) for _, machine := range machines { - if strings.Contains(machine.MachineId, toComplete) && !inSlice(machine.MachineId, args) { + if strings.Contains(machine.MachineId, toComplete) && !slices.Contains(args, machine.MachineId) { ret = append(ret, machine.MachineId) } } diff --git a/cmd/crowdsec-cli/main.go b/cmd/crowdsec-cli/main.go index 20872cb1b..beab47913 100644 --- a/cmd/crowdsec-cli/main.go +++ b/cmd/crowdsec-cli/main.go @@ -12,6 +12,7 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" "github.com/spf13/cobra/doc" + "golang.org/x/exp/slices" "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/cwhub" @@ -36,6 +37,8 @@ var all bool var prometheusURL string +var mergedConfig string + func initConfig() { var err error if trace_lvl { @@ -50,8 +53,8 @@ func initConfig() { log.SetLevel(log.ErrorLevel) } - if !inSlice(os.Args[1], NoNeedConfig) { - csConfig, err = csconfig.NewConfig(ConfigFilePath, false, false, true) + if !slices.Contains(NoNeedConfig, os.Args[1]) { + csConfig, mergedConfig, err = csconfig.NewConfig(ConfigFilePath, false, false, true) if err != nil { log.Fatal(err) } diff --git a/cmd/crowdsec-cli/metrics.go b/cmd/crowdsec-cli/metrics.go index 03592a9f7..1ddf4ff66 100644 --- a/cmd/crowdsec-cli/metrics.go +++ b/cmd/crowdsec-cli/metrics.go @@ -5,7 +5,6 @@ import ( "fmt" "io" "net/http" - "os" "strconv" "strings" "time" @@ -17,12 +16,13 @@ import ( "github.com/spf13/cobra" "gopkg.in/yaml.v2" - "github.com/crowdsecurity/crowdsec/pkg/types" + 
"github.com/crowdsecurity/go-cs-lib/pkg/trace" ) // FormatPrometheusMetrics is a complete rip from prom2json func FormatPrometheusMetrics(out io.Writer, url string, formatType string) error { mfChan := make(chan *dto.MetricFamily, 1024) + errChan := make(chan error, 1) // Start with the DefaultTransport for sane defaults. transport := http.DefaultTransport.(*http.Transport).Clone() @@ -32,17 +32,24 @@ func FormatPrometheusMetrics(out io.Writer, url string, formatType string) error // Timeout early if the server doesn't even return the headers. transport.ResponseHeaderTimeout = time.Minute go func() { - defer types.CatchPanic("crowdsec/ShowPrometheus") + defer trace.CatchPanic("crowdsec/ShowPrometheus") err := prom2json.FetchMetricFamilies(url, mfChan, transport) if err != nil { - log.Fatalf("failed to fetch prometheus metrics : %v", err) + errChan <- fmt.Errorf("failed to fetch prometheus metrics: %w", err) + return } + errChan <- nil }() result := []*prom2json.Family{} for mf := range mfChan { result = append(result, prom2json.NewFamily(mf)) } + + if err := <-errChan; err != nil { + return err + } + log.Debugf("Finished reading prometheus output, %d entries", len(result)) /*walk*/ lapi_decisions_stats := map[string]struct { @@ -262,36 +269,44 @@ func FormatPrometheusMetrics(out io.Writer, url string, formatType string) error var noUnit bool + +func runMetrics(cmd *cobra.Command, args []string) error { + if err := csConfig.LoadPrometheus(); err != nil { + return fmt.Errorf("failed to load prometheus config: %w", err) + } + + if csConfig.Prometheus == nil { + return fmt.Errorf("prometheus section missing, can't show metrics") + } + + if !csConfig.Prometheus.Enabled { + return fmt.Errorf("prometheus is not enabled, can't show metrics") + } + + if prometheusURL == "" { + prometheusURL = csConfig.Cscli.PrometheusUrl + } + + if prometheusURL == "" { + return fmt.Errorf("no prometheus url, please specify in %s or via -u", *csConfig.FilePath) + } + + err := 
FormatPrometheusMetrics(color.Output, prometheusURL+"/metrics", csConfig.Cscli.Output) + if err != nil { + return fmt.Errorf("could not fetch prometheus metrics: %w", err) + } + return nil +} + + func NewMetricsCmd() *cobra.Command { - var cmdMetrics = &cobra.Command{ + cmdMetrics := &cobra.Command{ Use: "metrics", Short: "Display crowdsec prometheus metrics.", Long: `Fetch metrics from the prometheus server and display them in a human-friendly way`, Args: cobra.ExactArgs(0), DisableAutoGenTag: true, - Run: func(cmd *cobra.Command, args []string) { - if err := csConfig.LoadPrometheus(); err != nil { - log.Fatal(err) - } - if !csConfig.Prometheus.Enabled { - log.Warning("Prometheus is not enabled, can't show metrics") - os.Exit(1) - } - - if prometheusURL == "" { - prometheusURL = csConfig.Cscli.PrometheusUrl - } - - if prometheusURL == "" { - log.Errorf("No prometheus url, please specify in %s or via -u", *csConfig.FilePath) - os.Exit(1) - } - - err := FormatPrometheusMetrics(color.Output, prometheusURL+"/metrics", csConfig.Cscli.Output) - if err != nil { - log.Fatalf("could not fetch prometheus metrics: %s", err) - } - }, + RunE: runMetrics, } cmdMetrics.PersistentFlags().StringVarP(&prometheusURL, "url", "u", "", "Prometheus url (http://:/metrics)") cmdMetrics.PersistentFlags().BoolVar(&noUnit, "no-unit", false, "Show the real number instead of formatted with units") diff --git a/cmd/crowdsec-cli/notifications.go b/cmd/crowdsec-cli/notifications.go index f9b51b381..ebbefd6b7 100644 --- a/cmd/crowdsec-cli/notifications.go +++ b/cmd/crowdsec-cli/notifications.go @@ -20,11 +20,12 @@ import ( "github.com/spf13/cobra" "gopkg.in/tomb.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/csplugin" "github.com/crowdsecurity/crowdsec/pkg/csprofiles" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" ) @@ -273,7 +274,7 @@ cscli 
notifications reinject -a '{"remediation": true,"scenario":"not client, err := apiclient.NewClient(&apiclient.Config{ MachineID: csConfig.API.Client.Credentials.Login, Password: strfmt.Password(csConfig.API.Client.Credentials.Password), - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }) diff --git a/cmd/crowdsec-cli/papi.go b/cmd/crowdsec-cli/papi.go index 176b81370..d38da0df9 100644 --- a/cmd/crowdsec-cli/papi.go +++ b/cmd/crowdsec-cli/papi.go @@ -1,15 +1,17 @@ package main import ( + "fmt" "time" - "github.com/crowdsecurity/crowdsec/pkg/apiserver" - "github.com/crowdsecurity/crowdsec/pkg/database" - "github.com/crowdsecurity/crowdsec/pkg/types" - "github.com/pkg/errors" log "github.com/sirupsen/logrus" "github.com/spf13/cobra" "gopkg.in/tomb.v2" + + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + + "github.com/crowdsecurity/crowdsec/pkg/apiserver" + "github.com/crowdsecurity/crowdsec/pkg/database" ) func NewPapiCmd() *cobra.Command { @@ -20,7 +22,7 @@ func NewPapiCmd() *cobra.Command { DisableAutoGenTag: true, PersistentPreRunE: func(cmd *cobra.Command, args []string) error { if err := csConfig.LoadAPIServer(); err != nil || csConfig.DisableAPI { - return errors.Wrap(err, "Local API is disabled, please run this command on the local API machine") + return fmt.Errorf("Local API is disabled, please run this command on the local API machine: %w", err) } if csConfig.API.Server.OnlineClient == nil { log.Fatalf("no configuration for Central API in '%s'", *csConfig.FilePath) @@ -71,7 +73,7 @@ func NewPapiStatusCmd() *cobra.Command { var lastTimestampStr *string lastTimestampStr, err = dbClient.GetConfigItem(apiserver.PapiPullKey) if err != nil { - lastTimestampStr = types.StrPtr("never") + lastTimestampStr = ptr.Of("never") } log.Infof("You can successfully interact with Polling API (PAPI)") log.Infof("Console plan: %s", perms.Plan) diff --git 
a/cmd/crowdsec-cli/simulation.go b/cmd/crowdsec-cli/simulation.go index 743712d31..db499e380 100644 --- a/cmd/crowdsec-cli/simulation.go +++ b/cmd/crowdsec-cli/simulation.go @@ -6,6 +6,7 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "golang.org/x/exp/slices" "gopkg.in/yaml.v2" "github.com/crowdsecurity/crowdsec/pkg/cwhub" @@ -161,7 +162,7 @@ func NewSimulationEnableCmd() *cobra.Command { if !item.Installed { log.Warningf("'%s' isn't enabled", scenario) } - isExcluded := inSlice(scenario, csConfig.Cscli.SimulationConfig.Exclusions) + isExcluded := slices.Contains(csConfig.Cscli.SimulationConfig.Exclusions, scenario) if *csConfig.Cscli.SimulationConfig.Simulation && !isExcluded { log.Warning("global simulation is already enabled") continue @@ -210,7 +211,7 @@ func NewSimulationDisableCmd() *cobra.Command { Run: func(cmd *cobra.Command, args []string) { if len(args) > 0 { for _, scenario := range args { - isExcluded := inSlice(scenario, csConfig.Cscli.SimulationConfig.Exclusions) + isExcluded := slices.Contains(csConfig.Cscli.SimulationConfig.Exclusions, scenario) if !*csConfig.Cscli.SimulationConfig.Simulation && !isExcluded { log.Warningf("%s isn't in simulation mode", scenario) continue diff --git a/cmd/crowdsec-cli/support.go b/cmd/crowdsec-cli/support.go index e7110ae70..013abf4b2 100644 --- a/cmd/crowdsec-cli/support.go +++ b/cmd/crowdsec-cli/support.go @@ -18,6 +18,8 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" "github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/cwversion" @@ -182,7 +184,7 @@ func collectAPIStatus(login string, password string, endpoint string, prefix str Client, err = apiclient.NewDefaultClient(apiurl, prefix, - fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + fmt.Sprintf("crowdsec/%s", version.String()), nil) if err != nil { return 
[]byte(fmt.Sprintf("could not init client: %s", err)) diff --git a/cmd/crowdsec-cli/utils.go b/cmd/crowdsec-cli/utils.go index 01817ac09..e7a520172 100644 --- a/cmd/crowdsec-cli/utils.go +++ b/cmd/crowdsec-cli/utils.go @@ -19,8 +19,11 @@ import ( log "github.com/sirupsen/logrus" "github.com/spf13/cobra" "github.com/texttheater/golang-levenshtein/levenshtein" + "golang.org/x/exp/slices" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/cwhub" "github.com/crowdsecurity/crowdsec/pkg/database" "github.com/crowdsecurity/crowdsec/pkg/types" @@ -35,15 +38,6 @@ func printHelp(cmd *cobra.Command) { } } -func inSlice(s string, slice []string) bool { - for _, str := range slice { - if s == str { - return true - } - } - return false -} - func indexOf(s string, slice []string) int { for i, elem := range slice { if s == elem { @@ -113,7 +107,7 @@ func compAllItems(itemType string, args []string, toComplete string) ([]string, comp := make([]string, 0) hubItems := cwhub.GetHubStatusForItemType(itemType, "", true) for _, item := range hubItems { - if !inSlice(item.Name, args) && strings.Contains(item.Name, toComplete) { + if !slices.Contains(args, item.Name) && strings.Contains(item.Name, toComplete) { comp = append(comp, item.Name) } } @@ -515,7 +509,7 @@ func GetPrometheusMetric(url string) []*prom2json.Family { transport.ResponseHeaderTimeout = time.Minute go func() { - defer types.CatchPanic("crowdsec/GetPrometheusMetric") + defer trace.CatchPanic("crowdsec/GetPrometheusMetric") err := prom2json.FetchMetricFamilies(url, mfChan, transport) if err != nil { log.Fatalf("failed to fetch prometheus metrics : %v", err) @@ -691,30 +685,13 @@ type unit struct { } var ranges = []unit{ - { - value: 1e18, - symbol: "E", - }, - { - value: 1e15, - symbol: "P", - }, - { - value: 1e12, - symbol: "T", - }, - { - value: 1e6, - symbol: "M", - }, - { - value: 1e3, - symbol: "k", - }, - { - value: 1, - symbol: "", - }, + {value: 1e18, 
symbol: "E"}, + {value: 1e15, symbol: "P"}, + {value: 1e12, symbol: "T"}, + {value: 1e9, symbol: "G"}, + {value: 1e6, symbol: "M"}, + {value: 1e3, symbol: "k"}, + {value: 1, symbol: ""}, } func formatNumber(num int) string { @@ -746,7 +723,6 @@ func getDBClient() (*database.Client, error) { return ret, nil } - func removeFromSlice(val string, slice []string) []string { var i int var value string diff --git a/cmd/crowdsec/Makefile b/cmd/crowdsec/Makefile index ba795b11a..8242f1b49 100644 --- a/cmd/crowdsec/Makefile +++ b/cmd/crowdsec/Makefile @@ -7,9 +7,7 @@ endif # Go parameters GOCMD = go GOBUILD = $(GOCMD) build -GOCLEAN = $(GOCMD) clean GOTEST = $(GOCMD) test -GOGET = $(GOCMD) get CROWDSEC_BIN = crowdsec$(EXT) # names longer than 15 chars break 'pgrep' @@ -31,7 +29,7 @@ test: $(GOTEST) $(LD_OPTS) -v ./... clean: - @$(RM) $(CROWDSEC_BIN) $(CROWDSEC_BIN).test $(WIN_IGNORE_ERR) + @$(RM) $(CROWDSEC_BIN) $(WIN_IGNORE_ERR) .PHONY: install install: install-conf install-bin @@ -59,7 +57,7 @@ install-conf: install-bin: install -v -m 755 -D "$(CROWDSEC_BIN)" "$(BIN_PREFIX)/$(CROWDSEC_BIN)" || exit -.PHONY: systemd"$(BIN_PREFI"$(BIN_PREFIX)/$(CROWDSEC_BIN)""$(BIN_PREFIX)/$(CROWDSEC_BIN)"X)/$(CROWDSEC_BIN)" +.PHONY: systemd systemd: install CFG=$(CFG_PREFIX) PID=$(PID_DIR) BIN=$(BIN_PREFIX)"/"$(CROWDSEC_BIN) envsubst < ../../config/crowdsec.service > "$(SYSTEMD_PATH_FILE)" systemctl daemon-reload diff --git a/cmd/crowdsec/api.go b/cmd/crowdsec/api.go index daae1ca7a..3ce249d4c 100644 --- a/cmd/crowdsec/api.go +++ b/cmd/crowdsec/api.go @@ -7,9 +7,10 @@ import ( "github.com/pkg/errors" log "github.com/sirupsen/logrus" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/apiserver" "github.com/crowdsecurity/crowdsec/pkg/csconfig" - "github.com/crowdsecurity/crowdsec/pkg/types" ) func initAPIServer(cConfig *csconfig.Config) (*apiserver.APIServer, error) { @@ -52,9 +53,9 @@ func initAPIServer(cConfig *csconfig.Config) (*apiserver.APIServer, 
error) { func serveAPIServer(apiServer *apiserver.APIServer, apiReady chan bool) { apiTomb.Go(func() error { - defer types.CatchPanic("crowdsec/serveAPIServer") + defer trace.CatchPanic("crowdsec/serveAPIServer") go func() { - defer types.CatchPanic("crowdsec/runAPIServer") + defer trace.CatchPanic("crowdsec/runAPIServer") log.Debugf("serving API after %s ms", time.Since(crowdsecT0)) if err := apiServer.Run(apiReady); err != nil { log.Fatal(err) diff --git a/cmd/crowdsec/crowdsec.go b/cmd/crowdsec/crowdsec.go index 10ac48aba..8b4487e15 100644 --- a/cmd/crowdsec/crowdsec.go +++ b/cmd/crowdsec/crowdsec.go @@ -8,6 +8,8 @@ import ( "path/filepath" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/acquisition" "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/cwhub" @@ -53,7 +55,7 @@ func runCrowdsec(cConfig *csconfig.Config, parsers *parser.Parsers) error { parserWg.Add(1) for i := 0; i < cConfig.Crowdsec.ParserRoutinesCount; i++ { parsersTomb.Go(func() error { - defer types.CatchPanic("crowdsec/runParse") + defer trace.CatchPanic("crowdsec/runParse") if err := runParse(inputLineChan, inputEventChan, *parsers.Ctx, parsers.Nodes); err != nil { //this error will never happen as parser.Parse is not able to return errors log.Fatalf("starting parse error : %s", err) return err @@ -79,7 +81,7 @@ func runCrowdsec(cConfig *csconfig.Config, parsers *parser.Parsers) error { for i := 0; i < cConfig.Crowdsec.BucketsRoutinesCount; i++ { bucketsTomb.Go(func() error { - defer types.CatchPanic("crowdsec/runPour") + defer trace.CatchPanic("crowdsec/runPour") if err := runPour(inputEventChan, holders, buckets, cConfig); err != nil { log.Fatalf("starting pour error : %s", err) return err @@ -97,7 +99,7 @@ func runCrowdsec(cConfig *csconfig.Config, parsers *parser.Parsers) error { outputWg.Add(1) for i := 0; i < cConfig.Crowdsec.OutputRoutinesCount; i++ { outputsTomb.Go(func() error { - defer 
types.CatchPanic("crowdsec/runOutput") + defer trace.CatchPanic("crowdsec/runOutput") if err := runOutput(inputEventChan, outputEventChan, buckets, *parsers.Povfwctx, parsers.Povfwnodes, *cConfig.API.Client.Credentials); err != nil { log.Fatalf("starting outputs error : %s", err) return err @@ -132,9 +134,9 @@ func runCrowdsec(cConfig *csconfig.Config, parsers *parser.Parsers) error { func serveCrowdsec(parsers *parser.Parsers, cConfig *csconfig.Config, agentReady chan bool) { crowdsecTomb.Go(func() error { - defer types.CatchPanic("crowdsec/serveCrowdsec") + defer trace.CatchPanic("crowdsec/serveCrowdsec") go func() { - defer types.CatchPanic("crowdsec/runCrowdsec") + defer trace.CatchPanic("crowdsec/runCrowdsec") // this logs every time, even at config reload log.Debugf("running agent after %s ms", time.Since(crowdsecT0)) agentReady <- true diff --git a/cmd/crowdsec/main.go b/cmd/crowdsec/main.go index 1cd92c293..767097f0e 100644 --- a/cmd/crowdsec/main.go +++ b/cmd/crowdsec/main.go @@ -154,7 +154,9 @@ func (f *Flags) Parse() { flag.BoolVar(&f.DisableAgent, "no-cs", false, "disable crowdsec agent") flag.BoolVar(&f.DisableAPI, "no-api", false, "disable local API") flag.BoolVar(&f.DisableCAPI, "no-capi", false, "disable communication with Central API") - flag.StringVar(&f.WinSvc, "winsvc", "", "Windows service Action : Install, Remove etc..") + if runtime.GOOS == "windows" { + flag.StringVar(&f.WinSvc, "winsvc", "", "Windows service Action: Install, Remove etc..") + } flag.StringVar(&dumpFolder, "dump-data", "", "dump parsers/buckets raw outputs") flag.Parse() } @@ -192,7 +194,7 @@ func newLogLevel(curLevelPtr *log.Level, f *Flags) *log.Level { // LoadConfig returns a configuration parsed from configuration file func LoadConfig(configFile string, disableAgent bool, disableAPI bool, quiet bool) (*csconfig.Config, error) { - cConfig, err := csconfig.NewConfig(configFile, disableAgent, disableAPI, quiet) + cConfig, _, err := csconfig.NewConfig(configFile, 
disableAgent, disableAPI, quiet) if err != nil { return nil, err } diff --git a/cmd/crowdsec/metrics.go b/cmd/crowdsec/metrics.go index 6b549603f..8e87eecd0 100644 --- a/cmd/crowdsec/metrics.go +++ b/cmd/crowdsec/metrics.go @@ -9,15 +9,16 @@ import ( "github.com/prometheus/client_golang/prometheus/promhttp" log "github.com/sirupsen/logrus" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + v1 "github.com/crowdsecurity/crowdsec/pkg/apiserver/controllers/v1" "github.com/crowdsecurity/crowdsec/pkg/cache" "github.com/crowdsecurity/crowdsec/pkg/csconfig" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" "github.com/crowdsecurity/crowdsec/pkg/database" "github.com/crowdsecurity/crowdsec/pkg/exprhelpers" leaky "github.com/crowdsecurity/crowdsec/pkg/leakybucket" "github.com/crowdsecurity/crowdsec/pkg/parser" - "github.com/crowdsecurity/crowdsec/pkg/types" ) /*prometheus*/ @@ -61,7 +62,7 @@ var globalCsInfo = prometheus.NewGauge( prometheus.GaugeOpts{ Name: "cs_info", Help: "Information about Crowdsec.", - ConstLabels: prometheus.Labels{"version": cwversion.VersionStr()}, + ConstLabels: prometheus.Labels{"version": version.String()}, }, ) @@ -187,7 +188,7 @@ func servePrometheus(config *csconfig.PrometheusCfg, dbClient *database.Client, return } - defer types.CatchPanic("crowdsec/servePrometheus") + defer trace.CatchPanic("crowdsec/servePrometheus") http.Handle("/metrics", computeDynamicMetrics(promhttp.Handler(), dbClient)) <-apiReady diff --git a/cmd/crowdsec/output.go b/cmd/crowdsec/output.go index efeab7720..17cc99827 100644 --- a/cmd/crowdsec/output.go +++ b/cmd/crowdsec/output.go @@ -7,10 +7,11 @@ import ( "sync" "time" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/cwhub" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" leaky 
"github.com/crowdsecurity/crowdsec/pkg/leakybucket" "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/parser" @@ -88,7 +89,7 @@ func runOutput(input chan types.Event, overflow chan types.Event, buckets *leaky MachineID: apiConfig.Login, Password: password, Scenarios: scenarios, - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, PapiURL: papiURL, VersionPrefix: "v1", diff --git a/cmd/crowdsec/run_in_svc.go b/cmd/crowdsec/run_in_svc.go index d655afe88..a5ab996b5 100644 --- a/cmd/crowdsec/run_in_svc.go +++ b/cmd/crowdsec/run_in_svc.go @@ -10,10 +10,11 @@ import ( log "github.com/sirupsen/logrus" "github.com/sirupsen/logrus/hooks/writer" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/csconfig" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" "github.com/crowdsecurity/crowdsec/pkg/database" - "github.com/crowdsecurity/crowdsec/pkg/types" ) func StartRunSvc() error { @@ -22,7 +23,7 @@ func StartRunSvc() error { err error ) - defer types.CatchPanic("crowdsec/StartRunSvc") + defer trace.CatchPanic("crowdsec/StartRunSvc") // Set a default logger with level=fatal on stderr, // in addition to the one we configure afterwards @@ -38,7 +39,7 @@ func StartRunSvc() error { return err } - log.Infof("Crowdsec %s", cwversion.VersionStr()) + log.Infof("Crowdsec %s", version.String()) apiReady := make(chan bool, 1) agentReady := make(chan bool, 1) diff --git a/cmd/crowdsec/run_in_svc_windows.go b/cmd/crowdsec/run_in_svc_windows.go index 54287f770..c51d24147 100644 --- a/cmd/crowdsec/run_in_svc_windows.go +++ b/cmd/crowdsec/run_in_svc_windows.go @@ -7,17 +7,18 @@ import ( log "github.com/sirupsen/logrus" "golang.org/x/sys/windows/svc" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + 
"github.com/crowdsecurity/crowdsec/pkg/csconfig" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" "github.com/crowdsecurity/crowdsec/pkg/database" - "github.com/crowdsecurity/crowdsec/pkg/types" ) func StartRunSvc() error { const svcName = "CrowdSec" const svcDescription = "Crowdsec IPS/IDS" - defer types.CatchPanic("crowdsec/StartRunSvc") + defer trace.CatchPanic("crowdsec/StartRunSvc") isRunninginService, err := svc.IsWindowsService() if err != nil { @@ -66,7 +67,7 @@ func WindowsRun() error { return err } // Configure logging - log.Infof("Crowdsec %s", cwversion.VersionStr()) + log.Infof("Crowdsec %s", version.String()) apiReady := make(chan bool, 1) agentReady := make(chan bool, 1) diff --git a/cmd/crowdsec/serve.go b/cmd/crowdsec/serve.go index 464af6327..5e2e8b720 100644 --- a/cmd/crowdsec/serve.go +++ b/cmd/crowdsec/serve.go @@ -11,6 +11,8 @@ import ( log "github.com/sirupsen/logrus" "gopkg.in/tomb.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/database" "github.com/crowdsecurity/crowdsec/pkg/exprhelpers" @@ -226,7 +228,7 @@ func HandleSignals(cConfig *csconfig.Config) error { exitChan := make(chan error) go func() { - defer types.CatchPanic("crowdsec/HandleSignals") + defer trace.CatchPanic("crowdsec/HandleSignals") Loop: for { s := <-signalChan diff --git a/config/acquis_win.yaml b/config/acquis_win.yaml index a22dc260e..86d233cca 100644 --- a/config/acquis_win.yaml +++ b/config/acquis_win.yaml @@ -1,3 +1,4 @@ +##RDP source: wineventlog event_channel: Security event_ids: @@ -5,4 +6,26 @@ event_ids: - 4623 event_level: information labels: - type: eventlog \ No newline at end of file + type: eventlog +--- +##Firewall +filenames: + - C:\Windows\System32\LogFiles\Firewall\pfirewall.log +labels: + type: windows-firewall +--- +##SQL Server +source: wineventlog +event_channel: Application +event_ids: + - 18456 +event_level: information +labels: + type: eventlog +--- 
+##IIS +use_time_machine: true +filenames: + - C:\inetpub\logs\LogFiles\*\*.log +labels: + type: iis \ No newline at end of file diff --git a/config/simulation.yaml b/config/simulation.yaml index e9c689993..dad850204 100644 --- a/config/simulation.yaml +++ b/config/simulation.yaml @@ -1,4 +1,3 @@ -simulation: off +simulation: false # exclusions: # - crowdsecurity/ssh-bf - \ No newline at end of file diff --git a/debian/rules b/debian/rules index 9f9258a2f..6683e5443 100755 --- a/debian/rules +++ b/debian/rules @@ -4,12 +4,6 @@ export DEB_VERSION=$(shell dpkg-parsechangelog | egrep '^Version:' | cut -f 2 -d export BUILD_VERSION=v${DEB_VERSION}-debian-pragmatic export GO111MODULE=on -# LD_OPTS=-ldflags "-s -w -X github.com/crowdsecurity/crowdsec/pkg/cwversion.Version=$(BUILD_VERSION) \ -# -X github.com/crowdsecurity/crowdsec/pkg/cwversion.BuildDate=$(BUILD_TIMESTAMP) \ -# -X github.com/crowdsecurity/crowdsec/pkg/cwversion.Codename=$(BUILD_CODENAME) \ -# -X github.com/crowdsecurity/crowdsec/pkg/cwversion.Tag=$(BUILD_TAG) \ -# -X github.com/crowdsecurity/crowdsec/pkg/cwversion.GoVersion=$(BUILD_GOVERSION)" - %: dh $@ diff --git a/docker/README.md b/docker/README.md index 86dcbc73f..e1c7b517e 100644 --- a/docker/README.md +++ b/docker/README.md @@ -280,6 +280,7 @@ config.yaml) each time the container is run. 
| __LAPI__ | | (useless with DISABLE_LOCAL_API) | | `USE_WAL` | false | Enable Write-Ahead Logging with SQLite | | `CUSTOM_HOSTNAME` | localhost | Name for the local agent (running in the container with LAPI) | +| `CAPI_WHITELISTS_PATH` | | Path for capi_whitelists.yaml | | | | | | __Agent__ | | (these don't work with DISABLE_AGENT) | | `TYPE` | | [`Labels.type`](https://docs.crowdsec.net/Crowdsec/v1/references/acquisition/) for file in time-machine: `-e TYPE=""` | diff --git a/docker/docker_start.sh b/docker/docker_start.sh index 3f9b0030a..8ec449103 100755 --- a/docker/docker_start.sh +++ b/docker/docker_start.sh @@ -187,7 +187,6 @@ fi lapi_credentials_path=$(conf_get '.api.client.credentials_path') - if isfalse "$DISABLE_LOCAL_API"; then # generate local agent credentials (even if agent is disabled, cscli needs a # connection to the API) @@ -365,6 +364,11 @@ for BOUNCER in /run/secrets/@(bouncer_key|BOUNCER_KEY)* ; do done shopt -u nullglob extglob +# set all options before validating the configuration + +conf_set_if "$CAPI_WHITELISTS_PATH" '.api.server.capi_whitelists_path = strenv(CAPI_WHITELISTS_PATH)' +conf_set_if "$METRICS_PORT" '.prometheus.listen_port=env(METRICS_PORT)' + ARGS="" if [ "$CONFIG_FILE" != "" ]; then ARGS="-c $CONFIG_FILE" @@ -402,7 +406,5 @@ if istrue "$LEVEL_INFO"; then ARGS="$ARGS -info" fi -conf_set_if "$METRICS_PORT" '.prometheus.listen_port=env(METRICS_PORT)' - # shellcheck disable=SC2086 exec crowdsec $ARGS diff --git a/docker/test/Pipfile b/docker/test/Pipfile index 6a617720d..980ed8816 100644 --- a/docker/test/Pipfile +++ b/docker/test/Pipfile @@ -1,11 +1,11 @@ [packages] pytest-dotenv = "*" pytest-xdist = "*" -pytest-cs = {ref = "0.4.0", git = "https://github.com/crowdsecurity/pytest-cs.git"} +pytest-cs = {ref = "0.7.16", git = "https://github.com/crowdsecurity/pytest-cs.git"} [dev-packages] gnureadline = "*" ipdb = "*" [requires] -python_version = "3.10" +python_version = "*" diff --git a/docker/test/Pipfile.lock 
b/docker/test/Pipfile.lock index ec03981db..3a3a60c5f 100644 --- a/docker/test/Pipfile.lock +++ b/docker/test/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "da2959f993eb751a5f6d2b1c4537ba39ed414d0e9d300dc513ced5a8f0ab4261" + "sha256": "7e91f125d4ad0d1f1b5da7ef441d75baf4f28788c791803a216cb6956b131ea9" }, "pipfile-spec": 6, "requires": { - "python_version": "3.10" + "python_version": "*" }, "sources": [ { @@ -16,21 +16,13 @@ ] }, "default": { - "attrs": { - "hashes": [ - "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836", - "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99" - ], - "markers": "python_version >= '3.6'", - "version": "==22.2.0" - }, "certifi": { "hashes": [ - "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3", - "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18" + "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7", + "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716" ], "markers": "python_version >= '3.6'", - "version": "==2022.12.7" + "version": "==2023.5.7" }, "cffi": { "hashes": [ @@ -184,48 +176,36 @@ }, "cryptography": { "hashes": [ - "sha256:103e8f7155f3ce2ffa0049fe60169878d47a4364b277906386f8de21c9234aa1", - "sha256:23df8ca3f24699167daf3e23e51f7ba7334d504af63a94af468f468b975b7dd7", - "sha256:2725672bb53bb92dc7b4150d233cd4b8c59615cd8288d495eaa86db00d4e5c06", - "sha256:30b1d1bfd00f6fc80d11300a29f1d8ab2b8d9febb6ed4a38a76880ec564fae84", - "sha256:35d658536b0a4117c885728d1a7032bdc9a5974722ae298d6c533755a6ee3915", - "sha256:50cadb9b2f961757e712a9737ef33d89b8190c3ea34d0fb6675e00edbe35d074", - "sha256:5f8c682e736513db7d04349b4f6693690170f95aac449c56f97415c6980edef5", - "sha256:6236a9610c912b129610eb1a274bdc1350b5df834d124fa84729ebeaf7da42c3", - "sha256:788b3921d763ee35dfdb04248d0e3de11e3ca8eb22e2e48fef880c42e1f3c8f9", - "sha256:8bc0008ef798231fac03fe7d26e82d601d15bd16f3afaad1c6113771566570f3", - 
"sha256:8f35c17bd4faed2bc7797d2a66cbb4f986242ce2e30340ab832e5d99ae60e011", - "sha256:b49a88ff802e1993b7f749b1eeb31134f03c8d5c956e3c125c75558955cda536", - "sha256:bc0521cce2c1d541634b19f3ac661d7a64f9555135e9d8af3980965be717fd4a", - "sha256:bc5b871e977c8ee5a1bbc42fa8d19bcc08baf0c51cbf1586b0e87a2694dde42f", - "sha256:c43ac224aabcbf83a947eeb8b17eaf1547bce3767ee2d70093b461f31729a480", - "sha256:d15809e0dbdad486f4ad0979753518f47980020b7a34e9fc56e8be4f60702fac", - "sha256:d7d84a512a59f4412ca8549b01f94be4161c94efc598bf09d027d67826beddc0", - "sha256:e029b844c21116564b8b61216befabca4b500e6816fa9f0ba49527653cae2108", - "sha256:e8a0772016feeb106efd28d4a328e77dc2edae84dfbac06061319fdb669ff828", - "sha256:e944fe07b6f229f4c1a06a7ef906a19652bdd9fd54c761b0ff87e83ae7a30354", - "sha256:eb40fe69cfc6f5cdab9a5ebd022131ba21453cf7b8a7fd3631f45bbf52bed612", - "sha256:fa507318e427169ade4e9eccef39e9011cdc19534f55ca2f36ec3f388c1f70f3", - "sha256:ffd394c7896ed7821a6d13b24657c6a34b6e2650bd84ae063cf11ccffa4f1a97" + "sha256:05dc219433b14046c476f6f09d7636b92a1c3e5808b9a6536adf4932b3b2c440", + "sha256:0dcca15d3a19a66e63662dc8d30f8036b07be851a8680eda92d079868f106288", + "sha256:142bae539ef28a1c76794cca7f49729e7c54423f615cfd9b0b1fa90ebe53244b", + "sha256:3daf9b114213f8ba460b829a02896789751626a2a4e7a43a28ee77c04b5e4958", + "sha256:48f388d0d153350f378c7f7b41497a54ff1513c816bcbbcafe5b829e59b9ce5b", + "sha256:4df2af28d7bedc84fe45bd49bc35d710aede676e2a4cb7fc6d103a2adc8afe4d", + "sha256:4f01c9863da784558165f5d4d916093737a75203a5c5286fde60e503e4276c7a", + "sha256:7a38250f433cd41df7fcb763caa3ee9362777fdb4dc642b9a349721d2bf47404", + "sha256:8f79b5ff5ad9d3218afb1e7e20ea74da5f76943ee5edb7f76e56ec5161ec782b", + "sha256:956ba8701b4ffe91ba59665ed170a2ebbdc6fc0e40de5f6059195d9f2b33ca0e", + "sha256:a04386fb7bc85fab9cd51b6308633a3c271e3d0d3eae917eebab2fac6219b6d2", + "sha256:a95f4802d49faa6a674242e25bfeea6fc2acd915b5e5e29ac90a32b1139cae1c", + "sha256:adc0d980fd2760c9e5de537c28935cc32b9353baaf28e0814df417619c6c8c3b", 
+ "sha256:aecbb1592b0188e030cb01f82d12556cf72e218280f621deed7d806afd2113f9", + "sha256:b12794f01d4cacfbd3177b9042198f3af1c856eedd0a98f10f141385c809a14b", + "sha256:c0764e72b36a3dc065c155e5b22f93df465da9c39af65516fe04ed3c68c92636", + "sha256:c33c0d32b8594fa647d2e01dbccc303478e16fdd7cf98652d5b3ed11aa5e5c99", + "sha256:cbaba590180cba88cb99a5f76f90808a624f18b169b90a4abb40c1fd8c19420e", + "sha256:d5a1bd0e9e2031465761dfa920c16b0065ad77321d8a8c1f5ee331021fda65e9" ], "markers": "python_version >= '3.6'", - "version": "==39.0.2" + "version": "==40.0.2" }, "docker": { "hashes": [ - "sha256:896c4282e5c7af5c45e8b683b0b0c33932974fe6e50fc6906a0a83616ab3da97", - "sha256:dbcb3bd2fa80dca0788ed908218bf43972772009b881ed1e20dfc29a65e49782" + "sha256:134cd828f84543cbf8e594ff81ca90c38288df3c0a559794c12f2e4b634ea19e", + "sha256:dcc088adc2ec4e7cfc594e275d8bd2c9738c56c808de97476939ef67db5af8c2" ], "markers": "python_version >= '3.7'", - "version": "==6.0.1" - }, - "exceptiongroup": { - "hashes": [ - "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", - "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" - ], - "markers": "python_version < '3.11'", - "version": "==1.1.1" + "version": "==6.1.2" }, "execnet": { "hashes": [ @@ -253,11 +233,11 @@ }, "packaging": { "hashes": [ - "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2", - "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97" + "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", + "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" ], "markers": "python_version >= '3.7'", - "version": "==23.0" + "version": "==23.1" }, "pluggy": { "hashes": [ @@ -269,23 +249,23 @@ }, "psutil": { "hashes": [ - "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff", - "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1", - 
"sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62", - "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549", - "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08", - "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7", - "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e", - "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe", - "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24", - "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad", - "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94", - "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8", - "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7", - "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4" + "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d", + "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217", + "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4", + "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c", + "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f", + "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da", + "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4", + "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42", + "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5", + "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4", + "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9", + "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f", + "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30", + "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48" 
], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==5.9.4" + "version": "==5.9.5" }, "pycparser": { "hashes": [ @@ -296,15 +276,15 @@ }, "pytest": { "hashes": [ - "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e", - "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4" + "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362", + "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3" ], "markers": "python_version >= '3.7'", - "version": "==7.2.2" + "version": "==7.3.1" }, "pytest-cs": { "git": "https://github.com/crowdsecurity/pytest-cs.git", - "ref": "8c49bd8072672c49855a7991a5900858b3ebb777" + "ref": "4a3451084215053af8a48ff37507b4f86bf75c10" }, "pytest-datadir": { "hashes": [ @@ -324,11 +304,11 @@ }, "pytest-xdist": { "hashes": [ - "sha256:1849bd98d8b242b948e472db7478e090bf3361912a8fed87992ed94085f54727", - "sha256:37290d161638a20b672401deef1cba812d110ac27e35d213f091d15b8beb40c9" + "sha256:d5ee0520eb1b7bcca50a60a518ab7a7707992812c578198f8b44fdfac78e8c93", + "sha256:ff9daa7793569e6a68544850fd3927cd257cc03a7ef76c95e86915355e82b5f2" ], "index": "pypi", - "version": "==3.2.1" + "version": "==3.3.1" }, "python-dotenv": { "hashes": [ @@ -386,42 +366,35 @@ }, "requests": { "hashes": [ - "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", - "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" + "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f", + "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1" ], - "markers": "python_version >= '3.7' and python_version < '4'", - "version": "==2.28.2" - }, - "tomli": { - "hashes": [ - "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", - "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" - ], - "markers": "python_version < '3.11'", - "version": "==2.0.1" + "markers": 
"python_version >= '3.7'", + "version": "==2.31.0" }, "trustme": { "hashes": [ - "sha256:5e07b23d70ceed64f3bb36ae4b9abc52354c16c98d45ab037bee2b5fbffe586c", - "sha256:a6e53039cc43e70548ebd9a42ec1af5cba803a16d14321cd96352d2b4e010e04" + "sha256:1d4f0b0fe28091506edc29c19ad90cca387646add436c3ca66ba7bcc53807f55", + "sha256:7a9f82ad494d661cd10c9eed38e0f708154eb59a2e415da6b02af3e5dac53134" ], - "version": "==0.9.0" + "markers": "python_version >= '3.7'", + "version": "==1.0.0" }, "urllib3": { "hashes": [ - "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", - "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" + "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc", + "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.15" + "markers": "python_version >= '3.7'", + "version": "==2.0.2" }, "websocket-client": { "hashes": [ - "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40", - "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e" + "sha256:c7d67c13b928645f259d9b847ab5b57fd2d127213ca41ebd880de1f553b7c23b", + "sha256:f8c64e28cd700e7ba1f04350d66422b6833b82a796b525a51e740b8cc8dab4b1" ], "markers": "python_version >= '3.7'", - "version": "==1.5.1" + "version": "==1.5.2" } }, "develop": { @@ -444,7 +417,7 @@ "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330", "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186" ], - "markers": "python_version < '3.11' and python_version >= '3.7'", + "markers": "python_version >= '3.11'", "version": "==5.1.1" }, "executing": { @@ -497,11 +470,11 @@ }, "ipython": { "hashes": [ - "sha256:5b54478e459155a326bf5f42ee4f29df76258c0279c36f21d71ddb560f88b156", - "sha256:735cede4099dbc903ee540307b9171fbfef4aa75cfcacc5a273b2cda2f02be04" + 
"sha256:7dff3fad32b97f6488e02f87b970f309d082f758d7b7fc252e3b19ee0e432dbb", + "sha256:ffca270240fbd21b06b2974e14a86494d6d29290184e788275f55e0b55914926" ], - "markers": "python_version < '3.11' and python_version >= '3.7'", - "version": "==8.11.0" + "markers": "python_version >= '3.11'", + "version": "==8.13.2" }, "jedi": { "hashes": [ @@ -566,11 +539,11 @@ }, "pygments": { "hashes": [ - "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297", - "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717" + "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c", + "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1" ], - "markers": "python_version >= '3.6'", - "version": "==2.14.0" + "markers": "python_version >= '3.7'", + "version": "==2.15.1" }, "six": { "hashes": [ @@ -587,14 +560,6 @@ ], "version": "==0.6.2" }, - "tomli": { - "hashes": [ - "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", - "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" - ], - "markers": "python_version < '3.11'", - "version": "==2.0.1" - }, "traitlets": { "hashes": [ "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8", diff --git a/docker/test/tests/test_capi_whitelists.py b/docker/test/tests/test_capi_whitelists.py new file mode 100644 index 000000000..f8e3c17c0 --- /dev/null +++ b/docker/test/tests/test_capi_whitelists.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python + +from http import HTTPStatus +import yaml + +import pytest + +pytestmark = pytest.mark.docker + + +def test_capi_whitelists(crowdsec, tmp_path_factory, flavor,): + """Test CAPI_WHITELISTS_PATH""" + env = { + "CAPI_WHITELISTS_PATH": "/path/to/whitelists.yaml" + } + + whitelists = tmp_path_factory.mktemp("whitelists") + with open(whitelists / "whitelists.yaml", "w") as f: + yaml.dump({"ips": ["1.2.3.4", "2.3.4.5"], "cidrs": ["1.2.3.0/24"]}, f) + + volumes = { + whitelists / "whitelists.yaml": 
{"bind": "/path/to/whitelists.yaml", "mode": "ro"} + } + + with crowdsec(flavor=flavor, environment=env, volumes=volumes) as cs: + cs.wait_for_log("*Starting processing data*") + cs.wait_for_http(8080, '/health', want_status=HTTPStatus.OK) + res = cs.cont.exec_run(f'cscli config show-yaml') + assert res.exit_code == 0 + stdout = res.output.decode() + y = yaml.safe_load(stdout) + assert y['api']['server']['capi_whitelists_path'] == '/path/to/whitelists.yaml' diff --git a/go.mod b/go.mod index fca6d2f6f..02f0defef 100644 --- a/go.mod +++ b/go.mod @@ -48,11 +48,11 @@ require ( github.com/prometheus/client_model v0.3.0 github.com/prometheus/prom2json v1.3.0 github.com/r3labs/diff/v2 v2.14.1 - github.com/sirupsen/logrus v1.9.0 + github.com/sirupsen/logrus v1.9.2 github.com/spf13/cobra v1.7.0 - github.com/stretchr/testify v1.8.2 - golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d - golang.org/x/mod v0.8.0 + github.com/stretchr/testify v1.8.3 + golang.org/x/crypto v0.1.0 + golang.org/x/mod v0.6.0 google.golang.org/grpc v1.47.0 google.golang.org/protobuf v1.28.1 gopkg.in/natefinch/lumberjack.v2 v2.2.1 @@ -72,6 +72,7 @@ require ( github.com/cespare/xxhash/v2 v2.1.2 github.com/corazawaf/coraza/v3 v3.0.0-rc.2 github.com/coreos/go-systemd/v22 v22.5.0 + github.com/crowdsecurity/go-cs-lib v0.0.0-20230531105801-4c1535c2b3bd github.com/goccy/go-yaml v1.9.7 github.com/gofrs/uuid v4.0.0+incompatible github.com/golang-jwt/jwt/v4 v4.2.0 @@ -85,6 +86,7 @@ require ( github.com/texttheater/golang-levenshtein/levenshtein v0.0.0-20200805054039-cae8b0eaed6c github.com/umahmood/haversine v0.0.0-20151105152445-808ab04add26 github.com/wasilibs/go-re2 v0.2.1 + golang.org/x/exp v0.0.0-20230515195305-f3d0a9c9a5cc golang.org/x/sys v0.7.0 gopkg.in/yaml.v3 v3.0.1 k8s.io/apiserver v0.22.5 @@ -103,7 +105,7 @@ require ( github.com/beorn7/perks v1.0.1 // indirect github.com/corazawaf/libinjection-go v0.1.2 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect - 
github.com/docker/distribution v2.8.0+incompatible // indirect + github.com/docker/distribution v2.8.2+incompatible // indirect github.com/docker/go-units v0.4.0 // indirect github.com/gin-contrib/sse v0.1.0 // indirect github.com/go-logr/logr v1.2.3 // indirect diff --git a/go.sum b/go.sum index 327106a30..8d4506758 100644 --- a/go.sum +++ b/go.sum @@ -174,6 +174,8 @@ github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= github.com/creack/pty v1.1.11/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26 h1:r97WNVC30Uen+7WnLs4xDScS/Ex988+id2k6mDf8psU= github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26/go.mod h1:zpv7r+7KXwgVUZnUNjyP22zc/D7LKjyoY02weH2RBbk= +github.com/crowdsecurity/go-cs-lib v0.0.0-20230531105801-4c1535c2b3bd h1:Y70ceDKAKYFXTnxEjXuBDSh07umvDhbX3PCCYhdtsZ0= +github.com/crowdsecurity/go-cs-lib v0.0.0-20230531105801-4c1535c2b3bd/go.mod h1:9JJLSpGj1ZXnROV3xAcJvS/HTaUvuA8K3gGOpO4tfVc= github.com/crowdsecurity/grokky v0.2.1 h1:t4VYnDlAd0RjDM2SlILalbwfCrQxtJSMGdQOR0zwkE4= github.com/crowdsecurity/grokky v0.2.1/go.mod h1:33usDIYzGDsgX1kHAThCbseso6JuWNJXOzRQDGXHtWM= github.com/crowdsecurity/machineid v1.0.2 h1:wpkpsUghJF8Khtmn/tg6GxgdhLA1Xflerh5lirI+bdc= @@ -185,8 +187,8 @@ github.com/dghubble/sling v1.3.0 h1:pZHjCJq4zJvc6qVQ5wN1jo5oNZlNE0+8T/h0XeXBUKU= github.com/dghubble/sling v1.3.0/go.mod h1:XXShWaBWKzNLhu2OxikSNFrlsvowtz4kyRuXUG7oQKY= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no= -github.com/docker/distribution v2.8.0+incompatible h1:l9EaZDICImO1ngI+uTifW+ZYvvz7fKISBAKpg+MbWbY= -github.com/docker/distribution v2.8.0+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= +github.com/docker/distribution v2.8.2+incompatible 
h1:T3de5rq0dB1j30rp0sA2rER+m322EBzniBPB6ZIzuh8= +github.com/docker/distribution v2.8.2+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/docker v20.10.24+incompatible h1:Ugvxm7a8+Gz6vqQYQQ2W7GYq5EUPaAiuPgIfVyI3dYE= github.com/docker/docker v20.10.24+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= @@ -847,8 +849,8 @@ github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6Mwd github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= -github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0= -github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= +github.com/sirupsen/logrus v1.9.2 h1:oxx1eChJGI6Uks2ZC4W1zpLlVgqB8ner4EuQwV4Ik1Y= +github.com/sirupsen/logrus v1.9.2/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= @@ -887,8 +889,8 @@ github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= -github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8= -github.com/stretchr/testify v1.8.2/go.mod 
h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY= +github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/tetratelabs/wazero v1.0.0-rc.2 h1:OA3UUynnoqxrjCQ94mpAtdO4/oMxFQVNL2BXDMOc66Q= github.com/tetratelabs/wazero v1.0.0-rc.2/go.mod h1:wYx2gNRg8/WihJfSDxA1TIL8H+GkfLYm+bIfbblu9VQ= @@ -1016,8 +1018,9 @@ golang.org/x/crypto v0.0.0-20201216223049-8b5274cf687f/go.mod h1:jdWPYTVW3xRLrWP golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d h1:sK3txAijHtOK88l68nt020reeT1ZdKLIYetKl95FzVY= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= +golang.org/x/crypto v0.1.0 h1:MDRAIl0xIo9Io2xV565hzXHw3zVseKrJKodhohM5CjU= +golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1028,6 +1031,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp 
v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20230515195305-f3d0a9c9a5cc h1:mCRnTeVUjcrhlRmO0VK8a6k6Rrf6TF9htwo2pJVSjIU= +golang.org/x/exp v0.0.0-20230515195305-f3d0a9c9a5cc/go.mod h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= diff --git a/mk/goversion.mk b/mk/goversion.mk index 48979ac8e..dd9954928 100644 --- a/mk/goversion.mk +++ b/mk/goversion.mk @@ -1,7 +1,4 @@ -BUILD_REQUIRE_GO_MAJOR ?= 1 -BUILD_REQUIRE_GO_MINOR ?= 20 - BUILD_GOVERSION = $(subst go,,$(shell go env GOVERSION)) go_major_minor = $(subst ., ,$(BUILD_GOVERSION)) @@ -10,8 +7,19 @@ GO_MINOR_VERSION = $(word 2, $(go_major_minor)) GO_VERSION_VALIDATION_ERR_MSG = Golang version ($(BUILD_GOVERSION)) is not supported, please use at least $(BUILD_REQUIRE_GO_MAJOR).$(BUILD_REQUIRE_GO_MINOR) + .PHONY: goversion -goversion: +goversion: $(if $(findstring devel,$(shell go env GOVERSION)),goversion_devel,goversion_check) + + +.PHONY: goversion_devel +goversion_devel: + $(warning WARNING: You are using a development version of Golang ($(BUILD_GOVERSION)) which is not supported. 
For production environments, use a stable version (at least $(BUILD_REQUIRE_GO_MAJOR).$(BUILD_REQUIRE_GO_MINOR))) + $(info ) + + +.PHONY: goversion_check +goversion_check: ifneq ($(OS), Windows_NT) @if [ $(GO_MAJOR_VERSION) -gt $(BUILD_REQUIRE_GO_MAJOR) ]; then \ exit 0; \ diff --git a/mk/platform.mk b/mk/platform.mk index 67c94c07d..9e375de3e 100644 --- a/mk/platform.mk +++ b/mk/platform.mk @@ -1,3 +1,8 @@ + +BUILD_CODENAME ?= alphaga +GOARCH ?= $(shell go env GOARCH) +BUILD_TAG ?= $(shell git rev-parse HEAD) + ifeq ($(OS), Windows_NT) SHELL := pwsh.exe .SHELLFLAGS := -NoProfile -Command diff --git a/mk/platform/freebsd.mk b/mk/platform/freebsd.mk index c08c82d6e..600a3926a 100644 --- a/mk/platform/freebsd.mk +++ b/mk/platform/freebsd.mk @@ -1,5 +1,3 @@ # FreeBSD specific MAKE=gmake - -$(info building for FreeBSD) diff --git a/mk/platform/linux.mk b/mk/platform/linux.mk index 0c31e884a..02d38f873 100644 --- a/mk/platform/linux.mk +++ b/mk/platform/linux.mk @@ -1,5 +1,3 @@ # Linux specific MAKE=make - -$(info Building for linux) \ No newline at end of file diff --git a/mk/platform/openbsd.mk b/mk/platform/openbsd.mk index 145b8257f..682221353 100644 --- a/mk/platform/openbsd.mk +++ b/mk/platform/openbsd.mk @@ -1,5 +1,3 @@ # OpenBSD specific MAKE=gmake - -$(info building for OpenBSD) diff --git a/mk/platform/unix_common.mk b/mk/platform/unix_common.mk index 23f9d886b..f611693f4 100644 --- a/mk/platform/unix_common.mk +++ b/mk/platform/unix_common.mk @@ -6,12 +6,9 @@ MKDIR=mkdir -p # Go should not be required to run functional tests GOOS ?= $(shell go env GOOS) -GOARCH ?= $(shell go env GOARCH) #Current versioning information from env BUILD_VERSION?=$(shell git describe --tags) -BUILD_CODENAME="alphaga" BUILD_TIMESTAMP=$(shell date +%F"_"%T) -BUILD_TAG?=$(shell git rev-parse HEAD) DEFAULT_CONFIGDIR?=/etc/crowdsec DEFAULT_DATADIR?=/var/lib/crowdsec/data diff --git a/mk/platform/windows.mk b/mk/platform/windows.mk index 33b92b828..8e2cdf19b 100644 --- 
a/mk/platform/windows.mk +++ b/mk/platform/windows.mk @@ -4,16 +4,11 @@ MAKE=make GOOS=windows PREFIX=$(shell $$env:TEMP) -GOOS ?= $(shell go env GOOS) -GOARCH ?= $(shell go env GOARCH) - #Current versioning information from env #BUILD_VERSION?=$(shell (Invoke-WebRequest -UseBasicParsing -Uri https://api.github.com/repos/crowdsecurity/crowdsec/releases/latest).Content | jq -r '.tag_name') #hardcode it till i find a workaround BUILD_VERSION?=$(shell git describe --tags $$(git rev-list --tags --max-count=1)) -BUILD_CODENAME?=alphaga BUILD_TIMESTAMP?=$(shell Get-Date -Format "yyyy-MM-dd_HH:mm:ss") -BUILD_TAG?=$(shell git rev-parse HEAD) DEFAULT_CONFIGDIR?=C:\\ProgramData\\CrowdSec\\config DEFAULT_DATADIR?=C:\\ProgramData\\CrowdSec\\data @@ -23,5 +18,3 @@ CP=Copy-Item CPR=Copy-Item -Recurse MKDIR=New-Item -ItemType directory WIN_IGNORE_ERR=; exit 0 - -$(info Building for windows) diff --git a/pkg/acquisition/acquisition.go b/pkg/acquisition/acquisition.go index 1c0d07d5f..9fc8fc86f 100644 --- a/pkg/acquisition/acquisition.go +++ b/pkg/acquisition/acquisition.go @@ -15,6 +15,8 @@ import ( tomb "gopkg.in/tomb.v2" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration" cloudwatchacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/cloudwatch" dockeracquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/docker" @@ -243,7 +245,7 @@ func GetMetrics(sources []DataSource, aggregated bool) error { } func transform(transformChan chan types.Event, output chan types.Event, AcquisTomb *tomb.Tomb, transformRuntime *vm.Program, logger *log.Entry) { - defer types.CatchPanic("crowdsec/acquis") + defer trace.CatchPanic("crowdsec/acquis") logger.Infof("transformer started") for { select { @@ -298,7 +300,7 @@ func StartAcquisition(sources []DataSource, output chan types.Event, AcquisTomb log.Debugf("starting one source %d/%d ->> %T", i, len(sources), subsrc) 
AcquisTomb.Go(func() error { - defer types.CatchPanic("crowdsec/acquis") + defer trace.CatchPanic("crowdsec/acquis") var err error outChan := output diff --git a/pkg/acquisition/acquisition_test.go b/pkg/acquisition/acquisition_test.go index fc057fc6d..6b6d5ce71 100644 --- a/pkg/acquisition/acquisition_test.go +++ b/pkg/acquisition/acquisition_test.go @@ -13,9 +13,10 @@ import ( tomb "gopkg.in/tomb.v2" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration" "github.com/crowdsecurity/crowdsec/pkg/csconfig" - "github.com/crowdsecurity/crowdsec/pkg/cstest" "github.com/crowdsecurity/crowdsec/pkg/types" ) diff --git a/pkg/acquisition/modules/cloudwatch/cloudwatch_test.go b/pkg/acquisition/modules/cloudwatch/cloudwatch_test.go index b9a739462..7cdfefca6 100644 --- a/pkg/acquisition/modules/cloudwatch/cloudwatch_test.go +++ b/pkg/acquisition/modules/cloudwatch/cloudwatch_test.go @@ -9,9 +9,10 @@ import ( "testing" "time" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/cloudwatchlogs" - "github.com/crowdsecurity/crowdsec/pkg/cstest" "github.com/crowdsecurity/crowdsec/pkg/types" log "github.com/sirupsen/logrus" "github.com/stretchr/testify/require" diff --git a/pkg/acquisition/modules/docker/docker_test.go b/pkg/acquisition/modules/docker/docker_test.go index f0a16b801..65c992726 100644 --- a/pkg/acquisition/modules/docker/docker_test.go +++ b/pkg/acquisition/modules/docker/docker_test.go @@ -11,7 +11,8 @@ import ( "testing" "time" - "github.com/crowdsecurity/crowdsec/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/crowdsec/pkg/types" dockerTypes "github.com/docker/docker/api/types" dockerContainer "github.com/docker/docker/api/types/container" diff --git a/pkg/acquisition/modules/file/file.go b/pkg/acquisition/modules/file/file.go index efcf7f3f5..c24b17332 100644 --- 
a/pkg/acquisition/modules/file/file.go +++ b/pkg/acquisition/modules/file/file.go @@ -14,6 +14,8 @@ import ( "strings" "time" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/fsnotify/fsnotify" "github.com/nxadm/tail" "github.com/pkg/errors" @@ -39,6 +41,7 @@ type FileConfiguration struct { Filename string ForceInotify bool `yaml:"force_inotify"` MaxBufferSize int `yaml:"max_buffer_size"` + PollWithoutInotify bool `yaml:"poll_without_inotify"` configuration.DataSourceCommonCfg `yaml:",inline"` } @@ -328,14 +331,14 @@ func (f *FileSource) StreamingAcquisition(out chan types.Event, t *tomb.Tomb) er continue } - tail, err := tail.TailFile(file, tail.Config{ReOpen: true, Follow: true, Poll: true, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekEnd}, Logger: log.NewEntry(log.StandardLogger())}) + tail, err := tail.TailFile(file, tail.Config{ReOpen: true, Follow: true, Poll: f.config.PollWithoutInotify, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekEnd}, Logger: log.NewEntry(log.StandardLogger())}) if err != nil { f.logger.Errorf("Could not start tailing file %s : %s", file, err) continue } f.tails[file] = true t.Go(func() error { - defer types.CatchPanic("crowdsec/acquis/file/live/fsnotify") + defer trace.CatchPanic("crowdsec/acquis/file/live/fsnotify") return f.tailFile(out, t, tail) }) } @@ -411,14 +414,14 @@ func (f *FileSource) monitorNewFiles(out chan types.Event, t *tomb.Tomb) error { continue } //Slightly different parameters for Location, as we want to read the first lines of the newly created file - tail, err := tail.TailFile(event.Name, tail.Config{ReOpen: true, Follow: true, Poll: true, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekStart}}) + tail, err := tail.TailFile(event.Name, tail.Config{ReOpen: true, Follow: true, Poll: f.config.PollWithoutInotify, Location: &tail.SeekInfo{Offset: 0, Whence: io.SeekStart}}) if err != nil { logger.Errorf("Could not start tailing file %s : %s", event.Name, err) break } f.tails[event.Name] = true 
t.Go(func() error { - defer types.CatchPanic("crowdsec/acquis/tailfile") + defer trace.CatchPanic("crowdsec/acquis/tailfile") return f.tailFile(out, t, tail) }) } diff --git a/pkg/acquisition/modules/file/file_test.go b/pkg/acquisition/modules/file/file_test.go index d9ce82312..ff55bc413 100644 --- a/pkg/acquisition/modules/file/file_test.go +++ b/pkg/acquisition/modules/file/file_test.go @@ -13,8 +13,9 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/tomb.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + fileacquisition "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/file" - "github.com/crowdsecurity/crowdsec/pkg/cstest" "github.com/crowdsecurity/crowdsec/pkg/types" ) diff --git a/pkg/acquisition/modules/journalctl/journalctl.go b/pkg/acquisition/modules/journalctl/journalctl.go index 9858381e6..7882cb7c2 100644 --- a/pkg/acquisition/modules/journalctl/journalctl.go +++ b/pkg/acquisition/modules/journalctl/journalctl.go @@ -15,6 +15,8 @@ import ( "gopkg.in/tomb.v2" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration" "github.com/crowdsecurity/crowdsec/pkg/types" ) @@ -257,7 +259,7 @@ func (j *JournalCtlSource) GetName() string { } func (j *JournalCtlSource) OneShotAcquisition(out chan types.Event, t *tomb.Tomb) error { - defer types.CatchPanic("crowdsec/acquis/journalctl/oneshot") + defer trace.CatchPanic("crowdsec/acquis/journalctl/oneshot") err := j.runJournalCtl(out, t) j.logger.Debug("Oneshot journalctl acquisition is done") return err @@ -266,7 +268,7 @@ func (j *JournalCtlSource) OneShotAcquisition(out chan types.Event, t *tomb.Tomb func (j *JournalCtlSource) StreamingAcquisition(out chan types.Event, t *tomb.Tomb) error { t.Go(func() error { - defer types.CatchPanic("crowdsec/acquis/journalctl/streaming") + defer trace.CatchPanic("crowdsec/acquis/journalctl/streaming") return j.runJournalCtl(out, t) }) return nil diff --git 
a/pkg/acquisition/modules/journalctl/journalctl_test.go b/pkg/acquisition/modules/journalctl/journalctl_test.go index e853f4b50..2c04c9028 100644 --- a/pkg/acquisition/modules/journalctl/journalctl_test.go +++ b/pkg/acquisition/modules/journalctl/journalctl_test.go @@ -8,7 +8,8 @@ import ( "testing" "time" - "github.com/crowdsecurity/crowdsec/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/crowdsec/pkg/types" log "github.com/sirupsen/logrus" "github.com/sirupsen/logrus/hooks/test" diff --git a/pkg/acquisition/modules/kafka/kafka.go b/pkg/acquisition/modules/kafka/kafka.go index af9251163..085751cfc 100644 --- a/pkg/acquisition/modules/kafka/kafka.go +++ b/pkg/acquisition/modules/kafka/kafka.go @@ -17,6 +17,8 @@ import ( "gopkg.in/tomb.v2" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration" "github.com/crowdsecurity/crowdsec/pkg/types" ) @@ -190,7 +192,7 @@ func (k *KafkaSource) StreamingAcquisition(out chan types.Event, t *tomb.Tomb) e k.logger.Infof("start reader on topic '%s'", k.Config.Topic) t.Go(func() error { - defer types.CatchPanic("crowdsec/acquis/kafka/live") + defer trace.CatchPanic("crowdsec/acquis/kafka/live") return k.RunReader(out, t) }) @@ -212,7 +214,13 @@ func (kc *KafkaConfiguration) NewTLSConfig() (*tls.Config, error) { if err != nil { return &tlsConfig, err } - caCertPool := x509.NewCertPool() + caCertPool, err := x509.SystemCertPool() + if err != nil { + return &tlsConfig, fmt.Errorf("unable to load system CA certificates: %w", err) + } + if caCertPool == nil { + caCertPool = x509.NewCertPool() + } caCertPool.AppendCertsFromPEM(caCert) tlsConfig.RootCAs = caCertPool diff --git a/pkg/acquisition/modules/kafka/kafka_test.go b/pkg/acquisition/modules/kafka/kafka_test.go index 2e69a9a5b..b37d0e7b7 100644 --- a/pkg/acquisition/modules/kafka/kafka_test.go +++ b/pkg/acquisition/modules/kafka/kafka_test.go @@ -8,7 +8,8 @@ 
import ( "testing" "time" - "github.com/crowdsecurity/crowdsec/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/crowdsec/pkg/types" "github.com/segmentio/kafka-go" log "github.com/sirupsen/logrus" diff --git a/pkg/acquisition/modules/kinesis/kinesis.go b/pkg/acquisition/modules/kinesis/kinesis.go index 7700975b9..60cdc3751 100644 --- a/pkg/acquisition/modules/kinesis/kinesis.go +++ b/pkg/acquisition/modules/kinesis/kinesis.go @@ -19,6 +19,8 @@ import ( "gopkg.in/tomb.v2" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration" "github.com/crowdsecurity/crowdsec/pkg/types" ) @@ -490,7 +492,7 @@ func (k *KinesisSource) ReadFromStream(out chan types.Event, t *tomb.Tomb) error for _, shard := range shards.Shards { shardId := *shard.ShardId k.shardReaderTomb.Go(func() error { - defer types.CatchPanic("crowdsec/acquis/kinesis/streaming/shard") + defer trace.CatchPanic("crowdsec/acquis/kinesis/streaming/shard") return k.ReadFromShard(out, shardId) }) } @@ -514,7 +516,7 @@ func (k *KinesisSource) ReadFromStream(out chan types.Event, t *tomb.Tomb) error func (k *KinesisSource) StreamingAcquisition(out chan types.Event, t *tomb.Tomb) error { t.Go(func() error { - defer types.CatchPanic("crowdsec/acquis/kinesis/streaming") + defer trace.CatchPanic("crowdsec/acquis/kinesis/streaming") if k.Config.UseEnhancedFanOut { return k.EnhancedRead(out, t) } else { diff --git a/pkg/acquisition/modules/kinesis/kinesis_test.go b/pkg/acquisition/modules/kinesis/kinesis_test.go index 46435ac27..25941e20d 100644 --- a/pkg/acquisition/modules/kinesis/kinesis_test.go +++ b/pkg/acquisition/modules/kinesis/kinesis_test.go @@ -12,10 +12,11 @@ import ( "testing" "time" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws/session" "github.com/aws/aws-sdk-go/service/kinesis" - 
"github.com/crowdsecurity/crowdsec/pkg/cstest" "github.com/crowdsecurity/crowdsec/pkg/types" log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" diff --git a/pkg/acquisition/modules/kubernetesaudit/k8s_audit.go b/pkg/acquisition/modules/kubernetesaudit/k8s_audit.go index 15a1a1033..f65a0aa57 100644 --- a/pkg/acquisition/modules/kubernetesaudit/k8s_audit.go +++ b/pkg/acquisition/modules/kubernetesaudit/k8s_audit.go @@ -8,6 +8,8 @@ import ( "net/http" "strings" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration" "github.com/crowdsecurity/crowdsec/pkg/types" "github.com/pkg/errors" @@ -133,7 +135,7 @@ func (ka *KubernetesAuditSource) OneShotAcquisition(out chan types.Event, t *tom func (ka *KubernetesAuditSource) StreamingAcquisition(out chan types.Event, t *tomb.Tomb) error { ka.outChan = out t.Go(func() error { - defer types.CatchPanic("crowdsec/acquis/k8s-audit/live") + defer trace.CatchPanic("crowdsec/acquis/k8s-audit/live") ka.logger.Infof("Starting k8s-audit server on %s:%d%s", ka.config.ListenAddr, ka.config.ListenPort, ka.config.WebhookPath) t.Go(func() error { err := ka.server.ListenAndServe() diff --git a/pkg/acquisition/modules/syslog/internal/parser/rfc5424/parse_test.go b/pkg/acquisition/modules/syslog/internal/parser/rfc5424/parse_test.go index 8085f66f8..9a030e6fe 100644 --- a/pkg/acquisition/modules/syslog/internal/parser/rfc5424/parse_test.go +++ b/pkg/acquisition/modules/syslog/internal/parser/rfc5424/parse_test.go @@ -4,8 +4,9 @@ import ( "testing" "time" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/stretchr/testify/require" - "github.com/crowdsecurity/crowdsec/pkg/cstest" ) func TestPri(t *testing.T) { diff --git a/pkg/acquisition/modules/syslog/syslog.go b/pkg/acquisition/modules/syslog/syslog.go index cc93c3e2b..948f3d005 100644 --- a/pkg/acquisition/modules/syslog/syslog.go +++ b/pkg/acquisition/modules/syslog/syslog.go @@ -12,6 +12,8 @@ import 
( "gopkg.in/tomb.v2" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration" "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/syslog/internal/parser/rfc3164" "github.com/crowdsecurity/crowdsec/pkg/acquisition/modules/syslog/internal/parser/rfc5424" @@ -142,7 +144,7 @@ func (s *SyslogSource) StreamingAcquisition(out chan types.Event, t *tomb.Tomb) } s.serverTomb = s.server.StartServer() t.Go(func() error { - defer types.CatchPanic("crowdsec/acquis/syslog/live") + defer trace.CatchPanic("crowdsec/acquis/syslog/live") return s.handleSyslogMsg(out, t, c) }) return nil diff --git a/pkg/acquisition/modules/syslog/syslog_test.go b/pkg/acquisition/modules/syslog/syslog_test.go index 1ac7051b9..2557f26d5 100644 --- a/pkg/acquisition/modules/syslog/syslog_test.go +++ b/pkg/acquisition/modules/syslog/syslog_test.go @@ -7,7 +7,8 @@ import ( "testing" "time" - "github.com/crowdsecurity/crowdsec/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/crowdsec/pkg/types" log "github.com/sirupsen/logrus" "gopkg.in/tomb.v2" diff --git a/pkg/acquisition/modules/wineventlog/wineventlog_windows.go b/pkg/acquisition/modules/wineventlog/wineventlog_windows.go index ba5829d8a..3a78a1932 100644 --- a/pkg/acquisition/modules/wineventlog/wineventlog_windows.go +++ b/pkg/acquisition/modules/wineventlog/wineventlog_windows.go @@ -17,6 +17,8 @@ import ( "gopkg.in/tomb.v2" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/acquisition/configuration" "github.com/crowdsecurity/crowdsec/pkg/types" ) @@ -321,7 +323,7 @@ func (w *WinEventLogSource) CanRun() error { func (w *WinEventLogSource) StreamingAcquisition(out chan types.Event, t *tomb.Tomb) error { t.Go(func() error { - defer types.CatchPanic("crowdsec/acquis/wineventlog/streaming") + defer trace.CatchPanic("crowdsec/acquis/wineventlog/streaming") return 
w.getEvents(out, t) }) return nil diff --git a/pkg/alertcontext/alertcontext.go b/pkg/alertcontext/alertcontext.go index 16a6f3bfe..29c13e3f3 100644 --- a/pkg/alertcontext/alertcontext.go +++ b/pkg/alertcontext/alertcontext.go @@ -7,10 +7,12 @@ import ( "github.com/antonmedv/expr" "github.com/antonmedv/expr/vm" + log "github.com/sirupsen/logrus" + "golang.org/x/exp/slices" + "github.com/crowdsecurity/crowdsec/pkg/exprhelpers" "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/types" - log "github.com/sirupsen/logrus" ) const ( @@ -131,7 +133,7 @@ func EventToContext(events []types.Event) (models.Meta, []error) { errors = append(errors, fmt.Errorf("unexpected return type for %s : %T", key, output)) continue } - if val != "" && !types.InSlice(val, tmpContext[key]) { + if val != "" && !slices.Contains(tmpContext[key], val) { tmpContext[key] = append(tmpContext[key], val) } } diff --git a/pkg/apiclient/alerts_service_test.go b/pkg/apiclient/alerts_service_test.go index 4270851d5..f4ec8cabe 100644 --- a/pkg/apiclient/alerts_service_test.go +++ b/pkg/apiclient/alerts_service_test.go @@ -8,7 +8,8 @@ import ( "reflect" "testing" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/models" log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" @@ -31,7 +32,7 @@ func TestAlertsListAsMachine(t *testing.T) { client, err := NewClient(&Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }) @@ -233,7 +234,7 @@ func TestAlertsGetAsMachine(t *testing.T) { client, err := NewClient(&Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, 
VersionPrefix: "v1", }) @@ -423,7 +424,7 @@ func TestAlertsCreateAsMachine(t *testing.T) { client, err := NewClient(&Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }) @@ -467,7 +468,7 @@ func TestAlertsDeleteAsMachine(t *testing.T) { client, err := NewClient(&Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }) diff --git a/pkg/apiclient/auth_service_test.go b/pkg/apiclient/auth_service_test.go index 1e3e83e04..6236cf041 100644 --- a/pkg/apiclient/auth_service_test.go +++ b/pkg/apiclient/auth_service_test.go @@ -10,7 +10,8 @@ import ( "net/url" "testing" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/models" log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" @@ -86,7 +87,7 @@ func TestWatcherRegister(t *testing.T) { clientconfig := Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", } @@ -128,7 +129,7 @@ func TestWatcherAuth(t *testing.T) { clientConfig := &Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", Scenarios: []string{"crowdsecurity/test"}, @@ -216,7 +217,7 @@ func TestWatcherUnregister(t *testing.T) { mycfg := &Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", 
version.String()), URL: apiURL, VersionPrefix: "v1", Scenarios: []string{"crowdsecurity/test"}, @@ -270,7 +271,7 @@ func TestWatcherEnroll(t *testing.T) { mycfg := &Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", Scenarios: []string{"crowdsecurity/test"}, diff --git a/pkg/apiclient/client_http_test.go b/pkg/apiclient/client_http_test.go index f9ad57210..c50769041 100644 --- a/pkg/apiclient/client_http_test.go +++ b/pkg/apiclient/client_http_test.go @@ -8,7 +8,8 @@ import ( "testing" "time" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/stretchr/testify/assert" ) @@ -23,7 +24,7 @@ func TestNewRequestInvalid(t *testing.T) { client, err := NewClient(&Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }) @@ -56,7 +57,7 @@ func TestNewRequestTimeout(t *testing.T) { client, err := NewClient(&Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }) diff --git a/pkg/apiclient/client_test.go b/pkg/apiclient/client_test.go index 3b55dce07..ef52a60ab 100644 --- a/pkg/apiclient/client_test.go +++ b/pkg/apiclient/client_test.go @@ -11,8 +11,9 @@ import ( "github.com/stretchr/testify/assert" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" log "github.com/sirupsen/logrus" + + "github.com/crowdsecurity/go-cs-lib/pkg/version" ) /*this is a ripoff of google/go-github approach : @@ -55,7 +56,7 @@ func TestNewClientOk(t *testing.T) { client, err := NewClient(&Config{ MachineID: "test_login", Password: "test_password", - 
UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }) @@ -92,7 +93,7 @@ func TestNewClientKo(t *testing.T) { client, err := NewClient(&Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }) @@ -143,7 +144,7 @@ func TestNewClientRegisterKO(t *testing.T) { _, err = RegisterClient(&Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }, &http.Client{}) @@ -173,7 +174,7 @@ func TestNewClientRegisterOK(t *testing.T) { client, err := RegisterClient(&Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }, &http.Client{}) @@ -201,7 +202,7 @@ func TestNewClientBadAnswer(t *testing.T) { _, err = RegisterClient(&Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }, &http.Client{}) diff --git a/pkg/apiclient/decisions_service.go b/pkg/apiclient/decisions_service.go index ae92fb189..054c51a9c 100644 --- a/pkg/apiclient/decisions_service.go +++ b/pkg/apiclient/decisions_service.go @@ -6,6 +6,8 @@ import ( "fmt" "net/http" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/modelscapi" "github.com/crowdsecurity/crowdsec/pkg/types" @@ -102,10 +104,10 @@ func (s *DecisionsService) GetDecisionsFromGroups(decisionsGroups []*modelscapi. 
partialDecisions[idx] = &models.Decision{ Scenario: decisionsGroup.Scenario, Scope: decisionsGroup.Scope, - Type: types.StrPtr(types.DecisionTypeBan), + Type: ptr.Of(types.DecisionTypeBan), Value: decision.Value, Duration: decision.Duration, - Origin: types.StrPtr(types.CAPIOrigin), + Origin: ptr.Of(types.CAPIOrigin), } } decisions = append(decisions, partialDecisions...) @@ -138,10 +140,10 @@ func (s *DecisionsService) FetchV3Decisions(ctx context.Context, url string) (*m partialDecisions[idx] = &models.Decision{ Scenario: &scenarioDeleted, Scope: decisionsGroup.Scope, - Type: types.StrPtr(types.DecisionTypeBan), + Type: ptr.Of(types.DecisionTypeBan), Value: &decision, Duration: &durationDeleted, - Origin: types.StrPtr(types.CAPIOrigin), + Origin: ptr.Of(types.CAPIOrigin), } } v2Decisions.Deleted = append(v2Decisions.Deleted, partialDecisions...) @@ -210,7 +212,7 @@ func (s *DecisionsService) GetDecisionsFromBlocklist(ctx context.Context, blockl Type: blocklist.Remediation, Value: &decision, Duration: blocklist.Duration, - Origin: types.StrPtr(types.ListOrigin), + Origin: ptr.Of(types.ListOrigin), }) } diff --git a/pkg/apiclient/decisions_service_test.go b/pkg/apiclient/decisions_service_test.go index 35d819d5a..935ddcea5 100644 --- a/pkg/apiclient/decisions_service_test.go +++ b/pkg/apiclient/decisions_service_test.go @@ -8,10 +8,11 @@ import ( "reflect" "testing" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/modelscapi" - "github.com/crowdsecurity/crowdsec/pkg/types" log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -460,7 +461,7 @@ func TestDecisionsFromBlocklist(t *testing.T) { Remediation: &tremediationBlocklist, Name: &tnameBlocklist, Duration: &tdurationBlocklist, - }, types.StrPtr("Sun, 01 Jan 2023 01:01:01 
GMT")) + }, ptr.Of("Sun, 01 Jan 2023 01:01:01 GMT")) require.NoError(t, err) assert.False(t, isModified) _, isModified, err = newcli.Decisions.GetDecisionsFromBlocklist(context.Background(), &modelscapi.BlocklistLink{ @@ -469,7 +470,7 @@ func TestDecisionsFromBlocklist(t *testing.T) { Remediation: &tremediationBlocklist, Name: &tnameBlocklist, Duration: &tdurationBlocklist, - }, types.StrPtr("Mon, 02 Jan 2023 01:01:01 GMT")) + }, ptr.Of("Mon, 02 Jan 2023 01:01:01 GMT")) require.NoError(t, err) assert.True(t, isModified) } @@ -495,7 +496,7 @@ func TestDeleteDecisions(t *testing.T) { client, err := NewClient(&Config{ MachineID: "test_login", Password: "test_password", - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, VersionPrefix: "v1", }) diff --git a/pkg/apiclient/heartbeat.go b/pkg/apiclient/heartbeat.go index dc7c256e3..497ccb7eb 100644 --- a/pkg/apiclient/heartbeat.go +++ b/pkg/apiclient/heartbeat.go @@ -6,9 +6,10 @@ import ( "net/http" "time" - "github.com/crowdsecurity/crowdsec/pkg/types" log "github.com/sirupsen/logrus" tomb "gopkg.in/tomb.v2" + + "github.com/crowdsecurity/go-cs-lib/pkg/trace" ) type HeartBeatService service @@ -32,7 +33,7 @@ func (h *HeartBeatService) Ping(ctx context.Context) (bool, *Response, error) { func (h *HeartBeatService) StartHeartBeat(ctx context.Context, t *tomb.Tomb) { t.Go(func() error { - defer types.CatchPanic("crowdsec/apiClient/heartbeat") + defer trace.CatchPanic("crowdsec/apiClient/heartbeat") hbTimer := time.NewTicker(1 * time.Minute) for { select { diff --git a/pkg/apiserver/alerts_test.go b/pkg/apiserver/alerts_test.go index f6388fc1e..5fd23d116 100644 --- a/pkg/apiserver/alerts_test.go +++ b/pkg/apiserver/alerts_test.go @@ -28,7 +28,7 @@ type LAPI struct { func SetupLAPITest(t *testing.T) LAPI { t.Helper() - router, loginResp, config, err := InitMachineTest() + router, loginResp, config, err := InitMachineTest(t) if err != nil { 
t.Fatal(err) } @@ -68,8 +68,8 @@ func (l *LAPI) RecordResponse(verb string, url string, body *strings.Reader, aut return w } -func InitMachineTest() (*gin.Engine, models.WatcherAuthResponse, csconfig.Config, error) { - router, config, err := NewAPITest() +func InitMachineTest(t *testing.T) (*gin.Engine, models.WatcherAuthResponse, csconfig.Config, error) { + router, config, err := NewAPITest(t) if err != nil { return nil, models.WatcherAuthResponse{}, config, fmt.Errorf("unable to run local API: %s", err) } @@ -151,7 +151,7 @@ func TestCreateAlert(t *testing.T) { func TestCreateAlertChannels(t *testing.T) { - apiServer, config, err := NewAPIServer() + apiServer, config, err := NewAPIServer(t) if err != nil { log.Fatalln(err) } @@ -443,7 +443,7 @@ func TestDeleteAlertByID(t *testing.T) { } func TestDeleteAlertTrustedIPS(t *testing.T) { - cfg := LoadTestConfig() + cfg := LoadTestConfig(t) // IPv6 mocking doesn't seem to work. // cfg.API.Server.TrustedIPs = []string{"1.2.3.4", "1.2.4.0/24", "::"} cfg.API.Server.TrustedIPs = []string{"1.2.3.4", "1.2.4.0/24"} diff --git a/pkg/apiserver/api_key_test.go b/pkg/apiserver/api_key_test.go index d1db144af..a77ab3f83 100644 --- a/pkg/apiserver/api_key_test.go +++ b/pkg/apiserver/api_key_test.go @@ -11,7 +11,7 @@ import ( ) func TestAPIKey(t *testing.T) { - router, config, err := NewAPITest() + router, config, err := NewAPITest(t) if err != nil { log.Fatalf("unable to run local API: %s", err) } diff --git a/pkg/apiserver/apic.go b/pkg/apiserver/apic.go index 5e8c714a1..3791a9a3c 100644 --- a/pkg/apiserver/apic.go +++ b/pkg/apiserver/apic.go @@ -15,11 +15,15 @@ import ( "github.com/go-openapi/strfmt" "github.com/pkg/errors" log "github.com/sirupsen/logrus" + "golang.org/x/exp/slices" "gopkg.in/tomb.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" 
"github.com/crowdsecurity/crowdsec/pkg/csconfig" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" "github.com/crowdsecurity/crowdsec/pkg/database" "github.com/crowdsecurity/crowdsec/pkg/database/ent" "github.com/crowdsecurity/crowdsec/pkg/database/ent/alert" @@ -81,7 +85,7 @@ func (a *apic) FetchScenariosListFromDB() ([]string, error) { machineScenarios := strings.Split(v.Scenarios, ",") log.Debugf("%d scenarios for machine %d", len(machineScenarios), v.ID) for _, sv := range machineScenarios { - if !types.InSlice(sv, scenarios) && sv != "" { + if !slices.Contains(scenarios, sv) && sv != "" { scenarios = append(scenarios, sv) } } @@ -94,15 +98,15 @@ func decisionsToApiDecisions(decisions []*models.Decision) models.AddSignalsRequ apiDecisions := models.AddSignalsRequestItemDecisions{} for _, decision := range decisions { x := &models.AddSignalsRequestItemDecisionsItem{ - Duration: types.StrPtr(*decision.Duration), + Duration: ptr.Of(*decision.Duration), ID: new(int64), - Origin: types.StrPtr(*decision.Origin), - Scenario: types.StrPtr(*decision.Scenario), - Scope: types.StrPtr(*decision.Scope), + Origin: ptr.Of(*decision.Origin), + Scenario: ptr.Of(*decision.Scenario), + Scope: ptr.Of(*decision.Scope), //Simulated: *decision.Simulated, - Type: types.StrPtr(*decision.Type), + Type: ptr.Of(*decision.Type), Until: decision.Until, - Value: types.StrPtr(*decision.Value), + Value: ptr.Of(*decision.Value), UUID: decision.UUID, } *x.ID = decision.ID @@ -193,7 +197,7 @@ func NewAPIC(config *csconfig.OnlineApiClientCfg, dbClient *database.Client, con ret.apiClient, err = apiclient.NewClient(&apiclient.Config{ MachineID: config.Credentials.Login, Password: password, - UserAgent: fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + UserAgent: fmt.Sprintf("crowdsec/%s", version.String()), URL: apiURL, PapiURL: papiURL, VersionPrefix: "v3", @@ -231,7 +235,7 @@ func NewAPIC(config *csconfig.OnlineApiClientCfg, dbClient *database.Client, con // keep track of all alerts in cache 
and push it to CAPI every PushInterval. func (a *apic) Push() error { - defer types.CatchPanic("lapi/pushToAPIC") + defer trace.CatchPanic("lapi/pushToAPIC") var cache models.AddSignalsRequest ticker := time.NewTicker(a.pushIntervalFirst) @@ -427,7 +431,7 @@ func (a *apic) HandleDeletedDecisionsV3(deletedDecisions []*modelscapi.GetDecisi if err != nil { return 0, errors.Wrapf(err, "converting db ret %d", dbCliDel) } - updateCounterForDecision(delete_counters, types.StrPtr(types.CAPIOrigin), nil, dbCliDel) + updateCounterForDecision(delete_counters, ptr.Of(types.CAPIOrigin), nil, dbCliDel) nbDeleted += dbCliDel } } @@ -473,26 +477,26 @@ func createAlertsForDecisions(decisions []*models.Decision) []*models.Alert { func createAlertForDecision(decision *models.Decision) *models.Alert { newAlert := &models.Alert{} newAlert.Source = &models.Source{} - newAlert.Source.Scope = types.StrPtr("") + newAlert.Source.Scope = ptr.Of("") if *decision.Origin == types.CAPIOrigin { //to make things more user friendly, we replace CAPI with community-blocklist - newAlert.Scenario = types.StrPtr(types.CAPIOrigin) - newAlert.Source.Scope = types.StrPtr(types.CAPIOrigin) + newAlert.Scenario = ptr.Of(types.CAPIOrigin) + newAlert.Source.Scope = ptr.Of(types.CAPIOrigin) } else if *decision.Origin == types.ListOrigin { - newAlert.Scenario = types.StrPtr(*decision.Scenario) - newAlert.Source.Scope = types.StrPtr(types.ListOrigin) + newAlert.Scenario = ptr.Of(*decision.Scenario) + newAlert.Source.Scope = ptr.Of(types.ListOrigin) } else { log.Warningf("unknown origin %s", *decision.Origin) } - newAlert.Message = types.StrPtr("") - newAlert.Source.Value = types.StrPtr("") - newAlert.StartAt = types.StrPtr(time.Now().UTC().Format(time.RFC3339)) - newAlert.StopAt = types.StrPtr(time.Now().UTC().Format(time.RFC3339)) - newAlert.Capacity = types.Int32Ptr(0) - newAlert.Simulated = types.BoolPtr(false) - newAlert.EventsCount = types.Int32Ptr(0) - newAlert.Leakspeed = types.StrPtr("") - 
newAlert.ScenarioHash = types.StrPtr("") - newAlert.ScenarioVersion = types.StrPtr("") + newAlert.Message = ptr.Of("") + newAlert.Source.Value = ptr.Of("") + newAlert.StartAt = ptr.Of(time.Now().UTC().Format(time.RFC3339)) + newAlert.StopAt = ptr.Of(time.Now().UTC().Format(time.RFC3339)) + newAlert.Capacity = ptr.Of(int32(0)) + newAlert.Simulated = ptr.Of(false) + newAlert.EventsCount = ptr.Of(int32(0)) + newAlert.Leakspeed = ptr.Of("") + newAlert.ScenarioHash = ptr.Of("") + newAlert.ScenarioVersion = ptr.Of("") newAlert.MachineID = database.CapiMachineID return newAlert } @@ -769,16 +773,16 @@ func (a *apic) UpdateBlocklists(links *modelscapi.GetDecisionsStreamResponseLink func setAlertScenario(add_counters map[string]map[string]int, delete_counters map[string]map[string]int, alert *models.Alert) *models.Alert { if *alert.Source.Scope == types.CAPIOrigin { *alert.Source.Scope = SCOPE_CAPI_ALIAS_ALIAS - alert.Scenario = types.StrPtr(fmt.Sprintf("update : +%d/-%d IPs", add_counters[types.CAPIOrigin]["all"], delete_counters[types.CAPIOrigin]["all"])) + alert.Scenario = ptr.Of(fmt.Sprintf("update : +%d/-%d IPs", add_counters[types.CAPIOrigin]["all"], delete_counters[types.CAPIOrigin]["all"])) } else if *alert.Source.Scope == types.ListOrigin { *alert.Source.Scope = fmt.Sprintf("%s:%s", types.ListOrigin, *alert.Scenario) - alert.Scenario = types.StrPtr(fmt.Sprintf("update : +%d/-%d IPs", add_counters[types.ListOrigin][*alert.Scenario], delete_counters[types.ListOrigin][*alert.Scenario])) + alert.Scenario = ptr.Of(fmt.Sprintf("update : +%d/-%d IPs", add_counters[types.ListOrigin][*alert.Scenario], delete_counters[types.ListOrigin][*alert.Scenario])) } return alert } func (a *apic) Pull() error { - defer types.CatchPanic("lapi/pullFromAPIC") + defer trace.CatchPanic("lapi/pullFromAPIC") toldOnce := false for { @@ -820,7 +824,7 @@ func (a *apic) Pull() error { func (a *apic) GetMetrics() (*models.Metrics, error) { metric := &models.Metrics{ - ApilVersion: 
types.StrPtr(cwversion.VersionStr()), + ApilVersion: ptr.Of(version.String()), Machines: make([]*models.MetricsAgentInfo, 0), Bouncers: make([]*models.MetricsBouncerInfo, 0), } @@ -861,7 +865,7 @@ func (a *apic) GetMetrics() (*models.Metrics, error) { } func (a *apic) SendMetrics(stop chan (bool)) { - defer types.CatchPanic("lapi/metricsToAPIC") + defer trace.CatchPanic("lapi/metricsToAPIC") ticker := time.NewTicker(a.metricsIntervalFirst) diff --git a/pkg/apiserver/apic_test.go b/pkg/apiserver/apic_test.go index 965ef0378..65ca29991 100644 --- a/pkg/apiserver/apic_test.go +++ b/pkg/apiserver/apic_test.go @@ -20,10 +20,12 @@ import ( "github.com/stretchr/testify/require" "gopkg.in/tomb.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" "github.com/crowdsecurity/crowdsec/pkg/csconfig" - "github.com/crowdsecurity/crowdsec/pkg/cstest" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" "github.com/crowdsecurity/crowdsec/pkg/database" "github.com/crowdsecurity/crowdsec/pkg/database/ent/decision" "github.com/crowdsecurity/crowdsec/pkg/database/ent/machine" @@ -59,10 +61,10 @@ func getAPIC(t *testing.T) *apic { metricsTomb: tomb.Tomb{}, scenarioList: make([]string, 0), consoleConfig: &csconfig.ConsoleConfig{ - ShareManualDecisions: types.BoolPtr(false), - ShareTaintedScenarios: types.BoolPtr(false), - ShareCustomScenarios: types.BoolPtr(false), - ShareContext: types.BoolPtr(false), + ShareManualDecisions: ptr.Of(false), + ShareTaintedScenarios: ptr.Of(false), + ShareCustomScenarios: ptr.Of(false), + ShareContext: ptr.Of(false), }, isPulling: make(chan bool, 1), } @@ -205,7 +207,7 @@ func TestNewAPIC(t *testing.T) { action: func() {}, args: args{ dbClient: getDBClient(t), - consoleConfig: LoadTestConfig().API.Server.ConsoleConfig, + consoleConfig: LoadTestConfig(t).API.Server.ConsoleConfig, }, }, { @@ -213,7 +215,7 @@ 
func TestNewAPIC(t *testing.T) { action: func() { testConfig.Credentials.URL = "foobar http://" }, args: args{ dbClient: getDBClient(t), - consoleConfig: LoadTestConfig().API.Server.ConsoleConfig, + consoleConfig: LoadTestConfig(t).API.Server.ConsoleConfig, }, expectedErr: "first path segment in URL cannot contain colon", }, @@ -265,11 +267,11 @@ func TestAPICHandleDeletedDecisions(t *testing.T) { assertTotalDecisionCount(t, api.dbClient, 2) nbDeleted, err := api.HandleDeletedDecisions([]*models.Decision{{ - Value: types.StrPtr("1.2.3.4"), - Origin: types.StrPtr(types.CAPIOrigin), + Value: ptr.Of("1.2.3.4"), + Origin: ptr.Of(types.CAPIOrigin), Type: &decision1.Type, - Scenario: types.StrPtr("crowdsec/test"), - Scope: types.StrPtr("IP"), + Scenario: ptr.Of("crowdsec/test"), + Scope: ptr.Of("IP"), }}, deleteCounters) assert.NoError(t, err) @@ -293,7 +295,7 @@ func TestAPICGetMetrics(t *testing.T) { machineIDs: []string{}, bouncers: []string{}, expectedMetric: &models.Metrics{ - ApilVersion: types.StrPtr(cwversion.VersionStr()), + ApilVersion: ptr.Of(version.String()), Bouncers: []*models.MetricsBouncerInfo{}, Machines: []*models.MetricsAgentInfo{}, }, @@ -303,7 +305,7 @@ func TestAPICGetMetrics(t *testing.T) { machineIDs: []string{"a", "b", "c"}, bouncers: []string{"1", "2", "3"}, expectedMetric: &models.Metrics{ - ApilVersion: types.StrPtr(cwversion.VersionStr()), + ApilVersion: ptr.Of(version.String()), Bouncers: []*models.MetricsBouncerInfo{ { CustomName: "1", @@ -374,23 +376,23 @@ func TestAPICGetMetrics(t *testing.T) { func TestCreateAlertsForDecision(t *testing.T) { httpBfDecisionList := &models.Decision{ - Origin: types.StrPtr(types.ListOrigin), - Scenario: types.StrPtr("crowdsecurity/http-bf"), + Origin: ptr.Of(types.ListOrigin), + Scenario: ptr.Of("crowdsecurity/http-bf"), } sshBfDecisionList := &models.Decision{ - Origin: types.StrPtr(types.ListOrigin), - Scenario: types.StrPtr("crowdsecurity/ssh-bf"), + Origin: ptr.Of(types.ListOrigin), + Scenario: 
ptr.Of("crowdsecurity/ssh-bf"), } httpBfDecisionCommunity := &models.Decision{ - Origin: types.StrPtr(types.CAPIOrigin), - Scenario: types.StrPtr("crowdsecurity/http-bf"), + Origin: ptr.Of(types.CAPIOrigin), + Scenario: ptr.Of("crowdsecurity/http-bf"), } sshBfDecisionCommunity := &models.Decision{ - Origin: types.StrPtr(types.CAPIOrigin), - Scenario: types.StrPtr("crowdsecurity/ssh-bf"), + Origin: ptr.Of(types.CAPIOrigin), + Scenario: ptr.Of("crowdsecurity/ssh-bf"), } type args struct { decisions []*models.Decision @@ -453,27 +455,27 @@ func TestCreateAlertsForDecision(t *testing.T) { func TestFillAlertsWithDecisions(t *testing.T) { httpBfDecisionCommunity := &models.Decision{ - Origin: types.StrPtr(types.CAPIOrigin), - Scenario: types.StrPtr("crowdsecurity/http-bf"), - Scope: types.StrPtr("ip"), + Origin: ptr.Of(types.CAPIOrigin), + Scenario: ptr.Of("crowdsecurity/http-bf"), + Scope: ptr.Of("ip"), } sshBfDecisionCommunity := &models.Decision{ - Origin: types.StrPtr(types.CAPIOrigin), - Scenario: types.StrPtr("crowdsecurity/ssh-bf"), - Scope: types.StrPtr("ip"), + Origin: ptr.Of(types.CAPIOrigin), + Scenario: ptr.Of("crowdsecurity/ssh-bf"), + Scope: ptr.Of("ip"), } httpBfDecisionList := &models.Decision{ - Origin: types.StrPtr(types.ListOrigin), - Scenario: types.StrPtr("crowdsecurity/http-bf"), - Scope: types.StrPtr("ip"), + Origin: ptr.Of(types.ListOrigin), + Scenario: ptr.Of("crowdsecurity/http-bf"), + Scope: ptr.Of("ip"), } sshBfDecisionList := &models.Decision{ - Origin: types.StrPtr(types.ListOrigin), - Scenario: types.StrPtr("crowdsecurity/ssh-bf"), - Scope: types.StrPtr("ip"), + Origin: ptr.Of(types.ListOrigin), + Scenario: ptr.Of("crowdsecurity/ssh-bf"), + Scope: ptr.Of("ip"), } type args struct { alerts []*models.Alert @@ -572,58 +574,58 @@ func TestAPICWhitelists(t *testing.T) { "9.9.9.9", // This is already present in DB "9.1.9.9", // This not present in DB }, - Scope: types.StrPtr("Ip"), + Scope: ptr.Of("Ip"), }, // This is already present in DB }, 
New: modelscapi.GetDecisionsStreamResponseNew{ &modelscapi.GetDecisionsStreamResponseNewItem{ - Scenario: types.StrPtr("crowdsecurity/test1"), - Scope: types.StrPtr("Ip"), + Scenario: ptr.Of("crowdsecurity/test1"), + Scope: ptr.Of("Ip"), Decisions: []*modelscapi.GetDecisionsStreamResponseNewItemDecisionsItems0{ { - Value: types.StrPtr("13.2.3.4"), //wl by cidr - Duration: types.StrPtr("24h"), + Value: ptr.Of("13.2.3.4"), //wl by cidr + Duration: ptr.Of("24h"), }, }, }, &modelscapi.GetDecisionsStreamResponseNewItem{ - Scenario: types.StrPtr("crowdsecurity/test1"), - Scope: types.StrPtr("Ip"), + Scenario: ptr.Of("crowdsecurity/test1"), + Scope: ptr.Of("Ip"), Decisions: []*modelscapi.GetDecisionsStreamResponseNewItemDecisionsItems0{ { - Value: types.StrPtr("2.2.3.4"), - Duration: types.StrPtr("24h"), + Value: ptr.Of("2.2.3.4"), + Duration: ptr.Of("24h"), }, }, }, &modelscapi.GetDecisionsStreamResponseNewItem{ - Scenario: types.StrPtr("crowdsecurity/test2"), - Scope: types.StrPtr("Ip"), + Scenario: ptr.Of("crowdsecurity/test2"), + Scope: ptr.Of("Ip"), Decisions: []*modelscapi.GetDecisionsStreamResponseNewItemDecisionsItems0{ { - Value: types.StrPtr("13.2.3.5"), //wl by cidr - Duration: types.StrPtr("24h"), + Value: ptr.Of("13.2.3.5"), //wl by cidr + Duration: ptr.Of("24h"), }, }, }, // These two are from community list. 
&modelscapi.GetDecisionsStreamResponseNewItem{ - Scenario: types.StrPtr("crowdsecurity/test1"), - Scope: types.StrPtr("Ip"), + Scenario: ptr.Of("crowdsecurity/test1"), + Scope: ptr.Of("Ip"), Decisions: []*modelscapi.GetDecisionsStreamResponseNewItemDecisionsItems0{ { - Value: types.StrPtr("6.2.3.4"), - Duration: types.StrPtr("24h"), + Value: ptr.Of("6.2.3.4"), + Duration: ptr.Of("24h"), }, }, }, &modelscapi.GetDecisionsStreamResponseNewItem{ - Scenario: types.StrPtr("crowdsecurity/test1"), - Scope: types.StrPtr("Ip"), + Scenario: ptr.Of("crowdsecurity/test1"), + Scope: ptr.Of("Ip"), Decisions: []*modelscapi.GetDecisionsStreamResponseNewItemDecisionsItems0{ { - Value: types.StrPtr("9.2.3.4"), //wl by ip - Duration: types.StrPtr("24h"), + Value: ptr.Of("9.2.3.4"), //wl by ip + Duration: ptr.Of("24h"), }, }, }, @@ -631,18 +633,18 @@ func TestAPICWhitelists(t *testing.T) { Links: &modelscapi.GetDecisionsStreamResponseLinks{ Blocklists: []*modelscapi.BlocklistLink{ { - URL: types.StrPtr("http://api.crowdsec.net/blocklist1"), - Name: types.StrPtr("blocklist1"), - Scope: types.StrPtr("Ip"), - Remediation: types.StrPtr("ban"), - Duration: types.StrPtr("24h"), + URL: ptr.Of("http://api.crowdsec.net/blocklist1"), + Name: ptr.Of("blocklist1"), + Scope: ptr.Of("Ip"), + Remediation: ptr.Of("ban"), + Duration: ptr.Of("24h"), }, { - URL: types.StrPtr("http://api.crowdsec.net/blocklist2"), - Name: types.StrPtr("blocklist2"), - Scope: types.StrPtr("Ip"), - Remediation: types.StrPtr("ban"), - Duration: types.StrPtr("24h"), + URL: ptr.Of("http://api.crowdsec.net/blocklist2"), + Name: ptr.Of("blocklist2"), + Scope: ptr.Of("Ip"), + Remediation: ptr.Of("ban"), + Duration: ptr.Of("24h"), }, }, }, @@ -661,7 +663,7 @@ func TestAPICWhitelists(t *testing.T) { apic, err := apiclient.NewDefaultClient( url, "/api", - fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + fmt.Sprintf("crowdsec/%s", version.String()), nil, ) require.NoError(t, err) @@ -734,27 +736,27 @@ func TestAPICPullTop(t 
*testing.T) { "9.9.9.9", // This is already present in DB "9.1.9.9", // This not present in DB }, - Scope: types.StrPtr("Ip"), + Scope: ptr.Of("Ip"), }, // This is already present in DB }, New: modelscapi.GetDecisionsStreamResponseNew{ &modelscapi.GetDecisionsStreamResponseNewItem{ - Scenario: types.StrPtr("crowdsecurity/test1"), - Scope: types.StrPtr("Ip"), + Scenario: ptr.Of("crowdsecurity/test1"), + Scope: ptr.Of("Ip"), Decisions: []*modelscapi.GetDecisionsStreamResponseNewItemDecisionsItems0{ { - Value: types.StrPtr("1.2.3.4"), - Duration: types.StrPtr("24h"), + Value: ptr.Of("1.2.3.4"), + Duration: ptr.Of("24h"), }, }, }, &modelscapi.GetDecisionsStreamResponseNewItem{ - Scenario: types.StrPtr("crowdsecurity/test2"), - Scope: types.StrPtr("Ip"), + Scenario: ptr.Of("crowdsecurity/test2"), + Scope: ptr.Of("Ip"), Decisions: []*modelscapi.GetDecisionsStreamResponseNewItemDecisionsItems0{ { - Value: types.StrPtr("1.2.3.5"), - Duration: types.StrPtr("24h"), + Value: ptr.Of("1.2.3.5"), + Duration: ptr.Of("24h"), }, }, }, // These two are from community list. 
@@ -762,18 +764,18 @@ func TestAPICPullTop(t *testing.T) { Links: &modelscapi.GetDecisionsStreamResponseLinks{ Blocklists: []*modelscapi.BlocklistLink{ { - URL: types.StrPtr("http://api.crowdsec.net/blocklist1"), - Name: types.StrPtr("blocklist1"), - Scope: types.StrPtr("Ip"), - Remediation: types.StrPtr("ban"), - Duration: types.StrPtr("24h"), + URL: ptr.Of("http://api.crowdsec.net/blocklist1"), + Name: ptr.Of("blocklist1"), + Scope: ptr.Of("Ip"), + Remediation: ptr.Of("ban"), + Duration: ptr.Of("24h"), }, { - URL: types.StrPtr("http://api.crowdsec.net/blocklist2"), - Name: types.StrPtr("blocklist2"), - Scope: types.StrPtr("Ip"), - Remediation: types.StrPtr("ban"), - Duration: types.StrPtr("24h"), + URL: ptr.Of("http://api.crowdsec.net/blocklist2"), + Name: ptr.Of("blocklist2"), + Scope: ptr.Of("Ip"), + Remediation: ptr.Of("ban"), + Duration: ptr.Of("24h"), }, }, }, @@ -792,7 +794,7 @@ func TestAPICPullTop(t *testing.T) { apic, err := apiclient.NewDefaultClient( url, "/api", - fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + fmt.Sprintf("crowdsec/%s", version.String()), nil, ) require.NoError(t, err) @@ -840,12 +842,12 @@ func TestAPICPullTopBLCacheFirstCall(t *testing.T) { modelscapi.GetDecisionsStreamResponse{ New: modelscapi.GetDecisionsStreamResponseNew{ &modelscapi.GetDecisionsStreamResponseNewItem{ - Scenario: types.StrPtr("crowdsecurity/test1"), - Scope: types.StrPtr("Ip"), + Scenario: ptr.Of("crowdsecurity/test1"), + Scope: ptr.Of("Ip"), Decisions: []*modelscapi.GetDecisionsStreamResponseNewItemDecisionsItems0{ { - Value: types.StrPtr("1.2.3.4"), - Duration: types.StrPtr("24h"), + Value: ptr.Of("1.2.3.4"), + Duration: ptr.Of("24h"), }, }, }, @@ -853,11 +855,11 @@ func TestAPICPullTopBLCacheFirstCall(t *testing.T) { Links: &modelscapi.GetDecisionsStreamResponseLinks{ Blocklists: []*modelscapi.BlocklistLink{ { - URL: types.StrPtr("http://api.crowdsec.net/blocklist1"), - Name: types.StrPtr("blocklist1"), - Scope: types.StrPtr("Ip"), - Remediation: 
types.StrPtr("ban"), - Duration: types.StrPtr("24h"), + URL: ptr.Of("http://api.crowdsec.net/blocklist1"), + Name: ptr.Of("blocklist1"), + Scope: ptr.Of("Ip"), + Remediation: ptr.Of("ban"), + Duration: ptr.Of("24h"), }, }, }, @@ -874,7 +876,7 @@ func TestAPICPullTopBLCacheFirstCall(t *testing.T) { apic, err := apiclient.NewDefaultClient( url, "/api", - fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + fmt.Sprintf("crowdsec/%s", version.String()), nil, ) require.NoError(t, err) @@ -883,7 +885,7 @@ func TestAPICPullTopBLCacheFirstCall(t *testing.T) { err = api.PullTop(false) require.NoError(t, err) - blocklistConfigItemName := fmt.Sprintf("blocklist:%s:last_pull", *types.StrPtr("blocklist1")) + blocklistConfigItemName := "blocklist:blocklist1:last_pull" lastPullTimestamp, err := api.dbClient.GetConfigItem(blocklistConfigItemName) require.NoError(t, err) assert.NotEqual(t, "", *lastPullTimestamp) @@ -927,12 +929,12 @@ func TestAPICPullTopBLCacheForceCall(t *testing.T) { modelscapi.GetDecisionsStreamResponse{ New: modelscapi.GetDecisionsStreamResponseNew{ &modelscapi.GetDecisionsStreamResponseNewItem{ - Scenario: types.StrPtr("crowdsecurity/test1"), - Scope: types.StrPtr("Ip"), + Scenario: ptr.Of("crowdsecurity/test1"), + Scope: ptr.Of("Ip"), Decisions: []*modelscapi.GetDecisionsStreamResponseNewItemDecisionsItems0{ { - Value: types.StrPtr("1.2.3.4"), - Duration: types.StrPtr("24h"), + Value: ptr.Of("1.2.3.4"), + Duration: ptr.Of("24h"), }, }, }, @@ -940,11 +942,11 @@ func TestAPICPullTopBLCacheForceCall(t *testing.T) { Links: &modelscapi.GetDecisionsStreamResponseLinks{ Blocklists: []*modelscapi.BlocklistLink{ { - URL: types.StrPtr("http://api.crowdsec.net/blocklist1"), - Name: types.StrPtr("blocklist1"), - Scope: types.StrPtr("Ip"), - Remediation: types.StrPtr("ban"), - Duration: types.StrPtr("24h"), + URL: ptr.Of("http://api.crowdsec.net/blocklist1"), + Name: ptr.Of("blocklist1"), + Scope: ptr.Of("Ip"), + Remediation: ptr.Of("ban"), + Duration: ptr.Of("24h"), }, 
}, }, @@ -961,7 +963,7 @@ func TestAPICPullTopBLCacheForceCall(t *testing.T) { apic, err := apiclient.NewDefaultClient( url, "/api", - fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + fmt.Sprintf("crowdsec/%s", version.String()), nil, ) require.NoError(t, err) @@ -981,10 +983,10 @@ func TestAPICPush(t *testing.T) { name: "simple single alert", alerts: []*models.Alert{ { - Scenario: types.StrPtr("crowdsec/test"), - ScenarioHash: types.StrPtr("certified"), - ScenarioVersion: types.StrPtr("v1.0"), - Simulated: types.BoolPtr(false), + Scenario: ptr.Of("crowdsec/test"), + ScenarioHash: ptr.Of("certified"), + ScenarioVersion: ptr.Of("v1.0"), + Simulated: ptr.Of(false), Source: &models.Source{}, }, }, @@ -994,10 +996,10 @@ func TestAPICPush(t *testing.T) { name: "simulated alert is not pushed", alerts: []*models.Alert{ { - Scenario: types.StrPtr("crowdsec/test"), - ScenarioHash: types.StrPtr("certified"), - ScenarioVersion: types.StrPtr("v1.0"), - Simulated: types.BoolPtr(true), + Scenario: ptr.Of("crowdsec/test"), + ScenarioHash: ptr.Of("certified"), + ScenarioVersion: ptr.Of("v1.0"), + Simulated: ptr.Of(true), Source: &models.Source{}, }, }, @@ -1010,10 +1012,10 @@ func TestAPICPush(t *testing.T) { alerts := make([]*models.Alert, 100) for i := 0; i < 100; i++ { alerts[i] = &models.Alert{ - Scenario: types.StrPtr("crowdsec/test"), - ScenarioHash: types.StrPtr("certified"), - ScenarioVersion: types.StrPtr("v1.0"), - Simulated: types.BoolPtr(false), + Scenario: ptr.Of("crowdsec/test"), + ScenarioHash: ptr.Of("certified"), + ScenarioVersion: ptr.Of("v1.0"), + Simulated: ptr.Of(false), Source: &models.Source{}, } } @@ -1036,7 +1038,7 @@ func TestAPICPush(t *testing.T) { apic, err := apiclient.NewDefaultClient( url, "/api", - fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + fmt.Sprintf("crowdsec/%s", version.String()), nil, ) require.NoError(t, err) @@ -1111,7 +1113,7 @@ func TestAPICSendMetrics(t *testing.T) { apiClient, err := apiclient.NewDefaultClient( url, 
"/api", - fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + fmt.Sprintf("crowdsec/%s", version.String()), nil, ) require.NoError(t, err) @@ -1179,7 +1181,7 @@ func TestAPICPull(t *testing.T) { apic, err := apiclient.NewDefaultClient( url, "/api", - fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()), + fmt.Sprintf("crowdsec/%s", version.String()), nil, ) require.NoError(t, err) @@ -1188,12 +1190,12 @@ func TestAPICPull(t *testing.T) { modelscapi.GetDecisionsStreamResponse{ New: modelscapi.GetDecisionsStreamResponseNew{ &modelscapi.GetDecisionsStreamResponseNewItem{ - Scenario: types.StrPtr("crowdsecurity/ssh-bf"), - Scope: types.StrPtr("Ip"), + Scenario: ptr.Of("crowdsecurity/ssh-bf"), + Scope: ptr.Of("Ip"), Decisions: []*modelscapi.GetDecisionsStreamResponseNewItemDecisionsItems0{ { - Value: types.StrPtr("1.2.3.5"), - Duration: types.StrPtr("24h"), + Value: ptr.Of("1.2.3.5"), + Duration: ptr.Of("24h"), }, }, }, @@ -1228,29 +1230,29 @@ func TestShouldShareAlert(t *testing.T) { { name: "custom alert should be shared if config enables it", consoleConfig: &csconfig.ConsoleConfig{ - ShareCustomScenarios: types.BoolPtr(true), + ShareCustomScenarios: ptr.Of(true), }, - alert: &models.Alert{Simulated: types.BoolPtr(false)}, + alert: &models.Alert{Simulated: ptr.Of(false)}, expectedRet: true, expectedTrust: "custom", }, { name: "custom alert should not be shared if config disables it", consoleConfig: &csconfig.ConsoleConfig{ - ShareCustomScenarios: types.BoolPtr(false), + ShareCustomScenarios: ptr.Of(false), }, - alert: &models.Alert{Simulated: types.BoolPtr(false)}, + alert: &models.Alert{Simulated: ptr.Of(false)}, expectedRet: false, expectedTrust: "custom", }, { name: "manual alert should be shared if config enables it", consoleConfig: &csconfig.ConsoleConfig{ - ShareManualDecisions: types.BoolPtr(true), + ShareManualDecisions: ptr.Of(true), }, alert: &models.Alert{ - Simulated: types.BoolPtr(false), - Decisions: []*models.Decision{{Origin: 
types.StrPtr(types.CscliOrigin)}}, + Simulated: ptr.Of(false), + Decisions: []*models.Decision{{Origin: ptr.Of(types.CscliOrigin)}}, }, expectedRet: true, expectedTrust: "manual", @@ -1258,11 +1260,11 @@ func TestShouldShareAlert(t *testing.T) { { name: "manual alert should not be shared if config disables it", consoleConfig: &csconfig.ConsoleConfig{ - ShareManualDecisions: types.BoolPtr(false), + ShareManualDecisions: ptr.Of(false), }, alert: &models.Alert{ - Simulated: types.BoolPtr(false), - Decisions: []*models.Decision{{Origin: types.StrPtr(types.CscliOrigin)}}, + Simulated: ptr.Of(false), + Decisions: []*models.Decision{{Origin: ptr.Of(types.CscliOrigin)}}, }, expectedRet: false, expectedTrust: "manual", @@ -1270,11 +1272,11 @@ func TestShouldShareAlert(t *testing.T) { { name: "manual alert should be shared if config enables it", consoleConfig: &csconfig.ConsoleConfig{ - ShareTaintedScenarios: types.BoolPtr(true), + ShareTaintedScenarios: ptr.Of(true), }, alert: &models.Alert{ - Simulated: types.BoolPtr(false), - ScenarioHash: types.StrPtr("whateverHash"), + Simulated: ptr.Of(false), + ScenarioHash: ptr.Of("whateverHash"), }, expectedRet: true, expectedTrust: "tainted", @@ -1282,11 +1284,11 @@ func TestShouldShareAlert(t *testing.T) { { name: "manual alert should not be shared if config disables it", consoleConfig: &csconfig.ConsoleConfig{ - ShareTaintedScenarios: types.BoolPtr(false), + ShareTaintedScenarios: ptr.Of(false), }, alert: &models.Alert{ - Simulated: types.BoolPtr(false), - ScenarioHash: types.StrPtr("whateverHash"), + Simulated: ptr.Of(false), + ScenarioHash: ptr.Of("whateverHash"), }, expectedRet: false, expectedTrust: "tainted", diff --git a/pkg/apiserver/apiserver.go b/pkg/apiserver/apiserver.go index 278a776be..c65100573 100644 --- a/pkg/apiserver/apiserver.go +++ b/pkg/apiserver/apiserver.go @@ -12,6 +12,8 @@ import ( "strings" "time" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" 
"github.com/crowdsecurity/crowdsec/pkg/apiserver/controllers" v1 "github.com/crowdsecurity/crowdsec/pkg/apiserver/middlewares/v1" @@ -87,7 +89,7 @@ func CustomRecoveryWithWriter() gin.HandlerFunc { log.Warningf("client %s disconnected : %s", c.ClientIP(), err) c.Abort() } else { - filename := types.WriteStackTrace(err) + filename := trace.WriteStackTrace(err) log.Warningf("client %s error : %s", c.ClientIP(), err) log.Warningf("stacktrace written to %s, please join to your issue", filename) c.AbortWithStatus(http.StatusInternalServerError) @@ -311,7 +313,13 @@ func (s *APIServer) GetTLSConfig() (*tls.Config, error) { if err != nil { return nil, errors.Wrap(err, "Error opening cert file") } - caCertPool = x509.NewCertPool() + caCertPool, err = x509.SystemCertPool() + if err != nil { + log.Warnf("Error loading system CA certificates: %s", err) + } + if caCertPool == nil { + caCertPool = x509.NewCertPool() + } caCertPool.AppendCertsFromPEM(caCert) } } @@ -325,7 +333,7 @@ func (s *APIServer) GetTLSConfig() (*tls.Config, error) { } func (s *APIServer) Run(apiReady chan bool) error { - defer types.CatchPanic("lapi/runServer") + defer trace.CatchPanic("lapi/runServer") tlsCfg, err := s.GetTLSConfig() if err != nil { return errors.Wrap(err, "while creating TLS config") diff --git a/pkg/apiserver/apiserver_test.go b/pkg/apiserver/apiserver_test.go index 3eaaab651..464c93f83 100644 --- a/pkg/apiserver/apiserver_test.go +++ b/pkg/apiserver/apiserver_test.go @@ -11,8 +11,9 @@ import ( "testing" "time" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + middlewares "github.com/crowdsecurity/crowdsec/pkg/apiserver/middlewares/v1" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/types" "github.com/go-openapi/strfmt" @@ -33,16 +34,19 @@ var MachineTest = models.WatcherAuthRequest{ Password: &testPassword, } -var UserAgent = fmt.Sprintf("crowdsec-test/%s", cwversion.Version) +var UserAgent = 
fmt.Sprintf("crowdsec-test/%s", version.Version) var emptyBody = strings.NewReader("") -func LoadTestConfig() csconfig.Config { +func LoadTestConfig(t *testing.T) csconfig.Config { config := csconfig.Config{} maxAge := "1h" flushConfig := csconfig.FlushDBCfg{ MaxAge: &maxAge, } + tempDir, _ := os.MkdirTemp("", "crowdsec_tests") + t.Cleanup(func() { os.RemoveAll(tempDir) }) + dbconfig := csconfig.DatabaseCfg{ Type: "sqlite", DbPath: filepath.Join(tempDir, "ent"), @@ -68,13 +72,16 @@ func LoadTestConfig() csconfig.Config { return config } -func LoadTestConfigForwardedFor() csconfig.Config { +func LoadTestConfigForwardedFor(t *testing.T) csconfig.Config { config := csconfig.Config{} maxAge := "1h" flushConfig := csconfig.FlushDBCfg{ MaxAge: &maxAge, } + tempDir, _ := os.MkdirTemp("", "crowdsec_tests") + t.Cleanup(func() { os.RemoveAll(tempDir) }) + dbconfig := csconfig.DatabaseCfg{ Type: "sqlite", DbPath: filepath.Join(tempDir, "ent"), @@ -102,8 +109,8 @@ func LoadTestConfigForwardedFor() csconfig.Config { return config } -func NewAPIServer() (*APIServer, csconfig.Config, error) { - config := LoadTestConfig() +func NewAPIServer(t *testing.T) (*APIServer, csconfig.Config, error) { + config := LoadTestConfig(t) os.Remove("./ent") apiServer, err := NewServer(config.API.Server) if err != nil { @@ -114,8 +121,8 @@ func NewAPIServer() (*APIServer, csconfig.Config, error) { return apiServer, config, nil } -func NewAPITest() (*gin.Engine, csconfig.Config, error) { - apiServer, config, err := NewAPIServer() +func NewAPITest(t *testing.T) (*gin.Engine, csconfig.Config, error) { + apiServer, config, err := NewAPIServer(t) if err != nil { return nil, config, fmt.Errorf("unable to run local API: %s", err) } @@ -130,8 +137,8 @@ func NewAPITest() (*gin.Engine, csconfig.Config, error) { return router, config, nil } -func NewAPITestForwardedFor() (*gin.Engine, csconfig.Config, error) { - config := LoadTestConfigForwardedFor() +func NewAPITestForwardedFor(t *testing.T) (*gin.Engine, 
csconfig.Config, error) { + config := LoadTestConfigForwardedFor(t) os.Remove("./ent") apiServer, err := NewServer(config.API.Server) @@ -284,7 +291,7 @@ func CreateTestBouncer(config *csconfig.DatabaseCfg) (string, error) { } func TestWithWrongDBConfig(t *testing.T) { - config := LoadTestConfig() + config := LoadTestConfig(t) config.API.Server.DbConfig.Type = "test" apiServer, err := NewServer(config.API.Server) @@ -293,7 +300,7 @@ func TestWithWrongDBConfig(t *testing.T) { } func TestWithWrongFlushConfig(t *testing.T) { - config := LoadTestConfig() + config := LoadTestConfig(t) maxItems := -1 config.API.Server.DbConfig.Flush.MaxItems = &maxItems apiServer, err := NewServer(config.API.Server) @@ -303,7 +310,7 @@ func TestWithWrongFlushConfig(t *testing.T) { } func TestUnknownPath(t *testing.T) { - router, _, err := NewAPITest() + router, _, err := NewAPITest(t) if err != nil { log.Fatalf("unable to run local API: %s", err) } @@ -333,13 +340,15 @@ ListenURI string `yaml:"listen_uri,omitempty"` //127.0 */ func TestLoggingDebugToFileConfig(t *testing.T) { - /*declare settings*/ maxAge := "1h" flushConfig := csconfig.FlushDBCfg{ MaxAge: &maxAge, } + tempDir, _ := os.MkdirTemp("", "crowdsec_tests") + t.Cleanup(func() { os.RemoveAll(tempDir) }) + dbconfig := csconfig.DatabaseCfg{ Type: "sqlite", DbPath: filepath.Join(tempDir, "ent"), @@ -397,7 +406,10 @@ func TestLoggingErrorToFileConfig(t *testing.T) { flushConfig := csconfig.FlushDBCfg{ MaxAge: &maxAge, } + tempDir, _ := os.MkdirTemp("", "crowdsec_tests") + t.Cleanup(func() { os.RemoveAll(tempDir) }) + dbconfig := csconfig.DatabaseCfg{ Type: "sqlite", DbPath: filepath.Join(tempDir, "ent"), diff --git a/pkg/apiserver/jwt_test.go b/pkg/apiserver/jwt_test.go index ece32c1b8..ebca91252 100644 --- a/pkg/apiserver/jwt_test.go +++ b/pkg/apiserver/jwt_test.go @@ -11,7 +11,7 @@ import ( ) func TestLogin(t *testing.T) { - router, config, err := NewAPITest() + router, config, err := NewAPITest(t) if err != nil { 
log.Fatalf("unable to run local API: %s", err) } diff --git a/pkg/apiserver/machines_test.go b/pkg/apiserver/machines_test.go index 373cd42a4..25fd0eaf4 100644 --- a/pkg/apiserver/machines_test.go +++ b/pkg/apiserver/machines_test.go @@ -12,7 +12,7 @@ import ( ) func TestCreateMachine(t *testing.T) { - router, _, err := NewAPITest() + router, _, err := NewAPITest(t) if err != nil { log.Fatalf("unable to run local API: %s", err) } @@ -53,7 +53,7 @@ func TestCreateMachine(t *testing.T) { } func TestCreateMachineWithForwardedFor(t *testing.T) { - router, config, err := NewAPITestForwardedFor() + router, config, err := NewAPITestForwardedFor(t) if err != nil { log.Fatalf("unable to run local API: %s", err) } @@ -82,7 +82,7 @@ func TestCreateMachineWithForwardedFor(t *testing.T) { } func TestCreateMachineWithForwardedForNoConfig(t *testing.T) { - router, config, err := NewAPITest() + router, config, err := NewAPITest(t) if err != nil { log.Fatalf("unable to run local API: %s", err) } @@ -113,7 +113,7 @@ func TestCreateMachineWithForwardedForNoConfig(t *testing.T) { } func TestCreateMachineWithoutForwardedFor(t *testing.T) { - router, config, err := NewAPITestForwardedFor() + router, config, err := NewAPITestForwardedFor(t) if err != nil { log.Fatalf("unable to run local API: %s", err) } @@ -143,7 +143,7 @@ func TestCreateMachineWithoutForwardedFor(t *testing.T) { } func TestCreateMachineAlreadyExist(t *testing.T) { - router, _, err := NewAPITest() + router, _, err := NewAPITest(t) if err != nil { log.Fatalf("unable to run local API: %s", err) } diff --git a/pkg/apiserver/papi.go b/pkg/apiserver/papi.go index 33028b13c..e59160aee 100644 --- a/pkg/apiserver/papi.go +++ b/pkg/apiserver/papi.go @@ -8,6 +8,8 @@ import ( "sync" "time" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" "github.com/crowdsecurity/crowdsec/pkg/csconfig" "github.com/crowdsecurity/crowdsec/pkg/database" @@ -217,8 +219,7 @@ func (p *Papi) 
PullOnce(since time.Time, sync bool) error { // PullPAPI is the long polling client for real-time decisions from PAPI func (p *Papi) Pull() error { - - defer types.CatchPanic("lapi/PullPAPI") + defer trace.CatchPanic("lapi/PullPAPI") p.Logger.Infof("Starting Polling API Pull") lastTimestamp := time.Time{} @@ -270,7 +271,7 @@ func (p *Papi) Pull() error { } func (p *Papi) SyncDecisions() error { - defer types.CatchPanic("lapi/syncDecisionsToCAPI") + defer trace.CatchPanic("lapi/syncDecisionsToCAPI") var cache models.DecisionsDeleteRequest ticker := time.NewTicker(p.SyncInterval) diff --git a/pkg/apiserver/papi_cmd.go b/pkg/apiserver/papi_cmd.go index 3635bc207..4cb9603b7 100644 --- a/pkg/apiserver/papi_cmd.go +++ b/pkg/apiserver/papi_cmd.go @@ -5,11 +5,13 @@ import ( "fmt" "time" + log "github.com/sirupsen/logrus" + + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/types" - "github.com/pkg/errors" - log "github.com/sirupsen/logrus" ) type deleteDecisions struct { @@ -75,7 +77,7 @@ func AlertCmd(message *Message, p *Papi, sync bool) error { alert := &models.Alert{} if err := json.Unmarshal(data, alert); err != nil { - return errors.Wrapf(err, "message for '%s' contains bad alert format", message.Header.OperationType) + return fmt.Errorf("message for '%s' contains bad alert format: %w", message.Header.OperationType, err) } log.Infof("Received order %s from PAPI (%d decisions)", alert.UUID, len(alert.Decisions)) @@ -83,20 +85,20 @@ func AlertCmd(message *Message, p *Papi, sync bool) error { /*Fix the alert with missing mandatory items*/ if alert.StartAt == nil || *alert.StartAt == "" { log.Warnf("Alert %d has no StartAt, setting it to now", alert.ID) - alert.StartAt = types.StrPtr(time.Now().UTC().Format(time.RFC3339)) + alert.StartAt = ptr.Of(time.Now().UTC().Format(time.RFC3339)) } if alert.StopAt == nil || *alert.StopAt == "" { 
log.Warnf("Alert %d has no StopAt, setting it to now", alert.ID) - alert.StopAt = types.StrPtr(time.Now().UTC().Format(time.RFC3339)) + alert.StopAt = ptr.Of(time.Now().UTC().Format(time.RFC3339)) } - alert.EventsCount = types.Int32Ptr(0) - alert.Capacity = types.Int32Ptr(0) - alert.Leakspeed = types.StrPtr("") - alert.Simulated = types.BoolPtr(false) - alert.ScenarioHash = types.StrPtr("") - alert.ScenarioVersion = types.StrPtr("") - alert.Message = types.StrPtr("") - alert.Scenario = types.StrPtr("") + alert.EventsCount = ptr.Of(int32(0)) + alert.Capacity = ptr.Of(int32(0)) + alert.Leakspeed = ptr.Of("") + alert.Simulated = ptr.Of(false) + alert.ScenarioHash = ptr.Of("") + alert.ScenarioVersion = ptr.Of("") + alert.Message = ptr.Of("") + alert.Scenario = ptr.Of("") alert.Source = &models.Source{} //if we're setting Source.Scope to types.ConsoleOrigin, it messes up the alert's value @@ -105,7 +107,7 @@ func AlertCmd(message *Message, p *Papi, sync bool) error { alert.Source.Value = alert.Decisions[0].Value } else { log.Warningf("No decision found in alert for Polling API (%s : %s)", message.Header.Source.User, message.Header.Message) - alert.Source.Scope = types.StrPtr(types.ConsoleOrigin) + alert.Source.Scope = ptr.Of(types.ConsoleOrigin) alert.Source.Value = &message.Header.Source.User } alert.Scenario = &message.Header.Message diff --git a/pkg/csconfig/api.go b/pkg/csconfig/api.go index bcc3c5b93..06d6a9712 100644 --- a/pkg/csconfig/api.go +++ b/pkg/csconfig/api.go @@ -9,13 +9,13 @@ import ( "strings" "time" - "github.com/pkg/errors" log "github.com/sirupsen/logrus" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + "github.com/crowdsecurity/go-cs-lib/pkg/yamlpatch" + "github.com/crowdsecurity/crowdsec/pkg/apiclient" - "github.com/crowdsecurity/crowdsec/pkg/types" - "github.com/crowdsecurity/crowdsec/pkg/yamlpatch" ) type APICfg struct { @@ -82,11 +82,11 @@ func (o *OnlineApiClientCfg) Load() error { o.Credentials = new(ApiCredentialsCfg) 
fcontent, err := os.ReadFile(o.CredentialsFilePath) if err != nil { - return errors.Wrapf(err, "failed to read api server credentials configuration file '%s'", o.CredentialsFilePath) + return fmt.Errorf("failed to read api server credentials configuration file '%s': %w", o.CredentialsFilePath, err) } err = yaml.UnmarshalStrict(fcontent, o.Credentials) if err != nil { - return errors.Wrapf(err, "failed unmarshaling api server credentials configuration file '%s'", o.CredentialsFilePath) + return fmt.Errorf("failed unmarshaling api server credentials configuration file '%s': %w", o.CredentialsFilePath, err) } if o.Credentials.Login == "" || o.Credentials.Password == "" || o.Credentials.URL == "" { log.Warningf("can't load CAPI credentials from '%s' (missing field)", o.CredentialsFilePath) @@ -104,7 +104,7 @@ func (l *LocalApiClientCfg) Load() error { } err = yaml.UnmarshalStrict(fcontent, &l.Credentials) if err != nil { - return errors.Wrapf(err, "failed unmarshaling api client credential configuration file '%s'", l.CredentialsFilePath) + return fmt.Errorf("failed unmarshaling api client credential configuration file '%s': %w", l.CredentialsFilePath, err) } if l.Credentials == nil || l.Credentials.URL == "" { return fmt.Errorf("no credentials or URL found in api client configuration '%s'", l.CredentialsFilePath) @@ -129,10 +129,16 @@ func (l *LocalApiClientCfg) Load() error { if l.Credentials.CACertPath != "" { caCert, err := os.ReadFile(l.Credentials.CACertPath) if err != nil { - return errors.Wrapf(err, "failed to load cacert") + return fmt.Errorf("failed to load cacert: %w", err) } - caCertPool := x509.NewCertPool() + caCertPool, err := x509.SystemCertPool() + if err != nil { + log.Warningf("Error loading system CA certificates: %s", err) + } + if caCertPool == nil { + caCertPool = x509.NewCertPool() + } caCertPool.AppendCertsFromPEM(caCert) apiclient.CaCertPool = caCertPool } @@ -140,7 +146,7 @@ func (l *LocalApiClientCfg) Load() error { if l.Credentials.CertPath 
!= "" && l.Credentials.KeyPath != "" { cert, err := tls.LoadX509KeyPair(l.Credentials.CertPath, l.Credentials.KeyPath) if err != nil { - return errors.Wrapf(err, "failed to load api client certificate") + return fmt.Errorf("failed to load api client certificate: %w", err) } apiclient.Cert = &cert @@ -222,47 +228,49 @@ func (c *Config) LoadAPIServer() error { log.Warning("crowdsec local API is disabled from flag") } - if c.API.Server != nil { - - //inherit log level from common, then api->server - var logLevel log.Level - if c.API.Server.LogLevel != nil { - logLevel = *c.API.Server.LogLevel - } else if c.Common.LogLevel != nil { - logLevel = *c.Common.LogLevel - } else { - logLevel = log.InfoLevel - } - - if c.API.Server.PapiLogLevel == nil { - c.API.Server.PapiLogLevel = &logLevel - } - - if c.API.Server.OnlineClient != nil && c.API.Server.OnlineClient.CredentialsFilePath != "" { - if err := c.API.Server.OnlineClient.Load(); err != nil { - return errors.Wrap(err, "loading online client credentials") - } - } - if c.API.Server.OnlineClient == nil || c.API.Server.OnlineClient.Credentials == nil { - log.Printf("push and pull to Central API disabled") - } - if err := c.LoadDBConfig(); err != nil { - return err - } - - if err := c.API.Server.LoadCapiWhitelists(); err != nil { - return err - } - - } else { + if c.API.Server == nil { log.Warning("crowdsec local API is disabled") c.DisableAPI = true return nil } + //inherit log level from common, then api->server + var logLevel log.Level + if c.API.Server.LogLevel != nil { + logLevel = *c.API.Server.LogLevel + } else if c.Common.LogLevel != nil { + logLevel = *c.Common.LogLevel + } else { + logLevel = log.InfoLevel + } + + if c.API.Server.PapiLogLevel == nil { + c.API.Server.PapiLogLevel = &logLevel + } + + if c.API.Server.OnlineClient != nil && c.API.Server.OnlineClient.CredentialsFilePath != "" { + if err := c.API.Server.OnlineClient.Load(); err != nil { + return fmt.Errorf("loading online client credentials: %w", err) + 
} + } + if c.API.Server.OnlineClient == nil || c.API.Server.OnlineClient.Credentials == nil { + log.Printf("push and pull to Central API disabled") + } + if err := c.LoadDBConfig(); err != nil { + return err + } + + if err := c.API.Server.LoadCapiWhitelists(); err != nil { + return err + } + + if c.API.Server.CapiWhitelistsPath != "" { + log.Infof("loaded capi whitelist from %s: %d IPs, %d CIDRs", c.API.Server.CapiWhitelistsPath, len(c.API.Server.CapiWhitelists.Ips), len(c.API.Server.CapiWhitelists.Cidrs)) + } + if c.API.Server.Enable == nil { // if the option is not present, it is enabled by default - c.API.Server.Enable = types.BoolPtr(true) + c.API.Server.Enable = ptr.Of(true) } if !*c.API.Server.Enable { @@ -291,18 +299,18 @@ func (c *Config) LoadAPIServer() error { c.API.Server.UseForwardedForHeaders = true } if err := c.API.Server.LoadProfiles(); err != nil { - return errors.Wrap(err, "while loading profiles for LAPI") + return fmt.Errorf("while loading profiles for LAPI: %w", err) } if c.API.Server.ConsoleConfigPath == "" { c.API.Server.ConsoleConfigPath = DefaultConsoleConfigFilePath } if err := c.API.Server.LoadConsoleConfig(); err != nil { - return errors.Wrap(err, "while loading console options") + return fmt.Errorf("while loading console options: %w", err) } if c.API.Server.OnlineClient != nil && c.API.Server.OnlineClient.CredentialsFilePath != "" { if err := c.API.Server.OnlineClient.Load(); err != nil { - return errors.Wrap(err, "loading online client credentials") + return fmt.Errorf("loading online client credentials: %w", err) } } if c.API.Server.OnlineClient == nil || c.API.Server.OnlineClient.Credentials == nil { @@ -311,7 +319,7 @@ func (c *Config) LoadAPIServer() error { if c.API.CTI != nil { if err := c.API.CTI.Load(); err != nil { - return errors.Wrap(err, "loading CTI configuration") + return fmt.Errorf("loading CTI configuration: %w", err) } } @@ -354,7 +362,7 @@ func (s *LocalApiServerCfg) LoadCapiWhitelists() error { for _, v := range 
fromCfg.Cidrs { _, tnet, err := net.ParseCIDR(v) if err != nil { - return fmt.Errorf("unable to parse cidr whitelist '%s' : %v.", v, err) + return fmt.Errorf("unable to parse cidr whitelist '%s' : %v", v, err) } s.CapiWhitelists.Cidrs = append(s.CapiWhitelists.Cidrs, tnet) } diff --git a/pkg/csconfig/api_test.go b/pkg/csconfig/api_test.go index 802706dd7..7450800e9 100644 --- a/pkg/csconfig/api_test.go +++ b/pkg/csconfig/api_test.go @@ -7,12 +7,12 @@ import ( "strings" "testing" - "github.com/crowdsecurity/crowdsec/pkg/types" log "github.com/sirupsen/logrus" "github.com/stretchr/testify/assert" "gopkg.in/yaml.v2" - "github.com/crowdsecurity/crowdsec/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" ) func TestLoadLocalApiClientCfg(t *testing.T) { @@ -53,7 +53,7 @@ func TestLoadLocalApiClientCfg(t *testing.T) { name: "valid configuration with insecure skip verify", input: &LocalApiClientCfg{ CredentialsFilePath: "./tests/lapi-secrets.yaml", - InsecureSkipVerify: types.BoolPtr(false), + InsecureSkipVerify: ptr.Of(false), }, expected: &ApiCredentialsCfg{ URL: "http://localhost:8080/", @@ -188,21 +188,21 @@ func TestLoadAPIServer(t *testing.T) { DisableAPI: false, }, expected: &LocalApiServerCfg{ - Enable: types.BoolPtr(true), + Enable: ptr.Of(true), ListenURI: "http://crowdsec.api", TLS: nil, DbConfig: &DatabaseCfg{ DbPath: "./tests/test.db", Type: "sqlite", - MaxOpenConns: types.IntPtr(DEFAULT_MAX_OPEN_CONNS), + MaxOpenConns: ptr.Of(DEFAULT_MAX_OPEN_CONNS), }, ConsoleConfigPath: DefaultConfigPath("console.yaml"), ConsoleConfig: &ConsoleConfig{ - ShareManualDecisions: types.BoolPtr(false), - ShareTaintedScenarios: types.BoolPtr(true), - ShareCustomScenarios: types.BoolPtr(true), - ShareContext: types.BoolPtr(false), - ConsoleManagement: types.BoolPtr(false), + ShareManualDecisions: ptr.Of(false), + ShareTaintedScenarios: ptr.Of(true), + ShareCustomScenarios: ptr.Of(true), + ShareContext: ptr.Of(false), + 
ConsoleManagement: ptr.Of(false), }, LogDir: LogDirFullPath, LogMedia: "stdout", diff --git a/pkg/csconfig/config.go b/pkg/csconfig/config.go index 32f17cbf7..d4b4aa4af 100644 --- a/pkg/csconfig/config.go +++ b/pkg/csconfig/config.go @@ -5,13 +5,12 @@ import ( "os" "path/filepath" - "github.com/pkg/errors" log "github.com/sirupsen/logrus" "gopkg.in/yaml.v2" - "github.com/crowdsecurity/crowdsec/pkg/csstring" - "github.com/crowdsecurity/crowdsec/pkg/types" - "github.com/crowdsecurity/crowdsec/pkg/yamlpatch" + "github.com/crowdsecurity/go-cs-lib/pkg/csstring" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + "github.com/crowdsecurity/go-cs-lib/pkg/yamlpatch" ) // defaultConfigDir is the base path to all configuration files, to be overridden in the Makefile */ @@ -41,18 +40,18 @@ type Config struct { func (c *Config) Dump() error { out, err := yaml.Marshal(c) if err != nil { - return errors.Wrap(err, "failed marshaling config") + return fmt.Errorf("failed marshaling config: %w", err) } fmt.Printf("%s", string(out)) return nil } -func NewConfig(configFile string, disableAgent bool, disableAPI bool, quiet bool) (*Config, error) { +func NewConfig(configFile string, disableAgent bool, disableAPI bool, quiet bool) (*Config, string, error) { patcher := yamlpatch.NewPatcher(configFile, ".local") patcher.SetQuiet(quiet) fcontent, err := patcher.MergedPatchContent() if err != nil { - return nil, err + return nil, "", err } configData := csstring.StrictExpand(string(fcontent), os.LookupEnv) cfg := Config{ @@ -64,9 +63,9 @@ func NewConfig(configFile string, disableAgent bool, disableAPI bool, quiet bool err = yaml.UnmarshalStrict([]byte(configData), &cfg) if err != nil { // this is actually the "merged" yaml - return nil, errors.Wrap(err, configFile) + return nil, "", fmt.Errorf("%s: %w", configFile, err) } - return &cfg, nil + return &cfg, configData, nil } func NewDefaultConfig() *Config { @@ -112,14 +111,14 @@ func NewDefaultConfig() *Config { }, }, CTI: &CTICfg{ - Enabled: 
types.BoolPtr(false), + Enabled: ptr.Of(false), }, } dbConfig := DatabaseCfg{ Type: "sqlite", DbPath: DefaultDataPath("crowdsec.db"), - MaxOpenConns: types.IntPtr(DEFAULT_MAX_OPEN_CONNS), + MaxOpenConns: ptr.Of(DEFAULT_MAX_OPEN_CONNS), } globalCfg := Config{ diff --git a/pkg/csconfig/config_test.go b/pkg/csconfig/config_test.go index 6f556245f..7a53d0e72 100644 --- a/pkg/csconfig/config_test.go +++ b/pkg/csconfig/config_test.go @@ -6,17 +6,17 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/crowdsecurity/crowdsec/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" ) func TestNormalLoad(t *testing.T) { - _, err := NewConfig("./tests/config.yaml", false, false, false) + _, _, err := NewConfig("./tests/config.yaml", false, false, false) require.NoError(t, err) - _, err = NewConfig("./tests/xxx.yaml", false, false, false) + _, _, err = NewConfig("./tests/xxx.yaml", false, false, false) assert.EqualError(t, err, "while reading yaml file: open ./tests/xxx.yaml: "+cstest.FileNotFoundMessage) - _, err = NewConfig("./tests/simulation.yaml", false, false, false) + _, _, err = NewConfig("./tests/simulation.yaml", false, false, false) assert.EqualError(t, err, "./tests/simulation.yaml: yaml: unmarshal errors:\n line 1: field simulation not found in type csconfig.Config") } diff --git a/pkg/csconfig/console.go b/pkg/csconfig/console.go index 18d175365..5adf6ca37 100644 --- a/pkg/csconfig/console.go +++ b/pkg/csconfig/console.go @@ -4,11 +4,12 @@ import ( "fmt" "os" - "github.com/crowdsecurity/crowdsec/pkg/fflag" - "github.com/crowdsecurity/crowdsec/pkg/types" - "github.com/pkg/errors" log "github.com/sirupsen/logrus" "gopkg.in/yaml.v2" + + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + + "github.com/crowdsecurity/crowdsec/pkg/fflag" ) const ( @@ -35,11 +36,11 @@ func (c *LocalApiServerCfg) LoadConsoleConfig() error { c.ConsoleConfig = &ConsoleConfig{} if _, err := os.Stat(c.ConsoleConfigPath); err != nil && 
os.IsNotExist(err) { log.Debugf("no console configuration to load") - c.ConsoleConfig.ShareCustomScenarios = types.BoolPtr(true) - c.ConsoleConfig.ShareTaintedScenarios = types.BoolPtr(true) - c.ConsoleConfig.ShareManualDecisions = types.BoolPtr(false) - c.ConsoleConfig.ConsoleManagement = types.BoolPtr(false) - c.ConsoleConfig.ShareContext = types.BoolPtr(false) + c.ConsoleConfig.ShareCustomScenarios = ptr.Of(true) + c.ConsoleConfig.ShareTaintedScenarios = ptr.Of(true) + c.ConsoleConfig.ShareManualDecisions = ptr.Of(false) + c.ConsoleConfig.ConsoleManagement = ptr.Of(false) + c.ConsoleConfig.ShareContext = ptr.Of(false) return nil } @@ -54,27 +55,27 @@ func (c *LocalApiServerCfg) LoadConsoleConfig() error { if c.ConsoleConfig.ShareCustomScenarios == nil { log.Debugf("no share_custom scenarios found, setting to true") - c.ConsoleConfig.ShareCustomScenarios = types.BoolPtr(true) + c.ConsoleConfig.ShareCustomScenarios = ptr.Of(true) } if c.ConsoleConfig.ShareTaintedScenarios == nil { log.Debugf("no share_tainted scenarios found, setting to true") - c.ConsoleConfig.ShareTaintedScenarios = types.BoolPtr(true) + c.ConsoleConfig.ShareTaintedScenarios = ptr.Of(true) } if c.ConsoleConfig.ShareManualDecisions == nil { log.Debugf("no share_manual scenarios found, setting to false") - c.ConsoleConfig.ShareManualDecisions = types.BoolPtr(false) + c.ConsoleConfig.ShareManualDecisions = ptr.Of(false) } if !fflag.PapiClient.IsEnabled() { - c.ConsoleConfig.ConsoleManagement = types.BoolPtr(false) + c.ConsoleConfig.ConsoleManagement = ptr.Of(false) } else if c.ConsoleConfig.ConsoleManagement == nil { log.Debugf("no console_management found, setting to false") - c.ConsoleConfig.ConsoleManagement = types.BoolPtr(false) + c.ConsoleConfig.ConsoleManagement = ptr.Of(false) } if c.ConsoleConfig.ShareContext == nil { log.Debugf("no 'context' found, setting to false") - c.ConsoleConfig.ShareContext = types.BoolPtr(false) + c.ConsoleConfig.ShareContext = ptr.Of(false) } log.Debugf("Console 
configuration '%s' loaded successfully", c.ConsoleConfigPath) @@ -87,7 +88,7 @@ func (c *LocalApiServerCfg) DumpConsoleConfig() error { var err error if out, err = yaml.Marshal(c.ConsoleConfig); err != nil { - return errors.Wrapf(err, "while marshaling ConsoleConfig (for %s)", c.ConsoleConfigPath) + return fmt.Errorf("while marshaling ConsoleConfig (for %s): %w", c.ConsoleConfigPath, err) } if c.ConsoleConfigPath == "" { c.ConsoleConfigPath = DefaultConsoleConfigFilePath @@ -96,7 +97,7 @@ func (c *LocalApiServerCfg) DumpConsoleConfig() error { } if err := os.WriteFile(c.ConsoleConfigPath, out, 0600); err != nil { - return errors.Wrapf(err, "while dumping console config to %s", c.ConsoleConfigPath) + return fmt.Errorf("while dumping console config to %s: %w", c.ConsoleConfigPath, err) } return nil diff --git a/pkg/csconfig/crowdsec_service.go b/pkg/csconfig/crowdsec_service.go index 8bcc13852..2642603cf 100644 --- a/pkg/csconfig/crowdsec_service.go +++ b/pkg/csconfig/crowdsec_service.go @@ -5,11 +5,10 @@ import ( "os" "path/filepath" - "github.com/pkg/errors" log "github.com/sirupsen/logrus" "gopkg.in/yaml.v2" - "github.com/crowdsecurity/crowdsec/pkg/types" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" ) // CrowdsecServiceCfg contains the location of parsers/scenarios/... 
and acquisition files @@ -48,7 +47,7 @@ func (c *Config) LoadCrowdsec() error { if c.Crowdsec.Enable == nil { // if the option is not present, it is enabled by default - c.Crowdsec.Enable = types.BoolPtr(true) + c.Crowdsec.Enable = ptr.Of(true) } if !*c.Crowdsec.Enable { @@ -72,20 +71,20 @@ func (c *Config) LoadCrowdsec() error { if c.Crowdsec.AcquisitionDirPath != "" { c.Crowdsec.AcquisitionDirPath, err = filepath.Abs(c.Crowdsec.AcquisitionDirPath) if err != nil { - return errors.Wrapf(err, "can't get absolute path of '%s'", c.Crowdsec.AcquisitionDirPath) + return fmt.Errorf("can't get absolute path of '%s': %w", c.Crowdsec.AcquisitionDirPath, err) } var files []string files, err = filepath.Glob(c.Crowdsec.AcquisitionDirPath + "/*.yaml") if err != nil { - return errors.Wrap(err, "while globbing acquis_dir") + return fmt.Errorf("while globbing acquis_dir: %w", err) } c.Crowdsec.AcquisitionFiles = append(c.Crowdsec.AcquisitionFiles, files...) files, err = filepath.Glob(c.Crowdsec.AcquisitionDirPath + "/*.yml") if err != nil { - return errors.Wrap(err, "while globbing acquis_dir") + return fmt.Errorf("while globbing acquis_dir: %w", err) } c.Crowdsec.AcquisitionFiles = append(c.Crowdsec.AcquisitionFiles, files...) 
} @@ -99,7 +98,7 @@ func (c *Config) LoadCrowdsec() error { } if err = c.LoadSimulation(); err != nil { - return errors.Wrap(err, "load error (simulation)") + return fmt.Errorf("load error (simulation): %w", err) } c.Crowdsec.ConfigDir = c.ConfigPaths.ConfigDir @@ -129,7 +128,7 @@ func (c *Config) LoadCrowdsec() error { } *k, err = filepath.Abs(*k) if err != nil { - return errors.Wrapf(err, "failed to get absolute path of '%s'", *k) + return fmt.Errorf("failed to get absolute path of '%s': %w", *k, err) } } @@ -137,7 +136,7 @@ func (c *Config) LoadCrowdsec() error { for i, file := range c.Crowdsec.AcquisitionFiles { f, err := filepath.Abs(file) if err != nil { - return errors.Wrapf(err, "failed to get absolute path of '%s'", file) + return fmt.Errorf("failed to get absolute path of '%s': %w", file, err) } c.Crowdsec.AcquisitionFiles[i] = f } @@ -147,7 +146,7 @@ func (c *Config) LoadCrowdsec() error { } if err := c.LoadHub(); err != nil { - return errors.Wrap(err, "while loading hub") + return fmt.Errorf("while loading hub: %w", err) } c.Crowdsec.ContextToSend = make(map[string][]string, 0) @@ -186,11 +185,11 @@ func (c *CrowdsecServiceCfg) DumpContextConfigFile() error { var err error if out, err = yaml.Marshal(c.ContextToSend); err != nil { - return errors.Wrapf(err, "while marshaling ConsoleConfig (for %s)", c.ConsoleContextPath) + return fmt.Errorf("while marshaling ConsoleConfig (for %s): %w", c.ConsoleContextPath, err) } if err := os.WriteFile(c.ConsoleContextPath, out, 0600); err != nil { - return errors.Wrapf(err, "while dumping console config to %s", c.ConsoleContextPath) + return fmt.Errorf("while dumping console config to %s: %w", c.ConsoleContextPath, err) } log.Infof("%s file saved", c.ConsoleContextPath) diff --git a/pkg/csconfig/crowdsec_service_test.go b/pkg/csconfig/crowdsec_service_test.go index b9701ee52..5423d1a45 100644 --- a/pkg/csconfig/crowdsec_service_test.go +++ b/pkg/csconfig/crowdsec_service_test.go @@ -5,13 +5,13 @@ import ( 
"path/filepath" "testing" - "github.com/crowdsecurity/crowdsec/pkg/cstest" - "github.com/crowdsecurity/crowdsec/pkg/types" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + "github.com/stretchr/testify/require" ) func TestLoadCrowdsec(t *testing.T) { - falseBoolPtr := false acquisFullPath, err := filepath.Abs("./tests/acquis.yaml") require.NoError(t, err) @@ -63,7 +63,7 @@ func TestLoadCrowdsec(t *testing.T) { }, }, expectedResult: &CrowdsecServiceCfg{ - Enable: types.BoolPtr(true), + Enable: ptr.Of(true), AcquisitionDirPath: "", ConsoleContextPath: contextFileFullPath, AcquisitionFilePath: acquisFullPath, @@ -81,7 +81,7 @@ func TestLoadCrowdsec(t *testing.T) { "source_ip": {"evt.Parsed.source_ip"}, }, SimulationConfig: &SimulationConfig{ - Simulation: &falseBoolPtr, + Simulation: ptr.Of(false), }, }, }, @@ -106,7 +106,7 @@ func TestLoadCrowdsec(t *testing.T) { }, }, expectedResult: &CrowdsecServiceCfg{ - Enable: types.BoolPtr(true), + Enable: ptr.Of(true), AcquisitionDirPath: acquisDirFullPath, AcquisitionFilePath: acquisFullPath, ConsoleContextPath: contextFileFullPath, @@ -124,7 +124,7 @@ func TestLoadCrowdsec(t *testing.T) { }, SimulationFilePath: "./tests/simulation.yaml", SimulationConfig: &SimulationConfig{ - Simulation: &falseBoolPtr, + Simulation: ptr.Of(false), }, }, }, @@ -147,7 +147,7 @@ func TestLoadCrowdsec(t *testing.T) { }, }, expectedResult: &CrowdsecServiceCfg{ - Enable: types.BoolPtr(true), + Enable: ptr.Of(true), AcquisitionDirPath: "", AcquisitionFilePath: "", ConfigDir: configDirFullPath, @@ -165,7 +165,7 @@ func TestLoadCrowdsec(t *testing.T) { "source_ip": {"evt.Parsed.source_ip"}, }, SimulationConfig: &SimulationConfig{ - Simulation: &falseBoolPtr, + Simulation: ptr.Of(false), }, }, }, diff --git a/pkg/csconfig/database.go b/pkg/csconfig/database.go index e7f67d474..0f8866824 100644 --- a/pkg/csconfig/database.go +++ b/pkg/csconfig/database.go @@ -5,8 +5,9 @@ import ( "time" 
"entgo.io/ent/dialect" - "github.com/crowdsecurity/crowdsec/pkg/types" log "github.com/sirupsen/logrus" + + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" ) var DEFAULT_MAX_OPEN_CONNS = 100 @@ -56,7 +57,7 @@ func (c *Config) LoadDBConfig() error { } if c.DbConfig.MaxOpenConns == nil { - c.DbConfig.MaxOpenConns = types.IntPtr(DEFAULT_MAX_OPEN_CONNS) + c.DbConfig.MaxOpenConns = ptr.Of(DEFAULT_MAX_OPEN_CONNS) } if c.DbConfig.Type == "sqlite" { diff --git a/pkg/csconfig/database_test.go b/pkg/csconfig/database_test.go index b029f3883..d33c54424 100644 --- a/pkg/csconfig/database_test.go +++ b/pkg/csconfig/database_test.go @@ -5,8 +5,9 @@ import ( "strings" "testing" - "github.com/crowdsecurity/crowdsec/pkg/types" "github.com/stretchr/testify/assert" + + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" ) func TestLoadDBConfig(t *testing.T) { @@ -22,7 +23,7 @@ func TestLoadDBConfig(t *testing.T) { DbConfig: &DatabaseCfg{ Type: "sqlite", DbPath: "./tests/test.db", - MaxOpenConns: types.IntPtr(10), + MaxOpenConns: ptr.Of(10), }, Cscli: &CscliCfg{}, API: &APICfg{ @@ -32,7 +33,7 @@ func TestLoadDBConfig(t *testing.T) { expectedResult: &DatabaseCfg{ Type: "sqlite", DbPath: "./tests/test.db", - MaxOpenConns: types.IntPtr(10), + MaxOpenConns: ptr.Of(10), }, }, { diff --git a/pkg/csconfig/profiles.go b/pkg/csconfig/profiles.go index 16a4e454d..41725bcf2 100644 --- a/pkg/csconfig/profiles.go +++ b/pkg/csconfig/profiles.go @@ -5,8 +5,9 @@ import ( "fmt" "io" + "github.com/crowdsecurity/go-cs-lib/pkg/yamlpatch" + "github.com/crowdsecurity/crowdsec/pkg/models" - "github.com/crowdsecurity/crowdsec/pkg/yamlpatch" "github.com/pkg/errors" "gopkg.in/yaml.v2" ) diff --git a/pkg/csconfig/prometheus.go b/pkg/csconfig/prometheus.go index 31df85110..eea768ab7 100644 --- a/pkg/csconfig/prometheus.go +++ b/pkg/csconfig/prometheus.go @@ -2,7 +2,6 @@ package csconfig import "fmt" -/**/ type PrometheusCfg struct { Enabled bool `yaml:"enabled"` Level string `yaml:"level"` //aggregated|full @@ -16,6 +15,5 
@@ func (c *Config) LoadPrometheus() error { c.Cscli.PrometheusUrl = fmt.Sprintf("http://%s:%d", c.Prometheus.ListenAddr, c.Prometheus.ListenPort) } } - return nil } diff --git a/pkg/csconfig/prometheus_test.go b/pkg/csconfig/prometheus_test.go index f7a483d32..3df9c298b 100644 --- a/pkg/csconfig/prometheus_test.go +++ b/pkg/csconfig/prometheus_test.go @@ -1,20 +1,19 @@ package csconfig import ( - "fmt" - "strings" "testing" - "github.com/stretchr/testify/assert" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + + "github.com/stretchr/testify/require" ) func TestLoadPrometheus(t *testing.T) { - tests := []struct { - name string - Input *Config - expectedResult string - err string + name string + Input *Config + expectedURL string + expectedErr string }{ { name: "basic valid configuration", @@ -27,29 +26,17 @@ func TestLoadPrometheus(t *testing.T) { }, Cscli: &CscliCfg{}, }, - expectedResult: "http://127.0.0.1:6060", + expectedURL: "http://127.0.0.1:6060", }, } - for idx, test := range tests { - err := test.Input.LoadPrometheus() - if err == nil && test.err != "" { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("%d/%d expected error, didn't get it", idx, len(tests)) - } else if test.err != "" { - if !strings.HasPrefix(fmt.Sprintf("%s", err), test.err) { - fmt.Printf("TEST '%s': NOK\n", test.name) - t.Fatalf("%d/%d expected '%s' got '%s'", idx, len(tests), - test.err, - fmt.Sprintf("%s", err)) - } - } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + err := tc.Input.LoadPrometheus() + cstest.RequireErrorContains(t, err, tc.expectedErr) - isOk := assert.Equal(t, test.expectedResult, test.Input.Cscli.PrometheusUrl) - if !isOk { - t.Fatalf("test '%s' failed\n", test.name) - } else { - fmt.Printf("TEST '%s': OK\n", test.name) - } + require.Equal(t, tc.expectedURL, tc.Input.Cscli.PrometheusUrl) + }) } } diff --git a/pkg/csconfig/simulation.go b/pkg/csconfig/simulation.go index 69c520c5c..f291a4e16 100644 --- 
a/pkg/csconfig/simulation.go +++ b/pkg/csconfig/simulation.go @@ -4,7 +4,7 @@ import ( "fmt" "path/filepath" - "github.com/crowdsecurity/crowdsec/pkg/yamlpatch" + "github.com/crowdsecurity/go-cs-lib/pkg/yamlpatch" "gopkg.in/yaml.v2" ) diff --git a/pkg/csconfig/simulation_test.go b/pkg/csconfig/simulation_test.go index 5256806d0..8b2025993 100644 --- a/pkg/csconfig/simulation_test.go +++ b/pkg/csconfig/simulation_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/crowdsecurity/crowdsec/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" ) func TestSimulationLoading(t *testing.T) { diff --git a/pkg/csplugin/broker.go b/pkg/csplugin/broker.go index 716368a92..6bc3b1296 100644 --- a/pkg/csplugin/broker.go +++ b/pkg/csplugin/broker.go @@ -5,7 +5,7 @@ import ( "fmt" "io" "os" - "path/filepath" + "reflect" "strings" "sync" "text/template" @@ -19,8 +19,10 @@ import ( "gopkg.in/tomb.v2" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/csstring" + "github.com/crowdsecurity/go-cs-lib/pkg/slicetools" + "github.com/crowdsecurity/crowdsec/pkg/csconfig" - "github.com/crowdsecurity/crowdsec/pkg/csstring" "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/protobufs" "github.com/crowdsecurity/crowdsec/pkg/types" @@ -115,8 +117,15 @@ loop: pb.alertsByPluginName[pluginName] = make([]*models.Alert, 0) pluginMutex.Unlock() go func() { - if err := pb.pushNotificationsToPlugin(pluginName, tmpAlerts); err != nil { - log.WithField("plugin:", pluginName).Error(err) + //Chunk alerts to respect group_threshold + threshold := pb.pluginConfigByName[pluginName].GroupThreshold + if threshold == 0 { + threshold = 1 + } + for _, chunk := range slicetools.Chunks(tmpAlerts, threshold) { + if err := pb.pushNotificationsToPlugin(pluginName, chunk); err != nil { + log.WithField("plugin:", pluginName).Error(err) + } } }() @@ -183,14 +192,14 @@ func (pb *PluginBroker) 
loadConfig(path string) error { return err } for _, pluginConfig := range pluginConfigs { + setRequiredFields(&pluginConfig) + if _, ok := pb.pluginConfigByName[pluginConfig.Name]; ok { + log.Warningf("notification '%s' is defined multiple times", pluginConfig.Name) + } + pb.pluginConfigByName[pluginConfig.Name] = pluginConfig if !pb.profilesContainPlugin(pluginConfig.Name) { continue } - setRequiredFields(&pluginConfig) - if _, ok := pb.pluginConfigByName[pluginConfig.Name]; ok { - log.Warnf("several configs for notification %s found ", pluginConfig.Name) - } - pb.pluginConfigByName[pluginConfig.Name] = pluginConfig } } err = pb.verifyPluginConfigsWithProfile() @@ -358,6 +367,10 @@ func ParsePluginConfigFile(path string) ([]PluginConfig, error) { } return []PluginConfig{}, fmt.Errorf("while decoding %s got error %s", path, err) } + // if the yaml document is empty, skip + if reflect.DeepEqual(pc, PluginConfig{}) { + continue + } parsedConfigs = append(parsedConfigs, pc) } return parsedConfigs, nil @@ -371,23 +384,6 @@ func setRequiredFields(pluginCfg *PluginConfig) { if pluginCfg.TimeOut == time.Second*0 { pluginCfg.TimeOut = time.Second * 5 } - -} - -// helper which gives paths to all files in the given directory non-recursively -func listFilesAtPath(path string) ([]string, error) { - filePaths := make([]string, 0) - files, err := os.ReadDir(path) - if err != nil { - return nil, err - } - for _, file := range files { - if file.IsDir() { - continue - } - filePaths = append(filePaths, filepath.Join(path, file.Name())) - } - return filePaths, nil } func getUUID() (string, error) { diff --git a/pkg/csplugin/broker_suite_test.go b/pkg/csplugin/broker_suite_test.go new file mode 100644 index 000000000..4c7cdd6eb --- /dev/null +++ b/pkg/csplugin/broker_suite_test.go @@ -0,0 +1,164 @@ +package csplugin + +import ( + "io" + "os" + "os/exec" + "path" + "runtime" + "testing" + + "github.com/stretchr/testify/require" + "github.com/stretchr/testify/suite" + + 
"github.com/crowdsecurity/crowdsec/pkg/csconfig" +) + + +type PluginSuite struct { + suite.Suite + + // where the plugin is built - temporary directory for the suite + buildDir string + // full path to the built plugin binary + builtBinary string + + runDir string // temporary directory for each test + pluginDir string // (config_paths.plugin_dir) + notifDir string // (config_paths.notification_dir) + pluginBinary string // full path to the plugin binary (unique for each test) + pluginConfig string // full path to the notification config (unique for each test) + + pluginBroker *PluginBroker +} + + +func TestPluginSuite(t *testing.T) { + suite.Run(t, new(PluginSuite)) +} + + +func (s *PluginSuite) SetupSuite() { + var err error + + t := s.T() + + s.buildDir, err = os.MkdirTemp("", "cs_plugin_test_build") + require.NoError(t, err) + + s.builtBinary = path.Join(s.buildDir, "notification-dummy") + + if runtime.GOOS == "windows" { + s.builtBinary += ".exe" + } + + cmd := exec.Command("go", "build", "-o", s.builtBinary, "../../plugins/notifications/dummy/") + err = cmd.Run() + require.NoError(t, err, "while building dummy plugin") +} + + +func (s *PluginSuite) TearDownSuite() { + t := s.T() + err := os.RemoveAll(s.buildDir) + require.NoError(t, err) +} + + +func copyFile(src string, dst string) error { + s, err := os.Open(src) + if err != nil { + return err + } + defer s.Close() + + d, err := os.Create(dst) + if err != nil { + return err + } + defer d.Close() + + _, err = io.Copy(d, s) + if err != nil { + return err + } + + err = d.Sync() + if err != nil { + return err + } + + return nil +} + +func (s *PluginSuite) SetupTest() { + s.SetupSubTest() +} + +func (s *PluginSuite) TearDownTest() { + s.TearDownSubTest() +} + + +func (s *PluginSuite) SetupSubTest() { + var err error + t := s.T() + + s.runDir, err = os.MkdirTemp("", "cs_plugin_test") + require.NoError(t, err) + + s.pluginDir = path.Join(s.runDir, "bin") + err = os.MkdirAll(path.Join(s.runDir, "bin"), 0o755) + 
require.NoError(t, err, "while creating bin dir") + + s.notifDir = path.Join(s.runDir, "config") + err = os.MkdirAll(s.notifDir, 0o755) + require.NoError(t, err, "while creating config dir") + + s.pluginBinary = path.Join(s.pluginDir, "notification-dummy") + + if runtime.GOOS == "windows" { + s.pluginBinary += ".exe" + } + + err = copyFile(s.builtBinary, s.pluginBinary) + require.NoError(t, err, "while copying built binary") + err = os.Chmod(s.pluginBinary, 0o744) + require.NoError(t, err, "chmod 0744 %s", s.pluginBinary) + + s.pluginConfig = path.Join(s.notifDir, "dummy.yaml") + err = copyFile("testdata/dummy.yaml", s.pluginConfig) + require.NoError(t, err, "while copying plugin config") +} + +func (s *PluginSuite) TearDownSubTest() { + t := s.T() + if s.pluginBroker != nil { + s.pluginBroker.Kill() + s.pluginBroker = nil + } + + err := os.RemoveAll(s.runDir) + if runtime.GOOS != "windows" { + require.NoError(t, err) + } + + os.Remove("./out") +} + +func (s *PluginSuite) InitBroker(procCfg *csconfig.PluginCfg) (*PluginBroker, error) { + pb := PluginBroker{} + if procCfg == nil { + procCfg = &csconfig.PluginCfg{} + } + profiles := csconfig.NewDefaultConfig().API.Server.Profiles + profiles = append(profiles, &csconfig.ProfileCfg{ + Notifications: []string{"dummy_default"}, + }) + err := pb.Init(procCfg, profiles, &csconfig.ConfigurationPaths{ + PluginDir: s.pluginDir, + NotificationDir: s.notifDir, + }) + s.pluginBroker = &pb + return s.pluginBroker, err +} diff --git a/pkg/csplugin/broker_test.go b/pkg/csplugin/broker_test.go index b2e9a7d94..467fadf45 100644 --- a/pkg/csplugin/broker_test.go +++ b/pkg/csplugin/broker_test.go @@ -3,13 +3,10 @@ package csplugin import ( + "bytes" "encoding/json" + "io" "os" - "os/exec" - "path" - "path/filepath" - "reflect" - "runtime" "testing" "time" @@ -19,121 +16,45 @@ import ( "gopkg.in/tomb.v2" "gopkg.in/yaml.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/crowdsec/pkg/csconfig" - 
"github.com/crowdsecurity/crowdsec/pkg/cstest" "github.com/crowdsecurity/crowdsec/pkg/models" ) -var testPath string -func setPluginPermTo744(t *testing.T) { - setPluginPermTo(t, "744") -} - -func setPluginPermTo722(t *testing.T) { - setPluginPermTo(t, "722") -} - -func setPluginPermTo724(t *testing.T) { - setPluginPermTo(t, "724") -} -func TestGetPluginNameAndTypeFromPath(t *testing.T) { - setUp(t) - defer tearDown(t) - type args struct { - path string - } - tests := []struct { - name string - args args - want string - want1 string - expectedErr string - }{ - { - name: "valid plugin name, single dash", - args: args{ - path: path.Join(testPath, "notification-gitter"), - }, - want: "notification", - want1: "gitter", - }, - { - name: "invalid plugin name", - args: args{ - path: "./tests/gitter", - }, - expectedErr: "plugin name ./tests/gitter is invalid. Name should be like {type-name}", - }, - { - name: "valid plugin name, multiple dash", - args: args{ - path: "./tests/notification-instant-slack", - }, - want: "notification-instant", - want1: "slack", - }, - } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - got, got1, err := getPluginTypeAndSubtypeFromPath(tc.args.path) - cstest.RequireErrorContains(t, err, tc.expectedErr) - - assert.Equal(t, tc.want, got) - assert.Equal(t, tc.want1, got1) - }) +func (s *PluginSuite) permissionSetter(perm os.FileMode) func(*testing.T) { + return func(t *testing.T) { + err := os.Chmod(s.pluginBinary, perm) + require.NoError(t, err, "chmod %s %s", perm, s.pluginBinary) } } -func TestListFilesAtPath(t *testing.T) { - setUp(t) - defer tearDown(t) - type args struct { - path string - } - tests := []struct { - name string - args args - want []string - expectedErr string - }{ - { - name: "valid directory", - args: args{ - path: testPath, - }, - want: []string{ - filepath.Join(testPath, "notification-gitter"), - filepath.Join(testPath, "slack"), - }, - }, - { - name: "invalid directory", - args: args{ - path: 
"./foo/bar/", - }, - expectedErr: "open ./foo/bar/: " + cstest.FileNotFoundMessage, - }, - } - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - got, err := listFilesAtPath(tc.args.path) - cstest.RequireErrorContains(t, err, tc.expectedErr) +func (s *PluginSuite) readconfig() (PluginConfig) { + var config PluginConfig + t := s.T() - if !reflect.DeepEqual(got, tc.want) { - t.Errorf("listFilesAtPath() = %v, want %v", got, tc.want) - } - }) - } + orig, err := os.ReadFile(s.pluginConfig) + require.NoError(t, err,"unable to read config file %s", s.pluginConfig) + + err = yaml.Unmarshal(orig, &config) + require.NoError(t, err,"unable to unmarshal config file") + + return config } -func TestBrokerInit(t *testing.T) { - if runtime.GOOS == "windows" { - t.Skip("Skipping test on windows") - } +func (s *PluginSuite) writeconfig(config PluginConfig) { + t := s.T() + data, err := yaml.Marshal(&config) + require.NoError(t, err,"unable to marshal config file") + + err = os.WriteFile(s.pluginConfig, data, 0644) + require.NoError(t, err,"unable to write config file %s", s.pluginConfig) +} + + +func (s *PluginSuite) TestBrokerInit() { tests := []struct { name string action func(*testing.T) @@ -142,28 +63,30 @@ func TestBrokerInit(t *testing.T) { }{ { name: "valid config", - action: setPluginPermTo744, }, { name: "group writable binary", expectedErr: "notification-dummy is world writable", - action: setPluginPermTo722, + action: s.permissionSetter(0o722), }, { name: "group writable binary", expectedErr: "notification-dummy is group writable", - action: setPluginPermTo724, + action: s.permissionSetter(0o724), }, { name: "no plugin dir", expectedErr: cstest.FileNotFoundMessage, - action: tearDown, + action: func(t *testing.T) { + err := os.RemoveAll(s.runDir) + require.NoError(t, err) + }, }, { name: "no plugin binary", expectedErr: "binary for plugin dummy_default not found", action: func(t *testing.T) { - err := os.Remove(path.Join(testPath, 
"notification-dummy")) + err := os.Remove(s.pluginBinary) require.NoError(t, err) }, }, @@ -173,7 +96,6 @@ func TestBrokerInit(t *testing.T) { procCfg: csconfig.PluginCfg{ User: "123445555551122toto", }, - action: setPluginPermTo744, }, { name: "only specify group", @@ -181,7 +103,6 @@ func TestBrokerInit(t *testing.T) { procCfg: csconfig.PluginCfg{ Group: "123445555551122toto", }, - action: setPluginPermTo744, }, { name: "Fails to run as root", @@ -190,7 +111,6 @@ func TestBrokerInit(t *testing.T) { User: "root", Group: "root", }, - action: setPluginPermTo744, }, { name: "Invalid user and group", @@ -199,7 +119,6 @@ func TestBrokerInit(t *testing.T) { User: "toto1234", Group: "toto1234", }, - action: setPluginPermTo744, }, { name: "Valid user and invalid group", @@ -208,79 +127,33 @@ func TestBrokerInit(t *testing.T) { User: "nobody", Group: "toto1234", }, - action: setPluginPermTo744, }, } for _, tc := range tests { tc := tc - t.Run(tc.name, func(t *testing.T) { - defer tearDown(t) - buildDummyPlugin(t) + s.Run(tc.name, func() { + t := s.T() if tc.action != nil { tc.action(t) } - pb := PluginBroker{} - profiles := csconfig.NewDefaultConfig().API.Server.Profiles - profiles = append(profiles, &csconfig.ProfileCfg{ - Notifications: []string{"dummy_default"}, - }) - err := pb.Init(&tc.procCfg, profiles, &csconfig.ConfigurationPaths{ - PluginDir: testPath, - NotificationDir: "./tests/notifications", - }) - defer pb.Kill() + _, err := s.InitBroker(&tc.procCfg) cstest.RequireErrorContains(t, err, tc.expectedErr) }) } } -func readconfig(t *testing.T, path string) ([]byte, PluginConfig) { - var config PluginConfig - orig, err := os.ReadFile("tests/notifications/dummy.yaml") - require.NoError(t, err,"unable to read config file %s", path) - - err = yaml.Unmarshal(orig, &config) - require.NoError(t, err,"unable to unmarshal config file") - - return orig, config -} - -func writeconfig(t *testing.T, config PluginConfig, path string) { - data, err := yaml.Marshal(&config) - 
require.NoError(t, err,"unable to marshal config file") - - err = os.WriteFile(path, data, 0644) - require.NoError(t, err,"unable to write config file %s", path) -} - -func TestBrokerNoThreshold(t *testing.T) { +func (s *PluginSuite) TestBrokerNoThreshold() { var alerts []models.Alert DefaultEmptyTicker = 50 * time.Millisecond - buildDummyPlugin(t) - setPluginPermTo744(t) - defer tearDown(t) - - // init - pluginCfg := csconfig.PluginCfg{} - pb := PluginBroker{} - profiles := csconfig.NewDefaultConfig().API.Server.Profiles - profiles = append(profiles, &csconfig.ProfileCfg{ - Notifications: []string{"dummy_default"}, - }) - - // default config - err := pb.Init(&pluginCfg, profiles, &csconfig.ConfigurationPaths{ - PluginDir: testPath, - NotificationDir: "./tests/notifications", - }) + t := s.T() + pb, err := s.InitBroker(nil) assert.NoError(t, err) - tomb := tomb.Tomb{} + tomb := tomb.Tomb{} go pb.Run(&tomb) - defer pb.Kill() // send one item, it should be processed right now pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} @@ -291,7 +164,7 @@ func TestBrokerNoThreshold(t *testing.T) { require.NoError(t, err, "Error reading file") err = json.Unmarshal(content, &alerts) - assert.NoError(t, err) + require.NoError(t, err) assert.Len(t, alerts, 1) // remove it @@ -312,34 +185,24 @@ func TestBrokerNoThreshold(t *testing.T) { assert.Len(t, alerts, 1) } -func TestBrokerRunGroupAndTimeThreshold_TimeFirst(t *testing.T) { +func (s *PluginSuite) TestBrokerRunGroupAndTimeThreshold_TimeFirst() { // test grouping by "time" DefaultEmptyTicker = 50 * time.Millisecond - buildDummyPlugin(t) - setPluginPermTo744(t) - defer tearDown(t) - // init - pluginCfg := csconfig.PluginCfg{} - pb := PluginBroker{} - profiles := csconfig.NewDefaultConfig().API.Server.Profiles - profiles = append(profiles, &csconfig.ProfileCfg{ - Notifications: []string{"dummy_default"}, - }) + t := s.T() + // set groupwait and groupthreshold, should honor whichever comes first - raw, cfg 
:= readconfig(t, "tests/notifications/dummy.yaml") + cfg := s.readconfig() cfg.GroupThreshold = 4 cfg.GroupWait = 1 * time.Second - writeconfig(t, cfg, "tests/notifications/dummy.yaml") - err := pb.Init(&pluginCfg, profiles, &csconfig.ConfigurationPaths{ - PluginDir: testPath, - NotificationDir: "./tests/notifications", - }) - assert.NoError(t, err) - tomb := tomb.Tomb{} + s.writeconfig(cfg) + pb, err := s.InitBroker(nil) + assert.NoError(t, err) + + tomb := tomb.Tomb{} go pb.Run(&tomb) - defer pb.Kill() + // send data pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} @@ -356,40 +219,24 @@ func TestBrokerRunGroupAndTimeThreshold_TimeFirst(t *testing.T) { err = json.Unmarshal(content, &alerts) assert.NoError(t, err) assert.Len(t, alerts, 3) - - // restore config - err = os.WriteFile("tests/notifications/dummy.yaml", raw, 0644) - require.NoError(t, err,"unable to write config file") } -func TestBrokerRunGroupAndTimeThreshold_CountFirst(t *testing.T) { +func (s *PluginSuite) TestBrokerRunGroupAndTimeThreshold_CountFirst() { DefaultEmptyTicker = 50 * time.Millisecond - buildDummyPlugin(t) - setPluginPermTo(t, "744") - defer tearDown(t) - // init - pluginCfg := csconfig.PluginCfg{} - pb := PluginBroker{} - profiles := csconfig.NewDefaultConfig().API.Server.Profiles - profiles = append(profiles, &csconfig.ProfileCfg{ - Notifications: []string{"dummy_default"}, - }) + t := s.T() // set groupwait and groupthreshold, should honor whichever comes first - raw, cfg := readconfig(t, "tests/notifications/dummy.yaml") + cfg := s.readconfig() cfg.GroupThreshold = 4 cfg.GroupWait = 4 * time.Second - writeconfig(t, cfg, "tests/notifications/dummy.yaml") - err := pb.Init(&pluginCfg, profiles, &csconfig.ConfigurationPaths{ - PluginDir: testPath, - NotificationDir: "./tests/notifications", - }) - assert.NoError(t, err) - tomb := tomb.Tomb{} + s.writeconfig(cfg) + pb, err := 
s.InitBroker(nil) + assert.NoError(t, err) + + tomb := tomb.Tomb{} go pb.Run(&tomb) - defer pb.Kill() // send data pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} @@ -410,94 +257,75 @@ func TestBrokerRunGroupAndTimeThreshold_CountFirst(t *testing.T) { err = json.Unmarshal(content, &alerts) assert.NoError(t, err) assert.Len(t, alerts, 4) - - // restore config - err = os.WriteFile("tests/notifications/dummy.yaml", raw, 0644) - require.NoError(t, err,"unable to write config file") } -func TestBrokerRunGroupThreshold(t *testing.T) { +func (s *PluginSuite) TestBrokerRunGroupThreshold() { // test grouping by "size" DefaultEmptyTicker = 50 * time.Millisecond - buildDummyPlugin(t) - setPluginPermTo(t, "744") - defer tearDown(t) - // init - pluginCfg := csconfig.PluginCfg{} - pb := PluginBroker{} - profiles := csconfig.NewDefaultConfig().API.Server.Profiles - profiles = append(profiles, &csconfig.ProfileCfg{ - Notifications: []string{"dummy_default"}, - }) + t := s.T() // set groupwait - raw, cfg := readconfig(t, "tests/notifications/dummy.yaml") + cfg := s.readconfig() cfg.GroupThreshold = 4 - writeconfig(t, cfg, "tests/notifications/dummy.yaml") - err := pb.Init(&pluginCfg, profiles, &csconfig.ConfigurationPaths{ - PluginDir: testPath, - NotificationDir: "./tests/notifications", - }) + s.writeconfig(cfg) + pb, err := s.InitBroker(nil) assert.NoError(t, err) - tomb := tomb.Tomb{} + tomb := tomb.Tomb{} go pb.Run(&tomb) - defer pb.Kill() // send data pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} - time.Sleep(100 * time.Millisecond) + time.Sleep(time.Second) // because of group threshold, we shouldn't have data yet assert.NoFileExists(t, "./out") pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} - time.Sleep(100 * time.Millisecond) + 
pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} + pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} + time.Sleep(time.Second) // and now we should content, err := os.ReadFile("./out") require.NoError(t, err, "Error reading file") + decoder := json.NewDecoder(bytes.NewReader(content)) + var alerts []models.Alert - err = json.Unmarshal(content, &alerts) + + // two notifications, one with 4 alerts, one with 2 alerts + + err = decoder.Decode(&alerts) assert.NoError(t, err) assert.Len(t, alerts, 4) - // restore config - err = os.WriteFile("tests/notifications/dummy.yaml", raw, 0644) - require.NoError(t, err, "unable to write config file") + err = decoder.Decode(&alerts) + assert.NoError(t, err) + assert.Len(t, alerts, 2) + + err = decoder.Decode(&alerts) + assert.Equal(t, err, io.EOF) } -func TestBrokerRunTimeThreshold(t *testing.T) { +func (s *PluginSuite) TestBrokerRunTimeThreshold() { DefaultEmptyTicker = 50 * time.Millisecond - buildDummyPlugin(t) - setPluginPermTo(t, "744") - defer tearDown(t) - // init - pluginCfg := csconfig.PluginCfg{} - pb := PluginBroker{} - profiles := csconfig.NewDefaultConfig().API.Server.Profiles - profiles = append(profiles, &csconfig.ProfileCfg{ - Notifications: []string{"dummy_default"}, - }) + t := s.T() // set groupwait - raw, cfg := readconfig(t, "tests/notifications/dummy.yaml") + cfg := s.readconfig() cfg.GroupWait = 1 * time.Second - writeconfig(t, cfg, "tests/notifications/dummy.yaml") - err := pb.Init(&pluginCfg, profiles, &csconfig.ConfigurationPaths{ - PluginDir: testPath, - NotificationDir: "./tests/notifications", - }) - assert.NoError(t, err) - tomb := tomb.Tomb{} + s.writeconfig(cfg) + pb, err := s.InitBroker(nil) + assert.NoError(t, err) + + tomb := tomb.Tomb{} go pb.Run(&tomb) - defer pb.Kill() // send data pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} @@ -515,32 +343,18 @@ func TestBrokerRunTimeThreshold(t *testing.T) { err = 
json.Unmarshal(content, &alerts) assert.NoError(t, err) assert.Len(t, alerts, 1) - - // restore config - err = os.WriteFile("tests/notifications/dummy.yaml", raw, 0644) - require.NoError(t, err, "unable to write config file %s", err) } -func TestBrokerRunSimple(t *testing.T) { +func (s *PluginSuite) TestBrokerRunSimple() { DefaultEmptyTicker = 50 * time.Millisecond - buildDummyPlugin(t) - setPluginPermTo(t, "744") - defer tearDown(t) - pluginCfg := csconfig.PluginCfg{} - pb := PluginBroker{} - profiles := csconfig.NewDefaultConfig().API.Server.Profiles - profiles = append(profiles, &csconfig.ProfileCfg{ - Notifications: []string{"dummy_default"}, - }) - err := pb.Init(&pluginCfg, profiles, &csconfig.ConfigurationPaths{ - PluginDir: testPath, - NotificationDir: "./tests/notifications", - }) - assert.NoError(t, err) - tomb := tomb.Tomb{} + t := s.T() + + pb, err := s.InitBroker(nil) + assert.NoError(t, err) + + tomb := tomb.Tomb{} go pb.Run(&tomb) - defer pb.Kill() assert.NoFileExists(t, "./out") @@ -548,57 +362,26 @@ func TestBrokerRunSimple(t *testing.T) { pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} pb.PluginChannel <- ProfileAlert{ProfileID: uint(0), Alert: &models.Alert{}} - time.Sleep(time.Millisecond * 200) + // make it wait a bit, CI can be slow + time.Sleep(time.Second) content, err := os.ReadFile("./out") require.NoError(t, err, "Error reading file") + decoder := json.NewDecoder(bytes.NewReader(content)) + var alerts []models.Alert - err = json.Unmarshal(content, &alerts) + + // two notifications, one alert each + + err = decoder.Decode(&alerts) assert.NoError(t, err) - assert.Len(t, alerts, 2) -} - -func buildDummyPlugin(t *testing.T) { - dir, err := os.MkdirTemp("./tests", "cs_plugin_test") - require.NoError(t, err) - - cmd := exec.Command("go", "build", "-o", path.Join(dir, "notification-dummy"), "../../plugins/notifications/dummy/") - err = cmd.Run() - require.NoError(t, err, "while building dummy plugin") - - testPath = 
dir - os.Remove("./out") -} - -func setPluginPermTo(t *testing.T, perm string) { - if runtime.GOOS != "windows" { - err := exec.Command("chmod", perm, path.Join(testPath, "notification-dummy")).Run() - require.NoError(t, err, "chmod 744 %s", path.Join(testPath, "notification-dummy")) - } -} - -func setUp(t *testing.T) { - dir, err := os.MkdirTemp("./", "cs_plugin_test") - require.NoError(t, err) - - f, err := os.Create(path.Join(dir, "slack")) - require.NoError(t, err) - - f.Close() - f, err = os.Create(path.Join(dir, "notification-gitter")) - require.NoError(t, err) - - f.Close() - err = os.Mkdir(path.Join(dir, "dummy_dir"), 0666) - require.NoError(t, err) - - testPath = dir -} - -func tearDown(t *testing.T) { - err := os.RemoveAll(testPath) - require.NoError(t, err) - - os.Remove("./out") + assert.Len(t, alerts, 1) + + err = decoder.Decode(&alerts) + assert.NoError(t, err) + assert.Len(t, alerts, 1) + + err = decoder.Decode(&alerts) + assert.Equal(t, err, io.EOF) } diff --git a/pkg/csplugin/broker_win_test.go b/pkg/csplugin/broker_win_test.go index 2595db3f3..3d7498c0d 100644 --- a/pkg/csplugin/broker_win_test.go +++ b/pkg/csplugin/broker_win_test.go @@ -3,22 +3,21 @@ package csplugin import ( - "log" + "bytes" + "encoding/json" + "io" "os" - "os/exec" - "path" - "path/filepath" - "reflect" "testing" "time" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "gopkg.in/tomb.v2" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/crowdsec/pkg/csconfig" - "github.com/crowdsecurity/crowdsec/pkg/cstest" "github.com/crowdsecurity/crowdsec/pkg/models" - "github.com/crowdsecurity/crowdsec/pkg/types" ) /* @@ -26,188 +25,55 @@ Due to the complexity of file permission modification with go on windows, we onl not if it will actually reject plugins with invalid permissions */ -var testPath string - -func TestGetPluginNameAndTypeFromPath(t *testing.T) { - setUp() - defer tearDown() - type args struct { - path string - } - tests 
:= []struct { - name string - args args - want string - want1 string - wantErr bool - }{ - { - name: "valid plugin name, single dash", - args: args{ - path: path.Join(testPath, "notification-gitter"), - }, - want: "notification", - want1: "gitter", - wantErr: false, - }, - { - name: "invalid plugin name", - args: args{ - path: ".\\tests\\gitter.exe", - }, - want: "", - want1: "", - wantErr: true, - }, - { - name: "valid plugin name, multiple dash", - args: args{ - path: ".\\tests\\notification-instant-slack.exe", - }, - want: "notification-instant", - want1: "slack", - wantErr: false, - }, - } - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - got, got1, err := getPluginTypeAndSubtypeFromPath(tt.args.path) - if (err != nil) != tt.wantErr { - t.Errorf("getPluginNameAndTypeFromPath() error = %v, wantErr %v", err, tt.wantErr) - return - } - if got != tt.want { - t.Errorf("getPluginNameAndTypeFromPath() got = %v, want %v", got, tt.want) - } - if got1 != tt.want1 { - t.Errorf("getPluginNameAndTypeFromPath() got1 = %v, want %v", got1, tt.want1) - } - }) - } -} - -func TestListFilesAtPath(t *testing.T) { - setUp() - defer tearDown() - type args struct { - path string - } - tests := []struct { - name string - args args - want []string - wantErr bool - }{ - { - name: "valid directory", - args: args{ - path: testPath, - }, - want: []string{ - filepath.Join(testPath, "notification-gitter"), - filepath.Join(testPath, "slack"), - }, - }, - { - name: "invalid directory", - args: args{ - path: "./foo/bar/", - }, - wantErr: true, - }, - } - for _, tt := range tests { - tt := tt - t.Run(tt.name, func(t *testing.T) { - got, err := listFilesAtPath(tt.args.path) - if (err != nil) != tt.wantErr { - t.Errorf("listFilesAtPath() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("listFilesAtPath() = %v, want %v", got, tt.want) - } - }) - } -} - -func TestBrokerInit(t *testing.T) { +func (s *PluginSuite) 
TestBrokerInit() { tests := []struct { name string - action func() - errContains string - wantErr bool + action func(*testing.T) procCfg csconfig.PluginCfg + expectedErr string }{ { name: "valid config", - wantErr: false, }, { name: "no plugin dir", - wantErr: true, - errContains: cstest.FileNotFoundMessage, - action: tearDown, + expectedErr: cstest.PathNotFoundMessage, + action: func(t *testing.T) { + err := os.RemoveAll(s.runDir) + require.NoError(t, err) + }, }, { name: "no plugin binary", - wantErr: true, - errContains: "binary for plugin dummy_default not found", - action: func() { - err := os.Remove(path.Join(testPath, "notification-dummy.exe")) - if err != nil { - t.Fatal(err) - } + expectedErr: "binary for plugin dummy_default not found", + action: func(t *testing.T) { + err := os.Remove(s.pluginBinary) + require.NoError(t, err) }, }, } - for _, test := range tests { - test := test - t.Run(test.name, func(t *testing.T) { - defer tearDown() - buildDummyPlugin() - if test.action != nil { - test.action() + for _, tc := range tests { + tc := tc + s.Run(tc.name, func() { + t := s.T() + if tc.action != nil { + tc.action(t) } - pb := PluginBroker{} - profiles := csconfig.NewDefaultConfig().API.Server.Profiles - profiles = append(profiles, &csconfig.ProfileCfg{ - Notifications: []string{"dummy_default"}, - }) - err := pb.Init(&test.procCfg, profiles, &csconfig.ConfigurationPaths{ - PluginDir: testPath, - NotificationDir: "./tests/notifications", - }) - defer pb.Kill() - if test.wantErr { - assert.ErrorContains(t, err, test.errContains) - } else { - assert.NoError(t, err) - } - + _, err := s.InitBroker(&tc.procCfg) + cstest.RequireErrorContains(t, err, tc.expectedErr) }) } } -func TestBrokerRun(t *testing.T) { - buildDummyPlugin() - defer tearDown() - procCfg := csconfig.PluginCfg{} - pb := PluginBroker{} - profiles := csconfig.NewDefaultConfig().API.Server.Profiles - profiles = append(profiles, &csconfig.ProfileCfg{ - Notifications: []string{"dummy_default"}, - }) 
- err := pb.Init(&procCfg, profiles, &csconfig.ConfigurationPaths{ - PluginDir: testPath, - NotificationDir: "./tests/notifications", - }) +func (s *PluginSuite) TestBrokerRun() { + t := s.T() + + pb, err := s.InitBroker(nil) assert.NoError(t, err) + tomb := tomb.Tomb{} go pb.Run(&tomb) - defer pb.Kill() assert.NoFileExists(t, "./out") defer os.Remove("./out") @@ -217,46 +83,24 @@ func TestBrokerRun(t *testing.T) { time.Sleep(time.Second * 4) assert.FileExists(t, ".\\out") - assert.Equal(t, types.GetLineCountForFile(".\\out"), 2) -} -func buildDummyPlugin() { - dir, err := os.MkdirTemp(".\\tests", "cs_plugin_test") - if err != nil { - log.Fatal(err) - } - cmd := exec.Command("go", "build", "-o", path.Join(dir, "notification-dummy.exe"), "../../plugins/notifications/dummy/") - if err := cmd.Run(); err != nil { - log.Fatal(err) - } - testPath = dir -} + content, err := os.ReadFile("./out") + require.NoError(t, err, "Error reading file") -func setUp() { - dir, err := os.MkdirTemp("./", "cs_plugin_test") - if err != nil { - log.Fatal(err) - } - f, err := os.Create(path.Join(dir, "slack")) - if err != nil { - log.Fatal(err) - } - f.Close() - f, err = os.Create(path.Join(dir, "notification-gitter")) - if err != nil { - log.Fatal(err) - } - f.Close() - err = os.Mkdir(path.Join(dir, "dummy_dir"), 0666) - if err != nil { - log.Fatal(err) - } - testPath = dir -} + decoder := json.NewDecoder(bytes.NewReader(content)) -func tearDown() { - err := os.RemoveAll(testPath) - if err != nil { - log.Fatal(err) - } + var alerts []models.Alert + + // two notifications, one alert each + + err = decoder.Decode(&alerts) + assert.NoError(t, err) + assert.Len(t, alerts, 1) + + err = decoder.Decode(&alerts) + assert.NoError(t, err) + assert.Len(t, alerts, 1) + + err = decoder.Decode(&alerts) + assert.Equal(t, err, io.EOF) } diff --git a/pkg/csplugin/helpers.go b/pkg/csplugin/helpers.go index 857e9a53b..297742e8d 100644 --- a/pkg/csplugin/helpers.go +++ b/pkg/csplugin/helpers.go @@ -1,6 +1,7 
@@ package csplugin import ( + "os" "text/template" "github.com/crowdsecurity/crowdsec/pkg/exprhelpers" @@ -20,6 +21,7 @@ var helpers = template.FuncMap{ return metaValues }, "CrowdsecCTI": exprhelpers.CrowdsecCTI, + "Hostname": os.Hostname, } func funcMap() template.FuncMap { diff --git a/pkg/csplugin/listfiles.go b/pkg/csplugin/listfiles.go new file mode 100644 index 000000000..2dea44f4f --- /dev/null +++ b/pkg/csplugin/listfiles.go @@ -0,0 +1,22 @@ +package csplugin + +import ( + "os" + "path/filepath" +) + +// helper which gives paths to all files in the given directory non-recursively +func listFilesAtPath(path string) ([]string, error) { + filePaths := make([]string, 0) + files, err := os.ReadDir(path) + if err != nil { + return nil, err + } + for _, file := range files { + if ! file.IsDir() { + filePaths = append(filePaths, filepath.Join(path, file.Name())) + } + } + return filePaths, nil +} + diff --git a/pkg/csplugin/listfiles_test.go b/pkg/csplugin/listfiles_test.go new file mode 100644 index 000000000..8bcedaa1f --- /dev/null +++ b/pkg/csplugin/listfiles_test.go @@ -0,0 +1,57 @@ +package csplugin + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" +) + +func TestListFilesAtPath(t *testing.T) { + dir, err := os.MkdirTemp("", "test-listfiles") + require.NoError(t, err) + t.Cleanup(func() { + os.RemoveAll(dir) + }) + _, err = os.Create(filepath.Join(dir, "notification-gitter")) + require.NoError(t, err) + _, err = os.Create(filepath.Join(dir, "slack")) + require.NoError(t, err) + err = os.Mkdir(filepath.Join(dir, "somedir"), 0755) + require.NoError(t, err) + _, err = os.Create(filepath.Join(dir, "somedir", "inner")) + require.NoError(t, err) + + tests := []struct { + name string + path string + want []string + expectedErr string + }{ + { + name: "valid directory", + path: dir, + want: []string{ + filepath.Join(dir, 
"notification-gitter"), + filepath.Join(dir, "slack"), + }, + }, + { + name: "invalid directory", + path: "./foo/bar/", + expectedErr: "open ./foo/bar/: " + cstest.PathNotFoundMessage, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + got, err := listFilesAtPath(tc.path) + cstest.RequireErrorContains(t, err, tc.expectedErr) + assert.ElementsMatch(t, tc.want, got) + }) + } +} diff --git a/pkg/csplugin/tests/notifications/dummy.yaml b/pkg/csplugin/testdata/dummy.yaml similarity index 100% rename from pkg/csplugin/tests/notifications/dummy.yaml rename to pkg/csplugin/testdata/dummy.yaml diff --git a/pkg/csplugin/utils_test.go b/pkg/csplugin/utils_test.go new file mode 100644 index 000000000..b4ac1e7e7 --- /dev/null +++ b/pkg/csplugin/utils_test.go @@ -0,0 +1,49 @@ +//go:build linux || freebsd || netbsd || openbsd || solaris || !windows + +package csplugin + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" +) + +func TestGetPluginNameAndTypeFromPath(t *testing.T) { + tests := []struct { + name string + path string + want string + want1 string + expectedErr string + }{ + { + name: "valid plugin name, single dash", + path: "/path/to/notification-gitter", + want: "notification", + want1: "gitter", + }, + { + name: "invalid plugin name", + path: "/path/to/gitter", + expectedErr: "plugin name /path/to/gitter is invalid. 
Name should be like {type-name}", + }, + { + name: "valid plugin name, multiple dash", + path: "/path/to/notification-instant-slack", + want: "notification-instant", + want1: "slack", + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + got, got1, err := getPluginTypeAndSubtypeFromPath(tc.path) + cstest.RequireErrorContains(t, err, tc.expectedErr) + + assert.Equal(t, tc.want, got) + assert.Equal(t, tc.want1, got1) + }) + } +} diff --git a/pkg/csplugin/utils_windows_test.go b/pkg/csplugin/utils_windows_test.go new file mode 100644 index 000000000..9161fd45b --- /dev/null +++ b/pkg/csplugin/utils_windows_test.go @@ -0,0 +1,49 @@ +//go:build windows + +package csplugin + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" +) + +func TestGetPluginNameAndTypeFromPath(t *testing.T) { + tests := []struct { + name string + path string + want string + want1 string + expectedErr string + }{ + { + name: "valid plugin name, single dash", + path: "c:\\path\\to\\notification-gitter", + want: "notification", + want1: "gitter", + }, + { + name: "invalid plugin name", + path: "c:\\path\\to\\gitter.exe", + expectedErr: "plugin name c:\\path\\to\\gitter.exe is invalid. 
Name should be like {type-name}", + }, + { + name: "valid plugin name, multiple dash", + path: "c:\\path\\to\\notification-instant-slack.exe", + want: "notification-instant", + want1: "slack", + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + got, got1, err := getPluginTypeAndSubtypeFromPath(tc.path) + cstest.RequireErrorContains(t, err, tc.expectedErr) + + assert.Equal(t, tc.want, got) + assert.Equal(t, tc.want1, got1) + }) + } +} diff --git a/pkg/csstring/expand.go b/pkg/csstring/expand.go deleted file mode 100644 index 6f9037af0..000000000 --- a/pkg/csstring/expand.go +++ /dev/null @@ -1,75 +0,0 @@ -package csstring - -func seekClosingBracket(s string, i int) int { - for ; i < len(s); i++ { - if s[i] == '}' { - return i - } - } - - return -1 -} - -func seekEndVarname(s string, i int) int { - // envvar names are more strict but this is good enough - for ; i < len(s); i++ { - if (s[i] < 'a' || s[i] > 'z') && (s[i] < 'A' || s[i] > 'Z') && (s[i] < '0' || s[i] > '9') && s[i] != '_' { - break - } - } - - return i -} - -func replaceVarBracket(s string, i int, mapping func(string) (string, bool)) string { - j := seekClosingBracket(s, i+2) - if j < 0 { - return s - } - - if j < len(s) { - varName := s[i+2 : j] - if val, ok := mapping(varName); ok { - s = s[:i] + val + s[j+1:] - } - } - - return s -} - -func replaceVar(s string, i int, mapping func(string) (string, bool)) string { - if s[i+1] == '{' { - return replaceVarBracket(s, i, mapping) - } - - j := seekEndVarname(s, i+1) - if j < 0 { - return s - } - - if j > i+1 { - varName := s[i+1 : j] - if val, ok := mapping(varName); ok { - s = s[:i] + val + s[j:] - } - } - - return s -} - -// StrictExpand replaces ${var} or $var in the string according to the mapping -// function, like os.Expand. The difference is that the mapping function -// returns a boolean indicating whether the variable was found. -// If the variable was not found, the string is not modified. 
-// -// Whereas os.ExpandEnv uses os.Getenv, here we can use os.LookupEnv -// to distinguish between an empty variable and an undefined one. -func StrictExpand(s string, mapping func(string) (string, bool)) string { - for i := 0; i < len(s); i++ { - if s[i] == '$' { - s = replaceVar(s, i, mapping) - } - } - - return s -} diff --git a/pkg/csstring/expand_test.go b/pkg/csstring/expand_test.go deleted file mode 100644 index 72a281be9..000000000 --- a/pkg/csstring/expand_test.go +++ /dev/null @@ -1,98 +0,0 @@ -package csstring_test - -import ( - "fmt" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/crowdsecurity/crowdsec/pkg/csstring" -) - -func TestStrictExpand(t *testing.T) { - t.Parallel() - - testenv := func(key string) (string, bool) { - switch key { - case "USER": - return "testuser", true - case "HOME": - return "/home/testuser", true - case "empty": - return "", true - default: - return "", false - } - } - - home, _ := testenv("HOME") - user, _ := testenv("USER") - - tests := []struct { - input string - expected string - }{ - { - input: "$HOME", - expected: home, - }, - { - input: "${USER}", - expected: user, - }, - { - input: "Hello, $USER!", - expected: fmt.Sprintf("Hello, %s!", user), - }, - { - input: "My home directory is ${HOME}", - expected: fmt.Sprintf("My home directory is %s", home), - }, - { - input: "This is a $SINGLE_VAR string with ${HOME}", - expected: fmt.Sprintf("This is a $SINGLE_VAR string with %s", home), - }, - { - input: "This is a $SINGLE_VAR string with $HOME", - expected: fmt.Sprintf("This is a $SINGLE_VAR string with %s", home), - }, - { - input: "This variable does not exist: $NON_EXISTENT_VAR", - expected: "This variable does not exist: $NON_EXISTENT_VAR", - }, - { - input: "This is a $MULTI_VAR string with ${HOME} and ${USER}", - expected: fmt.Sprintf("This is a $MULTI_VAR string with %s and %s", home, user), - }, - { - input: "This is a ${MULTI_VAR} string with $HOME and $USER", - expected: fmt.Sprintf("This is 
a ${MULTI_VAR} string with %s and %s", home, user), - }, - { - input: "This is a plain string with no variables", - expected: "This is a plain string with no variables", - }, - { - input: "$empty", - expected: "", - }, - { - input: "", - expected: "", - }, - { - input: "$USER:$empty:$HOME", - expected: fmt.Sprintf("%s::%s", user, home), - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.input, func(t *testing.T) { - t.Parallel() - - output := csstring.StrictExpand(tc.input, testenv) - assert.Equal(t, tc.expected, output) - }) - } -} diff --git a/pkg/cstest/filenotfound_unix.go b/pkg/cstest/filenotfound_unix.go deleted file mode 100644 index fcf1a4119..000000000 --- a/pkg/cstest/filenotfound_unix.go +++ /dev/null @@ -1,5 +0,0 @@ -//go:build unix || linux || freebsd || netbsd || openbsd || solaris - -package cstest - -const FileNotFoundMessage = "no such file or directory" diff --git a/pkg/cstest/filenotfound_windows.go b/pkg/cstest/filenotfound_windows.go deleted file mode 100644 index 31816c014..000000000 --- a/pkg/cstest/filenotfound_windows.go +++ /dev/null @@ -1,5 +0,0 @@ -//go:build windows - -package cstest - -const FileNotFoundMessage = "The system cannot find the file specified." 
diff --git a/pkg/cstest/utils.go b/pkg/cstest/utils.go deleted file mode 100644 index 068c2e284..000000000 --- a/pkg/cstest/utils.go +++ /dev/null @@ -1,99 +0,0 @@ -package cstest - -import ( - "strings" - "testing" - "text/template" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - logtest "github.com/sirupsen/logrus/hooks/test" -) - -func AssertErrorContains(t *testing.T, err error, expectedErr string) { - t.Helper() - - if expectedErr != "" { - assert.ErrorContains(t, err, expectedErr) - return - } - - assert.NoError(t, err) -} - -func AssertErrorMessage(t *testing.T, err error, expectedErr string) { - t.Helper() - - if expectedErr != "" { - errmsg := "" - if err != nil { - errmsg = err.Error() - } - assert.Equal(t, expectedErr, errmsg) - return - } - - require.NoError(t, err) -} - -func RequireErrorContains(t *testing.T, err error, expectedErr string) { - t.Helper() - - if expectedErr != "" { - require.ErrorContains(t, err, expectedErr) - return - } - - require.NoError(t, err) -} - -func RequireErrorMessage(t *testing.T, err error, expectedErr string) { - t.Helper() - - if expectedErr != "" { - errmsg := "" - if err != nil { - errmsg = err.Error() - } - require.Equal(t, expectedErr, errmsg) - return - } - - require.NoError(t, err) -} - -func RequireLogContains(t *testing.T, hook *logtest.Hook, expected string) { - t.Helper() - - // look for a log entry that matches the expected message - for _, entry := range hook.AllEntries() { - if strings.Contains(entry.Message, expected) { - return - } - } - - // show all hook entries, in case the test fails we'll need them - for _, entry := range hook.AllEntries() { - t.Logf("log entry: %s", entry.Message) - } - - require.Fail(t, "no log entry found with message", expected) -} - -// Interpolate fills a string template with the given values, can be map or struct. 
-// example: Interpolate("{{.Name}}", map[string]string{"Name": "JohnDoe"}) -func Interpolate(s string, data interface{}) (string, error) { - tmpl, err := template.New("").Parse(s) - if err != nil { - return "", err - } - - var b strings.Builder - err = tmpl.Execute(&b, data) - if err != nil { - return "", err - } - - return b.String(), nil -} diff --git a/pkg/cticlient/client_test.go b/pkg/cticlient/client_test.go index a487f1098..a8f22e094 100644 --- a/pkg/cticlient/client_test.go +++ b/pkg/cticlient/client_test.go @@ -11,9 +11,9 @@ import ( "testing" log "github.com/sirupsen/logrus" - - "github.com/crowdsecurity/crowdsec/pkg/types" "github.com/stretchr/testify/assert" + + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" ) const validApiKey = "my-api-key" @@ -202,12 +202,12 @@ func TestFireOk(t *testing.T) { assert.Equal(t, len(data.Items), 3) assert.Equal(t, data.Items[0].Ip, "1.2.3.4") //page 1 is the default - data, err = cticlient.Fire(FireParams{Page: types.IntPtr(1)}) + data, err = cticlient.Fire(FireParams{Page: ptr.Of(1)}) assert.Equal(t, err, nil) assert.Equal(t, len(data.Items), 3) assert.Equal(t, data.Items[0].Ip, "1.2.3.4") //page 2 - data, err = cticlient.Fire(FireParams{Page: types.IntPtr(2)}) + data, err = cticlient.Fire(FireParams{Page: ptr.Of(2)}) assert.Equal(t, err, nil) assert.Equal(t, len(data.Items), 3) assert.Equal(t, data.Items[0].Ip, "4.2.3.4") @@ -250,7 +250,7 @@ func TestSmokeInfoValidIP(t *testing.T) { } assert.Equal(t, "1.1.1.1", resp.Ip) - assert.Equal(t, types.StrPtr("1.1.1.0/24"), resp.IpRange) + assert.Equal(t, ptr.Of("1.1.1.0/24"), resp.IpRange) } func TestSmokeUnknownIP(t *testing.T) { diff --git a/pkg/cticlient/types_test.go b/pkg/cticlient/types_test.go index 046d28692..1ec58cc78 100644 --- a/pkg/cticlient/types_test.go +++ b/pkg/cticlient/types_test.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/assert" - "github.com/crowdsecurity/crowdsec/pkg/types" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" ) //func (c *SmokeItem) 
GetAttackDetails() []string { @@ -16,16 +16,16 @@ func getSampleSmokeItem() SmokeItem { emptyItem := SmokeItem{ IpRangeScore: 2.0, Ip: "1.2.3.4", - IpRange: types.StrPtr("1.2.3.0/24"), - AsName: types.StrPtr("AS1234"), - AsNum: types.IntPtr(1234), + IpRange: ptr.Of("1.2.3.0/24"), + AsName: ptr.Of("AS1234"), + AsNum: ptr.Of(1234), Location: CTILocationInfo{ - Country: types.StrPtr("FR"), - City: types.StrPtr("Paris"), + Country: ptr.Of("FR"), + City: ptr.Of("Paris"), Latitude: &lat, Longitude: &long, }, - ReverseDNS: types.StrPtr("foo.bar.com"), + ReverseDNS: ptr.Of("foo.bar.com"), Behaviors: []*CTIBehavior{ { Name: "ssh:bruteforce", @@ -34,8 +34,8 @@ func getSampleSmokeItem() SmokeItem { }, }, History: CTIHistory{ - FirstSeen: types.StrPtr("2022-12-05T17:45:00+00:00"), - LastSeen: types.StrPtr("2022-12-06T19:15:00+00:00"), + FirstSeen: ptr.Of("2022-12-05T17:45:00+00:00"), + LastSeen: ptr.Of("2022-12-06T19:15:00+00:00"), FullAge: 3, DaysAge: 1, }, @@ -56,7 +56,7 @@ func getSampleSmokeItem() SmokeItem { "GB": 14, "US": 14, }, - BackgroundNoiseScore: types.IntPtr(3), + BackgroundNoiseScore: ptr.Of(3), Scores: CTIScores{ Overall: CTIScore{ Aggressiveness: 2, diff --git a/pkg/cwhub/download.go b/pkg/cwhub/download.go index 1aab64734..6923c6d05 100644 --- a/pkg/cwhub/download.go +++ b/pkg/cwhub/download.go @@ -18,6 +18,8 @@ import ( "gopkg.in/yaml.v2" ) +var ErrIndexNotFound = fmt.Errorf("index not found") + func UpdateHubIdx(hub *csconfig.Hub) error { bidx, err := DownloadHubIdx(hub) @@ -47,10 +49,13 @@ func DownloadHubIdx(hub *csconfig.Hub) ([]byte, error) { if err != nil { return nil, errors.Wrap(err, "failed http request for hub index") } + defer resp.Body.Close() if resp.StatusCode != http.StatusOK { + if resp.StatusCode == http.StatusNotFound { + return nil, ErrIndexNotFound + } return nil, fmt.Errorf("bad http code %d while requesting %s", resp.StatusCode, req.URL.String()) } - defer resp.Body.Close() body, err := io.ReadAll(resp.Body) if err != nil { return nil, 
errors.Wrap(err, "failed to read request answer for hub index") @@ -81,7 +86,7 @@ func DownloadHubIdx(hub *csconfig.Hub) ([]byte, error) { return body, nil } -//DownloadLatest will download the latest version of Item to the tdir directory +// DownloadLatest will download the latest version of Item to the tdir directory func DownloadLatest(hub *csconfig.Hub, target Item, overwrite bool, updateOnly bool) (Item, error) { var err error diff --git a/pkg/cwhub/helpers.go b/pkg/cwhub/helpers.go index 95d874b6d..af1e938d7 100644 --- a/pkg/cwhub/helpers.go +++ b/pkg/cwhub/helpers.go @@ -16,12 +16,14 @@ import ( func chooseHubBranch() (string, error) { latest, err := cwversion.Latest() if err != nil { + log.Warningf("Unable to retrieve latest crowdsec version: %s, defaulting to master", err) //lint:ignore nilerr reason return "master", nil // ignore } csVersion := cwversion.VersionStrip() if csVersion == latest { + log.Debugf("current version is equal to latest (%s)", csVersion) return "master", nil } diff --git a/pkg/cwversion/version.go b/pkg/cwversion/version.go index 57aea47f9..aeac6f2f2 100644 --- a/pkg/cwversion/version.go +++ b/pkg/cwversion/version.go @@ -8,50 +8,38 @@ import ( "runtime" "strings" - version "github.com/hashicorp/go-version" + goversion "github.com/hashicorp/go-version" + + "github.com/crowdsecurity/go-cs-lib/pkg/version" ) -/* - -Given a version number MAJOR.MINOR.PATCH, increment the: - - MAJOR version when you make incompatible API changes, - MINOR version when you add functionality in a backwards compatible manner, and - PATCH version when you make backwards compatible bug fixes. - -Additional labels for pre-release and build metadata are available as extensions to the MAJOR.MINOR.PATCH format. 
- -*/ - var ( - Version string // = "v0.0.0" Codename string // = "SoumSoum" - BuildDate string // = "0000-00-00_00:00:00" - Tag string // = "dev" - GoVersion = runtime.Version()[2:] // = "1.13" System = runtime.GOOS // = "linux" Constraint_parser = ">= 1.0, <= 2.0" Constraint_scenario = ">= 1.0, < 3.0" Constraint_api = "v1" Constraint_acquis = ">= 1.0, < 2.0" + Libre2 = "WebAssembly" ) func ShowStr() string { ret := "" - ret += fmt.Sprintf("version: %s-%s\n", Version, Tag) + ret += fmt.Sprintf("version: %s-%s\n", version.Version, version.Tag) ret += fmt.Sprintf("Codename: %s\n", Codename) - ret += fmt.Sprintf("BuildDate: %s\n", BuildDate) - ret += fmt.Sprintf("GoVersion: %s\n", GoVersion) + ret += fmt.Sprintf("BuildDate: %s\n", version.BuildDate) + ret += fmt.Sprintf("GoVersion: %s\n", version.GoVersion) ret += fmt.Sprintf("Platform: %s\n", System) return ret } func Show() { - log.Printf("version: %s-%s", Version, Tag) + log.Printf("version: %s-%s", version.Version, version.Tag) log.Printf("Codename: %s", Codename) - log.Printf("BuildDate: %s", BuildDate) - log.Printf("GoVersion: %s", GoVersion) + log.Printf("BuildDate: %s", version.BuildDate) + log.Printf("GoVersion: %s", version.GoVersion) log.Printf("Platform: %s\n", System) + log.Printf("libre2: %s\n", Libre2) log.Printf("Constraint_parser: %s", Constraint_parser) log.Printf("Constraint_scenario: %s", Constraint_scenario) log.Printf("Constraint_api: %s", Constraint_api) @@ -59,20 +47,21 @@ func Show() { } func VersionStr() string { - return fmt.Sprintf("%s-%s-%s", Version, System, Tag) + return fmt.Sprintf("%s-%s-%s", version.Version, System, version.Tag) } func VersionStrip() string { - version := strings.Split(Version, "~") + version := strings.Split(version.Version, "~") + version = strings.Split(version[0], "-") return version[0] } -func Statisfies(strvers string, constraint string) (bool, error) { - vers, err := version.NewVersion(strvers) +func Satisfies(strvers string, constraint string) (bool, error) { 
+ vers, err := goversion.NewVersion(strvers) if err != nil { return false, fmt.Errorf("failed to parse '%s' : %v", strvers, err) } - constraints, err := version.NewConstraint(constraint) + constraints, err := goversion.NewConstraint(constraint) if err != nil { return false, fmt.Errorf("failed to parse constraint '%s'", constraint) } diff --git a/pkg/database/alerts.go b/pkg/database/alerts.go index c32381748..ad0117236 100644 --- a/pkg/database/alerts.go +++ b/pkg/database/alerts.go @@ -866,6 +866,8 @@ func AlertPredicatesFromFilter(filter map[string][]string) ([]predicate.Alert, e continue case "simulated": continue + case "with_decisions": + continue default: return nil, errors.Wrapf(InvalidFilter, "Filter parameter '%s' is unknown (=%s)", param, value[0]) } diff --git a/pkg/database/database.go b/pkg/database/database.go index 3c0828705..46de4e73a 100644 --- a/pkg/database/database.go +++ b/pkg/database/database.go @@ -8,15 +8,17 @@ import ( "time" entsql "entgo.io/ent/dialect/sql" - "github.com/crowdsecurity/crowdsec/pkg/csconfig" - "github.com/crowdsecurity/crowdsec/pkg/database/ent" - "github.com/crowdsecurity/crowdsec/pkg/types" "github.com/go-co-op/gocron" _ "github.com/go-sql-driver/mysql" _ "github.com/jackc/pgx/v4/stdlib" _ "github.com/mattn/go-sqlite3" - "github.com/pkg/errors" log "github.com/sirupsen/logrus" + + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + + "github.com/crowdsecurity/crowdsec/pkg/csconfig" + "github.com/crowdsecurity/crowdsec/pkg/database/ent" + "github.com/crowdsecurity/crowdsec/pkg/types" ) type Client struct { @@ -35,7 +37,7 @@ func getEntDriver(dbtype string, dbdialect string, dsn string, config *csconfig. 
} if config.MaxOpenConns == nil { log.Warningf("MaxOpenConns is 0, defaulting to %d", csconfig.DEFAULT_MAX_OPEN_CONNS) - config.MaxOpenConns = types.IntPtr(csconfig.DEFAULT_MAX_OPEN_CONNS) + config.MaxOpenConns = ptr.Of(csconfig.DEFAULT_MAX_OPEN_CONNS) } db.SetMaxOpenConns(*config.MaxOpenConns) drv := entsql.OpenDB(dbdialect, db) @@ -51,7 +53,7 @@ func NewClient(config *csconfig.DatabaseCfg) (*Client, error) { /*The logger that will be used by db operations*/ clog := log.New() if err := types.ConfigureLogger(clog); err != nil { - return nil, errors.Wrap(err, "while configuring db logger") + return nil, fmt.Errorf("while configuring db logger: %w", err) } if config.LogLevel != nil { clog.SetLevel(*config.LogLevel) @@ -68,10 +70,10 @@ func NewClient(config *csconfig.DatabaseCfg) (*Client, error) { if _, err := os.Stat(config.DbPath); os.IsNotExist(err) { f, err := os.OpenFile(config.DbPath, os.O_CREATE|os.O_RDWR, 0600) if err != nil { - return &Client{}, errors.Wrapf(err, "failed to create SQLite database file %q", config.DbPath) + return &Client{}, fmt.Errorf("failed to create SQLite database file %q: %w", config.DbPath, err) } if err := f.Close(); err != nil { - return &Client{}, errors.Wrapf(err, "failed to create SQLite database file %q", config.DbPath) + return &Client{}, fmt.Errorf("failed to create SQLite database file %q: %w", config.DbPath, err) } } //Always try to set permissions to simplify a bit the code for windows (as the permissions set by OpenFile will be garbage) @@ -111,7 +113,7 @@ func (c *Client) StartFlushScheduler(config *csconfig.FlushDBCfg) (*gocron.Sched scheduler := gocron.NewScheduler(time.UTC) job, err := scheduler.Every(1).Minute().Do(c.FlushAlerts, maxAge, maxItems) if err != nil { - return nil, errors.Wrap(err, "while starting FlushAlerts scheduler") + return nil, fmt.Errorf("while starting FlushAlerts scheduler: %w", err) } job.SingletonMode() // Init & Start cronjob every hour for bouncers/agents @@ -119,14 +121,14 @@ func (c *Client) 
StartFlushScheduler(config *csconfig.FlushDBCfg) (*gocron.Sched if config.AgentsGC.Cert != nil { duration, err := types.ParseDuration(*config.AgentsGC.Cert) if err != nil { - return nil, errors.Wrap(err, "while parsing agents cert auto-delete duration") + return nil, fmt.Errorf("while parsing agents cert auto-delete duration: %w", err) } config.AgentsGC.CertDuration = &duration } if config.AgentsGC.LoginPassword != nil { duration, err := types.ParseDuration(*config.AgentsGC.LoginPassword) if err != nil { - return nil, errors.Wrap(err, "while parsing agents login/password auto-delete duration") + return nil, fmt.Errorf("while parsing agents login/password auto-delete duration: %w", err) } config.AgentsGC.LoginPasswordDuration = &duration } @@ -138,14 +140,14 @@ func (c *Client) StartFlushScheduler(config *csconfig.FlushDBCfg) (*gocron.Sched if config.BouncersGC.Cert != nil { duration, err := types.ParseDuration(*config.BouncersGC.Cert) if err != nil { - return nil, errors.Wrap(err, "while parsing bouncers cert auto-delete duration") + return nil, fmt.Errorf("while parsing bouncers cert auto-delete duration: %w", err) } config.BouncersGC.CertDuration = &duration } if config.BouncersGC.Api != nil { duration, err := types.ParseDuration(*config.BouncersGC.Api) if err != nil { - return nil, errors.Wrap(err, "while parsing bouncers api auto-delete duration") + return nil, fmt.Errorf("while parsing bouncers api auto-delete duration: %w", err) } config.BouncersGC.ApiDuration = &duration } @@ -155,7 +157,7 @@ func (c *Client) StartFlushScheduler(config *csconfig.FlushDBCfg) (*gocron.Sched } baJob, err := scheduler.Every(1).Minute().Do(c.FlushAgentsAndBouncers, config.AgentsGC, config.BouncersGC) if err != nil { - return nil, errors.Wrap(err, "while starting FlushAgentsAndBouncers scheduler") + return nil, fmt.Errorf("while starting FlushAgentsAndBouncers scheduler: %w", err) } baJob.SingletonMode() scheduler.StartAsync() diff --git a/pkg/database/ent/alert_query.go 
b/pkg/database/ent/alert_query.go index a7a00cab0..68789196d 100644 --- a/pkg/database/ent/alert_query.go +++ b/pkg/database/ent/alert_query.go @@ -562,7 +562,6 @@ func (aq *AlertQuery) loadDecisions(ctx context.Context, query *DecisionQuery, n init(nodes[i]) } } - query.withFKs = true query.Where(predicate.Decision(func(s *sql.Selector) { s.Where(sql.InValues(alert.DecisionsColumn, fks...)) })) @@ -571,13 +570,10 @@ func (aq *AlertQuery) loadDecisions(ctx context.Context, query *DecisionQuery, n return err } for _, n := range neighbors { - fk := n.alert_decisions - if fk == nil { - return fmt.Errorf(`foreign-key "alert_decisions" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + fk := n.AlertDecisions + node, ok := nodeids[fk] if !ok { - return fmt.Errorf(`unexpected foreign-key "alert_decisions" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "alert_decisions" returned %v for node %v`, fk, n.ID) } assign(node, n) } @@ -593,7 +589,6 @@ func (aq *AlertQuery) loadEvents(ctx context.Context, query *EventQuery, nodes [ init(nodes[i]) } } - query.withFKs = true query.Where(predicate.Event(func(s *sql.Selector) { s.Where(sql.InValues(alert.EventsColumn, fks...)) })) @@ -602,13 +597,10 @@ func (aq *AlertQuery) loadEvents(ctx context.Context, query *EventQuery, nodes [ return err } for _, n := range neighbors { - fk := n.alert_events - if fk == nil { - return fmt.Errorf(`foreign-key "alert_events" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + fk := n.AlertEvents + node, ok := nodeids[fk] if !ok { - return fmt.Errorf(`unexpected foreign-key "alert_events" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "alert_events" returned %v for node %v`, fk, n.ID) } assign(node, n) } @@ -624,7 +616,6 @@ func (aq *AlertQuery) loadMetas(ctx context.Context, query *MetaQuery, nodes []* init(nodes[i]) } } - query.withFKs = true query.Where(predicate.Meta(func(s *sql.Selector) { 
s.Where(sql.InValues(alert.MetasColumn, fks...)) })) @@ -633,13 +624,10 @@ func (aq *AlertQuery) loadMetas(ctx context.Context, query *MetaQuery, nodes []* return err } for _, n := range neighbors { - fk := n.alert_metas - if fk == nil { - return fmt.Errorf(`foreign-key "alert_metas" is nil for node %v`, n.ID) - } - node, ok := nodeids[*fk] + fk := n.AlertMetas + node, ok := nodeids[fk] if !ok { - return fmt.Errorf(`unexpected foreign-key "alert_metas" returned %v for node %v`, *fk, n.ID) + return fmt.Errorf(`unexpected foreign-key "alert_metas" returned %v for node %v`, fk, n.ID) } assign(node, n) } diff --git a/pkg/database/ent/decision.go b/pkg/database/ent/decision.go index ef16b39ca..c969e5767 100644 --- a/pkg/database/ent/decision.go +++ b/pkg/database/ent/decision.go @@ -47,10 +47,11 @@ type Decision struct { Simulated bool `json:"simulated,omitempty"` // UUID holds the value of the "uuid" field. UUID string `json:"uuid,omitempty"` + // AlertDecisions holds the value of the "alert_decisions" field. + AlertDecisions int `json:"alert_decisions,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the DecisionQuery when eager-loading is set. - Edges DecisionEdges `json:"edges"` - alert_decisions *int + Edges DecisionEdges `json:"edges"` } // DecisionEdges holds the relations/edges for other nodes in the graph. 
@@ -82,14 +83,12 @@ func (*Decision) scanValues(columns []string) ([]any, error) { switch columns[i] { case decision.FieldSimulated: values[i] = new(sql.NullBool) - case decision.FieldID, decision.FieldStartIP, decision.FieldEndIP, decision.FieldStartSuffix, decision.FieldEndSuffix, decision.FieldIPSize: + case decision.FieldID, decision.FieldStartIP, decision.FieldEndIP, decision.FieldStartSuffix, decision.FieldEndSuffix, decision.FieldIPSize, decision.FieldAlertDecisions: values[i] = new(sql.NullInt64) case decision.FieldScenario, decision.FieldType, decision.FieldScope, decision.FieldValue, decision.FieldOrigin, decision.FieldUUID: values[i] = new(sql.NullString) case decision.FieldCreatedAt, decision.FieldUpdatedAt, decision.FieldUntil: values[i] = new(sql.NullTime) - case decision.ForeignKeys[0]: // alert_decisions - values[i] = new(sql.NullInt64) default: return nil, fmt.Errorf("unexpected column %q for type Decision", columns[i]) } @@ -204,12 +203,11 @@ func (d *Decision) assignValues(columns []string, values []any) error { } else if value.Valid { d.UUID = value.String } - case decision.ForeignKeys[0]: + case decision.FieldAlertDecisions: if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field alert_decisions", value) + return fmt.Errorf("unexpected type %T for field alert_decisions", values[i]) } else if value.Valid { - d.alert_decisions = new(int) - *d.alert_decisions = int(value.Int64) + d.AlertDecisions = int(value.Int64) } } } @@ -294,6 +292,9 @@ func (d *Decision) String() string { builder.WriteString(", ") builder.WriteString("uuid=") builder.WriteString(d.UUID) + builder.WriteString(", ") + builder.WriteString("alert_decisions=") + builder.WriteString(fmt.Sprintf("%v", d.AlertDecisions)) builder.WriteByte(')') return builder.String() } diff --git a/pkg/database/ent/decision/decision.go b/pkg/database/ent/decision/decision.go index 6fa929583..a0012d940 100644 --- a/pkg/database/ent/decision/decision.go 
+++ b/pkg/database/ent/decision/decision.go @@ -41,6 +41,8 @@ const ( FieldSimulated = "simulated" // FieldUUID holds the string denoting the uuid field in the database. FieldUUID = "uuid" + // FieldAlertDecisions holds the string denoting the alert_decisions field in the database. + FieldAlertDecisions = "alert_decisions" // EdgeOwner holds the string denoting the owner edge name in mutations. EdgeOwner = "owner" // Table holds the table name of the decision in the database. @@ -72,12 +74,7 @@ var Columns = []string{ FieldOrigin, FieldSimulated, FieldUUID, -} - -// ForeignKeys holds the SQL foreign-keys that are owned by the "decisions" -// table and are not defined as standalone fields in the schema. -var ForeignKeys = []string{ - "alert_decisions", + FieldAlertDecisions, } // ValidColumn reports if the column name is valid (part of the table columns). @@ -87,11 +84,6 @@ func ValidColumn(column string) bool { return true } } - for i := range ForeignKeys { - if column == ForeignKeys[i] { - return true - } - } return false } diff --git a/pkg/database/ent/decision/where.go b/pkg/database/ent/decision/where.go index 378f687b7..18716a4a7 100644 --- a/pkg/database/ent/decision/where.go +++ b/pkg/database/ent/decision/where.go @@ -186,6 +186,13 @@ func UUID(v string) predicate.Decision { }) } +// AlertDecisions applies equality check predicate on the "alert_decisions" field. It's identical to AlertDecisionsEQ. +func AlertDecisions(v int) predicate.Decision { + return predicate.Decision(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldAlertDecisions), v)) + }) +} + // CreatedAtEQ applies the EQ predicate on the "created_at" field. func CreatedAtEQ(v time.Time) predicate.Decision { return predicate.Decision(func(s *sql.Selector) { @@ -1432,6 +1439,56 @@ func UUIDContainsFold(v string) predicate.Decision { }) } +// AlertDecisionsEQ applies the EQ predicate on the "alert_decisions" field. 
+func AlertDecisionsEQ(v int) predicate.Decision { + return predicate.Decision(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldAlertDecisions), v)) + }) +} + +// AlertDecisionsNEQ applies the NEQ predicate on the "alert_decisions" field. +func AlertDecisionsNEQ(v int) predicate.Decision { + return predicate.Decision(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldAlertDecisions), v)) + }) +} + +// AlertDecisionsIn applies the In predicate on the "alert_decisions" field. +func AlertDecisionsIn(vs ...int) predicate.Decision { + v := make([]any, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Decision(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldAlertDecisions), v...)) + }) +} + +// AlertDecisionsNotIn applies the NotIn predicate on the "alert_decisions" field. +func AlertDecisionsNotIn(vs ...int) predicate.Decision { + v := make([]any, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Decision(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldAlertDecisions), v...)) + }) +} + +// AlertDecisionsIsNil applies the IsNil predicate on the "alert_decisions" field. +func AlertDecisionsIsNil() predicate.Decision { + return predicate.Decision(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldAlertDecisions))) + }) +} + +// AlertDecisionsNotNil applies the NotNil predicate on the "alert_decisions" field. +func AlertDecisionsNotNil() predicate.Decision { + return predicate.Decision(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldAlertDecisions))) + }) +} + // HasOwner applies the HasEdge predicate on the "owner" edge. 
func HasOwner() predicate.Decision { return predicate.Decision(func(s *sql.Selector) { diff --git a/pkg/database/ent/decision_create.go b/pkg/database/ent/decision_create.go index e3c7c632d..64238cb70 100644 --- a/pkg/database/ent/decision_create.go +++ b/pkg/database/ent/decision_create.go @@ -191,6 +191,20 @@ func (dc *DecisionCreate) SetNillableUUID(s *string) *DecisionCreate { return dc } +// SetAlertDecisions sets the "alert_decisions" field. +func (dc *DecisionCreate) SetAlertDecisions(i int) *DecisionCreate { + dc.mutation.SetAlertDecisions(i) + return dc +} + +// SetNillableAlertDecisions sets the "alert_decisions" field if the given value is not nil. +func (dc *DecisionCreate) SetNillableAlertDecisions(i *int) *DecisionCreate { + if i != nil { + dc.SetAlertDecisions(*i) + } + return dc +} + // SetOwnerID sets the "owner" edge to the Alert entity by ID. func (dc *DecisionCreate) SetOwnerID(id int) *DecisionCreate { dc.mutation.SetOwnerID(id) @@ -485,7 +499,7 @@ func (dc *DecisionCreate) createSpec() (*Decision, *sqlgraph.CreateSpec) { for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } - _node.alert_decisions = &nodes[0] + _node.AlertDecisions = nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/pkg/database/ent/decision_query.go b/pkg/database/ent/decision_query.go index 414827258..91aebded9 100644 --- a/pkg/database/ent/decision_query.go +++ b/pkg/database/ent/decision_query.go @@ -25,7 +25,6 @@ type DecisionQuery struct { fields []string predicates []predicate.Decision withOwner *AlertQuery - withFKs bool // intermediate query (i.e. traversal path). 
sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -351,18 +350,11 @@ func (dq *DecisionQuery) prepareQuery(ctx context.Context) error { func (dq *DecisionQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Decision, error) { var ( nodes = []*Decision{} - withFKs = dq.withFKs _spec = dq.querySpec() loadedTypes = [1]bool{ dq.withOwner != nil, } ) - if dq.withOwner != nil { - withFKs = true - } - if withFKs { - _spec.Node.Columns = append(_spec.Node.Columns, decision.ForeignKeys...) - } _spec.ScanValues = func(columns []string) ([]any, error) { return (*Decision).scanValues(nil, columns) } @@ -394,10 +386,7 @@ func (dq *DecisionQuery) loadOwner(ctx context.Context, query *AlertQuery, nodes ids := make([]int, 0, len(nodes)) nodeids := make(map[int][]*Decision) for i := range nodes { - if nodes[i].alert_decisions == nil { - continue - } - fk := *nodes[i].alert_decisions + fk := nodes[i].AlertDecisions if _, ok := nodeids[fk]; !ok { ids = append(ids, fk) } diff --git a/pkg/database/ent/decision_update.go b/pkg/database/ent/decision_update.go index 198697280..64b40871e 100644 --- a/pkg/database/ent/decision_update.go +++ b/pkg/database/ent/decision_update.go @@ -272,6 +272,26 @@ func (du *DecisionUpdate) ClearUUID() *DecisionUpdate { return du } +// SetAlertDecisions sets the "alert_decisions" field. +func (du *DecisionUpdate) SetAlertDecisions(i int) *DecisionUpdate { + du.mutation.SetAlertDecisions(i) + return du +} + +// SetNillableAlertDecisions sets the "alert_decisions" field if the given value is not nil. +func (du *DecisionUpdate) SetNillableAlertDecisions(i *int) *DecisionUpdate { + if i != nil { + du.SetAlertDecisions(*i) + } + return du +} + +// ClearAlertDecisions clears the value of the "alert_decisions" field. +func (du *DecisionUpdate) ClearAlertDecisions() *DecisionUpdate { + du.mutation.ClearAlertDecisions() + return du +} + // SetOwnerID sets the "owner" edge to the Alert entity by ID. 
func (du *DecisionUpdate) SetOwnerID(id int) *DecisionUpdate { du.mutation.SetOwnerID(id) @@ -878,6 +898,26 @@ func (duo *DecisionUpdateOne) ClearUUID() *DecisionUpdateOne { return duo } +// SetAlertDecisions sets the "alert_decisions" field. +func (duo *DecisionUpdateOne) SetAlertDecisions(i int) *DecisionUpdateOne { + duo.mutation.SetAlertDecisions(i) + return duo +} + +// SetNillableAlertDecisions sets the "alert_decisions" field if the given value is not nil. +func (duo *DecisionUpdateOne) SetNillableAlertDecisions(i *int) *DecisionUpdateOne { + if i != nil { + duo.SetAlertDecisions(*i) + } + return duo +} + +// ClearAlertDecisions clears the value of the "alert_decisions" field. +func (duo *DecisionUpdateOne) ClearAlertDecisions() *DecisionUpdateOne { + duo.mutation.ClearAlertDecisions() + return duo +} + // SetOwnerID sets the "owner" edge to the Alert entity by ID. func (duo *DecisionUpdateOne) SetOwnerID(id int) *DecisionUpdateOne { duo.mutation.SetOwnerID(id) diff --git a/pkg/database/ent/event.go b/pkg/database/ent/event.go index 276d0918e..4754107fd 100644 --- a/pkg/database/ent/event.go +++ b/pkg/database/ent/event.go @@ -25,10 +25,11 @@ type Event struct { Time time.Time `json:"time,omitempty"` // Serialized holds the value of the "serialized" field. Serialized string `json:"serialized,omitempty"` + // AlertEvents holds the value of the "alert_events" field. + AlertEvents int `json:"alert_events,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the EventQuery when eager-loading is set. - Edges EventEdges `json:"edges"` - alert_events *int + Edges EventEdges `json:"edges"` } // EventEdges holds the relations/edges for other nodes in the graph. 
@@ -58,14 +59,12 @@ func (*Event) scanValues(columns []string) ([]any, error) { values := make([]any, len(columns)) for i := range columns { switch columns[i] { - case event.FieldID: + case event.FieldID, event.FieldAlertEvents: values[i] = new(sql.NullInt64) case event.FieldSerialized: values[i] = new(sql.NullString) case event.FieldCreatedAt, event.FieldUpdatedAt, event.FieldTime: values[i] = new(sql.NullTime) - case event.ForeignKeys[0]: // alert_events - values[i] = new(sql.NullInt64) default: return nil, fmt.Errorf("unexpected column %q for type Event", columns[i]) } @@ -113,12 +112,11 @@ func (e *Event) assignValues(columns []string, values []any) error { } else if value.Valid { e.Serialized = value.String } - case event.ForeignKeys[0]: + case event.FieldAlertEvents: if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field alert_events", value) + return fmt.Errorf("unexpected type %T for field alert_events", values[i]) } else if value.Valid { - e.alert_events = new(int) - *e.alert_events = int(value.Int64) + e.AlertEvents = int(value.Int64) } } } @@ -168,6 +166,9 @@ func (e *Event) String() string { builder.WriteString(", ") builder.WriteString("serialized=") builder.WriteString(e.Serialized) + builder.WriteString(", ") + builder.WriteString("alert_events=") + builder.WriteString(fmt.Sprintf("%v", e.AlertEvents)) builder.WriteByte(')') return builder.String() } diff --git a/pkg/database/ent/event/event.go b/pkg/database/ent/event/event.go index 07e58f16d..33b9b67f8 100644 --- a/pkg/database/ent/event/event.go +++ b/pkg/database/ent/event/event.go @@ -19,6 +19,8 @@ const ( FieldTime = "time" // FieldSerialized holds the string denoting the serialized field in the database. FieldSerialized = "serialized" + // FieldAlertEvents holds the string denoting the alert_events field in the database. + FieldAlertEvents = "alert_events" // EdgeOwner holds the string denoting the owner edge name in mutations. 
EdgeOwner = "owner" // Table holds the table name of the event in the database. @@ -39,12 +41,7 @@ var Columns = []string{ FieldUpdatedAt, FieldTime, FieldSerialized, -} - -// ForeignKeys holds the SQL foreign-keys that are owned by the "events" -// table and are not defined as standalone fields in the schema. -var ForeignKeys = []string{ - "alert_events", + FieldAlertEvents, } // ValidColumn reports if the column name is valid (part of the table columns). @@ -54,11 +51,6 @@ func ValidColumn(column string) bool { return true } } - for i := range ForeignKeys { - if column == ForeignKeys[i] { - return true - } - } return false } diff --git a/pkg/database/ent/event/where.go b/pkg/database/ent/event/where.go index 322b6f49f..7554e59e6 100644 --- a/pkg/database/ent/event/where.go +++ b/pkg/database/ent/event/where.go @@ -109,6 +109,13 @@ func Serialized(v string) predicate.Event { }) } +// AlertEvents applies equality check predicate on the "alert_events" field. It's identical to AlertEventsEQ. +func AlertEvents(v int) predicate.Event { + return predicate.Event(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldAlertEvents), v)) + }) +} + // CreatedAtEQ applies the EQ predicate on the "created_at" field. func CreatedAtEQ(v time.Time) predicate.Event { return predicate.Event(func(s *sql.Selector) { @@ -428,6 +435,56 @@ func SerializedContainsFold(v string) predicate.Event { }) } +// AlertEventsEQ applies the EQ predicate on the "alert_events" field. +func AlertEventsEQ(v int) predicate.Event { + return predicate.Event(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldAlertEvents), v)) + }) +} + +// AlertEventsNEQ applies the NEQ predicate on the "alert_events" field. +func AlertEventsNEQ(v int) predicate.Event { + return predicate.Event(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldAlertEvents), v)) + }) +} + +// AlertEventsIn applies the In predicate on the "alert_events" field. 
+func AlertEventsIn(vs ...int) predicate.Event { + v := make([]any, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Event(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldAlertEvents), v...)) + }) +} + +// AlertEventsNotIn applies the NotIn predicate on the "alert_events" field. +func AlertEventsNotIn(vs ...int) predicate.Event { + v := make([]any, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Event(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldAlertEvents), v...)) + }) +} + +// AlertEventsIsNil applies the IsNil predicate on the "alert_events" field. +func AlertEventsIsNil() predicate.Event { + return predicate.Event(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldAlertEvents))) + }) +} + +// AlertEventsNotNil applies the NotNil predicate on the "alert_events" field. +func AlertEventsNotNil() predicate.Event { + return predicate.Event(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldAlertEvents))) + }) +} + // HasOwner applies the HasEdge predicate on the "owner" edge. func HasOwner() predicate.Event { return predicate.Event(func(s *sql.Selector) { diff --git a/pkg/database/ent/event_create.go b/pkg/database/ent/event_create.go index 3d844892e..c58613051 100644 --- a/pkg/database/ent/event_create.go +++ b/pkg/database/ent/event_create.go @@ -61,6 +61,20 @@ func (ec *EventCreate) SetSerialized(s string) *EventCreate { return ec } +// SetAlertEvents sets the "alert_events" field. +func (ec *EventCreate) SetAlertEvents(i int) *EventCreate { + ec.mutation.SetAlertEvents(i) + return ec +} + +// SetNillableAlertEvents sets the "alert_events" field if the given value is not nil. +func (ec *EventCreate) SetNillableAlertEvents(i *int) *EventCreate { + if i != nil { + ec.SetAlertEvents(*i) + } + return ec +} + // SetOwnerID sets the "owner" edge to the Alert entity by ID. 
func (ec *EventCreate) SetOwnerID(id int) *EventCreate { ec.mutation.SetOwnerID(id) @@ -256,7 +270,7 @@ func (ec *EventCreate) createSpec() (*Event, *sqlgraph.CreateSpec) { for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } - _node.alert_events = &nodes[0] + _node.AlertEvents = nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/pkg/database/ent/event_query.go b/pkg/database/ent/event_query.go index 49794e980..045d750f8 100644 --- a/pkg/database/ent/event_query.go +++ b/pkg/database/ent/event_query.go @@ -25,7 +25,6 @@ type EventQuery struct { fields []string predicates []predicate.Event withOwner *AlertQuery - withFKs bool // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -351,18 +350,11 @@ func (eq *EventQuery) prepareQuery(ctx context.Context) error { func (eq *EventQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Event, error) { var ( nodes = []*Event{} - withFKs = eq.withFKs _spec = eq.querySpec() loadedTypes = [1]bool{ eq.withOwner != nil, } ) - if eq.withOwner != nil { - withFKs = true - } - if withFKs { - _spec.Node.Columns = append(_spec.Node.Columns, event.ForeignKeys...) 
- } _spec.ScanValues = func(columns []string) ([]any, error) { return (*Event).scanValues(nil, columns) } @@ -394,10 +386,7 @@ func (eq *EventQuery) loadOwner(ctx context.Context, query *AlertQuery, nodes [] ids := make([]int, 0, len(nodes)) nodeids := make(map[int][]*Event) for i := range nodes { - if nodes[i].alert_events == nil { - continue - } - fk := *nodes[i].alert_events + fk := nodes[i].AlertEvents if _, ok := nodeids[fk]; !ok { ids = append(ids, fk) } diff --git a/pkg/database/ent/event_update.go b/pkg/database/ent/event_update.go index bb210745c..fcd0cc50c 100644 --- a/pkg/database/ent/event_update.go +++ b/pkg/database/ent/event_update.go @@ -65,6 +65,26 @@ func (eu *EventUpdate) SetSerialized(s string) *EventUpdate { return eu } +// SetAlertEvents sets the "alert_events" field. +func (eu *EventUpdate) SetAlertEvents(i int) *EventUpdate { + eu.mutation.SetAlertEvents(i) + return eu +} + +// SetNillableAlertEvents sets the "alert_events" field if the given value is not nil. +func (eu *EventUpdate) SetNillableAlertEvents(i *int) *EventUpdate { + if i != nil { + eu.SetAlertEvents(*i) + } + return eu +} + +// ClearAlertEvents clears the value of the "alert_events" field. +func (eu *EventUpdate) ClearAlertEvents() *EventUpdate { + eu.mutation.ClearAlertEvents() + return eu +} + // SetOwnerID sets the "owner" edge to the Alert entity by ID. func (eu *EventUpdate) SetOwnerID(id int) *EventUpdate { eu.mutation.SetOwnerID(id) @@ -326,6 +346,26 @@ func (euo *EventUpdateOne) SetSerialized(s string) *EventUpdateOne { return euo } +// SetAlertEvents sets the "alert_events" field. +func (euo *EventUpdateOne) SetAlertEvents(i int) *EventUpdateOne { + euo.mutation.SetAlertEvents(i) + return euo +} + +// SetNillableAlertEvents sets the "alert_events" field if the given value is not nil. 
+func (euo *EventUpdateOne) SetNillableAlertEvents(i *int) *EventUpdateOne { + if i != nil { + euo.SetAlertEvents(*i) + } + return euo +} + +// ClearAlertEvents clears the value of the "alert_events" field. +func (euo *EventUpdateOne) ClearAlertEvents() *EventUpdateOne { + euo.mutation.ClearAlertEvents() + return euo +} + // SetOwnerID sets the "owner" edge to the Alert entity by ID. func (euo *EventUpdateOne) SetOwnerID(id int) *EventUpdateOne { euo.mutation.SetOwnerID(id) diff --git a/pkg/database/ent/meta.go b/pkg/database/ent/meta.go index ee2082dab..660f1a4db 100644 --- a/pkg/database/ent/meta.go +++ b/pkg/database/ent/meta.go @@ -25,10 +25,11 @@ type Meta struct { Key string `json:"key,omitempty"` // Value holds the value of the "value" field. Value string `json:"value,omitempty"` + // AlertMetas holds the value of the "alert_metas" field. + AlertMetas int `json:"alert_metas,omitempty"` // Edges holds the relations/edges for other nodes in the graph. // The values are being populated by the MetaQuery when eager-loading is set. - Edges MetaEdges `json:"edges"` - alert_metas *int + Edges MetaEdges `json:"edges"` } // MetaEdges holds the relations/edges for other nodes in the graph. 
@@ -58,14 +59,12 @@ func (*Meta) scanValues(columns []string) ([]any, error) { values := make([]any, len(columns)) for i := range columns { switch columns[i] { - case meta.FieldID: + case meta.FieldID, meta.FieldAlertMetas: values[i] = new(sql.NullInt64) case meta.FieldKey, meta.FieldValue: values[i] = new(sql.NullString) case meta.FieldCreatedAt, meta.FieldUpdatedAt: values[i] = new(sql.NullTime) - case meta.ForeignKeys[0]: // alert_metas - values[i] = new(sql.NullInt64) default: return nil, fmt.Errorf("unexpected column %q for type Meta", columns[i]) } @@ -113,12 +112,11 @@ func (m *Meta) assignValues(columns []string, values []any) error { } else if value.Valid { m.Value = value.String } - case meta.ForeignKeys[0]: + case meta.FieldAlertMetas: if value, ok := values[i].(*sql.NullInt64); !ok { - return fmt.Errorf("unexpected type %T for edge-field alert_metas", value) + return fmt.Errorf("unexpected type %T for field alert_metas", values[i]) } else if value.Valid { - m.alert_metas = new(int) - *m.alert_metas = int(value.Int64) + m.AlertMetas = int(value.Int64) } } } @@ -168,6 +166,9 @@ func (m *Meta) String() string { builder.WriteString(", ") builder.WriteString("value=") builder.WriteString(m.Value) + builder.WriteString(", ") + builder.WriteString("alert_metas=") + builder.WriteString(fmt.Sprintf("%v", m.AlertMetas)) builder.WriteByte(')') return builder.String() } diff --git a/pkg/database/ent/meta/meta.go b/pkg/database/ent/meta/meta.go index 5fea86c81..6d10f2589 100644 --- a/pkg/database/ent/meta/meta.go +++ b/pkg/database/ent/meta/meta.go @@ -19,6 +19,8 @@ const ( FieldKey = "key" // FieldValue holds the string denoting the value field in the database. FieldValue = "value" + // FieldAlertMetas holds the string denoting the alert_metas field in the database. + FieldAlertMetas = "alert_metas" // EdgeOwner holds the string denoting the owner edge name in mutations. EdgeOwner = "owner" // Table holds the table name of the meta in the database. 
@@ -39,12 +41,7 @@ var Columns = []string{ FieldUpdatedAt, FieldKey, FieldValue, -} - -// ForeignKeys holds the SQL foreign-keys that are owned by the "meta" -// table and are not defined as standalone fields in the schema. -var ForeignKeys = []string{ - "alert_metas", + FieldAlertMetas, } // ValidColumn reports if the column name is valid (part of the table columns). @@ -54,11 +51,6 @@ func ValidColumn(column string) bool { return true } } - for i := range ForeignKeys { - if column == ForeignKeys[i] { - return true - } - } return false } diff --git a/pkg/database/ent/meta/where.go b/pkg/database/ent/meta/where.go index 9938e9a6d..479792fd4 100644 --- a/pkg/database/ent/meta/where.go +++ b/pkg/database/ent/meta/where.go @@ -109,6 +109,13 @@ func Value(v string) predicate.Meta { }) } +// AlertMetas applies equality check predicate on the "alert_metas" field. It's identical to AlertMetasEQ. +func AlertMetas(v int) predicate.Meta { + return predicate.Meta(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldAlertMetas), v)) + }) +} + // CreatedAtEQ applies the EQ predicate on the "created_at" field. func CreatedAtEQ(v time.Time) predicate.Meta { return predicate.Meta(func(s *sql.Selector) { @@ -463,6 +470,56 @@ func ValueContainsFold(v string) predicate.Meta { }) } +// AlertMetasEQ applies the EQ predicate on the "alert_metas" field. +func AlertMetasEQ(v int) predicate.Meta { + return predicate.Meta(func(s *sql.Selector) { + s.Where(sql.EQ(s.C(FieldAlertMetas), v)) + }) +} + +// AlertMetasNEQ applies the NEQ predicate on the "alert_metas" field. +func AlertMetasNEQ(v int) predicate.Meta { + return predicate.Meta(func(s *sql.Selector) { + s.Where(sql.NEQ(s.C(FieldAlertMetas), v)) + }) +} + +// AlertMetasIn applies the In predicate on the "alert_metas" field. 
+func AlertMetasIn(vs ...int) predicate.Meta { + v := make([]any, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Meta(func(s *sql.Selector) { + s.Where(sql.In(s.C(FieldAlertMetas), v...)) + }) +} + +// AlertMetasNotIn applies the NotIn predicate on the "alert_metas" field. +func AlertMetasNotIn(vs ...int) predicate.Meta { + v := make([]any, len(vs)) + for i := range v { + v[i] = vs[i] + } + return predicate.Meta(func(s *sql.Selector) { + s.Where(sql.NotIn(s.C(FieldAlertMetas), v...)) + }) +} + +// AlertMetasIsNil applies the IsNil predicate on the "alert_metas" field. +func AlertMetasIsNil() predicate.Meta { + return predicate.Meta(func(s *sql.Selector) { + s.Where(sql.IsNull(s.C(FieldAlertMetas))) + }) +} + +// AlertMetasNotNil applies the NotNil predicate on the "alert_metas" field. +func AlertMetasNotNil() predicate.Meta { + return predicate.Meta(func(s *sql.Selector) { + s.Where(sql.NotNull(s.C(FieldAlertMetas))) + }) +} + // HasOwner applies the HasEdge predicate on the "owner" edge. func HasOwner() predicate.Meta { return predicate.Meta(func(s *sql.Selector) { diff --git a/pkg/database/ent/meta_create.go b/pkg/database/ent/meta_create.go index cb7c6fcba..df4f63159 100644 --- a/pkg/database/ent/meta_create.go +++ b/pkg/database/ent/meta_create.go @@ -61,6 +61,20 @@ func (mc *MetaCreate) SetValue(s string) *MetaCreate { return mc } +// SetAlertMetas sets the "alert_metas" field. +func (mc *MetaCreate) SetAlertMetas(i int) *MetaCreate { + mc.mutation.SetAlertMetas(i) + return mc +} + +// SetNillableAlertMetas sets the "alert_metas" field if the given value is not nil. +func (mc *MetaCreate) SetNillableAlertMetas(i *int) *MetaCreate { + if i != nil { + mc.SetAlertMetas(*i) + } + return mc +} + // SetOwnerID sets the "owner" edge to the Alert entity by ID. 
func (mc *MetaCreate) SetOwnerID(id int) *MetaCreate { mc.mutation.SetOwnerID(id) @@ -256,7 +270,7 @@ func (mc *MetaCreate) createSpec() (*Meta, *sqlgraph.CreateSpec) { for _, k := range nodes { edge.Target.Nodes = append(edge.Target.Nodes, k) } - _node.alert_metas = &nodes[0] + _node.AlertMetas = nodes[0] _spec.Edges = append(_spec.Edges, edge) } return _node, _spec diff --git a/pkg/database/ent/meta_query.go b/pkg/database/ent/meta_query.go index 96c7721e7..d6fd4f3d5 100644 --- a/pkg/database/ent/meta_query.go +++ b/pkg/database/ent/meta_query.go @@ -25,7 +25,6 @@ type MetaQuery struct { fields []string predicates []predicate.Meta withOwner *AlertQuery - withFKs bool // intermediate query (i.e. traversal path). sql *sql.Selector path func(context.Context) (*sql.Selector, error) @@ -351,18 +350,11 @@ func (mq *MetaQuery) prepareQuery(ctx context.Context) error { func (mq *MetaQuery) sqlAll(ctx context.Context, hooks ...queryHook) ([]*Meta, error) { var ( nodes = []*Meta{} - withFKs = mq.withFKs _spec = mq.querySpec() loadedTypes = [1]bool{ mq.withOwner != nil, } ) - if mq.withOwner != nil { - withFKs = true - } - if withFKs { - _spec.Node.Columns = append(_spec.Node.Columns, meta.ForeignKeys...) - } _spec.ScanValues = func(columns []string) ([]any, error) { return (*Meta).scanValues(nil, columns) } @@ -394,10 +386,7 @@ func (mq *MetaQuery) loadOwner(ctx context.Context, query *AlertQuery, nodes []* ids := make([]int, 0, len(nodes)) nodeids := make(map[int][]*Meta) for i := range nodes { - if nodes[i].alert_metas == nil { - continue - } - fk := *nodes[i].alert_metas + fk := nodes[i].AlertMetas if _, ok := nodeids[fk]; !ok { ids = append(ids, fk) } diff --git a/pkg/database/ent/meta_update.go b/pkg/database/ent/meta_update.go index 1cbdc2c4a..67a198ddd 100644 --- a/pkg/database/ent/meta_update.go +++ b/pkg/database/ent/meta_update.go @@ -65,6 +65,26 @@ func (mu *MetaUpdate) SetValue(s string) *MetaUpdate { return mu } +// SetAlertMetas sets the "alert_metas" field. 
+func (mu *MetaUpdate) SetAlertMetas(i int) *MetaUpdate { + mu.mutation.SetAlertMetas(i) + return mu +} + +// SetNillableAlertMetas sets the "alert_metas" field if the given value is not nil. +func (mu *MetaUpdate) SetNillableAlertMetas(i *int) *MetaUpdate { + if i != nil { + mu.SetAlertMetas(*i) + } + return mu +} + +// ClearAlertMetas clears the value of the "alert_metas" field. +func (mu *MetaUpdate) ClearAlertMetas() *MetaUpdate { + mu.mutation.ClearAlertMetas() + return mu +} + // SetOwnerID sets the "owner" edge to the Alert entity by ID. func (mu *MetaUpdate) SetOwnerID(id int) *MetaUpdate { mu.mutation.SetOwnerID(id) @@ -326,6 +346,26 @@ func (muo *MetaUpdateOne) SetValue(s string) *MetaUpdateOne { return muo } +// SetAlertMetas sets the "alert_metas" field. +func (muo *MetaUpdateOne) SetAlertMetas(i int) *MetaUpdateOne { + muo.mutation.SetAlertMetas(i) + return muo +} + +// SetNillableAlertMetas sets the "alert_metas" field if the given value is not nil. +func (muo *MetaUpdateOne) SetNillableAlertMetas(i *int) *MetaUpdateOne { + if i != nil { + muo.SetAlertMetas(*i) + } + return muo +} + +// ClearAlertMetas clears the value of the "alert_metas" field. +func (muo *MetaUpdateOne) ClearAlertMetas() *MetaUpdateOne { + muo.mutation.ClearAlertMetas() + return muo +} + // SetOwnerID sets the "owner" edge to the Alert entity by ID. func (muo *MetaUpdateOne) SetOwnerID(id int) *MetaUpdateOne { muo.mutation.SetOwnerID(id) diff --git a/pkg/database/ent/migrate/schema.go b/pkg/database/ent/migrate/schema.go index 89a660a10..375fd4e78 100644 --- a/pkg/database/ent/migrate/schema.go +++ b/pkg/database/ent/migrate/schema.go @@ -141,6 +141,11 @@ var ( Unique: false, Columns: []*schema.Column{DecisionsColumns[3]}, }, + { + Name: "decision_alert_decisions", + Unique: false, + Columns: []*schema.Column{DecisionsColumns[16]}, + }, }, } // EventsColumns holds the columns for the "events" table. 
@@ -165,6 +170,13 @@ var ( OnDelete: schema.Cascade, }, }, + Indexes: []*schema.Index{ + { + Name: "event_alert_events", + Unique: false, + Columns: []*schema.Column{EventsColumns[5]}, + }, + }, } // MachinesColumns holds the columns for the "machines" table. MachinesColumns = []*schema.Column{ @@ -210,6 +222,13 @@ var ( OnDelete: schema.Cascade, }, }, + Indexes: []*schema.Index{ + { + Name: "meta_alert_metas", + Unique: false, + Columns: []*schema.Column{MetaColumns[5]}, + }, + }, } // Tables holds all the tables in the schema. Tables = []*schema.Table{ diff --git a/pkg/database/ent/mutation.go b/pkg/database/ent/mutation.go index 911d44295..907c1ef01 100644 --- a/pkg/database/ent/mutation.go +++ b/pkg/database/ent/mutation.go @@ -4773,6 +4773,55 @@ func (m *DecisionMutation) ResetUUID() { delete(m.clearedFields, decision.FieldUUID) } +// SetAlertDecisions sets the "alert_decisions" field. +func (m *DecisionMutation) SetAlertDecisions(i int) { + m.owner = &i +} + +// AlertDecisions returns the value of the "alert_decisions" field in the mutation. +func (m *DecisionMutation) AlertDecisions() (r int, exists bool) { + v := m.owner + if v == nil { + return + } + return *v, true +} + +// OldAlertDecisions returns the old "alert_decisions" field's value of the Decision entity. +// If the Decision object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *DecisionMutation) OldAlertDecisions(ctx context.Context) (v int, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldAlertDecisions is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldAlertDecisions requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldAlertDecisions: %w", err) + } + return oldValue.AlertDecisions, nil +} + +// ClearAlertDecisions clears the value of the "alert_decisions" field. +func (m *DecisionMutation) ClearAlertDecisions() { + m.owner = nil + m.clearedFields[decision.FieldAlertDecisions] = struct{}{} +} + +// AlertDecisionsCleared returns if the "alert_decisions" field was cleared in this mutation. +func (m *DecisionMutation) AlertDecisionsCleared() bool { + _, ok := m.clearedFields[decision.FieldAlertDecisions] + return ok +} + +// ResetAlertDecisions resets all changes to the "alert_decisions" field. +func (m *DecisionMutation) ResetAlertDecisions() { + m.owner = nil + delete(m.clearedFields, decision.FieldAlertDecisions) +} + // SetOwnerID sets the "owner" edge to the Alert entity by id. func (m *DecisionMutation) SetOwnerID(id int) { m.owner = &id @@ -4785,7 +4834,7 @@ func (m *DecisionMutation) ClearOwner() { // OwnerCleared reports if the "owner" edge to the Alert entity was cleared. func (m *DecisionMutation) OwnerCleared() bool { - return m.clearedowner + return m.AlertDecisionsCleared() || m.clearedowner } // OwnerID returns the "owner" edge ID in the mutation. @@ -4831,7 +4880,7 @@ func (m *DecisionMutation) Type() string { // order to get all numeric fields that were incremented/decremented, call // AddedFields(). 
func (m *DecisionMutation) Fields() []string { - fields := make([]string, 0, 15) + fields := make([]string, 0, 16) if m.created_at != nil { fields = append(fields, decision.FieldCreatedAt) } @@ -4877,6 +4926,9 @@ func (m *DecisionMutation) Fields() []string { if m.uuid != nil { fields = append(fields, decision.FieldUUID) } + if m.owner != nil { + fields = append(fields, decision.FieldAlertDecisions) + } return fields } @@ -4915,6 +4967,8 @@ func (m *DecisionMutation) Field(name string) (ent.Value, bool) { return m.Simulated() case decision.FieldUUID: return m.UUID() + case decision.FieldAlertDecisions: + return m.AlertDecisions() } return nil, false } @@ -4954,6 +5008,8 @@ func (m *DecisionMutation) OldField(ctx context.Context, name string) (ent.Value return m.OldSimulated(ctx) case decision.FieldUUID: return m.OldUUID(ctx) + case decision.FieldAlertDecisions: + return m.OldAlertDecisions(ctx) } return nil, fmt.Errorf("unknown Decision field %s", name) } @@ -5068,6 +5124,13 @@ func (m *DecisionMutation) SetField(name string, value ent.Value) error { } m.SetUUID(v) return nil + case decision.FieldAlertDecisions: + v, ok := value.(int) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetAlertDecisions(v) + return nil } return fmt.Errorf("unknown Decision field %s", name) } @@ -5188,6 +5251,9 @@ func (m *DecisionMutation) ClearedFields() []string { if m.FieldCleared(decision.FieldUUID) { fields = append(fields, decision.FieldUUID) } + if m.FieldCleared(decision.FieldAlertDecisions) { + fields = append(fields, decision.FieldAlertDecisions) + } return fields } @@ -5229,6 +5295,9 @@ func (m *DecisionMutation) ClearField(name string) error { case decision.FieldUUID: m.ClearUUID() return nil + case decision.FieldAlertDecisions: + m.ClearAlertDecisions() + return nil } return fmt.Errorf("unknown Decision nullable field %s", name) } @@ -5282,6 +5351,9 @@ func (m *DecisionMutation) ResetField(name string) error { case decision.FieldUUID: 
m.ResetUUID() return nil + case decision.FieldAlertDecisions: + m.ResetAlertDecisions() + return nil } return fmt.Errorf("unknown Decision field %s", name) } @@ -5646,6 +5718,55 @@ func (m *EventMutation) ResetSerialized() { m.serialized = nil } +// SetAlertEvents sets the "alert_events" field. +func (m *EventMutation) SetAlertEvents(i int) { + m.owner = &i +} + +// AlertEvents returns the value of the "alert_events" field in the mutation. +func (m *EventMutation) AlertEvents() (r int, exists bool) { + v := m.owner + if v == nil { + return + } + return *v, true +} + +// OldAlertEvents returns the old "alert_events" field's value of the Event entity. +// If the Event object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. +func (m *EventMutation) OldAlertEvents(ctx context.Context) (v int, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldAlertEvents is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldAlertEvents requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldAlertEvents: %w", err) + } + return oldValue.AlertEvents, nil +} + +// ClearAlertEvents clears the value of the "alert_events" field. +func (m *EventMutation) ClearAlertEvents() { + m.owner = nil + m.clearedFields[event.FieldAlertEvents] = struct{}{} +} + +// AlertEventsCleared returns if the "alert_events" field was cleared in this mutation. +func (m *EventMutation) AlertEventsCleared() bool { + _, ok := m.clearedFields[event.FieldAlertEvents] + return ok +} + +// ResetAlertEvents resets all changes to the "alert_events" field. +func (m *EventMutation) ResetAlertEvents() { + m.owner = nil + delete(m.clearedFields, event.FieldAlertEvents) +} + // SetOwnerID sets the "owner" edge to the Alert entity by id. 
func (m *EventMutation) SetOwnerID(id int) { m.owner = &id @@ -5658,7 +5779,7 @@ func (m *EventMutation) ClearOwner() { // OwnerCleared reports if the "owner" edge to the Alert entity was cleared. func (m *EventMutation) OwnerCleared() bool { - return m.clearedowner + return m.AlertEventsCleared() || m.clearedowner } // OwnerID returns the "owner" edge ID in the mutation. @@ -5704,7 +5825,7 @@ func (m *EventMutation) Type() string { // order to get all numeric fields that were incremented/decremented, call // AddedFields(). func (m *EventMutation) Fields() []string { - fields := make([]string, 0, 4) + fields := make([]string, 0, 5) if m.created_at != nil { fields = append(fields, event.FieldCreatedAt) } @@ -5717,6 +5838,9 @@ func (m *EventMutation) Fields() []string { if m.serialized != nil { fields = append(fields, event.FieldSerialized) } + if m.owner != nil { + fields = append(fields, event.FieldAlertEvents) + } return fields } @@ -5733,6 +5857,8 @@ func (m *EventMutation) Field(name string) (ent.Value, bool) { return m.Time() case event.FieldSerialized: return m.Serialized() + case event.FieldAlertEvents: + return m.AlertEvents() } return nil, false } @@ -5750,6 +5876,8 @@ func (m *EventMutation) OldField(ctx context.Context, name string) (ent.Value, e return m.OldTime(ctx) case event.FieldSerialized: return m.OldSerialized(ctx) + case event.FieldAlertEvents: + return m.OldAlertEvents(ctx) } return nil, fmt.Errorf("unknown Event field %s", name) } @@ -5787,6 +5915,13 @@ func (m *EventMutation) SetField(name string, value ent.Value) error { } m.SetSerialized(v) return nil + case event.FieldAlertEvents: + v, ok := value.(int) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetAlertEvents(v) + return nil } return fmt.Errorf("unknown Event field %s", name) } @@ -5794,13 +5929,16 @@ func (m *EventMutation) SetField(name string, value ent.Value) error { // AddedFields returns all numeric fields that were incremented/decremented 
during // this mutation. func (m *EventMutation) AddedFields() []string { - return nil + var fields []string + return fields } // AddedField returns the numeric value that was incremented/decremented on a field // with the given name. The second boolean return value indicates that this field // was not set, or was not defined in the schema. func (m *EventMutation) AddedField(name string) (ent.Value, bool) { + switch name { + } return nil, false } @@ -5823,6 +5961,9 @@ func (m *EventMutation) ClearedFields() []string { if m.FieldCleared(event.FieldUpdatedAt) { fields = append(fields, event.FieldUpdatedAt) } + if m.FieldCleared(event.FieldAlertEvents) { + fields = append(fields, event.FieldAlertEvents) + } return fields } @@ -5843,6 +5984,9 @@ func (m *EventMutation) ClearField(name string) error { case event.FieldUpdatedAt: m.ClearUpdatedAt() return nil + case event.FieldAlertEvents: + m.ClearAlertEvents() + return nil } return fmt.Errorf("unknown Event nullable field %s", name) } @@ -5863,6 +6007,9 @@ func (m *EventMutation) ResetField(name string) error { case event.FieldSerialized: m.ResetSerialized() return nil + case event.FieldAlertEvents: + m.ResetAlertEvents() + return nil } return fmt.Errorf("unknown Event field %s", name) } @@ -7361,6 +7508,55 @@ func (m *MetaMutation) ResetValue() { m.value = nil } +// SetAlertMetas sets the "alert_metas" field. +func (m *MetaMutation) SetAlertMetas(i int) { + m.owner = &i +} + +// AlertMetas returns the value of the "alert_metas" field in the mutation. +func (m *MetaMutation) AlertMetas() (r int, exists bool) { + v := m.owner + if v == nil { + return + } + return *v, true +} + +// OldAlertMetas returns the old "alert_metas" field's value of the Meta entity. +// If the Meta object wasn't provided to the builder, the object is fetched from the database. +// An error is returned if the mutation operation is not UpdateOne, or the database query fails. 
+func (m *MetaMutation) OldAlertMetas(ctx context.Context) (v int, err error) { + if !m.op.Is(OpUpdateOne) { + return v, errors.New("OldAlertMetas is only allowed on UpdateOne operations") + } + if m.id == nil || m.oldValue == nil { + return v, errors.New("OldAlertMetas requires an ID field in the mutation") + } + oldValue, err := m.oldValue(ctx) + if err != nil { + return v, fmt.Errorf("querying old value for OldAlertMetas: %w", err) + } + return oldValue.AlertMetas, nil +} + +// ClearAlertMetas clears the value of the "alert_metas" field. +func (m *MetaMutation) ClearAlertMetas() { + m.owner = nil + m.clearedFields[meta.FieldAlertMetas] = struct{}{} +} + +// AlertMetasCleared returns if the "alert_metas" field was cleared in this mutation. +func (m *MetaMutation) AlertMetasCleared() bool { + _, ok := m.clearedFields[meta.FieldAlertMetas] + return ok +} + +// ResetAlertMetas resets all changes to the "alert_metas" field. +func (m *MetaMutation) ResetAlertMetas() { + m.owner = nil + delete(m.clearedFields, meta.FieldAlertMetas) +} + // SetOwnerID sets the "owner" edge to the Alert entity by id. func (m *MetaMutation) SetOwnerID(id int) { m.owner = &id @@ -7373,7 +7569,7 @@ func (m *MetaMutation) ClearOwner() { // OwnerCleared reports if the "owner" edge to the Alert entity was cleared. func (m *MetaMutation) OwnerCleared() bool { - return m.clearedowner + return m.AlertMetasCleared() || m.clearedowner } // OwnerID returns the "owner" edge ID in the mutation. @@ -7419,7 +7615,7 @@ func (m *MetaMutation) Type() string { // order to get all numeric fields that were incremented/decremented, call // AddedFields(). 
func (m *MetaMutation) Fields() []string { - fields := make([]string, 0, 4) + fields := make([]string, 0, 5) if m.created_at != nil { fields = append(fields, meta.FieldCreatedAt) } @@ -7432,6 +7628,9 @@ func (m *MetaMutation) Fields() []string { if m.value != nil { fields = append(fields, meta.FieldValue) } + if m.owner != nil { + fields = append(fields, meta.FieldAlertMetas) + } return fields } @@ -7448,6 +7647,8 @@ func (m *MetaMutation) Field(name string) (ent.Value, bool) { return m.Key() case meta.FieldValue: return m.Value() + case meta.FieldAlertMetas: + return m.AlertMetas() } return nil, false } @@ -7465,6 +7666,8 @@ func (m *MetaMutation) OldField(ctx context.Context, name string) (ent.Value, er return m.OldKey(ctx) case meta.FieldValue: return m.OldValue(ctx) + case meta.FieldAlertMetas: + return m.OldAlertMetas(ctx) } return nil, fmt.Errorf("unknown Meta field %s", name) } @@ -7502,6 +7705,13 @@ func (m *MetaMutation) SetField(name string, value ent.Value) error { } m.SetValue(v) return nil + case meta.FieldAlertMetas: + v, ok := value.(int) + if !ok { + return fmt.Errorf("unexpected type %T for field %s", value, name) + } + m.SetAlertMetas(v) + return nil } return fmt.Errorf("unknown Meta field %s", name) } @@ -7509,13 +7719,16 @@ func (m *MetaMutation) SetField(name string, value ent.Value) error { // AddedFields returns all numeric fields that were incremented/decremented during // this mutation. func (m *MetaMutation) AddedFields() []string { - return nil + var fields []string + return fields } // AddedField returns the numeric value that was incremented/decremented on a field // with the given name. The second boolean return value indicates that this field // was not set, or was not defined in the schema. 
func (m *MetaMutation) AddedField(name string) (ent.Value, bool) { + switch name { + } return nil, false } @@ -7538,6 +7751,9 @@ func (m *MetaMutation) ClearedFields() []string { if m.FieldCleared(meta.FieldUpdatedAt) { fields = append(fields, meta.FieldUpdatedAt) } + if m.FieldCleared(meta.FieldAlertMetas) { + fields = append(fields, meta.FieldAlertMetas) + } return fields } @@ -7558,6 +7774,9 @@ func (m *MetaMutation) ClearField(name string) error { case meta.FieldUpdatedAt: m.ClearUpdatedAt() return nil + case meta.FieldAlertMetas: + m.ClearAlertMetas() + return nil } return fmt.Errorf("unknown Meta nullable field %s", name) } @@ -7578,6 +7797,9 @@ func (m *MetaMutation) ResetField(name string) error { case meta.FieldValue: m.ResetValue() return nil + case meta.FieldAlertMetas: + m.ResetAlertMetas() + return nil } return fmt.Errorf("unknown Meta field %s", name) } diff --git a/pkg/database/ent/schema/decision.go b/pkg/database/ent/schema/decision.go index ae399e850..b7a99fb7a 100644 --- a/pkg/database/ent/schema/decision.go +++ b/pkg/database/ent/schema/decision.go @@ -38,6 +38,7 @@ func (Decision) Fields() []ent.Field { field.String("origin"), field.Bool("simulated").Default(false), field.String("uuid").Optional(), //this uuid is mostly here to ensure that CAPI/PAPI has a unique id for each decision + field.Int("alert_decisions").Optional(), } } @@ -46,6 +47,7 @@ func (Decision) Edges() []ent.Edge { return []ent.Edge{ edge.From("owner", Alert.Type). Ref("decisions"). + Field("alert_decisions"). 
Unique(), } } @@ -55,5 +57,6 @@ func (Decision) Indexes() []ent.Index { index.Fields("start_ip", "end_ip"), index.Fields("value"), index.Fields("until"), + index.Fields("alert_decisions"), } } diff --git a/pkg/database/ent/schema/event.go b/pkg/database/ent/schema/event.go index f31294021..6b6d2733f 100644 --- a/pkg/database/ent/schema/event.go +++ b/pkg/database/ent/schema/event.go @@ -4,6 +4,7 @@ import ( "entgo.io/ent" "entgo.io/ent/schema/edge" "entgo.io/ent/schema/field" + "entgo.io/ent/schema/index" "github.com/crowdsecurity/crowdsec/pkg/types" ) @@ -23,6 +24,7 @@ func (Event) Fields() []ent.Field { UpdateDefault(types.UtcNow).Nillable().Optional(), field.Time("time"), field.String("serialized").MaxLen(8191), + field.Int("alert_events").Optional(), } } @@ -31,6 +33,13 @@ func (Event) Edges() []ent.Edge { return []ent.Edge{ edge.From("owner", Alert.Type). Ref("events"). + Field("alert_events"). Unique(), } } + +func (Event) Indexes() []ent.Index { + return []ent.Index{ + index.Fields("alert_events"), + } +} diff --git a/pkg/database/ent/schema/meta.go b/pkg/database/ent/schema/meta.go index 121e7a288..1a84bb1b6 100644 --- a/pkg/database/ent/schema/meta.go +++ b/pkg/database/ent/schema/meta.go @@ -4,6 +4,7 @@ import ( "entgo.io/ent" "entgo.io/ent/schema/edge" "entgo.io/ent/schema/field" + "entgo.io/ent/schema/index" "github.com/crowdsecurity/crowdsec/pkg/types" ) @@ -23,6 +24,7 @@ func (Meta) Fields() []ent.Field { UpdateDefault(types.UtcNow).Nillable().Optional(), field.String("key"), field.String("value").MaxLen(4095), + field.Int("alert_metas").Optional(), } } @@ -31,6 +33,13 @@ func (Meta) Edges() []ent.Edge { return []ent.Edge{ edge.From("owner", Alert.Type). Ref("metas"). + Field("alert_metas"). 
Unique(), } } + +func (Meta) Indexes() []ent.Index { + return []ent.Index{ + index.Fields("alert_metas"), + } +} diff --git a/pkg/exprhelpers/crowdsec_cti_test.go b/pkg/exprhelpers/crowdsec_cti_test.go index 41afcd6ee..51ab5f8a3 100644 --- a/pkg/exprhelpers/crowdsec_cti_test.go +++ b/pkg/exprhelpers/crowdsec_cti_test.go @@ -9,9 +9,11 @@ import ( "testing" "time" - "github.com/crowdsecurity/crowdsec/pkg/cticlient" - "github.com/crowdsecurity/crowdsec/pkg/types" "github.com/stretchr/testify/assert" + + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + + "github.com/crowdsecurity/crowdsec/pkg/cticlient" ) var sampledata = map[string]cticlient.SmokeItem{ @@ -106,7 +108,7 @@ func smokeHandler(req *http.Request) *http.Response { func TestInvalidAuth(t *testing.T) { defer ShutdownCrowdsecCTI() - if err := InitCrowdsecCTI(types.StrPtr("asdasd"), nil, nil, nil); err != nil { + if err := InitCrowdsecCTI(ptr.Of("asdasd"), nil, nil, nil); err != nil { t.Fatalf("failed to init CTI : %s", err) } //Replace the client created by InitCrowdsecCTI with one that uses a custom transport @@ -148,7 +150,7 @@ func TestNoKey(t *testing.T) { func TestCache(t *testing.T) { defer ShutdownCrowdsecCTI() cacheDuration := 1 * time.Second - if err := InitCrowdsecCTI(types.StrPtr(validApiKey), &cacheDuration, nil, nil); err != nil { + if err := InitCrowdsecCTI(ptr.Of(validApiKey), &cacheDuration, nil, nil); err != nil { t.Fatalf("failed to init CTI : %s", err) } //Replace the client created by InitCrowdsecCTI with one that uses a custom transport diff --git a/pkg/exprhelpers/expr_lib.go b/pkg/exprhelpers/expr_lib.go index f4559157d..f4e1f4722 100644 --- a/pkg/exprhelpers/expr_lib.go +++ b/pkg/exprhelpers/expr_lib.go @@ -377,6 +377,13 @@ var exprFuncs = []exprCustomFunc{ new(func(string, string) bool), }, }, + { + name: "KeyExists", + function: KeyExists, + signature: []interface{}{ + new(func(string, map[string]any) bool), + }, + }, { name: "LogInfo", function: LogInfo, @@ -384,6 +391,34 @@ var 
exprFuncs = []exprCustomFunc{ new(func(string, ...interface{}) bool), }, }, + { + name: "B64Decode", + function: B64Decode, + signature: []interface{}{ + new(func(string) string), + }, + }, + { + name: "UnmarshalJSON", + function: UnmarshalJSON, + signature: []interface{}{ + new(func(string, map[string]interface{}, string) error), + }, + }, + { + name: "ParseKV", + function: ParseKV, + signature: []interface{}{ + new(func(string, map[string]interface{}, string) error), + }, + }, + { + name: "Hostname", + function: Hostname, + signature: []interface{}{ + new(func() (string, error)), + }, + }, } //go 1.20 "CutPrefix": strings.CutPrefix, diff --git a/pkg/exprhelpers/exprlib_test.go b/pkg/exprhelpers/exprlib_test.go index 7fb471680..53f7d7d15 100644 --- a/pkg/exprhelpers/exprlib_test.go +++ b/pkg/exprhelpers/exprlib_test.go @@ -13,8 +13,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + "github.com/crowdsecurity/crowdsec/pkg/csconfig" - "github.com/crowdsecurity/crowdsec/pkg/cstest" "github.com/crowdsecurity/crowdsec/pkg/database" "github.com/crowdsecurity/crowdsec/pkg/models" "github.com/crowdsecurity/crowdsec/pkg/types" @@ -220,7 +222,7 @@ func TestRegexpCacheBehavior(t *testing.T) { require.NoError(t, err) //cache with no TTL - err = RegexpCacheInit(filename, types.DataSource{Type: "regex", Size: types.IntPtr(1)}) + err = RegexpCacheInit(filename, types.DataSource{Type: "regex", Size: ptr.Of(1)}) require.NoError(t, err) ret, _ := RegexpInFile("crowdsec", filename) @@ -233,7 +235,7 @@ func TestRegexpCacheBehavior(t *testing.T) { //cache with TTL ttl := 500 * time.Millisecond - err = RegexpCacheInit(filename, types.DataSource{Type: "regex", Size: types.IntPtr(2), TTL: &ttl}) + err = RegexpCacheInit(filename, types.DataSource{Type: "regex", Size: ptr.Of(2), TTL: &ttl}) require.NoError(t, err) ret, _ = RegexpInFile("crowdsec", 
filename) @@ -1304,5 +1306,126 @@ func TestToString(t *testing.T) { require.Equal(t, tc.expected, output) }) } - +} + +func TestB64Decode(t *testing.T) { + err := Init(nil) + require.NoError(t, err) + + tests := []struct { + name string + value interface{} + expected string + expr string + expectedBuildErr bool + expectedRuntimeErr bool + }{ + { + name: "B64Decode() test: valid string", + value: "Zm9v", + expected: "foo", + expr: `B64Decode(value)`, + expectedBuildErr: false, + }, + { + name: "B64Decode() test: invalid string", + value: "foo", + expected: "", + expr: `B64Decode(value)`, + expectedBuildErr: false, + expectedRuntimeErr: true, + }, + { + name: "B64Decode() test: invalid type", + value: 1, + expected: "", + expr: `B64Decode(value)`, + expectedBuildErr: true, + }, + } + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + vm, err := expr.Compile(tc.expr, GetExprOptions(map[string]interface{}{"value": tc.value})...) + if tc.expectedBuildErr { + assert.Error(t, err) + return + } + assert.NoError(t, err) + output, err := expr.Run(vm, map[string]interface{}{"value": tc.value}) + if tc.expectedRuntimeErr { + assert.Error(t, err) + return + } + assert.NoError(t, err) + require.Equal(t, tc.expected, output) + }) + } +} + +func TestParseKv(t *testing.T) { + err := Init(nil) + require.NoError(t, err) + + tests := []struct { + name string + value string + expected map[string]string + expr string + expectedBuildErr bool + expectedRuntimeErr bool + }{ + { + name: "ParseKv() test: valid string", + value: "foo=bar", + expected: map[string]string{"foo": "bar"}, + expr: `ParseKV(value, out, "a")`, + }, + { + name: "ParseKv() test: valid string", + value: "foo=bar bar=foo", + expected: map[string]string{"foo": "bar", "bar": "foo"}, + expr: `ParseKV(value, out, "a")`, + }, + { + name: "ParseKv() test: valid string", + value: "foo=bar bar=foo foo=foo", + expected: map[string]string{"foo": "foo", "bar": "foo"}, + expr: `ParseKV(value, out, "a")`, + 
}, + { + name: "ParseKV() test: quoted string", + value: `foo="bar=toto"`, + expected: map[string]string{"foo": "bar=toto"}, + expr: `ParseKV(value, out, "a")`, + }, + { + name: "ParseKV() test: empty unquoted string", + value: `foo= bar=toto`, + expected: map[string]string{"bar": "toto", "foo": ""}, + expr: `ParseKV(value, out, "a")`, + }, + { + name: "ParseKV() test: empty quoted string ", + value: `foo="" bar=toto`, + expected: map[string]string{"bar": "toto", "foo": ""}, + expr: `ParseKV(value, out, "a")`, + }, + } + + for _, tc := range tests { + tc := tc + t.Run(tc.name, func(t *testing.T) { + outMap := make(map[string]interface{}) + env := map[string]interface{}{ + "value": tc.value, + "out": outMap, + } + vm, err := expr.Compile(tc.expr, GetExprOptions(env)...) + assert.NoError(t, err) + _, err = expr.Run(vm, env) + assert.NoError(t, err) + assert.Equal(t, tc.expected, outMap["a"]) + }) + } } diff --git a/pkg/exprhelpers/helpers.go b/pkg/exprhelpers/helpers.go index 4e2bf23f8..a5f45c4b0 100644 --- a/pkg/exprhelpers/helpers.go +++ b/pkg/exprhelpers/helpers.go @@ -2,6 +2,7 @@ package exprhelpers import ( "bufio" + "encoding/base64" "fmt" "net" "net/url" @@ -19,15 +20,15 @@ import ( "github.com/davecgh/go-spew/spew" "github.com/prometheus/client_golang/prometheus" log "github.com/sirupsen/logrus" - "github.com/umahmood/haversine" + "github.com/wasilibs/go-re2" + + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" "github.com/crowdsecurity/crowdsec/pkg/cache" "github.com/crowdsecurity/crowdsec/pkg/database" "github.com/crowdsecurity/crowdsec/pkg/fflag" "github.com/crowdsecurity/crowdsec/pkg/types" - - "github.com/wasilibs/go-re2" ) var dataFile map[string][]string @@ -50,6 +51,8 @@ var dbClient *database.Client var exprFunctionOptions []expr.Option +var keyValuePattern = regexp.MustCompile(`(?P[^=\s]+)=(?:"(?P[^"\\]*(?:\\.[^"\\]*)*)"|(?P[^=\s]+)|\s*)`) + func GetExprOptions(ctx map[string]interface{}) []expr.Option { ret := []expr.Option{} ret = append(ret, 
exprFunctionOptions...) @@ -88,13 +91,13 @@ func RegexpCacheInit(filename string, CacheCfg types.DataSource) error { //cache is enabled if CacheCfg.Size == nil { - CacheCfg.Size = types.IntPtr(50) + CacheCfg.Size = ptr.Of(50) } gc := gcache.New(*CacheCfg.Size) if CacheCfg.Strategy == nil { - CacheCfg.Strategy = types.StrPtr("LRU") + CacheCfg.Strategy = ptr.Of("LRU") } switch *CacheCfg.Strategy { case "LRU": @@ -585,3 +588,58 @@ func Match(params ...any) (any, error) { } return matched, nil } + +func B64Decode(params ...any) (any, error) { + encoded := params[0].(string) + decoded, err := base64.StdEncoding.DecodeString(encoded) + if err != nil { + return "", err + } + return string(decoded), nil +} + +func ParseKV(params ...any) (any, error) { + + blob := params[0].(string) + target := params[1].(map[string]interface{}) + prefix := params[2].(string) + + matches := keyValuePattern.FindAllStringSubmatch(blob, -1) + if matches == nil { + log.Errorf("could not find any key/value pair in line") + return nil, fmt.Errorf("invalid input format") + } + if _, ok := target[prefix]; !ok { + target[prefix] = make(map[string]string) + } else { + _, ok := target[prefix].(map[string]string) + if !ok { + log.Errorf("ParseKV: target is not a map[string]string") + return nil, fmt.Errorf("target is not a map[string]string") + } + } + for _, match := range matches { + key := "" + value := "" + for i, name := range keyValuePattern.SubexpNames() { + if name == "key" { + key = match[i] + } else if name == "quoted_value" && match[i] != "" { + value = match[i] + } else if name == "value" && match[i] != "" { + value = match[i] + } + } + target[prefix].(map[string]string)[key] = value + } + log.Tracef("unmarshaled KV: %+v", target[prefix]) + return nil, nil +} + +func Hostname(params ...any) (any, error) { + hostname, err := os.Hostname() + if err != nil { + return "", err + } + return hostname, nil +} diff --git a/pkg/exprhelpers/jsonextract.go b/pkg/exprhelpers/jsonextract.go index 
12dbb9da8..a874122ff 100644 --- a/pkg/exprhelpers/jsonextract.go +++ b/pkg/exprhelpers/jsonextract.go @@ -163,3 +163,20 @@ func ToJson(params ...any) (any, error) { } return string(b), nil } + +// Func UnmarshalJSON(jsonBlob []byte, target interface{}) error { +func UnmarshalJSON(params ...any) (any, error) { + jsonBlob := params[0].(string) + target := params[1].(map[string]interface{}) + key := params[2].(string) + + var out interface{} + + err := json.Unmarshal([]byte(jsonBlob), &out) + if err != nil { + log.Errorf("UnmarshalJSON : %s", err) + return "", err + } + target[key] = out + return "", nil +} diff --git a/pkg/exprhelpers/jsonextract_test.go b/pkg/exprhelpers/jsonextract_test.go index 594087474..481c7d723 100644 --- a/pkg/exprhelpers/jsonextract_test.go +++ b/pkg/exprhelpers/jsonextract_test.go @@ -1,9 +1,10 @@ package exprhelpers import ( - "log" "testing" + log "github.com/sirupsen/logrus" + "github.com/antonmedv/expr" "github.com/stretchr/testify/assert" ) @@ -304,3 +305,67 @@ func TestToJson(t *testing.T) { }) } } + +func TestUnmarshalJSON(t *testing.T) { + err := Init(nil) + assert.NoError(t, err) + tests := []struct { + name string + json string + expectResult interface{} + expr string + }{ + { + name: "convert int", + json: "42", + expectResult: float64(42), + expr: "UnmarshalJSON(json, out, 'a')", + }, + { + name: "convert slice", + json: `["foo","bar"]`, + expectResult: []interface{}{"foo", "bar"}, + expr: "UnmarshalJSON(json, out, 'a')", + }, + { + name: "convert map", + json: `{"foo":"bar"}`, + expectResult: map[string]interface{}{"foo": "bar"}, + expr: "UnmarshalJSON(json, out, 'a')", + }, + { + name: "convert struct", + json: `{"Foo":"bar"}`, + expectResult: map[string]interface{}{"Foo": "bar"}, + expr: "UnmarshalJSON(json, out, 'a')", + }, + { + name: "convert complex struct", + json: `{"Foo":"bar","Bar":{"Baz":"baz"},"Bla":["foo","bar"]}`, + expectResult: map[string]interface{}{ + "Foo": "bar", + "Bar": map[string]interface{}{ + "Baz": 
"baz", + }, + "Bla": []interface{}{"foo", "bar"}, + }, + expr: "UnmarshalJSON(json, out, 'a')", + }, + } + + for _, test := range tests { + t.Run(test.name, func(t *testing.T) { + outMap := make(map[string]interface{}) + env := map[string]interface{}{ + "json": test.json, + "out": outMap, + } + vm, err := expr.Compile(test.expr, GetExprOptions(env)...) + assert.NoError(t, err) + _, err = expr.Run(vm, env) + assert.NoError(t, err) + assert.Equal(t, test.expectResult, outMap["a"]) + }) + } + +} diff --git a/pkg/fflag/features_test.go b/pkg/fflag/features_test.go index aac8c079a..be7434c2a 100644 --- a/pkg/fflag/features_test.go +++ b/pkg/fflag/features_test.go @@ -9,7 +9,8 @@ import ( logtest "github.com/sirupsen/logrus/hooks/test" "github.com/stretchr/testify/require" - "github.com/crowdsecurity/crowdsec/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/crowdsec/pkg/fflag" ) diff --git a/pkg/hubtest/parser_assert.go b/pkg/hubtest/parser_assert.go index 0f0eaedcc..3d52f37e5 100644 --- a/pkg/hubtest/parser_assert.go +++ b/pkg/hubtest/parser_assert.go @@ -246,12 +246,47 @@ func (p *ParserAssert) AutoGenParserAssert() string { } ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Enriched["%s"] == "%s"`+"\n", stage, parser, pidx, ekey, Escape(eval)) } + for ekey, eval := range result.Evt.Unmarshaled { + if eval == "" { + continue + } + base := fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Unmarshaled["%s"]`, stage, parser, pidx, ekey) + for _, line := range p.buildUnmarshaledAssert("", eval) { + ret += base + line + } + } } } } return ret } +func (p *ParserAssert) buildUnmarshaledAssert(ekey string, eval interface{}) []string { + ret := make([]string, 0) + switch val := eval.(type) { + case map[string]interface{}: + for k, v := range val { + ret = append(ret, p.buildUnmarshaledAssert(fmt.Sprintf(`%s["%s"]`, ekey, k), v)...) 
+ } + case map[interface{}]interface{}: + for k, v := range val { + ret = append(ret, p.buildUnmarshaledAssert(fmt.Sprintf(`%s["%s"]`, ekey, k), v)...) + } + case []interface{}: + case string: + ret = append(ret, fmt.Sprintf(`%s == "%s"`+"\n", ekey, Escape(val))) + case bool: + ret = append(ret, fmt.Sprintf(`%s == %t`+"\n", ekey, val)) + case int: + ret = append(ret, fmt.Sprintf(`%s == %d`+"\n", ekey, val)) + case float64: + ret = append(ret, fmt.Sprintf(`%s == %f`+"\n", ekey, val)) + default: + log.Warningf("unknown type '%T' for key '%s'", val, ekey) + } + return ret +} + func LoadParserDump(filepath string) (*ParserResults, error) { var pdump ParserResults diff --git a/pkg/leakybucket/bucket.go b/pkg/leakybucket/bucket.go index 8ce03e41a..004d5b9d8 100644 --- a/pkg/leakybucket/bucket.go +++ b/pkg/leakybucket/bucket.go @@ -6,6 +6,8 @@ import ( "sync/atomic" "time" + "github.com/crowdsecurity/go-cs-lib/pkg/trace" + "github.com/crowdsecurity/crowdsec/pkg/time/rate" "github.com/crowdsecurity/crowdsec/pkg/types" "github.com/davecgh/go-spew/spew" @@ -202,7 +204,7 @@ func LeakRoutine(leaky *Leaky) error { firstEvent = true ) - defer types.CatchPanic(fmt.Sprintf("crowdsec/LeakRoutine/%s", leaky.Name)) + defer trace.CatchPanic(fmt.Sprintf("crowdsec/LeakRoutine/%s", leaky.Name)) BucketsCurrentCount.With(prometheus.Labels{"name": leaky.Name}).Inc() defer BucketsCurrentCount.With(prometheus.Labels{"name": leaky.Name}).Dec() diff --git a/pkg/leakybucket/manager_load.go b/pkg/leakybucket/manager_load.go index 3789c749d..1e212f815 100644 --- a/pkg/leakybucket/manager_load.go +++ b/pkg/leakybucket/manager_load.go @@ -200,7 +200,7 @@ func LoadBuckets(cscfg *csconfig.CrowdsecServiceCfg, files []string, tomb *tomb. 
log.Tracef("no version in %s : %s, assuming '1.0'", bucketFactory.Name, f) bucketFactory.FormatVersion = "1.0" } - ok, err := cwversion.Statisfies(bucketFactory.FormatVersion, cwversion.Constraint_scenario) + ok, err := cwversion.Satisfies(bucketFactory.FormatVersion, cwversion.Constraint_scenario) if err != nil { log.Fatalf("Failed to check version : %s", err) } diff --git a/pkg/metabase/api.go b/pkg/metabase/api.go index 4e33b9349..99cbf9ec7 100644 --- a/pkg/metabase/api.go +++ b/pkg/metabase/api.go @@ -6,7 +6,8 @@ import ( "net/http" "time" - "github.com/crowdsecurity/crowdsec/pkg/cwversion" + "github.com/crowdsecurity/go-cs-lib/pkg/version" + "github.com/dghubble/sling" log "github.com/sirupsen/logrus" ) @@ -37,7 +38,7 @@ var ( func NewAPIClient(url string) (*APIClient, error) { httpClient := &http.Client{Timeout: 20 * time.Second} return &APIClient{ - CTX: sling.New().Client(httpClient).Base(url).Set("User-Agent", fmt.Sprintf("crowdsec/%s", cwversion.VersionStr())), + CTX: sling.New().Client(httpClient).Base(url).Set("User-Agent", fmt.Sprintf("crowdsec/%s", version.String())), Client: httpClient, }, nil } diff --git a/pkg/parser/enrich_date_test.go b/pkg/parser/enrich_date_test.go index 5a3e08ffd..0a9ac67f8 100644 --- a/pkg/parser/enrich_date_test.go +++ b/pkg/parser/enrich_date_test.go @@ -3,8 +3,11 @@ package parser import ( "testing" - "github.com/crowdsecurity/crowdsec/pkg/types" log "github.com/sirupsen/logrus" + + "github.com/crowdsecurity/go-cs-lib/pkg/ptr" + + "github.com/crowdsecurity/crowdsec/pkg/types" ) func TestDateParse(t *testing.T) { @@ -20,7 +23,7 @@ func TestDateParse(t *testing.T) { StrTime: "2019-10-12T07:20:50.52Z", }, expected_err: nil, - expected_strTime: types.StrPtr("2019-10-12T07:20:50.52Z"), + expected_strTime: ptr.Of("2019-10-12T07:20:50.52Z"), }, { name: "02/Jan/2006:15:04:05 -0700", @@ -28,7 +31,7 @@ func TestDateParse(t *testing.T) { StrTime: "02/Jan/2006:15:04:05 -0700", }, expected_err: nil, - expected_strTime: 
types.StrPtr("2006-01-02T15:04:05-07:00"), + expected_strTime: ptr.Of("2006-01-02T15:04:05-07:00"), }, { name: "Dec 17 08:17:43", @@ -37,7 +40,7 @@ func TestDateParse(t *testing.T) { StrTimeFormat: "2006 X 2 zz 15X04X05 oneone Jan", }, expected_err: nil, - expected_strTime: types.StrPtr("2011-12-17T08:17:43Z"), + expected_strTime: ptr.Of("2011-12-17T08:17:43Z"), }, } diff --git a/pkg/parser/runtime.go b/pkg/parser/runtime.go index 4541eafd9..cbeee91ee 100644 --- a/pkg/parser/runtime.go +++ b/pkg/parser/runtime.go @@ -133,11 +133,11 @@ func (n *Node) ProcessStatics(statics []types.ExtraField, event *types.Event) er case int: value = strconv.Itoa(out) case map[string]interface{}: - clog.Warnf("Expression returned a map, please use ToJsonString() to convert it to string if you want to keep it as is, or refine your expression to extract a string") + clog.Warnf("Expression '%s' returned a map, please use ToJsonString() to convert it to string if you want to keep it as is, or refine your expression to extract a string", static.ExpValue) case []interface{}: - clog.Warnf("Expression returned a map, please use ToJsonString() to convert it to string if you want to keep it as is, or refine your expression to extract a string") + clog.Warnf("Expression '%s' returned an array, please use ToJsonString() to convert it to string if you want to keep it as is, or refine your expression to extract a string", static.ExpValue) case nil: - clog.Debugf("Expression returned nil, skipping") + clog.Debugf("Expression '%s' returned nil, skipping", static.ExpValue) default: clog.Errorf("unexpected return type for RunTimeValue : %T", output) return errors.New("unexpected return type for RunTimeValue") @@ -164,7 +164,7 @@ func (n *Node) ProcessStatics(statics []types.ExtraField, event *types.Event) er processed = true clog.Debugf("+ Method %s('%s') returned %d entries to merge in .Enriched\n", static.Method, value, len(ret)) //Hackish check, but those methods do not return any data by design - 
if len(ret) == 0 && static.Method != "UnmarshalXML" && static.Method != "UnmarshalJSON" { + if len(ret) == 0 && static.Method != "UnmarshalJSON" { clog.Debugf("+ Method '%s' empty response on '%s'", static.Method, value) } for k, v := range ret { @@ -265,6 +265,9 @@ func Parse(ctx UnixParserCtx, xp types.Event, nodes []Node) (types.Event, error) if event.Meta == nil { event.Meta = make(map[string]string) } + if event.Unmarshaled == nil { + event.Unmarshaled = make(map[string]interface{}) + } if event.Type == types.LOG { log.Tracef("INPUT '%s'", event.Line.Raw) } @@ -323,11 +326,11 @@ func Parse(ctx UnixParserCtx, xp types.Event, nodes []Node) (types.Event, error) } clog.Tracef("node (%s) ret : %v", node.rn, ret) if ParseDump { + StageParseMutex.Lock() if len(StageParseCache[stage][node.Name]) == 0 { - StageParseMutex.Lock() StageParseCache[stage][node.Name] = make([]ParserResult, 0) - StageParseMutex.Unlock() } + StageParseMutex.Unlock() evtcopy := deepcopy.Copy(event) parserInfo := ParserResult{Evt: evtcopy.(types.Event), Success: ret} StageParseMutex.Lock() diff --git a/pkg/parser/stage.go b/pkg/parser/stage.go index 4ffa50f2f..3bdaed1f0 100644 --- a/pkg/parser/stage.go +++ b/pkg/parser/stage.go @@ -83,7 +83,7 @@ func LoadStages(stageFiles []Stagefile, pctx *UnixParserCtx, ectx EnricherCtx) ( log.Tracef("no version in %s, assuming '1.0'", node.Name) node.FormatVersion = "1.0" } - ok, err := cwversion.Statisfies(node.FormatVersion, cwversion.Constraint_parser) + ok, err := cwversion.Satisfies(node.FormatVersion, cwversion.Constraint_parser) if err != nil { log.Fatalf("Failed to check version : %s", err) } diff --git a/pkg/parser/tests/json-unmarshal/test.yaml b/pkg/parser/tests/json-unmarshal/test.yaml index 9d4e2e025..4b4154690 100644 --- a/pkg/parser/tests/json-unmarshal/test.yaml +++ b/pkg/parser/tests/json-unmarshal/test.yaml @@ -8,11 +8,13 @@ lines: #these are the results we expect from the parser results: - Unmarshaled: - foo: "bar" - pouet: 42 + JSON: + 
foo: "bar" + pouet: 42 Process: true Stage: s00-raw - - Unmarshaled: {} + - Unmarshaled: + JSON: {} Process: true Stage: s00-raw diff --git a/pkg/setup/detect_test.go b/pkg/setup/detect_test.go index 44aa36af2..4f6ef0c33 100644 --- a/pkg/setup/detect_test.go +++ b/pkg/setup/detect_test.go @@ -10,7 +10,8 @@ import ( "github.com/lithammer/dedent" "github.com/stretchr/testify/require" - "github.com/crowdsecurity/crowdsec/pkg/cstest" + "github.com/crowdsecurity/go-cs-lib/pkg/cstest" + "github.com/crowdsecurity/crowdsec/pkg/setup" ) diff --git a/pkg/types/utils.go b/pkg/types/utils.go index 342fa6372..caed6961e 100644 --- a/pkg/types/utils.go +++ b/pkg/types/utils.go @@ -9,15 +9,12 @@ import ( "os" "path/filepath" "regexp" - "runtime/debug" "strconv" "strings" "time" log "github.com/sirupsen/logrus" "gopkg.in/natefinch/lumberjack.v2" - - "github.com/crowdsecurity/crowdsec/pkg/cwversion" ) var logFormatter log.Formatter @@ -25,7 +22,6 @@ var LogOutput *lumberjack.Logger //io.Writer var logLevel log.Level func SetDefaultLoggerConfig(cfgMode string, cfgFolder string, cfgLevel log.Level, maxSize int, maxFiles int, maxAge int, compress *bool, forceColors bool) error { - /*Configure logs*/ if cfgMode == "file" { _maxsize := 500 @@ -87,7 +83,6 @@ func ConfigureLogger(clog *log.Logger) error { } func Clone(a, b interface{}) error { - buff := new(bytes.Buffer) enc := gob.NewEncoder(buff) dec := gob.NewDecoder(buff) @@ -100,40 +95,6 @@ func Clone(a, b interface{}) error { return nil } -func WriteStackTrace(iErr interface{}) string { - tmpfile, err := os.CreateTemp("", "crowdsec-crash.*.txt") - if err != nil { - log.Fatal(err) - } - if _, err := tmpfile.Write([]byte(fmt.Sprintf("error : %+v\n", iErr))); err != nil { - tmpfile.Close() - log.Fatal(err) - } - if _, err := tmpfile.Write([]byte(cwversion.ShowStr())); err != nil { - tmpfile.Close() - log.Fatal(err) - } - if _, err := tmpfile.Write(debug.Stack()); err != nil { - tmpfile.Close() - log.Fatal(err) - } - if err := 
tmpfile.Close(); err != nil { - log.Fatal(err) - } - return tmpfile.Name() -} - -//CatchPanic is a util func that we should call from all go-routines to ensure proper stacktrace handling -func CatchPanic(component string) { - if r := recover(); r != nil { - log.Errorf("crowdsec - goroutine %s crashed : %s", component, r) - log.Errorf("please report this error to https://github.com/crowdsecurity/crowdsec/") - filename := WriteStackTrace(r) - log.Errorf("stacktrace/report is written to %s : please join it to your issue", filename) - log.Fatalf("crowdsec stopped") - } -} - func ParseDuration(d string) (time.Duration, error) { durationStr := d if strings.HasSuffix(d, "d") { @@ -181,7 +142,6 @@ func copyFileContents(src, dst string) (err error) { /*copy the file, ioutile doesn't offer the feature*/ func CopyFile(sourceSymLink, destinationFile string) (err error) { - sourceFile, err := filepath.EvalSymlinks(sourceSymLink) if err != nil { log.Infof("Not a symlink : %s", err) @@ -216,31 +176,6 @@ func CopyFile(sourceSymLink, destinationFile string) (err error) { return } -func StrPtr(s string) *string { - return &s -} - -func IntPtr(i int) *int { - return &i -} - -func Int32Ptr(i int32) *int32 { - return &i -} - -func BoolPtr(b bool) *bool { - return &b -} - -func InSlice(str string, slice []string) bool { - for _, item := range slice { - if str == item { - return true - } - } - return false -} - func UtcNow() time.Time { return time.Now().UTC() } diff --git a/pkg/yamlpatch/merge.go b/pkg/yamlpatch/merge.go deleted file mode 100644 index 8a61b6470..000000000 --- a/pkg/yamlpatch/merge.go +++ /dev/null @@ -1,168 +0,0 @@ -// -// from https://github.com/uber-go/config/tree/master/internal/merge -// -// Copyright (c) 2019 Uber Technologies, Inc. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package yamlpatch - -import ( - "bytes" - "fmt" - "io" - - "github.com/pkg/errors" - - yaml "gopkg.in/yaml.v2" -) - -type ( - // YAML has three fundamental types. When unmarshaled into interface{}, - // they're represented like this. - mapping = map[interface{}]interface{} - sequence = []interface{} -) - -// YAML deep-merges any number of YAML sources, with later sources taking -// priority over earlier ones. -// -// Maps are deep-merged. For example, -// {"one": 1, "two": 2} + {"one": 42, "three": 3} -// == {"one": 42, "two": 2, "three": 3} -// Sequences are replaced. For example, -// {"foo": [1, 2, 3]} + {"foo": [4, 5, 6]} -// == {"foo": [4, 5, 6]} -// -// In non-strict mode, duplicate map keys are allowed within a single source, -// with later values overwriting previous ones. 
Attempting to merge -// mismatched types (e.g., merging a sequence into a map) replaces the old -// value with the new. -// -// Enabling strict mode returns errors in both of the above cases. -func YAML(sources [][]byte, strict bool) (*bytes.Buffer, error) { - var merged interface{} - var hasContent bool - for _, r := range sources { - d := yaml.NewDecoder(bytes.NewReader(r)) - d.SetStrict(strict) - - var contents interface{} - if err := d.Decode(&contents); err == io.EOF { - // Skip empty and comment-only sources, which we should handle - // differently from explicit nils. - continue - } else if err != nil { - return nil, fmt.Errorf("couldn't decode source: %v", err) - } - - hasContent = true - pair, err := merge(merged, contents, strict) - if err != nil { - return nil, err // error is already descriptive enough - } - merged = pair - } - - buf := &bytes.Buffer{} - if !hasContent { - // No sources had any content. To distinguish this from a source with just - // an explicit top-level null, return an empty buffer. - return buf, nil - } - enc := yaml.NewEncoder(buf) - if err := enc.Encode(merged); err != nil { - return nil, errors.Wrap(err, "couldn't re-serialize merged YAML") - } - return buf, nil -} - -func merge(into, from interface{}, strict bool) (interface{}, error) { - // It's possible to handle this with a mass of reflection, but we only need - // to merge whole YAML files. Since we're always unmarshaling into - // interface{}, we only need to handle a few types. This ends up being - // cleaner if we just handle each case explicitly. - if into == nil { - return from, nil - } - if from == nil { - // Allow higher-priority YAML to explicitly nil out lower-priority entries. 
- return nil, nil - } - if IsScalar(into) && IsScalar(from) { - return from, nil - } - if IsSequence(into) && IsSequence(from) { - return from, nil - } - if IsMapping(into) && IsMapping(from) { - return mergeMapping(into.(mapping), from.(mapping), strict) - } - // YAML types don't match, so no merge is possible. For backward - // compatibility, ignore mismatches unless we're in strict mode and return - // the higher-priority value. - if !strict { - return from, nil - } - return nil, fmt.Errorf("can't merge a %s into a %s", describe(from), describe(into)) -} - -func mergeMapping(into, from mapping, strict bool) (mapping, error) { - merged := make(mapping, len(into)) - for k, v := range into { - merged[k] = v - } - for k := range from { - m, err := merge(merged[k], from[k], strict) - if err != nil { - return nil, err - } - merged[k] = m - } - return merged, nil -} - -// IsMapping reports whether a type is a mapping in YAML, represented as a -// map[interface{}]interface{}. -func IsMapping(i interface{}) bool { - _, is := i.(mapping) - return is -} - -// IsSequence reports whether a type is a sequence in YAML, represented as an -// []interface{}. -func IsSequence(i interface{}) bool { - _, is := i.(sequence) - return is -} - -// IsScalar reports whether a type is a scalar value in YAML. -func IsScalar(i interface{}) bool { - return !IsMapping(i) && !IsSequence(i) -} - -func describe(i interface{}) string { - if IsMapping(i) { - return "mapping" - } - if IsSequence(i) { - return "sequence" - } - return "scalar" -} diff --git a/pkg/yamlpatch/merge_test.go b/pkg/yamlpatch/merge_test.go deleted file mode 100644 index e86f6fea7..000000000 --- a/pkg/yamlpatch/merge_test.go +++ /dev/null @@ -1,238 +0,0 @@ -// Copyright (c) 2018 Uber Technologies, Inc. 
-// -// Permission is hereby granted, free of charge, to any person obtaining a copy -// of this software and associated documentation files (the "Software"), to deal -// in the Software without restriction, including without limitation the rights -// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -// copies of the Software, and to permit persons to whom the Software is -// furnished to do so, subject to the following conditions: -// -// The above copyright notice and this permission notice shall be included in -// all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -// THE SOFTWARE. - -package yamlpatch - -import ( - "bytes" - "os" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - yaml "gopkg.in/yaml.v2" -) - -func trimcr(s string) string { - return strings.ReplaceAll(s, "\r\n", "\n") -} - -func mustRead(t testing.TB, fname string) []byte { - contents, err := os.ReadFile(fname) - require.NoError(t, err, "failed to read file: %s", fname) - return contents -} - -func dump(t testing.TB, actual, expected string) { - // It's impossible to debug YAML if the actual and expected values are - // printed on a single line. - t.Logf("Actual:\n\n%s\n\n", actual) - t.Logf("Expected:\n\n%s\n\n", expected) -} - -func strip(s string) string { - // It's difficult to write string constants that are valid YAML. Normalize - // strings for ease of testing. 
- s = strings.TrimSpace(s) - s = strings.Replace(s, "\t", " ", -1) - return s -} - -func canonicalize(t testing.TB, s string) string { - // round-trip to canonicalize formatting - var i interface{} - require.NoError(t, - yaml.Unmarshal([]byte(strip(s)), &i), - "canonicalize: couldn't unmarshal YAML", - ) - formatted, err := yaml.Marshal(i) - require.NoError(t, err, "canonicalize: couldn't marshal YAML") - return string(bytes.TrimSpace(formatted)) -} - -func unmarshal(t testing.TB, s string) interface{} { - var i interface{} - require.NoError(t, yaml.Unmarshal([]byte(strip(s)), &i), "unmarshaling failed") - return i -} - -func succeeds(t testing.TB, strict bool, left, right, expect string) { - l, r := unmarshal(t, left), unmarshal(t, right) - m, err := merge(l, r, strict) - require.NoError(t, err, "merge failed") - - actualBytes, err := yaml.Marshal(m) - require.NoError(t, err, "couldn't marshal merged structure") - actual := canonicalize(t, string(actualBytes)) - expect = canonicalize(t, expect) - if !assert.Equal(t, expect, actual) { - dump(t, actual, expect) - } -} - -func fails(t testing.TB, strict bool, left, right string) { - _, err := merge(unmarshal(t, left), unmarshal(t, right), strict) - assert.Error(t, err, "merge succeeded") -} - -func TestIntegration(t *testing.T) { - base := mustRead(t, "testdata/base.yaml") - prod := mustRead(t, "testdata/production.yaml") - expect := mustRead(t, "testdata/expect.yaml") - - merged, err := YAML([][]byte{base, prod}, true /* strict */) - require.NoError(t, err, "merge failed") - - if !assert.Equal(t, trimcr(string(expect)), merged.String(), "unexpected contents") { - dump(t, merged.String(), string(expect)) - } -} - -func TestEmpty(t *testing.T) { - full := []byte("foo: bar\n") - null := []byte("~") - - tests := []struct { - desc string - sources [][]byte - expect string - }{ - {"empty base", [][]byte{nil, full}, string(full)}, - {"empty override", [][]byte{full, nil}, string(full)}, - {"both empty", [][]byte{nil, nil}, 
""}, - {"null base", [][]byte{null, full}, string(full)}, - {"null override", [][]byte{full, null}, "null\n"}, - {"empty base and null override", [][]byte{nil, null}, "null\n"}, - {"null base and empty override", [][]byte{null, nil}, "null\n"}, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.desc, func(t *testing.T) { - merged, err := YAML(tt.sources, true /* strict */) - require.NoError(t, err, "merge failed") - assert.Equal(t, tt.expect, merged.String(), "wrong contents after merge") - }) - } -} - -func TestSuccess(t *testing.T) { - left := ` -fun: [maserati, porsche] -practical: {toyota: camry, honda: accord} -occupants: - honda: {driver: jane, backseat: [nate]} - ` - right := ` -fun: [lamborghini, porsche] -practical: {honda: civic, nissan: altima} -occupants: - honda: {passenger: arthur, backseat: [nora]} - ` - expect := ` -fun: [lamborghini, porsche] -practical: {toyota: camry, honda: civic, nissan: altima} -occupants: - honda: {passenger: arthur, driver: jane, backseat: [nora]} - ` - succeeds(t, true, left, right, expect) - succeeds(t, false, left, right, expect) -} - -func TestErrors(t *testing.T) { - check := func(t testing.TB, strict bool, sources ...[]byte) error { - _, err := YAML(sources, strict) - return err - } - t.Run("tabs in source", func(t *testing.T) { - src := []byte("foo:\n\tbar:baz") - assert.Error(t, check(t, false, src), "expected error in permissive mode") - assert.Error(t, check(t, true, src), "expected error in strict mode") - }) - - t.Run("duplicated keys", func(t *testing.T) { - src := []byte("{foo: bar, foo: baz}") - assert.NoError(t, check(t, false, src), "expected success in permissive mode") - assert.Error(t, check(t, true, src), "expected error in permissive mode") - }) - - t.Run("merge error", func(t *testing.T) { - left := []byte("foo: [1, 2]") - right := []byte("foo: {bar: baz}") - assert.NoError(t, check(t, false, left, right), "expected success in permissive mode") - assert.Error(t, check(t, true, left, right), 
"expected error in strict mode") - }) -} - -func TestMismatchedTypes(t *testing.T) { - tests := []struct { - desc string - left, right string - }{ - {"sequence and mapping", "[one, two]", "{foo: bar}"}, - {"sequence and scalar", "[one, two]", "foo"}, - {"mapping and scalar", "{foo: bar}", "foo"}, - {"nested", "{foo: [one, two]}", "{foo: bar}"}, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.desc+" strict", func(t *testing.T) { - fails(t, true, tt.left, tt.right) - }) - t.Run(tt.desc+" permissive", func(t *testing.T) { - // prefer the higher-priority value - succeeds(t, false, tt.left, tt.right, tt.right) - }) - } -} - -func TestBooleans(t *testing.T) { - // YAML helpfully interprets many strings as Booleans. - tests := []struct { - in, out string - }{ - {"yes", "true"}, - {"YES", "true"}, - {"on", "true"}, - {"ON", "true"}, - {"no", "false"}, - {"NO", "false"}, - {"off", "false"}, - {"OFF", "false"}, - } - - for _, tt := range tests { - tt := tt - t.Run(tt.in, func(t *testing.T) { - succeeds(t, true, "", tt.in, tt.out) - succeeds(t, false, "", tt.in, tt.out) - }) - } -} - -func TestExplicitNil(t *testing.T) { - base := `foo: {one: two}` - override := `foo: ~` - expect := `foo: ~` - succeeds(t, true, base, override, expect) - succeeds(t, false, base, override, expect) -} diff --git a/pkg/yamlpatch/patcher.go b/pkg/yamlpatch/patcher.go deleted file mode 100644 index 43a2a0cb8..000000000 --- a/pkg/yamlpatch/patcher.go +++ /dev/null @@ -1,173 +0,0 @@ -package yamlpatch - -import ( - "bytes" - "io" - "os" - - "github.com/pkg/errors" - log "github.com/sirupsen/logrus" - "gopkg.in/yaml.v2" -) - -type Patcher struct { - BaseFilePath string - PatchFilePath string - quiet bool -} - -func NewPatcher(filePath string, suffix string) *Patcher { - return &Patcher{ - BaseFilePath: filePath, - PatchFilePath: filePath + suffix, - quiet: false, - } -} - - -// SetQuiet sets the quiet flag, which will log as DEBUG_LEVEL instead of INFO -func (p *Patcher) SetQuiet(quiet bool) 
{ - p.quiet = quiet -} - - -// read a single YAML file, check for errors (the merge package doesn't) then return the content as bytes. -func readYAML(filePath string) ([]byte, error) { - var content []byte - - var err error - - if content, err = os.ReadFile(filePath); err != nil { - return nil, errors.Wrap(err, "while reading yaml file") - } - - var yamlMap map[interface{}]interface{} - if err = yaml.Unmarshal(content, &yamlMap); err != nil { - return nil, errors.Wrap(err, filePath) - } - - return content, nil -} - -// MergedPatchContent reads a YAML file and, if it exists, its patch file, -// then merges them and returns it serialized. -func (p *Patcher) MergedPatchContent() ([]byte, error) { - var err error - - var base []byte - - base, err = readYAML(p.BaseFilePath) - if err != nil { - return nil, err - } - - var over []byte - - over, err = readYAML(p.PatchFilePath) - if errors.Is(err, os.ErrNotExist) { - return base, nil - } - - if err != nil { - return nil, err - } - - logf := log.Infof - if p.quiet { - logf = log.Debugf - } - logf("Patching yaml: '%s' with '%s'", p.BaseFilePath, p.PatchFilePath) - - var patched *bytes.Buffer - - // strict mode true, will raise errors for duplicate map keys and - // overriding with a different type - patched, err = YAML([][]byte{base, over}, true) - if err != nil { - return nil, err - } - - return patched.Bytes(), nil -} - -// read multiple YAML documents inside a file, and writes them to a buffer -// separated by the appropriate '---' terminators. 
-func decodeDocuments(file *os.File, buf *bytes.Buffer, finalDashes bool) error { - var ( - err error - docBytes []byte - ) - - dec := yaml.NewDecoder(file) - dec.SetStrict(true) - - dashTerminator := false - - for { - yml := make(map[interface{}]interface{}) - - err = dec.Decode(&yml) - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return errors.Wrapf(err, "while decoding %s", file.Name()) - } - - docBytes, err = yaml.Marshal(&yml) - if err != nil { - return errors.Wrapf(err, "while marshaling %s", file.Name()) - } - - if dashTerminator { - buf.Write([]byte("---\n")) - } - - buf.Write(docBytes) - dashTerminator = true - } - if dashTerminator && finalDashes { - buf.Write([]byte("---\n")) - } - return nil -} - -// PrependedPatchContent collates the base .yaml file with the .yaml.patch, by putting -// the content of the patch BEFORE the base document. The result is a multi-document -// YAML in all cases, even if the base and patch files are single documents. -func (p *Patcher) PrependedPatchContent() ([]byte, error) { - var ( - result bytes.Buffer - patchFile *os.File - baseFile *os.File - err error - ) - - patchFile, err = os.Open(p.PatchFilePath) - // optional file, ignore if it does not exist - if err != nil && !errors.Is(err, os.ErrNotExist) { - return nil, errors.Wrapf(err, "while opening %s", p.PatchFilePath) - } - - if patchFile != nil { - if err = decodeDocuments(patchFile, &result, true); err != nil { - return nil, err - } - logf := log.Infof - if p.quiet { - logf = log.Debugf - } - logf("Prepending yaml: '%s' with '%s'", p.BaseFilePath, p.PatchFilePath) - } - - baseFile, err = os.Open(p.BaseFilePath) - if err != nil { - return nil, errors.Wrapf(err, "while opening %s", p.BaseFilePath) - } - - if err = decodeDocuments(baseFile, &result, false); err != nil { - return nil, err - } - - return result.Bytes(), nil -} diff --git a/pkg/yamlpatch/patcher_test.go b/pkg/yamlpatch/patcher_test.go deleted file mode 100644 index be4a855cf..000000000 --- 
a/pkg/yamlpatch/patcher_test.go +++ /dev/null @@ -1,313 +0,0 @@ -package yamlpatch_test - -import ( - "os" - "path/filepath" - "testing" - - "github.com/crowdsecurity/crowdsec/pkg/yamlpatch" - "github.com/stretchr/testify/require" -) - -// similar to the one in cstest, but with test number too. We cannot import -// cstest here because of circular dependency. -func requireErrorContains(t *testing.T, err error, expectedErr string) { - t.Helper() - - if expectedErr != "" { - require.ErrorContains(t, err, expectedErr) - - return - } - - require.NoError(t, err) -} - -func TestMergedPatchContent(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - base string - patch string - expected string - expectedErr string - }{ - { - "invalid yaml in base", - "notayaml", - "", - "", - "config.yaml: yaml: unmarshal errors:", - }, - { - "invalid yaml in base (detailed message)", - "notayaml", - "", - "", - "cannot unmarshal !!str `notayaml`", - }, - { - "invalid yaml in patch", - "", - "notayaml", - "", - "config.yaml.local: yaml: unmarshal errors:", - }, - { - "invalid yaml in patch (detailed message)", - "", - "notayaml", - "", - "cannot unmarshal !!str `notayaml`", - }, - { - "basic merge", - "{'first':{'one':1,'two':2},'second':{'three':3}}", - "{'first':{'one':10,'dos':2}}", - "{'first':{'one':10,'dos':2,'two':2},'second':{'three':3}}", - "", - }, - - // bools and zero values; here the "mergo" package had issues - // so we used something simpler. 
- - { - "bool merge - off if false", - "bool: on", - "bool: off", - "bool: false", - "", - }, - { - "bool merge - on is true", - "bool: off", - "bool: on", - "bool: true", - "", - }, - { - "string is not a bool - on to off", - "{'bool': 'on'}", - "{'bool': 'off'}", - "{'bool': 'off'}", - "", - }, - { - "string is not a bool - off to on", - "{'bool': 'off'}", - "{'bool': 'on'}", - "{'bool': 'on'}", - "", - }, - { - "bool merge - true to false", - "{'bool': true}", - "{'bool': false}", - "{'bool': false}", - "", - }, - { - "bool merge - false to true", - "{'bool': false}", - "{'bool': true}", - "{'bool': true}", - "", - }, - { - "string merge - value to value", - "{'string': 'value'}", - "{'string': ''}", - "{'string': ''}", - "", - }, - { - "sequence merge - value to empty", - "{'sequence': [1, 2]}", - "{'sequence': []}", - "{'sequence': []}", - "", - }, - { - "map merge - value to value", - "{'map': {'one': 1, 'two': 2}}", - "{'map': {}}", - "{'map': {'one': 1, 'two': 2}}", - "", - }, - - // mismatched types - - { - "can't merge a sequence into a mapping", - "map: {'key': 'value'}", - "map: ['value1', 'value2']", - "", - "can't merge a sequence into a mapping", - }, - { - "can't merge a scalar into a mapping", - "map: {'key': 'value'}", - "map: 3", - "", - "can't merge a scalar into a mapping", - }, - { - "can't merge a mapping into a sequence", - "sequence: ['value1', 'value2']", - "sequence: {'key': 'value'}", - "", - "can't merge a mapping into a sequence", - }, - { - "can't merge a scalar into a sequence", - "sequence: ['value1', 'value2']", - "sequence: 3", - "", - "can't merge a scalar into a sequence", - }, - { - "can't merge a sequence into a scalar", - "scalar: true", - "scalar: ['value1', 'value2']", - "", - "can't merge a sequence into a scalar", - }, - { - "can't merge a mapping into a scalar", - "scalar: true", - "scalar: {'key': 'value'}", - "", - "can't merge a mapping into a scalar", - }, - } - - for _, tc := range tests { - tc := tc - 
t.Run(tc.name, func(t *testing.T) { - t.Parallel() - dirPath, err := os.MkdirTemp("", "yamlpatch") - require.NoError(t, err) - - defer os.RemoveAll(dirPath) - - configPath := filepath.Join(dirPath, "config.yaml") - patchPath := filepath.Join(dirPath, "config.yaml.local") - err = os.WriteFile(configPath, []byte(tc.base), 0o600) - require.NoError(t, err) - - err = os.WriteFile(patchPath, []byte(tc.patch), 0o600) - require.NoError(t, err) - - patcher := yamlpatch.NewPatcher(configPath, ".local") - patchedBytes, err := patcher.MergedPatchContent() - requireErrorContains(t, err, tc.expectedErr) - require.YAMLEq(t, tc.expected, string(patchedBytes)) - }) - } -} - -func TestPrependedPatchContent(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - base string - patch string - expected string - expectedErr string - }{ - // we test with scalars here, because YAMLeq does not work - // with multi-document files, so we need char-to-char comparison - // which is noisy with sequences and (unordered) mappings - { - "newlines are always appended, if missing, by yaml.Marshal()", - "foo: bar", - "", - "foo: bar\n", - "", - }, - { - "prepend empty document", - "foo: bar\n", - "", - "foo: bar\n", - "", - }, - { - "prepend a document to another", - "foo: bar", - "baz: qux", - "baz: qux\n---\nfoo: bar\n", - "", - }, - { - "prepend document with same key", - "foo: true", - "foo: false", - "foo: false\n---\nfoo: true\n", - "", - }, - { - "prepend multiple documents", - "one: 1\n---\ntwo: 2\n---\none: 3", - "four: 4\n---\none: 1.1", - "four: 4\n---\none: 1.1\n---\none: 1\n---\ntwo: 2\n---\none: 3\n", - "", - }, - { - "invalid yaml in base", - "blablabla", - "", - "", - "config.yaml: yaml: unmarshal errors:", - }, - { - "invalid yaml in base (detailed message)", - "blablabla", - "", - "", - "cannot unmarshal !!str `blablabla`", - }, - { - "invalid yaml in patch", - "", - "blablabla", - "", - "config.yaml.local: yaml: unmarshal errors:", - }, - { - "invalid yaml in patch 
(detailed message)", - "", - "blablabla", - "", - "cannot unmarshal !!str `blablabla`", - }, - } - - for _, tc := range tests { - tc := tc - t.Run(tc.name, func(t *testing.T) { - t.Parallel() - dirPath, err := os.MkdirTemp("", "yamlpatch") - require.NoError(t, err) - - defer os.RemoveAll(dirPath) - - configPath := filepath.Join(dirPath, "config.yaml") - patchPath := filepath.Join(dirPath, "config.yaml.local") - - err = os.WriteFile(configPath, []byte(tc.base), 0o600) - require.NoError(t, err) - - err = os.WriteFile(patchPath, []byte(tc.patch), 0o600) - require.NoError(t, err) - - patcher := yamlpatch.NewPatcher(configPath, ".local") - patchedBytes, err := patcher.PrependedPatchContent() - requireErrorContains(t, err, tc.expectedErr) - // YAMLeq does not handle multiple documents - require.Equal(t, tc.expected, string(patchedBytes)) - }) - } -} diff --git a/pkg/yamlpatch/testdata/base.yaml b/pkg/yamlpatch/testdata/base.yaml deleted file mode 100644 index 4ac551ad5..000000000 --- a/pkg/yamlpatch/testdata/base.yaml +++ /dev/null @@ -1,13 +0,0 @@ -fun: - - maserati - - porsche - -practical: - toyota: camry - honda: accord - -occupants: - honda: - driver: jane - backseat: - - nate diff --git a/pkg/yamlpatch/testdata/expect.yaml b/pkg/yamlpatch/testdata/expect.yaml deleted file mode 100644 index c19091563..000000000 --- a/pkg/yamlpatch/testdata/expect.yaml +++ /dev/null @@ -1,13 +0,0 @@ -fun: -- lamborghini -- porsche -occupants: - honda: - backseat: - - nora - driver: jane - passenger: arthur -practical: - honda: civic - nissan: altima - toyota: camry diff --git a/pkg/yamlpatch/testdata/production.yaml b/pkg/yamlpatch/testdata/production.yaml deleted file mode 100644 index 7dab2aeee..000000000 --- a/pkg/yamlpatch/testdata/production.yaml +++ /dev/null @@ -1,13 +0,0 @@ -fun: - - lamborghini - - porsche - -practical: - honda: civic - nissan: altima - -occupants: - honda: - passenger: arthur - backseat: - - nora diff --git a/plugins/notifications/dummy/Makefile 
b/plugins/notifications/dummy/Makefile index e47c2fab0..612ec6c86 100644 --- a/plugins/notifications/dummy/Makefile +++ b/plugins/notifications/dummy/Makefile @@ -4,17 +4,20 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -# Go parameters +PLUGIN = dummy +BINARY_NAME = notification-$(PLUGIN)$(EXT) + GOCMD = go GOBUILD = $(GOCMD) build -GOCLEAN = $(GOCMD) clean -GOTEST = $(GOCMD) test -GOGET = $(GOCMD) get - -BINARY_NAME = notification-dummy$(EXT) build: clean $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) +.PHONY: clean clean: @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) + +.PHONY: vendor +vendor: + @echo "vendoring $(PLUGIN) plugin..." + @$(GOCMD) mod vendor diff --git a/plugins/notifications/email/Makefile b/plugins/notifications/email/Makefile index e80b4a70c..a386625ac 100644 --- a/plugins/notifications/email/Makefile +++ b/plugins/notifications/email/Makefile @@ -4,17 +4,20 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -# Go parameters +PLUGIN = email +BINARY_NAME = notification-$(PLUGIN)$(EXT) + GOCMD = go GOBUILD = $(GOCMD) build -GOCLEAN = $(GOCMD) clean -GOTEST = $(GOCMD) test -GOGET = $(GOCMD) get - -BINARY_NAME = notification-email$(EXT) build: clean $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) +.PHONY: clean clean: @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) + +.PHONY: vendor +vendor: + @echo "vendoring $(PLUGIN) plugin..." 
+ @$(GOCMD) mod vendor diff --git a/plugins/notifications/http/Makefile b/plugins/notifications/http/Makefile index 8d0592fd3..44ee8c58f 100644 --- a/plugins/notifications/http/Makefile +++ b/plugins/notifications/http/Makefile @@ -4,17 +4,20 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -# Go parameters +PLUGIN=http +BINARY_NAME = notification-$(PLUGIN)$(EXT) + GOCMD = go GOBUILD = $(GOCMD) build -GOCLEAN = $(GOCMD) clean -GOTEST = $(GOCMD) test -GOGET = $(GOCMD) get - -BINARY_NAME = notification-http$(EXT) build: clean $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) +.PHONY: clean clean: @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) + +.PHONY: vendor +vendor: + @echo "vendoring $(PLUGIN) plugin..." + @$(GOCMD) mod vendor diff --git a/plugins/notifications/slack/Makefile b/plugins/notifications/slack/Makefile index 406b6872a..e950eba92 100644 --- a/plugins/notifications/slack/Makefile +++ b/plugins/notifications/slack/Makefile @@ -4,17 +4,20 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -# Go parameters +PLUGIN=slack +BINARY_NAME = notification-$(PLUGIN)$(EXT) + GOCMD = go GOBUILD = $(GOCMD) build -GOCLEAN = $(GOCMD) clean -GOTEST = $(GOCMD) test -GOGET = $(GOCMD) get - -BINARY_NAME = notification-slack$(EXT) build: clean $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) +.PHONY: clean clean: @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) + +.PHONY: vendor +vendor: + @echo "vendoring $(PLUGIN) plugin..." 
+ @$(GOCMD) mod vendor diff --git a/plugins/notifications/splunk/Makefile b/plugins/notifications/splunk/Makefile index 6a24b5c32..a49c87bd6 100644 --- a/plugins/notifications/splunk/Makefile +++ b/plugins/notifications/splunk/Makefile @@ -4,17 +4,20 @@ ifeq ($(OS), Windows_NT) EXT = .exe endif -# Go parameters +PLUGIN=splunk +BINARY_NAME = notification-$(PLUGIN)$(EXT) + GOCMD = go GOBUILD = $(GOCMD) build -GOCLEAN = $(GOCMD) clean -GOTEST = $(GOCMD) test -GOGET = $(GOCMD) get - -BINARY_NAME = notification-splunk$(EXT) build: clean $(GOBUILD) $(LD_OPTS) $(BUILD_VENDOR_FLAGS) -o $(BINARY_NAME) +.PHONY: clean clean: @$(RM) $(BINARY_NAME) $(WIN_IGNORE_ERR) + +.PHONY: vendor +vendor: + @echo "vendoring $(PLUGIN) plugin..." + @$(GOCMD) mod vendor diff --git a/test/ansible/vars/go.yml b/test/ansible/vars/go.yml index 97a5457c3..683b4fbbe 100644 --- a/test/ansible/vars/go.yml +++ b/test/ansible/vars/go.yml @@ -1,5 +1,5 @@ # vim: set ft=yaml.ansible: --- -golang_version: "1.20.3" +golang_version: "1.20.4" golang_install_dir: "/opt/go/{{ golang_version }}" diff --git a/test/bats.mk b/test/bats.mk index 9d70a6eec..65bb4a286 100644 --- a/test/bats.mk +++ b/test/bats.mk @@ -24,7 +24,8 @@ DATA_DIR = $(LOCAL_DIR)/var/lib/crowdsec/data LOCAL_INIT_DIR = $(TEST_DIR)/local-init LOG_DIR = $(LOCAL_DIR)/var/log PID_DIR = $(LOCAL_DIR)/var/run -PLUGIN_DIR = $(LOCAL_DIR)/lib/crowdsec/plugins +# do not shadow $(PLUGINS_DIR) from the main Makefile +BATS_PLUGIN_DIR = $(LOCAL_DIR)/lib/crowdsec/plugins DB_BACKEND ?= sqlite CROWDSEC ?= $(BIN_DIR)/crowdsec @@ -43,7 +44,7 @@ export CONFIG_YAML="$(CONFIG_DIR)/config.yaml" export LOCAL_INIT_DIR="$(LOCAL_INIT_DIR)" export LOG_DIR="$(LOG_DIR)" export PID_DIR="$(PID_DIR)" -export PLUGIN_DIR="$(PLUGIN_DIR)" +export PLUGIN_DIR="$(BATS_PLUGIN_DIR)" export DB_BACKEND="$(DB_BACKEND)" export INIT_BACKEND="$(INIT_BACKEND)" export CONFIG_BACKEND="$(CONFIG_BACKEND)" @@ -66,10 +67,10 @@ bats-check-requirements: # Build and installs crowdsec in a local 
directory. Rebuilds if already exists. bats-build: bats-environment bats-check-requirements - @mkdir -p $(BIN_DIR) $(LOG_DIR) $(PID_DIR) $(PLUGIN_DIR) - @TEST_COVERAGE=$(TEST_COVERAGE) DEFAULT_CONFIGDIR=$(CONFIG_DIR) DEFAULT_DATADIR=$(DATA_DIR) $(MAKE) goversion crowdsec cscli plugins + @$(MKDIR) $(BIN_DIR) $(LOG_DIR) $(PID_DIR) $(BATS_PLUGIN_DIR) + @TEST_COVERAGE=$(TEST_COVERAGE) DEFAULT_CONFIGDIR=$(CONFIG_DIR) DEFAULT_DATADIR=$(DATA_DIR) $(MAKE) build @install -m 0755 cmd/crowdsec/crowdsec cmd/crowdsec-cli/cscli $(BIN_DIR)/ - @install -m 0755 plugins/notifications/*/notification-* $(PLUGIN_DIR)/ + @install -m 0755 plugins/notifications/*/notification-* $(BATS_PLUGIN_DIR)/ # Create a reusable package with initial configuration + data bats-fixture: @@ -82,6 +83,7 @@ bats-clean: @$(RM) $(LOCAL_INIT_DIR) $(WIN_IGNORE_ERR) @$(RM) $(TEST_DIR)/dyn-bats/*.bats $(WIN_IGNORE_ERR) @$(RM) test/.environment.sh $(WIN_IGNORE_ERR) + @$(RM) test/coverage/* $(WIN_IGNORE_ERR) # Run the test suite bats-test: bats-environment bats-check-requirements @@ -98,10 +100,7 @@ bats-lint: @shellcheck --version >/dev/null 2>&1 || (echo "ERROR: shellcheck is required."; exit 1) @shellcheck -x $(TEST_DIR)/bats/*.bats - bats-test-package: bats-environment $(TEST_DIR)/instance-data make $(TEST_DIR)/run-tests $(TEST_DIR)/bats $(TEST_DIR)/run-tests $(TEST_DIR)/dyn-bats - -.PHONY: bats-environment diff --git a/test/bats/01_cscli.bats b/test/bats/01_cscli.bats index e96a9bc69..a01d936b7 100644 --- a/test/bats/01_cscli.bats +++ b/test/bats/01_cscli.bats @@ -110,6 +110,16 @@ teardown() { assert_output '["http://127.0.0.1:8080/","githubciXXXXXXXXXXXXXXXXXXXXXXXX"]' } +@test "cscli config show-yaml" { + rune -0 cscli config show-yaml + rune -0 yq .common.log_level <(output) + assert_output "info" + echo 'common: {"log_level": "debug"}' >> "${CONFIG_YAML}.local" + rune -0 cscli config show-yaml + rune -0 yq .common.log_level <(output) + assert_output "debug" +} + @test "cscli config backup / restore" { # 
test that we need a valid path # disabled because in CI, the empty string is not passed as a parameter @@ -218,7 +228,6 @@ teardown() { assert_output --partial "Route" assert_output --partial '/v1/watchers/login' assert_output --partial "Local Api Metrics:" - } @test "'cscli completion' with or without configuration file" { diff --git a/test/bats/02_nolapi.bats b/test/bats/02_nolapi.bats index 2b8e1ef70..c457900ee 100644 --- a/test/bats/02_nolapi.bats +++ b/test/bats/02_nolapi.bats @@ -24,35 +24,33 @@ teardown() { #---------- @test "test without -no-api flag" { - run -124 --separate-stderr timeout 2s "${CROWDSEC}" + rune -124 timeout 2s "${CROWDSEC}" # from `man timeout`: If the command times out, and --preserve-status is not set, then exit with status 124. } @test "crowdsec should not run without LAPI (-no-api flag)" { # really needs 4 secs on slow boxes - run -1 --separate-stderr timeout 4s "${CROWDSEC}" -no-api + rune -1 timeout 4s "${CROWDSEC}" -no-api } @test "crowdsec should not run without LAPI (no api.server in configuration file)" { config_disable_lapi config_log_stderr # really needs 4 secs on slow boxes - run -1 --separate-stderr timeout 4s "${CROWDSEC}" - + rune -1 timeout 4s "${CROWDSEC}" assert_stderr --partial "crowdsec local API is disabled" } @test "capi status shouldn't be ok without api.server" { config_disable_lapi - run -1 --separate-stderr cscli capi status - + rune -1 cscli capi status assert_stderr --partial "crowdsec local API is disabled" assert_stderr --partial "There is no configuration on 'api.server:'" } @test "cscli config show -o human" { config_disable_lapi - run -0 cscli config show -o human + rune -0 cscli config show -o human assert_output --partial "Global:" assert_output --partial "Crowdsec:" assert_output --partial "cscli:" @@ -62,9 +60,9 @@ teardown() { @test "cscli config backup" { config_disable_lapi backupdir=$(TMPDIR="${BATS_TEST_TMPDIR}" mktemp -u) - run -0 cscli config backup "${backupdir}" - assert_output --partial 
"Starting configuration backup" - run -1 --separate-stderr cscli config backup "${backupdir}" + rune -0 cscli config backup "${backupdir}" + assert_stderr --partial "Starting configuration backup" + rune -1 cscli config backup "${backupdir}" rm -rf -- "${backupdir:?}" assert_stderr --partial "failed to backup config" @@ -74,7 +72,7 @@ teardown() { @test "lapi status shouldn't be ok without api.server" { config_disable_lapi ./instance-crowdsec start || true - run -1 --separate-stderr cscli machines list + rune -1 cscli machines list assert_stderr --partial "local API is disabled, please run this command on the local API machine" } @@ -82,7 +80,7 @@ teardown() { skip 'need to trigger metrics with a live parse' config_disable_lapi ./instance-crowdsec start - run -0 --separate-stderr cscli metrics + rune -0 cscli metrics assert_output --partial "ROUTE" assert_output --partial "/v1/watchers/login" diff --git a/test/bats/03_noagent.bats b/test/bats/03_noagent.bats index 211f62947..12c66d2c0 100644 --- a/test/bats/03_noagent.bats +++ b/test/bats/03_noagent.bats @@ -23,25 +23,25 @@ teardown() { #---------- @test "with agent: test without -no-cs flag" { - run -124 timeout 2s "${CROWDSEC}" + rune -124 timeout 2s "${CROWDSEC}" # from `man timeout`: If the command times out, and --preserve-status is not set, then exit with status 124. 
} @test "no agent: crowdsec LAPI should run (-no-cs flag)" { - run -124 timeout 2s "${CROWDSEC}" -no-cs + rune -124 timeout 2s "${CROWDSEC}" -no-cs } @test "no agent: crowdsec LAPI should run (no crowdsec_service in configuration file)" { config_disable_agent config_log_stderr - run -124 --separate-stderr timeout 2s "${CROWDSEC}" + rune -124 timeout 2s "${CROWDSEC}" assert_stderr --partial "crowdsec agent is disabled" } @test "no agent: cscli config show" { config_disable_agent - run -0 --separate-stderr cscli config show -o human + rune -0 cscli config show -o human assert_output --partial "Global:" assert_output --partial "cscli:" assert_output --partial "Local API Server:" @@ -52,9 +52,9 @@ teardown() { @test "no agent: cscli config backup" { config_disable_agent backupdir=$(TMPDIR="${BATS_TEST_TMPDIR}" mktemp -u) - run -0 cscli config backup "${backupdir}" - assert_output --partial "Starting configuration backup" - run -1 --separate-stderr cscli config backup "${backupdir}" + rune -0 cscli config backup "${backupdir}" + assert_stderr --partial "Starting configuration backup" + rune -1 cscli config backup "${backupdir}" assert_stderr --partial "failed to backup config" assert_stderr --partial "file exists" @@ -64,13 +64,13 @@ teardown() { @test "no agent: lapi status should be ok" { config_disable_agent ./instance-crowdsec start - run -0 --separate-stderr cscli lapi status + rune -0 cscli lapi status assert_stderr --partial "You can successfully interact with Local API (LAPI)" } @test "cscli metrics" { config_disable_agent ./instance-crowdsec start - run -0 cscli lapi status - run -0 cscli metrics + rune -0 cscli lapi status + rune -0 cscli metrics } diff --git a/test/bats/04_capi.bats b/test/bats/04_capi.bats index cc74abd91..7015f2c5d 100644 --- a/test/bats/04_capi.bats +++ b/test/bats/04_capi.bats @@ -21,12 +21,12 @@ setup() { @test "cscli capi status" { config_enable_capi - run -0 cscli capi register --schmilblick githubciXXXXXXXXXXXXXXXXXXXXXXXX - run -0 
cscli capi status - assert_output --partial "Loaded credentials from" - assert_output --partial "Trying to authenticate with username" - assert_output --partial " on https://api.crowdsec.net/" - assert_output --partial "You can successfully interact with Central API (CAPI)" + rune -0 cscli capi register --schmilblick githubciXXXXXXXXXXXXXXXXXXXXXXXX + rune -0 cscli capi status + assert_stderr --partial "Loaded credentials from" + assert_stderr --partial "Trying to authenticate with username" + assert_stderr --partial " on https://api.crowdsec.net/" + assert_stderr --partial "You can successfully interact with Central API (CAPI)" } @test "cscli alerts list: receive a community pull when capi is enabled" { @@ -37,17 +37,14 @@ setup() { [[ $(cscli alerts list -a -o json 2>/dev/null || cscli alerts list -o json) != "null" ]] && break done - run --separate-stderr cscli alerts list -a -o json - if [[ "${status}" -ne 0 ]]; then - run --separate-stderr cscli alerts list -o json - fi - run -0 jq -r '. | length' <(output) + rune -0 cscli alerts list -a -o json + rune -0 jq -r '. | length' <(output) refute_output 0 } @test "we have exactly one machine, localhost" { - run -0 --separate-stderr cscli machines list -o json - run -0 jq -c '[. | length, .[0].machineId[0:32], .[0].isValidated, .[0].ipAddress]' <(output) + rune -0 cscli machines list -o json + rune -0 jq -c '[. 
| length, .[0].machineId[0:32], .[0].isValidated, .[0].ipAddress]' <(output) assert_output '[1,"githubciXXXXXXXXXXXXXXXXXXXXXXXX",true,"127.0.0.1"]' } @@ -55,13 +52,13 @@ setup() { ./instance-crowdsec stop config_disable_agent ./instance-crowdsec start - run -0 --separate-stderr cscli capi status + rune -0 cscli capi status assert_stderr --partial "You can successfully interact with Central API (CAPI)" } @test "cscli capi status: fails without credentials" { ONLINE_API_CREDENTIALS_YAML="$(config_get '.api.server.online_client.credentials_path')" rm "${ONLINE_API_CREDENTIALS_YAML}" - run -1 --separate-stderr cscli capi status + rune -1 cscli capi status assert_stderr --partial "Local API is disabled, please run this command on the local API machine: loading online client credentials: failed to read api server credentials configuration file '${ONLINE_API_CREDENTIALS_YAML}': open ${ONLINE_API_CREDENTIALS_YAML}: no such file or directory" } diff --git a/test/bats/04_nocapi.bats b/test/bats/04_nocapi.bats index 533f1e84a..3e282f1fe 100644 --- a/test/bats/04_nocapi.bats +++ b/test/bats/04_nocapi.bats @@ -25,14 +25,14 @@ teardown() { @test "without capi: crowdsec LAPI should run without capi (-no-capi flag)" { config_set '.common.log_media="stdout"' - run -124 --separate-stderr timeout 1s "${CROWDSEC}" -no-capi + rune -124 timeout 1s "${CROWDSEC}" -no-capi assert_stderr --partial "Communication with CrowdSec Central API disabled from args" } @test "without capi: crowdsec LAPI should still work" { config_disable_capi config_set '.common.log_media="stdout"' - run -124 --separate-stderr timeout 1s "${CROWDSEC}" + rune -124 timeout 1s "${CROWDSEC}" # from `man timeout`: If the command times out, and --preserve-status is not set, then exit with status 124. 
assert_stderr --partial "push and pull to Central API disabled" } @@ -40,13 +40,13 @@ teardown() { @test "without capi: cscli capi status -> fail" { config_disable_capi ./instance-crowdsec start - run -1 --separate-stderr cscli capi status + rune -1 cscli capi status assert_stderr --partial "no configuration for Central API in " } @test "no capi: cscli config show" { config_disable_capi - run -0 --separate-stderr cscli config show -o human + rune -0 cscli config show -o human assert_output --partial "Global:" assert_output --partial "cscli:" assert_output --partial "Crowdsec:" @@ -56,9 +56,9 @@ teardown() { @test "no agent: cscli config backup" { config_disable_capi backupdir=$(TMPDIR="${BATS_TEST_TMPDIR}" mktemp -u) - run -0 cscli config backup "${backupdir}" - assert_output --partial "Starting configuration backup" - run -1 --separate-stderr cscli config backup "${backupdir}" + rune -0 cscli config backup "${backupdir}" + assert_stderr --partial "Starting configuration backup" + rune -1 cscli config backup "${backupdir}" assert_stderr --partial "failed to backup config" assert_stderr --partial "file exists" rm -rf -- "${backupdir:?}" @@ -67,15 +67,15 @@ teardown() { @test "without capi: cscli lapi status -> success" { config_disable_capi ./instance-crowdsec start - run -0 --separate-stderr cscli lapi status + rune -0 cscli lapi status assert_stderr --partial "You can successfully interact with Local API (LAPI)" } @test "cscli metrics" { config_disable_capi ./instance-crowdsec start - run -0 cscli lapi status - run -0 --separate-stderr cscli metrics + rune -0 cscli lapi status + rune -0 cscli metrics assert_output --partial "Route" assert_output --partial '/v1/watchers/login' assert_output --partial "Local Api Metrics:" diff --git a/test/bats/05_config_yaml_local.bats b/test/bats/05_config_yaml_local.bats index d974ff729..3cc20819b 100644 --- a/test/bats/05_config_yaml_local.bats +++ b/test/bats/05_config_yaml_local.bats @@ -20,7 +20,7 @@ teardown_file() { setup() 
{ load "../lib/setup.sh" ./instance-data load - run -0 config_get '.api.client.credentials_path' + rune -0 config_get '.api.client.credentials_path' LOCAL_API_CREDENTIALS="${output}" export LOCAL_API_CREDENTIALS } @@ -33,82 +33,82 @@ teardown() { @test "config.yaml.local - cscli (log_level)" { config_set '.common.log_level="warning"' - run -0 --separate-stderr cscli config show --key Config.Common.LogLevel + rune -0 cscli config show --key Config.Common.LogLevel assert_output "warning" echo "{'common':{'log_level':'debug'}}" >"${CONFIG_YAML}.local" - run -0 --separate-stderr cscli config show --key Config.Common.LogLevel + rune -0 cscli config show --key Config.Common.LogLevel assert_output "debug" } @test "config.yaml.local - cscli (log_level - with envvar)" { config_set '.common.log_level="warning"' - run -0 --separate-stderr cscli config show --key Config.Common.LogLevel + rune -0 cscli config show --key Config.Common.LogLevel assert_output "warning" export CROWDSEC_LOG_LEVEL=debug echo "{'common':{'log_level':'${CROWDSEC_LOG_LEVEL}'}}" >"${CONFIG_YAML}.local" - run -0 --separate-stderr cscli config show --key Config.Common.LogLevel + rune -0 cscli config show --key Config.Common.LogLevel assert_output "debug" } @test "config.yaml.local - crowdsec (listen_url)" { # disable the agent or we'll need to patch api client credentials too - run -0 config_disable_agent + rune -0 config_disable_agent ./instance-crowdsec start - run -0 ./bin/wait-for-port -q 8080 + rune -0 ./bin/wait-for-port -q 8080 ./instance-crowdsec stop - run -1 ./bin/wait-for-port -q 8080 + rune -1 ./bin/wait-for-port -q 8080 echo "{'api':{'server':{'listen_uri':127.0.0.1:8083}}}" >"${CONFIG_YAML}.local" ./instance-crowdsec start - run -0 ./bin/wait-for-port -q 8083 - run -1 ./bin/wait-for-port -q 8080 + rune -0 ./bin/wait-for-port -q 8083 + rune -1 ./bin/wait-for-port -q 8080 ./instance-crowdsec stop rm -f "${CONFIG_YAML}.local" ./instance-crowdsec start - run -1 ./bin/wait-for-port -q 8083 - run 
-0 ./bin/wait-for-port -q 8080 + rune -1 ./bin/wait-for-port -q 8083 + rune -0 ./bin/wait-for-port -q 8080 } @test "local_api_credentials.yaml.local" { - run -0 config_disable_agent + rune -0 config_disable_agent echo "{'api':{'server':{'listen_uri':127.0.0.1:8083}}}" >"${CONFIG_YAML}.local" ./instance-crowdsec start - run -0 ./bin/wait-for-port -q 8083 + rune -0 ./bin/wait-for-port -q 8083 - run -1 cscli decisions list + rune -1 cscli decisions list echo "{'url':'http://127.0.0.1:8083'}" >"${LOCAL_API_CREDENTIALS}.local" - run -0 cscli decisions list + rune -0 cscli decisions list } @test "simulation.yaml.local" { - run -0 config_get '.config_paths.simulation_path' + rune -0 config_get '.config_paths.simulation_path' refute_output null SIMULATION="${output}" echo "simulation: off" >"${SIMULATION}" - run -0 cscli simulation status -o human - assert_output --partial "global simulation: disabled" + rune -0 cscli simulation status -o human + assert_stderr --partial "global simulation: disabled" echo "simulation: on" >"${SIMULATION}" - run -0 cscli simulation status -o human - assert_output --partial "global simulation: enabled" + rune -0 cscli simulation status -o human + assert_stderr --partial "global simulation: enabled" echo "simulation: off" >"${SIMULATION}.local" - run -0 cscli simulation status -o human - assert_output --partial "global simulation: disabled" + rune -0 cscli simulation status -o human + assert_stderr --partial "global simulation: disabled" rm -f "${SIMULATION}.local" - run -0 cscli simulation status -o human - assert_output --partial "global simulation: enabled" + rune -0 cscli simulation status -o human + assert_stderr --partial "global simulation: enabled" } @test "profiles.yaml.local" { - run -0 --separate-stderr config_get '.api.server.profiles_path' + rune -0 config_get '.api.server.profiles_path' refute_output null PROFILES="${output}" @@ -135,8 +135,8 @@ teardown() { # wait more than required for ((i=0;i<30;i++)); do sleep .5 - run -0 
--separate-stderr cscli decisions list -o json - run -0 jq --exit-status '.[].decisions[0] | [.value,.type] == ["1.1.1.172","captcha"]' <(output) && break + rune -0 cscli decisions list -o json + rune -0 jq --exit-status '.[].decisions[0] | [.value,.type] == ["1.1.1.172","captcha"]' <(output) && break done rm -f -- "${tmpfile}" [[ "${status}" -eq 0 ]] || fail "captcha not triggered" diff --git a/test/bats/07_setup.bats b/test/bats/07_setup.bats index 714b730c0..c63f07024 100644 --- a/test/bats/07_setup.bats +++ b/test/bats/07_setup.bats @@ -67,7 +67,7 @@ teardown() { assert_line --partial "--force-os-version string override OS.RawVersion (of OS or Linux distribution)" assert_line --partial "--skip-service strings ignore a service, don't recommend hub/datasources (can be repeated)" - rune -1 --separate-stderr cscli setup detect --detect-config /path/does/not/exist + rune -1 cscli setup detect --detect-config /path/does/not/exist assert_stderr --partial "detecting services: while reading file: open /path/does/not/exist: no such file or directory" # rm -f "${HUB_DIR}/detect.yaml" @@ -91,27 +91,27 @@ teardown() { foobarbaz: EOT - rune -0 --separate-stderr cscli setup detect --detect-config "$tempfile" + rune -0 cscli setup detect --detect-config "$tempfile" assert_json '{setup:[{detected_service:"foobarbaz"},{detected_service:"linux",install:{collections:["crowdsecurity/linux"]}}]}' - rune -0 --separate-stderr cscli setup detect --detect-config "$tempfile" --skip-service linux + rune -0 cscli setup detect --detect-config "$tempfile" --skip-service linux assert_json '{setup:[{detected_service:"foobarbaz"}]}' } @test "cscli setup detect --force-os-*" { - rune -0 --separate-stderr cscli setup detect --force-os-family linux --detect-config "${TESTDATA}/detect.yaml" + rune -0 cscli setup detect --force-os-family linux --detect-config "${TESTDATA}/detect.yaml" rune -0 jq -cS '.setup[] | select(.detected_service=="linux")' <(output) assert_json 
'{detected_service:"linux",install:{collections:["crowdsecurity/linux"]},datasource:{source:"file",labels:{type:"syslog"},filenames:["/var/log/syslog","/var/log/kern.log","/var/log/messages"]}}' - rune -0 --separate-stderr cscli setup detect --force-os-family freebsd --detect-config "${TESTDATA}/detect.yaml" + rune -0 cscli setup detect --force-os-family freebsd --detect-config "${TESTDATA}/detect.yaml" rune -0 jq -cS '.setup[] | select(.detected_service=="freebsd")' <(output) assert_json '{detected_service:"freebsd",install:{collections:["crowdsecurity/freebsd"]}}' - rune -0 --separate-stderr cscli setup detect --force-os-family windows --detect-config "${TESTDATA}/detect.yaml" + rune -0 cscli setup detect --force-os-family windows --detect-config "${TESTDATA}/detect.yaml" rune -0 jq -cS '.setup[] | select(.detected_service=="windows")' <(output) assert_json '{detected_service:"windows",install:{collections:["crowdsecurity/windows"]}}' - rune -0 --separate-stderr cscli setup detect --force-os-family darwin --detect-config "${TESTDATA}/detect.yaml" + rune -0 cscli setup detect --force-os-family darwin --detect-config "${TESTDATA}/detect.yaml" # XXX do we want do disallow unknown family? # assert_stderr --partial "detecting services: OS 'darwin' not supported" @@ -129,7 +129,7 @@ teardown() { apache2: EOT - rune -0 --separate-stderr cscli setup detect --list-supported-services --detect-config "$tempfile" + rune -0 cscli setup detect --list-supported-services --detect-config "$tempfile" # the service list is sorted assert_output - <<-EOT apache2 @@ -141,7 +141,7 @@ teardown() { thisisajoke EOT - rune -1 --separate-stderr cscli setup detect --list-supported-services --detect-config "$tempfile" + rune -1 cscli setup detect --list-supported-services --detect-config "$tempfile" assert_stderr --partial "while parsing ${tempfile}: yaml: unmarshal errors:" rm -f "$tempfile" @@ -201,7 +201,7 @@ update-notifier-motd.timer enabled enabled 20 unit files listed.' 
mock_set_status "$mock" 1 2 - rune -0 --separate-stderr cscli setup detect + rune -0 cscli setup detect rune -0 jq -c '.setup' <(output) # If a call to UnitFoundwas part of the expression and it returned true, @@ -254,7 +254,7 @@ update-notifier-motd.timer enabled enabled mock_set_output "$mock" "" mock_set_status "$mock" 1 2 - rune -0 --separate-stderr cscli setup detect --snub-systemd + rune -0 cscli setup detect --snub-systemd # setup must not be 'null', but an empty list assert_json '{setup:[]}' @@ -291,20 +291,20 @@ update-notifier-motd.timer enabled enabled type: apache3 EOT - rune -0 --separate-stderr cscli setup detect --force-unit force-apache2 + rune -0 cscli setup detect --force-unit force-apache2 rune -0 jq -cS '.setup' <(output) assert_json '[{datasource:{source:"file",filename:"dummy.log",labels:{"type":"apache2"}},detected_service:"apache2"}]' - rune -0 --separate-stderr cscli setup detect --force-unit force-apache2,force-apache3 + rune -0 cscli setup detect --force-unit force-apache2,force-apache3 rune -0 jq -cS '.setup' <(output) assert_json '[{datasource:{source:"file",filename:"dummy.log",labels:{type:"apache2"}},detected_service:"apache2"},{datasource:{source:"file",filename:"dummy.log",labels:{"type":"apache3"}},detected_service:"apache3"}]' # force-unit can be specified multiple times, the order does not matter - rune -0 --separate-stderr cscli setup detect --force-unit force-apache3 --force-unit force-apache2 + rune -0 cscli setup detect --force-unit force-apache3 --force-unit force-apache2 rune -0 jq -cS '.setup' <(output) assert_json '[{datasource:{source:"file",filename:"dummy.log",labels:{type:"apache2"}},detected_service:"apache2"},{datasource:{source:"file",filename:"dummy.log",labels:{type:"apache3"}},detected_service:"apache3"}]' - rune -1 --separate-stderr cscli setup detect --force-unit mock-doesnotexist + rune -1 cscli setup detect --force-unit mock-doesnotexist assert_stderr --partial "detecting services: unit(s) forced but not 
supported: [mock-doesnotexist]" } @@ -324,7 +324,7 @@ update-notifier-motd.timer enabled enabled - ProcessRunning("this-does-not-exist") EOT - rune -0 --separate-stderr cscli setup detect + rune -0 cscli setup detect rune -0 jq -cS '.setup' <(output) assert_json '[{detected_service:"apache2"}]' } @@ -341,7 +341,7 @@ update-notifier-motd.timer enabled enabled - ProcessRunning("this-does-not-exist") EOT - rune -0 --separate-stderr cscli setup detect --force-process force-apache2 + rune -0 cscli setup detect --force-process force-apache2 rune -0 jq -cS '.setup' <(output) assert_json '[{detected_service:"apache2"}]' } @@ -360,11 +360,11 @@ update-notifier-motd.timer enabled enabled type: apache2 EOT - rune -0 --separate-stderr cscli setup detect --force-unit force-apache2 + rune -0 cscli setup detect --force-unit force-apache2 rune -0 jq -cS '.setup' <(output) assert_json '[{datasource:{source:"file",filename:"dummy.log",labels:{type:"apache2"}},detected_service:"apache2"}]' - rune -0 --separate-stderr cscli setup detect --force-unit force-apache2 --yaml + rune -0 cscli setup detect --force-unit force-apache2 --yaml assert_output - <<-EOT setup: - detected_service: apache2 @@ -417,7 +417,7 @@ update-notifier-motd.timer enabled enabled always: EOT - rune -0 --separate-stderr cscli setup detect + rune -0 cscli setup detect assert_json '{setup:[{detected_service:"always"}]}' setup=$output rune -0 cscli setup datasources /dev/stdin <<<"$setup" @@ -448,7 +448,7 @@ update-notifier-motd.timer enabled enabled - crowdsecurity/apache2 EOT - rune -0 --separate-stderr cscli setup detect --force-process force-apache2,force-foobar + rune -0 cscli setup detect --force-process force-apache2,force-foobar rune -0 jq -Sc '.setup | sort' <(output) assert_json '[{install:{collections:["crowdsecurity/apache2"]},detected_service:"apache2"},{install:{collections:["crowdsecurity/foobar"]},detected_service:"foobar"}]' } @@ -469,7 +469,7 @@ update-notifier-motd.timer enabled enabled - 
/var/log/*http*/*.log EOT - rune -0 --separate-stderr cscli setup detect --force-process force-foobar + rune -0 cscli setup detect --force-process force-foobar rune -0 yq -op '.setup | sort_keys(..)' <(output) assert_output - <<-EOT 0.datasource.filenames.0 = /var/log/apache2/*.log @@ -479,7 +479,7 @@ update-notifier-motd.timer enabled enabled 0.detected_service = foobar EOT - rune -1 --separate-stderr cscli setup detect --force-process mock-doesnotexist + rune -1 cscli setup detect --force-process mock-doesnotexist assert_stderr --partial "detecting services: process(es) forced but not supported: [mock-doesnotexist]" } @@ -493,7 +493,7 @@ update-notifier-motd.timer enabled enabled type: something EOT - rune -1 --separate-stderr cscli setup detect + rune -1 cscli setup detect assert_stderr --partial "detecting services: invalid datasource for foobar: source is empty" # more datasource-specific tests are in detect_test.go @@ -501,38 +501,38 @@ update-notifier-motd.timer enabled enabled @test "cscli setup install-hub (dry run)" { # it's not installed - rune -0 --separate-stderr cscli collections list -o json + rune -0 cscli collections list -o json rune -0 jq -r '.collections[].name' <(output) refute_line "crowdsecurity/apache2" # we install it - rune -0 --separate-stderr cscli setup install-hub /dev/stdin --dry-run <<< '{"setup":[{"install":{"collections":["crowdsecurity/apache2"]}}]}' + rune -0 cscli setup install-hub /dev/stdin --dry-run <<< '{"setup":[{"install":{"collections":["crowdsecurity/apache2"]}}]}' assert_output 'dry-run: would install collection crowdsecurity/apache2' # still not installed - rune -0 --separate-stderr cscli collections list -o json + rune -0 cscli collections list -o json rune -0 jq -r '.collections[].name' <(output) refute_line "crowdsecurity/apache2" } @test "cscli setup install-hub (dry run: install multiple collections)" { # it's not installed - rune -0 --separate-stderr cscli collections list -o json + rune -0 cscli collections list 
-o json rune -0 jq -r '.collections[].name' <(output) refute_line "crowdsecurity/apache2" # we install it - rune -0 --separate-stderr cscli setup install-hub /dev/stdin --dry-run <<< '{"setup":[{"install":{"collections":["crowdsecurity/apache2"]}}]}' + rune -0 cscli setup install-hub /dev/stdin --dry-run <<< '{"setup":[{"install":{"collections":["crowdsecurity/apache2"]}}]}' assert_output 'dry-run: would install collection crowdsecurity/apache2' # still not installed - rune -0 --separate-stderr cscli collections list -o json + rune -0 cscli collections list -o json rune -0 jq -r '.collections[].name' <(output) refute_line "crowdsecurity/apache2" } @test "cscli setup install-hub (dry run: install multiple collections, parsers, scenarios, postoverflows)" { - rune -0 --separate-stderr cscli setup install-hub /dev/stdin --dry-run <<< '{"setup":[{"install":{"collections":["crowdsecurity/foo","johndoe/bar"],"parsers":["crowdsecurity/fooparser","johndoe/barparser"],"scenarios":["crowdsecurity/fooscenario","johndoe/barscenario"],"postoverflows":["crowdsecurity/foopo","johndoe/barpo"]}}]}' + rune -0 cscli setup install-hub /dev/stdin --dry-run <<< '{"setup":[{"install":{"collections":["crowdsecurity/foo","johndoe/bar"],"parsers":["crowdsecurity/fooparser","johndoe/barparser"],"scenarios":["crowdsecurity/fooscenario","johndoe/barscenario"],"postoverflows":["crowdsecurity/foopo","johndoe/barpo"]}}]}' assert_line 'dry-run: would install collection crowdsecurity/foo' assert_line 'dry-run: would install collection johndoe/bar' assert_line 'dry-run: would install parser crowdsecurity/fooparser' @@ -544,12 +544,12 @@ update-notifier-motd.timer enabled enabled } @test "cscli setup datasources" { - rune -0 --separate-stderr cscli setup datasources --help + rune -0 cscli setup datasources --help assert_line --partial "--to-dir string write the configuration to a directory, in multiple files" # single item - rune -0 --separate-stderr cscli setup datasources /dev/stdin <<-EOT + rune -0 
cscli setup datasources /dev/stdin <<-EOT setup: - datasource: source: file @@ -575,7 +575,7 @@ update-notifier-motd.timer enabled enabled # multiple items - rune -0 --separate-stderr cscli setup datasources /dev/stdin <<-EOT + rune -0 cscli setup datasources /dev/stdin <<-EOT setup: - datasource: labels: @@ -713,14 +713,14 @@ update-notifier-motd.timer enabled enabled EOT # the directory must exist - rune -1 --separate-stderr cscli setup datasources /dev/stdin --to-dir /path/does/not/exist <<< '{}' + rune -1 cscli setup datasources /dev/stdin --to-dir /path/does/not/exist <<< '{}' assert_stderr --partial "directory /path/does/not/exist does not exist" # of course it must be a directory touch "${acquisdir}/notadir" - rune -1 --separate-stderr cscli setup datasources /dev/stdin --to-dir "${acquisdir}/notadir" <<-EOT + rune -1 cscli setup datasources /dev/stdin --to-dir "${acquisdir}/notadir" <<-EOT setup: - detected_service: apache2 datasource: @@ -735,11 +735,11 @@ update-notifier-motd.timer enabled enabled @test "cscli setup datasources (disclaimer)" { disclaimer="This file was automatically generated" - rune -0 --separate-stderr cscli setup datasources /dev/stdin <<<"setup:" + rune -0 cscli setup datasources /dev/stdin <<<"setup:" rune -0 yq 'head_comment' <(output) assert_output --partial "$disclaimer" - rune -0 --separate-stderr cscli setup datasources /dev/stdin <<-EOT + rune -0 cscli setup datasources /dev/stdin <<-EOT setup: - detected_service: something datasource: @@ -768,10 +768,10 @@ update-notifier-motd.timer enabled enabled - "SYSLOG_IDENTIFIER=TheWiz" EOT - rune -0 --separate-stderr cscli setup detect --detect-config "$tempfile" --force-unit thewiz.service + rune -0 cscli setup detect --detect-config "$tempfile" --force-unit thewiz.service rune -0 jq -cS '.' 
<(output) assert_json '{setup:[{datasource:{source:"journalctl",journalctl_filter:["SYSLOG_IDENTIFIER=TheWiz"],labels:{type:"thewiz"}},detected_service:"thewiz"}]}' - rune -0 --separate-stderr cscli setup datasources <(output) + rune -0 cscli setup datasources <(output) rune -0 yq '. head_comment=""' <(output) assert_output - <<-EOT journalctl_filter: @@ -786,17 +786,17 @@ update-notifier-motd.timer enabled enabled @test "cscli setup validate" { # an empty file is not enough - rune -1 --separate-stderr cscli setup validate /dev/null + rune -1 cscli setup validate /dev/null assert_output "EOF" assert_stderr --partial "invalid setup file" # this is ok; install nothing - rune -0 --separate-stderr cscli setup validate /dev/stdin <<-EOT + rune -0 cscli setup validate /dev/stdin <<-EOT setup: EOT refute_output - rune -1 --separate-stderr cscli setup validate /dev/stdin <<-EOT + rune -1 cscli setup validate /dev/stdin <<-EOT se tup: EOT assert_output - <<-EOT @@ -806,7 +806,7 @@ update-notifier-motd.timer enabled enabled EOT assert_stderr --partial "invalid setup file" - rune -1 --separate-stderr cscli setup validate /dev/stdin <<-EOT + rune -1 cscli setup validate /dev/stdin <<-EOT setup: alsdk al; sdf EOT diff --git a/test/bats/08_metrics.bats b/test/bats/08_metrics.bats new file mode 100644 index 000000000..836e22048 --- /dev/null +++ b/test/bats/08_metrics.bats @@ -0,0 +1,60 @@ +#!/usr/bin/env bats +# vim: ft=bats:list:ts=8:sts=4:sw=4:et:ai:si: + +set -u + +setup_file() { + load "../lib/setup_file.sh" +} + +teardown_file() { + load "../lib/teardown_file.sh" +} + +setup() { + load "../lib/setup.sh" + ./instance-data load +} + +teardown() { + ./instance-crowdsec stop +} + +#---------- + +@test "cscli metrics (crowdsec not running)" { + rune -1 cscli metrics + # crowdsec is down + assert_stderr --partial "failed to fetch prometheus metrics" + assert_stderr --partial "connect: connection refused" +} + +@test "cscli metrics (bad configuration)" { + config_set 
'.prometheus.foo="bar"' + rune -1 cscli metrics + assert_stderr --partial "field foo not found in type csconfig.PrometheusCfg" +} + +@test "cscli metrics (.prometheus.enabled=false)" { + config_set '.prometheus.enabled=false' + rune -1 cscli metrics + assert_stderr --partial "prometheus is not enabled, can't show metrics" +} + +@test "cscli metrics (missing listen_addr)" { + config_set 'del(.prometheus.listen_addr)' + rune -1 cscli metrics + assert_stderr --partial "no prometheus url, please specify" +} + +@test "cscli metrics (missing listen_port)" { + config_set 'del(.prometheus.listen_addr)' + rune -1 cscli metrics + assert_stderr --partial "no prometheus url, please specify" +} + +@test "cscli metrics (missing prometheus section)" { + config_set 'del(.prometheus)' + rune -1 cscli metrics + assert_stderr --partial "prometheus section missing, can't show metrics" +} diff --git a/test/bats/10_bouncers.bats b/test/bats/10_bouncers.bats index b22c5d861..8db64ff5e 100644 --- a/test/bats/10_bouncers.bats +++ b/test/bats/10_bouncers.bats @@ -24,37 +24,35 @@ teardown() { #---------- @test "there are 0 bouncers" { - run -0 --separate-stderr cscli bouncers list -o json + rune -0 cscli bouncers list -o json assert_output "[]" } @test "we can add one bouncer, and delete it" { - run -0 cscli bouncers add ciTestBouncer + rune -0 cscli bouncers add ciTestBouncer assert_output --partial "Api key for 'ciTestBouncer':" - run -0 cscli bouncers delete ciTestBouncer - run -0 --separate-stderr cscli bouncers list -o json + rune -0 cscli bouncers delete ciTestBouncer + rune -0 cscli bouncers list -o json assert_output '[]' } @test "we can't add the same bouncer twice" { - run -0 cscli bouncers add ciTestBouncer - run -1 --separate-stderr cscli bouncers add ciTestBouncer -o json + rune -0 cscli bouncers add ciTestBouncer + rune -1 cscli bouncers add ciTestBouncer -o json # XXX temporary hack to filter out unwanted log lines that may appear before # log configuration (= not json) - run 
-0 jq -r '.level' <(stderr | grep "^{") - assert_output 'fatal' - run -0 jq -r '.msg' <(stderr | grep "^{") - assert_output "unable to create bouncer: bouncer ciTestBouncer already exists" + rune -0 jq -c '[.level,.msg]' <(stderr | grep "^{") + assert_output '["fatal","unable to create bouncer: bouncer ciTestBouncer already exists"]' - run -0 --separate-stderr cscli bouncers list -o json - run -0 jq '. | length' <(output) + rune -0 cscli bouncers list -o json + rune -0 jq '. | length' <(output) assert_output 1 } @test "delete the bouncer multiple times, even if it does not exist" { - run -0 cscli bouncers add ciTestBouncer - run -0 cscli bouncers delete ciTestBouncer - run -1 cscli bouncers delete ciTestBouncer - run -1 cscli bouncers delete foobarbaz + rune -0 cscli bouncers add ciTestBouncer + rune -0 cscli bouncers delete ciTestBouncer + rune -1 cscli bouncers delete ciTestBouncer + rune -1 cscli bouncers delete foobarbaz } diff --git a/test/bats/11_bouncers_tls.bats b/test/bats/11_bouncers_tls.bats index dcb1ff450..8fb457925 100644 --- a/test/bats/11_bouncers_tls.bats +++ b/test/bats/11_bouncers_tls.bats @@ -61,37 +61,37 @@ teardown() { #---------- @test "there are 0 bouncers" { - run -0 --separate-stderr cscli bouncers list -o json + rune -0 cscli bouncers list -o json assert_output "[]" } @test "simulate one bouncer request with a valid cert" { - run -0 curl -s --cert "${tmpdir}/bouncer.pem" --key "${tmpdir}/bouncer-key.pem" --cacert "${tmpdir}/bundle.pem" https://localhost:8080/v1/decisions\?ip=42.42.42.42 + rune -0 curl -s --cert "${tmpdir}/bouncer.pem" --key "${tmpdir}/bouncer-key.pem" --cacert "${tmpdir}/bundle.pem" https://localhost:8080/v1/decisions\?ip=42.42.42.42 assert_output "null" - run -0 --separate-stderr cscli bouncers list -o json - run -0 jq '. | length' <(output) + rune -0 cscli bouncers list -o json + rune -0 jq '. 
| length' <(output) assert_output '1' - run -0 --separate-stderr cscli bouncers list -o json - run -0 jq -r '.[] | .name' <(output) + rune -0 cscli bouncers list -o json + rune -0 jq -r '.[] | .name' <(output) assert_output "localhost@127.0.0.1" - run cscli bouncers delete localhost@127.0.0.1 + rune cscli bouncers delete localhost@127.0.0.1 } @test "simulate one bouncer request with an invalid cert" { - run curl -s --cert "${tmpdir}/bouncer_invalid.pem" --key "${tmpdir}/bouncer_invalid-key.pem" --cacert "${tmpdir}/ca-key.pem" https://localhost:8080/v1/decisions\?ip=42.42.42.42 - run -0 --separate-stderr cscli bouncers list -o json + rune curl -s --cert "${tmpdir}/bouncer_invalid.pem" --key "${tmpdir}/bouncer_invalid-key.pem" --cacert "${tmpdir}/ca-key.pem" https://localhost:8080/v1/decisions\?ip=42.42.42.42 + rune -0 cscli bouncers list -o json assert_output "[]" } @test "simulate one bouncer request with an invalid OU" { - run curl -s --cert "${tmpdir}/bouncer_bad_ou.pem" --key "${tmpdir}/bouncer_bad_ou-key.pem" --cacert "${tmpdir}/bundle.pem" https://localhost:8080/v1/decisions\?ip=42.42.42.42 - run -0 --separate-stderr cscli bouncers list -o json + rune curl -s --cert "${tmpdir}/bouncer_bad_ou.pem" --key "${tmpdir}/bouncer_bad_ou-key.pem" --cacert "${tmpdir}/bundle.pem" https://localhost:8080/v1/decisions\?ip=42.42.42.42 + rune -0 cscli bouncers list -o json assert_output "[]" } @test "simulate one bouncer request with a revoked certificate" { - run -0 curl -i -s --cert "${tmpdir}/bouncer_revoked.pem" --key "${tmpdir}/bouncer_revoked-key.pem" --cacert "${tmpdir}/bundle.pem" https://localhost:8080/v1/decisions\?ip=42.42.42.42 + rune -0 curl -i -s --cert "${tmpdir}/bouncer_revoked.pem" --key "${tmpdir}/bouncer_revoked-key.pem" --cacert "${tmpdir}/bundle.pem" https://localhost:8080/v1/decisions\?ip=42.42.42.42 assert_output --partial "access forbidden" - run -0 --separate-stderr cscli bouncers list -o json + rune -0 cscli bouncers list -o json assert_output "[]" } 
diff --git a/test/bats/20_collections.bats b/test/bats/20_collections.bats index 7a8efa469..aa1fa6b21 100644 --- a/test/bats/20_collections.bats +++ b/test/bats/20_collections.bats @@ -24,7 +24,7 @@ teardown() { #---------- @test "we can list collections" { - run -0 cscli collections list + rune -0 cscli collections list } @test "there are 2 collections (linux and sshd)" { @@ -105,7 +105,7 @@ teardown() { rune -0 cscli collections remove --all assert_stderr --partial "Removed symlink [crowdsecurity/sshd]" assert_stderr --partial "Removed symlink [crowdsecurity/linux]" - rune -0 --separate-stderr cscli hub list -o json + rune -0 cscli hub list -o json assert_json '{collections:[],parsers:[],postoverflows:[],scenarios:[]}' rune -0 cscli collections remove --all assert_stderr --partial 'Disabled 0 items' diff --git a/test/bats/30_machines.bats b/test/bats/30_machines.bats index 8f2c33d72..d5ddf840f 100644 --- a/test/bats/30_machines.bats +++ b/test/bats/30_machines.bats @@ -24,60 +24,60 @@ teardown() { #---------- @test "can list machines as regular user" { - run -0 cscli machines list + rune -0 cscli machines list } @test "we have exactly one machine" { - run -0 --separate-stderr cscli machines list -o json - run -0 jq -c '[. | length, .[0].machineId[0:32], .[0].isValidated]' <(output) + rune -0 cscli machines list -o json + rune -0 jq -c '[. 
| length, .[0].machineId[0:32], .[0].isValidated]' <(output) assert_output '[1,"githubciXXXXXXXXXXXXXXXXXXXXXXXX",true]' } @test "add a new machine and delete it" { - run -0 cscli machines add -a -f /dev/null CiTestMachine -o human - assert_output --partial "Machine 'CiTestMachine' successfully added to the local API" - assert_output --partial "API credentials dumped to '/dev/null'" + rune -0 cscli machines add -a -f /dev/null CiTestMachine -o human + assert_stderr --partial "Machine 'CiTestMachine' successfully added to the local API" + assert_stderr --partial "API credentials dumped to '/dev/null'" # we now have two machines - run -0 --separate-stderr cscli machines list -o json - run -0 jq -c '[. | length, .[-1].machineId, .[0].isValidated]' <(output) + rune -0 cscli machines list -o json + rune -0 jq -c '[. | length, .[-1].machineId, .[0].isValidated]' <(output) assert_output '[2,"CiTestMachine",true]' # delete the test machine - run -0 cscli machines delete CiTestMachine -o human - assert_output --partial "machine 'CiTestMachine' deleted successfully" + rune -0 cscli machines delete CiTestMachine -o human + assert_stderr --partial "machine 'CiTestMachine' deleted successfully" # we now have one machine again - run -0 --separate-stderr cscli machines list -o json - run -0 jq '. | length' <(output) + rune -0 cscli machines list -o json + rune -0 jq '. 
| length' <(output) assert_output 1 } @test "register, validate and then remove a machine" { - run -0 cscli lapi register --machine CiTestMachineRegister -f /dev/null -o human - assert_output --partial "Successfully registered to Local API (LAPI)" - assert_output --partial "Local API credentials dumped to '/dev/null'" + rune -0 cscli lapi register --machine CiTestMachineRegister -f /dev/null -o human + assert_stderr --partial "Successfully registered to Local API (LAPI)" + assert_stderr --partial "Local API credentials dumped to '/dev/null'" # the machine is not validated yet - run -0 --separate-stderr cscli machines list -o json - run -0 jq '.[-1].isValidated' <(output) + rune -0 cscli machines list -o json + rune -0 jq '.[-1].isValidated' <(output) assert_output 'null' # validate the machine - run -0 cscli machines validate CiTestMachineRegister -o human - assert_output --partial "machine 'CiTestMachineRegister' validated successfully" + rune -0 cscli machines validate CiTestMachineRegister -o human + assert_stderr --partial "machine 'CiTestMachineRegister' validated successfully" # the machine is now validated - run -0 --separate-stderr cscli machines list -o json - run -0 jq '.[-1].isValidated' <(output) + rune -0 cscli machines list -o json + rune -0 jq '.[-1].isValidated' <(output) assert_output 'true' # delete the test machine again - run -0 cscli machines delete CiTestMachineRegister -o human - assert_output --partial "machine 'CiTestMachineRegister' deleted successfully" + rune -0 cscli machines delete CiTestMachineRegister -o human + assert_stderr --partial "machine 'CiTestMachineRegister' deleted successfully" # we now have one machine, again - run -0 --separate-stderr cscli machines list -o json - run -0 jq '. | length' <(output) + rune -0 cscli machines list -o json + rune -0 jq '. 
| length' <(output) assert_output 1 } diff --git a/test/bats/30_machines_tls.bats b/test/bats/30_machines_tls.bats index 7c8458388..121cdecdf 100644 --- a/test/bats/30_machines_tls.bats +++ b/test/bats/30_machines_tls.bats @@ -47,10 +47,8 @@ setup_file() { # remove all machines - run -0 cscli machines list -o json - run -0 jq -r '.[].machineId' <(output) - for machine in $(output); do - run -0 cscli machines delete "${machine}" + for machine in $(cscli machines list -o json | jq -r '.[].machineId'); do + cscli machines delete "${machine}" >/dev/null 2>&1 done config_disable_agent @@ -69,7 +67,6 @@ setup() { .api.server.tls.crl_path=strenv(tmpdir) + "/crl.pem" | .api.server.tls.agents_allowed_ou=["agent-ou"] ' - } teardown() { @@ -102,7 +99,7 @@ teardown() { config_set "${CONFIG_DIR}/local_api_credentials.yaml" 'del(.login,.password)' ./instance-crowdsec start - run -0 --separate-stderr cscli machines list -o json + rune -0 cscli machines list -o json assert_output '[]' } @@ -116,9 +113,9 @@ teardown() { config_set "${CONFIG_DIR}/local_api_credentials.yaml" 'del(.login,.password)' ./instance-crowdsec start - run -0 cscli lapi status - run -0 --separate-stderr cscli machines list -o json - run -0 jq -c '[. | length, .[0].machineId[0:32], .[0].isValidated, .[0].ipAddress, .[0].auth_type]' <(output) + rune -0 cscli lapi status + rune -0 cscli machines list -o json + rune -0 jq -c '[. 
| length, .[0].machineId[0:32], .[0].isValidated, .[0].ipAddress, .[0].auth_type]' <(output) assert_output '[1,"localhost@127.0.0.1",true,"127.0.0.1","tls"]' cscli machines delete localhost@127.0.0.1 @@ -133,7 +130,7 @@ teardown() { ' config_set "${CONFIG_DIR}/local_api_credentials.yaml" 'del(.login,.password)' ./instance-crowdsec start - run -0 --separate-stderr cscli machines list -o json + rune -0 cscli machines list -o json assert_output '[]' } @@ -147,6 +144,6 @@ teardown() { config_set "${CONFIG_DIR}/local_api_credentials.yaml" 'del(.login,.password)' ./instance-crowdsec start - run -0 --separate-stderr cscli machines list -o json + rune -0 cscli machines list -o json assert_output '[]' } diff --git a/test/bats/40_live-ban.bats b/test/bats/40_live-ban.bats index bb54e6037..c410cbce5 100644 --- a/test/bats/40_live-ban.bats +++ b/test/bats/40_live-ban.bats @@ -39,7 +39,7 @@ teardown() { fake_log >>"${tmpfile}" sleep 2 rm -f -- "${tmpfile}" - run -0 --separate-stderr cscli decisions list -o json - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli decisions list -o json + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output '1.1.1.172' } diff --git a/test/bats/50_simulation.bats b/test/bats/50_simulation.bats index 7fc2d7435..578dcf81a 100644 --- a/test/bats/50_simulation.bats +++ b/test/bats/50_simulation.bats @@ -27,40 +27,40 @@ setup() { #---------- @test "we have one decision" { - run -0 cscli simulation disable --global + rune -0 cscli simulation disable --global fake_log | "${CROWDSEC}" -dsn file:///dev/fd/0 -type syslog -no-api - run -0 --separate-stderr cscli decisions list -o json - run -0 jq '. | length' <(output) + rune -0 cscli decisions list -o json + rune -0 jq '. 
| length' <(output) assert_output 1 } @test "1.1.1.174 has been banned (exact)" { - run -0 cscli simulation disable --global + rune -0 cscli simulation disable --global fake_log | "${CROWDSEC}" -dsn file:///dev/fd/0 -type syslog -no-api - run -0 --separate-stderr cscli decisions list -o json - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli decisions list -o json + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output '1.1.1.174' } @test "decision has simulated == false (exact)" { - run -0 cscli simulation disable --global + rune -0 cscli simulation disable --global fake_log | "${CROWDSEC}" -dsn file:///dev/fd/0 -type syslog -no-api - run -0 --separate-stderr cscli decisions list -o json - run -0 jq '.[].decisions[0].simulated' <(output) + rune -0 cscli decisions list -o json + rune -0 jq '.[].decisions[0].simulated' <(output) assert_output 'false' } @test "simulated scenario, listing non-simulated: expect no decision" { - run -0 cscli simulation enable crowdsecurity/ssh-bf + rune -0 cscli simulation enable crowdsecurity/ssh-bf fake_log | "${CROWDSEC}" -dsn file:///dev/fd/0 -type syslog -no-api - run -0 --separate-stderr cscli decisions list --no-simu -o json + rune -0 cscli decisions list --no-simu -o json assert_output 'null' } @test "global simulation, listing non-simulated: expect no decision" { - run -0 cscli simulation disable crowdsecurity/ssh-bf - run -0 cscli simulation enable --global + rune -0 cscli simulation disable crowdsecurity/ssh-bf + rune -0 cscli simulation enable --global fake_log | "${CROWDSEC}" -dsn file:///dev/fd/0 -type syslog -no-api - run -0 --separate-stderr cscli decisions list --no-simu -o json + rune -0 cscli decisions list --no-simu -o json assert_output 'null' } diff --git a/test/bats/70_http_plugin.bats b/test/bats/70_http_plugin.bats index 1b1b01399..a8b860aab 100644 --- a/test/bats/70_http_plugin.bats +++ b/test/bats/70_http_plugin.bats @@ -54,33 +54,33 @@ setup() { #---------- @test "add two bans" { - run 
-0 --separate-stderr cscli decisions add --ip 1.2.3.4 --duration 30s + rune -0 cscli decisions add --ip 1.2.3.4 --duration 30s assert_stderr --partial 'Decision successfully added' - run -0 --separate-stderr cscli decisions add --ip 1.2.3.5 --duration 30s + rune -0 cscli decisions add --ip 1.2.3.5 --duration 30s assert_stderr --partial 'Decision successfully added' sleep 5 } @test "expected 1 log line from http server" { - run -0 wc -l <"${MOCK_OUT}" + rune -0 wc -l <"${MOCK_OUT}" # wc can pad with spaces on some platforms - run -0 tr -d ' ' < <(output) + rune -0 tr -d ' ' < <(output) assert_output 1 } @test "expected to receive 2 alerts in the request body from plugin" { - run -0 jq -r '.request_body' <"${MOCK_OUT}" - run -0 jq -r 'length' <(output) + rune -0 jq -r '.request_body' <"${MOCK_OUT}" + rune -0 jq -r 'length' <(output) assert_output 2 } @test "expected to receive IP 1.2.3.4 as value of first decision" { - run -0 jq -r '.request_body[0].decisions[0].value' <"${MOCK_OUT}" + rune -0 jq -r '.request_body[0].decisions[0].value' <"${MOCK_OUT}" assert_output 1.2.3.4 } @test "expected to receive IP 1.2.3.5 as value of second decision" { - run -0 jq -r '.request_body[1].decisions[0].value' <"${MOCK_OUT}" + rune -0 jq -r '.request_body[1].decisions[0].value' <"${MOCK_OUT}" assert_output 1.2.3.5 } diff --git a/test/bats/71_dummy_plugin.bats b/test/bats/71_dummy_plugin.bats index dd95295b0..78352c514 100644 --- a/test/bats/71_dummy_plugin.bats +++ b/test/bats/71_dummy_plugin.bats @@ -58,21 +58,21 @@ setup() { #---------- @test "add two bans" { - run -0 --separate-stderr cscli decisions add --ip 1.2.3.4 --duration 30s + rune -0 cscli decisions add --ip 1.2.3.4 --duration 30s assert_stderr --partial 'Decision successfully added' - run -0 --separate-stderr cscli decisions add --ip 1.2.3.5 --duration 30s + rune -0 cscli decisions add --ip 1.2.3.5 --duration 30s assert_stderr --partial 'Decision successfully added' sleep 2 } @test "expected 1 notification" { - run -0 
cat "${tempfile}" + rune -0 cat "${tempfile}" assert_output --partial 1.2.3.4 assert_output --partial 1.2.3.5 } @test "second notification works too" { - run -0 cat "${tempfile2}" + rune -0 cat "${tempfile2}" assert_output --partial secondfile } diff --git a/test/bats/72_plugin_badconfig.bats b/test/bats/72_plugin_badconfig.bats index 49da10c4c..c216067f9 100644 --- a/test/bats/72_plugin_badconfig.bats +++ b/test/bats/72_plugin_badconfig.bats @@ -35,76 +35,90 @@ teardown() { @test "misconfigured plugin, only user is empty" { config_set '.plugin_config.user="" | .plugin_config.group="nogroup"' config_set "${PROFILES_PATH}" '.notifications=["http_default"]' - run -1 --separate-stderr timeout 2s "${CROWDSEC}" + rune -1 timeout 2s "${CROWDSEC}" assert_stderr --partial "api server init: unable to run local API: while loading plugin: while getting process attributes: both plugin user and group must be set" } @test "misconfigured plugin, only group is empty" { config_set '(.plugin_config.user="nobody") | (.plugin_config.group="")' config_set "${PROFILES_PATH}" '.notifications=["http_default"]' - run -1 --separate-stderr timeout 2s "${CROWDSEC}" + rune -1 timeout 2s "${CROWDSEC}" assert_stderr --partial "api server init: unable to run local API: while loading plugin: while getting process attributes: both plugin user and group must be set" } @test "misconfigured plugin, user does not exist" { config_set '(.plugin_config.user="userdoesnotexist") | (.plugin_config.group="groupdoesnotexist")' config_set "${PROFILES_PATH}" '.notifications=["http_default"]' - run -1 --separate-stderr timeout 2s "${CROWDSEC}" + rune -1 timeout 2s "${CROWDSEC}" assert_stderr --partial "api server init: unable to run local API: while loading plugin: while getting process attributes: user: unknown user userdoesnotexist" } @test "misconfigured plugin, group does not exist" { config_set '(.plugin_config.user=strenv(USER)) | (.plugin_config.group="groupdoesnotexist")' config_set "${PROFILES_PATH}" 
'.notifications=["http_default"]' - run -1 --separate-stderr timeout 2s "${CROWDSEC}" + rune -1 timeout 2s "${CROWDSEC}" assert_stderr --partial "api server init: unable to run local API: while loading plugin: while getting process attributes: group: unknown group groupdoesnotexist" } @test "bad plugin name" { config_set "${PROFILES_PATH}" '.notifications=["http_default"]' cp "${PLUGIN_DIR}"/notification-http "${PLUGIN_DIR}"/badname - run -1 --separate-stderr timeout 2s "${CROWDSEC}" + rune -1 timeout 2s "${CROWDSEC}" assert_stderr --partial "api server init: unable to run local API: while loading plugin: plugin name ${PLUGIN_DIR}/badname is invalid. Name should be like {type-name}" } +@test "duplicate notification config" { + CONFIG_DIR=$(dirname "$CONFIG_YAML") + # email_default has two configurations + rune -0 yq -i '.name="email_default"' "$CONFIG_DIR/notifications/http.yaml" + # enable a notification, otherwise plugins are ignored + config_set "${PROFILES_PATH}" '.notifications=["slack_default"]' + # we want to check the logs + config_set '.common.log_media="stdout"' + # the command will fail because slack_default is not working + run -1 --separate-stderr timeout 2s "${CROWDSEC}" + # but we have what we wanted + assert_stderr --partial "notification 'email_default' is defined multiple times" +} + @test "bad plugin permission (group writable)" { config_set "${PROFILES_PATH}" '.notifications=["http_default"]' chmod g+w "${PLUGIN_DIR}"/notification-http - run -1 --separate-stderr timeout 2s "${CROWDSEC}" + rune -1 timeout 2s "${CROWDSEC}" assert_stderr --partial "api server init: unable to run local API: while loading plugin: plugin at ${PLUGIN_DIR}/notification-http is group writable, group writable plugins are invalid" } @test "bad plugin permission (world writable)" { config_set "${PROFILES_PATH}" '.notifications=["http_default"]' chmod o+w "${PLUGIN_DIR}"/notification-http - run -1 --separate-stderr timeout 2s "${CROWDSEC}" + rune -1 timeout 2s "${CROWDSEC}"
assert_stderr --partial "api server init: unable to run local API: while loading plugin: plugin at ${PLUGIN_DIR}/notification-http is world writable, world writable plugins are invalid" } @test "config.yaml: missing .plugin_config section" { config_set 'del(.plugin_config)' config_set "${PROFILES_PATH}" '.notifications=["http_default"]' - run -1 --separate-stderr timeout 2s "${CROWDSEC}" + rune -1 timeout 2s "${CROWDSEC}" assert_stderr --partial "api server init: plugins are enabled, but the plugin_config section is missing in the configuration" } @test "config.yaml: missing config_paths.notification_dir" { config_set 'del(.config_paths.notification_dir)' config_set "${PROFILES_PATH}" '.notifications=["http_default"]' - run -1 --separate-stderr timeout 2s "${CROWDSEC}" + rune -1 timeout 2s "${CROWDSEC}" assert_stderr --partial "api server init: plugins are enabled, but config_paths.notification_dir is not defined" } @test "config.yaml: missing config_paths.plugin_dir" { config_set 'del(.config_paths.plugin_dir)' config_set "${PROFILES_PATH}" '.notifications=["http_default"]' - run -1 --separate-stderr timeout 2s "${CROWDSEC}" + rune -1 timeout 2s "${CROWDSEC}" assert_stderr --partial "api server init: plugins are enabled, but config_paths.plugin_dir is not defined" } @test "unable to run local API: while reading plugin config" { config_set '.config_paths.notification_dir="/this/path/does/not/exist"' config_set "${PROFILES_PATH}" '.notifications=["http_default"]' - run -1 --separate-stderr timeout 2s "${CROWDSEC}" + rune -1 timeout 2s "${CROWDSEC}" assert_stderr --partial "api server init: unable to run local API: while loading plugin config: open /this/path/does/not/exist: no such file or directory" } diff --git a/test/bats/80_alerts.bats b/test/bats/80_alerts.bats index 81c4800b7..6a7f1386d 100644 --- a/test/bats/80_alerts.bats +++ b/test/bats/80_alerts.bats @@ -25,42 +25,42 @@ teardown() { @test "cscli alerts list, with and without --machine" { is_db_postgres && 
skip - run -0 cscli decisions add -i 10.20.30.40 -t ban + rune -0 cscli decisions add -i 10.20.30.40 -t ban - run -0 cscli alerts list + rune -0 cscli alerts list refute_output --partial 'machine' # machine name appears quoted in the "REASON" column assert_output --regexp " 'githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})?' " refute_output --regexp " githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})? " - run -0 cscli alerts list -m + rune -0 cscli alerts list -m assert_output --partial 'machine' assert_output --regexp " 'githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})?' " assert_output --regexp " githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})? " - run -0 cscli alerts list --machine + rune -0 cscli alerts list --machine assert_output --partial 'machine' assert_output --regexp " 'githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})?' " assert_output --regexp " githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})? " } @test "cscli alerts list, human/json/raw" { - run -0 cscli decisions add -i 10.20.30.40 -t ban + rune -0 cscli decisions add -i 10.20.30.40 -t ban - run -0 cscli alerts list -o human - run -0 plaintext < <(output) + rune -0 cscli alerts list -o human + rune -0 plaintext < <(output) assert_output --regexp ".* ID .* value .* reason .* country .* as .* decisions .* created_at .*" assert_output --regexp ".*Ip:10.20.30.40.*manual 'ban' from.*ban:1.*" - run -0 --separate-stderr cscli alerts list -o json - run -0 jq -c '.[].decisions[0] | [.origin, .scenario, .scope, .simulated, .type, .value]' <(output) + rune -0 cscli alerts list -o json + rune -0 jq -c '.[].decisions[0] | [.origin, .scenario, .scope, .simulated, .type, .value]' <(output) assert_line --regexp "\[\"cscli\",\"manual 'ban' from 'githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})?'\",\"Ip\",false,\"ban\",\"10.20.30.40\"\]" - run -0 cscli alerts list -o raw + rune -0 cscli alerts list -o raw assert_line "id,scope,value,reason,country,as,decisions,created_at" assert_line --regexp 
".*,Ip,10.20.30.40,manual 'ban' from 'githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})?',,,ban:1,.*" - run -0 cscli alerts list -o raw --machine + rune -0 cscli alerts list -o raw --machine assert_line "id,scope,value,reason,country,as,decisions,created_at,machine" assert_line --regexp "^[0-9]+,Ip,10.20.30.40,manual 'ban' from 'githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})?',,,ban:1,.*,githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})?$" } @@ -69,14 +69,14 @@ teardown() { rune -1 cscli alerts inspect assert_stderr --partial 'missing alert_id' - run -0 cscli decisions add -i 10.20.30.40 -t ban - run -0 cscli alerts list -o raw <(output) - run -0 grep 10.20.30.40 <(output) - run -0 cut -d, -f1 <(output) + rune -0 cscli decisions add -i 10.20.30.40 -t ban + rune -0 cscli alerts list -o raw <(output) + rune -0 grep 10.20.30.40 <(output) + rune -0 cut -d, -f1 <(output) ALERT_ID="${output}" - run -0 cscli alerts inspect "${ALERT_ID}" -o human - run -0 plaintext < <(output) + rune -0 cscli alerts inspect "${ALERT_ID}" -o human + rune -0 plaintext < <(output) assert_line --regexp '^#+$' assert_line --regexp "^ - ID *: ${ALERT_ID}$" assert_line --regexp "^ - Date *: .*$" @@ -93,10 +93,10 @@ teardown() { assert_line --regexp "^.* ID .* scope:value .* action .* expiration .* created_at .*$" assert_line --regexp "^.* Ip:10.20.30.40 .* ban .*$" - run -0 cscli alerts inspect "${ALERT_ID}" -o human --details + rune -0 cscli alerts inspect "${ALERT_ID}" -o human --details # XXX can we have something here? 
- run -0 cscli alerts inspect "${ALERT_ID}" -o raw + rune -0 cscli alerts inspect "${ALERT_ID}" -o raw assert_line --regexp "^ *capacity: 0$" assert_line --regexp "^ *id: ${ALERT_ID}$" assert_line --regexp "^ *origin: cscli$" @@ -106,91 +106,91 @@ teardown() { assert_line --regexp "^ *type: ban$" assert_line --regexp "^ *value: 10.20.30.40$" - run -0 --separate-stderr cscli alerts inspect "${ALERT_ID}" -o json + rune -0 cscli alerts inspect "${ALERT_ID}" -o json alert=${output} - run jq -c '.decisions[] | [.origin,.scenario,.scope,.simulated,.type,.value]' <<<"${alert}" + rune jq -c '.decisions[] | [.origin,.scenario,.scope,.simulated,.type,.value]' <<<"${alert}" assert_output --regexp "\[\"cscli\",\"manual 'ban' from 'githubciXXXXXXXXXXXXXXXXXXXXXXXX.*'\",\"Ip\",false,\"ban\",\"10.20.30.40\"\]" - run jq -c '.source' <<<"${alert}" + rune jq -c '.source' <<<"${alert}" assert_json '{ip:"10.20.30.40",scope:"Ip",value:"10.20.30.40"}' } @test "no active alerts" { - run -0 --separate-stderr cscli alerts list --until 200d -o human + rune -0 cscli alerts list --until 200d -o human assert_output "No active alerts" - run -0 --separate-stderr cscli alerts list --until 200d -o json + rune -0 cscli alerts list --until 200d -o json assert_output "null" - run -0 --separate-stderr cscli alerts list --until 200d -o raw + rune -0 cscli alerts list --until 200d -o raw assert_output "id,scope,value,reason,country,as,decisions,created_at" - run -0 --separate-stderr cscli alerts list --until 200d -o raw --machine + rune -0 cscli alerts list --until 200d -o raw --machine assert_output "id,scope,value,reason,country,as,decisions,created_at,machine" } @test "cscli alerts delete (by id)" { - run -0 --separate-stderr cscli alerts delete --help + rune -0 cscli alerts delete --help if [[ ! 
"$output" =~ "--id string" ]]; then skip "cscli alerts delete --id not supported" fi # make sure there is at least one alert - run -0 cscli decisions add -i 127.0.0.1 -d 1h -R crowdsecurity/test + rune -0 cscli decisions add -i 127.0.0.1 -d 1h -R crowdsecurity/test # when testing with global config, alert id is not guaranteed to be 1. # we'll just remove the first alert we find - run -0 --separate-stderr cscli alerts list -o json - run -0 jq -c '.[0].id' <(output) + rune -0 cscli alerts list -o json + rune -0 jq -c '.[0].id' <(output) ALERT_ID="$output" - run -0 --separate-stderr cscli alerts delete --id "$ALERT_ID" + rune -0 cscli alerts delete --id "$ALERT_ID" refute_output assert_stderr --partial "1 alert(s) deleted" # can't delete twice - run -1 --separate-stderr cscli alerts delete --id "$ALERT_ID" + rune -1 cscli alerts delete --id "$ALERT_ID" refute_output assert_stderr --partial "unable to delete alert" assert_stderr --partial "API error: ent: alert not found" } @test "cscli alerts delete (all)" { - run -0 --separate-stderr cscli alerts delete --all + rune -0 cscli alerts delete --all assert_stderr --partial '0 alert(s) deleted' - run -0 cscli decisions add -i 1.2.3.4 -d 1h -R crowdsecurity/test - run -0 cscli decisions add -i 1.2.3.5 -d 1h -R crowdsecurity/test + rune -0 cscli decisions add -i 1.2.3.4 -d 1h -R crowdsecurity/test + rune -0 cscli decisions add -i 1.2.3.5 -d 1h -R crowdsecurity/test - run -0 --separate-stderr cscli alerts delete --all + rune -0 cscli alerts delete --all assert_stderr --partial '2 alert(s) deleted' # XXX TODO: delete by scope, value, scenario, range.. } @test "cscli alerts delete (with cascade to decisions)" { - run -0 cscli decisions add -i 1.2.3.4 - run -0 --separate-stderr cscli decisions list -o json - run -0 jq '. | length' <(output) + rune -0 cscli decisions add -i 1.2.3.4 + rune -0 cscli decisions list -o json + rune -0 jq '. 
| length' <(output) assert_output 1 - run -0 --separate-stderr cscli alerts delete -i 1.2.3.4 + rune -0 cscli alerts delete -i 1.2.3.4 assert_stderr --partial 'alert(s) deleted' - run -0 --separate-stderr cscli decisions list -o json + rune -0 cscli decisions list -o json assert_output null } @test "cscli alerts delete (must ignore the query limit)" { for i in $(seq 1 200); do - run -0 cscli decisions add -i 1.2.3.4 + rune -0 cscli decisions add -i 1.2.3.4 done - run -0 --separate-stderr cscli alerts delete -i 1.2.3.4 + rune -0 cscli alerts delete -i 1.2.3.4 assert_stderr --partial '200 alert(s) deleted' } @test "bad duration" { skip 'TODO' - run -0 cscli decisions add -i 10.20.30.40 -t ban - run -9 --separate-stderr cscli decisions list --ip 10.20.30.40 -o json - run -9 jq -r '.[].decisions[].id' <(output) + rune -0 cscli decisions add -i 10.20.30.40 -t ban + rune -9 cscli decisions list --ip 10.20.30.40 -o json + rune -9 jq -r '.[].decisions[].id' <(output) DECISION_ID="${output}" ./instance-crowdsec stop - run -0 ./instance-db exec_sql "UPDATE decisions SET ... WHERE id=${DECISION_ID}" + rune -0 ./instance-db exec_sql "UPDATE decisions SET ... 
WHERE id=${DECISION_ID}" ./instance-crowdsec start } diff --git a/test/bats/90_decisions.bats b/test/bats/90_decisions.bats index dbc955ce4..3499f3e0e 100644 --- a/test/bats/90_decisions.bats +++ b/test/bats/90_decisions.bats @@ -21,47 +21,43 @@ teardown() { ./instance-crowdsec stop } -declare stderr - #---------- @test "'decisions add' requires parameters" { - run -1 --separate-stderr cscli decisions add + rune -1 cscli decisions add assert_line "Usage:" assert_stderr --partial "Missing arguments, a value is required (--ip, --range or --scope and --value)" - run -1 --separate-stderr cscli decisions add -o json - run echo "${stderr}" - run -0 jq -c '[ .level, .msg]' <(output | grep "^{") + rune -1 cscli decisions add -o json + rune -0 jq -c '[ .level, .msg]' <(stderr | grep "^{") assert_output '["fatal","Missing arguments, a value is required (--ip, --range or --scope and --value)"]' } @test "cscli decisions list, with and without --machine" { is_db_postgres && skip - run -0 cscli decisions add -i 10.20.30.40 -t ban + rune -0 cscli decisions add -i 10.20.30.40 -t ban - run -0 cscli decisions list + rune -0 cscli decisions list refute_output --partial 'Machine' # machine name appears quoted in the "REASON" column assert_output --regexp " 'githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})?' " refute_output --regexp " githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})? " - run -0 cscli decisions list -m + rune -0 cscli decisions list -m assert_output --partial 'Machine' assert_output --regexp " 'githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})?' " assert_output --regexp " githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})? " - run -0 cscli decisions list --machine + rune -0 cscli decisions list --machine assert_output --partial 'Machine' assert_output --regexp " 'githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})?' " assert_output --regexp " githubciXXXXXXXXXXXXXXXXXXXXXXXX([a-zA-Z0-9]{16})? 
" } @test "cscli decisions list, incorrect parameters" { - run -1 --separate-stderr cscli decisions list --until toto + rune -1 cscli decisions list --until toto assert_stderr --partial 'Unable to list decisions : performing request: API error: while parsing duration: time: invalid duration \"toto\"' - run -1 --separate-stderr cscli decisions list --until toto -o json - run echo "${stderr}" - run -0 jq -c '[.level, .msg]' <(output | grep "^{") + rune -1 cscli decisions list --until toto -o json + rune -0 jq -c '[.level, .msg]' <(stderr | grep "^{") assert_output '["fatal","Unable to list decisions : performing request: API error: while parsing duration: time: invalid duration \"toto\""]' } diff --git a/test/bats/97_ipv4_single.bats b/test/bats/97_ipv4_single.bats index 18ff712f0..c42836071 100644 --- a/test/bats/97_ipv4_single.bats +++ b/test/bats/97_ipv4_single.bats @@ -31,77 +31,77 @@ api() { @test "cli - first decisions list: must be empty" { # delete community pull - run -0 cscli decisions delete --all - run -0 --separate-stderr cscli decisions list -o json + rune -0 cscli decisions delete --all + rune -0 cscli decisions list -o json assert_output 'null' } @test "API - first decisions list: must be empty" { - run -0 --separate-stderr api '/v1/decisions' + rune -0 api '/v1/decisions' assert_output 'null' } @test "adding decision for 1.2.3.4" { - run -0 --separate-stderr cscli decisions add -i '1.2.3.4' + rune -0 cscli decisions add -i '1.2.3.4' assert_stderr --partial 'Decision successfully added' } @test "CLI - all decisions" { - run -0 --separate-stderr cscli decisions list -o json - run -0 jq -r '.[0].decisions[0].value' <(output) + rune -0 cscli decisions list -o json + rune -0 jq -r '.[0].decisions[0].value' <(output) assert_output '1.2.3.4' } @test "API - all decisions" { - run -0 --separate-stderr api '/v1/decisions' - run -0 jq -c '[ . | length, .[0].value ]' <(output) + rune -0 api '/v1/decisions' + rune -0 jq -c '[ . 
| length, .[0].value ]' <(output) assert_output '[1,"1.2.3.4"]' } # check ip match @test "CLI - decision for 1.2.3.4" { - run -0 --separate-stderr cscli decisions list -i '1.2.3.4' -o json - run -0 jq -r '.[0].decisions[0].value' <(output) + rune -0 cscli decisions list -i '1.2.3.4' -o json + rune -0 jq -r '.[0].decisions[0].value' <(output) assert_output '1.2.3.4' } @test "API - decision for 1.2.3.4" { - run -0 --separate-stderr api '/v1/decisions?ip=1.2.3.4' - run -0 jq -r '.[0].value' <(output) + rune -0 api '/v1/decisions?ip=1.2.3.4' + rune -0 jq -r '.[0].value' <(output) assert_output '1.2.3.4' } @test "CLI - decision for 1.2.3.5" { - run -0 --separate-stderr cscli decisions list -i '1.2.3.5' -o json + rune -0 cscli decisions list -i '1.2.3.5' -o json assert_output 'null' } @test "API - decision for 1.2.3.5" { - run -0 --separate-stderr api '/v1/decisions?ip=1.2.3.5' + rune -0 api '/v1/decisions?ip=1.2.3.5' assert_output 'null' } ## check outer range match @test "CLI - decision for 1.2.3.0/24" { - run -0 --separate-stderr cscli decisions list -r '1.2.3.0/24' -o json + rune -0 cscli decisions list -r '1.2.3.0/24' -o json assert_output 'null' } @test "API - decision for 1.2.3.0/24" { - run -0 --separate-stderr api '/v1/decisions?range=1.2.3.0/24' + rune -0 api '/v1/decisions?range=1.2.3.0/24' assert_output 'null' } @test "CLI - decisions where IP in 1.2.3.0/24" { - run -0 --separate-stderr cscli decisions list -r '1.2.3.0/24' --contained -o json - run -0 jq -r '.[0].decisions[0].value' <(output) + rune -0 cscli decisions list -r '1.2.3.0/24' --contained -o json + rune -0 jq -r '.[0].decisions[0].value' <(output) assert_output '1.2.3.4' } @test "API - decisions where IP in 1.2.3.0/24" { - run -0 --separate-stderr api '/v1/decisions?range=1.2.3.0/24&contains=false' - run -0 jq -r '.[0].value' <(output) + rune -0 api '/v1/decisions?range=1.2.3.0/24&contains=false' + rune -0 jq -r '.[0].value' <(output) assert_output '1.2.3.4' } diff --git 
a/test/bats/97_ipv6_single.bats b/test/bats/97_ipv6_single.bats index 38ef681be..41948fb25 100644 --- a/test/bats/97_ipv6_single.bats +++ b/test/bats/97_ipv6_single.bats @@ -31,125 +31,125 @@ api() { @test "cli - first decisions list: must be empty" { # delete community pull - run -0 cscli decisions delete --all - run -0 --separate-stderr cscli decisions list -o json + rune -0 cscli decisions delete --all + rune -0 cscli decisions list -o json assert_output 'null' } @test "adding decision for ip 1111:2222:3333:4444:5555:6666:7777:8888" { - run -0 --separate-stderr cscli decisions add -i '1111:2222:3333:4444:5555:6666:7777:8888' + rune -0 cscli decisions add -i '1111:2222:3333:4444:5555:6666:7777:8888' assert_stderr --partial 'Decision successfully added' } @test "CLI - all decisions" { - run -0 --separate-stderr cscli decisions list -o json - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli decisions list -o json + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output '1111:2222:3333:4444:5555:6666:7777:8888' } @test "API - all decisions" { - run -0 --separate-stderr api "/v1/decisions" - run -0 jq -r '.[].value' <(output) + rune -0 api "/v1/decisions" + rune -0 jq -r '.[].value' <(output) assert_output '1111:2222:3333:4444:5555:6666:7777:8888' } @test "CLI - decisions for ip 1111:2222:3333:4444:5555:6666:7777:8888" { - run -0 --separate-stderr cscli decisions list -i '1111:2222:3333:4444:5555:6666:7777:8888' -o json - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli decisions list -i '1111:2222:3333:4444:5555:6666:7777:8888' -o json + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output '1111:2222:3333:4444:5555:6666:7777:8888' } @test "API - decisions for ip 1111:2222:3333:4444:5555:6666:7777:888" { - run -0 --separate-stderr api '/v1/decisions?ip=1111:2222:3333:4444:5555:6666:7777:8888' - run -0 jq -r '.[].value' <(output) + rune -0 api '/v1/decisions?ip=1111:2222:3333:4444:5555:6666:7777:8888' + rune -0 jq -r 
'.[].value' <(output) assert_output '1111:2222:3333:4444:5555:6666:7777:8888' } @test "CLI - decisions for ip 1211:2222:3333:4444:5555:6666:7777:8888" { - run -0 --separate-stderr cscli decisions list -i '1211:2222:3333:4444:5555:6666:7777:8888' -o json + rune -0 cscli decisions list -i '1211:2222:3333:4444:5555:6666:7777:8888' -o json assert_output 'null' } @test "API - decisions for ip 1211:2222:3333:4444:5555:6666:7777:888" { - run -0 --separate-stderr api '/v1/decisions?ip=1211:2222:3333:4444:5555:6666:7777:8888' + rune -0 api '/v1/decisions?ip=1211:2222:3333:4444:5555:6666:7777:8888' assert_output 'null' } @test "CLI - decisions for ip 1111:2222:3333:4444:5555:6666:7777:8887" { - run -0 --separate-stderr cscli decisions list -i '1111:2222:3333:4444:5555:6666:7777:8887' -o json + rune -0 cscli decisions list -i '1111:2222:3333:4444:5555:6666:7777:8887' -o json assert_output 'null' } @test "API - decisions for ip 1111:2222:3333:4444:5555:6666:7777:8887" { - run -0 --separate-stderr api '/v1/decisions?ip=1111:2222:3333:4444:5555:6666:7777:8887' + rune -0 api '/v1/decisions?ip=1111:2222:3333:4444:5555:6666:7777:8887' assert_output 'null' } @test "CLI - decisions for range 1111:2222:3333:4444:5555:6666:7777:8888/48" { - run -0 --separate-stderr cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/48' -o json + rune -0 cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/48' -o json assert_output 'null' } @test "API - decisions for range 1111:2222:3333:4444:5555:6666:7777:8888/48" { - run -0 --separate-stderr api '/v1/decisions?range=1111:2222:3333:4444:5555:6666:7777:8888/48' + rune -0 api '/v1/decisions?range=1111:2222:3333:4444:5555:6666:7777:8888/48' assert_output 'null' } @test "CLI - decisions for ip/range in 1111:2222:3333:4444:5555:6666:7777:8888/48" { - run -0 --separate-stderr cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/48' --contained -o json - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli 
decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/48' --contained -o json + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output '1111:2222:3333:4444:5555:6666:7777:8888' } @test "API - decisions for ip/range in 1111:2222:3333:4444:5555:6666:7777:8888/48" { - run -0 --separate-stderr api '/v1/decisions?range=1111:2222:3333:4444:5555:6666:7777:8888/48&&contains=false' - run -0 jq -r '.[].value' <(output) + rune -0 api '/v1/decisions?range=1111:2222:3333:4444:5555:6666:7777:8888/48&&contains=false' + rune -0 jq -r '.[].value' <(output) assert_output '1111:2222:3333:4444:5555:6666:7777:8888' } @test "CLI - decisions for range 1111:2222:3333:4444:5555:6666:7777:8888/64" { - run -0 --separate-stderr cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/64' -o json + rune -0 cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/64' -o json assert_output 'null' } @test "API - decisions for range 1111:2222:3333:4444:5555:6666:7777:8888/64" { - run -0 --separate-stderr api '/v1/decisions?range=1111:2222:3333:4444:5555:6666:7777:8888/64' + rune -0 api '/v1/decisions?range=1111:2222:3333:4444:5555:6666:7777:8888/64' assert_output 'null' } @test "CLI - decisions for ip/range in 1111:2222:3333:4444:5555:6666:7777:8888/64" { - run -0 --separate-stderr cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/64' -o json --contained - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/64' -o json --contained + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output '1111:2222:3333:4444:5555:6666:7777:8888' } @test "API - decisions for ip/range in 1111:2222:3333:4444:5555:6666:7777:8888/64" { - run -0 --separate-stderr api '/v1/decisions?range=1111:2222:3333:4444:5555:6666:7777:8888/64&&contains=false' - run -0 jq -r '.[].value' <(output) + rune -0 api '/v1/decisions?range=1111:2222:3333:4444:5555:6666:7777:8888/64&&contains=false' + rune -0 jq -r 
'.[].value' <(output) assert_output '1111:2222:3333:4444:5555:6666:7777:8888' } @test "adding decision for ip 1111:2222:3333:4444:5555:6666:7777:8889" { - run -0 --separate-stderr cscli decisions add -i '1111:2222:3333:4444:5555:6666:7777:8889' + rune -0 cscli decisions add -i '1111:2222:3333:4444:5555:6666:7777:8889' assert_stderr --partial 'Decision successfully added' } @test "deleting decision for ip 1111:2222:3333:4444:5555:6666:7777:8889" { - run -0 --separate-stderr cscli decisions delete -i '1111:2222:3333:4444:5555:6666:7777:8889' + rune -0 cscli decisions delete -i '1111:2222:3333:4444:5555:6666:7777:8889' assert_stderr --partial '1 decision(s) deleted' } @test "CLI - decisions for ip 1111:2222:3333:4444:5555:6666:7777:8889 after delete" { - run -0 --separate-stderr cscli decisions list -i '1111:2222:3333:4444:5555:6666:7777:8889' -o json + rune -0 cscli decisions list -i '1111:2222:3333:4444:5555:6666:7777:8889' -o json assert_output 'null' } @test "deleting decision for range 1111:2222:3333:4444:5555:6666:7777:8888/64" { - run -0 --separate-stderr cscli decisions delete -r '1111:2222:3333:4444:5555:6666:7777:8888/64' --contained + rune -0 cscli decisions delete -r '1111:2222:3333:4444:5555:6666:7777:8888/64' --contained assert_stderr --partial '1 decision(s) deleted' } @test "CLI - decisions for ip/range in 1111:2222:3333:4444:5555:6666:7777:8888/64 after delete" { - run -0 --separate-stderr cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/64' -o json --contained + rune -0 cscli decisions list -r '1111:2222:3333:4444:5555:6666:7777:8888/64' -o json --contained assert_output 'null' } diff --git a/test/bats/98_ipv4_range.bats b/test/bats/98_ipv4_range.bats index 524b5944b..1983225b9 100644 --- a/test/bats/98_ipv4_range.bats +++ b/test/bats/98_ipv4_range.bats @@ -31,104 +31,104 @@ api() { @test "cli - first decisions list: must be empty" { # delete community pull - run -0 cscli decisions delete --all - run -0 --separate-stderr cscli 
decisions list -o json + rune -0 cscli decisions delete --all + rune -0 cscli decisions list -o json assert_output 'null' } @test "adding decision for range 4.4.4.0/24" { - run -0 --separate-stderr cscli decisions add -r '4.4.4.0/24' + rune -0 cscli decisions add -r '4.4.4.0/24' assert_stderr --partial 'Decision successfully added' } @test "CLI - all decisions" { - run -0 --separate-stderr cscli decisions list -o json - run -0 jq -r '.[0].decisions[0].value' <(output) + rune -0 cscli decisions list -o json + rune -0 jq -r '.[0].decisions[0].value' <(output) assert_output '4.4.4.0/24' } @test "API - all decisions" { - run -0 --separate-stderr api '/v1/decisions' - run -0 jq -r '.[0].value' <(output) + rune -0 api '/v1/decisions' + rune -0 jq -r '.[0].value' <(output) assert_output '4.4.4.0/24' } # check ip within/outside of range @test "CLI - decisions for ip 4.4.4." { - run -0 --separate-stderr cscli decisions list -i '4.4.4.3' -o json - run -0 jq -r '.[0].decisions[0].value' <(output) + rune -0 cscli decisions list -i '4.4.4.3' -o json + rune -0 jq -r '.[0].decisions[0].value' <(output) assert_output '4.4.4.0/24' } @test "API - decisions for ip 4.4.4." { - run -0 --separate-stderr api '/v1/decisions?ip=4.4.4.3' - run -0 jq -r '.[0].value' <(output) + rune -0 api '/v1/decisions?ip=4.4.4.3' + rune -0 jq -r '.[0].value' <(output) assert_output '4.4.4.0/24' } @test "CLI - decisions for ip contained in 4.4.4." { - run -0 --separate-stderr cscli decisions list -i '4.4.4.4' -o json --contained + rune -0 cscli decisions list -i '4.4.4.4' -o json --contained assert_output 'null' } @test "API - decisions for ip contained in 4.4.4." { - run -0 --separate-stderr api '/v1/decisions?ip=4.4.4.4&contains=false' + rune -0 api '/v1/decisions?ip=4.4.4.4&contains=false' assert_output 'null' } @test "CLI - decisions for ip 5.4.4." 
{ - run -0 --separate-stderr cscli decisions list -i '5.4.4.3' -o json + rune -0 cscli decisions list -i '5.4.4.3' -o json assert_output 'null' } @test "API - decisions for ip 5.4.4." { - run -0 --separate-stderr api '/v1/decisions?ip=5.4.4.3' + rune -0 api '/v1/decisions?ip=5.4.4.3' assert_output 'null' } @test "CLI - decisions for range 4.4.0.0/1" { - run -0 --separate-stderr cscli decisions list -r '4.4.0.0/16' -o json + rune -0 cscli decisions list -r '4.4.0.0/16' -o json assert_output 'null' } @test "API - decisions for range 4.4.0.0/1" { - run -0 --separate-stderr api '/v1/decisions?range=4.4.0.0/16' + rune -0 api '/v1/decisions?range=4.4.0.0/16' assert_output 'null' } @test "CLI - decisions for ip/range in 4.4.0.0/1" { - run -0 --separate-stderr cscli decisions list -r '4.4.0.0/16' -o json --contained - run -0 jq -r '.[0].decisions[0].value' <(output) + rune -0 cscli decisions list -r '4.4.0.0/16' -o json --contained + rune -0 jq -r '.[0].decisions[0].value' <(output) assert_output '4.4.4.0/24' } @test "API - decisions for ip/range in 4.4.0.0/1" { - run -0 --separate-stderr api '/v1/decisions?range=4.4.0.0/16&contains=false' - run -0 jq -r '.[0].value' <(output) + rune -0 api '/v1/decisions?range=4.4.0.0/16&contains=false' + rune -0 jq -r '.[0].value' <(output) assert_output '4.4.4.0/24' } # check subrange @test "CLI - decisions for range 4.4.4.2/2" { - run -0 --separate-stderr cscli decisions list -r '4.4.4.2/28' -o json - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli decisions list -r '4.4.4.2/28' -o json + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output '4.4.4.0/24' } @test "API - decisions for range 4.4.4.2/2" { - run -0 --separate-stderr api '/v1/decisions?range=4.4.4.2/28' - run -0 jq -r '.[].value' <(output) + rune -0 api '/v1/decisions?range=4.4.4.2/28' + rune -0 jq -r '.[].value' <(output) assert_output '4.4.4.0/24' } @test "CLI - decisions for range 4.4.3.2/2" { - run -0 --separate-stderr cscli decisions list -r 
'4.4.3.2/28' -o json + rune -0 cscli decisions list -r '4.4.3.2/28' -o json assert_output 'null' } @test "API - decisions for range 4.4.3.2/2" { - run -0 --separate-stderr api '/v1/decisions?range=4.4.3.2/28' + rune -0 api '/v1/decisions?range=4.4.3.2/28' assert_output 'null' } diff --git a/test/bats/98_ipv6_range.bats b/test/bats/98_ipv6_range.bats index 52074a816..b85f0dfcd 100644 --- a/test/bats/98_ipv6_range.bats +++ b/test/bats/98_ipv6_range.bats @@ -31,187 +31,187 @@ api() { @test "cli - first decisions list: must be empty" { # delete community pull - run -0 cscli decisions delete --all - run -0 --separate-stderr cscli decisions list -o json + rune -0 cscli decisions delete --all + rune -0 cscli decisions list -o json assert_output 'null' } @test "adding decision for range aaaa:2222:3333:4444::/64" { - run -0 --separate-stderr cscli decisions add -r 'aaaa:2222:3333:4444::/64' + rune -0 cscli decisions add -r 'aaaa:2222:3333:4444::/64' assert_stderr --partial 'Decision successfully added' } @test "CLI - all decisions (2)" { - run -0 --separate-stderr cscli decisions list -o json - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli decisions list -o json + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output 'aaaa:2222:3333:4444::/64' } @test "API - all decisions (2)" { - run -0 --separate-stderr api '/v1/decisions' - run -0 jq -r '.[].value' <(output) + rune -0 api '/v1/decisions' + rune -0 jq -r '.[].value' <(output) assert_output 'aaaa:2222:3333:4444::/64' } # check ip within/out of range @test "CLI - decisions for ip aaaa:2222:3333:4444:5555:6666:7777:8888" { - run -0 --separate-stderr cscli decisions list -i 'aaaa:2222:3333:4444:5555:6666:7777:8888' -o json - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli decisions list -i 'aaaa:2222:3333:4444:5555:6666:7777:8888' -o json + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output 'aaaa:2222:3333:4444::/64' } @test "API - decisions for ip 
aaaa:2222:3333:4444:5555:6666:7777:8888" { - run -0 --separate-stderr api '/v1/decisions?ip=aaaa:2222:3333:4444:5555:6666:7777:8888' - run -0 jq -r '.[].value' <(output) + rune -0 api '/v1/decisions?ip=aaaa:2222:3333:4444:5555:6666:7777:8888' + rune -0 jq -r '.[].value' <(output) assert_output 'aaaa:2222:3333:4444::/64' } @test "CLI - decisions for ip aaaa:2222:3333:4445:5555:6666:7777:8888" { - run -0 --separate-stderr cscli decisions list -i 'aaaa:2222:3333:4445:5555:6666:7777:8888' -o json + rune -0 cscli decisions list -i 'aaaa:2222:3333:4445:5555:6666:7777:8888' -o json assert_output 'null' } @test "API - decisions for ip aaaa:2222:3333:4445:5555:6666:7777:8888" { - run -0 --separate-stderr api '/v1/decisions?ip=aaaa:2222:3333:4445:5555:6666:7777:8888' + rune -0 api '/v1/decisions?ip=aaaa:2222:3333:4445:5555:6666:7777:8888' assert_output 'null' } @test "CLI - decisions for ip aaa1:2222:3333:4444:5555:6666:7777:8887" { - run -0 --separate-stderr cscli decisions list -i 'aaa1:2222:3333:4444:5555:6666:7777:8887' -o json + rune -0 cscli decisions list -i 'aaa1:2222:3333:4444:5555:6666:7777:8887' -o json assert_output 'null' } @test "API - decisions for ip aaa1:2222:3333:4444:5555:6666:7777:8887" { - run -0 --separate-stderr api '/v1/decisions?ip=aaa1:2222:3333:4444:5555:6666:7777:8887' + rune -0 api '/v1/decisions?ip=aaa1:2222:3333:4444:5555:6666:7777:8887' assert_output 'null' } # check subrange within/out of range @test "CLI - decisions for range aaaa:2222:3333:4444:5555::/80" { - run -0 --separate-stderr cscli decisions list -r 'aaaa:2222:3333:4444:5555::/80' -o json - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli decisions list -r 'aaaa:2222:3333:4444:5555::/80' -o json + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output 'aaaa:2222:3333:4444::/64' } @test "API - decisions for range aaaa:2222:3333:4444:5555::/80" { - run -0 --separate-stderr api '/v1/decisions?range=aaaa:2222:3333:4444:5555::/80' - run -0 jq -r '.[].value' 
<(output) + rune -0 api '/v1/decisions?range=aaaa:2222:3333:4444:5555::/80' + rune -0 jq -r '.[].value' <(output) assert_output 'aaaa:2222:3333:4444::/64' } @test "CLI - decisions for range aaaa:2222:3333:4441:5555::/80" { - run -0 --separate-stderr cscli decisions list -r 'aaaa:2222:3333:4441:5555::/80' -o json + rune -0 cscli decisions list -r 'aaaa:2222:3333:4441:5555::/80' -o json assert_output 'null' } @test "API - decisions for range aaaa:2222:3333:4441:5555::/80" { - run -0 --separate-stderr api '/v1/decisions?range=aaaa:2222:3333:4441:5555::/80' + rune -0 api '/v1/decisions?range=aaaa:2222:3333:4441:5555::/80' assert_output 'null' } @test "CLI - decisions for range aaa1:2222:3333:4444:5555::/80" { - run -0 --separate-stderr cscli decisions list -r 'aaa1:2222:3333:4444:5555::/80' -o json + rune -0 cscli decisions list -r 'aaa1:2222:3333:4444:5555::/80' -o json assert_output 'null' } @test "API - decisions for range aaa1:2222:3333:4444:5555::/80" { - run -0 --separate-stderr api '/v1/decisions?range=aaa1:2222:3333:4444:5555::/80' + rune -0 api '/v1/decisions?range=aaa1:2222:3333:4444:5555::/80' assert_output 'null' } # check outer range @test "CLI - decisions for range aaaa:2222:3333:4444:5555:6666:7777:8888/48" { - run -0 --separate-stderr cscli decisions list -r 'aaaa:2222:3333:4444:5555:6666:7777:8888/48' -o json + rune -0 cscli decisions list -r 'aaaa:2222:3333:4444:5555:6666:7777:8888/48' -o json assert_output 'null' } @test "API - decisions for range aaaa:2222:3333:4444:5555:6666:7777:8888/48" { - run -0 --separate-stderr api '/v1/decisions?range=aaaa:2222:3333:4444:5555:6666:7777:8888/48' + rune -0 api '/v1/decisions?range=aaaa:2222:3333:4444:5555:6666:7777:8888/48' assert_output 'null' } @test "CLI - decisions for ip/range in aaaa:2222:3333:4444:5555:6666:7777:8888/48" { - run -0 --separate-stderr cscli decisions list -r 'aaaa:2222:3333:4444:5555:6666:7777:8888/48' -o json --contained - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli 
decisions list -r 'aaaa:2222:3333:4444:5555:6666:7777:8888/48' -o json --contained + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output 'aaaa:2222:3333:4444::/64' } @test "API - decisions for ip/range in aaaa:2222:3333:4444:5555:6666:7777:8888/48" { - run -0 --separate-stderr api '/v1/decisions?range=aaaa:2222:3333:4444:5555:6666:7777:8888/48&contains=false' - run -0 jq -r '.[].value' <(output) + rune -0 api '/v1/decisions?range=aaaa:2222:3333:4444:5555:6666:7777:8888/48&contains=false' + rune -0 jq -r '.[].value' <(output) assert_output 'aaaa:2222:3333:4444::/64' } @test "CLI - decisions for ip/range in aaaa:2222:3333:4445:5555:6666:7777:8888/48" { - run -0 --separate-stderr cscli decisions list -r 'aaaa:2222:3333:4445:5555:6666:7777:8888/48' -o json + rune -0 cscli decisions list -r 'aaaa:2222:3333:4445:5555:6666:7777:8888/48' -o json assert_output 'null' } @test "API - decisions for ip/range in aaaa:2222:3333:4445:5555:6666:7777:8888/48" { - run -0 --separate-stderr api '/v1/decisions?range=aaaa:2222:3333:4445:5555:6666:7777:8888/48' + rune -0 api '/v1/decisions?range=aaaa:2222:3333:4445:5555:6666:7777:8888/48' assert_output 'null' } # bbbb:db8:: -> bbbb:db8:0000:0000:0000:7fff:ffff:ffff @test "adding decision for range bbbb:db8::/81" { - run -0 --separate-stderr cscli decisions add -r 'bbbb:db8::/81' + rune -0 cscli decisions add -r 'bbbb:db8::/81' assert_stderr --partial 'Decision successfully added' } @test "CLI - decisions for ip bbbb:db8:0000:0000:0000:6fff:ffff:ffff" { - run -0 --separate-stderr cscli decisions list -o json -i 'bbbb:db8:0000:0000:0000:6fff:ffff:ffff' - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli decisions list -o json -i 'bbbb:db8:0000:0000:0000:6fff:ffff:ffff' + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output 'bbbb:db8::/81' } @test "API - decisions for ip in bbbb:db8:0000:0000:0000:6fff:ffff:ffff" { - run -0 --separate-stderr api '/v1/decisions?ip=bbbb:db8:0000:0000:0000:6fff:ffff:ffff' - 
run -0 jq -r '.[].value' <(output) + rune -0 api '/v1/decisions?ip=bbbb:db8:0000:0000:0000:6fff:ffff:ffff' + rune -0 jq -r '.[].value' <(output) assert_output 'bbbb:db8::/81' } @test "CLI - decisions for ip bbbb:db8:0000:0000:0000:8fff:ffff:ffff" { - run -0 --separate-stderr cscli decisions list -o json -i 'bbbb:db8:0000:0000:0000:8fff:ffff:ffff' + rune -0 cscli decisions list -o json -i 'bbbb:db8:0000:0000:0000:8fff:ffff:ffff' assert_output 'null' } @test "API - decisions for ip in bbbb:db8:0000:0000:0000:8fff:ffff:ffff" { - run -0 --separate-stderr api '/v1/decisions?ip=bbbb:db8:0000:0000:0000:8fff:ffff:ffff' + rune -0 api '/v1/decisions?ip=bbbb:db8:0000:0000:0000:8fff:ffff:ffff' assert_output 'null' } @test "deleting decision for range aaaa:2222:3333:4444:5555:6666:7777:8888/48" { - run -0 --separate-stderr cscli decisions delete -r 'aaaa:2222:3333:4444:5555:6666:7777:8888/48' --contained + rune -0 cscli decisions delete -r 'aaaa:2222:3333:4444:5555:6666:7777:8888/48' --contained assert_stderr --partial '1 decision(s) deleted' } @test "CLI - decisions for range aaaa:2222:3333:4444::/64 after delete" { - run -0 --separate-stderr cscli decisions list -o json -r 'aaaa:2222:3333:4444::/64' + rune -0 cscli decisions list -o json -r 'aaaa:2222:3333:4444::/64' assert_output 'null' } @test "adding decision for ip bbbb:db8:0000:0000:0000:8fff:ffff:ffff" { - run -0 --separate-stderr cscli decisions add -i 'bbbb:db8:0000:0000:0000:8fff:ffff:ffff' + rune -0 cscli decisions add -i 'bbbb:db8:0000:0000:0000:8fff:ffff:ffff' assert_stderr --partial 'Decision successfully added' } @test "adding decision for ip bbbb:db8:0000:0000:0000:6fff:ffff:ffff" { - run -0 --separate-stderr cscli decisions add -i 'bbbb:db8:0000:0000:0000:6fff:ffff:ffff' + rune -0 cscli decisions add -i 'bbbb:db8:0000:0000:0000:6fff:ffff:ffff' assert_stderr --partial 'Decision successfully added' } @test "deleting decisions for range bbbb:db8::/81" { - run -0 --separate-stderr cscli decisions delete -r 
'bbbb:db8::/81' --contained + rune -0 cscli decisions delete -r 'bbbb:db8::/81' --contained assert_stderr --partial '2 decision(s) deleted' } @test "CLI - all decisions (3)" { - run -0 --separate-stderr cscli decisions list -o json - run -0 jq -r '.[].decisions[0].value' <(output) + rune -0 cscli decisions list -o json + rune -0 jq -r '.[].decisions[0].value' <(output) assert_output 'bbbb:db8:0000:0000:0000:8fff:ffff:ffff' } diff --git a/test/bats/99_lapi-stream-mode-scenario.bats b/test/bats/99_lapi-stream-mode-scenario.bats index e0862e41f..9b4d562f3 100644 --- a/test/bats/99_lapi-stream-mode-scenario.bats +++ b/test/bats/99_lapi-stream-mode-scenario.bats @@ -36,34 +36,34 @@ output_new_decisions() { @test "adding decisions with different duration, scenario, origin" { # origin: test - run -0 cscli decisions add -i 127.0.0.1 -d 1h -R crowdsecurity/test + rune -0 cscli decisions add -i 127.0.0.1 -d 1h -R crowdsecurity/test ./instance-crowdsec stop - run -0 ./instance-db exec_sql "update decisions set origin='test' where origin='cscli'" + rune -0 ./instance-db exec_sql "update decisions set origin='test' where origin='cscli'" ./instance-crowdsec start - run -0 cscli decisions add -i 127.0.0.1 -d 3h -R crowdsecurity/ssh_bf + rune -0 cscli decisions add -i 127.0.0.1 -d 3h -R crowdsecurity/ssh_bf ./instance-crowdsec stop - run -0 ./instance-db exec_sql "update decisions set origin='another_origin' where origin='cscli'" + rune -0 ./instance-db exec_sql "update decisions set origin='another_origin' where origin='cscli'" ./instance-crowdsec start - run -0 cscli decisions add -i 127.0.0.1 -d 5h -R crowdsecurity/longest - run -0 cscli decisions add -i 127.0.0.2 -d 3h -R crowdsecurity/test - run -0 cscli decisions add -i 127.0.0.2 -d 3h -R crowdsecurity/ssh_bf - run -0 cscli decisions add -i 127.0.0.2 -d 1h -R crowdsecurity/ssh_bf + rune -0 cscli decisions add -i 127.0.0.1 -d 5h -R crowdsecurity/longest + rune -0 cscli decisions add -i 127.0.0.2 -d 3h -R crowdsecurity/test + 
rune -0 cscli decisions add -i 127.0.0.2 -d 3h -R crowdsecurity/ssh_bf + rune -0 cscli decisions add -i 127.0.0.2 -d 1h -R crowdsecurity/ssh_bf ./instance-crowdsec stop - run -0 ./instance-db exec_sql "update decisions set origin='test' where origin='cscli'" + rune -0 ./instance-db exec_sql "update decisions set origin='test' where origin='cscli'" ./instance-crowdsec start # origin: another_origin - run -0 cscli decisions add -i 127.0.0.2 -d 2h -R crowdsecurity/test + rune -0 cscli decisions add -i 127.0.0.2 -d 2h -R crowdsecurity/test ./instance-crowdsec stop - run -0 ./instance-db exec_sql "update decisions set origin='another_origin' where origin='cscli'" + rune -0 ./instance-db exec_sql "update decisions set origin='another_origin' where origin='cscli'" ./instance-crowdsec start } @test "test startup" { - run -0 api "/v1/decisions/stream?startup=true" - run -0 output_new_decisions + rune -0 api "/v1/decisions/stream?startup=true" + rune -0 output_new_decisions assert_output - <<-EOT {"duration":"2h59m","origin":"test","scenario":"crowdsecurity/test","scope":"Ip","type":"ban","value":"127.0.0.2"} {"duration":"4h59m","origin":"test","scenario":"crowdsecurity/longest","scope":"Ip","type":"ban","value":"127.0.0.1"} @@ -71,8 +71,8 @@ output_new_decisions() { } @test "test startup with scenarios containing" { - run -0 api "/v1/decisions/stream?startup=true&scenarios_containing=ssh_bf" - run -0 output_new_decisions + rune -0 api "/v1/decisions/stream?startup=true&scenarios_containing=ssh_bf" + rune -0 output_new_decisions assert_output - <<-EOT {"duration":"2h59m","origin":"another_origin","scenario":"crowdsecurity/ssh_bf","scope":"Ip","type":"ban","value":"127.0.0.1"} {"duration":"2h59m","origin":"test","scenario":"crowdsecurity/ssh_bf","scope":"Ip","type":"ban","value":"127.0.0.2"} @@ -80,8 +80,8 @@ output_new_decisions() { } @test "test startup with multiple scenarios containing" { - run -0 api "/v1/decisions/stream?startup=true&scenarios_containing=ssh_bf,test" - 
run -0 output_new_decisions + rune -0 api "/v1/decisions/stream?startup=true&scenarios_containing=ssh_bf,test" + rune -0 output_new_decisions assert_output - <<-EOT {"duration":"2h59m","origin":"another_origin","scenario":"crowdsecurity/ssh_bf","scope":"Ip","type":"ban","value":"127.0.0.1"} {"duration":"2h59m","origin":"test","scenario":"crowdsecurity/test","scope":"Ip","type":"ban","value":"127.0.0.2"} @@ -89,13 +89,13 @@ output_new_decisions() { } @test "test startup with unknown scenarios containing" { - run -0 api "/v1/decisions/stream?startup=true&scenarios_containing=unknown" + rune -0 api "/v1/decisions/stream?startup=true&scenarios_containing=unknown" assert_output '{"deleted":null,"new":null}' } @test "test startup with scenarios containing and not containing" { - run -0 api "/v1/decisions/stream?startup=true&scenarios_containing=test&scenarios_not_containing=ssh_bf" - run -0 output_new_decisions + rune -0 api "/v1/decisions/stream?startup=true&scenarios_containing=test&scenarios_not_containing=ssh_bf" + rune -0 output_new_decisions assert_output - <<-EOT {"duration":"2h59m","origin":"test","scenario":"crowdsecurity/test","scope":"Ip","type":"ban","value":"127.0.0.2"} {"origin":"test","scenario":"crowdsecurity/test","scope":"Ip","type":"ban","value":"127.0.0.1"} @@ -103,16 +103,16 @@ output_new_decisions() { } @test "test startup with scenarios containing and not containing 2" { - run -0 api "/v1/decisions/stream?startup=true&scenarios_containing=longest&scenarios_not_containing=ssh_bf,test" - run -0 output_new_decisions + rune -0 api "/v1/decisions/stream?startup=true&scenarios_containing=longest&scenarios_not_containing=ssh_bf,test" + rune -0 output_new_decisions assert_output - <<-EOT {"duration":"4h59m","origin":"test","scenario":"crowdsecurity/longest","scope":"Ip","type":"ban","value":"127.0.0.1"} EOT } @test "test startup with scenarios not containing" { - run -0 api "/v1/decisions/stream?startup=true&scenarios_not_containing=ssh_bf" - run -0 
output_new_decisions + rune -0 api "/v1/decisions/stream?startup=true&scenarios_not_containing=ssh_bf" + rune -0 output_new_decisions assert_output - <<-EOT {"duration":"2h59m","origin":"test","scenario":"crowdsecurity/test","scope":"Ip","type":"ban","value":"127.0.0.2"} {"duration":"4h59m","origin":"test","scenario":"crowdsecurity/longest","scope":"Ip","type":"ban","value":"127.0.0.1"} @@ -120,16 +120,16 @@ output_new_decisions() { } @test "test startup with multiple scenarios not containing" { - run -0 api "/v1/decisions/stream?startup=true&scenarios_not_containing=ssh_bf,test" - run -0 output_new_decisions + rune -0 api "/v1/decisions/stream?startup=true&scenarios_not_containing=ssh_bf,test" + rune -0 output_new_decisions assert_output - <<-EOT {"duration":"4h59m","origin":"test","scenario":"crowdsecurity/longest","scope":"Ip","type":"ban","value":"127.0.0.1"} EOT } @test "test startup with origins parameter" { - run -0 api "/v1/decisions/stream?startup=true&origins=another_origin" - run -0 output_new_decisions + rune -0 api "/v1/decisions/stream?startup=true&origins=another_origin" + rune -0 output_new_decisions assert_output - <<-EOT {"duration":"1h59m","origin":"another_origin","scenario":"crowdsecurity/test","scope":"Ip","type":"ban","value":"127.0.0.2"} {"duration":"2h59m","origin":"another_origin","scenario":"crowdsecurity/ssh_bf","scope":"Ip","type":"ban","value":"127.0.0.1"} @@ -137,8 +137,8 @@ output_new_decisions() { } @test "test startup with multiple origins parameter" { - run -0 api "/v1/decisions/stream?startup=true&origins=another_origin,test" - run -0 output_new_decisions + rune -0 api "/v1/decisions/stream?startup=true&origins=another_origin,test" + rune -0 output_new_decisions assert_output - <<-EOT {"duration":"2h59m","origin":"test","scenario":"crowdsecurity/test","scope":"Ip","type":"ban","value":"127.0.0.2"} {"duration":"4h59m","origin":"test","scenario":"crowdsecurity/longest","scope":"Ip","type":"ban","value":"127.0.0.1"} @@ -146,7 +146,7 
@@ output_new_decisions() { } @test "test startup with unknown origins" { - run -0 api "/v1/decisions/stream?startup=true&origins=unknown" + rune -0 api "/v1/decisions/stream?startup=true&origins=unknown" assert_output '{"deleted":null,"new":null}' } diff --git a/test/bats/99_lapi-stream-mode-scopes.bats b/test/bats/99_lapi-stream-mode-scopes.bats index 6e4bca021..a1d01c489 100644 --- a/test/bats/99_lapi-stream-mode-scopes.bats +++ b/test/bats/99_lapi-stream-mode-scopes.bats @@ -29,36 +29,36 @@ api() { } @test "adding decisions for multiple scopes" { - run -0 --separate-stderr cscli decisions add -i '1.2.3.6' + rune -0 cscli decisions add -i '1.2.3.6' assert_stderr --partial 'Decision successfully added' - run -0 --separate-stderr cscli decisions add --scope user --value toto + rune -0 cscli decisions add --scope user --value toto assert_stderr --partial 'Decision successfully added' } @test "stream start (implicit ip scope)" { - run -0 api "/v1/decisions/stream?startup=true" - run -0 jq -r '.new' <(output) + rune -0 api "/v1/decisions/stream?startup=true" + rune -0 jq -r '.new' <(output) assert_output --partial '1.2.3.6' refute_output --partial 'toto' } @test "stream start (explicit ip scope)" { - run -0 api "/v1/decisions/stream?startup=true&scopes=ip" - run -0 jq -r '.new' <(output) + rune -0 api "/v1/decisions/stream?startup=true&scopes=ip" + rune -0 jq -r '.new' <(output) assert_output --partial '1.2.3.6' refute_output --partial 'toto' } @test "stream start (user scope)" { - run -0 api "/v1/decisions/stream?startup=true&scopes=user" - run -0 jq -r '.new' <(output) + rune -0 api "/v1/decisions/stream?startup=true&scopes=user" + rune -0 jq -r '.new' <(output) refute_output --partial '1.2.3.6' assert_output --partial 'toto' } @test "stream start (user+ip scope)" { - run -0 api "/v1/decisions/stream?startup=true&scopes=user,ip" - run -0 jq -r '.new' <(output) + rune -0 api "/v1/decisions/stream?startup=true&scopes=user,ip" + rune -0 jq -r '.new' <(output) 
assert_output --partial '1.2.3.6' assert_output --partial 'toto' } diff --git a/test/bats/99_lapi-stream-mode.bats b/test/bats/99_lapi-stream-mode.bats index 0ee0a64b7..08ddde42c 100644 --- a/test/bats/99_lapi-stream-mode.bats +++ b/test/bats/99_lapi-stream-mode.bats @@ -29,44 +29,46 @@ api() { } @test "adding decisions for multiple ips" { - run -0 cscli decisions add -i '1111:2222:3333:4444:5555:6666:7777:8888' - run -0 cscli decisions add -i '1.2.3.4' - run -0 --separate-stderr cscli decisions add -r '1.2.4.0/24' + rune -0 cscli decisions add -i '1111:2222:3333:4444:5555:6666:7777:8888' + assert_stderr --partial 'Decision successfully added' + rune -0 cscli decisions add -i '1.2.3.4' + assert_stderr --partial 'Decision successfully added' + rune -0 cscli decisions add -r '1.2.4.0/24' assert_stderr --partial 'Decision successfully added' } @test "stream start" { - run -0 --separate-stderr api "/v1/decisions/stream?startup=true" + rune -0 api "/v1/decisions/stream?startup=true" if is_db_mysql; then sleep 3; fi - run -0 jq -r '.new' <(output) + rune -0 jq -r '.new' <(output) assert_output --partial '1111:2222:3333:4444:5555:6666:7777:8888' assert_output --partial '1.2.3.4' assert_output --partial '1.2.4.0/24' } @test "stream cont (add)" { - run -0 cscli decisions add -i '1.2.3.5' + rune -0 cscli decisions add -i '1.2.3.5' if is_db_mysql; then sleep 3; fi - run -0 --separate-stderr api "/v1/decisions/stream" - run -0 jq -r '.new' <(output) + rune -0 api "/v1/decisions/stream" + rune -0 jq -r '.new' <(output) assert_output --partial '1.2.3.5' } @test "stream cont (del)" { - run -0 cscli decisions delete -i '1.2.3.4' + rune -0 cscli decisions delete -i '1.2.3.4' if is_db_mysql; then sleep 3; fi - run -0 --separate-stderr api "/v1/decisions/stream" - run -0 jq -r '.deleted' <(output) + rune -0 api "/v1/decisions/stream" + rune -0 jq -r '.deleted' <(output) assert_output --partial '1.2.3.4' } @test "stream restart" { - run -0 --separate-stderr api 
"/v1/decisions/stream?startup=true" + rune -0 api "/v1/decisions/stream?startup=true" api_out=${output} - run -0 jq -r '.deleted' <(output) + rune -0 jq -r '.deleted' <(output) assert_output --partial '1.2.3.4' output=${api_out} - run -0 jq -r '.new' <(output) + rune -0 jq -r '.new' <(output) assert_output --partial '1111:2222:3333:4444:5555:6666:7777:8888' assert_output --partial '1.2.3.5' assert_output --partial '1.2.4.0/24' diff --git a/test/bin/assert-crowdsec-not-running b/test/bin/assert-crowdsec-not-running index 80de3dda7..b545ebf0a 100755 --- a/test/bin/assert-crowdsec-not-running +++ b/test/bin/assert-crowdsec-not-running @@ -2,7 +2,7 @@ is_crowdsec_running() { # ignore processes in containers - PIDS=$(pgrep --ns $$ -x 'crowdsec|crowdsec.test') + PIDS=$(pgrep --ns $$ -x 'crowdsec') } # The process can be slow, especially on CI and during test coverage. diff --git a/windows/installer/WixUI_HK.wxs b/windows/installer/WixUI_HK.wxs index 38cccbd9d..9133b495d 100644 --- a/windows/installer/WixUI_HK.wxs +++ b/windows/installer/WixUI_HK.wxs @@ -27,18 +27,9 @@ 1 - NOT Installed - Installed AND PATCH - - 1 - 1 - NOT WIXUI_DONTVALIDATEPATH - "1"]]> - WIXUI_DONTVALIDATEPATH OR WIXUI_INSTALLDIR_VALID="1" - - 1 - 1 + 1 + NOT Installed Installed 1 diff --git a/windows/installer/product.wxs b/windows/installer/product.wxs index 408178cda..b43cd6de3 100644 --- a/windows/installer/product.wxs +++ b/windows/installer/product.wxs @@ -132,17 +132,17 @@ - - - + + + - + - + NOT Installed AND NOT REMOVE