Compare commits
lapi-detai...master
14 commits

Author | SHA1 | Date
---|---|---
 | 05b54687b6 |
 | c4473839c4 |
 | d2c4bc55fc |
 | 2abc078e53 |
 | ceb4479ec4 |
 | 845d4542bb |
 | f4ed7b3520 |
 | 60431804d8 |
 | 0f942a95f1 |
 | 97e6588a45 |
 | 725cae1fa8 |
 | c64332d30a |
 | 718d1c54b2 |
 | b48b728317 |

162 changed files with 1787 additions and 7352 deletions

.github/workflows/bats-hub.yml (vendored): 2 changes

@@ -33,7 +33,7 @@ jobs:
       - name: "Set up Go"
         uses: actions/setup-go@v5
         with:
-          go-version: "1.21.9"
+          go-version: "1.22.2"

       - name: "Install bats dependencies"
         env:
.github/workflows/bats-mysql.yml (vendored): 2 changes

@@ -36,7 +36,7 @@ jobs:
       - name: "Set up Go"
         uses: actions/setup-go@v5
         with:
-          go-version: "1.21.9"
+          go-version: "1.22.2"

       - name: "Install bats dependencies"
         env:
.github/workflows/bats-postgres.yml (vendored): 2 changes

@@ -45,7 +45,7 @@ jobs:
       - name: "Set up Go"
         uses: actions/setup-go@v5
         with:
-          go-version: "1.21.9"
+          go-version: "1.22.2"

       - name: "Install bats dependencies"
         env:
.github/workflows/bats-sqlite-coverage.yml (vendored): 3 changes

@@ -28,7 +28,7 @@ jobs:
       - name: "Set up Go"
         uses: actions/setup-go@v5
         with:
-          go-version: "1.21.9"
+          go-version: "1.22.2"

       - name: "Install bats dependencies"
         env:

@@ -81,3 +81,4 @@ jobs:
         with:
           files: ./coverage-bats.out
           flags: bats
+          token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/ci-windows-build-msi.yml (vendored): 2 changes

@@ -35,7 +35,7 @@ jobs:
       - name: "Set up Go"
        uses: actions/setup-go@v5
         with:
-          go-version: "1.21.9"
+          go-version: "1.22.2"

       - name: Build
         run: make windows_installer BUILD_RE2_WASM=1
.github/workflows/codeql-analysis.yml (vendored): 2 changes

@@ -52,7 +52,7 @@ jobs:
       - name: "Set up Go"
         uses: actions/setup-go@v5
         with:
-          go-version: "1.21.9"
+          go-version: "1.22.2"
           cache-dependency-path: "**/go.sum"

       # Initializes the CodeQL tools for scanning.
.github/workflows/go-tests-windows.yml (vendored): 3 changes

@@ -34,7 +34,7 @@ jobs:
       - name: "Set up Go"
         uses: actions/setup-go@v5
         with:
-          go-version: "1.21.9"
+          go-version: "1.22.2"

       - name: Build
         run: |

@@ -52,6 +52,7 @@ jobs:
         with:
           files: coverage.out
           flags: unit-windows
+          token: ${{ secrets.CODECOV_TOKEN }}

       - name: golangci-lint
         uses: golangci/golangci-lint-action@v4
.github/workflows/go-tests.yml (vendored): 3 changes

@@ -126,7 +126,7 @@ jobs:
       - name: "Set up Go"
         uses: actions/setup-go@v5
         with:
-          go-version: "1.21.9"
+          go-version: "1.22.2"

       - name: Create localstack streams
         run: |

@@ -153,6 +153,7 @@ jobs:
         with:
           files: coverage.out
           flags: unit-linux
+          token: ${{ secrets.CODECOV_TOKEN }}

       - name: golangci-lint
         uses: golangci/golangci-lint-action@v4
(file name not captured)

@@ -25,7 +25,7 @@ jobs:
       - name: "Set up Go"
         uses: actions/setup-go@v5
         with:
-          go-version: "1.21.9"
+          go-version: "1.22.2"

       - name: Build the binaries
         run: |
(file name not captured)

@@ -3,7 +3,7 @@
 linters-settings:
   cyclop:
     # lower this after refactoring
-    max-complexity: 53
+    max-complexity: 48

   gci:
     sections:

@@ -22,7 +22,7 @@ linters-settings:

   gocyclo:
     # lower this after refactoring
-    min-complexity: 49
+    min-complexity: 48

   funlen:
     # Checks the number of lines in a function.

@@ -37,17 +37,10 @@ linters-settings:
     statements: 122

   govet:
-    enable:
-      - atomicalign
-      - deepequalerrors
-      # TODO: - fieldalignment
-      - findcall
-      - nilness
-      # TODO: - reflectvaluecompare
-      - shadow
-      - sortslice
-      - timeformat
-      - unusedwrite
+    enable-all: true
+    disable:
+      - reflectvaluecompare
+      - fieldalignment

   lll:
     # lower this after refactoring

@@ -65,7 +58,7 @@ linters-settings:
     min-complexity: 28

   nlreturn:
-    block-size: 4
+    block-size: 5

   nolintlint:
     allow-unused: false # report any unused nolint directives

@@ -89,18 +82,6 @@ linters-settings:
       - "!**/pkg/apiserver/controllers/v1/errors.go"
   yaml:
     files:
-      - "!**/cmd/crowdsec-cli/alerts.go"
-      - "!**/cmd/crowdsec-cli/capi.go"
-      - "!**/cmd/crowdsec-cli/config_show.go"
-      - "!**/cmd/crowdsec-cli/hubtest.go"
-      - "!**/cmd/crowdsec-cli/lapi.go"
-      - "!**/cmd/crowdsec-cli/simulation.go"
-      - "!**/cmd/crowdsec/crowdsec.go"
-      - "!**/cmd/notification-dummy/main.go"
-      - "!**/cmd/notification-email/main.go"
-      - "!**/cmd/notification-http/main.go"
-      - "!**/cmd/notification-slack/main.go"
-      - "!**/cmd/notification-splunk/main.go"
       - "!**/pkg/acquisition/acquisition.go"
       - "!**/pkg/acquisition/acquisition_test.go"
       - "!**/pkg/acquisition/modules/appsec/appsec.go"

@@ -158,6 +139,13 @@ linters:
     - structcheck
     - varcheck

+    #
+    # Disabled until fixed for go 1.22
+    #
+
+    - copyloopvar # copyloopvar is a linter detects places where loop variables are copied
+    - intrange # intrange is a linter to find places where for loops could make use of an integer range.
+
     #
     # Enabled
     #

@@ -166,7 +154,6 @@ linters:
   # - asciicheck # checks that all code identifiers does not have non-ASCII symbols in the name
   # - bidichk # Checks for dangerous unicode character sequences
   # - bodyclose # checks whether HTTP response body is closed successfully
-  # - copyloopvar # copyloopvar is a linter detects places where loop variables are copied
   # - cyclop # checks function and package cyclomatic complexity
   # - decorder # check declaration order and count of types, constants, variables and functions
   # - depguard # Go linter that checks if package imports are in a list of acceptable packages

@@ -195,7 +182,6 @@ linters:
   # - importas # Enforces consistent import aliases
   # - ineffassign # Detects when assignments to existing variables are not used
   # - interfacebloat # A linter that checks the number of methods inside an interface.
-  # - intrange # intrange is a linter to find places where for loops could make use of an integer range.
   # - lll # Reports long lines
   # - loggercheck # (logrlint): Checks key value pairs for common logger libraries (kitlog,klog,logr,zap).
   # - logrlint # Check logr arguments.
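The "Disabled until fixed for go 1.22" block above concerns two linters tied to Go 1.22's per-iteration loop variables, matching the toolchain bump in the workflow files. As background only (not part of this diff), a minimal sketch of what they report, assuming a Go 1.22 toolchain:

```go
package main

import "fmt"

func main() {
	// Before Go 1.22 the loop variable was shared across iterations, so closures
	// commonly captured a per-iteration copy. With Go 1.22 each iteration gets its
	// own variable, and copyloopvar flags the now-redundant copy.
	funcs := make([]func(), 0, 3)
	for i := 0; i < 3; i++ {
		i := i // redundant on Go 1.22; this is what copyloopvar reports
		funcs = append(funcs, func() { fmt.Println(i) })
	}
	for _, f := range funcs {
		f() // prints 0, 1, 2
	}

	// intrange suggests the Go 1.22 integer range form for simple counted loops.
	for i := range 3 {
		fmt.Println(i)
	}
}
```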

(file name not captured)

@@ -1,5 +1,5 @@
 # vim: set ft=dockerfile:
-FROM golang:1.21.9-alpine3.18 AS build
+FROM golang:1.22.2-alpine3.18 AS build

 ARG BUILD_VERSION

(file name not captured)

@@ -1,5 +1,5 @@
 # vim: set ft=dockerfile:
-FROM golang:1.21.9-bookworm AS build
+FROM golang:1.22.2-bookworm AS build

 ARG BUILD_VERSION

(file name not captured)

@@ -21,7 +21,7 @@ stages:
       - task: GoTool@0
         displayName: "Install Go"
         inputs:
-          version: '1.21.9'
+          version: '1.22.2'

       - pwsh: |
           choco install -y make
(file name not captured)

@@ -4,6 +4,7 @@ import (
     "context"
     "encoding/csv"
     "encoding/json"
+    "errors"
     "fmt"
     "net/url"
     "os"

@@ -16,7 +17,7 @@ import (
     "github.com/go-openapi/strfmt"
     log "github.com/sirupsen/logrus"
     "github.com/spf13/cobra"
-    "gopkg.in/yaml.v2"
+    "gopkg.in/yaml.v3"

     "github.com/crowdsecurity/go-cs-lib/version"

@@ -204,6 +205,7 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
     if err != nil {
         return fmt.Errorf("parsing api url %s: %w", apiURL, err)
     }
+
     cli.client, err = apiclient.NewClient(&apiclient.Config{
         MachineID: cfg.API.Client.Credentials.Login,
         Password: strfmt.Password(cfg.API.Client.Credentials.Password),

@@ -211,7 +213,6 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
         URL: apiURL,
         VersionPrefix: "v1",
     })
-
     if err != nil {
         return fmt.Errorf("new api client: %w", err)
     }

@@ -229,7 +230,7 @@ func (cli *cliAlerts) NewCommand() *cobra.Command {
 }

 func (cli *cliAlerts) NewListCmd() *cobra.Command {
-    var alertListFilter = apiclient.AlertsListOpts{
+    alertListFilter := apiclient.AlertsListOpts{
         ScopeEquals: new(string),
         ValueEquals: new(string),
         ScenarioEquals: new(string),

@@ -363,7 +364,7 @@ func (cli *cliAlerts) NewDeleteCmd() *cobra.Command {
         delAlertByID string
     )

-    var alertDeleteFilter = apiclient.AlertsDeleteOpts{
+    alertDeleteFilter := apiclient.AlertsDeleteOpts{
         ScopeEquals: new(string),
         ValueEquals: new(string),
         ScenarioEquals: new(string),

@@ -391,7 +392,7 @@ cscli alerts delete -s crowdsecurity/ssh-bf"`,
             *alertDeleteFilter.ScenarioEquals == "" && *alertDeleteFilter.IPEquals == "" &&
             *alertDeleteFilter.RangeEquals == "" && delAlertByID == "" {
                 _ = cmd.Usage()
-                return fmt.Errorf("at least one filter or --all must be specified")
+                return errors.New("at least one filter or --all must be specified")
             }

             return nil

@@ -478,7 +479,7 @@ func (cli *cliAlerts) NewInspectCmd() *cobra.Command {
             cfg := cli.cfg()
             if len(args) == 0 {
                 printHelp(cmd)
-                return fmt.Errorf("missing alert_id")
+                return errors.New("missing alert_id")
             }
             for _, alertID := range args {
                 id, err := strconv.Atoi(alertID)
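The only change to the YAML handling above is the import path, gopkg.in/yaml.v2 to gopkg.in/yaml.v3. As an illustration outside this diff: the v3 package keeps the same Marshal/Unmarshal signatures, so call sites usually compile unchanged, although encoded output can differ cosmetically (v3 uses different default indentation). A minimal sketch with a made-up `filter` type:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3" // was "gopkg.in/yaml.v2"
)

// filter is a hypothetical type for the example only.
type filter struct {
	Scope string `yaml:"scope"`
	Value string `yaml:"value"`
}

func main() {
	// Marshal and Unmarshal have the same signatures in v2 and v3,
	// so swapping the import path is usually the entire change.
	out, err := yaml.Marshal(filter{Scope: "ip", Value: "192.0.2.1"})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))

	var f filter
	if err := yaml.Unmarshal(out, &f); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", f)
}
```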
(file name not captured)

@@ -10,7 +10,7 @@ import (
     "github.com/go-openapi/strfmt"
     log "github.com/sirupsen/logrus"
     "github.com/spf13/cobra"
-    "gopkg.in/yaml.v2"
+    "gopkg.in/yaml.v3"

     "github.com/crowdsecurity/go-cs-lib/version"

@@ -85,7 +85,6 @@ func (cli *cliCapi) register(capiUserPrefix string, outputFile string) error {
         URL: apiurl,
         VersionPrefix: CAPIURLPrefix,
     }, nil)
-
     if err != nil {
         return fmt.Errorf("api client register ('%s'): %w", types.CAPIBaseURL, err)
     }

@@ -175,7 +174,7 @@ func (cli *cliCapi) status() error {
         return err
     }

-    scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
+    scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
     if err != nil {
         return fmt.Errorf("failed to get scenarios: %w", err)
     }
(file name not captured)

@@ -10,13 +10,15 @@ import (
     "github.com/sanity-io/litter"
     log "github.com/sirupsen/logrus"
     "github.com/spf13/cobra"
-    "gopkg.in/yaml.v2"
+    "gopkg.in/yaml.v3"

     "github.com/crowdsecurity/crowdsec/pkg/csconfig"
     "github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
 )

-func showConfigKey(key string) error {
+func (cli *cliConfig) showKey(key string) error {
+    cfg := cli.cfg()
+
     type Env struct {
         Config *csconfig.Config
     }

@@ -30,15 +32,15 @@ func showConfigKey(key string) error {
         return err
     }

-    output, err := expr.Run(program, Env{Config: csConfig})
+    output, err := expr.Run(program, Env{Config: cfg})
     if err != nil {
         return err
     }

-    switch csConfig.Cscli.Output {
+    switch cfg.Cscli.Output {
     case "human", "raw":
         // Don't use litter for strings, it adds quotes
-        // that we didn't have before
+        // that would break compatibility with previous versions
         switch output.(type) {
         case string:
             fmt.Println(output)

@@ -51,13 +53,14 @@ func showConfigKey(key string) error {
             return fmt.Errorf("failed to marshal configuration: %w", err)
         }

-        fmt.Printf("%s\n", string(data))
+        fmt.Println(string(data))
     }

     return nil
 }

-var configShowTemplate = `Global:
+func (cli *cliConfig) template() string {
+    return `Global:

 {{- if .ConfigPaths }}
  - Configuration Folder : {{.ConfigPaths.ConfigDir}}

@@ -182,19 +185,11 @@ Central API:
 {{- end }}
 {{- end }}
 `
+}

-func (cli *cliConfig) show(key string) error {
+func (cli *cliConfig) show() error {
     cfg := cli.cfg()

-    if err := cfg.LoadAPIClient(); err != nil {
-        log.Errorf("failed to load API client configuration: %s", err)
-        // don't return, we can still show the configuration
-    }
-
-    if key != "" {
-        return showConfigKey(key)
-    }
-
     switch cfg.Cscli.Output {
     case "human":
         // The tests on .Enable look funny because the option has a true default which has

@@ -205,7 +200,7 @@ func (cli *cliConfig) show(key string) error {
         "ValueBool": func(b *bool) bool { return b != nil && *b },
     }

-    tmp, err := template.New("config").Funcs(funcs).Parse(configShowTemplate)
+    tmp, err := template.New("config").Funcs(funcs).Parse(cli.template())
     if err != nil {
         return err
     }

@@ -220,14 +215,14 @@ func (cli *cliConfig) show(key string) error {
             return fmt.Errorf("failed to marshal configuration: %w", err)
         }

-        fmt.Printf("%s\n", string(data))
+        fmt.Println(string(data))
     case "raw":
         data, err := yaml.Marshal(cfg)
         if err != nil {
             return fmt.Errorf("failed to marshal configuration: %w", err)
         }

-        fmt.Printf("%s\n", string(data))
+        fmt.Println(string(data))
     }

     return nil

@@ -243,7 +238,16 @@ func (cli *cliConfig) newShowCmd() *cobra.Command {
         Args: cobra.ExactArgs(0),
         DisableAutoGenTag: true,
         RunE: func(_ *cobra.Command, _ []string) error {
-            return cli.show(key)
+            if err := cli.cfg().LoadAPIClient(); err != nil {
+                log.Errorf("failed to load API client configuration: %s", err)
+                // don't return, we can still show the configuration
+            }
+
+            if key != "" {
+                return cli.showKey(key)
+            }
+
+            return cli.show()
         },
     }

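The rewrite above threads configuration access through the receiver (`cli.cfg()`) instead of the package-level `csConfig`. A rough sketch of that accessor pattern, using simplified stand-in types rather than crowdsec's real `csconfig.Config`:

```go
package main

import "fmt"

// Config is a simplified stand-in; crowdsec's accessor returns *csconfig.Config.
type Config struct {
	Output string
}

// configGetter defers resolving the configuration until a command actually runs.
type configGetter func() *Config

type cliConfig struct {
	cfg configGetter
}

func newCLIConfig(cfg configGetter) *cliConfig {
	return &cliConfig{cfg: cfg}
}

func (cli *cliConfig) show() error {
	cfg := cli.cfg() // resolved at call time, not when the command tree is wired up
	fmt.Println("output format:", cfg.Output)
	return nil
}

func main() {
	shared := &Config{Output: "human"}
	cli := newCLIConfig(func() *Config { return shared })
	_ = cli.show()
}
```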
(file name not captured)

@@ -4,9 +4,11 @@ import (
     "context"
     "encoding/csv"
     "encoding/json"
+    "errors"
     "fmt"
     "net/url"
     "os"
+    "strconv"
     "strings"

     "github.com/fatih/color"

@@ -36,7 +38,7 @@ func NewCLIConsole(cfg configGetter) *cliConsole {
 }

 func (cli *cliConsole) NewCommand() *cobra.Command {
-    var cmd = &cobra.Command{
+    cmd := &cobra.Command{
         Use: "console [action]",
         Short: "Manage interaction with Crowdsec console (https://app.crowdsec.net)",
         Args: cobra.MinimumNArgs(1),

@@ -101,7 +103,7 @@ After running this command your will need to validate the enrollment in the weba
             return err
         }

-        scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
+        scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
         if err != nil {
             return fmt.Errorf("failed to get installed scenarios: %w", err)
         }

@@ -203,7 +205,7 @@ Enable given information push to the central API. Allows to empower the console`
             log.Infof("All features have been enabled successfully")
         } else {
             if len(args) == 0 {
-                return fmt.Errorf("you must specify at least one feature to enable")
+                return errors.New("you must specify at least one feature to enable")
             }
             if err := cli.setConsoleOpts(args, true); err != nil {
                 return err

@@ -288,11 +290,11 @@ func (cli *cliConsole) newStatusCmd() *cobra.Command {
         }

         rows := [][]string{
-            {csconfig.SEND_MANUAL_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareManualDecisions)},
-            {csconfig.SEND_CUSTOM_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareCustomScenarios)},
-            {csconfig.SEND_TAINTED_SCENARIOS, fmt.Sprintf("%t", *consoleCfg.ShareTaintedScenarios)},
-            {csconfig.SEND_CONTEXT, fmt.Sprintf("%t", *consoleCfg.ShareContext)},
-            {csconfig.CONSOLE_MANAGEMENT, fmt.Sprintf("%t", *consoleCfg.ConsoleManagement)},
+            {csconfig.SEND_MANUAL_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareManualDecisions)},
+            {csconfig.SEND_CUSTOM_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareCustomScenarios)},
+            {csconfig.SEND_TAINTED_SCENARIOS, strconv.FormatBool(*consoleCfg.ShareTaintedScenarios)},
+            {csconfig.SEND_CONTEXT, strconv.FormatBool(*consoleCfg.ShareContext)},
+            {csconfig.CONSOLE_MANAGEMENT, strconv.FormatBool(*consoleCfg.ConsoleManagement)},
         }
         for _, row := range rows {
             err = csvwriter.Write(row)
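`strconv.FormatBool` produces the same string as `fmt.Sprintf("%t", ...)` without going through fmt's reflection-based formatting. A tiny standalone comparison, not taken from the diff:

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	enabled := true

	// Same output either way; FormatBool just skips fmt's formatting machinery.
	viaSprintf := fmt.Sprintf("%t", enabled)
	viaStrconv := strconv.FormatBool(enabled)

	fmt.Println(viaSprintf == viaStrconv, viaStrconv) // true true
}
```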
(file name not captured)

@@ -9,7 +9,6 @@ import (
     log "github.com/sirupsen/logrus"
 )

-
 /*help to copy the file, ioutil doesn't offer the feature*/

 func copyFileContents(src, dst string) (err error) {

@@ -69,6 +68,7 @@ func CopyFile(sourceSymLink, destinationFile string) error {
     if !(destinationFileStat.Mode().IsRegular()) {
         return fmt.Errorf("copyFile: non-regular destination file %s (%q)", destinationFileStat.Name(), destinationFileStat.Mode().String())
     }
+
     if os.SameFile(sourceFileStat, destinationFileStat) {
         return err
     }

@@ -80,4 +80,3 @@

     return err
 }
-
(file name not captured)

@@ -4,6 +4,7 @@ import (
     "context"
     "encoding/csv"
     "encoding/json"
+    "errors"
     "fmt"
     "net/url"
     "os"

@@ -346,7 +347,7 @@ cscli decisions add --scope username --value foobar
         addScope = types.Range
     } else if addValue == "" {
         printHelp(cmd)
-        return fmt.Errorf("missing arguments, a value is required (--ip, --range or --scope and --value)")
+        return errors.New("missing arguments, a value is required (--ip, --range or --scope and --value)")
     }

     if addReason == "" {

@@ -371,7 +372,7 @@ cscli decisions add --scope username --value foobar
         Scenario: &addReason,
         ScenarioVersion: &empty,
         Simulated: &simulated,
-        //setting empty scope/value broke plugins, and it didn't seem to be needed anymore w/ latest papi changes
+        // setting empty scope/value broke plugins, and it didn't seem to be needed anymore w/ latest papi changes
         Source: &models.Source{
             AsName: empty,
             AsNumber: empty,

@@ -411,7 +412,7 @@ cscli decisions add --scope username --value foobar
 }

 func (cli *cliDecisions) newDeleteCmd() *cobra.Command {
-    var delFilter = apiclient.DecisionsDeleteOpts{
+    delFilter := apiclient.DecisionsDeleteOpts{
         ScopeEquals: new(string),
         ValueEquals: new(string),
         TypeEquals: new(string),

@@ -448,7 +449,7 @@ cscli decisions delete --origin lists --scenario list_name
             *delFilter.RangeEquals == "" && *delFilter.ScenarioEquals == "" &&
             *delFilter.OriginEquals == "" && delDecisionID == "" {
             cmd.Usage()
-            return fmt.Errorf("at least one filter or --all must be specified")
+            return errors.New("at least one filter or --all must be specified")
         }

         return nil
(file name not captured)

@@ -5,6 +5,7 @@ import (
     "bytes"
     "context"
     "encoding/json"
+    "errors"
     "fmt"
     "io"
     "os"

@@ -81,7 +82,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
     }

     if defaultDuration == "" {
-        return fmt.Errorf("--duration cannot be empty")
+        return errors.New("--duration cannot be empty")
     }

     defaultScope, err := flags.GetString("scope")

@@ -90,7 +91,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
     }

     if defaultScope == "" {
-        return fmt.Errorf("--scope cannot be empty")
+        return errors.New("--scope cannot be empty")
     }

     defaultReason, err := flags.GetString("reason")

@@ -99,7 +100,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
     }

     if defaultReason == "" {
-        return fmt.Errorf("--reason cannot be empty")
+        return errors.New("--reason cannot be empty")
     }

     defaultType, err := flags.GetString("type")

@@ -108,7 +109,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
     }

     if defaultType == "" {
-        return fmt.Errorf("--type cannot be empty")
+        return errors.New("--type cannot be empty")
     }

     batchSize, err := flags.GetInt("batch")

@@ -136,7 +137,7 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
     }

     if format == "" {
-        return fmt.Errorf("unable to guess format from file extension, please provide a format with --format flag")
+        return errors.New("unable to guess format from file extension, please provide a format with --format flag")
     }

     if input == "-" {

@@ -235,7 +236,6 @@ func (cli *cliDecisions) runImport(cmd *cobra.Command, args []string) error {
     return nil
 }

-
 func (cli *cliDecisions) newImportCmd() *cobra.Command {
     cmd := &cobra.Command{
         Use: "import [options]",
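All of the message-only `fmt.Errorf` calls above become `errors.New`, while calls that actually format or wrap keep `fmt.Errorf`. A small standalone sketch of the distinction, with hypothetical helper names:

```go
package main

import (
	"errors"
	"fmt"
)

// checkDuration is a hypothetical helper mirroring the flag checks above.
func checkDuration(d string) error {
	if d == "" {
		// Constant message: errors.New is enough, no formatting needed.
		return errors.New("--duration cannot be empty")
	}
	return nil
}

// wrapParse shows where fmt.Errorf still belongs: formatting and %w wrapping,
// which keeps the original error visible to errors.Is / errors.As.
func wrapParse(err error) error {
	return fmt.Errorf("parsing import file: %w", err)
}

func main() {
	fmt.Println(checkDuration(""))
	fmt.Println(wrapParse(errors.New("bad line 3")))
}
```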
(file name not captured)

@@ -39,8 +39,10 @@ id: %s
 title: %s
 ---
 `
+
     name := filepath.Base(filename)
     base := strings.TrimSuffix(name, filepath.Ext(name))
+
     return fmt.Sprintf(header, base, strings.ReplaceAll(base, "_", " "))
 }

(file name not captured)

@@ -83,7 +83,7 @@ tail -n 5 myfile.log | cscli explain --type nginx -f -
     PersistentPreRunE: func(_ *cobra.Command, _ []string) error {
         fileInfo, _ := os.Stdin.Stat()
         if cli.flags.logFile == "-" && ((fileInfo.Mode() & os.ModeCharDevice) == os.ModeCharDevice) {
-            return fmt.Errorf("the option -f - is intended to work with pipes")
+            return errors.New("the option -f - is intended to work with pipes")
         }

         return nil

@@ -160,18 +160,22 @@ func (cli *cliExplain) run() error {
     } else if logFile == "-" {
         reader := bufio.NewReader(os.Stdin)
         errCount := 0
+
         for {
             input, err := reader.ReadBytes('\n')
             if err != nil && errors.Is(err, io.EOF) {
                 break
             }
+
             if len(input) > 1 {
                 _, err = f.Write(input)
             }
+
             if err != nil || len(input) <= 1 {
                 errCount++
             }
         }
+
         if errCount > 0 {
             log.Warnf("Failed to write %d lines to %s", errCount, tmpFile)
         }

@@ -207,7 +211,7 @@ func (cli *cliExplain) run() error {
     }

     if dsn == "" {
-        return fmt.Errorf("no acquisition (--file or --dsn) provided, can't run cscli test")
+        return errors.New("no acquisition (--file or --dsn) provided, can't run cscli test")
     }

     cmdArgs := []string{"-c", ConfigFilePath, "-type", logType, "-dsn", dsn, "-dump-data", dir, "-no-api"}
(file name not captured)

@@ -13,7 +13,7 @@ import (
     "github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-type cliHub struct {
+type cliHub struct{
     cfg configGetter
 }

@@ -137,7 +137,7 @@ func (cli *cliHub) upgrade(force bool) error {
     }

     for _, itemType := range cwhub.ItemTypes {
-        items, err := hub.GetInstalledItems(itemType)
+        items, err := hub.GetInstalledItemsByType(itemType)
         if err != nil {
             return err
         }
(file name not captured)

@@ -13,8 +13,9 @@ import (
     "github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLIAppsecConfig() *cliItem {
+func NewCLIAppsecConfig(cfg configGetter) *cliItem {
     return &cliItem{
+        cfg: cfg,
         name: cwhub.APPSEC_CONFIGS,
         singular: "appsec-config",
         oneOrMore: "appsec-config(s)",

@@ -46,7 +47,7 @@ cscli appsec-configs list crowdsecurity/vpatch`,
     }
 }

-func NewCLIAppsecRule() *cliItem {
+func NewCLIAppsecRule(cfg configGetter) *cliItem {
     inspectDetail := func(item *cwhub.Item) error {
         // Only show the converted rules in human mode
         if csConfig.Cscli.Output != "human" {

@@ -57,11 +58,11 @@ func NewCLIAppsecRule() *cliItem {

         yamlContent, err := os.ReadFile(item.State.LocalPath)
         if err != nil {
-            return fmt.Errorf("unable to read file %s : %s", item.State.LocalPath, err)
+            return fmt.Errorf("unable to read file %s: %w", item.State.LocalPath, err)
         }

         if err := yaml.Unmarshal(yamlContent, &appsecRule); err != nil {
-            return fmt.Errorf("unable to unmarshal yaml file %s : %s", item.State.LocalPath, err)
+            return fmt.Errorf("unable to unmarshal yaml file %s: %w", item.State.LocalPath, err)
         }

         for _, ruleType := range appsec_rule.SupportedTypes() {

@@ -70,7 +71,7 @@ func NewCLIAppsecRule() *cliItem {
             for _, rule := range appsecRule.Rules {
                 convertedRule, _, err := rule.Convert(ruleType, appsecRule.Name)
                 if err != nil {
-                    return fmt.Errorf("unable to convert rule %s : %s", rule.Name, err)
+                    return fmt.Errorf("unable to convert rule %s: %w", rule.Name, err)
                 }

                 fmt.Println(convertedRule)

@@ -88,6 +89,7 @@ func NewCLIAppsecRule() *cliItem {
     }

     return &cliItem{
+        cfg: cfg,
         name: "appsec-rules",
         singular: "appsec-rule",
         oneOrMore: "appsec-rule(s)",
(file name not captured)

@@ -4,8 +4,9 @@ import (
     "github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLICollection() *cliItem {
+func NewCLICollection(cfg configGetter) *cliItem {
     return &cliItem{
+        cfg: cfg,
         name: cwhub.COLLECTIONS,
         singular: "collection",
         oneOrMore: "collection(s)",
(file name not captured)

@@ -4,8 +4,9 @@ import (
     "github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLIContext() *cliItem {
+func NewCLIContext(cfg configGetter) *cliItem {
     return &cliItem{
+        cfg: cfg,
         name: cwhub.CONTEXTS,
         singular: "context",
         oneOrMore: "context(s)",
(file name not captured)

@@ -4,8 +4,9 @@ import (
     "github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLIParser() *cliItem {
+func NewCLIParser(cfg configGetter) *cliItem {
     return &cliItem{
+        cfg: cfg,
         name: cwhub.PARSERS,
         singular: "parser",
         oneOrMore: "parser(s)",
(file name not captured)

@@ -4,8 +4,9 @@ import (
     "github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLIPostOverflow() *cliItem {
+func NewCLIPostOverflow(cfg configGetter) *cliItem {
     return &cliItem{
+        cfg: cfg,
         name: cwhub.POSTOVERFLOWS,
         singular: "postoverflow",
         oneOrMore: "postoverflow(s)",
(file name not captured)

@@ -4,8 +4,9 @@ import (
     "github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )

-func NewCLIScenario() *cliItem {
+func NewCLIScenario(cfg configGetter) *cliItem {
     return &cliItem{
+        cfg: cfg,
         name: cwhub.SCENARIOS,
         singular: "scenario",
         oneOrMore: "scenario(s)",
(file name not captured)

@@ -14,7 +14,7 @@ import (
     "github.com/fatih/color"
     log "github.com/sirupsen/logrus"
     "github.com/spf13/cobra"
-    "gopkg.in/yaml.v2"
+    "gopkg.in/yaml.v3"

     "github.com/crowdsecurity/crowdsec/pkg/dumps"
     "github.com/crowdsecurity/crowdsec/pkg/emoji"

@@ -135,7 +135,8 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
     // create empty nuclei template file
     nucleiFileName := fmt.Sprintf("%s.yaml", testName)
     nucleiFilePath := filepath.Join(testPath, nucleiFileName)
-    nucleiFile, err := os.OpenFile(nucleiFilePath, os.O_RDWR|os.O_CREATE, 0755)
+
+    nucleiFile, err := os.OpenFile(nucleiFilePath, os.O_RDWR|os.O_CREATE, 0o755)
     if err != nil {
         return err
     }

@@ -405,7 +406,7 @@ func (cli *cliHubTest) NewRunCmd() *cobra.Command {
 }

 func (cli *cliHubTest) NewCleanCmd() *cobra.Command {
-    var cmd = &cobra.Command{
+    cmd := &cobra.Command{
         Use: "clean",
         Short: "clean [test_name]",
         Args: cobra.MinimumNArgs(1),
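`0o755` is the same permission value as `0755`; only the literal style changes (the `0o` octal prefix introduced in Go 1.13). A throwaway example using a hypothetical file name:

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	// Identical values; 0o just makes the octal base explicit.
	fmt.Println(0o755 == 0755) // true

	// Hypothetical file name, same OpenFile call shape as in the diff above.
	f, err := os.OpenFile("example.yaml", os.O_RDWR|os.O_CREATE, 0o755)
	if err != nil {
		panic(err)
	}
	defer f.Close()

	fmt.Println("created", f.Name())
}
```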
(file name not captured)

@@ -37,6 +37,7 @@ func ShowMetrics(hubItem *cwhub.Item) error {
         appsecMetricsTable(color.Output, hubItem.Name, metrics)
     default: // no metrics for this item type
     }
+
     return nil
 }

@@ -49,21 +50,27 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
         if !strings.HasPrefix(fam.Name, "cs_") {
             continue
         }
+
         log.Tracef("round %d", idx)
+
         for _, m := range fam.Metrics {
             metric, ok := m.(prom2json.Metric)
             if !ok {
                 log.Debugf("failed to convert metric to prom2json.Metric")
                 continue
             }
+
             name, ok := metric.Labels["name"]
             if !ok {
                 log.Debugf("no name in Metric %v", metric.Labels)
             }
+
             if name != itemName {
                 continue
             }
+
             source, ok := metric.Labels["source"]
+
             if !ok {
                 log.Debugf("no source in Metric %v", metric.Labels)
             } else {

@@ -71,12 +78,15 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
                     source = srctype + ":" + source
                 }
             }

             value := m.(prom2json.Metric).Value
+
             fval, err := strconv.ParseFloat(value, 32)
             if err != nil {
                 log.Errorf("Unexpected int value %s : %s", value, err)
                 continue
             }
+
             ival := int(fval)
+
             switch fam.Name {

@@ -119,6 +129,7 @@ func GetParserMetric(url string, itemName string) map[string]map[string]int {
             }
         }
     }
+
     return stats
 }

@@ -136,26 +147,34 @@ func GetScenarioMetric(url string, itemName string) map[string]int {
         if !strings.HasPrefix(fam.Name, "cs_") {
             continue
         }
+
         log.Tracef("round %d", idx)
+
         for _, m := range fam.Metrics {
             metric, ok := m.(prom2json.Metric)
             if !ok {
                 log.Debugf("failed to convert metric to prom2json.Metric")
                 continue
             }
+
             name, ok := metric.Labels["name"]
+
             if !ok {
                 log.Debugf("no name in Metric %v", metric.Labels)
             }
+
             if name != itemName {
                 continue
             }

             value := m.(prom2json.Metric).Value
+
             fval, err := strconv.ParseFloat(value, 32)
             if err != nil {
                 log.Errorf("Unexpected int value %s : %s", value, err)
                 continue
             }
+
             ival := int(fval)
+
             switch fam.Name {

@@ -174,6 +193,7 @@ func GetScenarioMetric(url string, itemName string) map[string]int {
             }
         }
     }
+
     return stats
 }

@@ -188,17 +208,22 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
         if !strings.HasPrefix(fam.Name, "cs_") {
             continue
         }
+
         log.Tracef("round %d", idx)
+
         for _, m := range fam.Metrics {
             metric, ok := m.(prom2json.Metric)
             if !ok {
                 log.Debugf("failed to convert metric to prom2json.Metric")
                 continue
             }
+
             name, ok := metric.Labels["rule_name"]
+
             if !ok {
                 log.Debugf("no rule_name in Metric %v", metric.Labels)
             }
+
             if name != itemName {
                 continue
             }

@@ -209,11 +234,13 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
             }

             value := m.(prom2json.Metric).Value
+
             fval, err := strconv.ParseFloat(value, 32)
             if err != nil {
                 log.Errorf("Unexpected int value %s : %s", value, err)
                 continue
             }

             ival := int(fval)
+
             switch fam.Name {

@@ -231,6 +258,7 @@ func GetAppsecRuleMetric(url string, itemName string) map[string]int {
             }
         }
     }
+
     return stats
 }

@@ -247,6 +275,7 @@ func GetPrometheusMetric(url string) []*prom2json.Family {

     go func() {
         defer trace.CatchPanic("crowdsec/GetPrometheusMetric")
+
         err := prom2json.FetchMetricFamilies(url, mfChan, transport)
         if err != nil {
             log.Fatalf("failed to fetch prometheus metrics : %v", err)

@@ -257,6 +286,7 @@ func GetPrometheusMetric(url string) []*prom2json.Family {
     for mf := range mfChan {
         result = append(result, prom2json.NewFamily(mf))
     }
+
     log.Debugf("Finished reading prometheus output, %d entries", len(result))

     return result
(file name not captured)

@@ -61,7 +61,7 @@ func compInstalledItems(itemType string, args []string, toComplete string) ([]st
         return nil, cobra.ShellCompDirectiveDefault
     }

-    items, err := hub.GetInstalledItemNames(itemType)
+    items, err := hub.GetInstalledNamesByType(itemType)
     if err != nil {
         cobra.CompDebugln(fmt.Sprintf("list installed %s err: %s", itemType, err), true)
         return nil, cobra.ShellCompDirectiveDefault
(file name not captured)

@@ -1,6 +1,7 @@
 package main

 import (
+    "errors"
     "fmt"
     "os"
     "strings"

@@ -28,6 +29,7 @@ type cliHelp struct {
 }

 type cliItem struct {
+    cfg configGetter
     name string // plural, as used in the hub index
     singular string
     oneOrMore string // parenthetical pluralizaion: "parser(s)"

@@ -61,7 +63,9 @@ func (cli cliItem) NewCommand() *cobra.Command {
 }

 func (cli cliItem) install(args []string, downloadOnly bool, force bool, ignoreError bool) error {
-    hub, err := require.Hub(csConfig, require.RemoteHub(csConfig), log.StandardLogger())
+    cfg := cli.cfg()
+
+    hub, err := require.Hub(cfg, require.RemoteHub(cfg), log.StandardLogger())
     if err != nil {
         return err
     }

@@ -71,7 +75,7 @@ func (cli cliItem) install(args []string, downloadOnly bool, force bool, ignoreE
         if item == nil {
             msg := suggestNearestMessage(hub, cli.name, name)
             if !ignoreError {
-                return fmt.Errorf(msg)
+                return errors.New(msg)
             }

             log.Errorf(msg)

@@ -107,10 +111,10 @@ func (cli cliItem) newInstallCmd() *cobra.Command {
         Example: cli.installHelp.example,
         Args: cobra.MinimumNArgs(1),
         DisableAutoGenTag: true,
-        ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+        ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
             return compAllItems(cli.name, args, toComplete)
         },
-        RunE: func(cmd *cobra.Command, args []string) error {
+        RunE: func(_ *cobra.Command, args []string) error {
             return cli.install(args, downloadOnly, force, ignoreError)
         },
     }

@@ -137,15 +141,15 @@ func istalledParentNames(item *cwhub.Item) []string {
 }

 func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error {
-    hub, err := require.Hub(csConfig, nil, log.StandardLogger())
+    hub, err := require.Hub(cli.cfg(), nil, log.StandardLogger())
     if err != nil {
         return err
     }

     if all {
-        getter := hub.GetInstalledItems
+        getter := hub.GetInstalledItemsByType
         if purge {
-            getter = hub.GetAllItems
+            getter = hub.GetItemsByType
         }

         items, err := getter(cli.name)
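The `getter := hub.GetInstalledItemsByType` / `getter = hub.GetItemsByType` swap above relies on Go method values: selecting a bound method once and then calling it through a variable. A simplified sketch with stand-in types, not crowdsec's real hub API:

```go
package main

import (
	"fmt"
	"strings"
)

// hub is a stand-in for the real cwhub.Hub; only the selection pattern matters here.
type hub struct {
	installed []string
	all       []string
}

func (h *hub) InstalledByType(prefix string) []string { return withPrefix(h.installed, prefix) }
func (h *hub) AllByType(prefix string) []string       { return withPrefix(h.all, prefix) }

func withPrefix(items []string, prefix string) []string {
	var out []string
	for _, it := range items {
		if strings.HasPrefix(it, prefix) {
			out = append(out, it)
		}
	}
	return out
}

func main() {
	h := &hub{
		installed: []string{"parsers/crowdsecurity/syslog-logs"},
		all:       []string{"parsers/crowdsecurity/syslog-logs", "parsers/crowdsecurity/nginx-logs"},
	}
	purge := true

	// Method value: pick the accessor once, then call it uniformly afterwards.
	getter := h.InstalledByType
	if purge {
		getter = h.AllByType
	}

	fmt.Println(getter("parsers/"))
}
```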
@@ -163,6 +167,7 @@ func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error

         if didRemove {
             log.Infof("Removed %s", item.Name)
+
             removed++
         }
     }

@@ -204,6 +209,7 @@ func (cli cliItem) remove(args []string, purge bool, force bool, all bool) error

         if didRemove {
             log.Infof("Removed %s", item.Name)
+
             removed++
         }
     }

@@ -231,10 +237,10 @@ func (cli cliItem) newRemoveCmd() *cobra.Command {
         Example: cli.removeHelp.example,
         Aliases: []string{"delete"},
         DisableAutoGenTag: true,
-        ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+        ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
             return compInstalledItems(cli.name, args, toComplete)
         },
-        RunE: func(cmd *cobra.Command, args []string) error {
+        RunE: func(_ *cobra.Command, args []string) error {
             return cli.remove(args, purge, force, all)
         },
     }

@@ -248,13 +254,15 @@ func (cli cliItem) newRemoveCmd() *cobra.Command {
 }

 func (cli cliItem) upgrade(args []string, force bool, all bool) error {
-    hub, err := require.Hub(csConfig, require.RemoteHub(csConfig), log.StandardLogger())
+    cfg := cli.cfg()
+
+    hub, err := require.Hub(cfg, require.RemoteHub(cfg), log.StandardLogger())
     if err != nil {
         return err
     }

     if all {
-        items, err := hub.GetInstalledItems(cli.name)
+        items, err := hub.GetInstalledItemsByType(cli.name)
         if err != nil {
             return err
         }

@@ -300,6 +308,7 @@ func (cli cliItem) upgrade(args []string, force bool, all bool) error {

         if didUpdate {
             log.Infof("Updated %s", item.Name)
+
             updated++
         }
     }

@@ -323,10 +332,10 @@ func (cli cliItem) newUpgradeCmd() *cobra.Command {
         Long: coalesce.String(cli.upgradeHelp.long, fmt.Sprintf("Fetch and upgrade one or more %s from the hub", cli.name)),
         Example: cli.upgradeHelp.example,
         DisableAutoGenTag: true,
-        ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+        ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
             return compInstalledItems(cli.name, args, toComplete)
         },
-        RunE: func(cmd *cobra.Command, args []string) error {
+        RunE: func(_ *cobra.Command, args []string) error {
             return cli.upgrade(args, force, all)
         },
     }

@@ -339,21 +348,23 @@ func (cli cliItem) newUpgradeCmd() *cobra.Command {
 }

 func (cli cliItem) inspect(args []string, url string, diff bool, rev bool, noMetrics bool) error {
+    cfg := cli.cfg()
+
     if rev && !diff {
-        return fmt.Errorf("--rev can only be used with --diff")
+        return errors.New("--rev can only be used with --diff")
     }

     if url != "" {
-        csConfig.Cscli.PrometheusUrl = url
+        cfg.Cscli.PrometheusUrl = url
     }

     remote := (*cwhub.RemoteHubCfg)(nil)

     if diff {
-        remote = require.RemoteHub(csConfig)
+        remote = require.RemoteHub(cfg)
     }

-    hub, err := require.Hub(csConfig, remote, log.StandardLogger())
+    hub, err := require.Hub(cfg, remote, log.StandardLogger())
     if err != nil {
         return err
     }
@ -399,10 +410,10 @@ func (cli cliItem) newInspectCmd() *cobra.Command {
|
||||||
Example: cli.inspectHelp.example,
|
Example: cli.inspectHelp.example,
|
||||||
Args: cobra.MinimumNArgs(1),
|
Args: cobra.MinimumNArgs(1),
|
||||||
DisableAutoGenTag: true,
|
DisableAutoGenTag: true,
|
||||||
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
ValidArgsFunction: func(_ *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||||
return compInstalledItems(cli.name, args, toComplete)
|
return compInstalledItems(cli.name, args, toComplete)
|
||||||
},
|
},
|
||||||
RunE: func(cmd *cobra.Command, args []string) error {
|
RunE: func(_ *cobra.Command, args []string) error {
|
||||||
return cli.inspect(args, url, diff, rev, noMetrics)
|
return cli.inspect(args, url, diff, rev, noMetrics)
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
@ -417,7 +428,7 @@ func (cli cliItem) newInspectCmd() *cobra.Command {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (cli cliItem) list(args []string, all bool) error {
|
func (cli cliItem) list(args []string, all bool) error {
|
||||||
hub, err := require.Hub(csConfig, nil, log.StandardLogger())
|
hub, err := require.Hub(cli.cfg(), nil, log.StandardLogger())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
@ -526,6 +537,7 @@ func (cli cliItem) whyTainted(hub *cwhub.Hub, item *cwhub.Item, reverse bool) st
|
||||||
// hack: avoid message "item is tainted by itself"
|
// hack: avoid message "item is tainted by itself"
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
ret = append(ret, fmt.Sprintf("# %s is tainted by %s", sub.FQName(), taintList))
|
ret = append(ret, fmt.Sprintf("# %s is tainted by %s", sub.FQName(), taintList))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
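The recurring change in the hunks above is that each cscli item command now obtains its configuration through `cli.cfg()` instead of reading the package-level `csConfig` global. A minimal sketch of that pattern, assuming only the `configGetter` type that appears elsewhere in this comparison; `cliExample`/`newCLIExample` are hypothetical names used for illustration:

```go
package main

import "github.com/crowdsecurity/crowdsec/pkg/csconfig"

// configGetter lets a subcommand fetch the parsed configuration lazily,
// at run time, instead of capturing a global at construction time.
type configGetter func() *csconfig.Config

// cliExample is a hypothetical command wrapper used only for illustration.
type cliExample struct {
	cfg configGetter
}

func newCLIExample(cfg configGetter) *cliExample {
	return &cliExample{cfg: cfg}
}

func (cli *cliExample) run() error {
	cfg := cli.cfg() // resolved after the configuration has been loaded
	_ = cfg
	return nil
}
```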
@@ -17,7 +17,7 @@ import (

// selectItems returns a slice of items of a given type, selected by name and sorted by case-insensitive name
func selectItems(hub *cwhub.Hub, itemType string, args []string, installedOnly bool) ([]*cwhub.Item, error) {
-itemNames := hub.GetItemNames(itemType)
+itemNames := hub.GetNamesByType(itemType)

notExist := []string{}

@@ -116,7 +116,7 @@ func listItems(out io.Writer, itemTypes []string, items map[string][]*cwhub.Item
}

if err := csvwriter.Write(header); err != nil {
-return fmt.Errorf("failed to write header: %s", err)
+return fmt.Errorf("failed to write header: %w", err)
}

for _, itemType := range itemTypes {

@@ -132,7 +132,7 @@ func listItems(out io.Writer, itemTypes []string, items map[string][]*cwhub.Item
}

if err := csvwriter.Write(row); err != nil {
-return fmt.Errorf("failed to write raw output: %s", err)
+return fmt.Errorf("failed to write raw output: %w", err)
}
}
}

@@ -150,12 +150,12 @@ func inspectItem(item *cwhub.Item, showMetrics bool) error {
enc.SetIndent(2)

if err := enc.Encode(item); err != nil {
-return fmt.Errorf("unable to encode item: %s", err)
+return fmt.Errorf("unable to encode item: %w", err)
}
case "json":
b, err := json.MarshalIndent(*item, "", " ")
if err != nil {
-return fmt.Errorf("unable to marshal item: %s", err)
+return fmt.Errorf("unable to marshal item: %w", err)
}

fmt.Print(string(b))
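Many hunks in this comparison switch fmt.Errorf verbs from %s to %w. The difference is that %w wraps the original error so callers can still match it with errors.Is/errors.As; %s flattens it into a string. A small self-contained example of why that matters:

```go
package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
)

func readConfig(path string) error {
	if _, err := os.ReadFile(path); err != nil {
		// %w keeps err in the error chain; %s would only keep its text.
		return fmt.Errorf("failed to read config %s: %w", path, err)
	}
	return nil
}

func main() {
	err := readConfig("/nonexistent/file")
	// Matching works only because the error was wrapped with %w.
	fmt.Println(errors.Is(err, fs.ErrNotExist)) // true
}
```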
@@ -13,7 +13,7 @@ import (
"github.com/go-openapi/strfmt"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
-"gopkg.in/yaml.v2"
+"gopkg.in/yaml.v3"

"github.com/crowdsecurity/go-cs-lib/version"

@@ -56,7 +56,7 @@ func (cli *cliLapi) status() error {
return err
}

-scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
+scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
if err != nil {
return fmt.Errorf("failed to get scenarios: %w", err)
}

@@ -116,7 +116,6 @@ func (cli *cliLapi) register(apiURL string, outputFile string, machine string) e
URL: apiurl,
VersionPrefix: LAPIURLPrefix,
}, nil)

if err != nil {
return fmt.Errorf("api client register: %w", err)
}

@@ -585,7 +584,7 @@ func detectNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
}

func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
-var ret = make([]string, 0)
+ret := make([]string, 0)

for _, subnode := range node.LeavesNodes {
if subnode.Grok.RunTimeRegexp != nil {
@@ -1,7 +1,6 @@
package main

import (
-"fmt"
"io"
"time"

@@ -11,20 +10,11 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/emoji"
)

-var tableHeaders = []string{"Name", "IP Address", "Last Update", "Status", "Version", "OS", "Auth Type", "Feature Flags", "Last Heartbeat"}

func getAgentsTable(out io.Writer, machines []*ent.Machine) {
t := newLightTable(out)
-t.SetHeaders(tableHeaders...)
+t.SetHeaders("Name", "IP Address", "Last Update", "Status", "Version", "Auth Type", "Last Heartbeat")
+t.SetHeaderAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)
-alignment := []table.Alignment{}
+t.SetAlignment(table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft, table.AlignLeft)

-for i := 0; i < len(tableHeaders); i++ {
-alignment = append(alignment, table.AlignLeft)
-}

-t.SetHeaderAlignment(alignment...)
-t.SetAlignment(alignment...)

for _, m := range machines {
validated := emoji.Prohibited

@@ -37,7 +27,7 @@ func getAgentsTable(out io.Writer, machines []*ent.Machine) {
hb = emoji.Warning + " " + hb
}

-t.AddRow(m.MachineId, m.IpAddress, m.UpdatedAt.Format(time.RFC3339), validated, m.Version, fmt.Sprintf("%s/%s", m.Osname, m.Osversion), m.AuthType, m.Featureflags, hb)
+t.AddRow(m.MachineId, m.IpAddress, m.UpdatedAt.Format(time.RFC3339), validated, m.Version, m.AuthType, hb)
}

t.Render()
@@ -1,7 +1,9 @@
package main

import (
+"fmt"
"os"
+"path/filepath"
"slices"
"time"

@@ -10,14 +12,18 @@ import (
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"

+"github.com/crowdsecurity/go-cs-lib/trace"

"github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/database"
"github.com/crowdsecurity/crowdsec/pkg/fflag"
)

-var ConfigFilePath string
-var csConfig *csconfig.Config
-var dbClient *database.Client
+var (
+ConfigFilePath string
+csConfig *csconfig.Config
+dbClient *database.Client
+)

type configGetter func() *csconfig.Config

@@ -82,6 +88,11 @@ func loadConfigFor(command string) (*csconfig.Config, string, error) {
return nil, "", err
}

+// set up directory for trace files
+if err := trace.Init(filepath.Join(config.ConfigPaths.DataDir, "trace")); err != nil {
+return nil, "", fmt.Errorf("while setting up trace directory: %w", err)
+}

return config, merged, nil
}

@@ -249,13 +260,13 @@ It is meant to allow you to manage bans, parsers/scenarios/etc, api and generall
cmd.AddCommand(NewCLINotifications(cli.cfg).NewCommand())
cmd.AddCommand(NewCLISupport().NewCommand())
cmd.AddCommand(NewCLIPapi(cli.cfg).NewCommand())
-cmd.AddCommand(NewCLICollection().NewCommand())
+cmd.AddCommand(NewCLICollection(cli.cfg).NewCommand())
-cmd.AddCommand(NewCLIParser().NewCommand())
+cmd.AddCommand(NewCLIParser(cli.cfg).NewCommand())
-cmd.AddCommand(NewCLIScenario().NewCommand())
+cmd.AddCommand(NewCLIScenario(cli.cfg).NewCommand())
-cmd.AddCommand(NewCLIPostOverflow().NewCommand())
+cmd.AddCommand(NewCLIPostOverflow(cli.cfg).NewCommand())
-cmd.AddCommand(NewCLIContext().NewCommand())
+cmd.AddCommand(NewCLIContext(cli.cfg).NewCommand())
-cmd.AddCommand(NewCLIAppsecConfig().NewCommand())
+cmd.AddCommand(NewCLIAppsecConfig(cli.cfg).NewCommand())
-cmd.AddCommand(NewCLIAppsecRule().NewCommand())
+cmd.AddCommand(NewCLIAppsecRule(cli.cfg).NewCommand())

if fflag.CscliSetup.IsEnabled() {
cmd.AddCommand(NewSetupCmd())
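The new loadConfigFor hunk calls trace.Init with a directory under the data dir before returning the configuration, and a later hunk in this comparison reads the resulting dumps back with trace.List. A sketch of that wiring in isolation, using only the two trace calls that actually appear in this diff (the error messages mirror the ones above and the helper names are invented):

```go
package main

import (
	"fmt"
	"path/filepath"

	"github.com/crowdsecurity/go-cs-lib/trace"
)

// setupTraceDir mirrors the pattern added in this changeset: pick a
// directory under the data dir, hand it to trace.Init, and wrap failures.
func setupTraceDir(dataDir string) error {
	if err := trace.Init(filepath.Join(dataDir, "trace")); err != nil {
		return fmt.Errorf("while setting up trace directory: %w", err)
	}
	return nil
}

// listCrashDumps has the same shape as the collectCrash helper that
// "cscli support dump" gains further down in this comparison.
func listCrashDumps() ([]string, error) {
	return trace.List()
}
```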
@@ -4,6 +4,7 @@ import (
"context"
"encoding/csv"
"encoding/json"
+"errors"
"fmt"
"io/fs"
"net/url"

@@ -88,7 +89,7 @@ func (cli *cliNotifications) getPluginConfigs() (map[string]csplugin.PluginConfi
return fmt.Errorf("error while traversing directory %s: %w", path, err)
}

-name := filepath.Join(cfg.ConfigPaths.NotificationDir, info.Name()) //Avoid calling info.Name() twice
+name := filepath.Join(cfg.ConfigPaths.NotificationDir, info.Name()) // Avoid calling info.Name() twice
if (strings.HasSuffix(name, "yaml") || strings.HasSuffix(name, "yml")) && !(info.IsDir()) {
ts, err := csplugin.ParsePluginConfigFile(name)
if err != nil {

@@ -266,7 +267,7 @@ func (cli *cliNotifications) NewTestCmd() *cobra.Command {
if !ok {
return fmt.Errorf("plugin name: '%s' does not exist", args[0])
}
-//Create a single profile with plugin name as notification name
+// Create a single profile with plugin name as notification name
return pluginBroker.Init(cfg.PluginConfig, []*csconfig.ProfileCfg{
{
Notifications: []string{

@@ -320,8 +321,8 @@ func (cli *cliNotifications) NewTestCmd() *cobra.Command {
Alert: alert,
}

-//time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
+// time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
-pluginTomb.Kill(fmt.Errorf("terminating"))
+pluginTomb.Kill(errors.New("terminating"))
pluginTomb.Wait()

return nil

@@ -416,8 +417,8 @@ cscli notifications reinject <alert_id> -a '{"remediation": true,"scenario":"not
break
}
}
-//time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
+// time.Sleep(2 * time.Second) // There's no mechanism to ensure notification has been sent
-pluginTomb.Kill(fmt.Errorf("terminating"))
+pluginTomb.Kill(errors.New("terminating"))
pluginTomb.Wait()

return nil
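Both the test and reinject hunks stop the plugin broker by killing its tomb with a plain errors.New value and then waiting on it. A generic sketch of that shutdown pattern with gopkg.in/tomb.v2, where the worker body is made up purely for illustration:

```go
package main

import (
	"errors"
	"fmt"
	"time"

	"gopkg.in/tomb.v2"
)

func main() {
	var t tomb.Tomb

	// A worker that runs until the tomb starts dying.
	t.Go(func() error {
		for {
			select {
			case <-t.Dying():
				return nil
			case <-time.After(100 * time.Millisecond):
				// pretend to deliver a notification here
			}
		}
	})

	// Same shape as pluginTomb.Kill(errors.New("terminating")) above:
	// signal shutdown, then block until every goroutine has returned.
	t.Kill(errors.New("terminating"))
	fmt.Println("worker stopped:", t.Wait())
}
```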
@@ -64,25 +64,22 @@ func (cli *cliPapi) NewStatusCmd() *cobra.Command {
cfg := cli.cfg()
dbClient, err = database.NewClient(cfg.DbConfig)
if err != nil {
-return fmt.Errorf("unable to initialize database client: %s", err)
+return fmt.Errorf("unable to initialize database client: %w", err)
}

apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)

if err != nil {
-return fmt.Errorf("unable to initialize API client: %s", err)
+return fmt.Errorf("unable to initialize API client: %w", err)
}

papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())

if err != nil {
-return fmt.Errorf("unable to initialize PAPI client: %s", err)
+return fmt.Errorf("unable to initialize PAPI client: %w", err)
}

perms, err := papi.GetPermissions()

if err != nil {
-return fmt.Errorf("unable to get PAPI permissions: %s", err)
+return fmt.Errorf("unable to get PAPI permissions: %w", err)
}
var lastTimestampStr *string
lastTimestampStr, err = dbClient.GetConfigItem(apiserver.PapiPullKey)

@@ -118,27 +115,26 @@ func (cli *cliPapi) NewSyncCmd() *cobra.Command {

dbClient, err = database.NewClient(cfg.DbConfig)
if err != nil {
-return fmt.Errorf("unable to initialize database client: %s", err)
+return fmt.Errorf("unable to initialize database client: %w", err)
}

apic, err := apiserver.NewAPIC(cfg.API.Server.OnlineClient, dbClient, cfg.API.Server.ConsoleConfig, cfg.API.Server.CapiWhitelists)
if err != nil {
-return fmt.Errorf("unable to initialize API client: %s", err)
+return fmt.Errorf("unable to initialize API client: %w", err)
}

t.Go(apic.Push)

papi, err := apiserver.NewPAPI(apic, dbClient, cfg.API.Server.ConsoleConfig, log.GetLevel())
if err != nil {
-return fmt.Errorf("unable to initialize PAPI client: %s", err)
+return fmt.Errorf("unable to initialize PAPI client: %w", err)
}

t.Go(papi.SyncDecisions)

err = papi.PullOnce(time.Time{}, true)

if err != nil {
-return fmt.Errorf("unable to sync decisions: %s", err)
+return fmt.Errorf("unable to sync decisions: %w", err)
}

log.Infof("Sending acknowledgements to CAPI")
@@ -1,6 +1,7 @@
package require

import (
+"errors"
"fmt"
"io"

@@ -16,7 +17,7 @@ func LAPI(c *csconfig.Config) error {
}

if c.DisableAPI {
-return fmt.Errorf("local API is disabled -- this command must be run on the local API machine")
+return errors.New("local API is disabled -- this command must be run on the local API machine")
}

return nil

@@ -32,7 +33,7 @@ func CAPI(c *csconfig.Config) error {

func PAPI(c *csconfig.Config) error {
if c.API.Server.OnlineClient.Credentials.PapiURL == "" {
-return fmt.Errorf("no PAPI URL in configuration")
+return errors.New("no PAPI URL in configuration")
}

return nil

@@ -40,7 +41,7 @@ func PAPI(c *csconfig.Config) error {

func CAPIRegistered(c *csconfig.Config) error {
if c.API.Server.OnlineClient.Credentials == nil {
-return fmt.Errorf("the Central API (CAPI) must be configured with 'cscli capi register'")
+return errors.New("the Central API (CAPI) must be configured with 'cscli capi register'")
}

return nil

@@ -56,7 +57,7 @@ func DB(c *csconfig.Config) error {

func Notifications(c *csconfig.Config) error {
if c.ConfigPaths.NotificationDir == "" {
-return fmt.Errorf("config_paths.notification_dir is not set in crowdsec config")
+return errors.New("config_paths.notification_dir is not set in crowdsec config")
}

return nil

@@ -82,7 +83,7 @@ func Hub(c *csconfig.Config, remote *cwhub.RemoteHubCfg, logger *logrus.Logger)
local := c.Hub

if local == nil {
-return nil, errors.New("you must configure cli before interacting with hub")
+return nil, errors.New("you must configure cli before interacting with hub")
}

if logger == nil {
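The whole require package hunk replaces fmt.Errorf with errors.New wherever the message contains no format verbs, which is what common Go linters suggest since there is nothing to format. One possible next step, not taken in this diff and shown here only as a hedged sketch, is to hoist such fixed messages into sentinel errors so callers can compare them with errors.Is:

```go
package require

import "errors"

// ErrAPIDisabled is a hypothetical sentinel: fixed message, no formatting,
// and comparable by callers with errors.Is.
var ErrAPIDisabled = errors.New("local API is disabled -- this command must be run on the local API machine")

func checkAPI(disabled bool) error {
	if disabled {
		return ErrAPIDisabled
	}
	return nil
}
```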
@@ -2,6 +2,7 @@ package main

import (
"bytes"
+"errors"
"fmt"
"os"
"os/exec"

@@ -118,9 +119,11 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
switch detectConfigFile {
case "-":
log.Tracef("Reading detection rules from stdin")

detectReader = os.Stdin
default:
log.Tracef("Reading detection rules: %s", detectConfigFile)

detectReader, err = os.Open(detectConfigFile)
if err != nil {
return err

@@ -171,6 +174,7 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
_, err := exec.LookPath("systemctl")
if err != nil {
log.Debug("systemctl not available: snubbing systemd")

snubSystemd = true
}
}

@@ -182,6 +186,7 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {

if forcedOSFamily == "" && forcedOSID != "" {
log.Debug("force-os-id is set: force-os-family defaults to 'linux'")

forcedOSFamily = "linux"
}

@@ -219,6 +224,7 @@ func runSetupDetect(cmd *cobra.Command, args []string) error {
if err != nil {
return err
}

fmt.Println(setup)

return nil

@@ -318,6 +324,7 @@ func runSetupInstallHub(cmd *cobra.Command, args []string) error {

func runSetupValidate(cmd *cobra.Command, args []string) error {
fromFile := args[0]

input, err := os.ReadFile(fromFile)
if err != nil {
return fmt.Errorf("while reading stdin: %w", err)

@@ -325,7 +332,7 @@ func runSetupValidate(cmd *cobra.Command, args []string) error {

if err = setup.Validate(input); err != nil {
fmt.Printf("%v\n", err)
-return fmt.Errorf("invalid setup file")
+return errors.New("invalid setup file")
}

return nil
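runSetupDetect above selects os.Stdin when the rules-file argument is "-" and opens a regular file otherwise. The same pattern in a self-contained form, with a made-up helper name:

```go
package main

import (
	"fmt"
	"io"
	"os"
)

// openInput returns stdin for "-" and an opened file otherwise; the caller
// must close the returned ReadCloser.
func openInput(path string) (io.ReadCloser, error) {
	if path == "-" {
		return io.NopCloser(os.Stdin), nil
	}
	return os.Open(path)
}

func main() {
	r, err := openInput("-")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer r.Close()

	data, _ := io.ReadAll(r)
	fmt.Printf("read %d bytes\n", len(data))
}
```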
@@ -1,13 +1,14 @@
package main

import (
+"errors"
"fmt"
"os"
"slices"

log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
-"gopkg.in/yaml.v2"
+"gopkg.in/yaml.v3"

"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
"github.com/crowdsecurity/crowdsec/pkg/cwhub"

@@ -36,7 +37,7 @@ cscli simulation disable crowdsecurity/ssh-bf`,
return err
}
if cli.cfg().Cscli.SimulationConfig == nil {
-return fmt.Errorf("no simulation configured")
+return errors.New("no simulation configured")
}

return nil

@@ -73,7 +74,7 @@ func (cli *cliSimulation) NewEnableCmd() *cobra.Command {

if len(args) > 0 {
for _, scenario := range args {
-var item = hub.GetItem(cwhub.SCENARIOS, scenario)
+item := hub.GetItem(cwhub.SCENARIOS, scenario)
if item == nil {
log.Errorf("'%s' doesn't exist or is not a scenario", scenario)
continue

@@ -99,11 +100,11 @@ func (cli *cliSimulation) NewEnableCmd() *cobra.Command {
log.Printf("simulation mode for '%s' enabled", scenario)
}
if err := cli.dumpSimulationFile(); err != nil {
-return fmt.Errorf("simulation enable: %s", err)
+return fmt.Errorf("simulation enable: %w", err)
}
} else if forceGlobalSimulation {
if err := cli.enableGlobalSimulation(); err != nil {
-return fmt.Errorf("unable to enable global simulation mode: %s", err)
+return fmt.Errorf("unable to enable global simulation mode: %w", err)
}
} else {
printHelp(cmd)

@@ -146,11 +147,11 @@ func (cli *cliSimulation) NewDisableCmd() *cobra.Command {
log.Printf("simulation mode for '%s' disabled", scenario)
}
if err := cli.dumpSimulationFile(); err != nil {
-return fmt.Errorf("simulation disable: %s", err)
+return fmt.Errorf("simulation disable: %w", err)
}
} else if forceGlobalSimulation {
if err := cli.disableGlobalSimulation(); err != nil {
-return fmt.Errorf("unable to disable global simulation mode: %s", err)
+return fmt.Errorf("unable to disable global simulation mode: %w", err)
}
} else {
printHelp(cmd)

@@ -202,7 +203,7 @@ func (cli *cliSimulation) enableGlobalSimulation() error {
cfg.Cscli.SimulationConfig.Exclusions = []string{}

if err := cli.dumpSimulationFile(); err != nil {
-return fmt.Errorf("unable to dump simulation file: %s", err)
+return fmt.Errorf("unable to dump simulation file: %w", err)
}

log.Printf("global simulation: enabled")

@@ -215,12 +216,12 @@ func (cli *cliSimulation) dumpSimulationFile() error {

newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig)
if err != nil {
-return fmt.Errorf("unable to marshal simulation configuration: %s", err)
+return fmt.Errorf("unable to marshal simulation configuration: %w", err)
}

err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644)
if err != nil {
-return fmt.Errorf("write simulation config in '%s' failed: %s", cfg.ConfigPaths.SimulationFilePath, err)
+return fmt.Errorf("write simulation config in '%s' failed: %w", cfg.ConfigPaths.SimulationFilePath, err)
}

log.Debugf("updated simulation file %s", cfg.ConfigPaths.SimulationFilePath)

@@ -237,12 +238,12 @@ func (cli *cliSimulation) disableGlobalSimulation() error {

newConfigSim, err := yaml.Marshal(cfg.Cscli.SimulationConfig)
if err != nil {
-return fmt.Errorf("unable to marshal new simulation configuration: %s", err)
+return fmt.Errorf("unable to marshal new simulation configuration: %w", err)
}

err = os.WriteFile(cfg.ConfigPaths.SimulationFilePath, newConfigSim, 0o644)
if err != nil {
-return fmt.Errorf("unable to write new simulation config in '%s': %s", cfg.ConfigPaths.SimulationFilePath, err)
+return fmt.Errorf("unable to write new simulation config in '%s': %w", cfg.ConfigPaths.SimulationFilePath, err)
}

log.Printf("global simulation: disabled")

@@ -269,8 +270,10 @@ func (cli *cliSimulation) status() {
}
} else {
log.Println("global simulation: disabled")

if len(cfg.Cscli.SimulationConfig.Exclusions) > 0 {
log.Println("Scenarios in simulation mode :")

for _, scenario := range cfg.Cscli.SimulationConfig.Exclusions {
log.Printf(" - %s", scenario)
}
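This file, like a few others in the comparison, moves from gopkg.in/yaml.v2 to gopkg.in/yaml.v3. For plain Marshal calls such as dumpSimulationFile the API is unchanged; the visible difference is mostly output formatting, since v3 indents nested structures with four spaces by default while an Encoder with SetIndent can restore two-space output. A small sketch (the struct is invented for illustration):

```go
package main

import (
	"bytes"
	"fmt"

	"gopkg.in/yaml.v3"
)

type simulationConfig struct {
	Simulation bool     `yaml:"simulation"`
	Exclusions []string `yaml:"exclusions,omitempty"`
}

func main() {
	cfg := simulationConfig{Simulation: false, Exclusions: []string{"crowdsecurity/ssh-bf"}}

	// yaml.Marshal in v3 uses 4-space indentation by default.
	out, _ := yaml.Marshal(cfg)
	fmt.Print(string(out))

	// An Encoder lets you keep the 2-space style that v2 produced.
	var buf bytes.Buffer
	enc := yaml.NewEncoder(&buf)
	enc.SetIndent(2)
	_ = enc.Encode(cfg)
	_ = enc.Close()
	fmt.Print(buf.String())
}
```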
@@ -4,6 +4,7 @@ import (
"archive/zip"
"bytes"
"context"
+"errors"
"fmt"
"io"
"net/http"

@@ -12,12 +13,14 @@ import (
"path/filepath"
"regexp"
"strings"
+"time"

"github.com/blackfireio/osinfo"
"github.com/go-openapi/strfmt"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"

+"github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/go-cs-lib/version"

"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"

@@ -47,6 +50,7 @@ const (
SUPPORT_CAPI_STATUS_PATH = "capi_status.txt"
SUPPORT_ACQUISITION_CONFIG_BASE_PATH = "config/acquis/"
SUPPORT_CROWDSEC_PROFILE_PATH = "config/profiles.yaml"
+SUPPORT_CRASH_PATH = "crash/"
)

// from https://github.com/acarl005/stripansi

@@ -62,7 +66,7 @@ func collectMetrics() ([]byte, []byte, error) {

if csConfig.Cscli.PrometheusUrl == "" {
log.Warn("No Prometheus URL configured, metrics will not be collected")
-return nil, nil, fmt.Errorf("prometheus_uri is not set")
+return nil, nil, errors.New("prometheus_uri is not set")
}

humanMetrics := bytes.NewBuffer(nil)

@@ -70,7 +74,7 @@ func collectMetrics() ([]byte, []byte, error) {
ms := NewMetricStore()

if err := ms.Fetch(csConfig.Cscli.PrometheusUrl); err != nil {
-return nil, nil, fmt.Errorf("could not fetch prometheus metrics: %s", err)
+return nil, nil, fmt.Errorf("could not fetch prometheus metrics: %w", err)
}

if err := ms.Format(humanMetrics, nil, "human", false); err != nil {

@@ -79,21 +83,21 @@ func collectMetrics() ([]byte, []byte, error) {

req, err := http.NewRequest(http.MethodGet, csConfig.Cscli.PrometheusUrl, nil)
if err != nil {
-return nil, nil, fmt.Errorf("could not create requests to prometheus endpoint: %s", err)
+return nil, nil, fmt.Errorf("could not create requests to prometheus endpoint: %w", err)
}

client := &http.Client{}

resp, err := client.Do(req)
if err != nil {
-return nil, nil, fmt.Errorf("could not get metrics from prometheus endpoint: %s", err)
+return nil, nil, fmt.Errorf("could not get metrics from prometheus endpoint: %w", err)
}

defer resp.Body.Close()

body, err := io.ReadAll(resp.Body)
if err != nil {
-return nil, nil, fmt.Errorf("could not read metrics from prometheus endpoint: %s", err)
+return nil, nil, fmt.Errorf("could not read metrics from prometheus endpoint: %w", err)
}

return humanMetrics.Bytes(), body, nil

@@ -121,19 +125,18 @@ func collectOSInfo() ([]byte, error) {
log.Info("Collecting OS info")

info, err := osinfo.GetOSInfo()

if err != nil {
return nil, err
}

w := bytes.NewBuffer(nil)
-w.WriteString(fmt.Sprintf("Architecture: %s\n", info.Architecture))
+fmt.Fprintf(w, "Architecture: %s\n", info.Architecture)
-w.WriteString(fmt.Sprintf("Family: %s\n", info.Family))
+fmt.Fprintf(w, "Family: %s\n", info.Family)
-w.WriteString(fmt.Sprintf("ID: %s\n", info.ID))
+fmt.Fprintf(w, "ID: %s\n", info.ID)
-w.WriteString(fmt.Sprintf("Name: %s\n", info.Name))
+fmt.Fprintf(w, "Name: %s\n", info.Name)
-w.WriteString(fmt.Sprintf("Codename: %s\n", info.Codename))
+fmt.Fprintf(w, "Codename: %s\n", info.Codename)
-w.WriteString(fmt.Sprintf("Version: %s\n", info.Version))
+fmt.Fprintf(w, "Version: %s\n", info.Version)
-w.WriteString(fmt.Sprintf("Build: %s\n", info.Build))
+fmt.Fprintf(w, "Build: %s\n", info.Build)

return w.Bytes(), nil
}

@@ -163,7 +166,7 @@ func collectBouncers(dbClient *database.Client) ([]byte, error) {

bouncers, err := dbClient.ListBouncers()
if err != nil {
-return nil, fmt.Errorf("unable to list bouncers: %s", err)
+return nil, fmt.Errorf("unable to list bouncers: %w", err)
}

getBouncersTable(out, bouncers)

@@ -176,7 +179,7 @@ func collectAgents(dbClient *database.Client) ([]byte, error) {

machines, err := dbClient.ListMachines()
if err != nil {
-return nil, fmt.Errorf("unable to list machines: %s", err)
+return nil, fmt.Errorf("unable to list machines: %w", err)
}

getAgentsTable(out, machines)

@@ -196,7 +199,7 @@ func collectAPIStatus(login string, password string, endpoint string, prefix str
return []byte(fmt.Sprintf("cannot parse API URL: %s", err))
}

-scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
+scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
if err != nil {
return []byte(fmt.Sprintf("could not collect scenarios: %s", err))
}

@@ -264,6 +267,11 @@ func collectAcquisitionConfig() map[string][]byte {
return ret
}

+func collectCrash() ([]string, error) {
+log.Info("Collecting crash dumps")
+return trace.List()
+}

type cliSupport struct{}

func NewCLISupport() *cliSupport {

@@ -311,7 +319,7 @@ cscli support dump -f /tmp/crowdsec-support.zip
`,
Args: cobra.NoArgs,
DisableAutoGenTag: true,
-Run: func(_ *cobra.Command, _ []string) {
+RunE: func(_ *cobra.Command, _ []string) error {
var err error
var skipHub, skipDB, skipCAPI, skipLAPI, skipAgent bool
infos := map[string][]byte{

@@ -431,11 +439,31 @@ cscli support dump -f /tmp/crowdsec-support.zip
}
}

+crash, err := collectCrash()
+if err != nil {
+log.Errorf("could not collect crash dumps: %s", err)
+}

+for _, filename := range crash {
+content, err := os.ReadFile(filename)
+if err != nil {
+log.Errorf("could not read crash dump %s: %s", filename, err)
+}

+infos[SUPPORT_CRASH_PATH+filepath.Base(filename)] = content
+}

w := bytes.NewBuffer(nil)
zipWriter := zip.NewWriter(w)

for filename, data := range infos {
-fw, err := zipWriter.Create(filename)
+header := &zip.FileHeader{
+Name: filename,
+Method: zip.Deflate,
+// TODO: retain mtime where possible (esp. trace)
+Modified: time.Now(),
+}
+fw, err := zipWriter.CreateHeader(header)
if err != nil {
log.Errorf("Could not add zip entry for %s: %s", filename, err)
continue

@@ -445,15 +473,19 @@ cscli support dump -f /tmp/crowdsec-support.zip

err = zipWriter.Close()
if err != nil {
-log.Fatalf("could not finalize zip file: %s", err)
+return fmt.Errorf("could not finalize zip file: %s", err)
}

+if outFile == "-" {
+_, err = os.Stdout.Write(w.Bytes())
+return err
+}
err = os.WriteFile(outFile, w.Bytes(), 0o600)
if err != nil {
-log.Fatalf("could not write zip file to %s: %s", outFile, err)
+return fmt.Errorf("could not write zip file to %s: %s", outFile, err)
}

log.Infof("Written zip file to %s", outFile)
+return nil
},
}
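The support dump hunk switches from zipWriter.Create to zipWriter.CreateHeader so that each archive entry carries an explicit compression method and modification time, and it streams the archive to stdout when the output file is "-". A reduced, standalone sketch of the archive/zip part; the file names and contents are invented:

```go
package main

import (
	"archive/zip"
	"bytes"
	"log"
	"os"
	"time"
)

func main() {
	files := map[string][]byte{
		"version.txt":   []byte("v1.6.x\n"),
		"crash/example": []byte("stack trace...\n"),
	}

	buf := bytes.NewBuffer(nil)
	zw := zip.NewWriter(buf)

	for name, data := range files {
		// CreateHeader (unlike Create) lets us set compression and mtime.
		fw, err := zw.CreateHeader(&zip.FileHeader{
			Name:     name,
			Method:   zip.Deflate,
			Modified: time.Now(),
		})
		if err != nil {
			log.Fatalf("zip entry %s: %s", name, err)
		}
		if _, err := fw.Write(data); err != nil {
			log.Fatalf("zip write %s: %s", name, err)
		}
	}

	if err := zw.Close(); err != nil {
		log.Fatalf("finalize zip: %s", err)
	}

	// "-" as the output path means: write the finished archive to stdout.
	if _, err := os.Stdout.Write(buf.Bytes()); err != nil {
		log.Fatal(err)
	}
}
```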
@@ -9,8 +9,7 @@ import (
"time"

log "github.com/sirupsen/logrus"
-"gopkg.in/tomb.v2"
+"gopkg.in/yaml.v3"
-"gopkg.in/yaml.v2"

"github.com/crowdsecurity/go-cs-lib/trace"

@@ -147,22 +146,6 @@ func runCrowdsec(cConfig *csconfig.Config, parsers *parser.Parsers, hub *cwhub.H
})
outputWg.Wait()

-mp := NewMetricsProvider(
-apiClient,
-*cConfig.Crowdsec.MetricsInterval,
-log.WithField("service", "lpmetrics"),
-cConfig.API.Server.ConsoleConfig.EnabledOptions(),
-datasources,
-hub,
-)

-lpMetricsTomb := tomb.Tomb{}

-lpMetricsTomb.Go(func() error {
-// XXX: context?
-return mp.Run(context.Background(), &lpMetricsTomb)
-})

if cConfig.Prometheus != nil && cConfig.Prometheus.Enabled {
aggregated := false
if cConfig.Prometheus.Level == configuration.CFG_METRICS_AGGREGATE {

@@ -224,7 +207,7 @@ func serveCrowdsec(parsers *parser.Parsers, cConfig *csconfig.Config, hub *cwhub
}

func dumpBucketsPour() {
-fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucketpour-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
+fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucketpour-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o666)
if err != nil {
log.Fatalf("open: %s", err)
}

@@ -247,7 +230,7 @@ func dumpBucketsPour() {
}

func dumpParserState() {
-fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "parser-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
+fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "parser-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o666)
if err != nil {
log.Fatalf("open: %s", err)
}

@@ -270,7 +253,7 @@ func dumpParserState() {
}

func dumpOverflowState() {
-fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucket-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
+fd, err := os.OpenFile(filepath.Join(parser.DumpFolder, "bucket-dump.yaml"), os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o666)
if err != nil {
log.Fatalf("open: %s", err)
}
@@ -17,12 +17,12 @@ import (
)

func AuthenticatedLAPIClient(credentials csconfig.ApiCredentialsCfg, hub *cwhub.Hub) (*apiclient.ApiClient, error) {
-scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
+scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
if err != nil {
return nil, fmt.Errorf("loading list of installed hub scenarios: %w", err)
}

-appsecRules, err := hub.GetInstalledItemNames(cwhub.APPSEC_RULES)
+appsecRules, err := hub.GetInstalledNamesByType(cwhub.APPSEC_RULES)
if err != nil {
return nil, fmt.Errorf("loading list of installed hub appsec rules: %w", err)
}

@@ -52,11 +52,11 @@ func AuthenticatedLAPIClient(credentials csconfig.ApiCredentialsCfg, hub *cwhub.
PapiURL: papiURL,
VersionPrefix: "v1",
UpdateScenario: func() ([]string, error) {
-scenarios, err := hub.GetInstalledItemNames(cwhub.SCENARIOS)
+scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
if err != nil {
return nil, err
}
-appsecRules, err := hub.GetInstalledItemNames(cwhub.APPSEC_RULES)
+appsecRules, err := hub.GetInstalledNamesByType(cwhub.APPSEC_RULES)
if err != nil {
return nil, err
}
@@ -1,187 +0,0 @@
-package main
-
-import (
-"context"
-"errors"
-"net/http"
-"time"
-
-"github.com/blackfireio/osinfo"
-"github.com/sirupsen/logrus"
-
-"gopkg.in/tomb.v2"
-
-"github.com/crowdsecurity/go-cs-lib/ptr"
-"github.com/crowdsecurity/go-cs-lib/trace"
-
-"github.com/crowdsecurity/crowdsec/pkg/acquisition"
-"github.com/crowdsecurity/crowdsec/pkg/apiclient"
-"github.com/crowdsecurity/crowdsec/pkg/cwhub"
-"github.com/crowdsecurity/crowdsec/pkg/cwversion"
-"github.com/crowdsecurity/crowdsec/pkg/fflag"
-"github.com/crowdsecurity/crowdsec/pkg/models"
-)
-
-// MetricsProvider collects metrics from the LP and sends them to the LAPI
-type MetricsProvider struct {
-apic *apiclient.ApiClient
-interval time.Duration
-static staticMetrics
-logger *logrus.Entry
-}
-
-type staticMetrics struct {
-osName string
-osVersion string
-startupTS int64
-featureFlags []string
-consoleOptions []string
-datasourceMap map[string]int64
-hubState models.HubItems
-}
-
-func getHubState(hub *cwhub.Hub) models.HubItems {
-ret := models.HubItems{}
-
-for _, itemType := range cwhub.ItemTypes {
-items, _ := hub.GetInstalledItems(itemType)
-for _, item := range items {
-status := "official"
-if item.State.IsLocal() {
-status = "custom"
-}
-if item.State.Tainted {
-status = "tainted"
-}
-ret[item.FQName()] = models.HubItem{
-Version: item.Version,
-Status: status,
-}
-}
-}
-
-return ret
-}
-
-// newStaticMetrics is called when the process starts, or reloads the configuration
-func newStaticMetrics(consoleOptions []string, datasources []acquisition.DataSource, hub *cwhub.Hub) staticMetrics {
-datasourceMap := map[string]int64{}
-
-for _, ds := range datasources {
-datasourceMap[ds.GetName()] += 1
-}
-
-osName, osVersion := detectOS()
-
-return staticMetrics{
-osName: osName,
-osVersion: osVersion,
-startupTS: time.Now().Unix(),
-featureFlags: fflag.Crowdsec.GetEnabledFeatures(),
-consoleOptions: consoleOptions,
-datasourceMap: datasourceMap,
-hubState: getHubState(hub),
-}
-}
-
-func detectOS() (string, string) {
-if cwversion.System == "docker" {
-return "docker", ""
-}
-
-osInfo, err := osinfo.GetOSInfo()
-if err != nil {
-return cwversion.System, "???"
-}
-
-return osInfo.Name, osInfo.Version
-}
-
-func NewMetricsProvider(apic *apiclient.ApiClient, interval time.Duration, logger *logrus.Entry,
-consoleOptions []string, datasources []acquisition.DataSource, hub *cwhub.Hub) *MetricsProvider {
-return &MetricsProvider{
-apic: apic,
-interval: interval,
-logger: logger,
-static: newStaticMetrics(consoleOptions, datasources, hub),
-}
-}
-
-func (m *MetricsProvider) metricsPayload() *models.AllMetrics {
-meta := &models.MetricsMeta{
-UtcStartupTimestamp: m.static.startupTS,
-WindowSizeSeconds: int64(m.interval.Seconds()),
-}
-
-os := &models.OSversion{
-Name: m.static.osName,
-Version: m.static.osVersion,
-}
-
-base := models.BaseMetrics{
-Meta: meta,
-Os: os,
-Version: ptr.Of(cwversion.VersionStr()),
-FeatureFlags: m.static.featureFlags,
-}
-
-met := &models.LogProcessorsMetrics{
-BaseMetrics: base,
-ConsoleOptions: m.static.consoleOptions,
-Datasources: m.static.datasourceMap,
-HubItems: m.static.hubState,
-}
-
-// TODO: more metric details... ?
-
-return &models.AllMetrics{
-LogProcessors: []*models.LogProcessorsMetrics{met},
-}
-}
-
-func (m *MetricsProvider) Run(ctx context.Context, myTomb *tomb.Tomb) error {
-defer trace.CatchPanic("crowdsec/MetricsProvider.Run")
-
-if m.interval == time.Duration(0) {
-return nil
-}
-
-met := m.metricsPayload()
-
-ticker := time.NewTicker(1) //Send on start
-
-for {
-select {
-case <-ticker.C:
-met.LogProcessors[0].Meta.UtcNowTimestamp = time.Now().Unix()
-
-ctxTime, cancel := context.WithTimeout(ctx, 10*time.Second)
-defer cancel()
-
-_, resp, err := m.apic.UsageMetrics.Add(ctxTime, met)
-switch {
-case errors.Is(err, context.DeadlineExceeded):
-m.logger.Warnf("timeout sending lp metrics")
-continue
-case err != nil && resp != nil && resp.Response.StatusCode == http.StatusNotFound:
-m.logger.Warnf("metrics endpoint not found, older LAPI?")
-continue
-case err != nil:
-m.logger.Warnf("failed to send lp metrics: %s", err)
-continue
-}
-
-if resp.Response.StatusCode != http.StatusCreated {
-m.logger.Warnf("failed to send lp metrics: %s", resp.Response.Status)
-continue
-}
-
-ticker.Reset(m.interval)
-
-m.logger.Tracef("lp usage metrics sent")
-case <-myTomb.Dying():
-ticker.Stop()
-return nil
-}
-}
-}
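The removed lpmetrics.go used a small scheduling trick worth noting: time.NewTicker(1) (one nanosecond) fires almost immediately, and ticker.Reset(interval) after the first send switches the loop to the regular cadence, so the first payload goes out on start-up. The same idea in isolation, with the metrics send replaced by a print:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	interval := 2 * time.Second

	// A 1ns ticker fires right away, so the first payload is sent on start;
	// Reset then switches the loop to the real interval.
	ticker := time.NewTicker(1)
	defer ticker.Stop()

	for i := 0; i < 3; i++ {
		<-ticker.C
		fmt.Println("send metrics payload", time.Now().Format(time.RFC3339))
		ticker.Reset(interval)
	}
}
```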
@@ -6,6 +6,7 @@ import (
"fmt"
_ "net/http/pprof"
"os"
+"path/filepath"
"runtime"
"runtime/pprof"
"strings"

@@ -14,6 +15,8 @@ import (
log "github.com/sirupsen/logrus"
"gopkg.in/tomb.v2"

+"github.com/crowdsecurity/go-cs-lib/trace"

"github.com/crowdsecurity/crowdsec/pkg/acquisition"
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/csplugin"

@@ -96,8 +99,8 @@ func LoadBuckets(cConfig *csconfig.Config, hub *cwhub.Hub) error {
buckets = leakybucket.NewBuckets()

log.Infof("Loading %d scenario files", len(files))
-holders, outputEventChan, err = leakybucket.LoadBuckets(cConfig.Crowdsec, hub, files, &bucketsTomb, buckets, flags.OrderEvent)

+holders, outputEventChan, err = leakybucket.LoadBuckets(cConfig.Crowdsec, hub, files, &bucketsTomb, buckets, flags.OrderEvent)
if err != nil {
return fmt.Errorf("scenario loading failed: %w", err)
}

@@ -230,6 +233,10 @@ func LoadConfig(configFile string, disableAgent bool, disableAPI bool, quiet boo
return nil, fmt.Errorf("while loading configuration file: %w", err)
}

+if err := trace.Init(filepath.Join(cConfig.ConfigPaths.DataDir, "trace")); err != nil {
+return nil, fmt.Errorf("while setting up trace directory: %w", err)
+}

cConfig.Common.LogLevel = newLogLevel(cConfig.Common.LogLevel, flags)

if dumpFolder != "" {
@@ -3,7 +3,6 @@ package main
import (
"fmt"
"net/http"
-"time"

"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"

@@ -22,7 +21,8 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/parser"
)

-/*prometheus*/
+// Prometheus

var globalParserHits = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "cs_parser_hits_total",

@@ -30,6 +30,7 @@ var globalParserHits = prometheus.NewCounterVec(
},
[]string{"source", "type"},
)

var globalParserHitsOk = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "cs_parser_hits_ok_total",

@@ -37,6 +38,7 @@ var globalParserHitsOk = prometheus.NewCounterVec(
},
[]string{"source", "type"},
)

var globalParserHitsKo = prometheus.NewCounterVec(
prometheus.CounterOpts{
Name: "cs_parser_hits_ko_total",

@@ -116,9 +118,7 @@ func computeDynamicMetrics(next http.Handler, dbClient *database.Client) http.Ha
return
}

-decisionsFilters := make(map[string][]string, 0)
+decisions, err := dbClient.QueryDecisionCountByScenario()

-decisions, err := dbClient.QueryDecisionCountByScenario(decisionsFilters)
if err != nil {
log.Errorf("Error querying decisions for metrics: %v", err)
next.ServeHTTP(w, r)

@@ -139,7 +139,6 @@ func computeDynamicMetrics(next http.Handler, dbClient *database.Client) http.Ha
}

alerts, err := dbClient.AlertsCountPerScenario(alertsFilter)

if err != nil {
log.Errorf("Error querying alerts for metrics: %v", err)
next.ServeHTTP(w, r)

@@ -194,7 +193,6 @@ func servePrometheus(config *csconfig.PrometheusCfg, dbClient *database.Client,
defer trace.CatchPanic("crowdsec/servePrometheus")

http.Handle("/metrics", computeDynamicMetrics(promhttp.Handler(), dbClient))
-log.Debugf("serving metrics after %s ms", time.Since(crowdsecT0))

if err := http.ListenAndServe(fmt.Sprintf("%s:%d", config.ListenAddr, config.ListenPort), nil); err != nil {
// in time machine, we most likely have the LAPI using the port
@ -391,7 +391,7 @@ func Serve(cConfig *csconfig.Config, agentReady chan bool) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
if cConfig.Common != nil && cConfig.Common.Daemonize {
|
if cConfig.Common != nil && cConfig.Common.Daemonize {
|
||||||
csdaemon.NotifySystemd(log.StandardLogger())
|
csdaemon.Notify(csdaemon.Ready, log.StandardLogger())
|
||||||
// wait for signals
|
// wait for signals
|
||||||
return HandleSignals(cConfig)
|
return HandleSignals(cConfig)
|
||||||
}
|
}
|
||||||
|
|
|
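The counters above are plain client_golang CounterVecs keyed by the "source" and "type" labels. As a rough illustration of how such a counter is registered and incremented, here is a minimal standalone sketch using the public prometheus API; the registry wiring, listen address and label values are assumptions, not the project's actual setup:

package main

import (
    "net/http"

    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/promhttp"
)

// hypothetical counter mirroring the cs_parser_hits_total shape above
var parserHits = prometheus.NewCounterVec(
    prometheus.CounterOpts{Name: "cs_parser_hits_total", Help: "Total events entering the parsers."},
    []string{"source", "type"},
)

func main() {
    prometheus.MustRegister(parserHits) // register with the default registry

    // bump one labelled series (label values are made up for the example)
    parserHits.WithLabelValues("file:/var/log/auth.log", "syslog").Inc()

    http.Handle("/metrics", promhttp.Handler())
    _ = http.ListenAndServe("127.0.0.1:6060", nil)
}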
@@ -5,10 +5,11 @@ import (
"fmt"
"os"

- "github.com/crowdsecurity/crowdsec/pkg/protobufs"
"github.com/hashicorp/go-hclog"
plugin "github.com/hashicorp/go-plugin"
- "gopkg.in/yaml.v2"
+ "gopkg.in/yaml.v3"

+ "github.com/crowdsecurity/crowdsec/pkg/protobufs"
)

type PluginConfig struct {

@@ -32,6 +33,7 @@ func (s *DummyPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
if _, ok := s.PluginConfigByName[notification.Name]; !ok {
return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
}

cfg := s.PluginConfigByName[notification.Name]

if cfg.LogLevel != nil && *cfg.LogLevel != "" {

@@ -42,19 +44,22 @@ func (s *DummyPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
logger.Debug(notification.Text)

if cfg.OutputFile != nil && *cfg.OutputFile != "" {
- f, err := os.OpenFile(*cfg.OutputFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
+ f, err := os.OpenFile(*cfg.OutputFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644)
if err != nil {
logger.Error(fmt.Sprintf("Cannot open notification file: %s", err))
}

if _, err := f.WriteString(notification.Text + "\n"); err != nil {
f.Close()
logger.Error(fmt.Sprintf("Cannot write notification to file: %s", err))
}

err = f.Close()
if err != nil {
logger.Error(fmt.Sprintf("Cannot close notification file: %s", err))
}
}

fmt.Println(notification.Text)

return &protobufs.Empty{}, nil

@@ -64,11 +69,12 @@ func (s *DummyPlugin) Configure(ctx context.Context, config *protobufs.Config) (
d := PluginConfig{}
err := yaml.Unmarshal(config.Config, &d)
s.PluginConfigByName[d.Name] = d

return &protobufs.Empty{}, err
}

func main() {
- var handshake = plugin.HandshakeConfig{
+ handshake := plugin.HandshakeConfig{
ProtocolVersion: 1,
MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),
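The only behavioural no-op worth calling out in this hunk is 0644 → 0o644: both spell the same permission bits, the 0o prefix is simply the explicit octal literal that modern Go formatters prefer. A tiny standalone sketch (the file path is made up for the example):

package main

import "os"

func main() {
    // 0o644 and 0644 are the same os.FileMode value; only the spelling differs.
    f, err := os.OpenFile("/tmp/example-notification.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644)
    if err != nil {
        panic(err)
    }
    defer f.Close()

    _, _ = f.WriteString("hello\n")
}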
@@ -2,15 +2,17 @@ package main

import (
"context"
+ "errors"
"fmt"
"os"
"time"

- "github.com/crowdsecurity/crowdsec/pkg/protobufs"
"github.com/hashicorp/go-hclog"
plugin "github.com/hashicorp/go-plugin"
mail "github.com/xhit/go-simple-mail/v2"
- "gopkg.in/yaml.v2"
+ "gopkg.in/yaml.v3"

+ "github.com/crowdsecurity/crowdsec/pkg/protobufs"
)

var baseLogger hclog.Logger = hclog.New(&hclog.LoggerOptions{

@@ -72,19 +74,20 @@ func (n *EmailPlugin) Configure(ctx context.Context, config *protobufs.Config) (
}

if d.Name == "" {
- return nil, fmt.Errorf("name is required")
+ return nil, errors.New("name is required")
}

if d.SMTPHost == "" {
- return nil, fmt.Errorf("SMTP host is not set")
+ return nil, errors.New("SMTP host is not set")
}

if d.ReceiverEmails == nil || len(d.ReceiverEmails) == 0 {
- return nil, fmt.Errorf("receiver emails are not set")
+ return nil, errors.New("receiver emails are not set")
}

n.ConfigByName[d.Name] = d
baseLogger.Debug(fmt.Sprintf("Email plugin '%s' use SMTP host '%s:%d'", d.Name, d.SMTPHost, d.SMTPPort))

return &protobufs.Empty{}, nil
}

@@ -92,6 +95,7 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
if _, ok := n.ConfigByName[notification.Name]; !ok {
return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
}

cfg := n.ConfigByName[notification.Name]

logger := baseLogger.Named(cfg.Name)

@@ -117,6 +121,7 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
server.ConnectTimeout, err = time.ParseDuration(cfg.ConnectTimeout)
if err != nil {
logger.Warn(fmt.Sprintf("invalid connect timeout '%s', using default '10s'", cfg.ConnectTimeout))

server.ConnectTimeout = 10 * time.Second
}
}

@@ -125,15 +130,18 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
server.SendTimeout, err = time.ParseDuration(cfg.SendTimeout)
if err != nil {
logger.Warn(fmt.Sprintf("invalid send timeout '%s', using default '10s'", cfg.SendTimeout))

server.SendTimeout = 10 * time.Second
}
}

logger.Debug("making smtp connection")

smtpClient, err := server.Connect()
if err != nil {
return &protobufs.Empty{}, err
}

logger.Debug("smtp connection done")

email := mail.NewMSG()

@@ -146,12 +154,14 @@ func (n *EmailPlugin) Notify(ctx context.Context, notification *protobufs.Notifi
if err != nil {
return &protobufs.Empty{}, err
}

logger.Info(fmt.Sprintf("sent email to %v", cfg.ReceiverEmails))

return &protobufs.Empty{}, nil
}

func main() {
- var handshake = plugin.HandshakeConfig{
+ handshake := plugin.HandshakeConfig{
ProtocolVersion: 1,
MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),
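The fmt.Errorf → errors.New swaps above follow the usual Go rule: use errors.New for fixed messages, and keep fmt.Errorf for messages that interpolate values, with %w when the caller should be able to unwrap the cause. A small sketch of the distinction; the names here are illustrative only, not the plugin's real identifiers:

package main

import (
    "errors"
    "fmt"
)

// constant message: errors.New is enough, and it gives callers a sentinel to compare against
var errNameRequired = errors.New("name is required")

func validate(name string) error {
    if name == "" {
        return errNameRequired
    }
    return nil
}

func main() {
    if err := validate(""); err != nil {
        // fmt.Errorf with %w wraps the sentinel, so errors.Is still matches it
        wrapped := fmt.Errorf("loading plugin config: %w", err)
        fmt.Println(errors.Is(wrapped, errNameRequired)) // true
    }
}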
@@ -12,10 +12,11 @@ import (
"os"
"strings"

- "github.com/crowdsecurity/crowdsec/pkg/protobufs"
"github.com/hashicorp/go-hclog"
plugin "github.com/hashicorp/go-plugin"
- "gopkg.in/yaml.v2"
+ "gopkg.in/yaml.v3"

+ "github.com/crowdsecurity/crowdsec/pkg/protobufs"
)

type PluginConfig struct {

@@ -90,18 +91,23 @@ func getTLSClient(c *PluginConfig) error {

tlsConfig.Certificates = []tls.Certificate{cert}
}

transport := &http.Transport{
TLSClientConfig: tlsConfig,
}

if c.UnixSocket != "" {
logger.Info(fmt.Sprintf("Using socket '%s'", c.UnixSocket))

transport.DialContext = func(_ context.Context, _, _ string) (net.Conn, error) {
return net.Dial("unix", strings.TrimSuffix(c.UnixSocket, "/"))
}
}

c.Client = &http.Client{
Transport: transport,
}

return nil
}

@@ -109,6 +115,7 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific
if _, ok := s.PluginConfigByName[notification.Name]; !ok {
return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
}

cfg := s.PluginConfigByName[notification.Name]

if cfg.LogLevel != nil && *cfg.LogLevel != "" {

@@ -121,11 +128,14 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific
if err != nil {
return nil, err
}

for headerName, headerValue := range cfg.Headers {
logger.Debug(fmt.Sprintf("adding header %s: %s", headerName, headerValue))
request.Header.Add(headerName, headerValue)
}

logger.Debug(fmt.Sprintf("making HTTP %s call to %s with body %s", cfg.Method, cfg.URL, notification.Text))

resp, err := cfg.Client.Do(request.WithContext(ctx))
if err != nil {
logger.Error(fmt.Sprintf("Failed to make HTTP request : %s", err))

@@ -135,7 +145,7 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific

respData, err := io.ReadAll(resp.Body)
if err != nil {
- return nil, fmt.Errorf("failed to read response body got error %s", err)
+ return nil, fmt.Errorf("failed to read response body got error %w", err)
}

logger.Debug(fmt.Sprintf("got response %s", string(respData)))

@@ -143,6 +153,7 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific
if resp.StatusCode < 200 || resp.StatusCode >= 300 {
logger.Warn(fmt.Sprintf("HTTP server returned non 200 status code: %d", resp.StatusCode))
logger.Debug(fmt.Sprintf("HTTP server returned body: %s", string(respData)))

return &protobufs.Empty{}, nil
}

@@ -151,21 +162,25 @@ func (s *HTTPPlugin) Notify(ctx context.Context, notification *protobufs.Notific

func (s *HTTPPlugin) Configure(ctx context.Context, config *protobufs.Config) (*protobufs.Empty, error) {
d := PluginConfig{}

err := yaml.Unmarshal(config.Config, &d)
if err != nil {
return nil, err
}

err = getTLSClient(&d)
if err != nil {
return nil, err
}

s.PluginConfigByName[d.Name] = d
logger.Debug(fmt.Sprintf("HTTP plugin '%s' use URL '%s'", d.Name, d.URL))

return &protobufs.Empty{}, err
}

func main() {
- var handshake = plugin.HandshakeConfig{
+ handshake := plugin.HandshakeConfig{
ProtocolVersion: 1,
MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),
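The getTLSClient hunk above routes the plugin's HTTP client over a Unix socket by overriding http.Transport.DialContext. A self-contained sketch of that same pattern outside the plugin; the socket path and the "unix" host in the URL are placeholders, since the host part is ignored once DialContext pins every connection to the socket:

package main

import (
    "context"
    "fmt"
    "net"
    "net/http"
)

func main() {
    transport := &http.Transport{
        // every connection goes to the Unix socket, whatever host the URL names
        DialContext: func(_ context.Context, _, _ string) (net.Conn, error) {
            return net.Dial("unix", "/run/example/notify.sock")
        },
    }
    client := &http.Client{Transport: transport}

    resp, err := client.Get("http://unix/v1/notify") // hypothetical endpoint
    if err != nil {
        fmt.Println("request failed:", err)
        return
    }
    defer resp.Body.Close()

    fmt.Println("status:", resp.Status)
}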
@@ -5,12 +5,12 @@ import (
"fmt"
"os"

- "github.com/crowdsecurity/crowdsec/pkg/protobufs"
"github.com/hashicorp/go-hclog"
plugin "github.com/hashicorp/go-plugin"

"github.com/slack-go/slack"
- "gopkg.in/yaml.v2"
+ "gopkg.in/yaml.v3"

+ "github.com/crowdsecurity/crowdsec/pkg/protobufs"
)

type PluginConfig struct {

@@ -33,13 +33,16 @@ func (n *Notify) Notify(ctx context.Context, notification *protobufs.Notificatio
if _, ok := n.ConfigByName[notification.Name]; !ok {
return nil, fmt.Errorf("invalid plugin config name %s", notification.Name)
}

cfg := n.ConfigByName[notification.Name]

if cfg.LogLevel != nil && *cfg.LogLevel != "" {
logger.SetLevel(hclog.LevelFromString(*cfg.LogLevel))
}

logger.Info(fmt.Sprintf("found notify signal for %s config", notification.Name))
logger.Debug(fmt.Sprintf("posting to %s webhook, message %s", cfg.Webhook, notification.Text))

err := slack.PostWebhookContext(ctx, n.ConfigByName[notification.Name].Webhook, &slack.WebhookMessage{
Text: notification.Text,
})

@@ -52,16 +55,19 @@ func (n *Notify) Notify(ctx context.Context, notification *protobufs.Notificatio

func (n *Notify) Configure(ctx context.Context, config *protobufs.Config) (*protobufs.Empty, error) {
d := PluginConfig{}

if err := yaml.Unmarshal(config.Config, &d); err != nil {
return nil, err
}

n.ConfigByName[d.Name] = d
logger.Debug(fmt.Sprintf("Slack plugin '%s' use URL '%s'", d.Name, d.Webhook))

return &protobufs.Empty{}, nil
}

func main() {
- var handshake = plugin.HandshakeConfig{
+ handshake := plugin.HandshakeConfig{
ProtocolVersion: 1,
MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),
@@ -10,11 +10,11 @@ import (
"os"
"strings"

- "github.com/crowdsecurity/crowdsec/pkg/protobufs"
"github.com/hashicorp/go-hclog"
plugin "github.com/hashicorp/go-plugin"
+ "gopkg.in/yaml.v3"

- "gopkg.in/yaml.v2"
+ "github.com/crowdsecurity/crowdsec/pkg/protobufs"
)

var logger hclog.Logger = hclog.New(&hclog.LoggerOptions{

@@ -44,6 +44,7 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio
if _, ok := s.PluginConfigByName[notification.Name]; !ok {
return &protobufs.Empty{}, fmt.Errorf("splunk invalid config name %s", notification.Name)
}

cfg := s.PluginConfigByName[notification.Name]

if cfg.LogLevel != nil && *cfg.LogLevel != "" {

@@ -53,6 +54,7 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio
logger.Info(fmt.Sprintf("received notify signal for %s config", notification.Name))

p := Payload{Event: notification.Text}

data, err := json.Marshal(p)
if err != nil {
return &protobufs.Empty{}, err

@@ -65,6 +67,7 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio

req.Header.Add("Authorization", fmt.Sprintf("Splunk %s", cfg.Token))
logger.Debug(fmt.Sprintf("posting event %s to %s", string(data), req.URL))

resp, err := s.Client.Do(req.WithContext(ctx))
if err != nil {
return &protobufs.Empty{}, err

@@ -73,15 +76,19 @@ func (s *Splunk) Notify(ctx context.Context, notification *protobufs.Notificatio
if resp.StatusCode != http.StatusOK {
content, err := io.ReadAll(resp.Body)
if err != nil {
- return &protobufs.Empty{}, fmt.Errorf("got non 200 response and failed to read error %s", err)
+ return &protobufs.Empty{}, fmt.Errorf("got non 200 response and failed to read error %w", err)
}

return &protobufs.Empty{}, fmt.Errorf("got non 200 response %s", string(content))
}

respData, err := io.ReadAll(resp.Body)
if err != nil {
- return &protobufs.Empty{}, fmt.Errorf("failed to read response body got error %s", err)
+ return &protobufs.Empty{}, fmt.Errorf("failed to read response body got error %w", err)
}

logger.Debug(fmt.Sprintf("got response %s", string(respData)))

return &protobufs.Empty{}, nil
}

@@ -90,11 +97,12 @@ func (s *Splunk) Configure(ctx context.Context, config *protobufs.Config) (*prot
err := yaml.Unmarshal(config.Config, &d)
s.PluginConfigByName[d.Name] = d
logger.Debug(fmt.Sprintf("Splunk plugin '%s' use URL '%s'", d.Name, d.URL))

return &protobufs.Empty{}, err
}

func main() {
- var handshake = plugin.HandshakeConfig{
+ handshake := plugin.HandshakeConfig{
ProtocolVersion: 1,
MagicCookieKey: "CROWDSEC_PLUGIN_KEY",
MagicCookieValue: os.Getenv("CROWDSEC_PLUGIN_KEY"),
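The %s → %w changes in the error paths above matter for callers: %w keeps the original error in the chain so errors.Is and errors.As keep working, while %s flattens it to text. A minimal illustration, with the io failure simulated rather than produced by a real request:

package main

import (
    "errors"
    "fmt"
    "io"
)

func readBody() error {
    // pretend io.ReadAll failed with an unexpected EOF
    return fmt.Errorf("failed to read response body got error %w", io.ErrUnexpectedEOF)
}

func main() {
    err := readBody()
    fmt.Println(errors.Is(err, io.ErrUnexpectedEOF)) // true, because %w preserved the cause
    // with %s the same check would print false: the cause would only survive as text
}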
4 go.mod

@@ -1,6 +1,6 @@
module github.com/crowdsecurity/crowdsec

- go 1.21
+ go 1.22

// Don't use the toolchain directive to avoid uncontrolled downloads during
// a build, especially in sandboxed environments (freebsd, gentoo...).

@@ -27,7 +27,7 @@ require (
github.com/corazawaf/libinjection-go v0.1.2
github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607
github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26
- github.com/crowdsecurity/go-cs-lib v0.0.6
+ github.com/crowdsecurity/go-cs-lib v0.0.10
github.com/crowdsecurity/grokky v0.2.1
github.com/crowdsecurity/machineid v1.0.2
github.com/davecgh/go-spew v1.1.1
4 go.sum

@@ -102,8 +102,8 @@ github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607 h1:hyrYw3h
github.com/crowdsecurity/coraza/v3 v3.0.0-20240108124027-a62b8d8e5607/go.mod h1:br36fEqurGYZQGit+iDYsIzW0FF6VufMbDzyyLxEuPA=
github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26 h1:r97WNVC30Uen+7WnLs4xDScS/Ex988+id2k6mDf8psU=
github.com/crowdsecurity/dlog v0.0.0-20170105205344-4fb5f8204f26/go.mod h1:zpv7r+7KXwgVUZnUNjyP22zc/D7LKjyoY02weH2RBbk=
- github.com/crowdsecurity/go-cs-lib v0.0.6 h1:Ef6MylXe0GaJE9vrfvxEdbHb31+JUP1os+murPz7Pos=
+ github.com/crowdsecurity/go-cs-lib v0.0.10 h1:Twt/y/rYCUspGY1zxDnGurL2svRSREAz+2+puLepd9c=
- github.com/crowdsecurity/go-cs-lib v0.0.6/go.mod h1:8FMKNGsh3hMZi2SEv6P15PURhEJnZV431XjzzBSuf0k=
+ github.com/crowdsecurity/go-cs-lib v0.0.10/go.mod h1:8FMKNGsh3hMZi2SEv6P15PURhEJnZV431XjzzBSuf0k=
github.com/crowdsecurity/grokky v0.2.1 h1:t4VYnDlAd0RjDM2SlILalbwfCrQxtJSMGdQOR0zwkE4=
github.com/crowdsecurity/grokky v0.2.1/go.mod h1:33usDIYzGDsgX1kHAThCbseso6JuWNJXOzRQDGXHtWM=
github.com/crowdsecurity/machineid v1.0.2 h1:wpkpsUghJF8Khtmn/tg6GxgdhLA1Xflerh5lirI+bdc=
@@ -104,7 +104,7 @@ func LoadConsoleContext(c *csconfig.Config, hub *cwhub.Hub) error {
c.Crowdsec.ContextToSend = make(map[string][]string, 0)

if hub != nil {
- items, err := hub.GetInstalledItems(cwhub.CONTEXTS)
+ items, err := hub.GetInstalledItemsByType(cwhub.CONTEXTS)
if err != nil {
return err
}
@@ -39,7 +39,6 @@ type ApiClient struct {
Metrics *MetricsService
Signal *SignalService
HeartBeat *HeartBeatService
- UsageMetrics *UsageMetricsService
}

func (a *ApiClient) GetClient() *http.Client {

@@ -102,7 +101,6 @@ func NewClient(config *Config) (*ApiClient, error) {
c.Signal = (*SignalService)(&c.common)
c.DecisionDelete = (*DecisionDeleteService)(&c.common)
c.HeartBeat = (*HeartBeatService)(&c.common)
- c.UsageMetrics = (*UsageMetricsService)(&c.common)

return c, nil
}

@@ -139,7 +137,6 @@ func NewDefaultClient(URL *url.URL, prefix string, userAgent string, client *htt
c.Signal = (*SignalService)(&c.common)
c.DecisionDelete = (*DecisionDeleteService)(&c.common)
c.HeartBeat = (*HeartBeatService)(&c.common)
- c.UsageMetrics = (*UsageMetricsService)(&c.common)

return c, nil
}
@@ -34,18 +34,12 @@ func CheckResponse(r *http.Response) error {

data, err := io.ReadAll(r.Body)
if err != nil || len(data) == 0 {
- ret.Message = ptr.Of(fmt.Sprintf("http code %d, no response body", r.StatusCode))
+ ret.Message = ptr.Of(fmt.Sprintf("http code %d, no error message", r.StatusCode))
return ret
}

- switch r.StatusCode {
- case 422:
- ret.Message = ptr.Of(fmt.Sprintf("http code %d, invalid request: %s", r.StatusCode, string(data)))
- default:
- if err := json.Unmarshal(data, ret); err != nil {
- ret.Message = ptr.Of(fmt.Sprintf("http code %d, invalid body: %s", r.StatusCode, string(data)))
- return ret
- }
+ if err := json.Unmarshal(data, ret); err != nil {
+ return fmt.Errorf("http code %d, invalid body: %w", r.StatusCode, err)
}

return ret
@@ -1,28 +0,0 @@
- package apiclient
-
- import (
- "context"
- "fmt"
- "net/http"
-
- "github.com/crowdsecurity/crowdsec/pkg/models"
- )
-
- type UsageMetricsService service
-
- func (s *UsageMetricsService) Add(ctx context.Context, metrics *models.AllMetrics) (interface{}, *Response, error) {
- u := fmt.Sprintf("%s/usage-metrics", s.client.URLPrefix)
-
- req, err := s.client.NewRequest(http.MethodPost, u, &metrics)
- if err != nil {
- return nil, nil, err
- }
- var response interface{}
-
- resp, err := s.client.Do(ctx, req, &response)
- if err != nil {
- return nil, resp, err
- }
-
- return &response, resp, nil
- }
@@ -2,170 +2,18 @@ package apiserver

import (
"context"
- "encoding/json"
- "strings"
"time"

- "slices"
-
log "github.com/sirupsen/logrus"
+ "slices"

"github.com/crowdsecurity/go-cs-lib/ptr"
"github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/go-cs-lib/version"

- "github.com/crowdsecurity/crowdsec/pkg/cwversion"
- "github.com/crowdsecurity/crowdsec/pkg/database/ent"
- "github.com/crowdsecurity/crowdsec/pkg/fflag"
"github.com/crowdsecurity/crowdsec/pkg/models"
)

- func (a *apic) GetUsageMetrics() (*models.AllMetrics, []int, error) {
- lpsMetrics, err := a.dbClient.GetLPsUsageMetrics()
- metricsIds := make([]int, 0)
-
- if err != nil {
- return nil, nil, err
- }
-
- //spew.Dump(lpsMetrics)
-
- bouncersMetrics, err := a.dbClient.GetBouncersUsageMetrics()
- if err != nil {
- return nil, nil, err
- }
-
- //spew.Dump(bouncersMetrics)
-
- allMetrics := &models.AllMetrics{}
-
- lpsCache := make(map[string]*ent.Machine)
- bouncersCache := make(map[string]*ent.Bouncer)
-
- for _, lpsMetric := range lpsMetrics {
- lpName := lpsMetric.GeneratedBy
- metrics := models.LogProcessorsMetrics{}
-
- err := json.Unmarshal([]byte(lpsMetric.Payload), &metrics)
- if err != nil {
- log.Errorf("unable to unmarshal LPs metrics (%s)", err)
- continue
- }
-
- var lp *ent.Machine
-
- if _, ok := lpsCache[lpName]; !ok {
- lp, err = a.dbClient.QueryMachineByID(lpName)
-
- if err != nil {
- log.Errorf("unable to get LP information for %s: %s", lpName, err)
- continue
- }
- } else {
- lp = lpsCache[lpName]
- }
-
- if lp.Hubstate != nil {
- metrics.HubItems = *lp.Hubstate
- }
-
- metrics.Os = &models.OSversion{
- Name: lp.Osname,
- Version: lp.Osversion,
- }
-
- metrics.FeatureFlags = strings.Split(lp.Featureflags, ",")
- metrics.Version = &lp.Version
-
- metrics.Name = lpName
- metrics.LastPush = lp.LastPush.UTC().Unix()
- metrics.LastUpdate = lp.UpdatedAt.UTC().Unix()
-
- //To prevent marshalling a nil slice to null, which gets rejected by the API
- if metrics.Metrics == nil {
- metrics.Metrics = make([]*models.MetricsDetailItem, 0)
- }
-
- allMetrics.LogProcessors = append(allMetrics.LogProcessors, &metrics)
- metricsIds = append(metricsIds, lpsMetric.ID)
- }
-
- for _, bouncersMetric := range bouncersMetrics {
- bouncerName := bouncersMetric.GeneratedBy
- metrics := models.RemediationComponentsMetrics{}
-
- err := json.Unmarshal([]byte(bouncersMetric.Payload), &metrics)
- if err != nil {
- log.Errorf("unable to unmarshal bouncers metrics (%s)", err)
- continue
- }
-
- var bouncer *ent.Bouncer
-
- if _, ok := bouncersCache[bouncerName]; !ok {
- bouncer, err = a.dbClient.SelectBouncerByName(bouncerName)
- if err != nil {
- log.Errorf("unable to get bouncer information for %s: %s", bouncerName, err)
- continue
- }
- } else {
- bouncer = bouncersCache[bouncerName]
- }
-
- metrics.Os = &models.OSversion{
- Name: bouncer.Osname,
- Version: bouncer.Osversion,
- }
- metrics.Type = bouncer.Type
- metrics.FeatureFlags = strings.Split(bouncer.Featureflags, ",")
- metrics.Version = &bouncer.Version
- metrics.Name = bouncerName
- metrics.LastPull = bouncer.LastPull.UTC().Unix()
-
- //To prevent marshalling a nil slice to null, which gets rejected by the API
- if metrics.Metrics == nil {
- metrics.Metrics = make([]*models.MetricsDetailItem, 0)
- }
-
- allMetrics.RemediationComponents = append(allMetrics.RemediationComponents, &metrics)
- metricsIds = append(metricsIds, bouncersMetric.ID)
- }
-
- //FIXME: all of this should only be done once on startup/reload
- allMetrics.Lapi = &models.LapiMetrics{
- ConsoleOptions: models.ConsoleOptions{
- "FIXME",
- },
- }
- allMetrics.Lapi.Os = &models.OSversion{
- Name: "FIXME",
- Version: "FIXME",
- }
- allMetrics.Lapi.Version = ptr.Of(cwversion.VersionStr())
- allMetrics.Lapi.FeatureFlags = fflag.Crowdsec.GetEnabledFeatures()
-
- allMetrics.Lapi.Meta = &models.MetricsMeta{
- UtcStartupTimestamp: time.Now().UTC().Unix(),
- UtcNowTimestamp: time.Now().UTC().Unix(),
- WindowSizeSeconds: int64(a.metricsInterval.Seconds()),
- }
- allMetrics.Lapi.Metrics = make([]*models.MetricsDetailItem, 0)
-
- if allMetrics.RemediationComponents == nil {
- allMetrics.RemediationComponents = make([]*models.RemediationComponentsMetrics, 0)
- }
-
- if allMetrics.LogProcessors == nil {
- allMetrics.LogProcessors = make([]*models.LogProcessorsMetrics, 0)
- }
-
- return allMetrics, metricsIds, nil
- }
-
- func (a *apic) MarkUsageMetricsAsSent(ids []int) error {
- return a.dbClient.MarkUsageMetricsAsSent(ids)
- }
-
func (a *apic) GetMetrics() (*models.Metrics, error) {
machines, err := a.dbClient.ListMachines()
if err != nil {

@@ -312,37 +160,3 @@ func (a *apic) SendMetrics(stop chan (bool)) {
}
}
}
-
- func (a *apic) SendUsageMetrics() {
- defer trace.CatchPanic("lapi/usageMetricsToAPIC")
-
- ticker := time.NewTicker(5 * time.Second)
-
- for {
- select {
- case <-a.metricsTomb.Dying():
- //The normal metrics routine also kills push/pull tombs, does that make sense ?
- ticker.Stop()
- return
- case <-ticker.C:
- metrics, metricsId, err := a.GetUsageMetrics()
- if err != nil {
- log.Errorf("unable to get usage metrics: %s", err)
- continue
- }
- _, _, err = a.apiClient.UsageMetrics.Add(context.Background(), metrics)
-
- if err != nil {
- log.Errorf("unable to send usage metrics: %s", err)
- continue
- }
- err = a.MarkUsageMetricsAsSent(metricsId)
- if err != nil {
- log.Errorf("unable to mark usage metrics as sent: %s", err)
- continue
- }
- log.Infof("Usage metrics sent")
-
- }
- }
- }
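The deleted SendUsageMetrics goroutine follows a common ticker/select shape: tick, collect, push, mark as sent, and bail out when the tomb dies. A stripped-down sketch of that loop with a plain done channel standing in for the tomb; every name below is a stand-in for illustration, not the project's API:

package main

import (
    "fmt"
    "log"
    "time"
)

// collectAndSend is a stand-in for GetUsageMetrics + UsageMetrics.Add + MarkUsageMetricsAsSent.
func collectAndSend() error {
    fmt.Println("usage metrics sent")
    return nil
}

func sendLoop(done <-chan struct{}) {
    ticker := time.NewTicker(5 * time.Second)
    defer ticker.Stop()

    for {
        select {
        case <-done: // the real code watches metricsTomb.Dying() here
            return
        case <-ticker.C:
            if err := collectAndSend(); err != nil {
                log.Printf("unable to send usage metrics: %s", err)
                continue // keep the loop alive and retry on the next tick
            }
        }
    }
}

func main() {
    done := make(chan struct{})
    go sendLoop(done)

    time.Sleep(11 * time.Second) // let a couple of ticks fire
    close(done)
}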
@@ -25,7 +25,6 @@ import (
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/csplugin"
"github.com/crowdsecurity/crowdsec/pkg/database"
- "github.com/crowdsecurity/crowdsec/pkg/fflag"
"github.com/crowdsecurity/crowdsec/pkg/types"
)

@@ -85,11 +84,16 @@ func recoverFromPanic(c *gin.Context) {
}

if brokenPipe {
- log.Warningf("client %s disconnected : %s", c.ClientIP(), err)
+ log.Warningf("client %s disconnected: %s", c.ClientIP(), err)
c.Abort()
} else {
- filename := trace.WriteStackTrace(err)
- log.Warningf("client %s error : %s", c.ClientIP(), err)
+ log.Warningf("client %s error: %s", c.ClientIP(), err)
+
+ filename, err := trace.WriteStackTrace(err)
+ if err != nil {
+ log.Errorf("also while writing stacktrace: %s", err)
+ }
+
log.Warningf("stacktrace written to %s, please join to your issue", filename)
c.AbortWithStatus(http.StatusInternalServerError)
}

@@ -361,15 +365,6 @@ func (s *APIServer) Run(apiReady chan bool) error {
s.apic.SendMetrics(make(chan bool))
return nil
})

- if fflag.CAPIUsageMetrics.IsEnabled() {
- log.Infof("CAPI_USAGE_METRICS flag is enabled, starting usage metrics routine")
- s.apic.metricsTomb.Go(func() error {
- s.apic.SendUsageMetrics()
- return nil
- })
- }
-
}

s.httpServerTomb.Go(func() error {

@@ -378,7 +373,7 @@ func (s *APIServer) Run(apiReady chan bool) error {

if err := s.httpServerTomb.Wait(); err != nil {
return fmt.Errorf("local API server stopped with error: %w", err)
}

return nil
}
@@ -59,17 +59,6 @@ func serveHealth() http.HandlerFunc {
return health.NewHandler(checker)
}

- func eitherAuthMiddleware(jwtMiddleware gin.HandlerFunc, apiKeyMiddleware gin.HandlerFunc) gin.HandlerFunc {
- return func(c *gin.Context) {
- // XXX: what when there's no api key for a RC?
- if c.GetHeader("X-Api-Key") != "" {
- apiKeyMiddleware(c)
- } else {
- jwtMiddleware(c)
- }
- }
- }
-
func (c *Controller) NewV1() error {
var err error

@@ -128,12 +117,6 @@ func (c *Controller) NewV1() error {
apiKeyAuth.HEAD("/decisions/stream", c.HandlerV1.StreamDecision)
}

- eitherAuth := groupV1.Group("")
- eitherAuth.Use(eitherAuthMiddleware(c.HandlerV1.Middlewares.JWT.Middleware.MiddlewareFunc(), c.HandlerV1.Middlewares.APIKey.MiddlewareFunc()))
- {
- eitherAuth.POST("/usage-metrics", c.HandlerV1.UsageMetrics)
- }
-
return nil
}
@@ -1,144 +0,0 @@
- package v1
-
- import (
- "encoding/json"
- "fmt"
- "net/http"
- "time"
-
- "github.com/gin-gonic/gin"
- "github.com/go-openapi/strfmt"
- log "github.com/sirupsen/logrus"
-
- "github.com/crowdsecurity/crowdsec/pkg/database/ent"
- "github.com/crowdsecurity/crowdsec/pkg/database/ent/metric"
- "github.com/crowdsecurity/crowdsec/pkg/models"
- )
-
- // updateBaseMetrics updates the base metrics for a machine or bouncer
- func (c *Controller) updateBaseMetrics(machineID string, bouncer *ent.Bouncer, baseMetrics *models.BaseMetrics, hubItems *models.HubItems) error {
- switch {
- case machineID != "":
- c.DBClient.MachineUpdateBaseMetrics(machineID, baseMetrics, hubItems)
- case bouncer != nil:
- c.DBClient.BouncerUpdateBaseMetrics(bouncer.Name, bouncer.Type, baseMetrics)
- default:
- return fmt.Errorf("no machineID or bouncerName set")
- }
-
- return nil
- }
-
- // UsageMetrics receives metrics from log processors and remediation components
- func (c *Controller) UsageMetrics(gctx *gin.Context) {
- var input models.AllMetrics
-
- // parse the payload
-
- if err := gctx.ShouldBindJSON(&input); err != nil {
- log.Errorf("Failed to bind json: %s", err)
- gctx.JSON(http.StatusBadRequest, gin.H{"message": err.Error()})
- return
- }
-
- if err := input.Validate(strfmt.Default); err != nil {
- log.Errorf("Failed to validate usage metrics: %s", err)
- c.HandleDBErrors(gctx, err)
- return
- }
-
- // TODO: validate payload with the right type, depending on auth context
-
- var (
- generatedType metric.GeneratedType
- generatedBy string
- collectedAt time.Time
- )
-
- bouncer, _ := getBouncerFromContext(gctx)
- if bouncer != nil {
- log.Tracef("Received usage metris for bouncer: %s", bouncer.Name)
- generatedType = metric.GeneratedTypeRC
- generatedBy = bouncer.Name
- }
-
- machineID, _ := getMachineIDFromContext(gctx)
- if machineID != "" {
- log.Tracef("Received usage metrics for log processor: %s", machineID)
- generatedType = metric.GeneratedTypeLP
- generatedBy = machineID
- }
-
- // TODO: if both or none are set, which error should we return?
-
- var (
- payload map[string]any
- baseMetrics models.BaseMetrics
- hubItems models.HubItems
- )
-
- switch len(input.LogProcessors) {
- case 0:
- break
- case 1:
- // the final slice can't have more than one item,
- // guaranteed by the swagger schema
- item0 := input.LogProcessors[0]
- payload = map[string]any{
- "console_options": item0.ConsoleOptions,
- "datasources": item0.Datasources,
- "metrics": item0.Metrics,
- "meta": item0.Meta,
- }
- baseMetrics = item0.BaseMetrics
- hubItems = item0.HubItems
- default:
- log.Errorf("Payload has more than one log processor")
- // this is not checked in the swagger schema
- gctx.JSON(http.StatusBadRequest, gin.H{"message": "Payload has more than one log processor"})
- return
- }
-
- switch len(input.RemediationComponents) {
- case 0:
- break
- case 1:
- item0 := input.RemediationComponents[0]
- payload = map[string]any{
- "type": item0.Type,
- "metrics": item0.Metrics,
- "meta": item0.Meta,
- }
- baseMetrics = item0.BaseMetrics
- default:
- gctx.JSON(http.StatusBadRequest, gin.H{"message": "Payload has more than one remediation component"})
- return
- }
-
- err := c.updateBaseMetrics(machineID, bouncer, &baseMetrics, &hubItems)
- if err != nil {
- log.Errorf("Failed to update base metrics: %s", err)
- c.HandleDBErrors(gctx, err)
- return
- }
-
- collectedAt = time.Unix(baseMetrics.Meta.UtcNowTimestamp, 0).UTC()
-
- jsonPayload, err := json.Marshal(payload)
- if err != nil {
- log.Errorf("Failed to marshal usage metrics: %s", err)
- c.HandleDBErrors(gctx, err)
- return
- }
-
- if _, err := c.DBClient.CreateMetric(generatedType, generatedBy, collectedAt, string(jsonPayload)); err != nil {
- log.Error(err)
- c.HandleDBErrors(gctx, err)
- return
- }
-
- // if CreateMetrics() returned nil, the metric was already there, we're good
- // and don't split hair about 201 vs 200/204
-
- gctx.Status(http.StatusCreated)
- }
@@ -4,7 +4,6 @@ import (
"fmt"
"os"
"path/filepath"
- "time"

log "github.com/sirupsen/logrus"
"gopkg.in/yaml.v3"

@@ -12,11 +11,6 @@ import (
"github.com/crowdsecurity/go-cs-lib/ptr"
)

- const (
- defaultMetricsInterval = 30 * time.Minute
- minimumMetricsInterval = 15 * time.Minute
- )
-
// CrowdsecServiceCfg contains the location of parsers/scenarios/... and acquisition files
type CrowdsecServiceCfg struct {
Enable *bool `yaml:"enable"`

@@ -32,7 +26,6 @@ type CrowdsecServiceCfg struct {
BucketStateFile string `yaml:"state_input_file,omitempty"` // if we need to unserialize buckets at start
BucketStateDumpDir string `yaml:"state_output_dir,omitempty"` // if we need to unserialize buckets on shutdown
BucketsGCEnabled bool `yaml:"-"` // we need to garbage collect buckets when in forensic mode
- MetricsInterval *time.Duration `yaml:"metrics_interval,omitempty"`

SimulationFilePath string `yaml:"-"`
ContextToSend map[string][]string `yaml:"-"`

@@ -139,8 +132,6 @@ func (c *Config) LoadCrowdsec() error {
c.Crowdsec.AcquisitionFiles[i] = f
}

- c.Crowdsec.setMetricsInterval()
-
if err = c.LoadAPIClient(); err != nil {
return fmt.Errorf("loading api client: %w", err)
}

@@ -148,21 +139,6 @@ func (c *Config) LoadCrowdsec() error {
return nil
}

- func (c *CrowdsecServiceCfg) setMetricsInterval() {
- switch {
- case c.MetricsInterval == nil:
- log.Tracef("metrics_interval is not set, default to %s", defaultMetricsInterval)
- c.MetricsInterval = ptr.Of(defaultMetricsInterval)
- case *c.MetricsInterval == time.Duration(0):
- log.Info("metrics_interval is set to 0, disabling metrics")
- case *c.MetricsInterval < minimumMetricsInterval:
- log.Warnf("metrics_interval is too low (%s), setting it to %s", *c.MetricsInterval, minimumMetricsInterval)
- c.MetricsInterval = ptr.Of(minimumMetricsInterval)
- default:
- log.Tracef("metrics_interval set to %s", c.MetricsInterval)
- }
- }
-
func (c *CrowdsecServiceCfg) DumpContextConfigFile() error {
// XXX: MakeDirs
out, err := yaml.Marshal(c.ContextToSend)

@@ -58,7 +58,6 @@ func TestLoadCrowdsec(t *testing.T) {
ParserRoutinesCount: 1,
OutputRoutinesCount: 1,
ConsoleContextValueLength: 2500,
- MetricsInterval: ptr.Of(defaultMetricsInterval),
AcquisitionFiles: []string{acquisFullPath},
SimulationFilePath: "./testdata/simulation.yaml",
// context is loaded in pkg/alertcontext

@@ -99,7 +98,6 @@ func TestLoadCrowdsec(t *testing.T) {
ParserRoutinesCount: 1,
OutputRoutinesCount: 1,
ConsoleContextValueLength: 0,
- MetricsInterval: ptr.Of(defaultMetricsInterval),
AcquisitionFiles: []string{acquisFullPath, acquisInDirFullPath},
// context is loaded in pkg/alertcontext
// ContextToSend: map[string][]string{

@@ -138,7 +136,6 @@ func TestLoadCrowdsec(t *testing.T) {
ParserRoutinesCount: 1,
OutputRoutinesCount: 1,
ConsoleContextValueLength: 10,
- MetricsInterval: ptr.Of(defaultMetricsInterval),
AcquisitionFiles: []string{},
SimulationFilePath: "",
// context is loaded in pkg/alertcontext
@@ -39,7 +39,6 @@ type DatabaseCfg struct {
}

type AuthGCCfg struct {
- // XXX: define these as custom type (with days etc.) ?
Cert *string `yaml:"cert,omitempty"`
CertDuration *time.Duration
Api *string `yaml:"api_key,omitempty"`

@@ -49,12 +48,11 @@ type AuthGCCfg struct {
}

type FlushDBCfg struct {
MaxItems *int `yaml:"max_items,omitempty"`
// We could unmarshal as time.Duration, but alert filters right now are a map of strings
MaxAge *string `yaml:"max_age,omitempty"`
BouncersGC *AuthGCCfg `yaml:"bouncers_autodelete,omitempty"`
AgentsGC *AuthGCCfg `yaml:"agents_autodelete,omitempty"`
- MetricsMaxAge *time.Duration `yaml:"metrics_max_age,omitempty"`
}

func (c *Config) LoadDBConfig(inCli bool) error {

@@ -78,26 +76,24 @@ func (c *Config) LoadDBConfig(inCli bool) error {
if c.DbConfig.UseWal == nil {
dbDir := filepath.Dir(c.DbConfig.DbPath)
isNetwork, fsType, err := types.IsNetworkFS(dbDir)
- if err != nil {
+ switch {
+ case err != nil:
log.Warnf("unable to determine if database is on network filesystem: %s", err)
log.Warning("You are using sqlite without WAL, this can have a performance impact. If you do not store the database in a network share, set db_config.use_wal to true. Set explicitly to false to disable this warning.")
- return nil
- }
- if isNetwork {
+ case isNetwork:
log.Debugf("database is on network filesystem (%s), setting useWal to false", fsType)
c.DbConfig.UseWal = ptr.Of(false)
- } else {
+ default:
log.Debugf("database is on local filesystem (%s), setting useWal to true", fsType)
c.DbConfig.UseWal = ptr.Of(true)
}
} else if *c.DbConfig.UseWal {
dbDir := filepath.Dir(c.DbConfig.DbPath)
isNetwork, fsType, err := types.IsNetworkFS(dbDir)
- if err != nil {
+ switch {
+ case err != nil:
log.Warnf("unable to determine if database is on network filesystem: %s", err)
- return nil
- }
- if isNetwork {
+ case isNetwork:
log.Warnf("database seems to be stored on a network share (%s), but useWal is set to true. Proceed at your own risk.", fsType)
}
}
@@ -214,9 +214,9 @@ func (h *Hub) GetItemFQ(itemFQName string) (*Item, error) {
 return i, nil
 }

-// GetItemNames returns a slice of (full) item names for a given type
+// GetNamesByType returns a slice of (full) item names for a given type
 // (eg. for collections: crowdsecurity/apache2 crowdsecurity/nginx).
-func (h *Hub) GetItemNames(itemType string) []string {
+func (h *Hub) GetNamesByType(itemType string) []string {
 m := h.GetItemMap(itemType)
 if m == nil {
 return nil
@@ -230,8 +230,8 @@ func (h *Hub) GetItemNames(itemType string) []string {
 return names
 }

-// GetAllItems returns a slice of all the items of a given type, installed or not.
-func (h *Hub) GetAllItems(itemType string) ([]*Item, error) {
+// GetItemsByType returns a slice of all the items of a given type, installed or not.
+func (h *Hub) GetItemsByType(itemType string) ([]*Item, error) {
 if !slices.Contains(ItemTypes, itemType) {
 return nil, fmt.Errorf("invalid item type %s", itemType)
 }
@@ -250,8 +250,8 @@ func (h *Hub) GetAllItems(itemType string) ([]*Item, error) {
 return ret, nil
 }

-// GetInstalledItems returns a slice of the installed items of a given type.
-func (h *Hub) GetInstalledItems(itemType string) ([]*Item, error) {
+// GetInstalledItemsByType returns a slice of the installed items of a given type.
+func (h *Hub) GetInstalledItemsByType(itemType string) ([]*Item, error) {
 if !slices.Contains(ItemTypes, itemType) {
 return nil, fmt.Errorf("invalid item type %s", itemType)
 }
@@ -269,9 +269,9 @@ func (h *Hub) GetInstalledItems(itemType string) ([]*Item, error) {
 return retItems, nil
 }

-// GetInstalledItemNames returns the names of the installed items of a given type.
-func (h *Hub) GetInstalledItemNames(itemType string) ([]string, error) {
-items, err := h.GetInstalledItems(itemType)
+// GetInstalledNamesByType returns the names of the installed items of a given type.
+func (h *Hub) GetInstalledNamesByType(itemType string) ([]string, error) {
+items, err := h.GetInstalledItemsByType(itemType)
 if err != nil {
 return nil, err
 }
@@ -636,14 +636,24 @@ func (c *Client) createAlertChunk(machineID string, owner *ent.Machine, alerts [
 if len(alertItem.Meta) > 0 {
 metaBulk := make([]*ent.MetaCreate, len(alertItem.Meta))
 for i, metaItem := range alertItem.Meta {
+key := metaItem.Key
+value := metaItem.Value
+if len(metaItem.Value) > 4095 {
+c.Log.Warningf("truncated meta %s : value too long", metaItem.Key)
+value = value[:4095]
+}
+if len(metaItem.Key) > 255 {
+c.Log.Warningf("truncated meta %s : key too long", metaItem.Key)
+key = key[:255]
+}
 metaBulk[i] = c.Ent.Meta.Create().
-SetKey(metaItem.Key).
-SetValue(metaItem.Value)
+SetKey(key).
+SetValue(value)
 }

 metas, err = c.Ent.Meta.CreateBulk(metaBulk...).Save(c.CTX)
 if err != nil {
-return nil, errors.Wrapf(BulkError, "creating alert meta: %s", err)
+c.Log.Warningf("error creating alert meta: %s", err)
 }
 }

@@ -2,37 +2,14 @@ package database

 import (
 "fmt"
-"strings"
 "time"

 "github.com/pkg/errors"

-"github.com/crowdsecurity/crowdsec/pkg/models"
 "github.com/crowdsecurity/crowdsec/pkg/database/ent"
 "github.com/crowdsecurity/crowdsec/pkg/database/ent/bouncer"
 )

-func (c *Client) BouncerUpdateBaseMetrics(bouncerName string, bouncerType string, baseMetrics *models.BaseMetrics) error {
-os := baseMetrics.Os
-features := strings.Join(baseMetrics.FeatureFlags, ",")
-
-// XXX: bouncers have no heartbeat, they have "last pull", are we updating it?
-
-_, err := c.Ent.Bouncer.
-Update().
-Where(bouncer.NameEQ(bouncerName)).
-SetNillableVersion(baseMetrics.Version).
-SetOsname(os.Name).
-SetOsversion(os.Version).
-SetFeatureflags(features).
-SetType(bouncerType).
-Save(c.CTX)
-if err != nil {
-return fmt.Errorf("unable to update base bouncer metrics in database: %s", err)
-}
-return nil
-}
-
 func (c *Client) SelectBouncer(apiKeyHash string) (*ent.Bouncer, error) {
 result, err := c.Ent.Bouncer.Query().Where(bouncer.APIKeyEQ(apiKeyHash)).First(c.CTX)
 if err != nil {
@@ -37,6 +37,7 @@ func BuildDecisionRequestWithFilter(query *ent.DecisionQuery, filter map[string]
 if v[0] == "false" {
 query = query.Where(decision.SimulatedEQ(false))
 }

 delete(filter, "simulated")
 } else {
 query = query.Where(decision.SimulatedEQ(false))
@@ -49,7 +50,7 @@ func BuildDecisionRequestWithFilter(query *ent.DecisionQuery, filter map[string]
 if err != nil {
 return nil, errors.Wrapf(InvalidFilter, "invalid contains value : %s", err)
 }
-case "scopes", "scope": //Swagger mentions both of them, let's just support both to make sure we don't break anything
+case "scopes", "scope": // Swagger mentions both of them, let's just support both to make sure we don't break anything
 scopes := strings.Split(value[0], ",")
 for i, scope := range scopes {
 switch strings.ToLower(scope) {
@@ -63,6 +64,7 @@ func BuildDecisionRequestWithFilter(query *ent.DecisionQuery, filter map[string]
 scopes[i] = types.AS
 }
 }

 query = query.Where(decision.ScopeIn(scopes...))
 case "value":
 query = query.Where(decision.ValueEQ(value[0]))
@@ -164,11 +166,11 @@ func (c *Client) QueryExpiredDecisionsWithFilters(filters map[string][]string) (
 return data, nil
 }

-func (c *Client) QueryDecisionCountByScenario(filters map[string][]string) ([]*DecisionsByScenario, error) {
+func (c *Client) QueryDecisionCountByScenario() ([]*DecisionsByScenario, error) {
 query := c.Ent.Decision.Query().Where(
 decision.UntilGT(time.Now().UTC()),
 )
-query, err := BuildDecisionRequestWithFilter(query, filters)
+query, err := BuildDecisionRequestWithFilter(query, make(map[string][]string))

 if err != nil {
 c.Log.Warningf("QueryDecisionCountByScenario : %s", err)
@@ -277,10 +279,12 @@ func (c *Client) QueryNewDecisionsSinceWithFilters(since time.Time, filters map[
 decision.CreatedAtGT(since),
 decision.UntilGT(time.Now().UTC()),
 )
-//Allow a bouncer to ask for non-deduplicated results
+// Allow a bouncer to ask for non-deduplicated results
 if v, ok := filters["dedup"]; !ok || v[0] != "false" {
 query = query.Where(longestDecisionForScopeTypeValue)
 }

 query, err := BuildDecisionRequestWithFilter(query, filters)
 if err != nil {
 c.Log.Warningf("QueryNewDecisionsSinceWithFilters : %s", err)
@@ -294,17 +298,20 @@ func (c *Client) QueryNewDecisionsSinceWithFilters(since time.Time, filters map[
 c.Log.Warningf("QueryNewDecisionsSinceWithFilters : %s", err)
 return []*ent.Decision{}, errors.Wrapf(QueryFail, "new decisions since '%s'", since.String())
 }

 return data, nil
 }

-func (c *Client) DeleteDecisionById(decisionId int) ([]*ent.Decision, error) {
-toDelete, err := c.Ent.Decision.Query().Where(decision.IDEQ(decisionId)).All(c.CTX)
+func (c *Client) DeleteDecisionById(decisionID int) ([]*ent.Decision, error) {
+toDelete, err := c.Ent.Decision.Query().Where(decision.IDEQ(decisionID)).All(c.CTX)
 if err != nil {
 c.Log.Warningf("DeleteDecisionById : %s", err)
-return nil, errors.Wrapf(DeleteFail, "decision with id '%d' doesn't exist", decisionId)
+return nil, errors.Wrapf(DeleteFail, "decision with id '%d' doesn't exist", decisionID)
 }

 count, err := c.BulkDeleteDecisions(toDelete, false)
 c.Log.Debugf("deleted %d decisions", count)

 return toDelete, err
 }

@@ -317,6 +324,7 @@ func (c *Client) DeleteDecisionsWithFilter(filter map[string][]string) (string,
 else, return bans that are *contained* by the given value (value is the outer) */

 decisions := c.Ent.Decision.Query()

 for param, value := range filter {
 switch param {
 case "contains":
@@ -359,48 +367,48 @@ func (c *Client) DeleteDecisionsWithFilter(filter map[string][]string) (string,
 } else if ip_sz == 16 {
 if contains { /*decision contains {start_ip,end_ip}*/
 decisions = decisions.Where(decision.And(
-//matching addr size
+// matching addr size
 decision.IPSizeEQ(int64(ip_sz)),
 decision.Or(
-//decision.start_ip < query.start_ip
+// decision.start_ip < query.start_ip
 decision.StartIPLT(start_ip),
 decision.And(
-//decision.start_ip == query.start_ip
+// decision.start_ip == query.start_ip
 decision.StartIPEQ(start_ip),
-//decision.start_suffix <= query.start_suffix
+// decision.start_suffix <= query.start_suffix
 decision.StartSuffixLTE(start_sfx),
 )),
 decision.Or(
-//decision.end_ip > query.end_ip
+// decision.end_ip > query.end_ip
 decision.EndIPGT(end_ip),
 decision.And(
-//decision.end_ip == query.end_ip
+// decision.end_ip == query.end_ip
 decision.EndIPEQ(end_ip),
-//decision.end_suffix >= query.end_suffix
+// decision.end_suffix >= query.end_suffix
 decision.EndSuffixGTE(end_sfx),
 ),
 ),
 ))
 } else {
 decisions = decisions.Where(decision.And(
-//matching addr size
+// matching addr size
 decision.IPSizeEQ(int64(ip_sz)),
 decision.Or(
-//decision.start_ip > query.start_ip
+// decision.start_ip > query.start_ip
 decision.StartIPGT(start_ip),
 decision.And(
-//decision.start_ip == query.start_ip
+// decision.start_ip == query.start_ip
 decision.StartIPEQ(start_ip),
-//decision.start_suffix >= query.start_suffix
+// decision.start_suffix >= query.start_suffix
 decision.StartSuffixGTE(start_sfx),
 )),
 decision.Or(
-//decision.end_ip < query.end_ip
+// decision.end_ip < query.end_ip
 decision.EndIPLT(end_ip),
 decision.And(
-//decision.end_ip == query.end_ip
+// decision.end_ip == query.end_ip
 decision.EndIPEQ(end_ip),
-//decision.end_suffix <= query.end_suffix
+// decision.end_suffix <= query.end_suffix
 decision.EndSuffixLTE(end_sfx),
 ),
 ),
@@ -415,11 +423,13 @@ func (c *Client) DeleteDecisionsWithFilter(filter map[string][]string) (string,
 c.Log.Warningf("DeleteDecisionsWithFilter : %s", err)
 return "0", nil, errors.Wrap(DeleteFail, "decisions with provided filter")
 }

 count, err := c.BulkDeleteDecisions(toDelete, false)
 if err != nil {
 c.Log.Warningf("While deleting decisions : %s", err)
 return "0", nil, errors.Wrap(DeleteFail, "decisions with provided filter")
 }

 return strconv.Itoa(count), toDelete, nil
 }

@@ -432,6 +442,7 @@ func (c *Client) SoftDeleteDecisionsWithFilter(filter map[string][]string) (stri
 /*if contains is true, return bans that *contains* the given value (value is the inner)
 else, return bans that are *contained* by the given value (value is the outer)*/
 decisions := c.Ent.Decision.Query().Where(decision.UntilGT(time.Now().UTC()))

 for param, value := range filter {
 switch param {
 case "contains":
@@ -480,24 +491,24 @@ func (c *Client) SoftDeleteDecisionsWithFilter(filter map[string][]string) (stri
 /*decision contains {start_ip,end_ip}*/
 if contains {
 decisions = decisions.Where(decision.And(
-//matching addr size
+// matching addr size
 decision.IPSizeEQ(int64(ip_sz)),
 decision.Or(
-//decision.start_ip < query.start_ip
+// decision.start_ip < query.start_ip
 decision.StartIPLT(start_ip),
 decision.And(
-//decision.start_ip == query.start_ip
+// decision.start_ip == query.start_ip
 decision.StartIPEQ(start_ip),
-//decision.start_suffix <= query.start_suffix
+// decision.start_suffix <= query.start_suffix
 decision.StartSuffixLTE(start_sfx),
 )),
 decision.Or(
-//decision.end_ip > query.end_ip
+// decision.end_ip > query.end_ip
 decision.EndIPGT(end_ip),
 decision.And(
-//decision.end_ip == query.end_ip
+// decision.end_ip == query.end_ip
 decision.EndIPEQ(end_ip),
-//decision.end_suffix >= query.end_suffix
+// decision.end_suffix >= query.end_suffix
 decision.EndSuffixGTE(end_sfx),
 ),
 ),
@@ -505,24 +516,24 @@ func (c *Client) SoftDeleteDecisionsWithFilter(filter map[string][]string) (stri
 } else {
 /*decision is contained within {start_ip,end_ip}*/
 decisions = decisions.Where(decision.And(
-//matching addr size
+// matching addr size
 decision.IPSizeEQ(int64(ip_sz)),
 decision.Or(
-//decision.start_ip > query.start_ip
+// decision.start_ip > query.start_ip
 decision.StartIPGT(start_ip),
 decision.And(
-//decision.start_ip == query.start_ip
+// decision.start_ip == query.start_ip
 decision.StartIPEQ(start_ip),
-//decision.start_suffix >= query.start_suffix
+// decision.start_suffix >= query.start_suffix
 decision.StartSuffixGTE(start_sfx),
 )),
 decision.Or(
-//decision.end_ip < query.end_ip
+// decision.end_ip < query.end_ip
 decision.EndIPLT(end_ip),
 decision.And(
-//decision.end_ip == query.end_ip
+// decision.end_ip == query.end_ip
 decision.EndIPEQ(end_ip),
-//decision.end_suffix <= query.end_suffix
+// decision.end_suffix <= query.end_suffix
 decision.EndSuffixLTE(end_sfx),
 ),
 ),
@@ -531,6 +542,7 @@ func (c *Client) SoftDeleteDecisionsWithFilter(filter map[string][]string) (stri
 } else if ip_sz != 0 {
 return "0", nil, errors.Wrapf(InvalidFilter, "Unknown ip size %d", ip_sz)
 }

 DecisionsToDelete, err := decisions.All(c.CTX)
 if err != nil {
 c.Log.Warningf("SoftDeleteDecisionsWithFilter : %s", err)
@@ -541,13 +553,14 @@ func (c *Client) SoftDeleteDecisionsWithFilter(filter map[string][]string) (stri
 if err != nil {
 return "0", nil, errors.Wrapf(DeleteFail, "soft delete decisions with provided filter : %s", err)
 }

 return strconv.Itoa(count), DecisionsToDelete, err
 }

-// BulkDeleteDecisions set the expiration of a bulk of decisions to now() or hard deletes them.
+// BulkDeleteDecisions sets the expiration of a bulk of decisions to now() or hard deletes them.
 // We are doing it this way so we can return impacted decisions for sync with CAPI/PAPI
 func (c *Client) BulkDeleteDecisions(decisionsToDelete []*ent.Decision, softDelete bool) (int, error) {
-const bulkSize = 256 //scientifically proven to be the best value for bulk delete
+const bulkSize = 256 // scientifically proven to be the best value for bulk delete

 var (
 nbUpdates int
@@ -576,6 +589,7 @@ func (c *Client) BulkDeleteDecisions(decisionsToDelete []*ent.Decision, softDele
 return totalUpdates, fmt.Errorf("hard delete decisions with provided filter: %w", err)
 }
 }

 totalUpdates += nbUpdates
 }

@@ -612,6 +626,7 @@ func (c *Client) CountDecisionsByValue(decisionValue string) (int, error) {

 contains := true
 decisions := c.Ent.Decision.Query()

 decisions, err = applyStartIpEndIpFilter(decisions, contains, ip_sz, start_ip, start_sfx, end_ip, end_sfx)
 if err != nil {
 return 0, errors.Wrapf(err, "fail to apply StartIpEndIpFilter")
@@ -667,6 +682,7 @@ func applyStartIpEndIpFilter(decisions *ent.DecisionQuery, contains bool, ip_sz
 decision.IPSizeEQ(int64(ip_sz)),
 ))
 }

 return decisions, nil
 }

@@ -674,24 +690,24 @@ func applyStartIpEndIpFilter(decisions *ent.DecisionQuery, contains bool, ip_sz
 /*decision contains {start_ip,end_ip}*/
 if contains {
 decisions = decisions.Where(decision.And(
-//matching addr size
+// matching addr size
 decision.IPSizeEQ(int64(ip_sz)),
 decision.Or(
-//decision.start_ip < query.start_ip
+// decision.start_ip < query.start_ip
 decision.StartIPLT(start_ip),
 decision.And(
-//decision.start_ip == query.start_ip
+// decision.start_ip == query.start_ip
 decision.StartIPEQ(start_ip),
-//decision.start_suffix <= query.start_suffix
+// decision.start_suffix <= query.start_suffix
 decision.StartSuffixLTE(start_sfx),
 )),
 decision.Or(
-//decision.end_ip > query.end_ip
+// decision.end_ip > query.end_ip
 decision.EndIPGT(end_ip),
 decision.And(
-//decision.end_ip == query.end_ip
+// decision.end_ip == query.end_ip
 decision.EndIPEQ(end_ip),
-//decision.end_suffix >= query.end_suffix
+// decision.end_suffix >= query.end_suffix
 decision.EndSuffixGTE(end_sfx),
 ),
 ),
@@ -699,29 +715,30 @@ func applyStartIpEndIpFilter(decisions *ent.DecisionQuery, contains bool, ip_sz
 } else {
 /*decision is contained within {start_ip,end_ip}*/
 decisions = decisions.Where(decision.And(
-//matching addr size
+// matching addr size
 decision.IPSizeEQ(int64(ip_sz)),
 decision.Or(
-//decision.start_ip > query.start_ip
+// decision.start_ip > query.start_ip
 decision.StartIPGT(start_ip),
 decision.And(
-//decision.start_ip == query.start_ip
+// decision.start_ip == query.start_ip
 decision.StartIPEQ(start_ip),
-//decision.start_suffix >= query.start_suffix
+// decision.start_suffix >= query.start_suffix
 decision.StartSuffixGTE(start_sfx),
 )),
 decision.Or(
-//decision.end_ip < query.end_ip
+// decision.end_ip < query.end_ip
 decision.EndIPLT(end_ip),
 decision.And(
-//decision.end_ip == query.end_ip
+// decision.end_ip == query.end_ip
 decision.EndIPEQ(end_ip),
-//decision.end_suffix <= query.end_suffix
+// decision.end_suffix <= query.end_suffix
 decision.EndSuffixLTE(end_sfx),
 ),
 ),
 ))
 }

 return decisions, nil
 }

@@ -735,8 +752,10 @@ func applyStartIpEndIpFilter(decisions *ent.DecisionQuery, contains bool, ip_sz
 func decisionPredicatesFromStr(s string, predicateFunc func(string) predicate.Decision) []predicate.Decision {
 words := strings.Split(s, ",")
 predicates := make([]predicate.Decision, len(words))

 for i, word := range words {
 predicates[i] = predicateFunc(word)
 }

 return predicates
 }
@@ -19,9 +19,9 @@ type Alert struct {
 // ID of the ent.
 ID int `json:"id,omitempty"`
 // CreatedAt holds the value of the "created_at" field.
-CreatedAt time.Time `json:"created_at,omitempty"`
+CreatedAt *time.Time `json:"created_at,omitempty"`
 // UpdatedAt holds the value of the "updated_at" field.
-UpdatedAt time.Time `json:"updated_at,omitempty"`
+UpdatedAt *time.Time `json:"updated_at,omitempty"`
 // Scenario holds the value of the "scenario" field.
 Scenario string `json:"scenario,omitempty"`
 // BucketId holds the value of the "bucketId" field.
@@ -168,13 +168,15 @@ func (a *Alert) assignValues(columns []string, values []any) error {
 if value, ok := values[i].(*sql.NullTime); !ok {
 return fmt.Errorf("unexpected type %T for field created_at", values[i])
 } else if value.Valid {
-a.CreatedAt = value.Time
+a.CreatedAt = new(time.Time)
+*a.CreatedAt = value.Time
 }
 case alert.FieldUpdatedAt:
 if value, ok := values[i].(*sql.NullTime); !ok {
 return fmt.Errorf("unexpected type %T for field updated_at", values[i])
 } else if value.Valid {
-a.UpdatedAt = value.Time
+a.UpdatedAt = new(time.Time)
+*a.UpdatedAt = value.Time
 }
 case alert.FieldScenario:
 if value, ok := values[i].(*sql.NullString); !ok {
@@ -365,11 +367,15 @@ func (a *Alert) String() string {
 var builder strings.Builder
 builder.WriteString("Alert(")
 builder.WriteString(fmt.Sprintf("id=%v, ", a.ID))
-builder.WriteString("created_at=")
-builder.WriteString(a.CreatedAt.Format(time.ANSIC))
+if v := a.CreatedAt; v != nil {
+builder.WriteString("created_at=")
+builder.WriteString(v.Format(time.ANSIC))
+}
 builder.WriteString(", ")
-builder.WriteString("updated_at=")
-builder.WriteString(a.UpdatedAt.Format(time.ANSIC))
+if v := a.UpdatedAt; v != nil {
+builder.WriteString("updated_at=")
+builder.WriteString(v.Format(time.ANSIC))
+}
 builder.WriteString(", ")
 builder.WriteString("scenario=")
 builder.WriteString(a.Scenario)
@@ -152,6 +152,8 @@ func ValidColumn(column string) bool {
 var (
 // DefaultCreatedAt holds the default value on creation for the "created_at" field.
 DefaultCreatedAt func() time.Time
+// UpdateDefaultCreatedAt holds the default value on update for the "created_at" field.
+UpdateDefaultCreatedAt func() time.Time
 // DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
 DefaultUpdatedAt func() time.Time
 // UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
@@ -210,6 +210,16 @@ func CreatedAtLTE(v time.Time) predicate.Alert {
 return predicate.Alert(sql.FieldLTE(FieldCreatedAt, v))
 }

+// CreatedAtIsNil applies the IsNil predicate on the "created_at" field.
+func CreatedAtIsNil() predicate.Alert {
+return predicate.Alert(sql.FieldIsNull(FieldCreatedAt))
+}
+
+// CreatedAtNotNil applies the NotNil predicate on the "created_at" field.
+func CreatedAtNotNil() predicate.Alert {
+return predicate.Alert(sql.FieldNotNull(FieldCreatedAt))
+}
+
 // UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
 func UpdatedAtEQ(v time.Time) predicate.Alert {
 return predicate.Alert(sql.FieldEQ(FieldUpdatedAt, v))
@@ -250,6 +260,16 @@ func UpdatedAtLTE(v time.Time) predicate.Alert {
 return predicate.Alert(sql.FieldLTE(FieldUpdatedAt, v))
 }

+// UpdatedAtIsNil applies the IsNil predicate on the "updated_at" field.
+func UpdatedAtIsNil() predicate.Alert {
+return predicate.Alert(sql.FieldIsNull(FieldUpdatedAt))
+}
+
+// UpdatedAtNotNil applies the NotNil predicate on the "updated_at" field.
+func UpdatedAtNotNil() predicate.Alert {
+return predicate.Alert(sql.FieldNotNull(FieldUpdatedAt))
+}
+
 // ScenarioEQ applies the EQ predicate on the "scenario" field.
 func ScenarioEQ(v string) predicate.Alert {
 return predicate.Alert(sql.FieldEQ(FieldScenario, v))
@@ -473,12 +473,6 @@ func (ac *AlertCreate) defaults() {

 // check runs all checks and user-defined validators on the builder.
 func (ac *AlertCreate) check() error {
-if _, ok := ac.mutation.CreatedAt(); !ok {
-return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Alert.created_at"`)}
-}
-if _, ok := ac.mutation.UpdatedAt(); !ok {
-return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Alert.updated_at"`)}
-}
 if _, ok := ac.mutation.Scenario(); !ok {
 return &ValidationError{Name: "scenario", err: errors.New(`ent: missing required field "Alert.scenario"`)}
 }
@@ -513,11 +507,11 @@ func (ac *AlertCreate) createSpec() (*Alert, *sqlgraph.CreateSpec) {
 )
 if value, ok := ac.mutation.CreatedAt(); ok {
 _spec.SetField(alert.FieldCreatedAt, field.TypeTime, value)
-_node.CreatedAt = value
+_node.CreatedAt = &value
 }
 if value, ok := ac.mutation.UpdatedAt(); ok {
 _spec.SetField(alert.FieldUpdatedAt, field.TypeTime, value)
-_node.UpdatedAt = value
+_node.UpdatedAt = &value
 }
 if value, ok := ac.mutation.Scenario(); ok {
 _spec.SetField(alert.FieldScenario, field.TypeString, value)
@@ -32,12 +32,30 @@ func (au *AlertUpdate) Where(ps ...predicate.Alert) *AlertUpdate {
 return au
 }

+// SetCreatedAt sets the "created_at" field.
+func (au *AlertUpdate) SetCreatedAt(t time.Time) *AlertUpdate {
+au.mutation.SetCreatedAt(t)
+return au
+}
+
+// ClearCreatedAt clears the value of the "created_at" field.
+func (au *AlertUpdate) ClearCreatedAt() *AlertUpdate {
+au.mutation.ClearCreatedAt()
+return au
+}
+
 // SetUpdatedAt sets the "updated_at" field.
 func (au *AlertUpdate) SetUpdatedAt(t time.Time) *AlertUpdate {
 au.mutation.SetUpdatedAt(t)
 return au
 }

+// ClearUpdatedAt clears the value of the "updated_at" field.
+func (au *AlertUpdate) ClearUpdatedAt() *AlertUpdate {
+au.mutation.ClearUpdatedAt()
+return au
+}
+
 // SetScenario sets the "scenario" field.
 func (au *AlertUpdate) SetScenario(s string) *AlertUpdate {
 au.mutation.SetScenario(s)
@@ -642,7 +660,11 @@ func (au *AlertUpdate) ExecX(ctx context.Context) {

 // defaults sets the default values of the builder before save.
 func (au *AlertUpdate) defaults() {
-if _, ok := au.mutation.UpdatedAt(); !ok {
+if _, ok := au.mutation.CreatedAt(); !ok && !au.mutation.CreatedAtCleared() {
+v := alert.UpdateDefaultCreatedAt()
+au.mutation.SetCreatedAt(v)
+}
+if _, ok := au.mutation.UpdatedAt(); !ok && !au.mutation.UpdatedAtCleared() {
 v := alert.UpdateDefaultUpdatedAt()
 au.mutation.SetUpdatedAt(v)
 }
@@ -657,9 +679,18 @@ func (au *AlertUpdate) sqlSave(ctx context.Context) (n int, err error) {
 }
 }
 }
+if value, ok := au.mutation.CreatedAt(); ok {
+_spec.SetField(alert.FieldCreatedAt, field.TypeTime, value)
+}
+if au.mutation.CreatedAtCleared() {
+_spec.ClearField(alert.FieldCreatedAt, field.TypeTime)
+}
 if value, ok := au.mutation.UpdatedAt(); ok {
 _spec.SetField(alert.FieldUpdatedAt, field.TypeTime, value)
 }
+if au.mutation.UpdatedAtCleared() {
+_spec.ClearField(alert.FieldUpdatedAt, field.TypeTime)
+}
 if value, ok := au.mutation.Scenario(); ok {
 _spec.SetField(alert.FieldScenario, field.TypeString, value)
 }
@@ -976,12 +1007,30 @@ type AlertUpdateOne struct {
 mutation *AlertMutation
 }

+// SetCreatedAt sets the "created_at" field.
+func (auo *AlertUpdateOne) SetCreatedAt(t time.Time) *AlertUpdateOne {
+auo.mutation.SetCreatedAt(t)
+return auo
+}
+
+// ClearCreatedAt clears the value of the "created_at" field.
+func (auo *AlertUpdateOne) ClearCreatedAt() *AlertUpdateOne {
+auo.mutation.ClearCreatedAt()
+return auo
+}
+
 // SetUpdatedAt sets the "updated_at" field.
 func (auo *AlertUpdateOne) SetUpdatedAt(t time.Time) *AlertUpdateOne {
 auo.mutation.SetUpdatedAt(t)
 return auo
 }

+// ClearUpdatedAt clears the value of the "updated_at" field.
+func (auo *AlertUpdateOne) ClearUpdatedAt() *AlertUpdateOne {
+auo.mutation.ClearUpdatedAt()
+return auo
+}
+
 // SetScenario sets the "scenario" field.
 func (auo *AlertUpdateOne) SetScenario(s string) *AlertUpdateOne {
 auo.mutation.SetScenario(s)
@@ -1599,7 +1648,11 @@ func (auo *AlertUpdateOne) ExecX(ctx context.Context) {

 // defaults sets the default values of the builder before save.
 func (auo *AlertUpdateOne) defaults() {
-if _, ok := auo.mutation.UpdatedAt(); !ok {
+if _, ok := auo.mutation.CreatedAt(); !ok && !auo.mutation.CreatedAtCleared() {
+v := alert.UpdateDefaultCreatedAt()
+auo.mutation.SetCreatedAt(v)
+}
+if _, ok := auo.mutation.UpdatedAt(); !ok && !auo.mutation.UpdatedAtCleared() {
 v := alert.UpdateDefaultUpdatedAt()
 auo.mutation.SetUpdatedAt(v)
 }
@@ -1631,9 +1684,18 @@ func (auo *AlertUpdateOne) sqlSave(ctx context.Context) (_node *Alert, err error
 }
 }
 }
+if value, ok := auo.mutation.CreatedAt(); ok {
+_spec.SetField(alert.FieldCreatedAt, field.TypeTime, value)
+}
+if auo.mutation.CreatedAtCleared() {
+_spec.ClearField(alert.FieldCreatedAt, field.TypeTime)
+}
 if value, ok := auo.mutation.UpdatedAt(); ok {
 _spec.SetField(alert.FieldUpdatedAt, field.TypeTime, value)
 }
+if auo.mutation.UpdatedAtCleared() {
+_spec.ClearField(alert.FieldUpdatedAt, field.TypeTime)
+}
 if value, ok := auo.mutation.Scenario(); ok {
 _spec.SetField(alert.FieldScenario, field.TypeString, value)
 }
@@ -18,9 +18,9 @@ type Bouncer struct {
 // ID of the ent.
 ID int `json:"id,omitempty"`
 // CreatedAt holds the value of the "created_at" field.
-CreatedAt time.Time `json:"created_at"`
+CreatedAt *time.Time `json:"created_at"`
 // UpdatedAt holds the value of the "updated_at" field.
-UpdatedAt time.Time `json:"updated_at"`
+UpdatedAt *time.Time `json:"updated_at"`
 // Name holds the value of the "name" field.
 Name string `json:"name"`
 // APIKey holds the value of the "api_key" field.
@@ -38,13 +38,7 @@ type Bouncer struct {
 // LastPull holds the value of the "last_pull" field.
 LastPull time.Time `json:"last_pull"`
 // AuthType holds the value of the "auth_type" field.
 AuthType string `json:"auth_type"`
-// Osname holds the value of the "osname" field.
-Osname string `json:"osname,omitempty"`
-// Osversion holds the value of the "osversion" field.
-Osversion string `json:"osversion,omitempty"`
-// Featureflags holds the value of the "featureflags" field.
-Featureflags string `json:"featureflags,omitempty"`
 selectValues sql.SelectValues
 }

@@ -57,7 +51,7 @@ func (*Bouncer) scanValues(columns []string) ([]any, error) {
 values[i] = new(sql.NullBool)
 case bouncer.FieldID:
 values[i] = new(sql.NullInt64)
-case bouncer.FieldName, bouncer.FieldAPIKey, bouncer.FieldIPAddress, bouncer.FieldType, bouncer.FieldVersion, bouncer.FieldAuthType, bouncer.FieldOsname, bouncer.FieldOsversion, bouncer.FieldFeatureflags:
+case bouncer.FieldName, bouncer.FieldAPIKey, bouncer.FieldIPAddress, bouncer.FieldType, bouncer.FieldVersion, bouncer.FieldAuthType:
 values[i] = new(sql.NullString)
 case bouncer.FieldCreatedAt, bouncer.FieldUpdatedAt, bouncer.FieldUntil, bouncer.FieldLastPull:
 values[i] = new(sql.NullTime)
@@ -86,13 +80,15 @@ func (b *Bouncer) assignValues(columns []string, values []any) error {
 if value, ok := values[i].(*sql.NullTime); !ok {
 return fmt.Errorf("unexpected type %T for field created_at", values[i])
 } else if value.Valid {
-b.CreatedAt = value.Time
+b.CreatedAt = new(time.Time)
+*b.CreatedAt = value.Time
 }
 case bouncer.FieldUpdatedAt:
 if value, ok := values[i].(*sql.NullTime); !ok {
 return fmt.Errorf("unexpected type %T for field updated_at", values[i])
 } else if value.Valid {
-b.UpdatedAt = value.Time
+b.UpdatedAt = new(time.Time)
+*b.UpdatedAt = value.Time
 }
 case bouncer.FieldName:
 if value, ok := values[i].(*sql.NullString); !ok {
@@ -148,24 +144,6 @@ func (b *Bouncer) assignValues(columns []string, values []any) error {
 } else if value.Valid {
 b.AuthType = value.String
 }
-case bouncer.FieldOsname:
-if value, ok := values[i].(*sql.NullString); !ok {
-return fmt.Errorf("unexpected type %T for field osname", values[i])
-} else if value.Valid {
-b.Osname = value.String
-}
-case bouncer.FieldOsversion:
-if value, ok := values[i].(*sql.NullString); !ok {
-return fmt.Errorf("unexpected type %T for field osversion", values[i])
-} else if value.Valid {
-b.Osversion = value.String
-}
-case bouncer.FieldFeatureflags:
-if value, ok := values[i].(*sql.NullString); !ok {
-return fmt.Errorf("unexpected type %T for field featureflags", values[i])
-} else if value.Valid {
-b.Featureflags = value.String
-}
 default:
 b.selectValues.Set(columns[i], values[i])
 }
@@ -202,11 +180,15 @@ func (b *Bouncer) String() string {
 var builder strings.Builder
 builder.WriteString("Bouncer(")
 builder.WriteString(fmt.Sprintf("id=%v, ", b.ID))
-builder.WriteString("created_at=")
-builder.WriteString(b.CreatedAt.Format(time.ANSIC))
+if v := b.CreatedAt; v != nil {
+builder.WriteString("created_at=")
+builder.WriteString(v.Format(time.ANSIC))
+}
 builder.WriteString(", ")
-builder.WriteString("updated_at=")
-builder.WriteString(b.UpdatedAt.Format(time.ANSIC))
+if v := b.UpdatedAt; v != nil {
+builder.WriteString("updated_at=")
+builder.WriteString(v.Format(time.ANSIC))
+}
 builder.WriteString(", ")
 builder.WriteString("name=")
 builder.WriteString(b.Name)
@@ -233,15 +215,6 @@ func (b *Bouncer) String() string {
 builder.WriteString(", ")
 builder.WriteString("auth_type=")
 builder.WriteString(b.AuthType)
-builder.WriteString(", ")
-builder.WriteString("osname=")
-builder.WriteString(b.Osname)
-builder.WriteString(", ")
-builder.WriteString("osversion=")
-builder.WriteString(b.Osversion)
-builder.WriteString(", ")
-builder.WriteString("featureflags=")
-builder.WriteString(b.Featureflags)
 builder.WriteByte(')')
 return builder.String()
 }
@@ -35,12 +35,6 @@ const (
 FieldLastPull = "last_pull"
 // FieldAuthType holds the string denoting the auth_type field in the database.
 FieldAuthType = "auth_type"
-// FieldOsname holds the string denoting the osname field in the database.
-FieldOsname = "osname"
-// FieldOsversion holds the string denoting the osversion field in the database.
-FieldOsversion = "osversion"
-// FieldFeatureflags holds the string denoting the featureflags field in the database.
-FieldFeatureflags = "featureflags"
 // Table holds the table name of the bouncer in the database.
 Table = "bouncers"
 )
@@ -59,9 +53,6 @@ var Columns = []string{
 FieldUntil,
 FieldLastPull,
 FieldAuthType,
-FieldOsname,
-FieldOsversion,
-FieldFeatureflags,
 }

 // ValidColumn reports if the column name is valid (part of the table columns).
@@ -77,6 +68,8 @@ func ValidColumn(column string) bool {
 var (
 // DefaultCreatedAt holds the default value on creation for the "created_at" field.
 DefaultCreatedAt func() time.Time
+// UpdateDefaultCreatedAt holds the default value on update for the "created_at" field.
+UpdateDefaultCreatedAt func() time.Time
 // DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
 DefaultUpdatedAt func() time.Time
 // UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
@@ -153,18 +146,3 @@ func ByLastPull(opts ...sql.OrderTermOption) OrderOption {
 func ByAuthType(opts ...sql.OrderTermOption) OrderOption {
 return sql.OrderByField(FieldAuthType, opts...).ToFunc()
 }
-
-// ByOsname orders the results by the osname field.
-func ByOsname(opts ...sql.OrderTermOption) OrderOption {
-return sql.OrderByField(FieldOsname, opts...).ToFunc()
-}
-
-// ByOsversion orders the results by the osversion field.
-func ByOsversion(opts ...sql.OrderTermOption) OrderOption {
-return sql.OrderByField(FieldOsversion, opts...).ToFunc()
-}
-
-// ByFeatureflags orders the results by the featureflags field.
-func ByFeatureflags(opts ...sql.OrderTermOption) OrderOption {
-return sql.OrderByField(FieldFeatureflags, opts...).ToFunc()
-}
@@ -109,21 +109,6 @@ func AuthType(v string) predicate.Bouncer {
 return predicate.Bouncer(sql.FieldEQ(FieldAuthType, v))
 }

-// Osname applies equality check predicate on the "osname" field. It's identical to OsnameEQ.
-func Osname(v string) predicate.Bouncer {
-return predicate.Bouncer(sql.FieldEQ(FieldOsname, v))
-}
-
-// Osversion applies equality check predicate on the "osversion" field. It's identical to OsversionEQ.
-func Osversion(v string) predicate.Bouncer {
-return predicate.Bouncer(sql.FieldEQ(FieldOsversion, v))
-}
-
-// Featureflags applies equality check predicate on the "featureflags" field. It's identical to FeatureflagsEQ.
-func Featureflags(v string) predicate.Bouncer {
-return predicate.Bouncer(sql.FieldEQ(FieldFeatureflags, v))
-}
-
 // CreatedAtEQ applies the EQ predicate on the "created_at" field.
 func CreatedAtEQ(v time.Time) predicate.Bouncer {
 return predicate.Bouncer(sql.FieldEQ(FieldCreatedAt, v))
@@ -164,6 +149,16 @@ func CreatedAtLTE(v time.Time) predicate.Bouncer {
 return predicate.Bouncer(sql.FieldLTE(FieldCreatedAt, v))
 }

+// CreatedAtIsNil applies the IsNil predicate on the "created_at" field.
+func CreatedAtIsNil() predicate.Bouncer {
+return predicate.Bouncer(sql.FieldIsNull(FieldCreatedAt))
+}
+
+// CreatedAtNotNil applies the NotNil predicate on the "created_at" field.
+func CreatedAtNotNil() predicate.Bouncer {
+return predicate.Bouncer(sql.FieldNotNull(FieldCreatedAt))
+}
+
 // UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
 func UpdatedAtEQ(v time.Time) predicate.Bouncer {
 return predicate.Bouncer(sql.FieldEQ(FieldUpdatedAt, v))
@@ -204,6 +199,16 @@ func UpdatedAtLTE(v time.Time) predicate.Bouncer {
 return predicate.Bouncer(sql.FieldLTE(FieldUpdatedAt, v))
 }

+// UpdatedAtIsNil applies the IsNil predicate on the "updated_at" field.
+func UpdatedAtIsNil() predicate.Bouncer {
+return predicate.Bouncer(sql.FieldIsNull(FieldUpdatedAt))
+}
+
+// UpdatedAtNotNil applies the NotNil predicate on the "updated_at" field.
+func UpdatedAtNotNil() predicate.Bouncer {
+return predicate.Bouncer(sql.FieldNotNull(FieldUpdatedAt))
+}
+
 // NameEQ applies the EQ predicate on the "name" field.
 func NameEQ(v string) predicate.Bouncer {
 return predicate.Bouncer(sql.FieldEQ(FieldName, v))
@ -724,231 +729,6 @@ func AuthTypeContainsFold(v string) predicate.Bouncer {
|
||||||
return predicate.Bouncer(sql.FieldContainsFold(FieldAuthType, v))
|
return predicate.Bouncer(sql.FieldContainsFold(FieldAuthType, v))
|
||||||
}
|
}
|
||||||
|
|
||||||
// OsnameEQ applies the EQ predicate on the "osname" field.
|
|
||||||
func OsnameEQ(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldEQ(FieldOsname, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameNEQ applies the NEQ predicate on the "osname" field.
|
|
||||||
func OsnameNEQ(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldNEQ(FieldOsname, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameIn applies the In predicate on the "osname" field.
|
|
||||||
func OsnameIn(vs ...string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldIn(FieldOsname, vs...))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameNotIn applies the NotIn predicate on the "osname" field.
|
|
||||||
func OsnameNotIn(vs ...string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldNotIn(FieldOsname, vs...))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameGT applies the GT predicate on the "osname" field.
|
|
||||||
func OsnameGT(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldGT(FieldOsname, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameGTE applies the GTE predicate on the "osname" field.
|
|
||||||
func OsnameGTE(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldGTE(FieldOsname, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameLT applies the LT predicate on the "osname" field.
|
|
||||||
func OsnameLT(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldLT(FieldOsname, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameLTE applies the LTE predicate on the "osname" field.
|
|
||||||
func OsnameLTE(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldLTE(FieldOsname, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameContains applies the Contains predicate on the "osname" field.
|
|
||||||
func OsnameContains(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldContains(FieldOsname, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameHasPrefix applies the HasPrefix predicate on the "osname" field.
|
|
||||||
func OsnameHasPrefix(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldHasPrefix(FieldOsname, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameHasSuffix applies the HasSuffix predicate on the "osname" field.
|
|
||||||
func OsnameHasSuffix(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldHasSuffix(FieldOsname, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameIsNil applies the IsNil predicate on the "osname" field.
|
|
||||||
func OsnameIsNil() predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldIsNull(FieldOsname))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameNotNil applies the NotNil predicate on the "osname" field.
|
|
||||||
func OsnameNotNil() predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldNotNull(FieldOsname))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameEqualFold applies the EqualFold predicate on the "osname" field.
|
|
||||||
func OsnameEqualFold(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldEqualFold(FieldOsname, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsnameContainsFold applies the ContainsFold predicate on the "osname" field.
|
|
||||||
func OsnameContainsFold(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldContainsFold(FieldOsname, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionEQ applies the EQ predicate on the "osversion" field.
|
|
||||||
func OsversionEQ(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldEQ(FieldOsversion, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionNEQ applies the NEQ predicate on the "osversion" field.
|
|
||||||
func OsversionNEQ(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldNEQ(FieldOsversion, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionIn applies the In predicate on the "osversion" field.
|
|
||||||
func OsversionIn(vs ...string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldIn(FieldOsversion, vs...))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionNotIn applies the NotIn predicate on the "osversion" field.
|
|
||||||
func OsversionNotIn(vs ...string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldNotIn(FieldOsversion, vs...))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionGT applies the GT predicate on the "osversion" field.
|
|
||||||
func OsversionGT(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldGT(FieldOsversion, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionGTE applies the GTE predicate on the "osversion" field.
|
|
||||||
func OsversionGTE(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldGTE(FieldOsversion, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionLT applies the LT predicate on the "osversion" field.
|
|
||||||
func OsversionLT(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldLT(FieldOsversion, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionLTE applies the LTE predicate on the "osversion" field.
|
|
||||||
func OsversionLTE(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldLTE(FieldOsversion, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionContains applies the Contains predicate on the "osversion" field.
|
|
||||||
func OsversionContains(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldContains(FieldOsversion, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionHasPrefix applies the HasPrefix predicate on the "osversion" field.
|
|
||||||
func OsversionHasPrefix(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldHasPrefix(FieldOsversion, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionHasSuffix applies the HasSuffix predicate on the "osversion" field.
|
|
||||||
func OsversionHasSuffix(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldHasSuffix(FieldOsversion, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionIsNil applies the IsNil predicate on the "osversion" field.
|
|
||||||
func OsversionIsNil() predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldIsNull(FieldOsversion))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionNotNil applies the NotNil predicate on the "osversion" field.
|
|
||||||
func OsversionNotNil() predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldNotNull(FieldOsversion))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionEqualFold applies the EqualFold predicate on the "osversion" field.
|
|
||||||
func OsversionEqualFold(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldEqualFold(FieldOsversion, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// OsversionContainsFold applies the ContainsFold predicate on the "osversion" field.
|
|
||||||
func OsversionContainsFold(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldContainsFold(FieldOsversion, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsEQ applies the EQ predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsEQ(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldEQ(FieldFeatureflags, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsNEQ applies the NEQ predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsNEQ(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldNEQ(FieldFeatureflags, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsIn applies the In predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsIn(vs ...string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldIn(FieldFeatureflags, vs...))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsNotIn applies the NotIn predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsNotIn(vs ...string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldNotIn(FieldFeatureflags, vs...))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsGT applies the GT predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsGT(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldGT(FieldFeatureflags, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsGTE applies the GTE predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsGTE(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldGTE(FieldFeatureflags, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsLT applies the LT predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsLT(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldLT(FieldFeatureflags, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsLTE applies the LTE predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsLTE(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldLTE(FieldFeatureflags, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsContains applies the Contains predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsContains(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldContains(FieldFeatureflags, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsHasPrefix applies the HasPrefix predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsHasPrefix(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldHasPrefix(FieldFeatureflags, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsHasSuffix applies the HasSuffix predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsHasSuffix(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldHasSuffix(FieldFeatureflags, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsIsNil applies the IsNil predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsIsNil() predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldIsNull(FieldFeatureflags))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsNotNil applies the NotNil predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsNotNil() predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldNotNull(FieldFeatureflags))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsEqualFold applies the EqualFold predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsEqualFold(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldEqualFold(FieldFeatureflags, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// FeatureflagsContainsFold applies the ContainsFold predicate on the "featureflags" field.
|
|
||||||
func FeatureflagsContainsFold(v string) predicate.Bouncer {
|
|
||||||
return predicate.Bouncer(sql.FieldContainsFold(FieldFeatureflags, v))
|
|
||||||
}
|
|
||||||
|
|
||||||
// And groups predicates with the AND operator between them.
|
// And groups predicates with the AND operator between them.
|
||||||
func And(predicates ...predicate.Bouncer) predicate.Bouncer {
|
func And(predicates ...predicate.Bouncer) predicate.Bouncer {
|
||||||
return predicate.Bouncer(sql.AndPredicates(predicates...))
|
return predicate.Bouncer(sql.AndPredicates(predicates...))
|
||||||
|
|
|
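For context on the predicate changes above: IsNil/NotNil predicates and Clear* builder methods are what entgo.io/ent generates for a field marked Optional, and Nillable is what turns the generated struct field into a pointer. A minimal, hypothetical schema-side sketch (field names and the time.Now defaults are placeholders for illustration, not taken from this changeset):

package schema

import (
	"time"

	"entgo.io/ent"
	"entgo.io/ent/schema/field"
)

// Bouncer sketches field options that yield generated code of the shape shown
// above: Optional gives a nullable column plus IsNil/NotNil predicates and
// Clear* update methods; Nillable makes the generated struct field *time.Time.
type Bouncer struct {
	ent.Schema
}

func (Bouncer) Fields() []ent.Field {
	return []ent.Field{
		field.Time("created_at").
			Optional().
			Nillable().
			Default(time.Now).
			UpdateDefault(time.Now),
		field.Time("updated_at").
			Optional().
			Nillable().
			Default(time.Now).
			UpdateDefault(time.Now),
		field.String("name").Unique(),
	}
}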
@@ -150,48 +150,6 @@ func (bc *BouncerCreate) SetNillableAuthType(s *string) *BouncerCreate {
 	return bc
 }
 
-// SetOsname sets the "osname" field.
-func (bc *BouncerCreate) SetOsname(s string) *BouncerCreate {
-	bc.mutation.SetOsname(s)
-	return bc
-}
-
-// SetNillableOsname sets the "osname" field if the given value is not nil.
-func (bc *BouncerCreate) SetNillableOsname(s *string) *BouncerCreate {
-	if s != nil {
-		bc.SetOsname(*s)
-	}
-	return bc
-}
-
-// SetOsversion sets the "osversion" field.
-func (bc *BouncerCreate) SetOsversion(s string) *BouncerCreate {
-	bc.mutation.SetOsversion(s)
-	return bc
-}
-
-// SetNillableOsversion sets the "osversion" field if the given value is not nil.
-func (bc *BouncerCreate) SetNillableOsversion(s *string) *BouncerCreate {
-	if s != nil {
-		bc.SetOsversion(*s)
-	}
-	return bc
-}
-
-// SetFeatureflags sets the "featureflags" field.
-func (bc *BouncerCreate) SetFeatureflags(s string) *BouncerCreate {
-	bc.mutation.SetFeatureflags(s)
-	return bc
-}
-
-// SetNillableFeatureflags sets the "featureflags" field if the given value is not nil.
-func (bc *BouncerCreate) SetNillableFeatureflags(s *string) *BouncerCreate {
-	if s != nil {
-		bc.SetFeatureflags(*s)
-	}
-	return bc
-}
-
 // Mutation returns the BouncerMutation object of the builder.
 func (bc *BouncerCreate) Mutation() *BouncerMutation {
 	return bc.mutation
@@ -255,12 +213,6 @@ func (bc *BouncerCreate) defaults() {
 
 // check runs all checks and user-defined validators on the builder.
 func (bc *BouncerCreate) check() error {
-	if _, ok := bc.mutation.CreatedAt(); !ok {
-		return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Bouncer.created_at"`)}
-	}
-	if _, ok := bc.mutation.UpdatedAt(); !ok {
-		return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Bouncer.updated_at"`)}
-	}
 	if _, ok := bc.mutation.Name(); !ok {
 		return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "Bouncer.name"`)}
 	}
@@ -304,11 +256,11 @@ func (bc *BouncerCreate) createSpec() (*Bouncer, *sqlgraph.CreateSpec) {
 	)
 	if value, ok := bc.mutation.CreatedAt(); ok {
 		_spec.SetField(bouncer.FieldCreatedAt, field.TypeTime, value)
-		_node.CreatedAt = value
+		_node.CreatedAt = &value
 	}
 	if value, ok := bc.mutation.UpdatedAt(); ok {
 		_spec.SetField(bouncer.FieldUpdatedAt, field.TypeTime, value)
-		_node.UpdatedAt = value
+		_node.UpdatedAt = &value
 	}
 	if value, ok := bc.mutation.Name(); ok {
 		_spec.SetField(bouncer.FieldName, field.TypeString, value)
@@ -346,18 +298,6 @@ func (bc *BouncerCreate) createSpec() (*Bouncer, *sqlgraph.CreateSpec) {
 		_spec.SetField(bouncer.FieldAuthType, field.TypeString, value)
 		_node.AuthType = value
 	}
-	if value, ok := bc.mutation.Osname(); ok {
-		_spec.SetField(bouncer.FieldOsname, field.TypeString, value)
-		_node.Osname = value
-	}
-	if value, ok := bc.mutation.Osversion(); ok {
-		_spec.SetField(bouncer.FieldOsversion, field.TypeString, value)
-		_node.Osversion = value
-	}
-	if value, ok := bc.mutation.Featureflags(); ok {
-		_spec.SetField(bouncer.FieldFeatureflags, field.TypeString, value)
-		_node.Featureflags = value
-	}
 	return _node, _spec
 }
 
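Because createSpec now assigns &value, the generated entity carries pointer timestamps, so read paths need a nil check instead of comparing against a zero time.Time. A hypothetical caller-side helper (assumed for illustration, not part of this changeset):

package example

import (
	"time"

	"github.com/crowdsecurity/crowdsec/pkg/database/ent"
)

// bouncerAge illustrates the nil check required once CreatedAt is a *time.Time.
func bouncerAge(b *ent.Bouncer) time.Duration {
	if b.CreatedAt == nil {
		return 0
	}
	return time.Since(*b.CreatedAt)
}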
@@ -34,11 +34,9 @@ func (bu *BouncerUpdate) SetCreatedAt(t time.Time) *BouncerUpdate {
 	return bu
 }
 
-// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
-func (bu *BouncerUpdate) SetNillableCreatedAt(t *time.Time) *BouncerUpdate {
-	if t != nil {
-		bu.SetCreatedAt(*t)
-	}
+// ClearCreatedAt clears the value of the "created_at" field.
+func (bu *BouncerUpdate) ClearCreatedAt() *BouncerUpdate {
+	bu.mutation.ClearCreatedAt()
 	return bu
 }
 
@@ -48,6 +46,12 @@ func (bu *BouncerUpdate) SetUpdatedAt(t time.Time) *BouncerUpdate {
 	return bu
 }
 
+// ClearUpdatedAt clears the value of the "updated_at" field.
+func (bu *BouncerUpdate) ClearUpdatedAt() *BouncerUpdate {
+	bu.mutation.ClearUpdatedAt()
+	return bu
+}
+
 // SetName sets the "name" field.
 func (bu *BouncerUpdate) SetName(s string) *BouncerUpdate {
 	bu.mutation.SetName(s)
@@ -198,66 +202,6 @@ func (bu *BouncerUpdate) SetNillableAuthType(s *string) *BouncerUpdate {
 	return bu
 }
 
-// SetOsname sets the "osname" field.
-func (bu *BouncerUpdate) SetOsname(s string) *BouncerUpdate {
-	bu.mutation.SetOsname(s)
-	return bu
-}
-
-// SetNillableOsname sets the "osname" field if the given value is not nil.
-func (bu *BouncerUpdate) SetNillableOsname(s *string) *BouncerUpdate {
-	if s != nil {
-		bu.SetOsname(*s)
-	}
-	return bu
-}
-
-// ClearOsname clears the value of the "osname" field.
-func (bu *BouncerUpdate) ClearOsname() *BouncerUpdate {
-	bu.mutation.ClearOsname()
-	return bu
-}
-
-// SetOsversion sets the "osversion" field.
-func (bu *BouncerUpdate) SetOsversion(s string) *BouncerUpdate {
-	bu.mutation.SetOsversion(s)
-	return bu
-}
-
-// SetNillableOsversion sets the "osversion" field if the given value is not nil.
-func (bu *BouncerUpdate) SetNillableOsversion(s *string) *BouncerUpdate {
-	if s != nil {
-		bu.SetOsversion(*s)
-	}
-	return bu
-}
-
-// ClearOsversion clears the value of the "osversion" field.
-func (bu *BouncerUpdate) ClearOsversion() *BouncerUpdate {
-	bu.mutation.ClearOsversion()
-	return bu
-}
-
-// SetFeatureflags sets the "featureflags" field.
-func (bu *BouncerUpdate) SetFeatureflags(s string) *BouncerUpdate {
-	bu.mutation.SetFeatureflags(s)
-	return bu
-}
-
-// SetNillableFeatureflags sets the "featureflags" field if the given value is not nil.
-func (bu *BouncerUpdate) SetNillableFeatureflags(s *string) *BouncerUpdate {
-	if s != nil {
-		bu.SetFeatureflags(*s)
-	}
-	return bu
-}
-
-// ClearFeatureflags clears the value of the "featureflags" field.
-func (bu *BouncerUpdate) ClearFeatureflags() *BouncerUpdate {
-	bu.mutation.ClearFeatureflags()
-	return bu
-}
-
 // Mutation returns the BouncerMutation object of the builder.
 func (bu *BouncerUpdate) Mutation() *BouncerMutation {
 	return bu.mutation
@@ -293,7 +237,11 @@ func (bu *BouncerUpdate) ExecX(ctx context.Context) {
 
 // defaults sets the default values of the builder before save.
 func (bu *BouncerUpdate) defaults() {
-	if _, ok := bu.mutation.UpdatedAt(); !ok {
+	if _, ok := bu.mutation.CreatedAt(); !ok && !bu.mutation.CreatedAtCleared() {
+		v := bouncer.UpdateDefaultCreatedAt()
+		bu.mutation.SetCreatedAt(v)
+	}
+	if _, ok := bu.mutation.UpdatedAt(); !ok && !bu.mutation.UpdatedAtCleared() {
 		v := bouncer.UpdateDefaultUpdatedAt()
 		bu.mutation.SetUpdatedAt(v)
 	}
@@ -311,9 +259,15 @@ func (bu *BouncerUpdate) sqlSave(ctx context.Context) (n int, err error) {
 	if value, ok := bu.mutation.CreatedAt(); ok {
 		_spec.SetField(bouncer.FieldCreatedAt, field.TypeTime, value)
 	}
+	if bu.mutation.CreatedAtCleared() {
+		_spec.ClearField(bouncer.FieldCreatedAt, field.TypeTime)
+	}
 	if value, ok := bu.mutation.UpdatedAt(); ok {
 		_spec.SetField(bouncer.FieldUpdatedAt, field.TypeTime, value)
 	}
+	if bu.mutation.UpdatedAtCleared() {
+		_spec.ClearField(bouncer.FieldUpdatedAt, field.TypeTime)
+	}
 	if value, ok := bu.mutation.Name(); ok {
 		_spec.SetField(bouncer.FieldName, field.TypeString, value)
 	}
@@ -353,24 +307,6 @@ func (bu *BouncerUpdate) sqlSave(ctx context.Context) (n int, err error) {
 	if value, ok := bu.mutation.AuthType(); ok {
 		_spec.SetField(bouncer.FieldAuthType, field.TypeString, value)
 	}
-	if value, ok := bu.mutation.Osname(); ok {
-		_spec.SetField(bouncer.FieldOsname, field.TypeString, value)
-	}
-	if bu.mutation.OsnameCleared() {
-		_spec.ClearField(bouncer.FieldOsname, field.TypeString)
-	}
-	if value, ok := bu.mutation.Osversion(); ok {
-		_spec.SetField(bouncer.FieldOsversion, field.TypeString, value)
-	}
-	if bu.mutation.OsversionCleared() {
-		_spec.ClearField(bouncer.FieldOsversion, field.TypeString)
-	}
-	if value, ok := bu.mutation.Featureflags(); ok {
-		_spec.SetField(bouncer.FieldFeatureflags, field.TypeString, value)
-	}
-	if bu.mutation.FeatureflagsCleared() {
-		_spec.ClearField(bouncer.FieldFeatureflags, field.TypeString)
-	}
 	if n, err = sqlgraph.UpdateNodes(ctx, bu.driver, _spec); err != nil {
 		if _, ok := err.(*sqlgraph.NotFoundError); ok {
 			err = &NotFoundError{bouncer.Label}
@@ -397,11 +333,9 @@ func (buo *BouncerUpdateOne) SetCreatedAt(t time.Time) *BouncerUpdateOne {
 	return buo
 }
 
-// SetNillableCreatedAt sets the "created_at" field if the given value is not nil.
-func (buo *BouncerUpdateOne) SetNillableCreatedAt(t *time.Time) *BouncerUpdateOne {
-	if t != nil {
-		buo.SetCreatedAt(*t)
-	}
+// ClearCreatedAt clears the value of the "created_at" field.
+func (buo *BouncerUpdateOne) ClearCreatedAt() *BouncerUpdateOne {
+	buo.mutation.ClearCreatedAt()
 	return buo
 }
 
@@ -411,6 +345,12 @@ func (buo *BouncerUpdateOne) SetUpdatedAt(t time.Time) *BouncerUpdateOne {
 	return buo
 }
 
+// ClearUpdatedAt clears the value of the "updated_at" field.
+func (buo *BouncerUpdateOne) ClearUpdatedAt() *BouncerUpdateOne {
+	buo.mutation.ClearUpdatedAt()
+	return buo
+}
+
 // SetName sets the "name" field.
 func (buo *BouncerUpdateOne) SetName(s string) *BouncerUpdateOne {
 	buo.mutation.SetName(s)
@@ -561,66 +501,6 @@ func (buo *BouncerUpdateOne) SetNillableAuthType(s *string) *BouncerUpdateOne {
 	return buo
 }
 
-// SetOsname sets the "osname" field.
-func (buo *BouncerUpdateOne) SetOsname(s string) *BouncerUpdateOne {
-	buo.mutation.SetOsname(s)
-	return buo
-}
-
-// SetNillableOsname sets the "osname" field if the given value is not nil.
-func (buo *BouncerUpdateOne) SetNillableOsname(s *string) *BouncerUpdateOne {
-	if s != nil {
-		buo.SetOsname(*s)
-	}
-	return buo
-}
-
-// ClearOsname clears the value of the "osname" field.
-func (buo *BouncerUpdateOne) ClearOsname() *BouncerUpdateOne {
-	buo.mutation.ClearOsname()
-	return buo
-}
-
-// SetOsversion sets the "osversion" field.
-func (buo *BouncerUpdateOne) SetOsversion(s string) *BouncerUpdateOne {
-	buo.mutation.SetOsversion(s)
-	return buo
-}
-
-// SetNillableOsversion sets the "osversion" field if the given value is not nil.
-func (buo *BouncerUpdateOne) SetNillableOsversion(s *string) *BouncerUpdateOne {
-	if s != nil {
-		buo.SetOsversion(*s)
-	}
-	return buo
-}
-
-// ClearOsversion clears the value of the "osversion" field.
-func (buo *BouncerUpdateOne) ClearOsversion() *BouncerUpdateOne {
-	buo.mutation.ClearOsversion()
-	return buo
-}
-
-// SetFeatureflags sets the "featureflags" field.
-func (buo *BouncerUpdateOne) SetFeatureflags(s string) *BouncerUpdateOne {
-	buo.mutation.SetFeatureflags(s)
-	return buo
-}
-
-// SetNillableFeatureflags sets the "featureflags" field if the given value is not nil.
-func (buo *BouncerUpdateOne) SetNillableFeatureflags(s *string) *BouncerUpdateOne {
-	if s != nil {
-		buo.SetFeatureflags(*s)
-	}
-	return buo
-}
-
-// ClearFeatureflags clears the value of the "featureflags" field.
-func (buo *BouncerUpdateOne) ClearFeatureflags() *BouncerUpdateOne {
-	buo.mutation.ClearFeatureflags()
-	return buo
-}
-
 // Mutation returns the BouncerMutation object of the builder.
 func (buo *BouncerUpdateOne) Mutation() *BouncerMutation {
 	return buo.mutation
@@ -669,7 +549,11 @@ func (buo *BouncerUpdateOne) ExecX(ctx context.Context) {
 
 // defaults sets the default values of the builder before save.
 func (buo *BouncerUpdateOne) defaults() {
-	if _, ok := buo.mutation.UpdatedAt(); !ok {
+	if _, ok := buo.mutation.CreatedAt(); !ok && !buo.mutation.CreatedAtCleared() {
+		v := bouncer.UpdateDefaultCreatedAt()
+		buo.mutation.SetCreatedAt(v)
+	}
+	if _, ok := buo.mutation.UpdatedAt(); !ok && !buo.mutation.UpdatedAtCleared() {
 		v := bouncer.UpdateDefaultUpdatedAt()
 		buo.mutation.SetUpdatedAt(v)
 	}
@@ -704,9 +588,15 @@ func (buo *BouncerUpdateOne) sqlSave(ctx context.Context) (_node *Bouncer, err e
 	if value, ok := buo.mutation.CreatedAt(); ok {
 		_spec.SetField(bouncer.FieldCreatedAt, field.TypeTime, value)
 	}
+	if buo.mutation.CreatedAtCleared() {
+		_spec.ClearField(bouncer.FieldCreatedAt, field.TypeTime)
+	}
 	if value, ok := buo.mutation.UpdatedAt(); ok {
 		_spec.SetField(bouncer.FieldUpdatedAt, field.TypeTime, value)
 	}
+	if buo.mutation.UpdatedAtCleared() {
+		_spec.ClearField(bouncer.FieldUpdatedAt, field.TypeTime)
+	}
 	if value, ok := buo.mutation.Name(); ok {
 		_spec.SetField(bouncer.FieldName, field.TypeString, value)
 	}
@@ -746,24 +636,6 @@ func (buo *BouncerUpdateOne) sqlSave(ctx context.Context) (_node *Bouncer, err e
 	if value, ok := buo.mutation.AuthType(); ok {
 		_spec.SetField(bouncer.FieldAuthType, field.TypeString, value)
 	}
-	if value, ok := buo.mutation.Osname(); ok {
-		_spec.SetField(bouncer.FieldOsname, field.TypeString, value)
-	}
-	if buo.mutation.OsnameCleared() {
-		_spec.ClearField(bouncer.FieldOsname, field.TypeString)
-	}
-	if value, ok := buo.mutation.Osversion(); ok {
-		_spec.SetField(bouncer.FieldOsversion, field.TypeString, value)
-	}
-	if buo.mutation.OsversionCleared() {
-		_spec.ClearField(bouncer.FieldOsversion, field.TypeString)
-	}
-	if value, ok := buo.mutation.Featureflags(); ok {
-		_spec.SetField(bouncer.FieldFeatureflags, field.TypeString, value)
-	}
-	if buo.mutation.FeatureflagsCleared() {
-		_spec.ClearField(bouncer.FieldFeatureflags, field.TypeString)
-	}
 	_node = &Bouncer{config: buo.config}
 	_spec.Assign = _node.assignValues
 	_spec.ScanValues = _node.scanValues
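The Clear* methods added above pair with the Cleared() guards in defaults(): a cleared timestamp is written as NULL via ClearField instead of being silently re-filled by UpdateDefault*. A hypothetical usage sketch (helper name and surrounding code assumed, not part of this changeset):

package example

import (
	"context"

	"github.com/crowdsecurity/crowdsec/pkg/database/ent"
)

// clearBouncerTimestamps calls the generated Clear* builders; on Save, sqlSave
// emits ClearField for both columns because their Cleared() flags are set.
func clearBouncerTimestamps(ctx context.Context, client *ent.Client, id int) (*ent.Bouncer, error) {
	return client.Bouncer.
		UpdateOneID(id).
		ClearCreatedAt().
		ClearUpdatedAt().
		Save(ctx)
}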
@@ -23,7 +23,6 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/database/ent/lock"
 	"github.com/crowdsecurity/crowdsec/pkg/database/ent/machine"
 	"github.com/crowdsecurity/crowdsec/pkg/database/ent/meta"
-	"github.com/crowdsecurity/crowdsec/pkg/database/ent/metric"
 )
 
 // Client is the client that holds all ent builders.
@@ -47,8 +46,6 @@ type Client struct {
 	Machine *MachineClient
 	// Meta is the client for interacting with the Meta builders.
 	Meta *MetaClient
-	// Metric is the client for interacting with the Metric builders.
-	Metric *MetricClient
 }
 
 // NewClient creates a new client configured with the given options.
@@ -68,7 +65,6 @@ func (c *Client) init() {
 	c.Lock = NewLockClient(c.config)
 	c.Machine = NewMachineClient(c.config)
 	c.Meta = NewMetaClient(c.config)
-	c.Metric = NewMetricClient(c.config)
 }
 
 type (
@@ -169,7 +165,6 @@ func (c *Client) Tx(ctx context.Context) (*Tx, error) {
 		Lock: NewLockClient(cfg),
 		Machine: NewMachineClient(cfg),
 		Meta: NewMetaClient(cfg),
-		Metric: NewMetricClient(cfg),
 	}, nil
 }
 
@@ -197,7 +192,6 @@ func (c *Client) BeginTx(ctx context.Context, opts *sql.TxOptions) (*Tx, error)
 		Lock: NewLockClient(cfg),
 		Machine: NewMachineClient(cfg),
 		Meta: NewMetaClient(cfg),
-		Metric: NewMetricClient(cfg),
 	}, nil
 }
 
@@ -228,7 +222,7 @@ func (c *Client) Close() error {
 func (c *Client) Use(hooks ...Hook) {
 	for _, n := range []interface{ Use(...Hook) }{
 		c.Alert, c.Bouncer, c.ConfigItem, c.Decision, c.Event, c.Lock, c.Machine,
-		c.Meta, c.Metric,
+		c.Meta,
 	} {
 		n.Use(hooks...)
 	}
@@ -239,7 +233,7 @@ func (c *Client) Use(hooks ...Hook) {
 func (c *Client) Intercept(interceptors ...Interceptor) {
 	for _, n := range []interface{ Intercept(...Interceptor) }{
 		c.Alert, c.Bouncer, c.ConfigItem, c.Decision, c.Event, c.Lock, c.Machine,
-		c.Meta, c.Metric,
+		c.Meta,
 	} {
 		n.Intercept(interceptors...)
 	}
@@ -264,8 +258,6 @@ func (c *Client) Mutate(ctx context.Context, m Mutation) (Value, error) {
 		return c.Machine.mutate(ctx, m)
 	case *MetaMutation:
 		return c.Meta.mutate(ctx, m)
-	case *MetricMutation:
-		return c.Metric.mutate(ctx, m)
 	default:
 		return nil, fmt.Errorf("ent: unknown mutation type %T", m)
 	}
@@ -1463,147 +1455,13 @@ func (c *MetaClient) mutate(ctx context.Context, m *MetaMutation) (Value, error)
 	}
 }
 
-// MetricClient is a client for the Metric schema.
-type MetricClient struct {
-	config
-}
-
-// NewMetricClient returns a client for the Metric from the given config.
-func NewMetricClient(c config) *MetricClient {
-	return &MetricClient{config: c}
-}
-
-// Use adds a list of mutation hooks to the hooks stack.
-// A call to `Use(f, g, h)` equals to `metric.Hooks(f(g(h())))`.
-func (c *MetricClient) Use(hooks ...Hook) {
-	c.hooks.Metric = append(c.hooks.Metric, hooks...)
-}
-
-// Intercept adds a list of query interceptors to the interceptors stack.
-// A call to `Intercept(f, g, h)` equals to `metric.Intercept(f(g(h())))`.
-func (c *MetricClient) Intercept(interceptors ...Interceptor) {
-	c.inters.Metric = append(c.inters.Metric, interceptors...)
-}
-
-// Create returns a builder for creating a Metric entity.
-func (c *MetricClient) Create() *MetricCreate {
-	mutation := newMetricMutation(c.config, OpCreate)
-	return &MetricCreate{config: c.config, hooks: c.Hooks(), mutation: mutation}
-}
-
-// CreateBulk returns a builder for creating a bulk of Metric entities.
-func (c *MetricClient) CreateBulk(builders ...*MetricCreate) *MetricCreateBulk {
-	return &MetricCreateBulk{config: c.config, builders: builders}
-}
-
-// MapCreateBulk creates a bulk creation builder from the given slice. For each item in the slice, the function creates
-// a builder and applies setFunc on it.
-func (c *MetricClient) MapCreateBulk(slice any, setFunc func(*MetricCreate, int)) *MetricCreateBulk {
-	rv := reflect.ValueOf(slice)
-	if rv.Kind() != reflect.Slice {
-		return &MetricCreateBulk{err: fmt.Errorf("calling to MetricClient.MapCreateBulk with wrong type %T, need slice", slice)}
-	}
-	builders := make([]*MetricCreate, rv.Len())
-	for i := 0; i < rv.Len(); i++ {
-		builders[i] = c.Create()
-		setFunc(builders[i], i)
-	}
-	return &MetricCreateBulk{config: c.config, builders: builders}
-}
-
-// Update returns an update builder for Metric.
-func (c *MetricClient) Update() *MetricUpdate {
-	mutation := newMetricMutation(c.config, OpUpdate)
-	return &MetricUpdate{config: c.config, hooks: c.Hooks(), mutation: mutation}
-}
-
-// UpdateOne returns an update builder for the given entity.
-func (c *MetricClient) UpdateOne(m *Metric) *MetricUpdateOne {
-	mutation := newMetricMutation(c.config, OpUpdateOne, withMetric(m))
-	return &MetricUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
-}
-
-// UpdateOneID returns an update builder for the given id.
-func (c *MetricClient) UpdateOneID(id int) *MetricUpdateOne {
-	mutation := newMetricMutation(c.config, OpUpdateOne, withMetricID(id))
-	return &MetricUpdateOne{config: c.config, hooks: c.Hooks(), mutation: mutation}
-}
-
-// Delete returns a delete builder for Metric.
-func (c *MetricClient) Delete() *MetricDelete {
-	mutation := newMetricMutation(c.config, OpDelete)
-	return &MetricDelete{config: c.config, hooks: c.Hooks(), mutation: mutation}
-}
-
-// DeleteOne returns a builder for deleting the given entity.
-func (c *MetricClient) DeleteOne(m *Metric) *MetricDeleteOne {
-	return c.DeleteOneID(m.ID)
-}
-
-// DeleteOneID returns a builder for deleting the given entity by its id.
-func (c *MetricClient) DeleteOneID(id int) *MetricDeleteOne {
-	builder := c.Delete().Where(metric.ID(id))
-	builder.mutation.id = &id
-	builder.mutation.op = OpDeleteOne
-	return &MetricDeleteOne{builder}
-}
-
-// Query returns a query builder for Metric.
-func (c *MetricClient) Query() *MetricQuery {
-	return &MetricQuery{
-		config: c.config,
-		ctx: &QueryContext{Type: TypeMetric},
-		inters: c.Interceptors(),
-	}
-}
-
-// Get returns a Metric entity by its id.
-func (c *MetricClient) Get(ctx context.Context, id int) (*Metric, error) {
-	return c.Query().Where(metric.ID(id)).Only(ctx)
-}
-
-// GetX is like Get, but panics if an error occurs.
-func (c *MetricClient) GetX(ctx context.Context, id int) *Metric {
-	obj, err := c.Get(ctx, id)
-	if err != nil {
-		panic(err)
-	}
-	return obj
-}
-
-// Hooks returns the client hooks.
-func (c *MetricClient) Hooks() []Hook {
-	return c.hooks.Metric
-}
-
-// Interceptors returns the client interceptors.
-func (c *MetricClient) Interceptors() []Interceptor {
-	return c.inters.Metric
-}
-
-func (c *MetricClient) mutate(ctx context.Context, m *MetricMutation) (Value, error) {
-	switch m.Op() {
-	case OpCreate:
-		return (&MetricCreate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
-	case OpUpdate:
-		return (&MetricUpdate{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
-	case OpUpdateOne:
-		return (&MetricUpdateOne{config: c.config, hooks: c.Hooks(), mutation: m}).Save(ctx)
-	case OpDelete, OpDeleteOne:
-		return (&MetricDelete{config: c.config, hooks: c.Hooks(), mutation: m}).Exec(ctx)
-	default:
-		return nil, fmt.Errorf("ent: unknown Metric mutation op: %q", m.Op())
-	}
-}
-
 // hooks and interceptors per client, for fast access.
 type (
 	hooks struct {
-		Alert, Bouncer, ConfigItem, Decision, Event, Lock, Machine, Meta,
-		Metric []ent.Hook
+		Alert, Bouncer, ConfigItem, Decision, Event, Lock, Machine, Meta []ent.Hook
 	}
 	inters struct {
-		Alert, Bouncer, ConfigItem, Decision, Event, Lock, Machine, Meta,
-		Metric []ent.Interceptor
+		Alert, Bouncer, ConfigItem, Decision, Event, Lock, Machine,
+		Meta []ent.Interceptor
 	}
 )
@@ -18,9 +18,9 @@ type ConfigItem struct {
 	// ID of the ent.
 	ID int `json:"id,omitempty"`
 	// CreatedAt holds the value of the "created_at" field.
-	CreatedAt time.Time `json:"created_at"`
+	CreatedAt *time.Time `json:"created_at"`
 	// UpdatedAt holds the value of the "updated_at" field.
-	UpdatedAt time.Time `json:"updated_at"`
+	UpdatedAt *time.Time `json:"updated_at"`
 	// Name holds the value of the "name" field.
 	Name string `json:"name"`
 	// Value holds the value of the "value" field.
@@ -64,13 +64,15 @@ func (ci *ConfigItem) assignValues(columns []string, values []any) error {
 			if value, ok := values[i].(*sql.NullTime); !ok {
 				return fmt.Errorf("unexpected type %T for field created_at", values[i])
 			} else if value.Valid {
-				ci.CreatedAt = value.Time
+				ci.CreatedAt = new(time.Time)
+				*ci.CreatedAt = value.Time
 			}
 		case configitem.FieldUpdatedAt:
 			if value, ok := values[i].(*sql.NullTime); !ok {
 				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
 			} else if value.Valid {
-				ci.UpdatedAt = value.Time
+				ci.UpdatedAt = new(time.Time)
+				*ci.UpdatedAt = value.Time
 			}
 		case configitem.FieldName:
 			if value, ok := values[i].(*sql.NullString); !ok {
@@ -120,11 +122,15 @@ func (ci *ConfigItem) String() string {
 	var builder strings.Builder
 	builder.WriteString("ConfigItem(")
 	builder.WriteString(fmt.Sprintf("id=%v, ", ci.ID))
-	builder.WriteString("created_at=")
-	builder.WriteString(ci.CreatedAt.Format(time.ANSIC))
+	if v := ci.CreatedAt; v != nil {
+		builder.WriteString("created_at=")
+		builder.WriteString(v.Format(time.ANSIC))
+	}
 	builder.WriteString(", ")
-	builder.WriteString("updated_at=")
-	builder.WriteString(ci.UpdatedAt.Format(time.ANSIC))
+	if v := ci.UpdatedAt; v != nil {
+		builder.WriteString("updated_at=")
+		builder.WriteString(v.Format(time.ANSIC))
+	}
 	builder.WriteString(", ")
 	builder.WriteString("name=")
 	builder.WriteString(ci.Name)
@@ -47,6 +47,8 @@ func ValidColumn(column string) bool {
 var (
 	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
 	DefaultCreatedAt func() time.Time
+	// UpdateDefaultCreatedAt holds the default value on update for the "created_at" field.
+	UpdateDefaultCreatedAt func() time.Time
 	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
 	DefaultUpdatedAt func() time.Time
 	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
@@ -114,6 +114,16 @@ func CreatedAtLTE(v time.Time) predicate.ConfigItem {
 	return predicate.ConfigItem(sql.FieldLTE(FieldCreatedAt, v))
 }
 
+// CreatedAtIsNil applies the IsNil predicate on the "created_at" field.
+func CreatedAtIsNil() predicate.ConfigItem {
+	return predicate.ConfigItem(sql.FieldIsNull(FieldCreatedAt))
+}
+
+// CreatedAtNotNil applies the NotNil predicate on the "created_at" field.
+func CreatedAtNotNil() predicate.ConfigItem {
+	return predicate.ConfigItem(sql.FieldNotNull(FieldCreatedAt))
+}
+
 // UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
 func UpdatedAtEQ(v time.Time) predicate.ConfigItem {
 	return predicate.ConfigItem(sql.FieldEQ(FieldUpdatedAt, v))
@@ -154,6 +164,16 @@ func UpdatedAtLTE(v time.Time) predicate.ConfigItem {
 	return predicate.ConfigItem(sql.FieldLTE(FieldUpdatedAt, v))
 }
 
+// UpdatedAtIsNil applies the IsNil predicate on the "updated_at" field.
+func UpdatedAtIsNil() predicate.ConfigItem {
+	return predicate.ConfigItem(sql.FieldIsNull(FieldUpdatedAt))
+}
+
+// UpdatedAtNotNil applies the NotNil predicate on the "updated_at" field.
+func UpdatedAtNotNil() predicate.ConfigItem {
+	return predicate.ConfigItem(sql.FieldNotNull(FieldUpdatedAt))
+}
+
 // NameEQ applies the EQ predicate on the "name" field.
 func NameEQ(v string) predicate.ConfigItem {
 	return predicate.ConfigItem(sql.FieldEQ(FieldName, v))
@@ -107,12 +107,6 @@ func (cic *ConfigItemCreate) defaults() {
 
 // check runs all checks and user-defined validators on the builder.
 func (cic *ConfigItemCreate) check() error {
-	if _, ok := cic.mutation.CreatedAt(); !ok {
-		return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "ConfigItem.created_at"`)}
-	}
-	if _, ok := cic.mutation.UpdatedAt(); !ok {
-		return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "ConfigItem.updated_at"`)}
-	}
 	if _, ok := cic.mutation.Name(); !ok {
 		return &ValidationError{Name: "name", err: errors.New(`ent: missing required field "ConfigItem.name"`)}
 	}
@@ -147,11 +141,11 @@ func (cic *ConfigItemCreate) createSpec() (*ConfigItem, *sqlgraph.CreateSpec) {
 	)
 	if value, ok := cic.mutation.CreatedAt(); ok {
 		_spec.SetField(configitem.FieldCreatedAt, field.TypeTime, value)
-		_node.CreatedAt = value
+		_node.CreatedAt = &value
 	}
 	if value, ok := cic.mutation.UpdatedAt(); ok {
 		_spec.SetField(configitem.FieldUpdatedAt, field.TypeTime, value)
-		_node.UpdatedAt = value
+		_node.UpdatedAt = &value
 	}
 	if value, ok := cic.mutation.Name(); ok {
 		_spec.SetField(configitem.FieldName, field.TypeString, value)
@ -28,12 +28,30 @@ func (ciu *ConfigItemUpdate) Where(ps ...predicate.ConfigItem) *ConfigItemUpdate
|
||||||
return ciu
|
return ciu
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// SetCreatedAt sets the "created_at" field.
|
||||||
|
func (ciu *ConfigItemUpdate) SetCreatedAt(t time.Time) *ConfigItemUpdate {
|
||||||
|
ciu.mutation.SetCreatedAt(t)
|
||||||
|
return ciu
|
||||||
|
}
|
||||||
|
|
||||||
|
// ClearCreatedAt clears the value of the "created_at" field.
|
||||||
|
func (ciu *ConfigItemUpdate) ClearCreatedAt() *ConfigItemUpdate {
|
||||||
|
ciu.mutation.ClearCreatedAt()
|
||||||
|
return ciu
|
||||||
|
}
|
||||||
|
|
||||||
// SetUpdatedAt sets the "updated_at" field.
|
// SetUpdatedAt sets the "updated_at" field.
|
||||||
func (ciu *ConfigItemUpdate) SetUpdatedAt(t time.Time) *ConfigItemUpdate {
|
func (ciu *ConfigItemUpdate) SetUpdatedAt(t time.Time) *ConfigItemUpdate {
|
||||||
ciu.mutation.SetUpdatedAt(t)
|
ciu.mutation.SetUpdatedAt(t)
|
||||||
return ciu
|
return ciu
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ClearUpdatedAt clears the value of the "updated_at" field.
|
||||||
|
func (ciu *ConfigItemUpdate) ClearUpdatedAt() *ConfigItemUpdate {
|
||||||
|
ciu.mutation.ClearUpdatedAt()
|
||||||
|
return ciu
|
||||||
|
}
|
||||||
|
|
||||||
 // SetName sets the "name" field.
 func (ciu *ConfigItemUpdate) SetName(s string) *ConfigItemUpdate {
 	ciu.mutation.SetName(s)
@@ -97,7 +115,11 @@ func (ciu *ConfigItemUpdate) ExecX(ctx context.Context) {
 
 // defaults sets the default values of the builder before save.
 func (ciu *ConfigItemUpdate) defaults() {
-	if _, ok := ciu.mutation.UpdatedAt(); !ok {
+	if _, ok := ciu.mutation.CreatedAt(); !ok && !ciu.mutation.CreatedAtCleared() {
+		v := configitem.UpdateDefaultCreatedAt()
+		ciu.mutation.SetCreatedAt(v)
+	}
+	if _, ok := ciu.mutation.UpdatedAt(); !ok && !ciu.mutation.UpdatedAtCleared() {
 		v := configitem.UpdateDefaultUpdatedAt()
 		ciu.mutation.SetUpdatedAt(v)
 	}
@@ -112,9 +134,18 @@ func (ciu *ConfigItemUpdate) sqlSave(ctx context.Context) (n int, err error) {
 			}
 		}
 	}
+	if value, ok := ciu.mutation.CreatedAt(); ok {
+		_spec.SetField(configitem.FieldCreatedAt, field.TypeTime, value)
+	}
+	if ciu.mutation.CreatedAtCleared() {
+		_spec.ClearField(configitem.FieldCreatedAt, field.TypeTime)
+	}
 	if value, ok := ciu.mutation.UpdatedAt(); ok {
 		_spec.SetField(configitem.FieldUpdatedAt, field.TypeTime, value)
 	}
+	if ciu.mutation.UpdatedAtCleared() {
+		_spec.ClearField(configitem.FieldUpdatedAt, field.TypeTime)
+	}
 	if value, ok := ciu.mutation.Name(); ok {
 		_spec.SetField(configitem.FieldName, field.TypeString, value)
 	}
@@ -141,12 +172,30 @@ type ConfigItemUpdateOne struct {
 	mutation *ConfigItemMutation
 }
 
+// SetCreatedAt sets the "created_at" field.
+func (ciuo *ConfigItemUpdateOne) SetCreatedAt(t time.Time) *ConfigItemUpdateOne {
+	ciuo.mutation.SetCreatedAt(t)
+	return ciuo
+}
+
+// ClearCreatedAt clears the value of the "created_at" field.
+func (ciuo *ConfigItemUpdateOne) ClearCreatedAt() *ConfigItemUpdateOne {
+	ciuo.mutation.ClearCreatedAt()
+	return ciuo
+}
+
 // SetUpdatedAt sets the "updated_at" field.
 func (ciuo *ConfigItemUpdateOne) SetUpdatedAt(t time.Time) *ConfigItemUpdateOne {
 	ciuo.mutation.SetUpdatedAt(t)
 	return ciuo
 }
 
+// ClearUpdatedAt clears the value of the "updated_at" field.
+func (ciuo *ConfigItemUpdateOne) ClearUpdatedAt() *ConfigItemUpdateOne {
+	ciuo.mutation.ClearUpdatedAt()
+	return ciuo
+}
+
 // SetName sets the "name" field.
 func (ciuo *ConfigItemUpdateOne) SetName(s string) *ConfigItemUpdateOne {
 	ciuo.mutation.SetName(s)
@@ -223,7 +272,11 @@ func (ciuo *ConfigItemUpdateOne) ExecX(ctx context.Context) {
 
 // defaults sets the default values of the builder before save.
 func (ciuo *ConfigItemUpdateOne) defaults() {
-	if _, ok := ciuo.mutation.UpdatedAt(); !ok {
+	if _, ok := ciuo.mutation.CreatedAt(); !ok && !ciuo.mutation.CreatedAtCleared() {
+		v := configitem.UpdateDefaultCreatedAt()
+		ciuo.mutation.SetCreatedAt(v)
+	}
+	if _, ok := ciuo.mutation.UpdatedAt(); !ok && !ciuo.mutation.UpdatedAtCleared() {
 		v := configitem.UpdateDefaultUpdatedAt()
 		ciuo.mutation.SetUpdatedAt(v)
 	}
@@ -255,9 +308,18 @@ func (ciuo *ConfigItemUpdateOne) sqlSave(ctx context.Context) (_node *ConfigItem
 			}
 		}
 	}
+	if value, ok := ciuo.mutation.CreatedAt(); ok {
+		_spec.SetField(configitem.FieldCreatedAt, field.TypeTime, value)
+	}
+	if ciuo.mutation.CreatedAtCleared() {
+		_spec.ClearField(configitem.FieldCreatedAt, field.TypeTime)
+	}
 	if value, ok := ciuo.mutation.UpdatedAt(); ok {
 		_spec.SetField(configitem.FieldUpdatedAt, field.TypeTime, value)
 	}
+	if ciuo.mutation.UpdatedAtCleared() {
+		_spec.ClearField(configitem.FieldUpdatedAt, field.TypeTime)
+	}
 	if value, ok := ciuo.mutation.Name(); ok {
 		_spec.SetField(configitem.FieldName, field.TypeString, value)
 	}
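Note: the hunks above are ent-generated builders, so they track a schema change rather than hand edits. A minimal schema sketch that would yield this shape of output is shown below; the field set and the defaults are assumptions for illustration (only the fields visible in this diff are sketched, and the actual crowdsec schema files are not part of this diff).

package schema

import (
	"time"

	"entgo.io/ent"
	"entgo.io/ent/schema/field"
)

// ConfigItem sketches an ent schema whose timestamp fields are declared
// Optional().Nillable() with an UpdateDefault. That combination is what makes
// ent generate *time.Time struct fields, Clear* builder methods, *Cleared()
// mutation checks and IsNil/NotNil predicates like those in the hunks above.
type ConfigItem struct {
	ent.Schema
}

func (ConfigItem) Fields() []ent.Field {
	return []ent.Field{
		field.Time("created_at").
			Default(time.Now).       // placeholder default; the real schema may use its own helper
			UpdateDefault(time.Now). // surfaces as configitem.UpdateDefaultCreatedAt
			Optional().
			Nillable(),
		field.Time("updated_at").
			Default(time.Now).
			UpdateDefault(time.Now).
			Optional().
			Nillable(),
		field.String("name"),
	}
}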
@@ -19,9 +19,9 @@ type Decision struct {
 	// ID of the ent.
 	ID int `json:"id,omitempty"`
 	// CreatedAt holds the value of the "created_at" field.
-	CreatedAt time.Time `json:"created_at,omitempty"`
+	CreatedAt *time.Time `json:"created_at,omitempty"`
 	// UpdatedAt holds the value of the "updated_at" field.
-	UpdatedAt time.Time `json:"updated_at,omitempty"`
+	UpdatedAt *time.Time `json:"updated_at,omitempty"`
 	// Until holds the value of the "until" field.
 	Until *time.Time `json:"until,omitempty"`
 	// Scenario holds the value of the "scenario" field.
@@ -116,13 +116,15 @@ func (d *Decision) assignValues(columns []string, values []any) error {
 			if value, ok := values[i].(*sql.NullTime); !ok {
 				return fmt.Errorf("unexpected type %T for field created_at", values[i])
 			} else if value.Valid {
-				d.CreatedAt = value.Time
+				d.CreatedAt = new(time.Time)
+				*d.CreatedAt = value.Time
 			}
 		case decision.FieldUpdatedAt:
 			if value, ok := values[i].(*sql.NullTime); !ok {
 				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
 			} else if value.Valid {
-				d.UpdatedAt = value.Time
+				d.UpdatedAt = new(time.Time)
+				*d.UpdatedAt = value.Time
 			}
 		case decision.FieldUntil:
 			if value, ok := values[i].(*sql.NullTime); !ok {
@@ -250,11 +252,15 @@ func (d *Decision) String() string {
 	var builder strings.Builder
 	builder.WriteString("Decision(")
 	builder.WriteString(fmt.Sprintf("id=%v, ", d.ID))
-	builder.WriteString("created_at=")
-	builder.WriteString(d.CreatedAt.Format(time.ANSIC))
+	if v := d.CreatedAt; v != nil {
+		builder.WriteString("created_at=")
+		builder.WriteString(v.Format(time.ANSIC))
+	}
 	builder.WriteString(", ")
-	builder.WriteString("updated_at=")
-	builder.WriteString(d.UpdatedAt.Format(time.ANSIC))
+	if v := d.UpdatedAt; v != nil {
+		builder.WriteString("updated_at=")
+		builder.WriteString(v.Format(time.ANSIC))
+	}
 	builder.WriteString(", ")
 	if v := d.Until; v != nil {
 		builder.WriteString("until=")
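Because CreatedAt and UpdatedAt are now *time.Time on the entity, callers that previously read them directly need a nil check. A short illustrative sketch (hypothetical helper, not part of the diff):

package example

import (
	"fmt"
	"time"

	"github.com/crowdsecurity/crowdsec/pkg/database/ent"
)

// describeDecision shows the nil check required now that the timestamps
// are pointers; a NULL column simply leaves the field nil.
func describeDecision(d *ent.Decision) string {
	if d.CreatedAt == nil {
		return fmt.Sprintf("decision %d: created_at is not set", d.ID)
	}
	return fmt.Sprintf("decision %d: created at %s", d.ID, d.CreatedAt.Format(time.RFC3339))
}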
@@ -93,6 +93,8 @@ func ValidColumn(column string) bool {
 var (
 	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
 	DefaultCreatedAt func() time.Time
+	// UpdateDefaultCreatedAt holds the default value on update for the "created_at" field.
+	UpdateDefaultCreatedAt func() time.Time
 	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
 	DefaultUpdatedAt func() time.Time
 	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
@@ -175,6 +175,16 @@ func CreatedAtLTE(v time.Time) predicate.Decision {
 	return predicate.Decision(sql.FieldLTE(FieldCreatedAt, v))
 }
 
+// CreatedAtIsNil applies the IsNil predicate on the "created_at" field.
+func CreatedAtIsNil() predicate.Decision {
+	return predicate.Decision(sql.FieldIsNull(FieldCreatedAt))
+}
+
+// CreatedAtNotNil applies the NotNil predicate on the "created_at" field.
+func CreatedAtNotNil() predicate.Decision {
+	return predicate.Decision(sql.FieldNotNull(FieldCreatedAt))
+}
+
 // UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
 func UpdatedAtEQ(v time.Time) predicate.Decision {
 	return predicate.Decision(sql.FieldEQ(FieldUpdatedAt, v))
@@ -215,6 +225,16 @@ func UpdatedAtLTE(v time.Time) predicate.Decision {
 	return predicate.Decision(sql.FieldLTE(FieldUpdatedAt, v))
 }
 
+// UpdatedAtIsNil applies the IsNil predicate on the "updated_at" field.
+func UpdatedAtIsNil() predicate.Decision {
+	return predicate.Decision(sql.FieldIsNull(FieldUpdatedAt))
+}
+
+// UpdatedAtNotNil applies the NotNil predicate on the "updated_at" field.
+func UpdatedAtNotNil() predicate.Decision {
+	return predicate.Decision(sql.FieldNotNull(FieldUpdatedAt))
+}
+
 // UntilEQ applies the EQ predicate on the "until" field.
 func UntilEQ(v time.Time) predicate.Decision {
 	return predicate.Decision(sql.FieldEQ(FieldUntil, v))
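The new IsNil/NotNil predicates make NULL timestamps queryable. A hedged usage sketch against the generated client (client construction, context handling and function name are assumptions; the generated subpackage import path follows ent's usual layout under pkg/database/ent):

package example

import (
	"context"

	"github.com/crowdsecurity/crowdsec/pkg/database/ent"
	"github.com/crowdsecurity/crowdsec/pkg/database/ent/decision"
)

// staleDecisions returns decisions whose updated_at column is NULL,
// using the UpdatedAtIsNil predicate added in this diff.
func staleDecisions(ctx context.Context, client *ent.Client) ([]*ent.Decision, error) {
	return client.Decision.Query().
		Where(decision.UpdatedAtIsNil()).
		All(ctx)
}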
@@ -275,12 +275,6 @@ func (dc *DecisionCreate) defaults() {
 
 // check runs all checks and user-defined validators on the builder.
 func (dc *DecisionCreate) check() error {
-	if _, ok := dc.mutation.CreatedAt(); !ok {
-		return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Decision.created_at"`)}
-	}
-	if _, ok := dc.mutation.UpdatedAt(); !ok {
-		return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Decision.updated_at"`)}
-	}
 	if _, ok := dc.mutation.Scenario(); !ok {
 		return &ValidationError{Name: "scenario", err: errors.New(`ent: missing required field "Decision.scenario"`)}
 	}
@@ -327,11 +321,11 @@ func (dc *DecisionCreate) createSpec() (*Decision, *sqlgraph.CreateSpec) {
 	)
 	if value, ok := dc.mutation.CreatedAt(); ok {
 		_spec.SetField(decision.FieldCreatedAt, field.TypeTime, value)
-		_node.CreatedAt = value
+		_node.CreatedAt = &value
 	}
 	if value, ok := dc.mutation.UpdatedAt(); ok {
 		_spec.SetField(decision.FieldUpdatedAt, field.TypeTime, value)
-		_node.UpdatedAt = value
+		_node.UpdatedAt = &value
 	}
 	if value, ok := dc.mutation.Until(); ok {
 		_spec.SetField(decision.FieldUntil, field.TypeTime, value)
@@ -29,12 +29,30 @@ func (du *DecisionUpdate) Where(ps ...predicate.Decision) *DecisionUpdate {
 	return du
 }
 
+// SetCreatedAt sets the "created_at" field.
+func (du *DecisionUpdate) SetCreatedAt(t time.Time) *DecisionUpdate {
+	du.mutation.SetCreatedAt(t)
+	return du
+}
+
+// ClearCreatedAt clears the value of the "created_at" field.
+func (du *DecisionUpdate) ClearCreatedAt() *DecisionUpdate {
+	du.mutation.ClearCreatedAt()
+	return du
+}
+
 // SetUpdatedAt sets the "updated_at" field.
 func (du *DecisionUpdate) SetUpdatedAt(t time.Time) *DecisionUpdate {
 	du.mutation.SetUpdatedAt(t)
 	return du
 }
 
+// ClearUpdatedAt clears the value of the "updated_at" field.
+func (du *DecisionUpdate) ClearUpdatedAt() *DecisionUpdate {
+	du.mutation.ClearUpdatedAt()
+	return du
+}
+
 // SetUntil sets the "until" field.
 func (du *DecisionUpdate) SetUntil(t time.Time) *DecisionUpdate {
 	du.mutation.SetUntil(t)
@@ -374,7 +392,11 @@ func (du *DecisionUpdate) ExecX(ctx context.Context) {
 
 // defaults sets the default values of the builder before save.
 func (du *DecisionUpdate) defaults() {
-	if _, ok := du.mutation.UpdatedAt(); !ok {
+	if _, ok := du.mutation.CreatedAt(); !ok && !du.mutation.CreatedAtCleared() {
+		v := decision.UpdateDefaultCreatedAt()
+		du.mutation.SetCreatedAt(v)
+	}
+	if _, ok := du.mutation.UpdatedAt(); !ok && !du.mutation.UpdatedAtCleared() {
 		v := decision.UpdateDefaultUpdatedAt()
 		du.mutation.SetUpdatedAt(v)
 	}
@@ -389,9 +411,18 @@ func (du *DecisionUpdate) sqlSave(ctx context.Context) (n int, err error) {
 			}
 		}
 	}
+	if value, ok := du.mutation.CreatedAt(); ok {
+		_spec.SetField(decision.FieldCreatedAt, field.TypeTime, value)
+	}
+	if du.mutation.CreatedAtCleared() {
+		_spec.ClearField(decision.FieldCreatedAt, field.TypeTime)
+	}
 	if value, ok := du.mutation.UpdatedAt(); ok {
 		_spec.SetField(decision.FieldUpdatedAt, field.TypeTime, value)
 	}
+	if du.mutation.UpdatedAtCleared() {
+		_spec.ClearField(decision.FieldUpdatedAt, field.TypeTime)
+	}
 	if value, ok := du.mutation.Until(); ok {
 		_spec.SetField(decision.FieldUntil, field.TypeTime, value)
 	}
@@ -516,12 +547,30 @@ type DecisionUpdateOne struct {
 	mutation *DecisionMutation
 }
 
+// SetCreatedAt sets the "created_at" field.
+func (duo *DecisionUpdateOne) SetCreatedAt(t time.Time) *DecisionUpdateOne {
+	duo.mutation.SetCreatedAt(t)
+	return duo
+}
+
+// ClearCreatedAt clears the value of the "created_at" field.
+func (duo *DecisionUpdateOne) ClearCreatedAt() *DecisionUpdateOne {
+	duo.mutation.ClearCreatedAt()
+	return duo
+}
+
 // SetUpdatedAt sets the "updated_at" field.
 func (duo *DecisionUpdateOne) SetUpdatedAt(t time.Time) *DecisionUpdateOne {
 	duo.mutation.SetUpdatedAt(t)
 	return duo
 }
 
+// ClearUpdatedAt clears the value of the "updated_at" field.
+func (duo *DecisionUpdateOne) ClearUpdatedAt() *DecisionUpdateOne {
+	duo.mutation.ClearUpdatedAt()
+	return duo
+}
+
 // SetUntil sets the "until" field.
 func (duo *DecisionUpdateOne) SetUntil(t time.Time) *DecisionUpdateOne {
 	duo.mutation.SetUntil(t)
@@ -874,7 +923,11 @@ func (duo *DecisionUpdateOne) ExecX(ctx context.Context) {
 
 // defaults sets the default values of the builder before save.
 func (duo *DecisionUpdateOne) defaults() {
-	if _, ok := duo.mutation.UpdatedAt(); !ok {
+	if _, ok := duo.mutation.CreatedAt(); !ok && !duo.mutation.CreatedAtCleared() {
+		v := decision.UpdateDefaultCreatedAt()
+		duo.mutation.SetCreatedAt(v)
+	}
+	if _, ok := duo.mutation.UpdatedAt(); !ok && !duo.mutation.UpdatedAtCleared() {
 		v := decision.UpdateDefaultUpdatedAt()
 		duo.mutation.SetUpdatedAt(v)
 	}
@@ -906,9 +959,18 @@ func (duo *DecisionUpdateOne) sqlSave(ctx context.Context) (_node *Decision, err
 			}
 		}
 	}
+	if value, ok := duo.mutation.CreatedAt(); ok {
+		_spec.SetField(decision.FieldCreatedAt, field.TypeTime, value)
+	}
+	if duo.mutation.CreatedAtCleared() {
+		_spec.ClearField(decision.FieldCreatedAt, field.TypeTime)
+	}
 	if value, ok := duo.mutation.UpdatedAt(); ok {
 		_spec.SetField(decision.FieldUpdatedAt, field.TypeTime, value)
 	}
+	if duo.mutation.UpdatedAtCleared() {
+		_spec.ClearField(decision.FieldUpdatedAt, field.TypeTime)
+	}
 	if value, ok := duo.mutation.Until(); ok {
 		_spec.SetField(decision.FieldUntil, field.TypeTime, value)
 	}
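With the Clear* methods in place, an update can explicitly null a timestamp instead of always overwriting it, and defaults() now skips the automatic UpdateDefault value when the field was cleared. A sketch of how that might be used (assumed helper name and an already-open *ent.Client):

package example

import (
	"context"

	"github.com/crowdsecurity/crowdsec/pkg/database/ent"
)

// clearDecisionTimestamps nulls created_at and updated_at on one decision.
// Because defaults() checks CreatedAtCleared()/UpdatedAtCleared(), the
// cleared values are not silently re-filled with UpdateDefault*.
func clearDecisionTimestamps(ctx context.Context, client *ent.Client, id int) error {
	return client.Decision.UpdateOneID(id).
		ClearCreatedAt().
		ClearUpdatedAt().
		Exec(ctx)
}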
@@ -20,7 +20,6 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/database/ent/lock"
 	"github.com/crowdsecurity/crowdsec/pkg/database/ent/machine"
 	"github.com/crowdsecurity/crowdsec/pkg/database/ent/meta"
-	"github.com/crowdsecurity/crowdsec/pkg/database/ent/metric"
 )
 
 // ent aliases to avoid import conflicts in user's code.
@@ -89,7 +88,6 @@ func checkColumn(table, column string) error {
 			lock.Table:    lock.ValidColumn,
 			machine.Table: machine.ValidColumn,
 			meta.Table:    meta.ValidColumn,
-			metric.Table:  metric.ValidColumn,
 		})
 	})
 	return columnCheck(table, column)
@@ -19,9 +19,9 @@ type Event struct {
 	// ID of the ent.
 	ID int `json:"id,omitempty"`
 	// CreatedAt holds the value of the "created_at" field.
-	CreatedAt time.Time `json:"created_at,omitempty"`
+	CreatedAt *time.Time `json:"created_at,omitempty"`
 	// UpdatedAt holds the value of the "updated_at" field.
-	UpdatedAt time.Time `json:"updated_at,omitempty"`
+	UpdatedAt *time.Time `json:"updated_at,omitempty"`
 	// Time holds the value of the "time" field.
 	Time time.Time `json:"time,omitempty"`
 	// Serialized holds the value of the "serialized" field.
@@ -92,13 +92,15 @@ func (e *Event) assignValues(columns []string, values []any) error {
 			if value, ok := values[i].(*sql.NullTime); !ok {
 				return fmt.Errorf("unexpected type %T for field created_at", values[i])
 			} else if value.Valid {
-				e.CreatedAt = value.Time
+				e.CreatedAt = new(time.Time)
+				*e.CreatedAt = value.Time
 			}
 		case event.FieldUpdatedAt:
 			if value, ok := values[i].(*sql.NullTime); !ok {
 				return fmt.Errorf("unexpected type %T for field updated_at", values[i])
 			} else if value.Valid {
-				e.UpdatedAt = value.Time
+				e.UpdatedAt = new(time.Time)
+				*e.UpdatedAt = value.Time
 			}
 		case event.FieldTime:
 			if value, ok := values[i].(*sql.NullTime); !ok {
@@ -159,11 +161,15 @@ func (e *Event) String() string {
 	var builder strings.Builder
 	builder.WriteString("Event(")
 	builder.WriteString(fmt.Sprintf("id=%v, ", e.ID))
-	builder.WriteString("created_at=")
-	builder.WriteString(e.CreatedAt.Format(time.ANSIC))
+	if v := e.CreatedAt; v != nil {
+		builder.WriteString("created_at=")
+		builder.WriteString(v.Format(time.ANSIC))
+	}
 	builder.WriteString(", ")
-	builder.WriteString("updated_at=")
-	builder.WriteString(e.UpdatedAt.Format(time.ANSIC))
+	if v := e.UpdatedAt; v != nil {
+		builder.WriteString("updated_at=")
+		builder.WriteString(v.Format(time.ANSIC))
+	}
 	builder.WriteString(", ")
 	builder.WriteString("time=")
 	builder.WriteString(e.Time.Format(time.ANSIC))
@@ -60,6 +60,8 @@ func ValidColumn(column string) bool {
 var (
 	// DefaultCreatedAt holds the default value on creation for the "created_at" field.
 	DefaultCreatedAt func() time.Time
+	// UpdateDefaultCreatedAt holds the default value on update for the "created_at" field.
+	UpdateDefaultCreatedAt func() time.Time
 	// DefaultUpdatedAt holds the default value on creation for the "updated_at" field.
 	DefaultUpdatedAt func() time.Time
 	// UpdateDefaultUpdatedAt holds the default value on update for the "updated_at" field.
@@ -120,6 +120,16 @@ func CreatedAtLTE(v time.Time) predicate.Event {
 	return predicate.Event(sql.FieldLTE(FieldCreatedAt, v))
 }
 
+// CreatedAtIsNil applies the IsNil predicate on the "created_at" field.
+func CreatedAtIsNil() predicate.Event {
+	return predicate.Event(sql.FieldIsNull(FieldCreatedAt))
+}
+
+// CreatedAtNotNil applies the NotNil predicate on the "created_at" field.
+func CreatedAtNotNil() predicate.Event {
+	return predicate.Event(sql.FieldNotNull(FieldCreatedAt))
+}
+
 // UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
 func UpdatedAtEQ(v time.Time) predicate.Event {
 	return predicate.Event(sql.FieldEQ(FieldUpdatedAt, v))
@@ -160,6 +170,16 @@ func UpdatedAtLTE(v time.Time) predicate.Event {
 	return predicate.Event(sql.FieldLTE(FieldUpdatedAt, v))
 }
 
+// UpdatedAtIsNil applies the IsNil predicate on the "updated_at" field.
+func UpdatedAtIsNil() predicate.Event {
+	return predicate.Event(sql.FieldIsNull(FieldUpdatedAt))
+}
+
+// UpdatedAtNotNil applies the NotNil predicate on the "updated_at" field.
+func UpdatedAtNotNil() predicate.Event {
+	return predicate.Event(sql.FieldNotNull(FieldUpdatedAt))
+}
+
 // TimeEQ applies the EQ predicate on the "time" field.
 func TimeEQ(v time.Time) predicate.Event {
 	return predicate.Event(sql.FieldEQ(FieldTime, v))
@@ -141,12 +141,6 @@ func (ec *EventCreate) defaults() {
 
 // check runs all checks and user-defined validators on the builder.
 func (ec *EventCreate) check() error {
-	if _, ok := ec.mutation.CreatedAt(); !ok {
-		return &ValidationError{Name: "created_at", err: errors.New(`ent: missing required field "Event.created_at"`)}
-	}
-	if _, ok := ec.mutation.UpdatedAt(); !ok {
-		return &ValidationError{Name: "updated_at", err: errors.New(`ent: missing required field "Event.updated_at"`)}
-	}
 	if _, ok := ec.mutation.Time(); !ok {
 		return &ValidationError{Name: "time", err: errors.New(`ent: missing required field "Event.time"`)}
 	}
@@ -186,11 +180,11 @@ func (ec *EventCreate) createSpec() (*Event, *sqlgraph.CreateSpec) {
 	)
 	if value, ok := ec.mutation.CreatedAt(); ok {
 		_spec.SetField(event.FieldCreatedAt, field.TypeTime, value)
-		_node.CreatedAt = value
+		_node.CreatedAt = &value
 	}
 	if value, ok := ec.mutation.UpdatedAt(); ok {
 		_spec.SetField(event.FieldUpdatedAt, field.TypeTime, value)
-		_node.UpdatedAt = value
+		_node.UpdatedAt = &value
 	}
 	if value, ok := ec.mutation.Time(); ok {
 		_spec.SetField(event.FieldTime, field.TypeTime, value)
@@ -29,12 +29,30 @@ func (eu *EventUpdate) Where(ps ...predicate.Event) *EventUpdate {
 	return eu
 }
 
+// SetCreatedAt sets the "created_at" field.
+func (eu *EventUpdate) SetCreatedAt(t time.Time) *EventUpdate {
+	eu.mutation.SetCreatedAt(t)
+	return eu
+}
+
+// ClearCreatedAt clears the value of the "created_at" field.
+func (eu *EventUpdate) ClearCreatedAt() *EventUpdate {
+	eu.mutation.ClearCreatedAt()
+	return eu
+}
+
 // SetUpdatedAt sets the "updated_at" field.
 func (eu *EventUpdate) SetUpdatedAt(t time.Time) *EventUpdate {
 	eu.mutation.SetUpdatedAt(t)
 	return eu
 }
 
+// ClearUpdatedAt clears the value of the "updated_at" field.
+func (eu *EventUpdate) ClearUpdatedAt() *EventUpdate {
+	eu.mutation.ClearUpdatedAt()
+	return eu
+}
+
 // SetTime sets the "time" field.
 func (eu *EventUpdate) SetTime(t time.Time) *EventUpdate {
 	eu.mutation.SetTime(t)
@@ -143,7 +161,11 @@ func (eu *EventUpdate) ExecX(ctx context.Context) {
 
 // defaults sets the default values of the builder before save.
 func (eu *EventUpdate) defaults() {
-	if _, ok := eu.mutation.UpdatedAt(); !ok {
+	if _, ok := eu.mutation.CreatedAt(); !ok && !eu.mutation.CreatedAtCleared() {
+		v := event.UpdateDefaultCreatedAt()
+		eu.mutation.SetCreatedAt(v)
+	}
+	if _, ok := eu.mutation.UpdatedAt(); !ok && !eu.mutation.UpdatedAtCleared() {
 		v := event.UpdateDefaultUpdatedAt()
 		eu.mutation.SetUpdatedAt(v)
 	}
@@ -171,9 +193,18 @@ func (eu *EventUpdate) sqlSave(ctx context.Context) (n int, err error) {
 			}
 		}
 	}
+	if value, ok := eu.mutation.CreatedAt(); ok {
+		_spec.SetField(event.FieldCreatedAt, field.TypeTime, value)
+	}
+	if eu.mutation.CreatedAtCleared() {
+		_spec.ClearField(event.FieldCreatedAt, field.TypeTime)
+	}
 	if value, ok := eu.mutation.UpdatedAt(); ok {
 		_spec.SetField(event.FieldUpdatedAt, field.TypeTime, value)
 	}
+	if eu.mutation.UpdatedAtCleared() {
+		_spec.ClearField(event.FieldUpdatedAt, field.TypeTime)
+	}
 	if value, ok := eu.mutation.Time(); ok {
 		_spec.SetField(event.FieldTime, field.TypeTime, value)
 	}
@@ -229,12 +260,30 @@ type EventUpdateOne struct {
 	mutation *EventMutation
 }
 
+// SetCreatedAt sets the "created_at" field.
+func (euo *EventUpdateOne) SetCreatedAt(t time.Time) *EventUpdateOne {
+	euo.mutation.SetCreatedAt(t)
+	return euo
+}
+
+// ClearCreatedAt clears the value of the "created_at" field.
+func (euo *EventUpdateOne) ClearCreatedAt() *EventUpdateOne {
+	euo.mutation.ClearCreatedAt()
+	return euo
+}
+
 // SetUpdatedAt sets the "updated_at" field.
 func (euo *EventUpdateOne) SetUpdatedAt(t time.Time) *EventUpdateOne {
 	euo.mutation.SetUpdatedAt(t)
 	return euo
 }
 
+// ClearUpdatedAt clears the value of the "updated_at" field.
+func (euo *EventUpdateOne) ClearUpdatedAt() *EventUpdateOne {
+	euo.mutation.ClearUpdatedAt()
+	return euo
+}
+
 // SetTime sets the "time" field.
 func (euo *EventUpdateOne) SetTime(t time.Time) *EventUpdateOne {
 	euo.mutation.SetTime(t)
@@ -356,7 +405,11 @@ func (euo *EventUpdateOne) ExecX(ctx context.Context) {
 
 // defaults sets the default values of the builder before save.
 func (euo *EventUpdateOne) defaults() {
-	if _, ok := euo.mutation.UpdatedAt(); !ok {
+	if _, ok := euo.mutation.CreatedAt(); !ok && !euo.mutation.CreatedAtCleared() {
+		v := event.UpdateDefaultCreatedAt()
+		euo.mutation.SetCreatedAt(v)
+	}
+	if _, ok := euo.mutation.UpdatedAt(); !ok && !euo.mutation.UpdatedAtCleared() {
 		v := event.UpdateDefaultUpdatedAt()
 		euo.mutation.SetUpdatedAt(v)
 	}
@@ -401,9 +454,18 @@ func (euo *EventUpdateOne) sqlSave(ctx context.Context) (_node *Event, err error
 			}
 		}
 	}
+	if value, ok := euo.mutation.CreatedAt(); ok {
+		_spec.SetField(event.FieldCreatedAt, field.TypeTime, value)
+	}
+	if euo.mutation.CreatedAtCleared() {
+		_spec.ClearField(event.FieldCreatedAt, field.TypeTime)
+	}
 	if value, ok := euo.mutation.UpdatedAt(); ok {
 		_spec.SetField(event.FieldUpdatedAt, field.TypeTime, value)
 	}
+	if euo.mutation.UpdatedAtCleared() {
+		_spec.ClearField(event.FieldUpdatedAt, field.TypeTime)
+	}
 	if value, ok := euo.mutation.Time(); ok {
 		_spec.SetField(event.FieldTime, field.TypeTime, value)
 	}
@@ -105,18 +105,6 @@ func (f MetaFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error)
 	return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.MetaMutation", m)
 }
 
-// The MetricFunc type is an adapter to allow the use of ordinary
-// function as Metric mutator.
-type MetricFunc func(context.Context, *ent.MetricMutation) (ent.Value, error)
-
-// Mutate calls f(ctx, m).
-func (f MetricFunc) Mutate(ctx context.Context, m ent.Mutation) (ent.Value, error) {
-	if mv, ok := m.(*ent.MetricMutation); ok {
-		return f(ctx, mv)
-	}
-	return nil, fmt.Errorf("unexpected mutation type %T. expect *ent.MetricMutation", m)
-}
-
 // Condition is a hook condition function.
 type Condition func(context.Context, ent.Mutation) bool
@@ -28,6 +28,20 @@ func (lu *LockUpdate) Where(ps ...predicate.Lock) *LockUpdate {
 	return lu
 }
 
+// SetName sets the "name" field.
+func (lu *LockUpdate) SetName(s string) *LockUpdate {
+	lu.mutation.SetName(s)
+	return lu
+}
+
+// SetNillableName sets the "name" field if the given value is not nil.
+func (lu *LockUpdate) SetNillableName(s *string) *LockUpdate {
+	if s != nil {
+		lu.SetName(*s)
+	}
+	return lu
+}
+
 // SetCreatedAt sets the "created_at" field.
 func (lu *LockUpdate) SetCreatedAt(t time.Time) *LockUpdate {
 	lu.mutation.SetCreatedAt(t)
@@ -83,6 +97,9 @@ func (lu *LockUpdate) sqlSave(ctx context.Context) (n int, err error) {
 			}
 		}
 	}
+	if value, ok := lu.mutation.Name(); ok {
+		_spec.SetField(lock.FieldName, field.TypeString, value)
+	}
 	if value, ok := lu.mutation.CreatedAt(); ok {
 		_spec.SetField(lock.FieldCreatedAt, field.TypeTime, value)
 	}
@@ -106,6 +123,20 @@ type LockUpdateOne struct {
 	mutation *LockMutation
 }
 
+// SetName sets the "name" field.
+func (luo *LockUpdateOne) SetName(s string) *LockUpdateOne {
+	luo.mutation.SetName(s)
+	return luo
+}
+
+// SetNillableName sets the "name" field if the given value is not nil.
+func (luo *LockUpdateOne) SetNillableName(s *string) *LockUpdateOne {
+	if s != nil {
+		luo.SetName(*s)
+	}
+	return luo
+}
+
 // SetCreatedAt sets the "created_at" field.
 func (luo *LockUpdateOne) SetCreatedAt(t time.Time) *LockUpdateOne {
 	luo.mutation.SetCreatedAt(t)
@@ -191,6 +222,9 @@ func (luo *LockUpdateOne) sqlSave(ctx context.Context) (_node *Lock, err error)
 			}
 		}
 	}
+	if value, ok := luo.mutation.Name(); ok {
+		_spec.SetField(lock.FieldName, field.TypeString, value)
+	}
 	if value, ok := luo.mutation.CreatedAt(); ok {
 		_spec.SetField(lock.FieldCreatedAt, field.TypeTime, value)
 	}
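SetNillableName follows the usual pattern for generated nillable setters: a nil pointer leaves the column untouched, which is convenient when the new value comes from an optional API field. A hedged usage sketch (assumed helper name and an already-open *ent.Client):

package example

import (
	"context"

	"github.com/crowdsecurity/crowdsec/pkg/database/ent"
)

// renameLock updates the lock's name only when newName is non-nil,
// via the SetNillableName helper added above.
func renameLock(ctx context.Context, client *ent.Client, id int, newName *string) error {
	return client.Lock.UpdateOneID(id).
		SetNillableName(newName).
		Exec(ctx)
}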
Some files were not shown because too many files have changed in this diff.