lint: error handling cleanup (#1877)
parent 66543493b5
commit 104f5d1fe6
43 changed files with 102 additions and 110 deletions
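Two linter-driven cleanups repeat across this diff: errors are passed to the log calls and to %s verbs as values instead of pre-formatted strings (log.Fatalf(err.Error()) becomes log.Fatal(err)), and comparisons against sentinel errors such as io.EOF move from == / != to errors.Is, which also matches wrapped errors. The short program below is a hedged illustration of both patterns; it is not code from the repository.

// Hedged illustration of the two cleanups applied throughout this commit.
// This is example code, not taken from the crowdsec repository.
package main

import (
	"errors"
	"fmt"
	"io"
	"log"
	"strings"
)

// countBytes drains a reader one byte at a time and stops cleanly on EOF.
func countBytes(r io.Reader) (int, error) {
	buf := make([]byte, 1)
	n := 0
	for {
		_, err := r.Read(buf)
		if err != nil {
			// Previously written as: if err == io.EOF { ... }
			// errors.Is also matches a wrapped EOF, e.g. fmt.Errorf("...: %w", io.EOF).
			if errors.Is(err, io.EOF) {
				return n, nil
			}
			return n, fmt.Errorf("read failed: %w", err)
		}
		n++
	}
}

func main() {
	n, err := countBytes(strings.NewReader("hello"))
	if err != nil {
		// Previously written as: log.Fatalf(err.Error()), which treats the error
		// text as a format string; passing the value to log.Fatal avoids that.
		log.Fatal(err)
	}
	fmt.Println("bytes read:", n)
}

Passing the error value also avoids interpreting the error text as a format string, which is what the removed Fatalf/Errorf calls were doing.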
Changed files:

.github/workflows/ci_golangci-lint.yml
.golangci.yml
cmd/crowdsec-cli: alerts.go, alerts_table.go, bouncers.go, dashboard.go, decisions.go, hubtest.go, machines.go, main.go, metrics.go, notifications.go, simulation.go, support.go
cmd/crowdsec
pkg: acquisition, apiclient, apiserver, csconfig, csplugin, cwhub, exprhelpers, leakybucket, metabase, parser

.github/workflows/ci_golangci-lint.yml (vendored, 10 changed lines)
@@ -33,16 +33,6 @@ jobs:
       - name: golangci-lint
         uses: golangci/golangci-lint-action@v3
         with:
-          # Optional: version of golangci-lint to use in form of v1.2 or v1.2.3 or `latest` to use the latest version
           version: v1.50
-          # Optional: golangci-lint command line arguments.
           args: --issues-exit-code=1 --timeout 10m
-          # Optional: show only new issues if it's a pull request. The default value is `false`.
           only-new-issues: false
-          # Optional: if set to true then the all caching functionality will be complete disabled,
-          # takes precedence over all other caching options.
-          skip-cache: false
-          # Optional: if set to true then the action don't cache or restore ~/go/pkg.
-          skip-pkg-cache: false
-          # Optional: if set to true then the action don't cache or restore ~/.cache/go-build.
-          skip-build-cache: false
@@ -5,6 +5,7 @@ run:
     - pkg/time/rate
   skip-files:
     - pkg/database/ent/generate.go
     - pkg/yamlpatch/merge.go
     - pkg/yamlpatch/merge_test.go

 linters-settings:
@@ -420,7 +420,7 @@ cscli alerts delete -s crowdsecurity/ssh-bf"`,
 				log.Fatal("Local API is disabled, please run this command on the local API machine")
 			}
 			if err := csConfig.LoadDBConfig(); err != nil {
-				log.Fatalf(err.Error())
+				log.Fatal(err)
 			}
 			dbClient, err = database.NewClient(csConfig.DbConfig)
 			if err != nil {

@@ -55,7 +55,7 @@ func alertDecisionsTable(out io.Writer, alert *models.Alert) {
 	for _, decision := range alert.Decisions {
 		parsedDuration, err := time.ParseDuration(*decision.Duration)
 		if err != nil {
-			log.Errorf(err.Error())
+			log.Error(err)
 		}
 		expire := time.Now().UTC().Add(parsedDuration)
 		if time.Now().UTC().After(expire) {

@@ -75,7 +75,7 @@ Note: This command requires database direct access, so is intended to be run on
 			log.Fatal("Local API is disabled, please run this command on the local API machine")
 		}
 		if err := csConfig.LoadDBConfig(); err != nil {
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}
 		dbClient, err = database.NewClient(csConfig.DbConfig)
 		if err != nil {

@@ -66,11 +66,11 @@ cscli dashboard remove
 		metabaseConfigFolderPath := filepath.Join(csConfig.ConfigPaths.ConfigDir, metabaseConfigFolder)
 		metabaseConfigPath = filepath.Join(metabaseConfigFolderPath, metabaseConfigFile)
 		if err := os.MkdirAll(metabaseConfigFolderPath, os.ModePerm); err != nil {
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}
 		if err := csConfig.LoadDBConfig(); err != nil {
 			log.Errorf("This command requires direct database access (must be run on the local API machine)")
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}

 		/*

@@ -170,11 +170,11 @@ cscli dashboard setup -l 0.0.0.0 -p 443 --password <password>

 			mb, err := metabase.SetupMetabase(csConfig.API.Server.DbConfig, metabaseListenAddress, metabaseListenPort, metabaseUser, metabasePassword, metabaseDbPath, dockerGroup.Gid, metabaseContainerID)
 			if err != nil {
-				log.Fatalf(err.Error())
+				log.Fatal(err)
 			}

 			if err := mb.DumpConfig(metabaseConfigPath); err != nil {
-				log.Fatalf(err.Error())
+				log.Fatal(err)
 			}

 			log.Infof("Metabase is ready")

@@ -203,7 +203,7 @@ cscli dashboard setup -l 0.0.0.0 -p 443 --password <password>
 		Run: func(cmd *cobra.Command, args []string) {
 			mb, err := metabase.NewMetabase(metabaseConfigPath, metabaseContainerID)
 			if err != nil {
-				log.Fatalf(err.Error())
+				log.Fatal(err)
 			}
 			if err := mb.Container.Start(); err != nil {
 				log.Fatalf("Failed to start metabase container : %s", err)

@@ -351,7 +351,7 @@ cscli decisions add --scope username --value foobar

 			_, _, err = Client.Alerts.Add(context.Background(), alerts)
 			if err != nil {
-				log.Fatalf(err.Error())
+				log.Fatal(err)
 			}

 			log.Info("Decision successfully added")

@@ -597,7 +597,7 @@ decisions.json :

 			_, _, err = Client.Alerts.Add(context.Background(), alerts)
 			if err != nil {
-				log.Fatalf(err.Error())
+				log.Fatal(err)
 			}
 			log.Infof("%d decisions successfully imported", len(decisionsList))
 		},

@@ -483,7 +483,7 @@ cscli hubtest create my-scenario-test --parsers crowdsecurity/nginx --scenarios
 			}
 			output, err := test.ParserAssert.EvalExpression(evalExpression)
 			if err != nil {
-				log.Fatalf(err.Error())
+				log.Fatal(err)
 			}
 			fmt.Print(output)
 		}

@@ -169,7 +169,7 @@ Note: This command requires database direct access, so is intended to be run on
 		}
 		if err := csConfig.LoadDBConfig(); err != nil {
 			log.Errorf("This command requires direct database access (must be run on the local API machine)")
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}
 	},
 }

@@ -58,11 +58,11 @@ func initConfig() {
 	if !inSlice(os.Args[1], NoNeedConfig) {
 		csConfig, err = csconfig.NewConfig(ConfigFilePath, false, false)
 		if err != nil {
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}
 		log.Debugf("Using %s as configuration file", ConfigFilePath)
 		if err := csConfig.LoadCSCLI(); err != nil {
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}
 	} else {
 		csConfig = csconfig.NewDefaultConfig()

@@ -260,7 +260,7 @@ func NewMetricsCmd() *cobra.Command {
 		DisableAutoGenTag: true,
 		Run: func(cmd *cobra.Command, args []string) {
 			if err := csConfig.LoadPrometheus(); err != nil {
-				log.Fatalf(err.Error())
+				log.Fatal(err)
 			}
 			if !csConfig.Prometheus.Enabled {
 				log.Warning("Prometheus is not enabled, can't show metrics")

@@ -46,7 +46,7 @@ func NewNotificationsCmd() *cobra.Command {
 				err error
 			)
 			if err = csConfig.API.Server.LoadProfiles(); err != nil {
-				log.Fatalf(err.Error())
+				log.Fatal(err)
 			}
 			if csConfig.ConfigPaths.NotificationDir == "" {
 				log.Fatalf("config_paths.notification_dir is not set in crowdsec config")

@@ -108,7 +108,7 @@ cscli simulation disable crowdsecurity/ssh-bf`,
 		DisableAutoGenTag: true,
 		PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
 			if err := csConfig.LoadSimulation(); err != nil {
-				log.Fatalf(err.Error())
+				log.Fatal(err)
 			}
 			if csConfig.Cscli == nil {
 				return fmt.Errorf("you must configure cli before using simulation")

@@ -160,11 +160,11 @@ func collectAPIStatus(login string, password string, endpoint string, prefix str
 	apiurl, err := url.Parse(endpoint)

 	if err != nil {
-		return []byte(fmt.Sprintf("cannot parse API URL: %s", err.Error()))
+		return []byte(fmt.Sprintf("cannot parse API URL: %s", err))
 	}
 	scenarios, err := cwhub.GetInstalledScenariosAsString()
 	if err != nil {
-		return []byte(fmt.Sprintf("could not collect scenarios: %s", err.Error()))
+		return []byte(fmt.Sprintf("could not collect scenarios: %s", err))
 	}

 	Client, err = apiclient.NewDefaultClient(apiurl,

@@ -172,7 +172,7 @@ func collectAPIStatus(login string, password string, endpoint string, prefix str
 		fmt.Sprintf("crowdsec/%s", cwversion.VersionStr()),
 		nil)
 	if err != nil {
-		return []byte(fmt.Sprintf("could not init client: %s", err.Error()))
+		return []byte(fmt.Sprintf("could not init client: %s", err))
 	}
 	t := models.WatcherAuthRequest{
 		MachineID: &login,

@@ -52,7 +52,7 @@ func serveAPIServer(apiServer *apiserver.APIServer, apiReady chan bool) {
 		defer types.CatchPanic("crowdsec/runAPIServer")
 		log.Debugf("serving API after %s ms", time.Since(crowdsecT0))
 		if err := apiServer.Run(apiReady); err != nil {
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}
 	}()

@@ -40,7 +40,7 @@ func StartRunSvc() error {
 	// Configure logging
 	if err = types.SetDefaultLoggerConfig(cConfig.Common.LogMedia, cConfig.Common.LogDir, *cConfig.Common.LogLevel,
 		cConfig.Common.LogMaxSize, cConfig.Common.LogMaxFiles, cConfig.Common.LogMaxAge, cConfig.Common.CompressLogs, cConfig.Common.ForceColorLogs); err != nil {
-		log.Fatal(err.Error())
+		log.Fatal(err)
 	}

 	log.Infof("Crowdsec %s", cwversion.VersionStr())

@@ -58,7 +58,7 @@ func (m *crowdsec_winservice) Execute(args []string, r <-chan svc.ChangeRequest,
 	err := WindowsRun()
 	changes <- svc.Status{State: svc.Stopped}
 	if err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}
 	return
 }
@@ -184,11 +184,11 @@ func LoadAcquisitionFromFile(config *csconfig.CrowdsecServiceCfg) ([]DataSource,
 		var idx int
 		err = dec.Decode(&sub)
 		if err != nil {
-			if err == io.EOF {
-				log.Tracef("End of yaml file")
-				break
+			if ! errors.Is(err, io.EOF) {
+				return nil, errors.Wrapf(err, "failed to yaml decode %s", acquisFile)
 			}
-			return nil, errors.Wrapf(err, "failed to yaml decode %s", acquisFile)
+			log.Tracef("End of yaml file")
+			break
 		}

 		//for backward compat ('type' was not mandatory, detect it)
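The hunk above inverts the EOF check in the acquisition loader: a failed decode becomes the early-return case, and io.EOF simply ends the loop. A standalone sketch of that decode-until-EOF shape follows, using encoding/json so it runs with the standard library alone; like the yaml decoders used in this commit, json.Decoder reports the end of the stream as io.EOF.

// Sketch of a decode-until-EOF loop in the shape used after this change.
// Illustrative example, not crowdsec code.
package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"log"
	"strings"
)

type entry struct {
	Source string `json:"source"`
}

func loadEntries(r io.Reader) ([]entry, error) {
	dec := json.NewDecoder(r)
	var out []entry
	for {
		var e entry
		if err := dec.Decode(&e); err != nil {
			// Failure is the early-return case; plain EOF just ends the loop.
			if !errors.Is(err, io.EOF) {
				return nil, fmt.Errorf("failed to decode stream: %w", err)
			}
			break
		}
		out = append(out, e)
	}
	return out, nil
}

func main() {
	stream := `{"source":"file"} {"source":"journald"}`
	entries, err := loadEntries(strings.NewReader(stream))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("loaded %d entries\n", len(entries))
}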
@@ -85,18 +85,18 @@ func writeToKafka(w *kafka.Writer, logs []string) {
 func createTopic(topic string, broker string) {
 	conn, err := kafka.Dial("tcp", broker)
 	if err != nil {
-		panic(err.Error())
+		panic(err)
 	}
 	defer conn.Close()

 	controller, err := conn.Controller()
 	if err != nil {
-		panic(err.Error())
+		panic(err)
 	}
 	var controllerConn *kafka.Conn
 	controllerConn, err = kafka.Dial("tcp", net.JoinHostPort(controller.Host, strconv.Itoa(controller.Port)))
 	if err != nil {
-		panic(err.Error())
+		panic(err)
 	}
 	defer controllerConn.Close()

@@ -110,7 +110,7 @@ func createTopic(topic string, broker string) {

 	err = controllerConn.CreateTopics(topicConfigs...)
 	if err != nil {
-		panic(err.Error())
+		panic(err)
 	}
 }
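These test helpers now panic with the error value rather than its string. A small, hypothetical illustration of the practical difference: a caller that recovers the panic can still match the underlying error with errors.Is, which is lost once only the text is panicked.

// Hypothetical example: panicking with an error value keeps it inspectable
// for a recovering caller, which panicking with err.Error() does not.
package main

import (
	"errors"
	"fmt"
)

var errBrokerUnreachable = errors.New("broker unreachable")

func mustDial() {
	// The old style, panic(errBrokerUnreachable.Error()), would panic with a
	// plain string and lose the error type and identity.
	panic(fmt.Errorf("createTopic: %w", errBrokerUnreachable))
}

func main() {
	defer func() {
		r := recover()
		if err, ok := r.(error); ok && errors.Is(err, errBrokerUnreachable) {
			fmt.Println("recovered a broker error:", err)
			return
		}
		fmt.Println("recovered something else:", r)
	}()
	mustDial()
}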
@@ -103,17 +103,15 @@ func (c *ApiClient) Do(ctx context.Context, req *http.Request, v interface{}) (*
 	}

 	if v != nil {
-		if w, ok := v.(io.Writer); ok {
-			io.Copy(w, resp.Body)
-		} else {
+		w, ok := v.(io.Writer)
+		if !ok {
 			decErr := json.NewDecoder(resp.Body).Decode(v)
-			if decErr == io.EOF {
+			if errors.Is(decErr, io.EOF) {
 				decErr = nil // ignore EOF errors caused by empty response body
 			}
-			if decErr != nil {
-				err = decErr
-			}
+			return response, decErr
 		}
+		io.Copy(w, resp.Body)
 	}
 	return response, err
 }
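The Do hunk above turns the io.Writer type assertion into a guard clause and returns as soon as JSON decoding is done, treating an empty body (io.EOF from the decoder) as success. A rough standalone sketch of that shape follows; decodeInto is an illustrative name, not part of the crowdsec apiclient.

// Rough sketch of the response-handling shape after this hunk.
// Illustrative example, not crowdsec code.
package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"log"
	"strings"
)

// decodeInto copies the body into v when v is an io.Writer, otherwise it
// JSON-decodes the body into v, ignoring io.EOF from an empty body.
func decodeInto(body io.Reader, v interface{}) error {
	w, ok := v.(io.Writer)
	if !ok {
		decErr := json.NewDecoder(body).Decode(v)
		if errors.Is(decErr, io.EOF) {
			decErr = nil // an empty response body is not an error
		}
		return decErr
	}
	_, err := io.Copy(w, body)
	return err
}

func main() {
	var out struct {
		Name string `json:"name"`
	}
	if err := decodeInto(strings.NewReader(`{"name":"crowdsec"}`), &out); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out.Name)

	// An empty body decodes to the zero value without an error.
	if err := decodeInto(strings.NewReader(""), &out); err != nil {
		log.Fatal(err)
	}
	fmt.Println("empty body handled, name still:", out.Name)
}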
@@ -326,7 +326,7 @@ func TestDecisionsStreamOpts_addQueryParamsToURL(t *testing.T) {
 	// })

 	// if err != nil {
-	// 	t.Fatalf("new api client: %s", err.Error())
+	// 	t.Fatalf("new api client: %s", err)
 	// }

 	// filters := DecisionsDeleteOpts{IPEquals: new(string)}

@@ -76,7 +76,7 @@ func InitMachineTest() (*gin.Engine, models.WatcherAuthResponse, csconfig.Config

 	loginResp, err := LoginToTestAPI(router, config)
 	if err != nil {
-		return nil, models.WatcherAuthResponse{}, config, fmt.Errorf("%s", err)
+		return nil, models.WatcherAuthResponse{}, config, err
 	}
 	return router, loginResp, config, nil
 }

@@ -84,7 +84,7 @@ func InitMachineTest() (*gin.Engine, models.WatcherAuthResponse, csconfig.Config
 func LoginToTestAPI(router *gin.Engine, config csconfig.Config) (models.WatcherAuthResponse, error) {
 	body, err := CreateTestMachine(router)
 	if err != nil {
-		return models.WatcherAuthResponse{}, fmt.Errorf("%s", err)
+		return models.WatcherAuthResponse{}, err
 	}
 	err = ValidateMachine("test", config.API.Server.DbConfig)
 	if err != nil {

@@ -99,7 +99,7 @@ func LoginToTestAPI(router *gin.Engine, config csconfig.Config) (models.WatcherA
 	loginResp := models.WatcherAuthResponse{}
 	err = json.NewDecoder(w.Body).Decode(&loginResp)
 	if err != nil {
-		return models.WatcherAuthResponse{}, fmt.Errorf("%s", err)
+		return models.WatcherAuthResponse{}, err
 	}

 	return loginResp, nil
@@ -70,10 +70,10 @@ func CustomRecoveryWithWriter() gin.HandlerFunc {
 		errHandlerComplete = errors.New("http2: request body closed due to handler exiting")
 		errStreamClosed = errors.New("http2: stream closed")
 	)
-	if strErr == errClientDisconnected ||
-		strErr == errClosedBody ||
-		strErr == errHandlerComplete ||
-		strErr == errStreamClosed {
+	if errors.Is(strErr, errClientDisconnected) ||
+		errors.Is(strErr, errClosedBody) ||
+		errors.Is(strErr, errHandlerComplete) ||
+		errors.Is(strErr, errStreamClosed) {
 		brokenPipe = true
 	}
 }
@@ -358,7 +358,7 @@ func TestLoggingDebugToFileConfig(t *testing.T) {

 	// Configure logging
 	if err := types.SetDefaultLoggerConfig(cfg.LogMedia, cfg.LogDir, *cfg.LogLevel, cfg.LogMaxSize, cfg.LogMaxFiles, cfg.LogMaxAge, cfg.CompressLogs, false); err != nil {
-		t.Fatal(err.Error())
+		t.Fatal(err)
 	}
 	api, err := NewServer(&cfg)
 	if err != nil {

@@ -415,7 +415,7 @@ func TestLoggingErrorToFileConfig(t *testing.T) {

 	// Configure logging
 	if err := types.SetDefaultLoggerConfig(cfg.LogMedia, cfg.LogDir, *cfg.LogLevel, cfg.LogMaxSize, cfg.LogMaxFiles, cfg.LogMaxAge, cfg.CompressLogs, false); err != nil {
-		t.Fatal(err.Error())
+		t.Fatal(err)
 	}
 	api, err := NewServer(&cfg)
 	if err != nil {

@@ -18,7 +18,7 @@ func TestLogin(t *testing.T) {

 	body, err := CreateTestMachine(router)
 	if err != nil {
-		log.Fatalln(err.Error())
+		log.Fatalln(err)
 	}

 	// Login with machine not validated yet

@@ -60,7 +60,7 @@ func TestLogin(t *testing.T) {
 	//Validate machine
 	err = ValidateMachine("test", config.API.Server.DbConfig)
 	if err != nil {
-		log.Fatalln(err.Error())
+		log.Fatalln(err)
 	}

 	// Login with invalid password

@@ -150,7 +150,7 @@ func TestCreateMachineAlreadyExist(t *testing.T) {

 	body, err := CreateTestMachine(router)
 	if err != nil {
-		log.Fatalln(err.Error())
+		log.Fatalln(err)
 	}

 	w := httptest.NewRecorder()
@@ -157,18 +157,18 @@ func TestLoadAPIServer(t *testing.T) {

 	LogDirFullPath, err := filepath.Abs("./tests")
 	if err != nil {
-		t.Fatalf(err.Error())
+		t.Fatal(err)
 	}

 	config := &Config{}
 	fcontent, err := os.ReadFile("./tests/config.yaml")
 	if err != nil {
-		t.Fatalf(err.Error())
+		t.Fatal(err)
 	}
 	configData := os.ExpandEnv(string(fcontent))
 	err = yaml.UnmarshalStrict([]byte(configData), &config)
 	if err != nil {
-		t.Fatalf(err.Error())
+		t.Fatal(err)
 	}
 	tests := []struct {
 		name string

@@ -13,12 +13,12 @@ func TestLoadCommon(t *testing.T) {
 	pidDirPath := "./tests"
 	LogDirFullPath, err := filepath.Abs("./tests/log/")
 	if err != nil {
-		t.Fatalf(err.Error())
+		t.Fatal(err)
 	}

 	WorkingDirFullPath, err := filepath.Abs("./tests")
 	if err != nil {
-		t.Fatalf(err.Error())
+		t.Fatal(err)
 	}

 	tests := []struct {

@@ -12,22 +12,22 @@ import (
 func TestLoadCSCLI(t *testing.T) {
 	hubFullPath, err := filepath.Abs("./hub")
 	if err != nil {
-		t.Fatalf(err.Error())
+		t.Fatal(err)
 	}

 	dataFullPath, err := filepath.Abs("./data")
 	if err != nil {
-		t.Fatalf(err.Error())
+		t.Fatal(err)
 	}

 	configDirFullPath, err := filepath.Abs("./tests")
 	if err != nil {
-		t.Fatalf(err.Error())
+		t.Fatal(err)
 	}

 	hubIndexFileFullPath, err := filepath.Abs("./hub/.index.json")
 	if err != nil {
-		t.Fatalf(err.Error())
+		t.Fatal(err)
 	}

 	tests := []struct {
@@ -42,7 +42,7 @@ func (c *LocalApiServerCfg) LoadProfiles() error {
 		t := ProfileCfg{}
 		err = dec.Decode(&t)
 		if err != nil {
-			if err == io.EOF {
+			if errors.Is(err, io.EOF) {
 				break
 			}
 			return errors.Wrapf(err, "while decoding %s", c.ProfilesPath)

@@ -351,7 +351,7 @@ func ParsePluginConfigFile(path string) ([]PluginConfig, error) {
 		pc := PluginConfig{}
 		err = dec.Decode(&pc)
 		if err != nil {
-			if err == io.EOF {
+			if errors.Is(err, io.EOF) {
 				break
 			}
 			return []PluginConfig{}, fmt.Errorf("while decoding %s got error %s", path, err)
@@ -254,10 +254,10 @@ func downloadData(dataFolder string, force bool, reader io.Reader) error {
 		data := &types.DataSet{}
 		err = dec.Decode(data)
 		if err != nil {
-			if err != io.EOF {
-				return errors.Wrap(err, "while reading file")
+			if errors.Is(err, io.EOF) {
+				break
 			}
-			break
+			return errors.Wrap(err, "while reading file")
 		}

 		download := false
@@ -44,7 +44,7 @@ func getDBClient(t *testing.T) *database.Client {

 func TestVisitor(t *testing.T) {
 	if err := Init(nil); err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	tests := []struct {

@@ -131,12 +131,12 @@ func TestVisitor(t *testing.T) {

 func TestRegexpInFile(t *testing.T) {
 	if err := Init(nil); err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	err := FileInit(TestFolder, "test_data_re.txt", "regex")
 	if err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	tests := []struct {

@@ -174,11 +174,11 @@ func TestRegexpInFile(t *testing.T) {
 	for _, test := range tests {
 		compiledFilter, err := expr.Compile(test.filter, expr.Env(GetExprEnv(map[string]interface{}{})))
 		if err != nil {
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}
 		result, err := expr.Run(compiledFilter, GetExprEnv(map[string]interface{}{}))
 		if err != nil {
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}
 		if isOk := assert.Equal(t, test.result, result); !isOk {
 			t.Fatalf("test '%s' : NOK", test.name)

@@ -188,7 +188,7 @@ func TestRegexpInFile(t *testing.T) {

 func TestFileInit(t *testing.T) {
 	if err := Init(nil); err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	tests := []struct {

@@ -226,7 +226,7 @@ func TestFileInit(t *testing.T) {
 	for _, test := range tests {
 		err := FileInit(TestFolder, test.filename, test.types)
 		if err != nil {
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}
 		if test.types == "string" {
 			if _, ok := dataFile[test.filename]; !ok {

@@ -256,12 +256,12 @@ func TestFileInit(t *testing.T) {

 func TestFile(t *testing.T) {
 	if err := Init(nil); err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	err := FileInit(TestFolder, "test_data.txt", "string")
 	if err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	tests := []struct {

@@ -299,11 +299,11 @@ func TestFile(t *testing.T) {
 	for _, test := range tests {
 		compiledFilter, err := expr.Compile(test.filter, expr.Env(GetExprEnv(map[string]interface{}{})))
 		if err != nil {
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}
 		result, err := expr.Run(compiledFilter, GetExprEnv(map[string]interface{}{}))
 		if err != nil {
-			log.Fatalf(err.Error())
+			log.Fatal(err)
 		}
 		if isOk := assert.Equal(t, test.result, result); !isOk {
 			t.Fatalf("test '%s' : NOK", test.name)
@@ -2,6 +2,7 @@ package exprhelpers

 import (
 	"encoding/json"
+	"errors"
 	"fmt"
 	"strings"

@@ -17,7 +18,7 @@ func JsonExtractLib(jsblob string, target ...string) string {
 	)

 	if err != nil {
-		if err == jsonparser.KeyPathNotFoundError {
+		if errors.Is(err, jsonparser.KeyPathNotFoundError) {
 			log.Debugf("%+v doesn't exist", target)
 			return ""
 		}

@@ -39,7 +40,7 @@ func JsonExtractUnescape(jsblob string, target ...string) string {
 	)

 	if err != nil {
-		if err == jsonparser.KeyPathNotFoundError {
+		if errors.Is(err, jsonparser.KeyPathNotFoundError) {
 			log.Debugf("%+v doesn't exist", target)
 			return ""
 		}

@@ -74,7 +75,7 @@ func jsonExtractType(jsblob string, target string, t jsonparser.ValueType) ([]by
 	)

 	if err != nil {
-		if err == jsonparser.KeyPathNotFoundError {
+		if errors.Is(err, jsonparser.KeyPathNotFoundError) {
 			log.Debugf("Key %+v doesn't exist", target)
 			return nil, fmt.Errorf("key %s does not exist", target)
 		}
@@ -9,12 +9,12 @@ import (

 func TestJsonExtract(t *testing.T) {
 	if err := Init(nil); err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	err := FileInit(TestFolder, "test_data_re.txt", "regex")
 	if err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	tests := []struct {

@@ -55,12 +55,12 @@ func TestJsonExtract(t *testing.T) {
 }
 func TestJsonExtractUnescape(t *testing.T) {
 	if err := Init(nil); err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	err := FileInit(TestFolder, "test_data_re.txt", "regex")
 	if err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	tests := []struct {

@@ -95,12 +95,12 @@ func TestJsonExtractUnescape(t *testing.T) {

 func TestJsonExtractSlice(t *testing.T) {
 	if err := Init(nil); err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	err := FileInit(TestFolder, "test_data_re.txt", "regex")
 	if err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	tests := []struct {

@@ -146,12 +146,12 @@ func TestJsonExtractSlice(t *testing.T) {

 func TestJsonExtractObject(t *testing.T) {
 	if err := Init(nil); err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	err := FileInit(TestFolder, "test_data_re.txt", "regex")
 	if err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	tests := []struct {
@@ -9,7 +9,7 @@ import (

 func TestXMLGetAttributeValue(t *testing.T) {
 	if err := Init(nil); err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	tests := []struct {

@@ -68,7 +68,7 @@ func TestXMLGetAttributeValue(t *testing.T) {
 }
 func TestXMLGetNodeValue(t *testing.T) {
 	if err := Init(nil); err != nil {
-		log.Fatalf(err.Error())
+		log.Fatal(err)
 	}

 	tests := []struct {
@@ -3,6 +3,7 @@ package leakybucket
 import (
 	"bytes"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"html/template"
 	"io"
@@ -153,7 +154,7 @@ func testFile(t *testing.T, file string, bs string, holders []BucketFactory, res
 		tf := TestFile{}
 		err = dec.Decode(&tf)
 		if err != nil {
-			if err != io.EOF {
+			if !errors.Is(err, io.EOF) {
 				t.Errorf("Failed to load testfile '%s' yaml error : %v", file, err)
 				return false
 			}
@@ -2,6 +2,7 @@ package leakybucket

 import (
 	"encoding/json"
+	"errors"
 	"fmt"
 	"io"
 	"os"

@@ -169,7 +170,7 @@ func LoadBuckets(cscfg *csconfig.CrowdsecServiceCfg, files []string, tomb *tomb.
 		bucketFactory := BucketFactory{}
 		err = dec.Decode(&bucketFactory)
 		if err != nil {
-			if err != io.EOF {
+			if !errors.Is(err, io.EOF) {
 				log.Errorf("Bad yaml in %s : %v", f, err)
 				return nil, nil, fmt.Errorf("bad yaml in %s : %v", f, err)
 			}
@@ -155,7 +155,7 @@ func RemoveContainer(name string) error {
 	ctx := context.Background()
 	log.Printf("Removing docker metabase %s", name)
 	if err := cli.ContainerRemove(ctx, name, types.ContainerRemoveOptions{}); err != nil {
-		return fmt.Errorf("failed remove container %s : %s", name, err)
+		return fmt.Errorf("failed to remove container %s : %s", name, err)
 	}
 	return nil
 }

@@ -168,7 +168,7 @@ func RemoveImageContainer() error {
 	ctx := context.Background()
 	log.Printf("Removing docker image '%s'", metabaseImage)
 	if _, err := cli.ImageRemove(ctx, metabaseImage, types.ImageRemoveOptions{}); err != nil {
-		return fmt.Errorf("failed remove image container %s : %s", metabaseImage, err)
+		return fmt.Errorf("failed to remove image container %s : %s", metabaseImage, err)
 	}
 	return nil
 }

@@ -367,7 +367,7 @@ func (m *Metabase) ExtractDatabase(buf *bytes.Reader) error {
 			return fmt.Errorf("while opening zip content %s : %s", f.Name, err)
 		}
 		written, err := io.Copy(tfd, rc)
-		if err == io.EOF {
+		if errors.Is(err, io.EOF) {
 			log.Printf("files finished ok")
 		} else if err != nil {
 			return fmt.Errorf("while copying content to %s : %s", tfname, err)
@@ -420,7 +420,7 @@ func (n *Node) compile(pctx *UnixParserCtx, ectx EnricherCtx) error {
 	for _, pattern := range n.SubGroks {
 		n.Logger.Tracef("Adding subpattern '%s' : '%s'", pattern.Key, pattern.Value)
 		if err := pctx.Grok.Add(pattern.Key.(string), pattern.Value.(string)); err != nil {
-			if err == grokky.ErrAlreadyExist {
+			if errors.Is(err, grokky.ErrAlreadyExist) {
 				n.Logger.Warningf("grok '%s' already registred", pattern.Key)
 				continue
 			}
@@ -2,6 +2,7 @@ package parser

 import (
 	"bytes"
+	"errors"
 	"fmt"
 	"html/template"
 	"io"

@@ -182,7 +183,7 @@ func loadTestFile(file string) []TestFile {
 		tf := TestFile{}
 		err := dec.Decode(&tf)
 		if err != nil {
-			if err == io.EOF {
+			if errors.Is(err, io.EOF) {
 				break
 			}
 			log.Fatalf("Failed to load testfile '%s' yaml error : %v", file, err)
@@ -7,8 +7,7 @@ package parser
 */

 import (
-	//"fmt"
-
+	"errors"
 	"fmt"
 	"io"
 	_ "net/http/pprof"

@@ -68,7 +67,7 @@ func LoadStages(stageFiles []Stagefile, pctx *UnixParserCtx, ectx EnricherCtx) (
 		node.OnSuccess = "continue" //default behavior is to continue
 		err = dec.Decode(&node)
 		if err != nil {
-			if err == io.EOF {
+			if errors.Is(err, io.EOF) {
 				log.Tracef("End of yaml file")
 				break
 			}

@@ -115,7 +114,7 @@ func LoadStages(stageFiles []Stagefile, pctx *UnixParserCtx, ectx EnricherCtx) (
 		for _, data := range node.Data {
 			err = exprhelpers.FileInit(pctx.DataFolder, data.DestPath, data.Type)
 			if err != nil {
-				log.Errorf(err.Error())
+				log.Error(err)
 			}
 		}
 	}