@@ -3,21 +3,16 @@ package hubtest
 import (
 	"bufio"
 	"fmt"
-	"io"
 	"os"
-	"sort"
 	"strings"
-	"time"
 
 	"github.com/antonmedv/expr"
-	"github.com/enescakir/emoji"
-	"github.com/fatih/color"
-	diff "github.com/r3labs/diff/v2"
 	log "github.com/sirupsen/logrus"
 	"gopkg.in/yaml.v2"
 
+	"github.com/crowdsecurity/crowdsec/pkg/dumps"
 	"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
-	"github.com/crowdsecurity/crowdsec/pkg/types"
+	"github.com/crowdsecurity/go-cs-lib/maptools"
 )
 
 type AssertFail struct {
@@ -34,16 +29,9 @@ type ParserAssert struct {
 	NbAssert int
 	Fails []AssertFail
 	Success bool
-	TestData *ParserResults
+	TestData *dumps.ParserResults
 }
 
-type ParserResult struct {
-	Evt types.Event
-	Success bool
-}
-
-type ParserResults map[string]map[string][]ParserResult
-
 func NewParserAssert(file string) *ParserAssert {
 	ParserAssert := &ParserAssert{
 		File: file,
@@ -51,7 +39,7 @@ func NewParserAssert(file string) *ParserAssert {
 		Success: false,
 		Fails: make([]AssertFail, 0),
 		AutoGenAssert: false,
-		TestData: &ParserResults{},
+		TestData: &dumps.ParserResults{},
 	}
 
 	return ParserAssert
@@ -69,7 +57,7 @@ func (p *ParserAssert) AutoGenFromFile(filename string) (string, error) {
 }
 
 func (p *ParserAssert) LoadTest(filename string) error {
-	parserDump, err := LoadParserDump(filename)
+	parserDump, err := dumps.LoadParserDump(filename)
 	if err != nil {
 		return fmt.Errorf("loading parser dump file: %+v", err)
 	}
@@ -229,13 +217,13 @@ func (p *ParserAssert) AutoGenParserAssert() string {
 	ret := fmt.Sprintf("len(results) == %d\n", len(*p.TestData))
 
 	//sort map keys for consistent order
-	stages := sortedMapKeys(*p.TestData)
+	stages := maptools.SortedKeys(*p.TestData)
 
 	for _, stage := range stages {
 		parsers := (*p.TestData)[stage]
 
 		//sort map keys for consistent order
-		pnames := sortedMapKeys(parsers)
+		pnames := maptools.SortedKeys(parsers)
 
 		for _, parser := range pnames {
 			presults := parsers[parser]
@@ -248,7 +236,7 @@ func (p *ParserAssert) AutoGenParserAssert() string {
 					continue
 				}
 
-				for _, pkey := range sortedMapKeys(result.Evt.Parsed) {
+				for _, pkey := range maptools.SortedKeys(result.Evt.Parsed) {
 					pval := result.Evt.Parsed[pkey]
 					if pval == "" {
 						continue
@@ -257,7 +245,7 @@ func (p *ParserAssert) AutoGenParserAssert() string {
 					ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Parsed["%s"] == "%s"`+"\n", stage, parser, pidx, pkey, Escape(pval))
 				}
 
-				for _, mkey := range sortedMapKeys(result.Evt.Meta) {
+				for _, mkey := range maptools.SortedKeys(result.Evt.Meta) {
 					mval := result.Evt.Meta[mkey]
 					if mval == "" {
 						continue
@@ -266,7 +254,7 @@ func (p *ParserAssert) AutoGenParserAssert() string {
 					ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Meta["%s"] == "%s"`+"\n", stage, parser, pidx, mkey, Escape(mval))
 				}
 
-				for _, ekey := range sortedMapKeys(result.Evt.Enriched) {
+				for _, ekey := range maptools.SortedKeys(result.Evt.Enriched) {
 					eval := result.Evt.Enriched[ekey]
 					if eval == "" {
 						continue
@@ -275,7 +263,7 @@ func (p *ParserAssert) AutoGenParserAssert() string {
 					ret += fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Enriched["%s"] == "%s"`+"\n", stage, parser, pidx, ekey, Escape(eval))
 				}
 
-				for _, ukey := range sortedMapKeys(result.Evt.Unmarshaled) {
+				for _, ukey := range maptools.SortedKeys(result.Evt.Unmarshaled) {
 					uval := result.Evt.Unmarshaled[ukey]
 					if uval == "" {
 						continue
@@ -328,288 +316,3 @@ func (p *ParserAssert) buildUnmarshaledAssert(ekey string, eval interface{}) []s
 
 	return ret
 }
-
-func LoadParserDump(filepath string) (*ParserResults, error) {
-	dumpData, err := os.Open(filepath)
-	if err != nil {
-		return nil, err
-	}
-	defer dumpData.Close()
-
-	results, err := io.ReadAll(dumpData)
-	if err != nil {
-		return nil, err
-	}
-
-	pdump := ParserResults{}
-
-	if err := yaml.Unmarshal(results, &pdump); err != nil {
-		return nil, err
-	}
-
-	/* we know that some variables should always be set,
-	let's check if they're present in last parser output of last stage */
-
-	stages := sortedMapKeys(pdump)
-
-	var lastStage string
-
-	//Loop over stages to find last successful one with at least one parser
-	for i := len(stages) - 2; i >= 0; i-- {
-		if len(pdump[stages[i]]) != 0 {
-			lastStage = stages[i]
-			break
-		}
-	}
-
-	parsers := make([]string, 0, len(pdump[lastStage]))
-
-	for k := range pdump[lastStage] {
-		parsers = append(parsers, k)
-	}
-
-	sort.Strings(parsers)
-
-	if len(parsers) == 0 {
-		return nil, fmt.Errorf("no parser found. Please install the appropriate parser and retry")
-	}
-
-	lastParser := parsers[len(parsers)-1]
-
-	for idx, result := range pdump[lastStage][lastParser] {
-		if result.Evt.StrTime == "" {
-			log.Warningf("Line %d/%d is missing evt.StrTime. It is most likely a mistake as it will prevent your logs to be processed in time-machine/forensic mode.", idx, len(pdump[lastStage][lastParser]))
-		} else {
-			log.Debugf("Line %d/%d has evt.StrTime set to '%s'", idx, len(pdump[lastStage][lastParser]), result.Evt.StrTime)
-		}
-	}
-
-	return &pdump, nil
-}
-
-type DumpOpts struct {
-	Details bool
-	SkipOk bool
-	ShowNotOkParsers bool
-}
-
-func DumpTree(parserResults ParserResults, bucketPour BucketPourInfo, opts DumpOpts) {
-	//note : we can use line -> time as the unique identifier (of acquisition)
-	state := make(map[time.Time]map[string]map[string]ParserResult)
-	assoc := make(map[time.Time]string, 0)
-
-	for stage, parsers := range parserResults {
-		for parser, results := range parsers {
-			for _, parserRes := range results {
-				evt := parserRes.Evt
-				if _, ok := state[evt.Line.Time]; !ok {
-					state[evt.Line.Time] = make(map[string]map[string]ParserResult)
-					assoc[evt.Line.Time] = evt.Line.Raw
-				}
-
-				if _, ok := state[evt.Line.Time][stage]; !ok {
-					state[evt.Line.Time][stage] = make(map[string]ParserResult)
-				}
-
-				state[evt.Line.Time][stage][parser] = ParserResult{Evt: evt, Success: parserRes.Success}
-			}
-		}
-	}
-
-	for bname, evtlist := range bucketPour {
-		for _, evt := range evtlist {
-			if evt.Line.Raw == "" {
-				continue
-			}
-
-			//it might be bucket overflow being reprocessed, skip this
-			if _, ok := state[evt.Line.Time]; !ok {
-				state[evt.Line.Time] = make(map[string]map[string]ParserResult)
-				assoc[evt.Line.Time] = evt.Line.Raw
-			}
-
-			//there is a trick : to know if an event successfully exit the parsers, we check if it reached the pour() phase
-			//we thus use a fake stage "buckets" and a fake parser "OK" to know if it entered
-			if _, ok := state[evt.Line.Time]["buckets"]; !ok {
-				state[evt.Line.Time]["buckets"] = make(map[string]ParserResult)
-			}
-
-			state[evt.Line.Time]["buckets"][bname] = ParserResult{Success: true}
-		}
-	}
-
-	yellow := color.New(color.FgYellow).SprintFunc()
-	red := color.New(color.FgRed).SprintFunc()
-	green := color.New(color.FgGreen).SprintFunc()
-	whitelistReason := ""
-	//get each line
-	for tstamp, rawstr := range assoc {
-		if opts.SkipOk {
-			if _, ok := state[tstamp]["buckets"]["OK"]; ok {
-				continue
-			}
-		}
-
-		fmt.Printf("line: %s\n", rawstr)
-
-		skeys := make([]string, 0, len(state[tstamp]))
-
-		for k := range state[tstamp] {
-			//there is a trick : to know if an event successfully exit the parsers, we check if it reached the pour() phase
-			//we thus use a fake stage "buckets" and a fake parser "OK" to know if it entered
-			if k == "buckets" {
-				continue
-			}
-
-			skeys = append(skeys, k)
-		}
-
-		sort.Strings(skeys)
-
-		// iterate stage
-		var prevItem types.Event
-
-		for _, stage := range skeys {
-			parsers := state[tstamp][stage]
-
-			sep := "├"
-			presep := "|"
-
-			fmt.Printf("\t%s %s\n", sep, stage)
-
-			pkeys := sortedMapKeys(parsers)
-
-			for idx, parser := range pkeys {
-				res := parsers[parser].Success
-				sep := "├"
-
-				if idx == len(pkeys)-1 {
-					sep = "└"
-				}
-
-				created := 0
-				updated := 0
-				deleted := 0
-				whitelisted := false
-				changeStr := ""
-				detailsDisplay := ""
-
-				if res {
-					changelog, _ := diff.Diff(prevItem, parsers[parser].Evt)
-					for _, change := range changelog {
-						switch change.Type {
-						case "create":
-							created++
-
-							detailsDisplay += fmt.Sprintf("\t%s\t\t%s %s evt.%s : %s\n", presep, sep, change.Type, strings.Join(change.Path, "."), green(change.To))
-						case "update":
-							detailsDisplay += fmt.Sprintf("\t%s\t\t%s %s evt.%s : %s -> %s\n", presep, sep, change.Type, strings.Join(change.Path, "."), change.From, yellow(change.To))
-
-							if change.Path[0] == "Whitelisted" && change.To == true {
-								whitelisted = true
-
-								if whitelistReason == "" {
-									whitelistReason = parsers[parser].Evt.WhitelistReason
-								}
-							}
-							updated++
-						case "delete":
-							deleted++
-
-							detailsDisplay += fmt.Sprintf("\t%s\t\t%s %s evt.%s\n", presep, sep, change.Type, red(strings.Join(change.Path, ".")))
-						}
-					}
-
-					prevItem = parsers[parser].Evt
-				}
-
-				if created > 0 {
-					changeStr += green(fmt.Sprintf("+%d", created))
-				}
-
-				if updated > 0 {
-					if len(changeStr) > 0 {
-						changeStr += " "
-					}
-
-					changeStr += yellow(fmt.Sprintf("~%d", updated))
-				}
-
-				if deleted > 0 {
-					if len(changeStr) > 0 {
-						changeStr += " "
-					}
-
-					changeStr += red(fmt.Sprintf("-%d", deleted))
-				}
-
-				if whitelisted {
-					if len(changeStr) > 0 {
-						changeStr += " "
-					}
-
-					changeStr += red("[whitelisted]")
-				}
-
-				if changeStr == "" {
-					changeStr = yellow("unchanged")
-				}
-
-				if res {
-					fmt.Printf("\t%s\t%s %s %s (%s)\n", presep, sep, emoji.GreenCircle, parser, changeStr)
-
-					if opts.Details {
-						fmt.Print(detailsDisplay)
-					}
-				} else if opts.ShowNotOkParsers {
-					fmt.Printf("\t%s\t%s %s %s\n", presep, sep, emoji.RedCircle, parser)
-				}
-			}
-		}
-
-		sep := "└"
-
-		if len(state[tstamp]["buckets"]) > 0 {
-			sep = "├"
-		}
-
-		//did the event enter the bucket pour phase ?
-		if _, ok := state[tstamp]["buckets"]["OK"]; ok {
-			fmt.Printf("\t%s-------- parser success %s\n", sep, emoji.GreenCircle)
-		} else if whitelistReason != "" {
-			fmt.Printf("\t%s-------- parser success, ignored by whitelist (%s) %s\n", sep, whitelistReason, emoji.GreenCircle)
-		} else {
-			fmt.Printf("\t%s-------- parser failure %s\n", sep, emoji.RedCircle)
-		}
-
-		//now print bucket info
-		if len(state[tstamp]["buckets"]) > 0 {
-			fmt.Printf("\t├ Scenarios\n")
-		}
-
-		bnames := make([]string, 0, len(state[tstamp]["buckets"]))
-
-		for k := range state[tstamp]["buckets"] {
-			//there is a trick : to know if an event successfully exit the parsers, we check if it reached the pour() phase
-			//we thus use a fake stage "buckets" and a fake parser "OK" to know if it entered
-			if k == "OK" {
-				continue
-			}
-
-			bnames = append(bnames, k)
-		}
-
-		sort.Strings(bnames)
-
-		for idx, bname := range bnames {
-			sep := "├"
-			if idx == len(bnames)-1 {
-				sep = "└"
-			}
-
-			fmt.Printf("\t\t%s %s %s\n", sep, emoji.GreenCircle, bname)
-		}
-
-		fmt.Println()
-	}
-}
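Note, outside the patch: calls to the removed local sortedMapKeys helper (and the related sort.Strings usage) now go through maptools.SortedKeys from github.com/crowdsecurity/go-cs-lib, and the dump types and loader live in pkg/dumps. A minimal sketch of what such a sorted-keys helper can look like, assuming string-keyed maps and Go 1.18+ generics (illustrative only, not the actual go-cs-lib source):

package maptools

import "sort"

// SortedKeys returns the keys of a string-keyed map in lexical order,
// giving callers (such as the assertion auto-generation above) a
// deterministic iteration order over Go maps.
func SortedKeys[V any](m map[string]V) []string {
	keys := make([]string, 0, len(m))
	for k := range m {
		keys = append(keys, k)
	}

	sort.Strings(keys)

	return keys
}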