Compare commits: master...console_la (25 commits)

Commits: 6a3a738258, b30b795656, f8c320fd73, 8ec524fd13, 82ebc1a04b, 8cab31ba2e, cb7061d751, 592bd20cc4, 9cf286d0c8, f7d726822c, e3315daa8c, ba6aefc5b0, 7f42f94fe3, c7723158a0, 8148b858fb, 9555d5dce8, c45e5489f6, 699dcb2c0f, eb0fb243a3, 186d5c3aa5, ac16db5f21, ab525fff6a, f96cb2a70d, 4f80e889d4, 1831a27600
25 changed files with 714 additions and 104 deletions
@@ -167,6 +167,28 @@ func DisplayOneAlert(alert *models.Alert, withDetail bool) error {
 		table.Render() // Send output
 	}

+	fmt.Printf("\n - Context :\n")
+	sort.Slice(alert.Meta, func(i, j int) bool {
+		return alert.Meta[i].Key < alert.Meta[j].Key
+	})
+	table = tablewriter.NewWriter(os.Stdout)
+	table.SetHeader([]string{"Key", "Value"})
+	for _, meta := range alert.Meta {
+		var valSlice []string
+		if err := json.Unmarshal([]byte(meta.Value), &valSlice); err != nil {
+			log.Fatalf("unknown context value type '%s' : %s", meta.Value, err)
+		}
+		for _, value := range valSlice {
+			table.Append([]string{
+				meta.Key,
+				value,
+			})
+		}
+	}
+	table.SetAutoMergeCells(true)
+	table.SetRowLine(true)
+	table.Render()
+
 	if withDetail {
 		fmt.Printf("\n - Events :\n")
 		for _, event := range alert.Events {
@@ -9,17 +9,22 @@ import (
 	"io/fs"
 	"net/url"
 	"os"
+	"sort"
+	"strings"

 	"github.com/crowdsecurity/crowdsec/pkg/apiclient"
 	"github.com/crowdsecurity/crowdsec/pkg/csconfig"
 	"github.com/crowdsecurity/crowdsec/pkg/cwhub"
 	"github.com/crowdsecurity/crowdsec/pkg/cwversion"
+	"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
+	"github.com/crowdsecurity/crowdsec/pkg/parser"
 	"github.com/crowdsecurity/crowdsec/pkg/types"
 	"github.com/enescakir/emoji"
 	"github.com/go-openapi/strfmt"
 	"github.com/olekukonko/tablewriter"
 	log "github.com/sirupsen/logrus"
 	"github.com/spf13/cobra"
+	"gopkg.in/yaml.v2"
 )

 func NewConsoleCmd() *cobra.Command {
@@ -39,6 +44,10 @@ func NewConsoleCmd() *cobra.Command {
 		}
 			log.Fatal("Local API is disabled, please run this command on the local API machine")
 		}
+		if err := csConfig.LoadCrowdsec(); err != nil {
+			log.Fatalf("Unable to load CrowdSec Agent: %s", err)
+		}
+
 		if csConfig.DisableAPI {
 			log.Fatal("Local API is disabled, please run this command on the local API machine")
 		}
@@ -213,6 +222,12 @@ Disable given information push to the central API.`,
 					activated = string(emoji.CheckMarkButton)
 				}
 				table.Append([]string{option, activated, "Send alerts from tainted scenarios to the console"})
+			case csconfig.SEND_CONTEXT:
+				activated := string(emoji.CrossMark)
+				if *csConfig.API.Server.ConsoleConfig.ShareContext {
+					activated = string(emoji.CheckMarkButton)
+				}
+				table.Append([]string{option, activated, "Send context with alerts to the console"})
 			}
 		}
 		table.Render()
@@ -233,6 +248,7 @@ Disable given information push to the central API.`,
 			{"share_manual_decisions", fmt.Sprintf("%t", *csConfig.API.Server.ConsoleConfig.ShareManualDecisions)},
 			{"share_custom", fmt.Sprintf("%t", *csConfig.API.Server.ConsoleConfig.ShareCustomScenarios)},
 			{"share_tainted", fmt.Sprintf("%t", *csConfig.API.Server.ConsoleConfig.ShareTaintedScenarios)},
+			{"share_context", fmt.Sprintf("%t", *csConfig.API.Server.ConsoleConfig.ShareContext)},
 		}
 		for _, row := range rows {
 			err = csvwriter.Write(row)
@@ -246,6 +262,193 @@ Disable given information push to the central API.`,
 	}

 	cmdConsole.AddCommand(cmdConsoleStatus)

+	cmdContext := &cobra.Command{
+		Use:               "context [feature-flag]",
+		Short:             "Manage context to send with alerts",
+		DisableAutoGenTag: true,
+		Run: func(cmd *cobra.Command, args []string) {
+			printHelp(cmd)
+		},
+	}
+
+	var keyToAdd string
+	var valuesToAdd []string
+	cmdContextAdd := &cobra.Command{
+		Use:               "add",
+		Short:             "Add context to send with alerts",
+		DisableAutoGenTag: true,
+		Run: func(cmd *cobra.Command, args []string) {
+			if _, ok := csConfig.Crowdsec.ContextToSend[keyToAdd]; !ok {
+				csConfig.Crowdsec.ContextToSend[keyToAdd] = make([]string, 0)
+				log.Infof("key '%s' added", keyToAdd)
+			}
+			data := csConfig.Crowdsec.ContextToSend[keyToAdd]
+			for _, val := range valuesToAdd {
+				if !inSlice(val, data) {
+					log.Infof("value '%s' added to key '%s'", val, keyToAdd)
+					data = append(data, val)
+				}
+				csConfig.Crowdsec.ContextToSend[keyToAdd] = data
+			}
+			if err := csConfig.Crowdsec.DumpContextConfigFile(); err != nil {
+				log.Fatalf(err.Error())
+			}
+		},
+	}
+	cmdContextAdd.Flags().StringVarP(&keyToAdd, "key", "k", "", "The key of the different values to send")
+	cmdContextAdd.Flags().StringSliceVar(&valuesToAdd, "value", []string{}, "The expr fields to associate with the key")
+	cmdContextAdd.MarkFlagRequired("key")
+	cmdContextAdd.MarkFlagRequired("value")
+	cmdContext.AddCommand(cmdContextAdd)
+
+	cmdContextStatus := &cobra.Command{
+		Use:               "status",
+		Short:             "List context to send with alerts",
+		DisableAutoGenTag: true,
+		Run: func(cmd *cobra.Command, args []string) {
+			dump, err := yaml.Marshal(csConfig.Crowdsec.ContextToSend)
+			if err != nil {
+				log.Fatalf("unable to show context status: %s", err)
+			}
+			fmt.Println(string(dump))
+		},
+	}
+	cmdContext.AddCommand(cmdContextStatus)
+
+	var detectAll bool
+	cmdContextDetect := &cobra.Command{
+		Use:               "detect",
+		Short:             "Detect available fields from the installed parsers",
+		DisableAutoGenTag: true,
+		Run: func(cmd *cobra.Command, args []string) {
+			var err error
+
+			if !detectAll && len(args) == 0 {
+				log.Infof("Please provide parsers to detect or --all flag.")
+				printHelp(cmd)
+			}
+
+			// to avoid all the log.Info from the loaders functions
+			log.SetLevel(log.ErrorLevel)
+
+			err = exprhelpers.Init()
+			if err != nil {
+				log.Fatalf("Failed to init expr helpers : %s", err)
+			}
+
+			// Populate cwhub package tools
+			if err := cwhub.GetHubIdx(csConfig.Hub); err != nil {
+				log.Fatalf("Failed to load hub index : %s", err)
+			}
+
+			csParsers := parser.NewParsers()
+			if csParsers, err = parser.LoadParsers(csConfig, csParsers); err != nil {
+				log.Fatalf("unable to load parsers: %s", err)
+			}
+
+			fieldByParsers := make(map[string][]string)
+			for _, node := range csParsers.Nodes {
+				if !detectAll && !inSlice(node.Name, args) {
+					continue
+				}
+				if !detectAll {
+					args = removeFromSlice(node.Name, args)
+				}
+				fieldByParsers[node.Name] = make([]string, 0)
+				fieldByParsers[node.Name] = detectNode(node, *csParsers.Ctx)
+
+				subNodeFields := detectSubNode(node, *csParsers.Ctx)
+				for _, field := range subNodeFields {
+					if !inSlice(field, fieldByParsers[node.Name]) {
+						fieldByParsers[node.Name] = append(fieldByParsers[node.Name], field)
+					}
+				}
+			}
+
+			fmt.Printf("Acquisition :\n\n")
+			fmt.Printf(" - evt.Line.Module\n")
+			fmt.Printf(" - evt.Line.Raw\n")
+			fmt.Printf(" - evt.Line.Src\n")
+			fmt.Println()
+
+			parsersKey := make([]string, 0)
+			for k := range fieldByParsers {
+				parsersKey = append(parsersKey, k)
+			}
+			sort.Strings(parsersKey)
+
+			for _, k := range parsersKey {
+				fmt.Printf("%s :\n\n", k)
+				values := fieldByParsers[k]
+				sort.Strings(values)
+				for _, value := range values {
+					fmt.Printf(" - %s\n", value)
+				}
+				fmt.Println()
+			}
+
+			if len(args) > 0 {
+				for _, parserNotFound := range args {
+					log.Errorf("parser '%s' not found, can't detect fields", parserNotFound)
+				}
+			}
+		},
+	}
+	cmdContextDetect.Flags().BoolVarP(&detectAll, "all", "a", false, "Detect evt field for all installed parser")
+	cmdContext.AddCommand(cmdContextDetect)
+
+	var keysToDelete []string
+	var valuesToDelete []string
+	cmdContextDelete := &cobra.Command{
+		Use:               "delete",
+		Short:             "Delete context to send with alerts",
+		DisableAutoGenTag: true,
+		Run: func(cmd *cobra.Command, args []string) {
+			if len(keysToDelete) == 0 && len(valuesToDelete) == 0 {
+				log.Fatalf("please provide at least a key or a value to delete")
+			}
+
+			for _, key := range keysToDelete {
+				if _, ok := csConfig.Crowdsec.ContextToSend[key]; ok {
+					delete(csConfig.Crowdsec.ContextToSend, key)
+					log.Infof("key '%s' has been removed", key)
+				} else {
+					log.Warningf("key '%s' doesn't exist", key)
+				}
+			}
+
+			for _, value := range valuesToDelete {
+				valueFound := false
+				for key, context := range csConfig.Crowdsec.ContextToSend {
+					if inSlice(value, context) {
+						valueFound = true
+						csConfig.Crowdsec.ContextToSend[key] = removeFromSlice(value, context)
+						log.Infof("value '%s' has been removed from key '%s'", value, key)
+					}
+					if len(csConfig.Crowdsec.ContextToSend[key]) == 0 {
+						delete(csConfig.Crowdsec.ContextToSend, key)
+					}
+				}
+				if !valueFound {
+					log.Warningf("value '%s' not found", value)
+				}
+			}
+
+			if err := csConfig.Crowdsec.DumpContextConfigFile(); err != nil {
+				log.Fatalf(err.Error())
+			}
+
+		},
+	}
+	cmdContextDelete.Flags().StringSliceVarP(&keysToDelete, "key", "k", []string{}, "The keys to delete")
+	cmdContextDelete.Flags().StringSliceVar(&valuesToDelete, "value", []string{}, "The expr fields to delete")
+	cmdContext.AddCommand(cmdContextDelete)
+
+	cmdConsole.AddCommand(cmdContext)
+
 	return cmdConsole
 }
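The hunk above wires four new `cscli console context` subcommands (add, status, detect, delete) into the existing console command. A usage sketch based only on the flags defined in that hunk; the key/value pair is borrowed from the test fixture added later in this compare:

    # associate an expr field with a context key, then inspect the stored mapping
    cscli console context add --key source_ip --value evt.Parsed.source_ip
    cscli console context status
    # list evt.* fields exposed by installed parsers (or name specific parsers)
    cscli console context detect --all
    # remove a whole key, or a single value across keys
    cscli console context delete --key source_ip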
@@ -291,9 +494,143 @@ func SetConsoleOpts(args []string, wanted bool) {
 				log.Infof("%s set to %t", csconfig.SEND_MANUAL_SCENARIOS, wanted)
 				csConfig.API.Server.ConsoleConfig.ShareManualDecisions = types.BoolPtr(wanted)
 			}
+		case csconfig.SEND_CONTEXT:
+			/*for each flag check if it's already set before setting it*/
+			if csConfig.API.Server.ConsoleConfig.ShareContext != nil {
+				if *csConfig.API.Server.ConsoleConfig.ShareContext == wanted {
+					log.Infof("%s already set to %t", csconfig.SEND_CONTEXT, wanted)
+				} else {
+					log.Infof("%s set to %t", csconfig.SEND_CONTEXT, wanted)
+					*csConfig.API.Server.ConsoleConfig.ShareContext = wanted
+				}
+			} else {
+				log.Infof("%s set to %t", csconfig.SEND_CONTEXT, wanted)
+				csConfig.API.Server.ConsoleConfig.ShareContext = types.BoolPtr(wanted)
+			}
 		default:
 			log.Fatalf("unknown flag %s", arg)
 		}
 	}

 }

+func detectStaticField(GrokStatics []types.ExtraField) []string {
+	ret := make([]string, 0)
+	for _, static := range GrokStatics {
+		if static.Parsed != "" {
+			fieldName := fmt.Sprintf("evt.Parsed.%s", static.Parsed)
+			if !inSlice(fieldName, ret) {
+				ret = append(ret, fieldName)
+			}
+		}
+		if static.Meta != "" {
+			fieldName := fmt.Sprintf("evt.Meta.%s", static.Meta)
+			if !inSlice(fieldName, ret) {
+				ret = append(ret, fieldName)
+			}
+		}
+		if static.TargetByName != "" {
+			fieldName := static.TargetByName
+			if !strings.HasPrefix(fieldName, "evt.") {
+				fieldName = "evt." + fieldName
+			}
+			if !inSlice(static.TargetByName, ret) {
+				ret = append(ret, static.TargetByName)
+			}
+		}
+	}
+
+	return ret
+}
+
+func detectNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
+	var ret = make([]string, 0)
+	if node.Grok.RunTimeRegexp != nil {
+		for _, capturedField := range node.Grok.RunTimeRegexp.Names() {
+			fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField)
+			if !inSlice(fieldName, ret) {
+				ret = append(ret, fieldName)
+			}
+		}
+	}
+
+	if node.Grok.RegexpName != "" {
+		grokCompiled, err := parserCTX.Grok.Get(node.Grok.RegexpName)
+		if err != nil {
+			log.Warningf("Can't get subgrok: %s", err)
+		}
+		for _, capturedField := range grokCompiled.Names() {
+			fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField)
+			if !inSlice(fieldName, ret) {
+				ret = append(ret, fieldName)
+			}
+		}
+	}
+
+	if len(node.Grok.Statics) > 0 {
+		staticsField := detectStaticField(node.Grok.Statics)
+		for _, staticField := range staticsField {
+			if !inSlice(staticField, ret) {
+				ret = append(ret, staticField)
+			}
+		}
+	}
+
+	if len(node.Statics) > 0 {
+		staticsField := detectStaticField(node.Statics)
+		for _, staticField := range staticsField {
+			if !inSlice(staticField, ret) {
+				ret = append(ret, staticField)
+			}
+		}
+	}
+
+	return ret
+}
+
+func detectSubNode(node parser.Node, parserCTX parser.UnixParserCtx) []string {
+	var ret = make([]string, 0)
+
+	for _, subnode := range node.LeavesNodes {
+		if subnode.Grok.RunTimeRegexp != nil {
+			for _, capturedField := range subnode.Grok.RunTimeRegexp.Names() {
+				fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField)
+				if !inSlice(fieldName, ret) {
+					ret = append(ret, fieldName)
+				}
+			}
+		}
+		if subnode.Grok.RegexpName != "" {
+			grokCompiled, err := parserCTX.Grok.Get(subnode.Grok.RegexpName)
+			if err != nil {
+				log.Warningf("Can't get subgrok: %s", err)
+			}
+			for _, capturedField := range grokCompiled.Names() {
+				fieldName := fmt.Sprintf("evt.Parsed.%s", capturedField)
+				if !inSlice(fieldName, ret) {
+					ret = append(ret, fieldName)
+				}
+			}
+		}
+
+		if len(subnode.Grok.Statics) > 0 {
+			staticsField := detectStaticField(subnode.Grok.Statics)
+			for _, staticField := range staticsField {
+				if !inSlice(staticField, ret) {
+					ret = append(ret, staticField)
+				}
+			}
+		}
+
+		if len(subnode.Statics) > 0 {
+			staticsField := detectStaticField(subnode.Statics)
+			for _, staticField := range staticsField {
+				if !inSlice(staticField, ret) {
+					ret = append(ret, staticField)
+				}
+			}
+		}
+	}
+
+	return ret
+}
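The new SEND_CONTEXT case in SetConsoleOpts above is what the existing console enable/disable verbs hit when given the new `context` flag. A hedged example (only the `context` flag is introduced in this compare; the enable/disable subcommands already exist in cscli):

    cscli console enable context    # sets share_context: true in console.yaml
    cscli console disable context   # sets share_context: false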
@@ -42,6 +42,24 @@ func inSlice(s string, slice []string) bool {
 	return false
 }

+func removeFromSlice(val string, slice []string) []string {
+	var i int
+	var value string
+
+	// get the index
+	for i, value = range slice {
+		if value == val {
+			break
+		}
+	}
+	slice[i] = slice[len(slice)-1]
+	slice[len(slice)-1] = ""
+	slice = slice[:len(slice)-1]
+
+	return slice
+
+}
+
 func indexOf(s string, slice []string) int {
 	for i, elem := range slice {
 		if s == elem {
@@ -31,7 +31,7 @@ func initCrowdsec(cConfig *csconfig.Config) (*parser.Parsers, error) {
 	}

 	// Start loading configs
-	csParsers := newParsers()
+	csParsers := parser.NewParsers()
 	if csParsers, err = parser.LoadParsers(cConfig, csParsers); err != nil {
 		return &parser.Parsers{}, fmt.Errorf("Failed to load parsers: %s", err)
 	}
@@ -4,7 +4,6 @@ import (
 	"flag"
 	"fmt"
 	"os"
-	"sort"
 	"strings"

 	_ "net/http/pprof"
@@ -69,45 +68,6 @@ type Flags struct {

 type labelsMap map[string]string

-// Return new parsers
-// nodes and povfwnodes are already initialized in parser.LoadStages
-func newParsers() *parser.Parsers {
-	parsers := &parser.Parsers{
-		Ctx:             &parser.UnixParserCtx{},
-		Povfwctx:        &parser.UnixParserCtx{},
-		StageFiles:      make([]parser.Stagefile, 0),
-		PovfwStageFiles: make([]parser.Stagefile, 0),
-	}
-	for _, itemType := range []string{cwhub.PARSERS, cwhub.PARSERS_OVFLW} {
-		for _, hubParserItem := range cwhub.GetItemMap(itemType) {
-			if hubParserItem.Installed {
-				stagefile := parser.Stagefile{
-					Filename: hubParserItem.LocalPath,
-					Stage:    hubParserItem.Stage,
-				}
-				if itemType == cwhub.PARSERS {
-					parsers.StageFiles = append(parsers.StageFiles, stagefile)
-				}
-				if itemType == cwhub.PARSERS_OVFLW {
-					parsers.PovfwStageFiles = append(parsers.PovfwStageFiles, stagefile)
-				}
-			}
-		}
-	}
-	if parsers.StageFiles != nil {
-		sort.Slice(parsers.StageFiles, func(i, j int) bool {
-			return parsers.StageFiles[i].Filename < parsers.StageFiles[j].Filename
-		})
-	}
-	if parsers.PovfwStageFiles != nil {
-		sort.Slice(parsers.PovfwStageFiles, func(i, j int) bool {
-			return parsers.PovfwStageFiles[i].Filename < parsers.PovfwStageFiles[j].Filename
-		})
-	}
-
-	return parsers
-}
-
 func LoadBuckets(cConfig *csconfig.Config) error {

 	var (
@@ -14,6 +14,7 @@ config_paths:
   notification_dir: /etc/crowdsec/notifications/
   plugin_dir: /usr/local/lib/crowdsec/plugins/
 crowdsec_service:
+  console_context_path: /etc/crowdsec/console/context.yaml
   acquisition_path: /etc/crowdsec/acquis.yaml
   parser_routines: 1
 cscli:
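For reference, the file behind the new `console_context_path` setting is a plain map of context keys to expr fields, matching the test fixture `pkg/csconfig/tests/context.yaml` added in this compare. A minimal sketch of creating one at the packaged default path:

    # write a minimal context file (keys are arbitrary names, values are expr fields)
    cat > /etc/crowdsec/console/context.yaml <<'EOF'
    source_ip:
      - evt.Parsed.source_ip
    EOF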
@@ -1,3 +1,4 @@
 share_manual_decisions: false
 share_custom: true
 share_tainted: true
+share_labels: false
config/context.yaml (new file, 0 lines)

debian/rules (vendored, 2 changes)
@@ -27,6 +27,7 @@ override_dh_auto_install:
 	mkdir -p debian/crowdsec/etc/crowdsec
 	mkdir -p debian/crowdsec/usr/share/crowdsec
 	mkdir -p debian/crowdsec/etc/crowdsec/hub/
+	mkdir -p debian/crowdsec/etc/crowdsec/console/
 	mkdir -p debian/crowdsec/usr/share/crowdsec/config
@@ -45,5 +46,6 @@ override_dh_auto_install:
 	cp config/simulation.yaml debian/crowdsec/etc/crowdsec/simulation.yaml
 	cp config/profiles.yaml debian/crowdsec/etc/crowdsec/profiles.yaml
 	cp config/console.yaml debian/crowdsec/etc/crowdsec/console.yaml
+	cp config/context.yaml debian/crowdsec/etc/crowdsec/console/context.yaml
 	cp -a config/patterns debian/crowdsec/etc/crowdsec
@@ -159,6 +159,9 @@ func (a *apic) Push() error {
 			if ok := shouldShareAlert(alert, a.consoleConfig); ok {
 				signals = append(signals, alertToSignal(alert, getScenarioTrustOfAlert(alert)))
 			}
+			if !*a.consoleConfig.ShareContext {
+				alert.Meta = models.Meta{}
+			}
 		}
 		a.mu.Lock()
 		cache = append(cache, signals...)
@@ -60,6 +60,7 @@ func getAPIC(t *testing.T) *apic {
 			ShareManualDecisions:  types.BoolPtr(false),
 			ShareTaintedScenarios: types.BoolPtr(false),
 			ShareCustomScenarios:  types.BoolPtr(false),
+			ShareContext:          types.BoolPtr(false),
 		},
 	}
 }
@@ -213,6 +213,7 @@ func TestLoadAPIServer(t *testing.T) {
 				ShareManualDecisions:  types.BoolPtr(false),
 				ShareTaintedScenarios: types.BoolPtr(true),
 				ShareCustomScenarios:  types.BoolPtr(true),
+				ShareContext:          types.BoolPtr(false),
 			},
 			LogDir:   LogDirFullPath,
 			LogMedia: "stdout",
@@ -15,9 +15,10 @@ const (
 	SEND_CUSTOM_SCENARIOS  = "custom"
 	SEND_TAINTED_SCENARIOS = "tainted"
 	SEND_MANUAL_SCENARIOS  = "manual"
+	SEND_CONTEXT           = "context"
 )

-var CONSOLE_CONFIGS = []string{SEND_CUSTOM_SCENARIOS, SEND_MANUAL_SCENARIOS, SEND_TAINTED_SCENARIOS}
+var CONSOLE_CONFIGS = []string{SEND_CUSTOM_SCENARIOS, SEND_MANUAL_SCENARIOS, SEND_TAINTED_SCENARIOS, SEND_CONTEXT}

 var DefaultConsoleConfigFilePath = DefaultConfigPath("console.yaml")
@@ -25,6 +26,7 @@ type ConsoleConfig struct {
 	ShareManualDecisions  *bool `yaml:"share_manual_decisions"`
 	ShareTaintedScenarios *bool `yaml:"share_tainted"`
 	ShareCustomScenarios  *bool `yaml:"share_custom"`
+	ShareContext          *bool `yaml:"share_context"`
 }

 func (c *LocalApiServerCfg) LoadConsoleConfig() error {
@@ -34,6 +36,7 @@ func (c *LocalApiServerCfg) LoadConsoleConfig() error {
 		c.ConsoleConfig.ShareCustomScenarios = types.BoolPtr(true)
 		c.ConsoleConfig.ShareTaintedScenarios = types.BoolPtr(true)
 		c.ConsoleConfig.ShareManualDecisions = types.BoolPtr(false)
+		c.ConsoleConfig.ShareContext = types.BoolPtr(false)
 		return nil
 	}
@@ -58,6 +61,12 @@ func (c *LocalApiServerCfg) LoadConsoleConfig() error {
 		log.Debugf("no share_manual scenarios found, setting to false")
 		c.ConsoleConfig.ShareManualDecisions = types.BoolPtr(false)
 	}
+
+	if c.ConsoleConfig.ShareContext == nil {
+		log.Debugf("no 'context' found, setting to false")
+		c.ConsoleConfig.ShareContext = types.BoolPtr(false)
+	}
+
 	log.Debugf("Console configuration '%s' loaded successfully", c.ConsoleConfigPath)

 	return nil
@@ -2,18 +2,20 @@ package csconfig

 import (
 	"fmt"
+	"io/ioutil"
 	"os"
 	"path/filepath"

 	"github.com/pkg/errors"
 	log "github.com/sirupsen/logrus"
+	"gopkg.in/yaml.v2"
 )

 /*Configurations needed for crowdsec to load parser/scenarios/... + acquisition*/
 type CrowdsecServiceCfg struct {
 	AcquisitionFilePath string `yaml:"acquisition_path,omitempty"`
 	AcquisitionDirPath  string `yaml:"acquisition_dir,omitempty"`
+	ConsoleContextPath  string `yaml:"console_context_path"`
 	AcquisitionFiles    []string `yaml:"-"`
 	ParserRoutinesCount int      `yaml:"parser_routines"`
 	BucketsRoutinesCount int     `yaml:"buckets_routines"`
@@ -29,8 +31,11 @@ type CrowdsecServiceCfg struct {
 	ConfigDir          string              `yaml:"-"`
 	HubIndexFile       string              `yaml:"-"`
 	SimulationFilePath string              `yaml:"-"`
+	ContextToSend      map[string][]string `yaml:"-"`
 }

+var DefaultContextConfigFilePath = DefaultConfigPath("console", "context.yaml")
+
 func (c *Config) LoadCrowdsec() error {
 	var err error
 	// Configuration paths are dependency to load crowdsec configuration
@@ -89,9 +94,22 @@ func (c *Config) LoadCrowdsec() error {
 	if c.Crowdsec.OutputRoutinesCount <= 0 {
 		c.Crowdsec.OutputRoutinesCount = 1
 	}
+	if c.Crowdsec.ConsoleContextPath == "" {
+		c.Crowdsec.ConsoleContextPath = DefaultContextConfigFilePath
+	}
+	yamlFile, err := ioutil.ReadFile(c.Crowdsec.ConsoleContextPath)
+	if err != nil {
+		return fmt.Errorf("reading console label file '%s': %s", c.Crowdsec.ConsoleContextPath, err)
+	}
+	c.Crowdsec.ContextToSend = make(map[string][]string, 0)
+	err = yaml.Unmarshal(yamlFile, c.Crowdsec.ContextToSend)
+	if err != nil {
+		return fmt.Errorf("unmarshaling labels console config file '%s': %s", DefaultContextConfigFilePath, err)
+	}

 	var crowdsecCleanup = []*string{
 		&c.Crowdsec.AcquisitionFilePath,
+		&c.Crowdsec.ConsoleContextPath,
 	}
 	for _, k := range crowdsecCleanup {
 		if *k == "" {
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (c *CrowdsecServiceCfg) DumpContextConfigFile() error {
|
||||||
|
var out []byte
|
||||||
|
var err error
|
||||||
|
|
||||||
|
if out, err = yaml.Marshal(c.ContextToSend); err != nil {
|
||||||
|
return errors.Wrapf(err, "while marshaling ConsoleConfig (for %s)", DefaultContextConfigFilePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := os.WriteFile(c.ConsoleContextPath, out, 0600); err != nil {
|
||||||
|
return errors.Wrapf(err, "while dumping console config to %s", DefaultContextConfigFilePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
log.Infof("%s file saved", c.ConsoleContextPath)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
|
@@ -46,6 +46,11 @@ func TestLoadCrowdsec(t *testing.T) {
 		t.Fatalf(err.Error())
 	}

+	contextFileFullPath, err := filepath.Abs("./tests/context.yaml")
+	if err != nil {
+		t.Fatalf(err.Error())
+	}
+
 	tests := []struct {
 		name  string
 		Input *Config
|
@ -68,10 +73,12 @@ func TestLoadCrowdsec(t *testing.T) {
|
||||||
Crowdsec: &CrowdsecServiceCfg{
|
Crowdsec: &CrowdsecServiceCfg{
|
||||||
AcquisitionFilePath: "./tests/acquis.yaml",
|
AcquisitionFilePath: "./tests/acquis.yaml",
|
||||||
SimulationFilePath: "./tests/simulation.yaml",
|
SimulationFilePath: "./tests/simulation.yaml",
|
||||||
|
ConsoleContextPath: "./tests/context.yaml",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
expectedResult: &CrowdsecServiceCfg{
|
expectedResult: &CrowdsecServiceCfg{
|
||||||
AcquisitionDirPath: "",
|
AcquisitionDirPath: "",
|
||||||
|
ConsoleContextPath: contextFileFullPath,
|
||||||
AcquisitionFilePath: acquisFullPath,
|
AcquisitionFilePath: acquisFullPath,
|
||||||
ConfigDir: configDirFullPath,
|
ConfigDir: configDirFullPath,
|
||||||
DataDir: dataFullPath,
|
DataDir: dataFullPath,
|
||||||
|
@@ -81,6 +88,9 @@ func TestLoadCrowdsec(t *testing.T) {
 				ParserRoutinesCount: 1,
 				OutputRoutinesCount: 1,
 				AcquisitionFiles:    []string{acquisFullPath},
+				ContextToSend: map[string][]string{
+					"source_ip": {"evt.Parsed.source_ip"},
+				},
 				SimulationFilePath: "./tests/simulation.yaml",
 				SimulationConfig: &SimulationConfig{
 					Simulation: &falseBoolPtr,
@@ -104,18 +114,23 @@ func TestLoadCrowdsec(t *testing.T) {
 					AcquisitionFilePath: "./tests/acquis.yaml",
 					AcquisitionDirPath:  "./tests/acquis/",
 					SimulationFilePath:  "./tests/simulation.yaml",
+					ConsoleContextPath:  "./tests/context.yaml",
 				},
 			},
 			expectedResult: &CrowdsecServiceCfg{
 				AcquisitionDirPath:   acquisDirFullPath,
 				AcquisitionFilePath:  acquisFullPath,
 				ConfigDir:            configDirFullPath,
+				ConsoleContextPath:   contextFileFullPath,
 				HubIndexFile:         hubIndexFileFullPath,
 				DataDir:              dataFullPath,
 				HubDir:               hubFullPath,
 				BucketsRoutinesCount: 1,
 				ParserRoutinesCount:  1,
 				OutputRoutinesCount:  1,
+				ContextToSend: map[string][]string{
+					"source_ip": {"evt.Parsed.source_ip"},
+				},
 				AcquisitionFiles:   []string{acquisFullPath, acquisInDirFullPath},
 				SimulationFilePath: "./tests/simulation.yaml",
 				SimulationConfig: &SimulationConfig{
@@ -146,6 +161,7 @@ func TestLoadCrowdsec(t *testing.T) {
 				HubIndexFile: hubIndexFileFullPath,
 				DataDir:      dataFullPath,
 				HubDir:       hubFullPath,
+				ConsoleContextPath: DefaultContextConfigFilePath,
 				SimulationConfig: &SimulationConfig{
 					Simulation: &falseBoolPtr,
 				},
@@ -169,6 +185,7 @@ func TestLoadCrowdsec(t *testing.T) {
 				},
 			},
 			expectedResult: &CrowdsecServiceCfg{
+				ConsoleContextPath:   "",
 				AcquisitionFilePath:  "./tests/acquis_not_exist.yaml",
 				BucketsRoutinesCount: 0,
 				ParserRoutinesCount:  0,
pkg/csconfig/tests/context.yaml (new file, 2 lines)

@@ -0,0 +1,2 @@
+source_ip:
+- evt.Parsed.source_ip
@@ -392,7 +392,6 @@ func (c *Client) CreateAlertBulk(machineId string, alertList []*models.Alert) ([
 				return []string{}, errors.Wrapf(BulkError, "creating alert events: %s", err)
 			}
 		}
-
 		if len(alertItem.Meta) > 0 {
 			metaBulk := make([]*ent.MetaCreate, len(alertItem.Meta))
 			for i, metaItem := range alertItem.Meta {
@@ -7,6 +7,7 @@ import (
 	"time"

 	//"log"
+	"github.com/antonmedv/expr/vm"
 	"github.com/crowdsecurity/crowdsec/pkg/time/rate"
 	"github.com/crowdsecurity/crowdsec/pkg/types"
 	"github.com/goombaio/namegenerator"
@@ -71,6 +72,7 @@ type Leaky struct {
 	wgPour      *sync.WaitGroup
 	wgDumpState *sync.WaitGroup
 	mutex       *sync.Mutex //used only for TIMEMACHINE mode to allow garbage collection without races
+	LabelsToSend map[string][]*vm.Program
 }

 var BucketsPour = prometheus.NewCounterVec(
@@ -179,6 +181,7 @@ func FromFactory(bucketFactory BucketFactory) *Leaky {
 		wgPour:      bucketFactory.wgPour,
 		wgDumpState: bucketFactory.wgDumpState,
 		mutex:       &sync.Mutex{},
+		LabelsToSend: bucketFactory.LabelsToSendCompiled,
 	}
 	if l.BucketConfig.Capacity > 0 && l.BucketConfig.leakspeed != time.Duration(0) {
 		l.Duration = time.Duration(l.BucketConfig.Capacity+1) * l.BucketConfig.leakspeed
@@ -72,6 +72,8 @@ type BucketFactory struct {
 	tomb        *tomb.Tomb      `yaml:"-"`
 	wgPour      *sync.WaitGroup `yaml:"-"`
 	wgDumpState *sync.WaitGroup `yaml:"-"`
+	LabelsToSend         map[string][]string      `yaml:"-"`
+	LabelsToSendCompiled map[string][]*vm.Program `yaml:"-"`
 }

 func ValidateFactory(bucketFactory *BucketFactory) error {
@@ -216,6 +218,7 @@ func LoadBuckets(cscfg *csconfig.CrowdsecServiceCfg, files []string, tomb *tomb.
 	bucketFactory.wgDumpState = buckets.wgDumpState
 	bucketFactory.wgPour = buckets.wgPour
+	bucketFactory.LabelsToSend = cscfg.ContextToSend
 	err = LoadBucket(&bucketFactory, tomb)
 	if err != nil {
 		log.Errorf("Failed to load bucket %s : %v", bucketFactory.Name, err)
@@ -348,6 +351,18 @@ func LoadBucket(bucketFactory *BucketFactory, tomb *tomb.Tomb) error {
 		return fmt.Errorf("invalid bucket from %s : %v", bucketFactory.Filename, err)
 	}
 	bucketFactory.tomb = tomb
+	bucketFactory.LabelsToSendCompiled = make(map[string][]*vm.Program)
+	for key, values := range bucketFactory.LabelsToSend {
+		bucketFactory.LabelsToSendCompiled[key] = make([]*vm.Program, 0)
+		for _, value := range values {
+			valueCompiled, err := expr.Compile(value, expr.Env(exprhelpers.GetExprEnv(map[string]interface{}{"evt": &types.Event{}})))
+			if err != nil {
+				return fmt.Errorf("compilation of '%s' failed: %v", value, err)
+			}
+			bucketFactory.LabelsToSendCompiled[key] = append(bucketFactory.LabelsToSendCompiled[key], valueCompiled)
+		}
+	}
+
 	return nil

 }
@@ -1,6 +1,7 @@
 package leakybucket

 import (
+	"encoding/json"
 	"fmt"
 	"net"
 	"sort"
@@ -14,9 +15,14 @@ import (
 	log "github.com/sirupsen/logrus"

 	"github.com/antonmedv/expr"
+	"github.com/antonmedv/expr/vm"
 	"github.com/crowdsecurity/crowdsec/pkg/exprhelpers"
 )

+const (
+	maxContextValueLen = 4000
+)
+
 //SourceFromEvent extracts and formats a valid models.Source object from an Event
 func SourceFromEvent(evt types.Event, leaky *Leaky) (map[string]models.Source, error) {
 	srcs := make(map[string]models.Source)
@@ -232,6 +238,77 @@ func alertFormatSource(leaky *Leaky, queue *Queue) (map[string]models.Source, st
 	return sources, source_type, nil
 }

+func truncate(values []string) (string, error) {
+	var ret string
+	valueByte, err := json.Marshal(values)
+	if err != nil {
+		return "", fmt.Errorf("unable to dump metas: %s", err)
+	}
+	ret = string(valueByte)
+	for {
+		if len(ret) <= maxContextValueLen {
+			break
+		}
+		values = values[:len(values)-1]
+		valueByte, err = json.Marshal(values)
+		if err != nil {
+			return "", fmt.Errorf("unable to dump metas: %s", err)
+		}
+		ret = string(valueByte)
+	}
+	return ret, nil
+}
+
+func EventToContext(labels map[string][]*vm.Program, queue *Queue) models.Meta {
+	metas := make([]*models.MetaItems0, 0)
+	tmpContext := make(map[string][]string)
+	for _, evt := range queue.Queue {
+		for key, values := range labels {
+			if _, ok := tmpContext[key]; !ok {
+				tmpContext[key] = make([]string, 0)
+			}
+			for _, value := range values {
+				var val string
+				output, err := expr.Run(value, exprhelpers.GetExprEnv(map[string]interface{}{"evt": evt}))
+				if err != nil {
+					log.Warningf("failed to get value of '%v': %v", value, err)
+					continue
+				}
+				switch out := output.(type) {
+				case string:
+					val = out
+				case int:
+					val = strconv.Itoa(out)
+				default:
+					log.Warningf("unexpected return type for context to send : %T", output)
+					continue
+				}
+				if val != "" && !types.InSlice(val, tmpContext[key]) {
+					tmpContext[key] = append(tmpContext[key], val)
+				}
+			}
+		}
+	}
+	for key, values := range tmpContext {
+		if len(values) == 0 {
+			continue
+		}
+		valueStr, err := truncate(values)
+		if err != nil {
+			log.Warningf(err.Error())
+		}
+		meta := models.MetaItems0{
+			Key:   key,
+			Value: valueStr,
+		}
+		metas = append(metas, &meta)
+	}
+
+	ret := models.Meta(metas)
+	return ret
+
+}
+
 //NewAlert will generate a RuntimeAlert and its APIAlert(s) from a bucket that overflowed
 func NewAlert(leaky *Leaky, queue *Queue) (types.RuntimeAlert, error) {
 	var runtimeAlert types.RuntimeAlert
@@ -292,6 +369,7 @@ func NewAlert(leaky *Leaky, queue *Queue) (types.RuntimeAlert, error) {
 	*apiAlert.Message = fmt.Sprintf("%s %s performed '%s' (%d events over %s) at %s", source_scope, sourceStr, leaky.Name, leaky.Total_count, leaky.Ovflw_ts.Sub(leaky.First_ts), leaky.Last_ts)
 	//Get the events from Leaky/Queue
 	apiAlert.Events = EventsFromQueue(queue)
+	apiAlert.Meta = EventToContext(leaky.LabelsToSend, leaky.Queue)

 	//Loop over the Sources and generate appropriate number of ApiAlerts
 	for _, srcValue := range sources {
@@ -317,5 +395,6 @@ func NewAlert(leaky *Leaky, queue *Queue) (types.RuntimeAlert, error) {
 	if leaky.Reprocess {
 		runtimeAlert.Reprocess = true
 	}
+
 	return runtimeAlert, nil
 }
@@ -4,8 +4,10 @@ import (
 	"fmt"
 	"io/ioutil"
 	"path"
+	"sort"

 	"github.com/crowdsecurity/crowdsec/pkg/csconfig"
+	"github.com/crowdsecurity/crowdsec/pkg/cwhub"

 	"github.com/crowdsecurity/grokky"
 	log "github.com/sirupsen/logrus"
@@ -46,6 +48,45 @@ func Init(c map[string]interface{}) (*UnixParserCtx, error) {
 	return &r, nil
 }

+// Return new parsers
+// nodes and povfwnodes are already initialized in LoadStages
+func NewParsers() *Parsers {
+	parsers := &Parsers{
+		Ctx:             &UnixParserCtx{},
+		Povfwctx:        &UnixParserCtx{},
+		StageFiles:      make([]Stagefile, 0),
+		PovfwStageFiles: make([]Stagefile, 0),
+	}
+	for _, itemType := range []string{cwhub.PARSERS, cwhub.PARSERS_OVFLW} {
+		for _, hubParserItem := range cwhub.GetItemMap(itemType) {
+			if hubParserItem.Installed {
+				stagefile := Stagefile{
+					Filename: hubParserItem.LocalPath,
+					Stage:    hubParserItem.Stage,
+				}
+				if itemType == cwhub.PARSERS {
+					parsers.StageFiles = append(parsers.StageFiles, stagefile)
+				}
+				if itemType == cwhub.PARSERS_OVFLW {
+					parsers.PovfwStageFiles = append(parsers.PovfwStageFiles, stagefile)
+				}
+			}
+		}
+	}
+	if parsers.StageFiles != nil {
+		sort.Slice(parsers.StageFiles, func(i, j int) bool {
+			return parsers.StageFiles[i].Filename < parsers.StageFiles[j].Filename
+		})
+	}
+	if parsers.PovfwStageFiles != nil {
+		sort.Slice(parsers.PovfwStageFiles, func(i, j int) bool {
+			return parsers.PovfwStageFiles[i].Filename < parsers.PovfwStageFiles[j].Filename
+		})
+	}
+
+	return parsers
+}
+
 func LoadParsers(cConfig *csconfig.Config, parsers *Parsers) (*Parsers, error) {
 	var err error
@@ -45,6 +45,7 @@ sed -i "s#/usr/local/lib/crowdsec/plugins/#%{_libdir}/%{name}/plugins/#g" config
 %install
 rm -rf %{buildroot}
 mkdir -p %{buildroot}/etc/crowdsec/hub
+mkdir -p %{buildroot}/etc/crowdsec/console/
 mkdir -p %{buildroot}/etc/crowdsec/patterns
 mkdir -p %{buildroot}%{_sharedstatedir}/%{name}/data
 mkdir -p %{buildroot}%{_presetdir}
@@ -62,6 +63,7 @@ install -m 644 -D config/config.yaml %{buildroot}%{_sysconfdir}/crowdsec
 install -m 644 -D config/simulation.yaml %{buildroot}%{_sysconfdir}/crowdsec
 install -m 644 -D config/profiles.yaml %{buildroot}%{_sysconfdir}/crowdsec
 install -m 644 -D config/console.yaml %{buildroot}%{_sysconfdir}/crowdsec
+install -m 644 -D config/context.yaml %{buildroot}%{_sysconfdir}/crowdsec/console/
 install -m 644 -D %{SOURCE1} %{buildroot}%{_presetdir}

 install -m 551 plugins/notifications/slack/notification-slack %{buildroot}%{_libdir}/%{name}/plugins/
@@ -114,6 +116,7 @@ rm -rf %{buildroot}
 %config(noreplace) %{_sysconfdir}/%{name}/simulation.yaml
 %config(noreplace) %{_sysconfdir}/%{name}/profiles.yaml
 %config(noreplace) %{_sysconfdir}/%{name}/console.yaml
+%config(noreplace) %{_sysconfdir}/%{name}/console/context.yaml
 %config(noreplace) %{_presetdir}/80-%{name}.preset
 %config(noreplace) %{_sysconfdir}/%{name}/notifications/http.yaml
 %config(noreplace) %{_sysconfdir}/%{name}/notifications/slack.yaml
tests/bats/81_alerts-context.bats (new file, 52 lines)

@@ -0,0 +1,52 @@
+#!/usr/bin/env bats
+# vim: ft=bats:list:ts=8:sts=4:sw=4:et:ai:si:
+
+set -u
+
+fake_log() {
+    for _ in $(seq 1 6); do
+        echo "$(LC_ALL=C date '+%b %d %H:%M:%S ')"'sd-126005 sshd[12422]: Invalid user netflix from 1.1.1.172 port 35424'
+    done
+}
+
+setup_file() {
+    load "../lib/setup_file.sh"
+    # we reset config and data, but run the daemon only in the tests that need it
+    ./instance-data load
+}
+
+teardown_file() {
+    load "../lib/teardown_file.sh"
+}
+
+setup() {
+    load "../lib/setup.sh"
+}
+
+teardown() {
+    ./instance-crowdsec stop
+}
+
+#----------
+
+@test "$FILE 1.1.1.172 has context" {
+    tmpfile=$(TMPDIR="${BATS_TEST_TMPDIR}" mktemp)
+    touch "${tmpfile}"
+    ACQUIS_YAML=$(config_yq '.crowdsec_service.acquisition_path')
+    echo -e "---\nfilename: $tmpfile\nlabels:\n type: syslog\n" >>"${ACQUIS_YAML}"
+
+    CONTEXT_YAML=$(config_yq '.crowdsec_service.console_context_path')
+    echo -e "---\ntarget_user:\n- evt.Parsed.sshd_invalid_user\nsource_ip:\n- evt.Parsed.sshd_client_ip\nsource_host:\n- evt.Meta.machine\n" >>"${CONTEXT_YAML}"
+
+    ./instance-crowdsec start
+    sleep 2
+    fake_log >>"${tmpfile}"
+    sleep 2
+    rm -f -- "${tmpfile}"
+
+    run -0 cscli alerts inspect 2 -o json
+    run -0 jq -c '.meta | sort_by(.key) | map([.key,.value])' <(output)
+
+    assert_output '[["source_host","[\"sd-126005\"]"],["source_ip","[\"1.1.1.172\"]"],["target_user","[\"netflix\"]"]]'
+
+}
@@ -53,6 +53,8 @@ config_generate() {
         ../config/online_api_credentials.yaml \
         "${CONFIG_DIR}/"

+    cp ../config/context.yaml "${CONFIG_DIR}/console/"
+
     cp ../plugins/notifications/*/{http,email,slack,splunk,dummy}.yaml \
         "${CONFIG_DIR}/notifications/"
|
||||||
.api.client.credentials_path=strenv(CONFIG_DIR)+"/local_api_credentials.yaml" |
|
.api.client.credentials_path=strenv(CONFIG_DIR)+"/local_api_credentials.yaml" |
|
||||||
.api.server.profiles_path=strenv(CONFIG_DIR)+"/profiles.yaml" |
|
.api.server.profiles_path=strenv(CONFIG_DIR)+"/profiles.yaml" |
|
||||||
.api.server.console_path=strenv(CONFIG_DIR)+"/console.yaml" |
|
.api.server.console_path=strenv(CONFIG_DIR)+"/console.yaml" |
|
||||||
|
.crowdsec_service.console_context_path=strenv(CONFIG_DIR) + "/console/context.yaml" |
|
||||||
.api.server.online_client.credentials_path=strenv(CONFIG_DIR)+"/online_api_credentials.yaml"
|
.api.server.online_client.credentials_path=strenv(CONFIG_DIR)+"/online_api_credentials.yaml"
|
||||||
' <../config/config.yaml >"${CONFIG_DIR}/config.yaml"
|
' <../config/config.yaml >"${CONFIG_DIR}/config.yaml"
|
||||||
}
|
}
|
||||||
|
@@ -84,6 +87,7 @@ make_init_data() {
     mkdir -p "${CONFIG_DIR}/notifications"
     mkdir -p "${CONFIG_DIR}/hub"
     mkdir -p "${CONFIG_DIR}/patterns"
+    mkdir -p "${CONFIG_DIR}/console"
     cp -ax "../config/patterns" "${CONFIG_DIR}/"
     config_generate
     # XXX errors from instance-db should be reported...
@@ -24,6 +24,8 @@ CROWDSEC_CONFIG_PATH="${CROWDSEC_PATH}"
 CROWDSEC_LOG_FILE="/var/log/crowdsec.log"
 LAPI_LOG_FILE="/var/log/crowdsec_api.log"
 CROWDSEC_PLUGIN_DIR="${CROWDSEC_USR_DIR}/plugins"
+CROWDSEC_CONSOLE_DIR="${CROWDSEC_PATH}/console"
+

 CROWDSEC_BIN="./cmd/crowdsec/crowdsec"
 CSCLI_BIN="./cmd/crowdsec-cli/cscli"
@@ -387,6 +389,8 @@ check_cs_version () {
 #install crowdsec and cscli
 install_crowdsec() {
 	mkdir -p "${CROWDSEC_DATA_DIR}"
+	mkdir -p "${CROWDSEC_CONSOLE_DIR}"
+
 	(cd config && find patterns -type f -exec install -Dm 644 "{}" "${CROWDSEC_CONFIG_PATH}/{}" \; && cd ../) || exit
 	mkdir -p "${CROWDSEC_CONFIG_PATH}/scenarios" || exit
 	mkdir -p "${CROWDSEC_CONFIG_PATH}/postoverflows" || exit
@@ -408,6 +412,7 @@ install_crowdsec() {
 	install -v -m 644 -D ./config/profiles.yaml "${CROWDSEC_CONFIG_PATH}" 1> /dev/null || exit
 	install -v -m 644 -D ./config/simulation.yaml "${CROWDSEC_CONFIG_PATH}" 1> /dev/null || exit
 	install -v -m 644 -D ./config/"${CONSOLE_FILE}" "${CROWDSEC_CONFIG_PATH}" 1> /dev/null || exit
+	install -v -m 644 -D ./config/context.yaml "${CROWDSEC_CONSOLE_DIR}" 1> /dev/null || exit

 	DATA=${CROWDSEC_DATA_DIR} CFG=${CROWDSEC_CONFIG_PATH} envsubst '$CFG $DATA' < ./config/user.yaml > ${CROWDSEC_CONFIG_PATH}"/user.yaml" || log_fatal "unable to generate user configuration file"
 	if [[ ${DOCKER_MODE} == "false" ]]; then