2022-08-18 09:54:01 +00:00
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
|
|
|
"archive/zip"
|
|
|
|
"bytes"
|
|
|
|
"context"
|
2024-04-22 21:54:51 +00:00
|
|
|
"errors"
|
2022-08-18 09:54:01 +00:00
|
|
|
"fmt"
|
2022-09-06 11:55:03 +00:00
|
|
|
"io"
|
2022-08-18 09:54:01 +00:00
|
|
|
"net/http"
|
|
|
|
"net/url"
|
2022-09-06 11:55:03 +00:00
|
|
|
"os"
|
2022-08-18 09:54:01 +00:00
|
|
|
"path/filepath"
|
|
|
|
"regexp"
|
|
|
|
"strings"
|
2024-04-22 21:54:51 +00:00
|
|
|
"time"
|
2022-08-18 09:54:01 +00:00
|
|
|
|
|
|
|
"github.com/blackfireio/osinfo"
|
2022-10-07 09:05:35 +00:00
|
|
|
"github.com/go-openapi/strfmt"
|
|
|
|
log "github.com/sirupsen/logrus"
|
|
|
|
"github.com/spf13/cobra"
|
|
|
|
|
2024-04-22 21:54:51 +00:00
|
|
|
"github.com/crowdsecurity/go-cs-lib/trace"
|
2023-07-28 14:35:08 +00:00
|
|
|
"github.com/crowdsecurity/go-cs-lib/version"
|
2023-05-23 08:52:47 +00:00
|
|
|
|
2023-10-04 08:42:47 +00:00
|
|
|
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
|
2022-08-18 09:54:01 +00:00
|
|
|
"github.com/crowdsecurity/crowdsec/pkg/apiclient"
|
|
|
|
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
|
|
|
|
"github.com/crowdsecurity/crowdsec/pkg/cwversion"
|
|
|
|
"github.com/crowdsecurity/crowdsec/pkg/database"
|
2022-12-20 15:11:51 +00:00
|
|
|
"github.com/crowdsecurity/crowdsec/pkg/fflag"
|
2022-08-18 09:54:01 +00:00
|
|
|
"github.com/crowdsecurity/crowdsec/pkg/models"
|
|
|
|
)
|
|
|
|
|
|
|
|
// Paths inside the generated support zip where each collected piece of
// information is stored.
const (
	SUPPORT_METRICS_HUMAN_PATH      = "metrics/metrics.human"
	SUPPORT_METRICS_PROMETHEUS_PATH = "metrics/metrics.prometheus"
	SUPPORT_VERSION_PATH            = "version.txt"
	SUPPORT_FEATURES_PATH           = "features.txt"
	SUPPORT_OS_INFO_PATH            = "osinfo.txt"
	SUPPORT_PARSERS_PATH            = "hub/parsers.txt"
	SUPPORT_SCENARIOS_PATH          = "hub/scenarios.txt"
	// BUG FIX: was "hub/scenarios.txt", which collided with
	// SUPPORT_SCENARIOS_PATH and made the contexts entry clobber the
	// scenarios entry in the zip.
	SUPPORT_CONTEXTS_PATH                = "hub/contexts.txt"
	SUPPORT_COLLECTIONS_PATH             = "hub/collections.txt"
	SUPPORT_POSTOVERFLOWS_PATH           = "hub/postoverflows.txt"
	SUPPORT_BOUNCERS_PATH                = "lapi/bouncers.txt"
	SUPPORT_AGENTS_PATH                  = "lapi/agents.txt"
	SUPPORT_CROWDSEC_CONFIG_PATH         = "config/crowdsec.yaml"
	SUPPORT_LAPI_STATUS_PATH             = "lapi_status.txt"
	SUPPORT_CAPI_STATUS_PATH             = "capi_status.txt"
	SUPPORT_ACQUISITION_CONFIG_BASE_PATH = "config/acquis/"
	SUPPORT_CROWDSEC_PROFILE_PATH        = "config/profiles.yaml"
	SUPPORT_CRASH_PATH                   = "crash/"
)
|
|
|
|
|
2023-06-08 13:08:51 +00:00
|
|
|
// ANSI escape sequence matcher, taken from https://github.com/acarl005/stripansi
var reStripAnsi = regexp.MustCompile("[\u001B\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[a-zA-Z\\d]*)*)?\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PRZcf-ntqry=><~]))")

// stripAnsiString removes ANSI color/control escape sequences from str,
// so the files stored in the support archive are plain text.
func stripAnsiString(str string) string {
	// the byte version doesn't strip correctly
	cleaned := reStripAnsi.ReplaceAllString(str, "")

	return cleaned
}
|
|
|
|
|
2022-08-18 09:54:01 +00:00
|
|
|
func collectMetrics() ([]byte, []byte, error) {
|
|
|
|
log.Info("Collecting prometheus metrics")
|
|
|
|
|
|
|
|
if csConfig.Cscli.PrometheusUrl == "" {
|
|
|
|
log.Warn("No Prometheus URL configured, metrics will not be collected")
|
2024-04-22 21:54:51 +00:00
|
|
|
return nil, nil, errors.New("prometheus_uri is not set")
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
2022-10-07 09:05:35 +00:00
|
|
|
humanMetrics := bytes.NewBuffer(nil)
|
2022-08-18 09:54:01 +00:00
|
|
|
|
2024-02-06 09:07:05 +00:00
|
|
|
ms := NewMetricStore()
|
|
|
|
|
|
|
|
if err := ms.Fetch(csConfig.Cscli.PrometheusUrl); err != nil {
|
2024-04-22 21:54:51 +00:00
|
|
|
return nil, nil, fmt.Errorf("could not fetch prometheus metrics: %w", err)
|
2024-02-06 09:07:05 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if err := ms.Format(humanMetrics, nil, "human", false); err != nil {
|
|
|
|
return nil, nil, err
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
2023-11-24 14:57:32 +00:00
|
|
|
req, err := http.NewRequest(http.MethodGet, csConfig.Cscli.PrometheusUrl, nil)
|
2022-08-18 09:54:01 +00:00
|
|
|
if err != nil {
|
2024-04-22 21:54:51 +00:00
|
|
|
return nil, nil, fmt.Errorf("could not create requests to prometheus endpoint: %w", err)
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-08-18 09:54:01 +00:00
|
|
|
client := &http.Client{}
|
|
|
|
|
2024-02-01 21:36:21 +00:00
|
|
|
resp, err := client.Do(req)
|
2022-08-18 09:54:01 +00:00
|
|
|
if err != nil {
|
2024-04-22 21:54:51 +00:00
|
|
|
return nil, nil, fmt.Errorf("could not get metrics from prometheus endpoint: %w", err)
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
2022-09-06 11:55:03 +00:00
|
|
|
body, err := io.ReadAll(resp.Body)
|
2022-08-18 09:54:01 +00:00
|
|
|
if err != nil {
|
2024-04-22 21:54:51 +00:00
|
|
|
return nil, nil, fmt.Errorf("could not read metrics from prometheus endpoint: %w", err)
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
2022-10-07 09:05:35 +00:00
|
|
|
return humanMetrics.Bytes(), body, nil
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func collectVersion() []byte {
|
|
|
|
log.Info("Collecting version")
|
|
|
|
return []byte(cwversion.ShowStr())
|
|
|
|
}
|
|
|
|
|
2022-12-20 15:11:51 +00:00
|
|
|
func collectFeatures() []byte {
|
|
|
|
log.Info("Collecting feature flags")
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-12-26 13:23:41 +00:00
|
|
|
enabledFeatures := fflag.Crowdsec.GetEnabledFeatures()
|
2022-12-20 15:11:51 +00:00
|
|
|
|
|
|
|
w := bytes.NewBuffer(nil)
|
|
|
|
for _, k := range enabledFeatures {
|
|
|
|
fmt.Fprintf(w, "%s\n", k)
|
|
|
|
}
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-12-20 15:11:51 +00:00
|
|
|
return w.Bytes()
|
|
|
|
}
|
|
|
|
|
2022-08-18 09:54:01 +00:00
|
|
|
func collectOSInfo() ([]byte, error) {
|
|
|
|
log.Info("Collecting OS info")
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-08-18 09:54:01 +00:00
|
|
|
info, err := osinfo.GetOSInfo()
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
w := bytes.NewBuffer(nil)
|
2024-04-22 21:54:51 +00:00
|
|
|
fmt.Fprintf(w, "Architecture: %s\n", info.Architecture)
|
|
|
|
fmt.Fprintf(w, "Family: %s\n", info.Family)
|
|
|
|
fmt.Fprintf(w, "ID: %s\n", info.ID)
|
|
|
|
fmt.Fprintf(w, "Name: %s\n", info.Name)
|
|
|
|
fmt.Fprintf(w, "Codename: %s\n", info.Codename)
|
|
|
|
fmt.Fprintf(w, "Version: %s\n", info.Version)
|
|
|
|
fmt.Fprintf(w, "Build: %s\n", info.Build)
|
2022-08-18 09:54:01 +00:00
|
|
|
|
|
|
|
return w.Bytes(), nil
|
|
|
|
}
|
|
|
|
|
2023-11-24 14:57:32 +00:00
|
|
|
func collectHubItems(hub *cwhub.Hub, itemType string) []byte {
|
|
|
|
var err error
|
|
|
|
|
2022-10-07 09:05:35 +00:00
|
|
|
out := bytes.NewBuffer(nil)
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-08-18 09:54:01 +00:00
|
|
|
log.Infof("Collecting %s list", itemType)
|
2023-11-24 14:57:32 +00:00
|
|
|
|
|
|
|
items := make(map[string][]*cwhub.Item)
|
|
|
|
|
|
|
|
if items[itemType], err = selectItems(hub, itemType, nil, true); err != nil {
|
|
|
|
log.Warnf("could not collect %s list: %s", itemType, err)
|
|
|
|
}
|
|
|
|
|
2023-12-05 12:38:52 +00:00
|
|
|
if err := listItems(out, []string{itemType}, items, false); err != nil {
|
2023-11-24 14:57:32 +00:00
|
|
|
log.Warnf("could not collect %s list: %s", itemType, err)
|
|
|
|
}
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-10-07 09:05:35 +00:00
|
|
|
return out.Bytes()
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func collectBouncers(dbClient *database.Client) ([]byte, error) {
|
2022-10-07 09:05:35 +00:00
|
|
|
out := bytes.NewBuffer(nil)
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2024-01-31 11:40:41 +00:00
|
|
|
bouncers, err := dbClient.ListBouncers()
|
2022-10-07 09:05:35 +00:00
|
|
|
if err != nil {
|
2024-04-22 21:54:51 +00:00
|
|
|
return nil, fmt.Errorf("unable to list bouncers: %w", err)
|
2022-10-07 09:05:35 +00:00
|
|
|
}
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2024-01-31 11:40:41 +00:00
|
|
|
getBouncersTable(out, bouncers)
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-10-07 09:05:35 +00:00
|
|
|
return out.Bytes(), nil
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func collectAgents(dbClient *database.Client) ([]byte, error) {
|
2022-10-07 09:05:35 +00:00
|
|
|
out := bytes.NewBuffer(nil)
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2024-02-01 16:22:52 +00:00
|
|
|
machines, err := dbClient.ListMachines()
|
2022-10-07 09:05:35 +00:00
|
|
|
if err != nil {
|
2024-04-22 21:54:51 +00:00
|
|
|
return nil, fmt.Errorf("unable to list machines: %w", err)
|
2022-10-07 09:05:35 +00:00
|
|
|
}
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2024-02-01 16:22:52 +00:00
|
|
|
getAgentsTable(out, machines)
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-10-07 09:05:35 +00:00
|
|
|
return out.Bytes(), nil
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
2023-11-24 14:57:32 +00:00
|
|
|
func collectAPIStatus(login string, password string, endpoint string, prefix string, hub *cwhub.Hub) []byte {
|
2022-08-18 09:54:01 +00:00
|
|
|
if csConfig.API.Client == nil || csConfig.API.Client.Credentials == nil {
|
|
|
|
return []byte("No agent credentials found, are we LAPI ?")
|
|
|
|
}
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-08-18 09:54:01 +00:00
|
|
|
pwd := strfmt.Password(password)
|
|
|
|
|
2024-02-01 21:36:21 +00:00
|
|
|
apiurl, err := url.Parse(endpoint)
|
2022-08-18 09:54:01 +00:00
|
|
|
if err != nil {
|
2022-11-29 08:16:07 +00:00
|
|
|
return []byte(fmt.Sprintf("cannot parse API URL: %s", err))
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2024-04-24 09:09:37 +00:00
|
|
|
scenarios, err := hub.GetInstalledNamesByType(cwhub.SCENARIOS)
|
2022-08-18 09:54:01 +00:00
|
|
|
if err != nil {
|
2022-11-29 08:16:07 +00:00
|
|
|
return []byte(fmt.Sprintf("could not collect scenarios: %s", err))
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
Client, err = apiclient.NewDefaultClient(apiurl,
|
|
|
|
prefix,
|
2023-05-23 08:52:47 +00:00
|
|
|
fmt.Sprintf("crowdsec/%s", version.String()),
|
2022-08-18 09:54:01 +00:00
|
|
|
nil)
|
|
|
|
if err != nil {
|
2022-11-29 08:16:07 +00:00
|
|
|
return []byte(fmt.Sprintf("could not init client: %s", err))
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-08-18 09:54:01 +00:00
|
|
|
t := models.WatcherAuthRequest{
|
|
|
|
MachineID: &login,
|
|
|
|
Password: &pwd,
|
|
|
|
Scenarios: scenarios,
|
|
|
|
}
|
|
|
|
|
2023-02-16 15:16:26 +00:00
|
|
|
_, _, err = Client.Auth.AuthenticateWatcher(context.Background(), t)
|
2022-08-18 09:54:01 +00:00
|
|
|
if err != nil {
|
|
|
|
return []byte(fmt.Sprintf("Could not authenticate to API: %s", err))
|
|
|
|
} else {
|
|
|
|
return []byte("Successfully authenticated to LAPI")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func collectCrowdsecConfig() []byte {
|
|
|
|
log.Info("Collecting crowdsec config")
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-09-06 11:55:03 +00:00
|
|
|
config, err := os.ReadFile(*csConfig.FilePath)
|
2022-08-18 09:54:01 +00:00
|
|
|
if err != nil {
|
|
|
|
return []byte(fmt.Sprintf("could not read config file: %s", err))
|
|
|
|
}
|
|
|
|
|
|
|
|
r := regexp.MustCompile(`(\s+password:|\s+user:|\s+host:)\s+.*`)
|
|
|
|
|
|
|
|
return r.ReplaceAll(config, []byte("$1 ****REDACTED****"))
|
|
|
|
}
|
|
|
|
|
|
|
|
func collectCrowdsecProfile() []byte {
|
|
|
|
log.Info("Collecting crowdsec profile")
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-09-06 11:55:03 +00:00
|
|
|
config, err := os.ReadFile(csConfig.API.Server.ProfilesPath)
|
2022-08-18 09:54:01 +00:00
|
|
|
if err != nil {
|
|
|
|
return []byte(fmt.Sprintf("could not read profile file: %s", err))
|
|
|
|
}
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-08-18 09:54:01 +00:00
|
|
|
return config
|
|
|
|
}
|
|
|
|
|
|
|
|
func collectAcquisitionConfig() map[string][]byte {
|
|
|
|
log.Info("Collecting acquisition config")
|
2024-02-01 21:36:21 +00:00
|
|
|
|
2022-08-18 09:54:01 +00:00
|
|
|
ret := make(map[string][]byte)
|
|
|
|
|
|
|
|
for _, filename := range csConfig.Crowdsec.AcquisitionFiles {
|
2022-09-06 11:55:03 +00:00
|
|
|
fileContent, err := os.ReadFile(filename)
|
2022-08-18 09:54:01 +00:00
|
|
|
if err != nil {
|
|
|
|
ret[filename] = []byte(fmt.Sprintf("could not read file: %s", err))
|
|
|
|
} else {
|
|
|
|
ret[filename] = fileContent
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
2024-04-22 21:54:51 +00:00
|
|
|
// collectCrash returns the paths of the crash dumps (panic traces) found by
// the trace package from previous crowdsec runs.
func collectCrash() ([]string, error) {
	log.Info("Collecting crash dumps")
	return trace.List()
}
|
|
|
|
|
2023-12-19 16:20:09 +00:00
|
|
|
// cliSupport groups the "cscli support" subcommands.
type cliSupport struct{}

// NewCLISupport returns a handler for the "cscli support" command tree.
func NewCLISupport() *cliSupport {
	return &cliSupport{}
}
|
|
|
|
|
|
|
|
// NewCommand builds the top-level "cscli support" command, which only acts
// as a parent for its subcommands (currently "dump").
func (cli cliSupport) NewCommand() *cobra.Command {
	cmd := &cobra.Command{
		Use: "support [action]",
		Short: "Provide commands to help during support",
		Args: cobra.MinimumNArgs(1),
		DisableAutoGenTag: true,
		// NOTE(review): returns nil unconditionally; presumably present to
		// override an inherited persistent hook — confirm against root command
		PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
			return nil
		},
	}

	cmd.AddCommand(cli.NewDumpCmd())

	return cmd
}
|
|
|
|
|
|
|
|
func (cli cliSupport) NewDumpCmd() *cobra.Command {
|
2022-08-18 09:54:01 +00:00
|
|
|
var outFile string
|
|
|
|
|
2023-12-07 13:36:35 +00:00
|
|
|
cmd := &cobra.Command{
|
2022-08-18 09:54:01 +00:00
|
|
|
Use: "dump",
|
|
|
|
Short: "Dump all your configuration to a zip file for easier support",
|
|
|
|
Long: `Dump the following informations:
|
|
|
|
- Crowdsec version
|
|
|
|
- OS version
|
|
|
|
- Installed collections list
|
|
|
|
- Installed parsers list
|
|
|
|
- Installed scenarios list
|
|
|
|
- Installed postoverflows list
|
2023-12-07 15:20:13 +00:00
|
|
|
- Installed context list
|
2022-08-18 09:54:01 +00:00
|
|
|
- Bouncers list
|
|
|
|
- Machines list
|
|
|
|
- CAPI status
|
|
|
|
- LAPI status
|
|
|
|
- Crowdsec config (sensitive information like username and password are redacted)
|
|
|
|
- Crowdsec metrics`,
|
|
|
|
Example: `cscli support dump
|
|
|
|
cscli support dump -f /tmp/crowdsec-support.zip
|
|
|
|
`,
|
|
|
|
Args: cobra.NoArgs,
|
|
|
|
DisableAutoGenTag: true,
|
2024-04-26 14:56:15 +00:00
|
|
|
RunE: func(_ *cobra.Command, _ []string) error {
|
2022-08-18 09:54:01 +00:00
|
|
|
var err error
|
|
|
|
var skipHub, skipDB, skipCAPI, skipLAPI, skipAgent bool
|
|
|
|
infos := map[string][]byte{
|
2023-02-16 15:16:26 +00:00
|
|
|
SUPPORT_VERSION_PATH: collectVersion(),
|
2022-12-20 15:11:51 +00:00
|
|
|
SUPPORT_FEATURES_PATH: collectFeatures(),
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if outFile == "" {
|
|
|
|
outFile = "/tmp/crowdsec-support.zip"
|
|
|
|
}
|
|
|
|
|
|
|
|
dbClient, err = database.NewClient(csConfig.DbConfig)
|
|
|
|
if err != nil {
|
|
|
|
log.Warnf("Could not connect to database: %s", err)
|
|
|
|
skipDB = true
|
|
|
|
infos[SUPPORT_BOUNCERS_PATH] = []byte(err.Error())
|
|
|
|
infos[SUPPORT_AGENTS_PATH] = []byte(err.Error())
|
|
|
|
}
|
|
|
|
|
2024-02-01 21:36:21 +00:00
|
|
|
if err = csConfig.LoadAPIServer(true); err != nil {
|
2022-08-18 09:54:01 +00:00
|
|
|
log.Warnf("could not load LAPI, skipping CAPI check")
|
|
|
|
skipLAPI = true
|
|
|
|
infos[SUPPORT_CAPI_STATUS_PATH] = []byte(err.Error())
|
|
|
|
}
|
|
|
|
|
2024-02-01 21:36:21 +00:00
|
|
|
if err = csConfig.LoadCrowdsec(); err != nil {
|
2022-08-18 09:54:01 +00:00
|
|
|
log.Warnf("could not load agent config, skipping crowdsec config check")
|
|
|
|
skipAgent = true
|
|
|
|
}
|
|
|
|
|
2023-12-19 16:20:09 +00:00
|
|
|
hub, err := require.Hub(csConfig, nil, nil)
|
2023-11-24 14:57:32 +00:00
|
|
|
if err != nil {
|
2022-08-18 09:54:01 +00:00
|
|
|
log.Warn("Could not init hub, running on LAPI ? Hub related information will not be collected")
|
|
|
|
skipHub = true
|
|
|
|
infos[SUPPORT_PARSERS_PATH] = []byte(err.Error())
|
|
|
|
infos[SUPPORT_SCENARIOS_PATH] = []byte(err.Error())
|
|
|
|
infos[SUPPORT_POSTOVERFLOWS_PATH] = []byte(err.Error())
|
2023-12-07 15:20:13 +00:00
|
|
|
infos[SUPPORT_CONTEXTS_PATH] = []byte(err.Error())
|
2022-08-18 09:54:01 +00:00
|
|
|
infos[SUPPORT_COLLECTIONS_PATH] = []byte(err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
if csConfig.API.Client == nil || csConfig.API.Client.Credentials == nil {
|
|
|
|
log.Warn("no agent credentials found, skipping LAPI connectivity check")
|
|
|
|
if _, ok := infos[SUPPORT_LAPI_STATUS_PATH]; ok {
|
|
|
|
infos[SUPPORT_LAPI_STATUS_PATH] = append(infos[SUPPORT_LAPI_STATUS_PATH], []byte("\nNo LAPI credentials found")...)
|
|
|
|
}
|
|
|
|
skipLAPI = true
|
|
|
|
}
|
|
|
|
|
2022-12-20 15:11:51 +00:00
|
|
|
if csConfig.API.Server == nil || csConfig.API.Server.OnlineClient == nil || csConfig.API.Server.OnlineClient.Credentials == nil {
|
2022-08-18 09:54:01 +00:00
|
|
|
log.Warn("no CAPI credentials found, skipping CAPI connectivity check")
|
|
|
|
skipCAPI = true
|
|
|
|
}
|
|
|
|
|
|
|
|
infos[SUPPORT_METRICS_HUMAN_PATH], infos[SUPPORT_METRICS_PROMETHEUS_PATH], err = collectMetrics()
|
|
|
|
if err != nil {
|
|
|
|
log.Warnf("could not collect prometheus metrics information: %s", err)
|
|
|
|
infos[SUPPORT_METRICS_HUMAN_PATH] = []byte(err.Error())
|
|
|
|
infos[SUPPORT_METRICS_PROMETHEUS_PATH] = []byte(err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
infos[SUPPORT_OS_INFO_PATH], err = collectOSInfo()
|
|
|
|
if err != nil {
|
|
|
|
log.Warnf("could not collect OS information: %s", err)
|
|
|
|
infos[SUPPORT_OS_INFO_PATH] = []byte(err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
infos[SUPPORT_CROWDSEC_CONFIG_PATH] = collectCrowdsecConfig()
|
|
|
|
|
|
|
|
if !skipHub {
|
2023-11-24 14:57:32 +00:00
|
|
|
infos[SUPPORT_PARSERS_PATH] = collectHubItems(hub, cwhub.PARSERS)
|
|
|
|
infos[SUPPORT_SCENARIOS_PATH] = collectHubItems(hub, cwhub.SCENARIOS)
|
|
|
|
infos[SUPPORT_POSTOVERFLOWS_PATH] = collectHubItems(hub, cwhub.POSTOVERFLOWS)
|
2023-12-07 15:20:13 +00:00
|
|
|
infos[SUPPORT_CONTEXTS_PATH] = collectHubItems(hub, cwhub.POSTOVERFLOWS)
|
2023-11-24 14:57:32 +00:00
|
|
|
infos[SUPPORT_COLLECTIONS_PATH] = collectHubItems(hub, cwhub.COLLECTIONS)
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if !skipDB {
|
|
|
|
infos[SUPPORT_BOUNCERS_PATH], err = collectBouncers(dbClient)
|
|
|
|
if err != nil {
|
|
|
|
log.Warnf("could not collect bouncers information: %s", err)
|
|
|
|
infos[SUPPORT_BOUNCERS_PATH] = []byte(err.Error())
|
|
|
|
}
|
|
|
|
|
|
|
|
infos[SUPPORT_AGENTS_PATH], err = collectAgents(dbClient)
|
|
|
|
if err != nil {
|
|
|
|
log.Warnf("could not collect agents information: %s", err)
|
|
|
|
infos[SUPPORT_AGENTS_PATH] = []byte(err.Error())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if !skipCAPI {
|
|
|
|
log.Info("Collecting CAPI status")
|
|
|
|
infos[SUPPORT_CAPI_STATUS_PATH] = collectAPIStatus(csConfig.API.Server.OnlineClient.Credentials.Login,
|
|
|
|
csConfig.API.Server.OnlineClient.Credentials.Password,
|
|
|
|
csConfig.API.Server.OnlineClient.Credentials.URL,
|
2023-11-24 14:57:32 +00:00
|
|
|
CAPIURLPrefix,
|
|
|
|
hub)
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if !skipLAPI {
|
|
|
|
log.Info("Collection LAPI status")
|
|
|
|
infos[SUPPORT_LAPI_STATUS_PATH] = collectAPIStatus(csConfig.API.Client.Credentials.Login,
|
|
|
|
csConfig.API.Client.Credentials.Password,
|
|
|
|
csConfig.API.Client.Credentials.URL,
|
2023-11-24 14:57:32 +00:00
|
|
|
LAPIURLPrefix,
|
|
|
|
hub)
|
2022-08-18 09:54:01 +00:00
|
|
|
infos[SUPPORT_CROWDSEC_PROFILE_PATH] = collectCrowdsecProfile()
|
|
|
|
}
|
|
|
|
|
|
|
|
if !skipAgent {
|
|
|
|
acquis := collectAcquisitionConfig()
|
|
|
|
|
|
|
|
for filename, content := range acquis {
|
|
|
|
fname := strings.ReplaceAll(filename, string(filepath.Separator), "___")
|
|
|
|
infos[SUPPORT_ACQUISITION_CONFIG_BASE_PATH+fname] = content
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-04-22 21:54:51 +00:00
|
|
|
crash, err := collectCrash()
|
|
|
|
if err != nil {
|
|
|
|
log.Errorf("could not collect crash dumps: %s", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, filename := range crash {
|
|
|
|
content, err := os.ReadFile(filename)
|
|
|
|
if err != nil {
|
|
|
|
log.Errorf("could not read crash dump %s: %s", filename, err)
|
|
|
|
}
|
|
|
|
|
|
|
|
infos[SUPPORT_CRASH_PATH+filepath.Base(filename)] = content
|
|
|
|
}
|
|
|
|
|
2022-08-18 09:54:01 +00:00
|
|
|
w := bytes.NewBuffer(nil)
|
|
|
|
zipWriter := zip.NewWriter(w)
|
|
|
|
|
|
|
|
for filename, data := range infos {
|
2024-04-22 21:54:51 +00:00
|
|
|
header := &zip.FileHeader{
|
|
|
|
Name: filename,
|
|
|
|
Method: zip.Deflate,
|
|
|
|
// TODO: retain mtime where possible (esp. trace)
|
|
|
|
Modified: time.Now(),
|
|
|
|
}
|
|
|
|
fw, err := zipWriter.CreateHeader(header)
|
2022-08-18 09:54:01 +00:00
|
|
|
if err != nil {
|
|
|
|
log.Errorf("Could not add zip entry for %s: %s", filename, err)
|
|
|
|
continue
|
|
|
|
}
|
2023-06-08 13:08:51 +00:00
|
|
|
fw.Write([]byte(stripAnsiString(string(data))))
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
2022-12-20 15:11:51 +00:00
|
|
|
|
2022-08-18 09:54:01 +00:00
|
|
|
err = zipWriter.Close()
|
|
|
|
if err != nil {
|
2024-04-26 14:56:15 +00:00
|
|
|
return fmt.Errorf("could not finalize zip file: %s", err)
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
2022-12-20 15:11:51 +00:00
|
|
|
|
2024-04-26 14:56:15 +00:00
|
|
|
if outFile == "-" {
|
|
|
|
_, err = os.Stdout.Write(w.Bytes())
|
|
|
|
return err
|
|
|
|
}
|
2023-12-08 09:51:15 +00:00
|
|
|
err = os.WriteFile(outFile, w.Bytes(), 0o600)
|
2022-08-18 09:54:01 +00:00
|
|
|
if err != nil {
|
2024-04-26 14:56:15 +00:00
|
|
|
return fmt.Errorf("could not write zip file to %s: %s", outFile, err)
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|
|
|
|
log.Infof("Written zip file to %s", outFile)
|
2024-04-26 14:56:15 +00:00
|
|
|
return nil
|
2022-08-18 09:54:01 +00:00
|
|
|
},
|
|
|
|
}
|
|
|
|
|
2023-12-07 13:36:35 +00:00
|
|
|
cmd.Flags().StringVarP(&outFile, "outFile", "f", "", "File to dump the information to")
|
|
|
|
|
|
|
|
return cmd
|
2022-08-18 09:54:01 +00:00
|
|
|
}
|