make ParseIndex(), DownloadIndex() private methods (#2579)
* unnecessary pointer type
* ParseIndex() as hub method, don't collect missing items since they are never used
* don't export hub.parseIndex(), hub.downloadIndex()
This commit is contained in:
parent 41d19de092
commit bfd94ceda7
9 changed files with 49 additions and 57 deletions
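For context, after this change callers no longer read the index and call ParseIndex() themselves; they construct the hub through NewHub(), which parses the index internally via the now-private parseIndex(). A minimal sketch of the new call shape, based on the hubtest changes further down (the import paths and field names come from this diff, the directory values are only illustrative):

    package main

    import (
        "log"

        "github.com/crowdsecurity/crowdsec/pkg/csconfig"
        "github.com/crowdsecurity/crowdsec/pkg/cwhub"
    )

    func main() {
        // Field names follow the csconfig.LocalHubCfg usage in the hubtest
        // hunk below; the paths here are examples only.
        local := &csconfig.LocalHubCfg{
            HubDir:         "/etc/crowdsec/hub",
            HubIndexFile:   "/etc/crowdsec/hub/.index.json",
            InstallDir:     "/etc/crowdsec",
            InstallDataDir: "/var/lib/crowdsec/data",
        }

        // nil remote and downloadIndex=false: only read and parse the local
        // index (via the private parseIndex) and run LocalSync.
        hub, err := cwhub.NewHub(local, nil, false)
        if err != nil {
            log.Fatalf("unable to load hub: %s", err)
        }

        _ = hub
    }
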
@@ -15,7 +15,7 @@ import (
 )
 
 type DataSet struct {
-    Data []*types.DataSource `yaml:"data,omitempty"`
+    Data []types.DataSource `yaml:"data,omitempty"`
 }
 
 func downloadFile(url string, destPath string) error {

@@ -59,7 +59,7 @@ func downloadFile(url string, destPath string) error {
     return nil
 }
 
-func GetData(data []*types.DataSource, dataDir string) error {
+func GetData(data []types.DataSource, dataDir string) error {
     for _, dataS := range data {
         destPath := filepath.Join(dataDir, dataS.DestPath)
         log.Infof("downloading data '%s' in '%s'", dataS.SourceURL, destPath)

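The two hunks above drop the pointer indirection from the data-source slice; the loop only reads each element, so a slice of values works just as well and rules out nil entries. A standalone sketch of the same pattern, using a hypothetical struct rather than the real types.DataSource:

    package main

    import "fmt"

    // dataSource mirrors the field names used above (SourceURL, DestPath)
    // purely for illustration; it is not the real types.DataSource.
    type dataSource struct {
        SourceURL string
        DestPath  string
    }

    // getData walks a slice of values: the elements are only read, so no
    // pointer indirection is needed and nil entries are impossible.
    func getData(data []dataSource, dataDir string) error {
        for _, d := range data {
            fmt.Printf("downloading data '%s' in '%s/%s'\n", d.SourceURL, dataDir, d.DestPath)
        }

        return nil
    }

    func main() {
        sources := []dataSource{
            {SourceURL: "https://example.com/file.txt", DestPath: "file.txt"},
        }

        if err := getData(sources, "/tmp/data"); err != nil {
            fmt.Println(err)
        }
    }
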
@@ -123,6 +123,7 @@ func (h *Hub) DisableItem(target *Item, purge bool, force bool) error {
 
     // disable sub-items if any - it's a collection
     for _, sub := range target.SubItems() {
+        // XXX: we do this already when syncing, do we really need to do consistency checks here and there?
         val, ok := h.Items[sub.Type][sub.Name]
         if !ok {
             log.Errorf("Referred %s %s in collection %s doesn't exist.", sub.Type, sub.Name, target.Name)

@@ -7,6 +7,6 @@ import (
 var (
     // ErrNilRemoteHub is returned when the remote hub configuration is not provided to the NewHub constructor.
    // All attempts to download index or items will return this error.
-    ErrMissingReference = errors.New("Reference(s) missing in collection")
     ErrNilRemoteHub = errors.New("remote hub configuration is not provided. Please report this issue to the developers")
+    ErrIndexNotFound = errors.New("index not found")
 )

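These sentinel errors are wrapped with %w elsewhere in the package, so callers are expected to match them with errors.Is rather than by string comparison. A small self-contained sketch of that check, with a stand-in variable instead of the exported ones above:

    package main

    import (
        "errors"
        "fmt"
    )

    // errIndexNotFound stands in for cwhub.ErrIndexNotFound in this sketch.
    var errIndexNotFound = errors.New("index not found")

    func load() error {
        // wrapping with %w keeps the sentinel reachable for errors.Is
        return fmt.Errorf("failed to load index: %w", errIndexNotFound)
    }

    func main() {
        if err := load(); errors.Is(err, errIndexNotFound) {
            fmt.Println("hub index is missing:", err)
        }
    }
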
@@ -2,7 +2,6 @@ package cwhub
 
 import (
     "encoding/json"
-    "errors"
     "fmt"
     "os"
     "strings"

@@ -20,10 +19,7 @@ type Hub struct {
     skippedTainted int
 }
 
-var (
-    theHub *Hub
-    ErrIndexNotFound = fmt.Errorf("index not found")
-)
+var theHub *Hub
 
 // GetHub returns the hub singleton
 // it returns an error if it's not initialized to avoid nil dereference

@@ -44,59 +40,47 @@ func NewHub(local *csconfig.LocalHubCfg, remote *RemoteHubCfg, downloadIndex bool
     }
 
     if downloadIndex {
-        if err := remote.DownloadIndex(local.HubIndexFile); err != nil {
+        if err := remote.downloadIndex(local.HubIndexFile); err != nil {
             return nil, err
         }
     }
 
     log.Debugf("loading hub idx %s", local.HubIndexFile)
 
-    bidx, err := os.ReadFile(local.HubIndexFile)
-    if err != nil {
-        return nil, fmt.Errorf("unable to read index file: %w", err)
-    }
-
-    ret, err := ParseIndex(bidx)
-    if err != nil {
-        if !errors.Is(err, ErrMissingReference) {
-            return nil, fmt.Errorf("failed to load index: %w", err)
-        }
-
-        // XXX: why the error check if we bail out anyway?
-        return nil, err
-    }
-
     theHub = &Hub{
-        Items:  ret,
         local:  local,
         remote: remote,
     }
 
-    if _, err = theHub.LocalSync(); err != nil {
+    if err := theHub.parseIndex(); err != nil {
+        return nil, fmt.Errorf("failed to load index: %w", err)
+    }
+
+    if _, err := theHub.LocalSync(); err != nil {
         return nil, fmt.Errorf("failed to sync hub index: %w", err)
     }
 
     return theHub, nil
 }
 
-// ParseIndex takes the content of an index file and returns the map of associated parsers/scenarios/collections
-func ParseIndex(buff []byte) (HubItems, error) {
-    var (
-        RawIndex     HubItems
-        missingItems []string
-    )
+// parseIndex takes the content of an index file and fills the map of associated parsers/scenarios/collections
+func (h *Hub) parseIndex() error {
+    bidx, err := os.ReadFile(h.local.HubIndexFile)
+    if err != nil {
+        return fmt.Errorf("unable to read index file: %w", err)
+    }
 
-    if err := json.Unmarshal(buff, &RawIndex); err != nil {
-        return nil, fmt.Errorf("failed to unmarshal index: %w", err)
+    if err := json.Unmarshal(bidx, &h.Items); err != nil {
+        return fmt.Errorf("failed to unmarshal index: %w", err)
     }
 
     log.Debugf("%d item types in hub index", len(ItemTypes))
 
     // Iterate over the different types to complete the struct
     for _, itemType := range ItemTypes {
-        log.Tracef("%s: %d items", itemType, len(RawIndex[itemType]))
+        log.Tracef("%s: %d items", itemType, len(h.Items[itemType]))
 
-        for name, item := range RawIndex[itemType] {
+        for name, item := range h.Items[itemType] {
             item.Name = name
 
             // if the item has no (redundant) author, take it from the json key

@@ -107,24 +91,19 @@ func ParseIndex(buff []byte) (HubItems, error) {
             item.Type = itemType
             x := strings.Split(item.RemotePath, "/")
             item.FileName = x[len(x)-1]
-            RawIndex[itemType][name] = item
+            h.Items[itemType][name] = item
 
             // if it's a collection, check its sub-items are present
-            // XXX should be done later
+            // XXX should be done later, maybe report all missing at once?
             for _, sub := range item.SubItems() {
-                if _, ok := RawIndex[sub.Type][sub.Name]; !ok {
+                if _, ok := h.Items[sub.Type][sub.Name]; !ok {
                     log.Errorf("Referred %s %s in collection %s doesn't exist.", sub.Type, sub.Name, item.Name)
-                    missingItems = append(missingItems, sub.Name)
                 }
             }
         }
     }
 
-    if len(missingItems) > 0 {
-        return RawIndex, fmt.Errorf("%q: %w", missingItems, ErrMissingReference)
-    }
-
-    return RawIndex, nil
+    return nil
 }
 
 // ItemStats returns total counts of the hub items

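The core of the change: parseIndex() now unmarshals the index file straight into h.Items and returns only an error, instead of building a temporary RawIndex and collecting missingItems that nobody consumed. A simplified, self-contained sketch of that pattern (item and hub are stand-ins, not the real cwhub types, and the real method reads the file itself):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // item is a minimal stand-in for a hub item.
    type item struct {
        Name string `json:"name,omitempty"`
        Path string `json:"path,omitempty"`
    }

    // hub holds the index as item type -> item name -> item, mirroring the
    // layout of .index.json.
    type hub struct {
        Items map[string]map[string]item
    }

    // parseIndex fills h.Items in place and returns only an error, mirroring
    // the method shape introduced by this commit.
    func (h *hub) parseIndex(bidx []byte) error {
        if err := json.Unmarshal(bidx, &h.Items); err != nil {
            return fmt.Errorf("failed to unmarshal index: %w", err)
        }

        // copy the map key into each item, like item.Name = name above
        for itemType, items := range h.Items {
            for name, it := range items {
                it.Name = name
                h.Items[itemType][name] = it
            }
        }

        return nil
    }

    func main() {
        index := []byte(`{"parsers": {"crowdsecurity/syslog-logs": {"path": "parsers/s00-raw/syslog-logs.yaml"}}}`)

        h := &hub{}
        if err := h.parseIndex(index); err != nil {
            panic(err)
        }

        fmt.Println(h.Items["parsers"]["crowdsecurity/syslog-logs"].Name)
    }
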
@@ -45,7 +45,7 @@ func TestDownloadIndex(t *testing.T) {
     IndexPath: "",
 }
 
-    err = hub.remote.DownloadIndex(tmpIndex.Name())
+    err = hub.remote.downloadIndex(tmpIndex.Name())
     cstest.RequireErrorContains(t, err, "failed to build hub index request: invalid URL template 'x'")
 
     // bad domain

@@ -57,7 +57,7 @@ func TestDownloadIndex(t *testing.T) {
     IndexPath: ".index.json",
 }
 
-    err = hub.remote.DownloadIndex(tmpIndex.Name())
+    err = hub.remote.downloadIndex(tmpIndex.Name())
     require.NoError(t, err)
     // XXX: this is not failing
     // cstest.RequireErrorContains(t, err, "failed http request for hub index: Get")

@@ -71,6 +71,6 @@ func TestDownloadIndex(t *testing.T) {
     IndexPath: ".index.json",
 }
 
-    err = hub.remote.DownloadIndex("/does/not/exist/index.json")
+    err = hub.remote.downloadIndex("/does/not/exist/index.json")
     cstest.RequireErrorContains(t, err, "while opening hub index file: open /does/not/exist/index.json:")
 }

@@ -234,6 +234,7 @@ func (h *Hub) AddItem(item Item) error {
         }
     }
 
+    // XXX: can this happen?
     return fmt.Errorf("ItemType %s is unknown", item.Type)
 }
 

@@ -29,8 +29,8 @@ func (r *RemoteHubCfg) urlTo(remotePath string) (string, error) {
     return fmt.Sprintf(r.URLTemplate, r.Branch, remotePath), nil
 }
 
-// DownloadIndex downloads the latest version of the index
-func (r *RemoteHubCfg) DownloadIndex(localPath string) error {
+// downloadIndex downloads the latest version of the index
+func (r *RemoteHubCfg) downloadIndex(localPath string) error {
     if r == nil {
         return ErrNilRemoteHub
     }

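urlTo() above expands URLTemplate with the branch and the remote path; downloadIndex() then guards against a nil receiver with ErrNilRemoteHub. A tiny standalone sketch of the template expansion only (the type, template, and branch values are illustrative stand-ins):

    package main

    import "fmt"

    // remoteHubCfg stands in for cwhub.RemoteHubCfg with just the two fields
    // urlTo interpolates.
    type remoteHubCfg struct {
        URLTemplate string
        Branch      string
    }

    func (r *remoteHubCfg) urlTo(remotePath string) string {
        // the first %s receives the branch, the second the remote path, as above
        return fmt.Sprintf(r.URLTemplate, r.Branch, remotePath)
    }

    func main() {
        r := &remoteHubCfg{URLTemplate: "https://example.com/%s/%s", Branch: "master"}
        fmt.Println(r.urlTo(".index.json")) // https://example.com/master/.index.json
    }
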
@@ -6,6 +6,7 @@ import (
     "os/exec"
     "path/filepath"
 
+    "github.com/crowdsecurity/crowdsec/pkg/csconfig"
     "github.com/crowdsecurity/crowdsec/pkg/cwhub"
 )
 

@@ -56,15 +57,17 @@ func NewHubTest(hubPath string, crowdsecPath string, cscliPath string) (HubTest, error) {
     }
 
     hubIndexFile := filepath.Join(hubPath, ".index.json")
-    bidx, err := os.ReadFile(hubIndexFile)
-    if err != nil {
-        return HubTest{}, fmt.Errorf("unable to read index file: %s", err)
+
+    local := &csconfig.LocalHubCfg{
+        HubDir: hubPath,
+        HubIndexFile: hubIndexFile,
+        InstallDir: HubTestPath,
+        InstallDataDir: HubTestPath,
     }
 
-    // load hub index
-    hubIndex, err := cwhub.ParseIndex(bidx)
+    hub, err := cwhub.NewHub(local, nil, false)
     if err != nil {
-        return HubTest{}, fmt.Errorf("unable to load hub index file: %s", err)
+        return HubTest{}, fmt.Errorf("unable to load hub: %s", err)
     }
 
     templateConfigFilePath := filepath.Join(HubTestPath, templateConfigFile)

@@ -80,7 +83,7 @@ func NewHubTest(hubPath string, crowdsecPath string, cscliPath string) (HubTest, error) {
         TemplateConfigPath: templateConfigFilePath,
         TemplateProfilePath: templateProfilePath,
         TemplateSimulationPath: templateSimulationPath,
-        HubIndex: &cwhub.Hub{Items: hubIndex},
+        HubIndex: hub,
     }, nil
 }
 

@@ -62,6 +62,14 @@ teardown() {
     assert_output --partial 'crowdsecurity/linux'
 }
 
+@test "missing reference in hub index" {
+    new_hub=$(jq <"$HUB_DIR/.index.json" 'del(.parsers."crowdsecurity/smb-logs") | del (.scenarios."crowdsecurity/mysql-bf")')
+    echo "$new_hub" >"$HUB_DIR/.index.json"
+    rune -0 cscli hub list --error
+    assert_stderr --partial "Referred parsers crowdsecurity/smb-logs in collection crowdsecurity/smb doesn't exist."
+    assert_stderr --partial "Referred scenarios crowdsecurity/mysql-bf in collection crowdsecurity/mysql doesn't exist."
+}
+
 @test "cscli hub update" {
     #XXX: todo
     :