command "cscli metrics show bouncers" (#3126)

* cscli metrics show bouncers

* db metrics: increase payload size

* func tests
This commit is contained in:
mmetc 2024-07-15 09:55:52 +02:00 committed by GitHub
parent 06720538f5
commit f130ce677d
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
27 changed files with 894 additions and 105 deletions

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"encoding/json"
@ -6,7 +6,9 @@ import (
"github.com/fatih/color"
"github.com/spf13/cobra"
"gopkg.in/yaml.v3"
"github.com/jedib0t/go-pretty/v6/table"
"github.com/jedib0t/go-pretty/v6/text"
"github.com/crowdsecurity/go-cs-lib/maptools"
@ -32,17 +34,36 @@ func (cli *cliMetrics) list() error {
})
}
switch cli.cfg().Cscli.Output {
outputFormat := cli.cfg().Cscli.Output
switch outputFormat {
case "human":
t := cstable.New(color.Output, cli.cfg().Cscli.Color)
t.SetRowLines(true)
t.SetHeaders("Type", "Title", "Description")
out := color.Output
t := cstable.New(out, cli.cfg().Cscli.Color).Writer
t.AppendHeader(table.Row{"Type", "Title", "Description"})
t.SetColumnConfigs([]table.ColumnConfig{
{
Name: "Type",
AlignHeader: text.AlignCenter,
},
{
Name: "Title",
AlignHeader: text.AlignCenter,
},
{
Name: "Description",
AlignHeader: text.AlignCenter,
WidthMax: 60,
WidthMaxEnforcer: text.WrapSoft,
},
})
t.Style().Options.SeparateRows = true
for _, metric := range allMetrics {
t.AddRow(metric.Type, metric.Title, metric.Description)
t.AppendRow(table.Row{metric.Type, metric.Title, metric.Description})
}
t.Render()
fmt.Fprintln(out, t.Render())
case "json":
x, err := json.MarshalIndent(allMetrics, "", " ")
if err != nil {
@ -50,13 +71,8 @@ func (cli *cliMetrics) list() error {
}
fmt.Println(string(x))
case "raw":
x, err := yaml.Marshal(allMetrics)
if err != nil {
return fmt.Errorf("failed to marshal metric types: %w", err)
}
fmt.Println(string(x))
default:
return fmt.Errorf("output format '%s' not supported for this command", outputFormat)
}
return nil

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"github.com/spf13/cobra"
@ -12,7 +12,7 @@ type cliMetrics struct {
cfg configGetter
}
func NewCLI(cfg configGetter) *cliMetrics {
func New(cfg configGetter) *cliMetrics {
return &cliMetrics{
cfg: cfg,
}
@ -38,8 +38,8 @@ cscli metrics --url http://lapi.local:6060/metrics show acquisition parsers
cscli metrics list`,
Args: cobra.ExactArgs(0),
DisableAutoGenTag: true,
RunE: func(_ *cobra.Command, _ []string) error {
return cli.show(nil, url, noUnit)
RunE: func(cmd *cobra.Command, _ []string) error {
return cli.show(cmd.Context(), nil, url, noUnit)
},
}

View file

@ -1,8 +1,9 @@
package metrics
package climetrics
import (
"fmt"
"math"
"strconv"
)
type unit struct {
@ -20,11 +21,15 @@ var ranges = []unit{
{value: 1, symbol: ""},
}
func formatNumber(num int) string {
goodUnit := unit{}
func formatNumber(num int64, withUnit bool) string {
if !withUnit {
return strconv.FormatInt(num, 10)
}
goodUnit := ranges[len(ranges)-1]
for _, u := range ranges {
if int64(num) >= u.value {
if num >= u.value {
goodUnit = u
break
}

View file

@ -1,11 +1,16 @@
package metrics
package climetrics
import (
"context"
"errors"
"fmt"
log "github.com/sirupsen/logrus"
"github.com/fatih/color"
"github.com/spf13/cobra"
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
)
var (
@ -13,7 +18,7 @@ var (
ErrMetricsDisabled = errors.New("prometheus is not enabled, can't show metrics")
)
func (cli *cliMetrics) show(sections []string, url string, noUnit bool) error {
func (cli *cliMetrics) show(ctx context.Context, sections []string, url string, noUnit bool) error {
cfg := cli.cfg()
if url != "" {
@ -30,8 +35,13 @@ func (cli *cliMetrics) show(sections []string, url string, noUnit bool) error {
ms := NewMetricStore()
if err := ms.Fetch(cfg.Cscli.PrometheusUrl); err != nil {
return err
db, err := require.DBClient(ctx, cfg.DbConfig)
if err != nil {
log.Warnf("unable to open database: %s", err)
}
if err := ms.Fetch(ctx, cfg.Cscli.PrometheusUrl, db); err != nil {
log.Warn(err)
}
// any section that we don't have in the store is an error
@ -90,9 +100,9 @@ cscli metrics list; cscli metrics list -o json
cscli metrics show acquisition parsers scenarios stash -o json`,
// Positional args are optional
DisableAutoGenTag: true,
RunE: func(_ *cobra.Command, args []string) error {
RunE: func(cmd *cobra.Command, args []string) error {
args = expandAlias(args)
return cli.show(args, url, noUnit)
return cli.show(cmd.Context(), args, url, noUnit)
},
}

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"fmt"

View file

@ -0,0 +1,340 @@
package climetrics
import (
"context"
"encoding/json"
"fmt"
"io"
"time"
"github.com/jedib0t/go-pretty/v6/table"
"github.com/jedib0t/go-pretty/v6/text"
log "github.com/sirupsen/logrus"
"github.com/crowdsecurity/go-cs-lib/maptools"
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/cstable"
"github.com/crowdsecurity/crowdsec/pkg/database"
"github.com/crowdsecurity/crowdsec/pkg/database/ent/metric"
"github.com/crowdsecurity/crowdsec/pkg/models"
)
// un-aggregated data, de-normalized.
type bouncerMetricItem struct {
	bouncerName string  // reporting bouncer (metric row's GeneratedBy)
	ipType      string  // "ip_type" label (e.g. ipv4/ipv6); may be empty
	origin      string  // "origin" label (e.g. CAPI, cscli, lists:*); may be empty
	name        string  // metric name (e.g. "dropped", "processed")
	unit        string  // measurement unit (e.g. "byte", "packet", "ip")
	value       float64 // reported value; truncated to int64 during aggregation
}
// statBouncer holds the bouncer usage metrics fetched from the database,
// both as raw de-normalized items and as aggregated totals.
type statBouncer struct {
	// oldest collection timestamp for each bouncer
	oldestTS map[string]*time.Time
	// we keep de-normalized metrics so we can iterate
	// over them multiple times and keep the aggregation code simple
	rawMetrics []bouncerMetricItem
	// totals indexed by [bouncer][origin][name][unit]
	aggregated map[string]map[string]map[string]map[string]int64
	// totals across all origins, indexed by [bouncer][name][unit];
	// used for the table footer
	aggregatedAllOrigin map[string]map[string]map[string]int64
}
// knownPlurals maps a unit name to the plural form shown in table headers;
// units not listed here are displayed unchanged (we don't pluralize
// arbitrary words).
var knownPlurals = map[string]string{
	"byte":   "bytes",
	"packet": "packets",
	"ip":     "IPs",
}
// MarshalJSON serializes only the per-origin aggregated totals
// ([bouncer][origin][name][unit]), not the raw metric items.
func (s *statBouncer) MarshalJSON() ([]byte, error) {
	return json.Marshal(s.aggregated)
}
// Description returns the title and short description used when
// rendering or listing this metrics section.
func (s *statBouncer) Description() (string, string) {
	return "Bouncer Metrics",
		`Network traffic blocked by bouncers.`
}
// warnOnce logs the given message at warning level only the first time it
// is seen; warningsLogged is the set of messages already emitted.
func warnOnce(warningsLogged map[string]bool, msg string) {
	if !warningsLogged[msg] {
		// not Warningf: msg is not a format string and may contain '%'
		// (a non-constant format string is also flagged by go vet)
		log.Warning(msg)

		warningsLogged[msg] = true
	}
}
// Fetch reads the bouncer usage metrics from the database (not from
// prometheus), de-normalizes them into s.rawMetrics, records the oldest
// collection timestamp per bouncer, then computes the aggregated totals.
// A nil db is not an error: the section is simply left empty.
func (s *statBouncer) Fetch(ctx context.Context, db *database.Client) error {
	if db == nil {
		return nil
	}

	// query all bouncer metrics that have not been flushed

	metrics, err := db.Ent.Metric.Query().
		Where(
			metric.GeneratedTypeEQ(metric.GeneratedTypeRC),
		).All(ctx)
	if err != nil {
		return fmt.Errorf("unable to fetch metrics: %w", err)
	}

	s.oldestTS = make(map[string]*time.Time)

	// don't spam the user with the same warnings
	warningsLogged := make(map[string]bool)

	for _, met := range metrics {
		bouncerName := met.GeneratedBy

		// track the oldest collection time per bouncer (shown in the table title)
		collectedAt := met.CollectedAt
		if s.oldestTS[bouncerName] == nil || collectedAt.Before(*s.oldestTS[bouncerName]) {
			s.oldestTS[bouncerName] = &collectedAt
		}

		// met.Payload is a JSON document with a list of detailed metrics
		type bouncerMetrics struct {
			Metrics []models.DetailedMetrics `json:"metrics"`
		}

		payload := bouncerMetrics{}

		err := json.Unmarshal([]byte(met.Payload), &payload)
		if err != nil {
			// a bad payload is skipped, not fatal
			log.Warningf("while parsing metrics for %s: %s", bouncerName, err)
			continue
		}

		for _, m := range payload.Metrics {
			for _, item := range m.Items {
				labels := item.Labels

				// these are mandatory but we got pointers, so...

				valid := true

				if item.Name == nil {
					warnOnce(warningsLogged, "missing 'name' field in metrics reported by "+bouncerName)
					// no continue - keep checking the rest
					valid = false
				}

				if item.Unit == nil {
					warnOnce(warningsLogged, "missing 'unit' field in metrics reported by "+bouncerName)
					valid = false
				}

				if item.Value == nil {
					warnOnce(warningsLogged, "missing 'value' field in metrics reported by "+bouncerName)
					valid = false
				}

				if !valid {
					continue
				}

				name := *item.Name
				unit := *item.Unit
				value := *item.Value

				rawMetric := bouncerMetricItem{
					bouncerName: bouncerName,
					ipType:      labels["ip_type"],
					origin:      labels["origin"],
					name:        name,
					unit:        unit,
					value:       value,
				}

				s.rawMetrics = append(s.rawMetrics, rawMetric)
			}
		}
	}

	s.aggregate()

	return nil
}
// aggregate computes the per-origin totals (s.aggregated) and the
// cross-origin totals (s.aggregatedAllOrigin) from the raw metric items.
// Values are truncated, not rounded, when converted to int64.
func (s *statBouncer) aggregate() {
	// [bouncer][origin][name][unit]value
	if s.aggregated == nil {
		s.aggregated = make(map[string]map[string]map[string]map[string]int64)
	}

	if s.aggregatedAllOrigin == nil {
		s.aggregatedAllOrigin = make(map[string]map[string]map[string]int64)
	}

	for _, raw := range s.rawMetrics {
		if _, ok := s.aggregated[raw.bouncerName]; !ok {
			s.aggregated[raw.bouncerName] = make(map[string]map[string]map[string]int64)
		}

		if _, ok := s.aggregated[raw.bouncerName][raw.origin]; !ok {
			s.aggregated[raw.bouncerName][raw.origin] = make(map[string]map[string]int64)
		}

		if _, ok := s.aggregated[raw.bouncerName][raw.origin][raw.name]; !ok {
			s.aggregated[raw.bouncerName][raw.origin][raw.name] = make(map[string]int64)
		}

		// a missing key reads as zero, no explicit init needed before +=
		s.aggregated[raw.bouncerName][raw.origin][raw.name][raw.unit] += int64(raw.value)

		if _, ok := s.aggregatedAllOrigin[raw.bouncerName]; !ok {
			s.aggregatedAllOrigin[raw.bouncerName] = make(map[string]map[string]int64)
		}

		if _, ok := s.aggregatedAllOrigin[raw.bouncerName][raw.name]; !ok {
			s.aggregatedAllOrigin[raw.bouncerName][raw.name] = make(map[string]int64)
		}

		s.aggregatedAllOrigin[raw.bouncerName][raw.name][raw.unit] += int64(raw.value)
	}
}
// bouncerTable displays a table of metrics for a single bouncer
func (s *statBouncer) bouncerTable(out io.Writer, bouncerName string, wantColor string, noUnit bool) {
	columns := make(map[string]map[string]bool)

	for _, item := range s.rawMetrics {
		if item.bouncerName != bouncerName {
			continue
		}
		// build a map of the metric names and units, to display dynamic columns
		if _, ok := columns[item.name]; !ok {
			columns[item.name] = make(map[string]bool)
		}

		columns[item.name][item.unit] = true
	}

	// no metrics for this bouncer, skip. how did we get here ?
	// anyway we can't honor the "showEmpty" flag in this case,
	// we don't even have the table headers
	if len(columns) == 0 {
		return
	}

	t := cstable.New(out, wantColor).Writer

	// two header rows: metric names (merged across their units) and units
	header1 := table.Row{"Origin"}
	header2 := table.Row{""}
	colNum := 1

	// first column (origin) is left-aligned; metric columns are right-aligned
	colCfg := []table.ColumnConfig{{
		Number:      colNum,
		AlignHeader: text.AlignLeft,
		Align:       text.AlignLeft,
		AlignFooter: text.AlignRight,
	}}

	for _, name := range maptools.SortedKeys(columns) {
		for _, unit := range maptools.SortedKeys(columns[name]) {
			colNum += 1
			header1 = append(header1, name)

			// we don't add "s" to random words
			if knownPlurals[unit] != "" {
				unit = knownPlurals[unit]
			}

			header2 = append(header2, unit)
			colCfg = append(colCfg, table.ColumnConfig{
				Number:      colNum,
				AlignHeader: text.AlignCenter,
				Align:       text.AlignRight,
				AlignFooter: text.AlignRight},
			)
		}
	}

	t.AppendHeader(header1, table.RowConfig{AutoMerge: true})
	t.AppendHeader(header2)
	t.SetColumnConfigs(colCfg)

	numRows := 0

	// sort all the ranges for stable output

	for _, origin := range maptools.SortedKeys(s.aggregated[bouncerName]) {
		if origin == "" {
			// if the metric has no origin (i.e. processed bytes/packets)
			// we don't display it in the table body but it still gets aggregated
			// in the footer's totals
			continue
		}

		metrics := s.aggregated[bouncerName][origin]

		// some users don't know what capi is
		if origin == "CAPI" {
			origin += " (community blocklist)"
		}

		row := table.Row{origin}

		// cells follow the same sorted (name, unit) order as the headers;
		// "-" marks a combination this origin did not report
		for _, name := range maptools.SortedKeys(columns) {
			for _, unit := range maptools.SortedKeys(columns[name]) {
				valStr := "-"

				val, ok := metrics[name][unit]
				if ok {
					valStr = formatNumber(val, !noUnit)
				}

				row = append(row, valStr)
			}
		}

		t.AppendRow(row)

		numRows += 1
	}

	totals := s.aggregatedAllOrigin[bouncerName]

	if numRows == 0 {
		t.Style().Options.SeparateFooter = false
	}

	footer := table.Row{"Total"}

	for _, name := range maptools.SortedKeys(columns) {
		for _, unit := range maptools.SortedKeys(columns[name]) {
			footer = append(footer, formatNumber(totals[name][unit], !noUnit))
		}
	}

	t.AppendFooter(footer)

	title, _ := s.Description()
	title = fmt.Sprintf("%s (%s)", title, bouncerName)

	if s.oldestTS != nil {
		// if we change this to .Local() beware of tests
		title = fmt.Sprintf("%s since %s", title, s.oldestTS[bouncerName].String())
	}

	title += ":"

	// don't use SetTitle() because it draws the title inside table box
	// TODO: newline position wrt other stat tables
	cstable.RenderTitle(out, title)
	fmt.Fprintln(out, t.Render())
}
// Table renders one metrics table per bouncer found in the raw metrics,
// in sorted bouncer-name order, separated by a blank line. The last
// (showEmpty) parameter is ignored: a bouncer with no metrics produces
// no table at all.
func (s *statBouncer) Table(out io.Writer, wantColor string, noUnit bool, _ bool) {
	seen := make(map[string]bool)
	for _, item := range s.rawMetrics {
		seen[item.bouncerName] = true
	}

	for idx, name := range maptools.SortedKeys(seen) {
		if idx > 0 {
			// empty line between tables
			fmt.Fprintln(out)
		}

		s.bouncerTable(out, name, wantColor, noUnit)
	}
}

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"io"

View file

@ -1,6 +1,7 @@
package metrics
package climetrics
import (
"context"
"encoding/json"
"fmt"
"io"
@ -12,10 +13,11 @@ import (
dto "github.com/prometheus/client_model/go"
"github.com/prometheus/prom2json"
log "github.com/sirupsen/logrus"
"gopkg.in/yaml.v3"
"github.com/crowdsecurity/go-cs-lib/maptools"
"github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/crowdsec/pkg/database"
)
type metricSection interface {
@ -28,22 +30,31 @@ type metricStore map[string]metricSection
func NewMetricStore() metricStore {
return metricStore{
"acquisition": statAcquis{},
"scenarios": statBucket{},
"parsers": statParser{},
"lapi": statLapi{},
"lapi-machine": statLapiMachine{},
"lapi-bouncer": statLapiBouncer{},
"lapi-decisions": statLapiDecision{},
"decisions": statDecision{},
"alerts": statAlert{},
"stash": statStash{},
"bouncers": &statBouncer{},
"appsec-engine": statAppsecEngine{},
"appsec-rule": statAppsecRule{},
"decisions": statDecision{},
"lapi": statLapi{},
"lapi-bouncer": statLapiBouncer{},
"lapi-decisions": statLapiDecision{},
"lapi-machine": statLapiMachine{},
"parsers": statParser{},
"scenarios": statBucket{},
"stash": statStash{},
"whitelists": statWhitelist{},
}
}
func (ms metricStore) Fetch(url string) error {
// Fetch collects the metrics for every section: bouncer metrics come from
// the database, all other sections are scraped from the prometheus
// endpoint at the given url.
func (ms metricStore) Fetch(ctx context.Context, url string, db *database.Client) error {
	if err := ms["bouncers"].(*statBouncer).Fetch(ctx, db); err != nil {
		return err
	}

	return ms.fetchPrometheusMetrics(url)
}
func (ms metricStore) fetchPrometheusMetrics(url string) error {
mfChan := make(chan *dto.MetricFamily, 1024)
errChan := make(chan error, 1)
@ -59,7 +70,7 @@ func (ms metricStore) Fetch(url string) error {
err := prom2json.FetchMetricFamilies(url, mfChan, transport)
if err != nil {
errChan <- fmt.Errorf("failed to fetch metrics: %w", err)
errChan <- fmt.Errorf("while fetching metrics: %w", err)
return
}
errChan <- nil
@ -75,19 +86,23 @@ func (ms metricStore) Fetch(url string) error {
}
log.Debugf("Finished reading metrics output, %d entries", len(result))
/*walk*/
ms.processPrometheusMetrics(result)
return nil
}
func (ms metricStore) processPrometheusMetrics(result []*prom2json.Family) {
mAcquis := ms["acquisition"].(statAcquis)
mParser := ms["parsers"].(statParser)
mBucket := ms["scenarios"].(statBucket)
mLapi := ms["lapi"].(statLapi)
mLapiMachine := ms["lapi-machine"].(statLapiMachine)
mLapiBouncer := ms["lapi-bouncer"].(statLapiBouncer)
mLapiDecision := ms["lapi-decisions"].(statLapiDecision)
mDecision := ms["decisions"].(statDecision)
mAlert := ms["alerts"].(statAlert)
mAppsecEngine := ms["appsec-engine"].(statAppsecEngine)
mAppsecRule := ms["appsec-rule"].(statAppsecRule)
mAlert := ms["alerts"].(statAlert)
mDecision := ms["decisions"].(statDecision)
mLapi := ms["lapi"].(statLapi)
mLapiBouncer := ms["lapi-bouncer"].(statLapiBouncer)
mLapiDecision := ms["lapi-decisions"].(statLapiDecision)
mLapiMachine := ms["lapi-machine"].(statLapiMachine)
mParser := ms["parsers"].(statParser)
mBucket := ms["scenarios"].(statBucket)
mStash := ms["stash"].(statStash)
mWhitelist := ms["whitelists"].(statWhitelist)
@ -219,11 +234,9 @@ func (ms metricStore) Fetch(url string) error {
}
}
}
return nil
}
func (ms metricStore) Format(out io.Writer, wantColor string, sections []string, formatType string, noUnit bool) error {
func (ms metricStore) Format(out io.Writer, wantColor string, sections []string, outputFormat string, noUnit bool) error {
// copy only the sections we want
want := map[string]metricSection{}
@ -239,7 +252,7 @@ func (ms metricStore) Format(out io.Writer, wantColor string, sections []string,
want[section] = ms[section]
}
switch formatType {
switch outputFormat {
case "human":
for _, section := range maptools.SortedKeys(want) {
want[section].Table(out, wantColor, noUnit, showEmpty)
@ -250,14 +263,8 @@ func (ms metricStore) Format(out io.Writer, wantColor string, sections []string,
return fmt.Errorf("failed to marshal metrics: %w", err)
}
out.Write(x)
case "raw":
x, err := yaml.Marshal(want)
if err != nil {
return fmt.Errorf("failed to marshal metrics: %w", err)
}
out.Write(x)
default:
return fmt.Errorf("unknown format type %s", formatType)
return fmt.Errorf("output format '%s' not supported for this command", outputFormat)
}
return nil

View file

@ -1,4 +1,4 @@
package metrics
package climetrics
import (
"errors"
@ -110,12 +110,7 @@ func metricsToTable(t *cstable.Table, stats map[string]map[string]int, keys []st
for _, sl := range keys {
if v, ok := astats[sl]; ok && v != 0 {
numberToShow := strconv.Itoa(v)
if !noUnit {
numberToShow = formatNumber(v)
}
row = append(row, numberToShow)
row = append(row, formatNumber(int64(v), !noUnit))
} else {
row = append(row, "-")
}

View file

@ -14,7 +14,7 @@ import (
"github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/metrics"
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/climetrics"
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/fflag"
@ -252,7 +252,7 @@ It is meant to allow you to manage bans, parsers/scenarios/etc, api and generall
cmd.AddCommand(NewCLIVersion().NewCommand())
cmd.AddCommand(NewCLIConfig(cli.cfg).NewCommand())
cmd.AddCommand(NewCLIHub(cli.cfg).NewCommand())
cmd.AddCommand(metrics.NewCLI(cli.cfg).NewCommand())
cmd.AddCommand(climetrics.New(cli.cfg).NewCommand())
cmd.AddCommand(NewCLIDashboard(cli.cfg).NewCommand())
cmd.AddCommand(NewCLIDecisions(cli.cfg).NewCommand())
cmd.AddCommand(NewCLIAlerts(cli.cfg).NewCommand())

View file

@ -22,7 +22,7 @@ import (
"github.com/crowdsecurity/go-cs-lib/trace"
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/metrics"
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/climetrics"
"github.com/crowdsecurity/crowdsec/cmd/crowdsec-cli/require"
"github.com/crowdsecurity/crowdsec/pkg/csconfig"
"github.com/crowdsecurity/crowdsec/pkg/cwhub"
@ -78,7 +78,7 @@ func stripAnsiString(str string) string {
return reStripAnsi.ReplaceAllString(str, "")
}
func (cli *cliSupport) dumpMetrics(ctx context.Context, zw *zip.Writer) error {
func (cli *cliSupport) dumpMetrics(ctx context.Context, db *database.Client, zw *zip.Writer) error {
log.Info("Collecting prometheus metrics")
cfg := cli.cfg()
@ -89,9 +89,9 @@ func (cli *cliSupport) dumpMetrics(ctx context.Context, zw *zip.Writer) error {
humanMetrics := new(bytes.Buffer)
ms := metrics.NewMetricStore()
ms := climetrics.NewMetricStore()
if err := ms.Fetch(cfg.Cscli.PrometheusUrl); err != nil {
if err := ms.Fetch(ctx, cfg.Cscli.PrometheusUrl, db); err != nil {
return err
}
@ -493,7 +493,7 @@ func (cli *cliSupport) dump(ctx context.Context, outFile string) error {
skipCAPI = true
}
if err = cli.dumpMetrics(ctx, zipWriter); err != nil {
if err = cli.dumpMetrics(ctx, db, zipWriter); err != nil {
log.Warn(err)
}

View file

@ -255,7 +255,7 @@ var (
{Name: "generated_by", Type: field.TypeString},
{Name: "collected_at", Type: field.TypeTime},
{Name: "pushed_at", Type: field.TypeTime, Nullable: true},
{Name: "payload", Type: field.TypeString},
{Name: "payload", Type: field.TypeString, Size: 2147483647},
}
// MetricsTable holds the schema information for the "metrics" table.
MetricsTable = &schema.Table{

View file

@ -28,7 +28,7 @@ func (Metric) Fields() []ent.Field {
Nillable().
Optional().
Comment("When the metrics are sent to the console"),
field.String("payload").
field.Text("payload").
Immutable().
Comment("The actual metrics (item0)"),
}

View file

@ -23,9 +23,9 @@ teardown() {
#----------
@test "cscli metrics (crowdsec not running)" {
rune -1 cscli metrics
# crowdsec is down
assert_stderr --partial 'failed to fetch metrics: executing GET request for URL \"http://127.0.0.1:6060/metrics\" failed: Get \"http://127.0.0.1:6060/metrics\": dial tcp 127.0.0.1:6060: connect: connection refused'
rune -0 cscli metrics
# crowdsec is down, we won't get an error because some metrics come from the db instead
assert_stderr --partial 'while fetching metrics: executing GET request for URL \"http://127.0.0.1:6060/metrics\" failed: Get \"http://127.0.0.1:6060/metrics\": dial tcp 127.0.0.1:6060: connect: connection refused'
}
@test "cscli metrics (bad configuration)" {
@ -72,10 +72,6 @@ teardown() {
rune -0 jq 'keys' <(output)
assert_output --partial '"alerts",'
assert_output --partial '"parsers",'
rune -0 cscli metrics -o raw
assert_output --partial 'alerts: {}'
assert_output --partial 'parsers: {}'
}
@test "cscli metrics list" {
@ -85,10 +81,6 @@ teardown() {
rune -0 cscli metrics list -o json
rune -0 jq -c '.[] | [.type,.title]' <(output)
assert_line '["acquisition","Acquisition Metrics"]'
rune -0 cscli metrics list -o raw
assert_line "- type: acquisition"
assert_line " title: Acquisition Metrics"
}
@test "cscli metrics show" {
@ -108,8 +100,4 @@ teardown() {
rune -0 cscli metrics show lapi -o json
rune -0 jq -c '.lapi."/v1/watchers/login" | keys' <(output)
assert_json '["POST"]'
rune -0 cscli metrics show lapi -o raw
assert_line 'lapi:'
assert_line ' /v1/watchers/login:'
}

View file

@ -0,0 +1,327 @@
#!/usr/bin/env bats
# vim: ft=bats:list:ts=8:sts=4:sw=4:et:ai:si:
set -u
setup_file() {
load "../lib/setup_file.sh"
}
teardown_file() {
load "../lib/teardown_file.sh"
}
setup() {
load "../lib/setup.sh"
./instance-data load
./instance-crowdsec start
skip "require the usage_metrics endpoint on apiserver"
}
teardown() {
./instance-crowdsec stop
}
#----------
@test "cscli metrics show bouncers" {
# there are no bouncers, so no metrics yet
rune -0 cscli metrics show bouncers
refute_output
}
@test "rc usage metrics (empty payload)" {
# a registered bouncer can send metrics for the lapi and console
API_KEY=$(cscli bouncers add testbouncer -o raw)
export API_KEY
payload=$(yq -o j <<-EOT
remediation_components: []
log_processors: []
EOT
)
rune -22 curl-with-key '/v1/usage-metrics' -X POST --data "$payload"
assert_stderr --partial 'error: 400'
assert_json '{message: "Missing remediation component data"}'
}
@test "rc usage metrics (bad payload)" {
API_KEY=$(cscli bouncers add testbouncer -o raw)
export API_KEY
payload=$(yq -o j <<-EOT
remediation_components:
- version: "v1.0"
log_processors: []
EOT
)
rune -22 curl-with-key '/v1/usage-metrics' -X POST --data "$payload"
assert_stderr --partial "error: 422"
rune -0 jq -r '.message' <(output)
assert_output - <<-EOT
validation failure list:
remediation_components.0.utc_startup_timestamp in body is required
EOT
# validation, like timestamp format
payload=$(yq -o j '.remediation_components[0].utc_startup_timestamp = "2021-09-01T00:00:00Z"' <<<"$payload")
rune -22 curl-with-key '/v1/usage-metrics' -X POST --data "$payload"
assert_stderr --partial "error: 400"
assert_json '{message: "json: cannot unmarshal string into Go struct field AllMetrics.remediation_components of type int64"}'
payload=$(yq -o j '.remediation_components[0].utc_startup_timestamp = 1707399316' <<<"$payload")
rune -0 curl-with-key '/v1/usage-metrics' -X POST --data "$payload"
refute_output
}
@test "rc usage metrics (good payload)" {
API_KEY=$(cscli bouncers add testbouncer -o raw)
export API_KEY
payload=$(yq -o j <<-EOT
remediation_components:
- version: "v1.0"
utc_startup_timestamp: 1707399316
log_processors: []
EOT
)
# bouncers have feature flags too
payload=$(yq -o j '
.remediation_components[0].feature_flags = ["huey", "dewey", "louie"] |
.remediation_components[0].os = {"name": "Multics", "version": "MR12.5"}
' <<<"$payload")
rune -0 curl-with-key '/v1/usage-metrics' -X POST --data "$payload"
rune -0 cscli bouncer inspect testbouncer -o json
rune -0 yq -o j '[.os,.featureflags]' <(output)
assert_json '["Multics/MR12.5",["huey","dewey","louie"]]'
payload=$(yq -o j '
.remediation_components[0].metrics = [
{
"meta": {"utc_now_timestamp": 1707399316, "window_size_seconds":600},
"items":[
{"name": "foo", "unit": "pound", "value": 3.1415926},
{"name": "foo", "unit": "pound", "value": 2.7182818},
{"name": "foo", "unit": "dogyear", "value": 2.7182818}
]
}
]
' <<<"$payload")
rune -0 curl-with-key '/v1/usage-metrics' -X POST --data "$payload"
rune -0 cscli metrics show bouncers -o json
# aggregation is ok -- we are truncating, not rounding, because the float is mandated by swagger.
# but without labels the origin string is empty
assert_json '{bouncers:{testbouncer:{"": {"foo": {"dogyear": 2, "pound": 5}}}}}'
rune -0 cscli metrics show bouncers
assert_output - <<-EOT
Bouncer Metrics (testbouncer) since 2024-02-08 13:35:16 +0000 UTC:
+--------+-----------------+
| Origin | foo |
| | dogyear | pound |
+--------+---------+-------+
| Total | 2 | 5 |
+--------+---------+-------+
EOT
# some more realistic values, at least for the labels
# we don't use the same now_timestamp or the payload will be silently discarded
payload=$(yq -o j '
.remediation_components[0].metrics = [
{
"meta": {"utc_now_timestamp": 1707399916, "window_size_seconds":600},
"items":[
{"name": "active_decisions", "unit": "ip", "value": 51936, "labels": {"ip_type": "ipv4", "origin": "lists:firehol_voipbl"}},
{"name": "active_decisions", "unit": "ip", "value": 1, "labels": {"ip_type": "ipv6", "origin": "cscli"}},
{"name": "dropped", "unit": "byte", "value": 3800, "labels": {"ip_type": "ipv4", "origin": "CAPI"}},
{"name": "dropped", "unit": "byte", "value": 0, "labels": {"ip_type": "ipv4", "origin": "cscli"}},
{"name": "dropped", "unit": "byte", "value": 1034, "labels": {"ip_type": "ipv4", "origin": "lists:firehol_cruzit_web_attacks"}},
{"name": "dropped", "unit": "byte", "value": 3847, "labels": {"ip_type": "ipv4", "origin": "lists:firehol_voipbl"}},
{"name": "dropped", "unit": "byte", "value": 380, "labels": {"ip_type": "ipv6", "origin": "cscli"}},
{"name": "dropped", "unit": "packet", "value": 100, "labels": {"ip_type": "ipv4", "origin": "CAPI"}},
{"name": "dropped", "unit": "packet", "value": 10, "labels": {"ip_type": "ipv4", "origin": "cscli"}},
{"name": "dropped", "unit": "packet", "value": 23, "labels": {"ip_type": "ipv4", "origin": "lists:firehol_cruzit_web_attacks"}},
{"name": "dropped", "unit": "packet", "value": 58, "labels": {"ip_type": "ipv4", "origin": "lists:firehol_voipbl"}},
{"name": "dropped", "unit": "packet", "value": 0, "labels": {"ip_type": "ipv4", "origin": "lists:anotherlist"}},
{"name": "dropped", "unit": "byte", "value": 0, "labels": {"ip_type": "ipv4", "origin": "lists:anotherlist"}},
{"name": "dropped", "unit": "packet", "value": 0, "labels": {"ip_type": "ipv6", "origin": "cscli"}}
]
}
] |
.remediation_components[0].type = "crowdsec-firewall-bouncer"
' <<<"$payload")
rune -0 curl-with-key '/v1/usage-metrics' -X POST --data "$payload"
rune -0 cscli metrics show bouncers -o json
assert_json '{
"bouncers": {
"testbouncer": {
"": {
"foo": {
"dogyear": 2,
"pound": 5
}
},
"CAPI": {
"dropped": {
"byte": 3800,
"packet": 100
}
},
"cscli": {
"active_decisions": {
"ip": 1
},
"dropped": {
"byte": 380,
"packet": 10
}
},
"lists:firehol_cruzit_web_attacks": {
"dropped": {
"byte": 1034,
"packet": 23
}
},
"lists:firehol_voipbl": {
"active_decisions": {
"ip": 51936
},
"dropped": {
"byte": 3847,
"packet": 58
},
},
"lists:anotherlist": {
"dropped": {
"byte": 0,
"packet": 0
}
}
}
}
}'
rune -0 cscli metrics show bouncers
assert_output - <<-EOT
Bouncer Metrics (testbouncer) since 2024-02-08 13:35:16 +0000 UTC:
+----------------------------------+------------------+-------------------+-----------------+
| Origin | active_decisions | dropped | foo |
| | IPs | bytes | packets | dogyear | pound |
+----------------------------------+------------------+---------+---------+---------+-------+
| CAPI (community blocklist) | - | 3.80k | 100 | - | - |
| cscli | 1 | 380 | 10 | - | - |
| lists:anotherlist | - | 0 | 0 | - | - |
| lists:firehol_cruzit_web_attacks | - | 1.03k | 23 | - | - |
| lists:firehol_voipbl | 51.94k | 3.85k | 58 | - | - |
+----------------------------------+------------------+---------+---------+---------+-------+
| Total | 51.94k | 9.06k | 191 | 2 | 5 |
+----------------------------------+------------------+---------+---------+---------+-------+
EOT
# TODO: multiple item lists
}
@test "rc usage metrics (multiple bouncers)" {
# multiple bouncers have separate totals and can have different types of metrics and units -> different columns
API_KEY=$(cscli bouncers add bouncer1 -o raw)
export API_KEY
payload=$(yq -o j <<-EOT
remediation_components:
- version: "v1.0"
utc_startup_timestamp: 1707369316
metrics:
- meta:
utc_now_timestamp: 1707399316
window_size_seconds: 600
items:
- name: dropped
unit: byte
value: 1000
labels:
origin: CAPI
- name: dropped
unit: byte
value: 800
labels:
origin: lists:somelist
- name: processed
unit: byte
value: 12340
- name: processed
unit: packet
value: 100
EOT
)
rune -0 curl-with-key '/v1/usage-metrics' -X POST --data "$payload"
API_KEY=$(cscli bouncers add bouncer2 -o raw)
export API_KEY
payload=$(yq -o j <<-EOT
remediation_components:
- version: "v1.0"
utc_startup_timestamp: 1707379316
metrics:
- meta:
utc_now_timestamp: 1707389316
window_size_seconds: 600
items:
- name: dropped
unit: byte
value: 1500
labels:
origin: lists:somelist
- name: dropped
unit: byte
value: 2000
labels:
origin: CAPI
- name: dropped
unit: packet
value: 20
labels:
origin: lists:somelist
EOT
)
rune -0 curl-with-key '/v1/usage-metrics' -X POST --data "$payload"
rune -0 cscli metrics show bouncers -o json
assert_json '{bouncers:{bouncer1:{"":{processed:{byte:12340,packet:100}},CAPI:{dropped:{byte:1000}},"lists:somelist":{dropped:{byte:800}}},bouncer2:{"lists:somelist":{dropped:{byte:1500,packet:20}},CAPI:{dropped:{byte:2000}}}}}'
rune -0 cscli metrics show bouncers
assert_output - <<-EOT
Bouncer Metrics (bouncer1) since 2024-02-08 13:35:16 +0000 UTC:
+----------------------------+---------+-----------------------+
| Origin | dropped | processed |
| | bytes | bytes | packets |
+----------------------------+---------+-----------+-----------+
| CAPI (community blocklist) | 1.00k | - | - |
| lists:somelist | 800 | - | - |
+----------------------------+---------+-----------+-----------+
| Total | 1.80k | 12.34k | 100 |
+----------------------------+---------+-----------+-----------+
Bouncer Metrics (bouncer2) since 2024-02-08 10:48:36 +0000 UTC:
+----------------------------+-------------------+
| Origin | dropped |
| | bytes | packets |
+----------------------------+---------+---------+
| CAPI (community blocklist) | 2.00k | - |
| lists:somelist | 1.50k | 20 |
+----------------------------+---------+---------+
| Total | 3.50k | 20 |
+----------------------------+---------+---------+
EOT
}

View file

@ -0,0 +1,101 @@
#!/usr/bin/env bats
# vim: ft=bats:list:ts=8:sts=4:sw=4:et:ai:si:
set -u
setup_file() {
load "../lib/setup_file.sh"
}
teardown_file() {
load "../lib/teardown_file.sh"
}
setup() {
load "../lib/setup.sh"
./instance-data load
./instance-crowdsec start
skip "require the usage_metrics endpoint on apiserver"
}
teardown() {
./instance-crowdsec stop
}
#----------
@test "lp usage metrics (empty payload)" {
# a registered log processor can send metrics for the lapi and console
TOKEN=$(lp-get-token)
export TOKEN
payload=$(yq -o j <<-EOT
remediation_components: []
log_processors: []
EOT
)
rune -22 curl-with-token '/v1/usage-metrics' -X POST --data "$payload"
assert_stderr --partial 'error: 400'
assert_json '{message: "Missing log processor data"}'
}
@test "lp usage metrics (bad payload)" {
TOKEN=$(lp-get-token)
export TOKEN
payload=$(yq -o j <<-EOT
remediation_components: []
log_processors:
- version: "v1.0"
EOT
)
rune -22 curl-with-token '/v1/usage-metrics' -X POST --data "$payload"
assert_stderr --partial "error: 422"
rune -0 jq -r '.message' <(output)
assert_output - <<-EOT
validation failure list:
log_processors.0.utc_startup_timestamp in body is required
log_processors.0.datasources in body is required
log_processors.0.hub_items in body is required
EOT
}
@test "lp usage metrics (full payload)" {
TOKEN=$(lp-get-token)
export TOKEN
# base payload without any measurement
payload=$(yq -o j <<-EOT
remediation_components: []
log_processors:
- version: "v1.0"
utc_startup_timestamp: 1707399316
hub_items: {}
feature_flags:
- marshmallows
os:
name: CentOS
version: "8"
metrics:
- name: logs_parsed
value: 5000
unit: count
labels: {}
items: []
meta:
window_size_seconds: 600
utc_now_timestamp: 1707485349
console_options:
- share_context
datasources:
syslog: 1
file: 4
EOT
)
rune -0 curl-with-token '/v1/usage-metrics' -X POST --data "$payload"
refute_output
}