lint: style, autofix (#3354)

mmetc 2024-12-05 10:40:48 +01:00 committed by GitHub
parent bbe7752967
commit 7a1ad8376a
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
48 changed files with 177 additions and 169 deletions
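The diff below is a mechanical style pass: consecutive one-line var/const declarations are grouped into blocks, legacy // +build constraints become //go:build, empty struct bodies are collapsed onto a single line, struct literals opened on the declaration line are split onto their own lines, and stray blank lines (after an opening brace, before a closing brace, or between an assignment and its error check) are removed. A minimal Go sketch of the before/after shape — the names below are invented for illustration and do not come from the changed files:

// Hypothetical example of the autofixed patterns; names are invented.
package example

import "fmt"

// Consecutive one-line declarations merged into a single block.
var (
	tailMode = "tail"
	catMode  = "cat"
)

// Empty struct body collapsed onto one line (was "struct {" + "}").
type dumbProcessor struct{}

// Stray blank lines after the opening brace and between the assignment
// and its error check are dropped.
func parseMode(s string) (string, error) {
	mode := s
	if mode != tailMode && mode != catMode {
		return "", fmt.Errorf("unknown mode: %q", mode)
	}
	return mode, nil
}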

View file

@@ -243,7 +243,8 @@ func (cli *cliDashboard) newStopCmd() *cobra.Command {
 }
 
 func (cli *cliDashboard) newShowPasswordCmd() *cobra.Command {
-	cmd := &cobra.Command{Use: "show-password",
+	cmd := &cobra.Command{
+		Use: "show-password",
 		Short: "displays password of metabase.",
 		Args: cobra.NoArgs,
 		DisableAutoGenTag: true,
@@ -457,7 +458,6 @@ func checkGroups(forceYes *bool) (*user.Group, error) {
 func (cli *cliDashboard) chownDatabase(gid string) error {
 	cfg := cli.cfg()
 	intID, err := strconv.Atoi(gid)
-
 	if err != nil {
 		return fmt.Errorf("unable to convert group ID to int: %s", err)
 	}

View file

@ -1,4 +1,5 @@
//go:build !no_cscli_setup //go:build !no_cscli_setup
package main package main
import ( import (

View file

@@ -1,4 +1,4 @@
-// +build !no_datasource_appsec
+//go:build !no_datasource_appsec
 
 package main
 

View file

@@ -16,9 +16,11 @@ type DataSourceCommonCfg struct {
 	Config map[string]interface{} `yaml:",inline"` // to keep the datasource-specific configuration directives
 }
 
-var TAIL_MODE = "tail"
-var CAT_MODE = "cat"
-var SERVER_MODE = "server" // No difference with tail, just a bit more verbose
+var (
+	TAIL_MODE = "tail"
+	CAT_MODE = "cat"
+	SERVER_MODE = "server" // No difference with tail, just a bit more verbose
+)
 
 const (
 	METRICS_NONE = iota

View file

@@ -341,7 +341,6 @@ func TestAppsecOnMatchHooks(t *testing.T) {
 }
 
 func TestAppsecPreEvalHooks(t *testing.T) {
-
 	tests := []appsecRuleTest{
 		{
 			name: "Basic pre_eval hook to disable inband rule",
@@ -403,7 +402,6 @@ func TestAppsecPreEvalHooks(t *testing.T) {
 
 				require.Len(t, responses, 1)
 				require.True(t, responses[0].InBandInterrupt)
-
 			},
 		},
 		{
@@ -670,7 +668,6 @@ func TestAppsecPreEvalHooks(t *testing.T) {
 }
 
 func TestAppsecRemediationConfigHooks(t *testing.T) {
-
 	tests := []appsecRuleTest{
 		{
 			name: "Basic matching rule",
@@ -759,6 +756,7 @@ func TestAppsecRemediationConfigHooks(t *testing.T) {
 		})
 	}
 }
+
 func TestOnMatchRemediationHooks(t *testing.T) {
 	tests := []appsecRuleTest{
 		{

View file

@@ -90,7 +90,6 @@ func (r *AppsecRunner) Init(datadir string) error {
 		outbandCfg = outbandCfg.WithRequestBodyInMemoryLimit(*r.AppsecRuntime.Config.OutOfBandOptions.RequestBodyInMemoryLimit)
 	}
 	r.AppsecOutbandEngine, err = coraza.NewWAF(outbandCfg)
-
 	if err != nil {
 		return fmt.Errorf("unable to initialize outband engine : %w", err)
 	}
@@ -379,7 +378,6 @@ func (r *AppsecRunner) handleRequest(request *appsec.ParsedRequest) {
 	// time spent to process inband AND out of band rules
 	globalParsingElapsed := time.Since(startGlobalParsing)
 	AppsecGlobalParsingHistogram.With(prometheus.Labels{"source": request.RemoteAddrNormalized, "appsec_engine": request.AppsecEngine}).Observe(globalParsingElapsed.Seconds())
-
 }
 
 func (r *AppsecRunner) Run(t *tomb.Tomb) error {

View file

@@ -66,7 +66,8 @@ func loadAppSecEngine(test appsecRuleTest, t *testing.T) {
 		outofbandRules = append(outofbandRules, strRule)
 	}
 
-	appsecCfg := appsec.AppsecConfig{Logger: logger,
+	appsecCfg := appsec.AppsecConfig{
+		Logger: logger,
 		OnLoad: test.on_load,
 		PreEval: test.pre_eval,
 		PostEval: test.post_eval,
@@ -75,7 +76,8 @@ func loadAppSecEngine(test appsecRuleTest, t *testing.T) {
 		UserBlockedHTTPCode: test.UserBlockedHTTPCode,
 		UserPassedHTTPCode: test.UserPassedHTTPCode,
 		DefaultRemediation: test.DefaultRemediation,
-		DefaultPassAction: test.DefaultPassAction}
+		DefaultPassAction: test.DefaultPassAction,
+	}
 	AppsecRuntime, err := appsecCfg.Build()
 	if err != nil {
 		t.Fatalf("unable to build appsec runtime : %s", err)

View file

@@ -9,8 +9,7 @@ import (
 	"github.com/crowdsecurity/coraza/v3/experimental/plugins/plugintypes"
 )
 
-type rawBodyProcessor struct {
-}
+type rawBodyProcessor struct{}
 
 type setterInterface interface {
 	Set(string)
@@ -33,9 +32,7 @@ func (*rawBodyProcessor) ProcessResponse(reader io.Reader, v plugintypes.Transac
 	return nil
 }
 
-var (
-	_ plugintypes.BodyProcessor = &rawBodyProcessor{}
-)
+var _ plugintypes.BodyProcessor = &rawBodyProcessor{}
 
 //nolint:gochecknoinits //Coraza recommends to use init() for registering plugins
 func init() {

View file

@@ -26,9 +26,7 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/types"
 )
 
-var (
-	dataSourceName = "http"
-)
+var dataSourceName = "http"
 
 var linesRead = prometheus.NewCounterVec(
 	prometheus.CounterOpts{

View file

@@ -18,9 +18,9 @@ import (
 	"github.com/crowdsecurity/go-cs-lib/cstest"
 	"github.com/prometheus/client_golang/prometheus"
 	log "github.com/sirupsen/logrus"
-	"gopkg.in/tomb.v2"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
+	"gopkg.in/tomb.v2"
 )
 
 const (
@@ -257,7 +257,6 @@ basic_auth:
 	h.Server.Close()
 	tomb.Kill(nil)
 	tomb.Wait()
-
 }
 
 func TestStreamingAcquisitionUnknownPath(t *testing.T) {

View file

@@ -48,7 +48,6 @@ func WithStrictHostname() RFC3164Option {
 }
 
 func (r *RFC3164) parsePRI() error {
-
 	pri := 0
 
 	if r.buf[r.position] != '<' {

View file

@@ -48,7 +48,6 @@ func WithStrictHostname() RFC5424Option {
 }
 
 func (r *RFC5424) parsePRI() error {
-
 	pri := 0
 
 	if r.buf[r.position] != '<' {
@@ -94,7 +93,6 @@ func (r *RFC5424) parseVersion() error {
 }
 
 func (r *RFC5424) parseTimestamp() error {
-
 	timestamp := []byte{}
 
 	if r.buf[r.position] == NIL_VALUE {
@@ -121,7 +119,6 @@ func (r *RFC5424) parseTimestamp() error {
 	}
 
 	date, err := time.Parse(VALID_TIMESTAMP, string(timestamp))
-
 	if err != nil {
 		return errors.New("timestamp is not valid")
 	}

View file

@@ -94,7 +94,8 @@ func TestParse(t *testing.T) {
 	}{
 		{
 			"valid msg",
-			`<13>1 2021-05-18T11:58:40.828081+02:42 mantis sshd 49340 - [timeQuality isSynced="0" tzKnown="1"] blabla`, expected{
+			`<13>1 2021-05-18T11:58:40.828081+02:42 mantis sshd 49340 - [timeQuality isSynced="0" tzKnown="1"] blabla`,
+			expected{
 				Timestamp: time.Date(2021, 5, 18, 11, 58, 40, 828081000, time.FixedZone("+0242", 9720)),
 				Hostname: "mantis",
 				Tag: "sshd",
@@ -102,11 +103,14 @@ func TestParse(t *testing.T) {
 				MsgID: "",
 				Message: "blabla",
 				PRI: 13,
-			}, "", []RFC5424Option{},
+			},
+			"",
+			[]RFC5424Option{},
 		},
 		{
 			"valid msg with msgid",
-			`<13>1 2021-05-18T11:58:40.828081+02:42 mantis foobar 49340 123123 [timeQuality isSynced="0" tzKnown="1"] blabla`, expected{
+			`<13>1 2021-05-18T11:58:40.828081+02:42 mantis foobar 49340 123123 [timeQuality isSynced="0" tzKnown="1"] blabla`,
+			expected{
 				Timestamp: time.Date(2021, 5, 18, 11, 58, 40, 828081000, time.FixedZone("+0242", 9720)),
 				Hostname: "mantis",
 				Tag: "foobar",
@@ -114,11 +118,14 @@ func TestParse(t *testing.T) {
 				MsgID: "123123",
 				Message: "blabla",
 				PRI: 13,
-			}, "", []RFC5424Option{},
+			},
+			"",
+			[]RFC5424Option{},
 		},
 		{
 			"valid msg with repeating SD",
-			`<13>1 2021-05-18T11:58:40.828081+02:42 mantis foobar 49340 123123 [timeQuality isSynced="0" tzKnown="1"][foo="bar][a] blabla`, expected{
+			`<13>1 2021-05-18T11:58:40.828081+02:42 mantis foobar 49340 123123 [timeQuality isSynced="0" tzKnown="1"][foo="bar][a] blabla`,
+			expected{
 				Timestamp: time.Date(2021, 5, 18, 11, 58, 40, 828081000, time.FixedZone("+0242", 9720)),
 				Hostname: "mantis",
 				Tag: "foobar",
@@ -126,36 +133,53 @@ func TestParse(t *testing.T) {
 				MsgID: "123123",
 				Message: "blabla",
 				PRI: 13,
-			}, "", []RFC5424Option{},
+			},
+			"",
+			[]RFC5424Option{},
 		},
 		{
 			"invalid SD",
-			`<13>1 2021-05-18T11:58:40.828081+02:00 mantis foobar 49340 123123 [timeQuality asd`, expected{}, "structured data must end with ']'", []RFC5424Option{},
+			`<13>1 2021-05-18T11:58:40.828081+02:00 mantis foobar 49340 123123 [timeQuality asd`,
+			expected{},
+			"structured data must end with ']'",
+			[]RFC5424Option{},
 		},
 		{
 			"invalid version",
-			`<13>42 2021-05-18T11:58:40.828081+02:00 mantis foobar 49340 123123 [timeQuality isSynced="0" tzKnown="1"] blabla`, expected{}, "version must be 1", []RFC5424Option{},
+			`<13>42 2021-05-18T11:58:40.828081+02:00 mantis foobar 49340 123123 [timeQuality isSynced="0" tzKnown="1"] blabla`,
+			expected{},
+			"version must be 1",
+			[]RFC5424Option{},
 		},
 		{
 			"invalid message",
-			`<13>1`, expected{}, "version must be followed by a space", []RFC5424Option{},
+			`<13>1`,
+			expected{},
+			"version must be followed by a space",
+			[]RFC5424Option{},
 		},
 		{
 			"valid msg with empty fields",
-			`<13>1 - foo - - - - blabla`, expected{
+			`<13>1 - foo - - - - blabla`,
+			expected{
 				Timestamp: time.Now().UTC(),
 				Hostname: "foo",
 				PRI: 13,
 				Message: "blabla",
-			}, "", []RFC5424Option{},
+			},
+			"",
+			[]RFC5424Option{},
 		},
 		{
 			"valid msg with empty fields",
-			`<13>1 - - - - - - blabla`, expected{
+			`<13>1 - - - - - - blabla`,
+			expected{
 				Timestamp: time.Now().UTC(),
 				PRI: 13,
 				Message: "blabla",
-			}, "", []RFC5424Option{},
+			},
+			"",
+			[]RFC5424Option{},
 		},
 		{
 			"valid msg with escaped SD",
@@ -167,7 +191,9 @@ func TestParse(t *testing.T) {
 				Hostname: "testhostname",
 				MsgID: `sn="msgid"`,
 				Message: `testmessage`,
-			}, "", []RFC5424Option{},
+			},
+			"",
+			[]RFC5424Option{},
 		},
 		{
 			"valid complex msg",
@@ -179,7 +205,9 @@ func TestParse(t *testing.T) {
 				PRI: 13,
 				MsgID: `sn="msgid"`,
 				Message: `source: sn="www.foobar.com" | message: 1.1.1.1 - - [24/May/2022:10:57:37 +0200] "GET /dist/precache-manifest.58b57debe6bc4f96698da0dc314461e9.js HTTP/2.0" 304 0 "https://www.foobar.com/sw.js" "Mozilla/5.0 (Linux; Android 9; ANE-LX1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.61 Mobile Safari/537.36" "-" "www.foobar.com" sn="www.foobar.com" rt=0.000 ua="-" us="-" ut="-" ul="-" cs=HIT { request: /dist/precache-manifest.58b57debe6bc4f96698da0dc314461e9.js | src_ip_geo_country: DE | MONTH: May | COMMONAPACHELOG: 1.1.1.1 - - [24/May/2022:10:57:37 +0200] "GET /dist/precache-manifest.58b57debe6bc4f96698da0dc314461e9.js HTTP/2.0" 304 0 | auth: - | HOUR: 10 | gl2_remote_ip: 172.31.32.142 | ident: - | gl2_remote_port: 43375 | BASE10NUM: [2.0, 304, 0] | pid: -1 | program: nginx | gl2_source_input: 623ed3440183476d61cff974 | INT: +0200 | is_private_ip: false | YEAR: 2022 | src_ip_geo_city: Achern | clientip: 1.1.1.1 | USERNAME:`,
-			}, "", []RFC5424Option{},
+			},
+			"",
+			[]RFC5424Option{},
 		},
 		{
 			"partial message",

View file

@@ -25,7 +25,6 @@ type SyslogMessage struct {
 }
 
 func (s *SyslogServer) Listen(listenAddr string, port int) error {
-
 	s.listenAddr = listenAddr
 	s.port = port
 	udpAddr, err := net.ResolveUDPAddr("udp", fmt.Sprintf("%s:%d", s.listenAddr, s.port))

View file

@@ -239,7 +239,6 @@ func TestValidateContextExpr(t *testing.T) {
 }
 
 func TestAppsecEventToContext(t *testing.T) {
-
 	tests := []struct {
 		name string
 		contextToSend map[string][]string

View file

@@ -62,7 +62,6 @@ func (t *JWTTransport) refreshJwtToken() error {
 	enc := json.NewEncoder(buf)
 	enc.SetEscapeHTML(false)
 	err = enc.Encode(auth)
-
 	if err != nil {
 		return fmt.Errorf("could not encode jwt auth body: %w", err)
 	}
@@ -169,7 +168,6 @@ func (t *JWTTransport) prepareRequest(req *http.Request) (*http.Request, error)
 
 // RoundTrip implements the RoundTripper interface.
 func (t *JWTTransport) RoundTrip(req *http.Request) (*http.Response, error) {
-
 	var resp *http.Response
 
 	attemptsCount := make(map[int]int)
@@ -229,7 +227,6 @@ func (t *JWTTransport) RoundTrip(req *http.Request) (*http.Response, error) {
 		}
 	}
 	return resp, nil
-
 }
 
 func (t *JWTTransport) Client() *http.Client {

View file

@@ -332,7 +332,6 @@ func getScenarioTrustOfAlert(alert *models.Alert) string {
 }
 
 func shouldShareAlert(alert *models.Alert, consoleConfig *csconfig.ConsoleConfig, shareSignals bool) bool {
-
 	if !shareSignals {
 		log.Debugf("sharing signals is disabled")
 		return false

View file

@@ -174,7 +174,6 @@ func (a *APIKey) authPlain(c *gin.Context, logger *log.Entry) *ent.Bouncer {
 
 		logger.Infof("Creating bouncer %s", bouncerName)
 		bouncer, err = a.DbClient.CreateBouncer(ctx, bouncerName, clientIP, hashStr, types.ApiKeyAuthType, true)
-
 		if err != nil {
 			logger.Errorf("while creating bouncer db entry: %s", err)
 			return nil

View file

@@ -158,7 +158,6 @@ func (wc *AppsecConfig) SetUpLogger() {
 	/* wc.Name is actually the datasource name.*/
 	wc.Logger = wc.Logger.Dup().WithField("name", wc.Name)
 	wc.Logger.Logger.SetLevel(*wc.LogLevel)
-
 }
 
 func (wc *AppsecConfig) LoadByPath(file string) error {

View file

@@ -47,7 +47,6 @@ type CustomRule struct {
 }
 
 func (v *CustomRule) Convert(ruleType string, appsecRuleName string) (string, []uint32, error) {
-
 	if v.Zones == nil && v.And == nil && v.Or == nil {
 		return "", nil, errors.New("no zones defined")
 	}

View file

@@ -88,7 +88,6 @@ func TestVPatchRuleString(t *testing.T) {
 			rule: CustomRule{
 				And: []CustomRule{
 					{
-
 						Zones: []string{"ARGS"},
 						Variables: []string{"foo"},
 						Match: Match{Type: "regex", Value: "[^a-zA-Z]"},
@@ -161,7 +160,6 @@ SecRule ARGS_GET:foo "@rx [^a-zA-Z]" "id:1519945803,phase:2,deny,log,msg:'OR AND
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			actual, _, err := tt.rule.Convert(ModsecurityRuleType, tt.name)
-
 			if err != nil {
 				t.Errorf("Error converting rule: %s", err)
 			}

View file

@@ -3,7 +3,6 @@ package appsec
 import "testing"
 
 func TestBodyDumper(t *testing.T) {
-
 	tests := []struct {
 		name string
 		req *ParsedRequest
@@ -159,7 +158,6 @@ func TestBodyDumper(t *testing.T) {
 	}
 
 	for idx, test := range tests {
-
 		t.Run(test.name, func(t *testing.T) {
 			orig_dr := test.req.DumpRequest()
 			result := test.filter(orig_dr).GetFilteredRequest()
@@ -177,5 +175,4 @@ func TestBodyDumper(t *testing.T) {
 			}
 		})
 	}
-
 }

View file

@@ -91,7 +91,6 @@ func (pb *PluginBroker) Init(ctx context.Context, pluginCfg *csconfig.PluginCfg,
 	pb.watcher = PluginWatcher{}
 	pb.watcher.Init(pb.pluginConfigByName, pb.alertsByPluginName)
 	return nil
-
 }
 
 func (pb *PluginBroker) Kill() {
@@ -166,6 +165,7 @@ func (pb *PluginBroker) addProfileAlert(profileAlert ProfileAlert) {
 		pb.watcher.Inserts <- pluginName
 	}
 }
+
 func (pb *PluginBroker) profilesContainPlugin(pluginName string) bool {
 	for _, profileCfg := range pb.profileConfigs {
 		for _, name := range profileCfg.Notifications {
@@ -176,6 +176,7 @@ func (pb *PluginBroker) profilesContainPlugin(pluginName string) bool {
 	}
 	return false
 }
+
 func (pb *PluginBroker) loadConfig(path string) error {
 	files, err := listFilesAtPath(path)
 	if err != nil {
@@ -277,7 +278,6 @@ func (pb *PluginBroker) loadPlugins(ctx context.Context, path string) error {
 }
 
 func (pb *PluginBroker) loadNotificationPlugin(name string, binaryPath string) (protobufs.NotifierServer, error) {
-
 	handshake, err := getHandshake()
 	if err != nil {
 		return nil, err

View file

@@ -210,7 +210,6 @@ func (c *SmokeItem) GetFalsePositives() []string {
 }
 
 func (c *SmokeItem) IsFalsePositive() bool {
-
 	if c.Classifications.FalsePositives != nil {
 		if len(c.Classifications.FalsePositives) > 0 {
 			return true
@@ -284,7 +283,6 @@ func (c *FireItem) GetFalsePositives() []string {
 }
 
 func (c *FireItem) IsFalsePositive() bool {
-
 	if c.Classifications.FalsePositives != nil {
 		if len(c.Classifications.FalsePositives) > 0 {
 			return true

View file

@@ -12,16 +12,20 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/types"
 )
 
-var CTIUrl = "https://cti.api.crowdsec.net"
-var CTIUrlSuffix = "/v2/smoke/"
-var CTIApiKey = ""
+var (
+	CTIUrl = "https://cti.api.crowdsec.net"
+	CTIUrlSuffix = "/v2/smoke/"
+	CTIApiKey = ""
+)
 
 // this is set for non-recoverable errors, such as 403 when querying API or empty API key
 var CTIApiEnabled = false
 
 // when hitting quotas or auth errors, we temporarily disable the API
-var CTIBackOffUntil time.Time
-var CTIBackOffDuration = 5 * time.Minute
+var (
+	CTIBackOffUntil time.Time
+	CTIBackOffDuration = 5 * time.Minute
+)
 
 var ctiClient *cticlient.CrowdsecCTIClient
@@ -62,8 +66,10 @@ func ShutdownCrowdsecCTI() {
 }
 
 // Cache for responses
-var CTICache gcache.Cache
-var CacheExpiration time.Duration
+var (
+	CTICache gcache.Cache
+	CacheExpiration time.Duration
+)
 
 func CrowdsecCTIInitCache(size int, ttl time.Duration) {
 	CTICache = gcache.New(size).LRU().Build()

View file

@@ -14,7 +14,6 @@ func GeoIPEnrich(params ...any) (any, error) {
 
 	parsedIP := net.ParseIP(ip)
 	city, err := geoIPCityReader.City(parsedIP)
-
 	if err != nil {
 		return nil, err
 	}
@@ -31,7 +30,6 @@ func GeoIPASNEnrich(params ...any) (any, error) {
 
 	parsedIP := net.ParseIP(ip)
 	asn, err := geoIPASNReader.ASN(parsedIP)
-
 	if err != nil {
 		return nil, err
 	}
@@ -50,7 +48,6 @@ func GeoIPRangeEnrich(params ...any) (any, error) {
 
 	parsedIP := net.ParseIP(ip)
 	rangeIP, ok, err := geoIPRangeReader.LookupNetwork(parsedIP, &dummy)
-
 	if err != nil {
 		return nil, err
 	}

View file

@@ -2,12 +2,14 @@ package fflag
 
 var Crowdsec = FeatureRegister{EnvPrefix: "CROWDSEC_FEATURE_"}
 
-var CscliSetup = &Feature{Name: "cscli_setup", Description: "Enable cscli setup command (service detection)"}
-var DisableHttpRetryBackoff = &Feature{Name: "disable_http_retry_backoff", Description: "Disable http retry backoff"}
-var ChunkedDecisionsStream = &Feature{Name: "chunked_decisions_stream", Description: "Enable chunked decisions stream"}
-var PapiClient = &Feature{Name: "papi_client", Description: "Enable Polling API client", State: DeprecatedState}
-var Re2GrokSupport = &Feature{Name: "re2_grok_support", Description: "Enable RE2 support for GROK patterns"}
-var Re2RegexpInfileSupport = &Feature{Name: "re2_regexp_in_file_support", Description: "Enable RE2 support for RegexpInFile expr helper"}
+var (
+	CscliSetup = &Feature{Name: "cscli_setup", Description: "Enable cscli setup command (service detection)"}
+	DisableHttpRetryBackoff = &Feature{Name: "disable_http_retry_backoff", Description: "Disable http retry backoff"}
+	ChunkedDecisionsStream = &Feature{Name: "chunked_decisions_stream", Description: "Enable chunked decisions stream"}
+	PapiClient = &Feature{Name: "papi_client", Description: "Enable Polling API client", State: DeprecatedState}
+	Re2GrokSupport = &Feature{Name: "re2_grok_support", Description: "Enable RE2 support for GROK patterns"}
+	Re2RegexpInfileSupport = &Feature{Name: "re2_regexp_in_file_support", Description: "Enable RE2 support for RegexpInFile expr helper"}
+)
 
 func RegisterAllFeatures() error {
 	err := Crowdsec.RegisterFeature(CscliSetup)

View file

@@ -49,7 +49,6 @@ func (bl *Blackhole) OnBucketOverflow(bucketFactory *BucketFactory) func(*Leaky,
 			tmp = append(tmp, element)
 		} else {
 			leaky.logger.Debugf("%s left blackhole %s ago", element.key, leaky.Ovflw_ts.Sub(element.expiration))
-
 		}
 	}
 	bl.hiddenKeys = tmp
@@ -64,5 +63,4 @@ func (bl *Blackhole) OnBucketOverflow(bucketFactory *BucketFactory) func(*Leaky,
 		leaky.logger.Debugf("Adding overflow to blackhole (%s)", leaky.First_ts)
 		return alert, queue
 	}
-
 }

View file

@@ -204,7 +204,6 @@ func FromFactory(bucketFactory BucketFactory) *Leaky {
 /* for now mimic a leak routine */
 //LeakRoutine us the life of a bucket. It dies when the bucket underflows or overflows
 func LeakRoutine(leaky *Leaky) error {
-
 	var (
 		durationTickerChan = make(<-chan time.Time)
 		durationTicker *time.Ticker

View file

@@ -25,5 +25,4 @@ func NewBuckets() *Buckets {
 
 func GetKey(bucketCfg BucketFactory, stackkey string) string {
 	return fmt.Sprintf("%x", sha1.Sum([]byte(bucketCfg.Filter+stackkey+bucketCfg.Name)))
-
 }

View file

@@ -11,8 +11,10 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/types"
 )
 
-var conditionalExprCache map[string]vm.Program
-var conditionalExprCacheLock sync.Mutex
+var (
+	conditionalExprCache map[string]vm.Program
+	conditionalExprCacheLock sync.Mutex
+)
 
 type ConditionalOverflow struct {
 	ConditionalFilter string

View file

@@ -51,7 +51,7 @@ func TestBadBucketsConfig(t *testing.T) {
 }
 
 func TestLeakyBucketsConfig(t *testing.T) {
-	var CfgTests = []cfgTest{
+	CfgTests := []cfgTest{
 		// leaky with bad capacity
 		{BucketFactory{Name: "test", Description: "test1", Type: "leaky", Capacity: 0}, false, false},
 		// leaky with empty leakspeed
@@ -73,11 +73,10 @@ func TestLeakyBucketsConfig(t *testing.T) {
 	if err := runTest(CfgTests); err != nil {
 		t.Fatalf("%s", err)
 	}
-
 }
 
 func TestBlackholeConfig(t *testing.T) {
-	var CfgTests = []cfgTest{
+	CfgTests := []cfgTest{
 		// basic bh
 		{BucketFactory{Name: "test", Description: "test1", Type: "trigger", Filter: "true", Blackhole: "15s"}, true, true},
 		// bad bh
@@ -87,11 +86,10 @@
 	if err := runTest(CfgTests); err != nil {
 		t.Fatalf("%s", err)
 	}
-
 }
 
 func TestTriggerBucketsConfig(t *testing.T) {
-	var CfgTests = []cfgTest{
+	CfgTests := []cfgTest{
 		// basic valid counter
 		{BucketFactory{Name: "test", Description: "test1", Type: "trigger", Filter: "true"}, true, true},
 	}
@@ -99,12 +97,10 @@ func TestTriggerBucketsConfig(t *testing.T) {
 	if err := runTest(CfgTests); err != nil {
 		t.Fatalf("%s", err)
 	}
-
 }
 
 func TestCounterBucketsConfig(t *testing.T) {
-
-	var CfgTests = []cfgTest{
+	CfgTests := []cfgTest{
 		// basic valid counter
 		{BucketFactory{Name: "test", Description: "test1", Type: "counter", Capacity: -1, Duration: "5s", Filter: "true"}, true, true},
 		// missing duration
@@ -117,12 +113,10 @@ func TestCounterBucketsConfig(t *testing.T) {
 	if err := runTest(CfgTests); err != nil {
 		t.Fatalf("%s", err)
 	}
-
 }
 
 func TestBayesianBucketsConfig(t *testing.T) {
-
-	var CfgTests = []cfgTest{
+	CfgTests := []cfgTest{
 		// basic valid counter
 		{BucketFactory{Name: "test", Description: "test1", Type: "bayesian", Capacity: -1, Filter: "true", BayesianPrior: 0.5, BayesianThreshold: 0.5, BayesianConditions: []RawBayesianCondition{{ConditionalFilterName: "true", ProbGivenEvil: 0.5, ProbGivenBenign: 0.5}}}, true, true},
 		// bad capacity
@@ -139,5 +133,4 @@ func TestBayesianBucketsConfig(t *testing.T) {
 	if err := runTest(CfgTests); err != nil {
 		t.Fatalf("%s", err)
 	}
-
 }

View file

@@ -17,9 +17,11 @@ import (
 	"github.com/crowdsecurity/crowdsec/pkg/types"
 )
 
-var serialized map[string]Leaky
-var BucketPourCache map[string][]types.Event
-var BucketPourTrack bool
+var (
+	serialized map[string]Leaky
+	BucketPourCache map[string][]types.Event
+	BucketPourTrack bool
+)
 
 /*
 The leaky routines lifecycle are based on "real" time.
@@ -243,7 +245,6 @@ func PourItemToBucket(bucket *Leaky, holder BucketFactory, buckets *Buckets, par
 }
 
 func LoadOrStoreBucketFromHolder(partitionKey string, buckets *Buckets, holder BucketFactory, expectMode int) (*Leaky, error) {
-
 	biface, ok := buckets.Bucket_map.Load(partitionKey)
 
 	/* the bucket doesn't exist, create it !*/
@@ -283,9 +284,7 @@ func LoadOrStoreBucketFromHolder(partitionKey string, buckets *Buckets, holder B
 var orderEvent map[string]*sync.WaitGroup
 
 func PourItemToHolders(parsed types.Event, holders []BucketFactory, buckets *Buckets) (bool, error) {
-	var (
-		ok, condition, poured bool
-	)
+	var ok, condition, poured bool
 
 	if BucketPourTrack {
 		if BucketPourCache == nil {

View file

@@ -10,8 +10,7 @@ type Processor interface {
 	AfterBucketPour(Bucket *BucketFactory) func(types.Event, *Leaky) *types.Event
 }
 
-type DumbProcessor struct {
-}
+type DumbProcessor struct{}
 
 func (d *DumbProcessor) OnBucketInit(bucketFactory *BucketFactory) error {
 	return nil

View file

@@ -23,10 +23,12 @@ type CancelOnFilter struct {
 	Debug bool
 }
 
-var cancelExprCacheLock sync.Mutex
-var cancelExprCache map[string]struct {
-	CancelOnFilter *vm.Program
-}
+var (
+	cancelExprCacheLock sync.Mutex
+	cancelExprCache map[string]struct {
+		CancelOnFilter *vm.Program
+	}
+)
 
 func (u *CancelOnFilter) OnBucketPour(bucketFactory *BucketFactory) func(types.Event, *Leaky) *types.Event {
 	return func(msg types.Event, leaky *Leaky) *types.Event {

View file

@@ -16,8 +16,10 @@ import (
 // on overflow
 // on leak
 
-var uniqExprCache map[string]vm.Program
-var uniqExprCacheLock sync.Mutex
+var (
+	uniqExprCache map[string]vm.Program
+	uniqExprCacheLock sync.Mutex
+)
 
 type Uniq struct {
 	DistinctCompiled *vm.Program

View file

@@ -7,8 +7,10 @@ import (
 )
 
 /* should be part of a package shared with enrich/geoip.go */
-type EnrichFunc func(string, *types.Event, *log.Entry) (map[string]string, error)
-type InitFunc func(map[string]string) (interface{}, error)
+type (
+	EnrichFunc func(string, *types.Event, *log.Entry) (map[string]string, error)
+	InitFunc func(map[string]string) (interface{}, error)
+)
 
 type EnricherCtx struct {
 	Registered map[string]*Enricher

View file

@@ -18,7 +18,6 @@ func IpToRange(field string, p *types.Event, plog *log.Entry) (map[string]string
 	}
 
 	r, err := exprhelpers.GeoIPRangeEnrich(field)
-
 	if err != nil {
 		plog.Errorf("Unable to enrich ip '%s'", field)
 		return nil, nil //nolint:nilerr
@@ -47,7 +46,6 @@ func GeoIpASN(field string, p *types.Event, plog *log.Entry) (map[string]string,
 	}
 
 	r, err := exprhelpers.GeoIPASNEnrich(field)
-
 	if err != nil {
 		plog.Debugf("Unable to enrich ip '%s'", field)
 		return nil, nil //nolint:nilerr
@@ -81,7 +79,6 @@ func GeoIpCity(field string, p *types.Event, plog *log.Entry) (map[string]string
 	}
 
 	r, err := exprhelpers.GeoIPEnrich(field)
-
 	if err != nil {
 		plog.Debugf("Unable to enrich ip '%s'", field)
 		return nil, nil //nolint:nilerr

View file

@@ -151,7 +151,7 @@ func testOneParser(pctx *UnixParserCtx, ectx EnricherCtx, dir string, b *testing
 		b.ResetTimer()
 	}
 
-	for range(count) {
+	for range count {
 		if !testFile(tests, *pctx, pnodes) {
 			return errors.New("test failed")
 		}
@@ -285,7 +285,7 @@ func matchEvent(expected types.Event, out types.Event, debug bool) ([]string, bo
 
 	valid = true
 
-	for mapIdx := range(len(expectMaps)) {
+	for mapIdx := range len(expectMaps) {
 		for expKey, expVal := range expectMaps[mapIdx] {
 			outVal, ok := outMaps[mapIdx][expKey]
 			if !ok {

View file

@@ -248,14 +248,18 @@ func stageidx(stage string, stages []string) int {
 	return -1
 }
 
-var ParseDump bool
-var DumpFolder string
+var (
+	ParseDump bool
+	DumpFolder string
+)
 
-var StageParseCache dumps.ParserResults
-var StageParseMutex sync.Mutex
+var (
+	StageParseCache dumps.ParserResults
+	StageParseMutex sync.Mutex
+)
 
 func Parse(ctx UnixParserCtx, xp types.Event, nodes []Node) (types.Event, error) {
-	var event = xp
+	event := xp
 
 	/* the stage is undefined, probably line is freshly acquired, set to first stage !*/
 	if event.Stage == "" && len(ctx.Stages) > 0 {

View file

@@ -60,7 +60,6 @@ func (w AppsecEvent) GetVar(varName string) string {
 	}
 	log.Infof("var %s not found. Available variables: %+v", varName, w.Vars)
 	return ""
-
 }
 
 // getters

View file

@@ -1,23 +1,29 @@
 package types
 
-const ApiKeyAuthType = "api-key"
-const TlsAuthType = "tls"
-const PasswordAuthType = "password"
+const (
+	ApiKeyAuthType = "api-key"
+	TlsAuthType = "tls"
+	PasswordAuthType = "password"
+)
 
-const PAPIBaseURL = "https://papi.api.crowdsec.net/"
-const PAPIVersion = "v1"
-const PAPIPollUrl = "/decisions/stream/poll"
-const PAPIPermissionsUrl = "/permissions"
+const (
+	PAPIBaseURL = "https://papi.api.crowdsec.net/"
+	PAPIVersion = "v1"
+	PAPIPollUrl = "/decisions/stream/poll"
+	PAPIPermissionsUrl = "/permissions"
+)
 
 const CAPIBaseURL = "https://api.crowdsec.net/"
 
-const CscliOrigin = "cscli"
-const CrowdSecOrigin = "crowdsec"
-const ConsoleOrigin = "console"
-const CscliImportOrigin = "cscli-import"
-const ListOrigin = "lists"
-const CAPIOrigin = "CAPI"
-const CommunityBlocklistPullSourceScope = "crowdsecurity/community-blocklist"
+const (
+	CscliOrigin = "cscli"
+	CrowdSecOrigin = "crowdsec"
+	ConsoleOrigin = "console"
+	CscliImportOrigin = "cscli-import"
+	ListOrigin = "lists"
+	CAPIOrigin = "CAPI"
+	CommunityBlocklistPullSourceScope = "crowdsecurity/community-blocklist"
+)
 
 const DecisionTypeBan = "ban"

View file

@@ -46,7 +46,6 @@ func TestSetParsed(t *testing.T) {
 			assert.Equal(t, tt.value, tt.evt.Parsed[tt.key])
 		})
 	}
-
 }
 
 func TestSetMeta(t *testing.T) {
@@ -86,7 +85,6 @@ func TestSetMeta(t *testing.T) {
 			assert.Equal(t, tt.value, tt.evt.GetMeta(tt.key))
 		})
 	}
-
 }
 
 func TestParseIPSources(t *testing.T) {

View file

@@ -100,7 +100,6 @@ func GetFSType(path string) (string, error) {
 
 	var buf unix.Statfs_t
 	err := unix.Statfs(path, &buf)
-
 	if err != nil {
 		return "", err
 	}

View file

@@ -23,7 +23,8 @@ func LastAddress(n net.IPNet) net.IP {
 			ip[6] | ^n.Mask[6], ip[7] | ^n.Mask[7], ip[8] | ^n.Mask[8],
 			ip[9] | ^n.Mask[9], ip[10] | ^n.Mask[10], ip[11] | ^n.Mask[11],
 			ip[12] | ^n.Mask[12], ip[13] | ^n.Mask[13], ip[14] | ^n.Mask[14],
-			ip[15] | ^n.Mask[15]}
+			ip[15] | ^n.Mask[15],
+		}
 	}
 
 	return net.IPv4(

View file

@@ -8,13 +8,13 @@ import (
 )
 
 func TestIP2Int(t *testing.T) {
-
 	tEmpty := net.IP{}
 	_, _, _, err := IP2Ints(tEmpty)
 	if !strings.Contains(err.Error(), "unexpected len 0 for <nil>") {
 		t.Fatalf("unexpected: %s", err)
 	}
 }
+
 func TestRange2Int(t *testing.T) {
 	tEmpty := net.IPNet{}
 	// empty item
@@ -22,7 +22,6 @@ func TestRange2Int(t *testing.T) {
 	if !strings.Contains(err.Error(), "converting first ip in range") {
 		t.Fatalf("unexpected: %s", err)
 	}
-
 }
 
 func TestAdd2Int(t *testing.T) {

View file

@@ -10,9 +10,11 @@ import (
 	"gopkg.in/natefinch/lumberjack.v2"
 )
 
-var logFormatter log.Formatter
-var LogOutput *lumberjack.Logger //io.Writer
-var logLevel log.Level
+var (
+	logFormatter log.Formatter
+	LogOutput *lumberjack.Logger // io.Writer
+	logLevel log.Level
+)
 
 func SetDefaultLoggerConfig(cfgMode string, cfgFolder string, cfgLevel log.Level, maxSize int, maxFiles int, maxAge int, compress *bool, forceColors bool) error {
 	/*Configure logs*/