lint: enable more gocritic checks (#3363)

* lint: gocritic - avoid unnecessary byte->string conversions
* lint: gocritic - use %q instead of "%s"
* lint: return struct from DataSource constructor, not pointer (ptrToRefParam)
* lint: gocritic - check use of append()
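For context, a minimal Go sketch of the before/after patterns these checks target. This is illustrative only: Source, newSourceOld/newSourceNew, scenario, filters and the values are made-up stand-ins, not code from this repository.

// Illustrative sketch only: hypothetical before/after patterns for the checks
// listed above. Names and values are made up, not repository code.
package main

import "fmt"

// ptrToRefParam: return the interface value itself, not a pointer to an interface.
type Source interface{ Name() string }

func newSourceOld() *Source { return nil } // flagged
func newSourceNew() Source  { return nil } // preferred

func main() {
	scenario := "crowdsecurity/ssh-bf"
	filters := []string{"_SYSTEMD_UNIT=ssh.service"}

	// sprintfQuotedString: %q replaces a manually escaped "%s".
	_ = fmt.Sprintf("Alert.GetScenario() == \"%s\"", scenario) // flagged
	_ = fmt.Sprintf("Alert.GetScenario() == %q", scenario)     // preferred

	// byte->string: don't convert []byte to string just to test for emptiness.
	output := []byte("some command output")
	hasOutput := string(output) != "" // flagged
	hasOutput = len(output) != 0      // preferred
	_ = hasOutput

	// appendAssign: assign append's result back to the slice being appended to.
	args := []string{"--follow"}
	var other []string
	other = append(args, filters...) // flagged: appends to args, assigns to other
	_ = other
	args = append(args, filters...) // preferred
	_ = args
}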
mmetc 2024-12-13 15:42:55 +01:00 committed by GitHub
parent 08296d9cfa
commit 118275fd03
8 changed files with 47 additions and 22 deletions


@@ -192,13 +192,9 @@ linters-settings:
- typeUnparen
- commentFormatting
- deferInLoop #
- sprintfQuotedString #
- whyNoLint
- equalFold #
- unnecessaryBlock #
- ptrToRefParam #
- stringXbytes #
- appendAssign #
- tooManyResultsChecker
- unnecessaryDefer
- docStub


@@ -116,7 +116,7 @@ func setupLogger(source, name string, level *log.Level) (*log.Entry, error) {
// if the configuration is not valid it returns an error.
// If the datasource can't be run (eg. journalctl not available), it still returns an error which
// can be checked for the appropriate action.
func DataSourceConfigure(commonConfig configuration.DataSourceCommonCfg, metricsLevel int) (*DataSource, error) {
func DataSourceConfigure(commonConfig configuration.DataSourceCommonCfg, metricsLevel int) (DataSource, error) {
// we dump it back to []byte, because we want to decode the yaml blob twice:
// once to DataSourceCommonCfg, and then later to the dedicated type of the datasource
yamlConfig, err := yaml.Marshal(commonConfig)
@@ -143,7 +143,7 @@ func DataSourceConfigure(commonConfig configuration.DataSourceCommonCfg, metrics
return nil, err
}
return &dataSrc, nil
return dataSrc, nil
}
// detectBackwardCompatAcquis: try to magically detect the type for backward compat (type was not mandatory then)
@@ -309,7 +309,7 @@ func LoadAcquisitionFromFile(config *csconfig.CrowdsecServiceCfg, prom *csconfig
transformRuntimes[uniqueId] = vm
}
sources = append(sources, *src)
sources = append(sources, src)
}
}


@@ -193,19 +193,19 @@ wowo: ajsajasjas
switch tc.TestName {
case "basic_valid_config":
mock := (*ds).Dump().(*MockSource)
mock := ds.Dump().(*MockSource)
assert.Equal(t, "test_value1", mock.Toto)
assert.Equal(t, "cat", mock.Mode)
assert.Equal(t, log.InfoLevel, mock.logger.Logger.Level)
assert.Equal(t, map[string]string{"test": "foobar"}, mock.Labels)
case "basic_debug_config":
mock := (*ds).Dump().(*MockSource)
mock := ds.Dump().(*MockSource)
assert.Equal(t, "test_value1", mock.Toto)
assert.Equal(t, "cat", mock.Mode)
assert.Equal(t, log.DebugLevel, mock.logger.Logger.Level)
assert.Equal(t, map[string]string{"test": "foobar"}, mock.Labels)
case "basic_tailmode_config":
mock := (*ds).Dump().(*MockSource)
mock := ds.Dump().(*MockSource)
assert.Equal(t, "test_value1", mock.Toto)
assert.Equal(t, "tail", mock.Mode)
assert.Equal(t, log.DebugLevel, mock.logger.Logger.Level)


@@ -53,15 +53,18 @@ func readLine(scanner *bufio.Scanner, out chan string, errChan chan error) error
txt := scanner.Text()
out <- txt
}
if errChan != nil && scanner.Err() != nil {
errChan <- scanner.Err()
close(errChan)
// the error is already consumed by runJournalCtl
return nil //nolint:nilerr
}
if errChan != nil {
close(errChan)
}
return nil
}
@@ -69,15 +72,17 @@ func (j *JournalCtlSource) runJournalCtl(ctx context.Context, out chan types.Eve
ctx, cancel := context.WithCancel(ctx)
cmd := exec.CommandContext(ctx, journalctlCmd, j.args...)
stdout, err := cmd.StdoutPipe()
if err != nil {
cancel()
return fmt.Errorf("could not get journalctl stdout: %s", err)
return fmt.Errorf("could not get journalctl stdout: %w", err)
}
stderr, err := cmd.StderrPipe()
if err != nil {
cancel()
return fmt.Errorf("could not get journalctl stderr: %s", err)
return fmt.Errorf("could not get journalctl stderr: %w", err)
}
stderrChan := make(chan string)
@@ -87,6 +92,7 @@ func (j *JournalCtlSource) runJournalCtl(ctx context.Context, out chan types.Eve
logger := j.logger.WithField("src", j.src)
logger.Infof("Running journalctl command: %s %s", cmd.Path, cmd.Args)
err = cmd.Start()
if err != nil {
cancel()
@@ -109,9 +115,11 @@ func (j *JournalCtlSource) runJournalCtl(ctx context.Context, out chan types.Eve
cmd.Wait()
return errors.New("failed to create stderr scanner")
}
t.Go(func() error {
return readLine(stdoutscanner, stdoutChan, errChan)
})
t.Go(func() error {
// looks like journalctl closes stderr quite early, so ignore its status (but not its output)
return readLine(stderrScanner, stderrChan, nil)
@@ -123,6 +131,7 @@ func (j *JournalCtlSource) runJournalCtl(ctx context.Context, out chan types.Eve
logger.Infof("journalctl datasource %s stopping", j.src)
cancel()
cmd.Wait() // avoid zombie process
return nil
case stdoutLine := <-stdoutChan:
l := types.Line{}
@@ -133,6 +142,7 @@ func (j *JournalCtlSource) runJournalCtl(ctx context.Context, out chan types.Eve
l.Src = j.src
l.Process = true
l.Module = j.GetName()
if j.metricsLevel != configuration.METRICS_NONE {
linesRead.With(prometheus.Labels{"source": j.src}).Inc()
}
@@ -149,6 +159,7 @@ func (j *JournalCtlSource) runJournalCtl(ctx context.Context, out chan types.Eve
logger.Debugf("errChan is closed, quitting")
t.Kill(nil)
}
if errScanner != nil {
t.Kill(errScanner)
}
@@ -170,6 +181,7 @@ func (j *JournalCtlSource) GetAggregMetrics() []prometheus.Collector {
func (j *JournalCtlSource) UnmarshalConfig(yamlConfig []byte) error {
j.config = JournalCtlConfiguration{}
err := yaml.UnmarshalStrict(yamlConfig, &j.config)
if err != nil {
return fmt.Errorf("cannot parse JournalCtlSource configuration: %w", err)
@@ -189,8 +201,11 @@ func (j *JournalCtlSource) UnmarshalConfig(yamlConfig []byte) error {
if len(j.config.Filters) == 0 {
return errors.New("journalctl_filter is required")
}
j.args = append(args, j.config.Filters...)
j.src = fmt.Sprintf("journalctl-%s", strings.Join(j.config.Filters, "."))
args = append(args, j.config.Filters...)
j.args = args
j.src = "journalctl-%s" + strings.Join(j.config.Filters, ".")
return nil
}
@@ -226,8 +241,9 @@ func (j *JournalCtlSource) ConfigureByDSN(dsn string, labels map[string]string,
params, err := url.ParseQuery(qs)
if err != nil {
return fmt.Errorf("could not parse journalctl DSN : %s", err)
return fmt.Errorf("could not parse journalctl DSN: %w", err)
}
for key, value := range params {
switch key {
case "filters":
@@ -236,10 +252,12 @@ func (j *JournalCtlSource) ConfigureByDSN(dsn string, labels map[string]string,
if len(value) != 1 {
return errors.New("expected zero or one value for 'log_level'")
}
lvl, err := log.ParseLevel(value[0])
if err != nil {
return fmt.Errorf("unknown level %s: %w", value[0], err)
}
j.logger.Logger.SetLevel(lvl)
case "since":
j.args = append(j.args, "--since", value[0])
@@ -247,7 +265,9 @@ func (j *JournalCtlSource) ConfigureByDSN(dsn string, labels map[string]string,
return fmt.Errorf("unsupported key %s in journalctl DSN", key)
}
}
j.args = append(j.args, j.config.Filters...)
return nil
}
@@ -261,8 +281,10 @@ func (j *JournalCtlSource) GetName() string {
func (j *JournalCtlSource) OneShotAcquisition(ctx context.Context, out chan types.Event, t *tomb.Tomb) error {
defer trace.CatchPanic("crowdsec/acquis/journalctl/oneshot")
err := j.runJournalCtl(ctx, out, t)
j.logger.Debug("Oneshot journalctl acquisition is done")
return err
}
@@ -271,6 +293,7 @@ func (j *JournalCtlSource) StreamingAcquisition(ctx context.Context, out chan ty
defer trace.CatchPanic("crowdsec/acquis/journalctl/streaming")
return j.runJournalCtl(ctx, out, t)
})
return nil
}


@@ -81,7 +81,7 @@ func TestConfigureDSN(t *testing.T) {
},
{
dsn: "journalctl://filters=%ZZ",
expectedErr: "could not parse journalctl DSN : invalid URL escape \"%ZZ\"",
expectedErr: "could not parse journalctl DSN: invalid URL escape \"%ZZ\"",
},
{
dsn: "journalctl://filters=_UID=42?log_level=warn",
@@ -191,6 +191,7 @@ journalctl_filter:
func TestStreaming(t *testing.T) {
ctx := context.Background()
if runtime.GOOS == "windows" {
t.Skip("Skipping test on windows")
}
@@ -270,7 +271,7 @@ journalctl_filter:
tomb.Wait()
output, _ := exec.Command("pgrep", "-x", "journalctl").CombinedOutput()
if string(output) != "" {
if len(output) != 0 {
t.Fatalf("Found a journalctl process after killing the tomb !")
}


@@ -209,7 +209,7 @@ func NewServer(ctx context.Context, config *csconfig.LocalApiServerCfg) (*APISer
gin.DefaultWriter = clog.Writer()
router.Use(gin.LoggerWithFormatter(func(param gin.LogFormatterParams) string {
return fmt.Sprintf("%s - [%s] \"%s %s %s %d %s \"%s\" %s\"\n",
return fmt.Sprintf("%s - [%s] \"%s %s %s %d %s %q %s\"\n",
param.ClientIP,
param.TimeStamp.Format(time.RFC1123),
param.Method,


@@ -132,7 +132,7 @@ func TestEvaluateProfile(t *testing.T) {
name: "simple pass single expr",
args: args{
profileCfg: &csconfig.ProfileCfg{
Filters: []string{fmt.Sprintf("Alert.GetScenario() == \"%s\"", scenario)},
Filters: []string{fmt.Sprintf("Alert.GetScenario() == %q", scenario)},
Debug: &boolFalse,
},
Alert: &models.Alert{Remediation: true, Scenario: &scenario},
@@ -199,17 +199,22 @@ func TestEvaluateProfile(t *testing.T) {
profilesCfg := []*csconfig.ProfileCfg{
tt.args.profileCfg,
}
profile, err := NewProfile(profilesCfg)
if err != nil {
t.Errorf("failed to get newProfile : %+v", err)
}
got, got1, _ := profile[0].EvaluateProfile(tt.args.Alert)
if !reflect.DeepEqual(len(got), tt.expectedDecisionCount) {
t.Errorf("EvaluateProfile() got = %+v, want %+v", got, tt.expectedDecisionCount)
}
if got1 != tt.expectedMatchStatus {
t.Errorf("EvaluateProfile() got1 = %v, want %v", got1, tt.expectedMatchStatus)
}
if tt.expectedDuration != "" {
require.Equal(t, tt.expectedDuration, *got[0].Duration, "The two durations should be the same")
}


@@ -270,7 +270,7 @@ func (p *ParserAssert) AutoGenParserAssert() string {
continue
}
base := fmt.Sprintf(`results["%s"]["%s"][%d].Evt.Unmarshaled["%s"]`, stage, parser, pidx, ukey)
base := fmt.Sprintf("results[%q][%q][%d].Evt.Unmarshaled[%q]", stage, parser, pidx, ukey)
for _, line := range p.buildUnmarshaledAssert(base, uval) {
ret += line
@@ -295,11 +295,11 @@ func (p *ParserAssert) buildUnmarshaledAssert(ekey string, eval interface{}) []s
switch val := eval.(type) {
case map[string]interface{}:
for k, v := range val {
ret = append(ret, p.buildUnmarshaledAssert(fmt.Sprintf(`%s["%s"]`, ekey, k), v)...)
ret = append(ret, p.buildUnmarshaledAssert(fmt.Sprintf("%s[%q]", ekey, k), v)...)
}
case map[interface{}]interface{}:
for k, v := range val {
ret = append(ret, p.buildUnmarshaledAssert(fmt.Sprintf(`%s["%s"]`, ekey, k), v)...)
ret = append(ret, p.buildUnmarshaledAssert(fmt.Sprintf("%s[%q]", ekey, k), v)...)
}
case []interface{}:
case string: