chore: Enable `revive:enforce-slice-style` rule (#16173)

Paweł Żak 2024-11-13 08:24:35 +01:00 committed by GitHub
parent 35fe105bb4
commit 0d30797c08
26 changed files with 41 additions and 52 deletions

View File

@@ -265,6 +265,8 @@ linters-settings:
       - name: enforce-map-style
         arguments: ["make"]
         exclude: [ "TEST" ]
+      - name: enforce-slice-style
+        arguments: ["make"]
       - name: error-naming
       - name: error-return
       - name: error-strings
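For readers unfamiliar with the rule: as far as I understand revive's `enforce-slice-style`, the `"make"` argument configured above makes the linter flag empty-slice literals such as `[]string{}` and expect `make([]T, 0)` instead (or `nil` where no slice value is needed at all). A minimal before/after sketch with illustrative names, not code from the Telegraf tree:

```go
package main

import "fmt"

// collectBefore uses the form the rule rejects under the "make" setting:
// an empty slice declared with a composite literal.
func collectBefore() []string {
	items := []string{} // would be flagged by enforce-slice-style
	items = append(items, "a", "b")
	return items
}

// collectAfter uses the accepted form; the capacity hint is optional but
// cheap when the final size is known.
func collectAfter() []string {
	items := make([]string, 0, 2)
	items = append(items, "a", "b")
	return items
}

func main() {
	fmt.Println(collectBefore(), collectAfter())
}
```

The remaining hunks in this commit are mechanical applications of that change across the code base.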

View File

@@ -127,7 +127,7 @@ func TestSetPrecision(t *testing.T) {
 func TestAddTrackingMetricGroupEmpty(t *testing.T) {
 	ch := make(chan telegraf.Metric, 10)
-	metrics := []telegraf.Metric{}
+	metrics := make([]telegraf.Metric, 0)
 	acc := NewAccumulator(&TestMetricMaker{}, ch).WithTracking(1)
 	id := acc.AddTrackingMetricGroup(metrics)

View File

@@ -83,7 +83,6 @@ func TestAgent_LoadOutput(t *testing.T) {
 	require.Len(t, a.Config.Outputs, 1)

 	c = config.NewConfig()
-	c.OutputFilters = []string{}
 	err = c.LoadConfig("../config/testdata/telegraf-agent.toml")
 	require.NoError(t, err)
 	a = NewAgent(c)

View File

@@ -37,8 +37,7 @@ func TestAlignedTicker(t *testing.T) {
 		time.Unix(60, 0).UTC(),
 	}
-	actual := []time.Time{}
+	actual := make([]time.Time, 0)
 	clk.Add(10 * time.Second)
 	for !clk.Now().After(until) {
 		tm := <-ticker.Elapsed()
@@ -109,8 +108,7 @@ func TestAlignedTickerOffset(t *testing.T) {
 		time.Unix(53, 0).UTC(),
 	}
-	actual := []time.Time{}
+	actual := make([]time.Time, 0)
 	clk.Add(10*time.Second + offset)
 	for !clk.Now().After(until) {
 		tm := <-ticker.Elapsed()
@@ -174,7 +172,7 @@ func TestUnalignedTicker(t *testing.T) {
 		time.Unix(61, 0).UTC(),
 	}
-	actual := []time.Time{}
+	actual := make([]time.Time, 0)
 	for !clk.Now().After(until) {
 		select {
 		case tm := <-ticker.Elapsed():
@@ -215,7 +213,7 @@ func TestRollingTicker(t *testing.T) {
 		time.Unix(61, 0).UTC(),
 	}
-	actual := []time.Time{}
+	actual := make([]time.Time, 0)
 	for !clk.Now().After(until) {
 		select {
 		case tm := <-ticker.Elapsed():

View File

@@ -9,7 +9,7 @@ import (
 )

 func cliFlags() []cli.Flag {
-	return []cli.Flag{}
+	return make([]cli.Flag, 0)
 }

 func getServiceCommands(io.Writer) []*cli.Command {

View File

@@ -24,7 +24,7 @@ var (
 	inputDefaults = []string{"cpu", "mem", "swap", "system", "kernel", "processes", "disk", "diskio"}

 	// Default output plugins
-	outputDefaults = []string{}
+	outputDefaults = make([]string, 0)
 )

 var header = `# Telegraf Configuration
@@ -126,7 +126,7 @@ func printSampleConfig(outputBuffer io.Writer, filters Filters) {
 			printFilteredSecretstores(secretstoreFilters, false, outputBuffer)
 		} else {
 			fmt.Print(secretstoreHeader)
-			snames := []string{}
+			snames := make([]string, 0, len(secretstores.SecretStores))
 			for sname := range secretstores.SecretStores {
 				snames = append(snames, sname)
 			}
@@ -165,7 +165,7 @@ func printSampleConfig(outputBuffer io.Writer, filters Filters) {
 			printFilteredProcessors(processorFilters, false, outputBuffer)
 		} else {
 			outputBuffer.Write([]byte(processorHeader))
-			pnames := []string{}
+			pnames := make([]string, 0, len(processors.Processors))
 			for pname := range processors.Processors {
 				pnames = append(pnames, pname)
 			}
@@ -182,7 +182,7 @@ func printSampleConfig(outputBuffer io.Writer, filters Filters) {
 			printFilteredAggregators(aggregatorFilters, false, outputBuffer)
 		} else {
 			outputBuffer.Write([]byte(aggregatorHeader))
-			pnames := []string{}
+			pnames := make([]string, 0, len(aggregators.Aggregators))
 			for pname := range aggregators.Aggregators {
 				pnames = append(pnames, pname)
 			}
@@ -261,7 +261,7 @@ func printFilteredInputs(inputFilters []string, commented bool, outputBuffer io.
 	// cache service inputs to print them at the end
 	servInputs := make(map[string]telegraf.ServiceInput)
 	// for alphabetical looping:
-	servInputNames := []string{}
+	servInputNames := make([]string, 0, len(pnames))

 	// Print Inputs
 	for _, pname := range pnames {
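Several of the replacements above go a step further than the rule requires: because the slice is filled by a follow-up append loop over a map whose size is already known, they pass that size as the capacity. A small illustrative sketch of the pattern, where the `plugins` map is hypothetical and stands in for registries such as `processors.Processors`:

```go
package main

import (
	"fmt"
	"sort"
)

func main() {
	// Hypothetical registry map, standing in for processors.Processors etc.
	plugins := map[string]bool{"converter": true, "rename": true, "dedup": true}

	// Zero length, capacity len(plugins): satisfies enforce-slice-style and
	// lets the append loop below run without reallocations.
	names := make([]string, 0, len(plugins))
	for name := range plugins {
		names = append(names, name)
	}
	sort.Strings(names) // for alphabetical looping, as in the code above
	fmt.Println(names, cap(names))
}
```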

View File

@@ -434,7 +434,7 @@ func GetDefaultConfigPath() ([]string, error) {
 	// At this point we need to check if the files under /etc/telegraf are
 	// populated and return them all.
-	confFiles := []string{}
+	confFiles := make([]string, 0)
 	if _, err := os.Stat(etcfile); err == nil {
 		confFiles = append(confFiles, etcfile)
 	}
@@ -1805,7 +1805,7 @@ func (c *Config) getFieldTagFilter(tbl *ast.Table, fieldName string) []models.Ta
 }

 func keys(m map[string]bool) []string {
-	result := []string{}
+	result := make([]string, 0, len(m))
 	for k := range m {
 		result = append(result, k)
 	}

View File

@@ -479,8 +479,6 @@ func TestConfig_InlineTables(t *testing.T) {
 }

 func TestConfig_SliceComment(t *testing.T) {
-	t.Skipf("Skipping until #3642 is resolved")
-
 	c := config.NewConfig()
 	require.NoError(t, c.LoadConfig("./testdata/slice_comment.toml"))
 	require.Len(t, c.Outputs, 1)
@@ -1575,7 +1573,6 @@ func (m *MockupStatePlugin) Init() error {
 	}

 	m.state = MockupState{
 		Name:     "mockup",
-		Bits:     []int{},
 		Modified: t0,
 	}

View File

@@ -67,7 +67,7 @@ func TestGettingMissingResolver(t *testing.T) {
 	mysecret := "a @{referenced:secret}"
 	s := NewSecret([]byte(mysecret))
 	defer s.Destroy()
-	s.unlinked = []string{}
+	s.unlinked = make([]string, 0)
 	s.resolvers = map[string]telegraf.ResolveFunc{
 		"@{a:dummy}": func() ([]byte, bool, error) {
 			return nil, false, nil
@@ -82,7 +82,7 @@ func TestGettingResolverError(t *testing.T) {
 	mysecret := "a @{referenced:secret}"
 	s := NewSecret([]byte(mysecret))
 	defer s.Destroy()
-	s.unlinked = []string{}
+	s.unlinked = make([]string, 0)
 	s.resolvers = map[string]telegraf.ResolveFunc{
 		"@{referenced:secret}": func() ([]byte, bool, error) {
 			return nil, false, errors.New("broken")
@@ -111,7 +111,7 @@ func TestEnclaveOpenError(t *testing.T) {
 	err := s.Link(map[string]telegraf.ResolveFunc{})
 	require.ErrorContains(t, err, "opening enclave failed")

-	s.unlinked = []string{}
+	s.unlinked = make([]string, 0)
 	_, err = s.Get()
 	require.ErrorContains(t, err, "opening enclave failed")
 }

View File

@@ -7,7 +7,7 @@ import (
 )

 func TestCompile(t *testing.T) {
-	f, err := Compile([]string{})
+	f, err := Compile(nil)
 	require.NoError(t, err)
 	require.Nil(t, f)
@@ -50,10 +50,10 @@ func TestCompile(t *testing.T) {
 }

 func TestIncludeExclude(t *testing.T) {
-	tags := []string{}
 	labels := []string{"best", "com_influxdata", "timeseries", "com_influxdata_telegraf", "ever"}
+	tags := make([]string, 0, len(labels))

-	filter, err := NewIncludeExcludeFilter([]string{}, []string{"com_influx*"})
+	filter, err := NewIncludeExcludeFilter(nil, []string{"com_influx*"})
 	if err != nil {
 		t.Fatalf("Failed to create include/exclude filter - %v", err)
 	}
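In the filter tests above the empty slices are only ever read, so the commit passes `nil` rather than allocating an empty value at all; a nil slice has length zero and ranges like an empty one. A minimal sketch of that equivalence, where the `match` helper is made up for the example:

```go
package main

import "fmt"

// match is a made-up stand-in for a filter that only reads its
// include/exclude arguments.
func match(include, exclude []string, key string) bool {
	for _, e := range exclude {
		if e == key {
			return false
		}
	}
	if len(include) == 0 { // nil and empty slices behave identically here
		return true
	}
	for _, i := range include {
		if i == key {
			return true
		}
	}
	return false
}

func main() {
	// Passing nil instead of []string{} for the include list.
	fmt.Println(match(nil, []string{"com_influxdata"}, "timeseries")) // true
}
```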

View File

@@ -45,7 +45,7 @@ func New(command []string, envs []string) (*Process, error) {
 	p := &Process{
 		RestartDelay: 5 * time.Second,
 		name:         command[0],
-		args:         []string{},
+		args:         make([]string, 0),
 		envs:         envs,
 	}

View File

@@ -181,7 +181,7 @@ func TestConvertHextoint(t *testing.T) {
 		{
 			name:       "big endian invalid",
 			conversion: "hextoint:BigEndian:invalid",
-			ent:        gosnmp.SnmpPDU{Type: gosnmp.OctetString, Value: []uint8{}},
+			ent:        gosnmp.SnmpPDU{Type: gosnmp.OctetString, Value: make([]uint8, 0)},
 			errmsg:     "invalid bit value",
 		},
 		{
@@ -223,13 +223,13 @@ func TestConvertHextoint(t *testing.T) {
 		{
 			name:       "little endian invalid",
 			conversion: "hextoint:LittleEndian:invalid",
-			ent:        gosnmp.SnmpPDU{Type: gosnmp.OctetString, Value: []byte{}},
+			ent:        gosnmp.SnmpPDU{Type: gosnmp.OctetString, Value: make([]byte, 0)},
 			errmsg:     "invalid bit value",
 		},
 		{
 			name:       "invalid",
 			conversion: "hextoint:invalid:uint64",
-			ent:        gosnmp.SnmpPDU{Type: gosnmp.OctetString, Value: []byte{}},
+			ent:        gosnmp.SnmpPDU{Type: gosnmp.OctetString, Value: make([]byte, 0)},
 			errmsg:     "invalid Endian value",
 		},
 	}

View File

@@ -92,8 +92,8 @@ func LoadMibsFromPath(paths []string, log telegraf.Logger, loader MibLoader) err
 // should walk the paths given and find all folders
 func walkPaths(paths []string, log telegraf.Logger) ([]string, error) {
 	once.Do(gosmi.Init)
-	folders := []string{}
+	folders := make([]string, 0)
 	for _, mibPath := range paths {
 		// Check if we loaded that path already and skip it if so
 		m.Lock()

View File

@@ -28,7 +28,7 @@ func TestMockExecCommand(_ *testing.T) {
 	var cmd []string //nolint:prealloc // Pre-allocated this slice would break the algorithm
 	for _, arg := range os.Args {
 		if arg == "--" {
-			cmd = []string{}
+			cmd = make([]string, 0)
 			continue
 		}
 		if cmd == nil {

View File

@@ -23,7 +23,7 @@ import (
 func NewSeriesGrouper() *SeriesGrouper {
 	return &SeriesGrouper{
 		metrics:  make(map[uint64]telegraf.Metric),
-		ordered:  []telegraf.Metric{},
+		ordered:  make([]telegraf.Metric, 0),
 		hashSeed: maphash.MakeSeed(),
 	}
 }

View File

@@ -127,7 +127,7 @@ func (b *DiskBuffer) Batch(batchSize int) []telegraf.Metric {
 	if b.length() == 0 {
 		// no metrics in the wal file, so return an empty array
-		return []telegraf.Metric{}
+		return make([]telegraf.Metric, 0)
 	}
 	b.batchFirst = b.readIndex()
 	var metrics []telegraf.Metric

View File

@@ -206,7 +206,7 @@ func (f *Filter) shouldTagsPass(tags []*telegraf.Tag) bool {
 // filterFields removes fields according to fieldinclude/fieldexclude.
 func (f *Filter) filterFields(metric telegraf.Metric) {
-	filterKeys := []string{}
+	filterKeys := make([]string, 0, len(metric.FieldList()))
 	for _, field := range metric.FieldList() {
 		if !ShouldPassFilters(f.fieldIncludeFilter, f.fieldExcludeFilter, field.Key) {
 			filterKeys = append(filterKeys, field.Key)
@@ -220,7 +220,7 @@ func (f *Filter) filterFields(metric telegraf.Metric) {
 // filterTags removes tags according to taginclude/tagexclude.
 func (f *Filter) filterTags(metric telegraf.Metric) {
-	filterKeys := []string{}
+	filterKeys := make([]string, 0, len(metric.TagList()))
 	for _, tag := range metric.TagList() {
 		if !ShouldPassFilters(f.tagIncludeFilter, f.tagExcludeFilter, tag.Key) {
 			filterKeys = append(filterKeys, tag.Key)

View File

@@ -777,10 +777,6 @@ func (m *mockOutput) Write(metrics []telegraf.Metric) error {
 		return errors.New("failed write")
 	}

-	if m.metrics == nil {
-		m.metrics = []telegraf.Metric{}
-	}
-
 	m.metrics = append(m.metrics, metrics...)
 	return nil
 }

View File

@@ -718,7 +718,7 @@ func TestBasicStatsWithAllStats(t *testing.T) {
 // Test that if an empty array is passed, no points are pushed
 func TestBasicStatsWithNoStats(t *testing.T) {
 	aggregator := NewBasicStats()
-	aggregator.Stats = []string{}
+	aggregator.Stats = make([]string, 0)
 	aggregator.Log = testutil.Logger{}
 	aggregator.initConfiguredStats()

View File

@@ -139,9 +139,8 @@ func (h *HistogramAggregator) Add(in telegraf.Metric) {
 // Push returns histogram values for metrics
 func (h *HistogramAggregator) Push(acc telegraf.Accumulator) {
-	metricsWithGroupedFields := []groupedByCountFields{}
-
 	now := timeNow()
+	metricsWithGroupedFields := make([]groupedByCountFields, 0)

 	for id, aggregate := range h.cache {
 		if h.ExpirationInterval != 0 && now.After(aggregate.expireTime) {
 			delete(h.cache, id)

View File

@@ -11,5 +11,5 @@ func (*DMCache) Gather(_ telegraf.Accumulator) error {
 }

 func dmSetupStatus() ([]string, error) {
-	return []string{}, nil
+	return make([]string, 0), nil
 }

View File

@@ -99,7 +99,7 @@ func (m *FakePerformanceQuery) ExpandWildCardPath(counterPath string) ([]string,
 	if e, ok := m.expandPaths[counterPath]; ok {
 		return e, nil
 	}
-	return []string{}, fmt.Errorf("in ExpandWildCardPath: invalid counter path: %q", counterPath)
+	return nil, fmt.Errorf("in ExpandWildCardPath: invalid counter path: %q", counterPath)
 }

 func (m *FakePerformanceQuery) GetFormattedCounterValueDouble(counterHandle pdhCounterHandle) (float64, error) {

View File

@@ -50,7 +50,7 @@ func TestEmptyListIntegration(t *testing.T) {
 	}()

 	winServices := &WinServices{
-		ServiceNames: []string{},
+		ServiceNames: make([]string, 0),
 	}
 	require.NoError(t, winServices.Init())

View File

@@ -78,9 +78,7 @@ func (c *Container) Start() error {
 	}
 	c.container = cntnr

-	c.Logs = TestLogConsumer{
-		Msgs: []string{},
-	}
+	c.Logs = TestLogConsumer{}
 	c.container.FollowOutput(&c.Logs)
 	err = c.container.StartLogProducer(c.ctx)
 	if err != nil {

View File

@@ -97,7 +97,7 @@ func checkFile(filename string, pluginType plugin, sourceFlag bool) (bool, error
 	scanner := bufio.NewScanner(bytes.NewReader(md))
 	scanner.Split(bufio.ScanRunes)
 	offset := 0
-	newlineOffsets := []int{}
+	newlineOffsets := make([]int, 0)
 	for scanner.Scan() {
 		if scanner.Text() == "\n" {
 			newlineOffsets = append(newlineOffsets, offset)

View File

@@ -92,7 +92,7 @@ func noLongLinesInParagraphs(threshold int) func(*T, ast.Node) error {
 	return func(t *T, root ast.Node) error {
 		// We're looking for long lines in paragraphs. Find paragraphs
 		// first, then which lines are in paragraphs
-		paraLines := []int{}
+		paraLines := make([]int, 0)
 		for n := root.FirstChild(); n != nil; n = n.NextSibling() {
 			var p *ast.Paragraph
 			var ok bool
@@ -108,7 +108,7 @@ func noLongLinesInParagraphs(threshold int) func(*T, ast.Node) error {
 		}

 		// Find long lines in the whole file
-		longLines := []int{}
+		longLines := make([]int, 0, len(t.newlineOffsets))
 		last := 0
 		for i, cur := range t.newlineOffsets {
 			length := cur - last - 1 // -1 to exclude the newline
@@ -121,7 +121,7 @@ func noLongLinesInParagraphs(threshold int) func(*T, ast.Node) error {
 		// Merge both lists
 		p := 0
 		l := 0
-		bads := []int{}
+		bads := make([]int, 0, max(len(paraLines), len(longLines)))
 		for p < len(paraLines) && l < len(longLines) {
 			long := longLines[l]
 			para := paraLines[p]