chore: Fix linter findings for `revive:enforce-slice-style` in `plugins/inputs/[p-z]*` (#16043)

This commit is contained in:
Paweł Żak 2024-10-24 11:03:31 +02:00 committed by GitHub
parent 6c45aefe6e
commit d9254c210f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
30 changed files with 66 additions and 82 deletions

View File

@@ -443,7 +443,7 @@ func TestSingleEntitiesMultipleCounterRead(t *testing.T) {
func TestNoCountersAvailable(t *testing.T) {
forwardingPipelineConfig := &p4.ForwardingPipelineConfig{
P4Info: &p4_config.P4Info{Counters: []*p4_config.Counter{}},
P4Info: &p4_config.P4Info{Counters: make([]*p4_config.Counter, 0)},
}
p4RtClient := &fakeP4RuntimeClient{

View File

@@ -336,8 +336,8 @@ func init() {
Deadline: 10,
Method: "exec",
Binary: "ping",
Arguments: []string{},
Percentiles: []int{},
Arguments: make([]string, 0),
Percentiles: make([]int, 0),
}
p.nativePingFunc = p.nativePing
return p

View File

@@ -89,7 +89,7 @@ func TestPostgresqlGeneratesMetricsIntegration(t *testing.T) {
"sessions_abandoned",
}
int32Metrics := []string{}
var int32Metrics []string
floatMetrics := []string{
"blk_read_time",

View File

@@ -88,7 +88,7 @@ func TestPostgresqlGeneratesMetricsIntegration(t *testing.T) {
"datid",
}
int32Metrics := []string{}
var int32Metrics []string
floatMetrics := []string{
"blk_read_time",
@@ -205,7 +205,7 @@ func TestPostgresqlFieldOutputIntegration(t *testing.T) {
"datid",
}
int32Metrics := []string{}
var int32Metrics []string
floatMetrics := []string{
"blk_read_time",

View File

@@ -72,8 +72,8 @@ func (pg *Pgrep) find(args []string) ([]PID, error) {
out := string(buf)
// Parse the command output to extract the PIDs
pids := []PID{}
fields := strings.Fields(out)
pids := make([]PID, 0, len(fields))
for _, field := range fields {
pid, err := strconv.ParseInt(field, 10, 32)
if err != nil {

View File

@@ -33,7 +33,7 @@ func TestMockExecCommand(_ *testing.T) {
var cmd []string //nolint:prealloc // Pre-allocated this slice would break the algorithm
for _, arg := range os.Args {
if arg == "--" {
cmd = []string{}
cmd = make([]string, 0)
continue
}
if cmd == nil {
@@ -139,7 +139,8 @@ func (p *testProc) SetTag(k, v string) {
}
func (p *testProc) MemoryMaps(bool) (*[]process.MemoryMapsStat, error) {
return &[]process.MemoryMapsStat{}, nil
stats := make([]process.MemoryMapsStat, 0)
return &stats, nil
}
func (p *testProc) Metrics(prefix string, cfg *collectionConfig, t time.Time) ([]telegraf.Metric, error) {

View File

@@ -64,7 +64,7 @@ func TestIPv4SW(t *testing.T) {
packet, err := hex.DecodeString(str)
require.NoError(t, err)
actual := []telegraf.Metric{}
actual := make([]telegraf.Metric, 0)
dc := newDecoder()
dc.OnPacket(func(p *v5Format) {
metrics := makeMetrics(p)
@@ -835,6 +835,6 @@ func TestFlowExpandCounter(t *testing.T) {
actual := makeMetrics(p)
// we don't do anything with samples yet
expected := []telegraf.Metric{}
expected := make([]telegraf.Metric, 0)
testutil.RequireMetricsEqual(t, expected, actual, testutil.IgnoreTime())
}

View File

@@ -10,7 +10,7 @@ import (
func makeMetrics(p *v5Format) []telegraf.Metric {
now := time.Now()
metrics := []telegraf.Metric{}
metrics := make([]telegraf.Metric, 0)
tags := map[string]string{
"agent_address": p.AgentAddress.String(),
}

View File

@@ -94,13 +94,13 @@ func (d *packetDecoder) DecodeOnePacket(r io.Reader) (*v5Format, error) {
}
func (d *packetDecoder) decodeSamples(r io.Reader) ([]sample, error) {
result := []sample{}
// # of samples
var numOfSamples uint32
if err := read(r, &numOfSamples, "sample count"); err != nil {
return nil, err
}
result := make([]sample, 0, numOfSamples)
for i := 0; i < int(numOfSamples); i++ {
sam, err := d.decodeSample(r)
if err != nil {

View File

@@ -511,7 +511,7 @@ func distinguishNVMeDevices(userDevices, availableNVMeDevices []string) []string
func (m *Smart) scanDevices(ignoreExcludes bool, scanArgs ...string) ([]string, error) {
out, err := runCmd(m.Timeout, m.UseSudo, m.PathSmartctl, scanArgs...)
if err != nil {
return []string{}, fmt.Errorf("failed to run command '%s %s': %w - %s", m.PathSmartctl, scanArgs, err, string(out))
return nil, fmt.Errorf("failed to run command '%s %s': %w - %s", m.PathSmartctl, scanArgs, err, string(out))
}
var devices []string
for _, line := range strings.Split(string(out), "\n") {

View File

@@ -140,7 +140,7 @@ func TestGatherInParallelMode(t *testing.T) {
require.NoError(t, err)
result := acc.GetTelegrafMetrics()
testutil.RequireMetricsEqual(t, []telegraf.Metric{}, result)
require.Empty(t, result)
})
}
@@ -186,7 +186,7 @@ func TestGatherNoAttributes(t *testing.T) {
func TestExcludedDev(t *testing.T) {
require.True(t, excludedDev([]string{"/dev/pass6"}, "/dev/pass6 -d atacam"), "Should be excluded.")
require.False(t, excludedDev([]string{}, "/dev/pass6 -d atacam"), "Shouldn't be excluded.")
require.False(t, excludedDev(make([]string, 0), "/dev/pass6 -d atacam"), "Shouldn't be excluded.")
require.False(t, excludedDev([]string{"/dev/pass6"}, "/dev/pass1 -d atacam"), "Shouldn't be excluded.")
}

View File

@@ -676,7 +676,6 @@ func TestSnmpInit_noTranslateGosmi(t *testing.T) {
}},
},
ClientConfig: snmp.ClientConfig{
Path: []string{},
Translator: "gosmi",
},
}

View File

@@ -251,8 +251,6 @@ func TestReceiveTrap(t *testing.T) {
},
},
},
entries: []entry{}, // nothing in cache
metrics: []telegraf.Metric{},
},
// v1 enterprise specific trap
{

View File

@@ -16,7 +16,7 @@ import (
func TestSqlServer_QueriesInclusionExclusion(t *testing.T) {
cases := []map[string]interface{}{
{
"IncludeQuery": []string{},
"IncludeQuery": make([]string, 0),
"ExcludeQuery": []string{"WaitStatsCategorized", "DatabaseIO", "ServerProperties", "MemoryClerk", "Schedulers", "VolumeSpace", "Cpu"},
"queries": []string{"PerformanceCounters", "SqlRequests"},
"queriesTotal": 2,

View File

@@ -490,7 +490,7 @@ func (s *stackdriver) generatetimeSeriesConfs(
return s.timeSeriesConfCache.TimeSeriesConfs, nil
}
ret := []*timeSeriesConf{}
ret := make([]*timeSeriesConf, 0)
req := &monitoringpb.ListMetricDescriptorsRequest{
Name: "projects/" + s.Project,
}
@@ -714,11 +714,10 @@ func (s *stackdriver) addDistribution(dist *distributionpb.Distribution, tags ma
func init() {
inputs.Add("stackdriver", func() telegraf.Input {
return &stackdriver{
CacheTTL: defaultCacheTTL,
RateLimit: defaultRateLimit,
Delay: defaultDelay,
GatherRawDistributionBuckets: true,
DistributionAggregationAligners: []string{},
CacheTTL: defaultCacheTTL,
RateLimit: defaultRateLimit,
Delay: defaultDelay,
GatherRawDistributionBuckets: true,
}
})
}

View File

@@ -55,7 +55,7 @@ func (m *MockStackdriverClient) ListTimeSeries(
}
func (m *MockStackdriverClient) Close() error {
call := &Call{name: "Close", args: []interface{}{}}
call := &Call{name: "Close", args: make([]interface{}, 0)}
m.Lock()
m.calls = append(m.calls, call)
m.Unlock()
@@ -64,11 +64,10 @@ func (m *MockStackdriverClient) Close() error {
func TestInitAndRegister(t *testing.T) {
expected := &stackdriver{
CacheTTL: defaultCacheTTL,
RateLimit: defaultRateLimit,
Delay: defaultDelay,
GatherRawDistributionBuckets: true,
DistributionAggregationAligners: []string{},
CacheTTL: defaultCacheTTL,
RateLimit: defaultRateLimit,
Delay: defaultDelay,
GatherRawDistributionBuckets: true,
}
require.Equal(t, expected, inputs.Inputs["stackdriver"]())
}
@@ -751,7 +750,7 @@ func TestGather(t *testing.T) {
require.Equalf(t, tt.wantAccErr, len(acc.Errors) > 0,
"Accumulator errors. got=%v, want=%t", acc.Errors, tt.wantAccErr)
actual := []telegraf.Metric{}
actual := make([]telegraf.Metric, 0, len(acc.Metrics))
for _, m := range acc.Metrics {
actual = append(actual, testutil.FromTestMetric(m))
}
@@ -874,7 +873,7 @@ func TestGatherAlign(t *testing.T) {
err := s.Gather(&acc)
require.NoError(t, err)
actual := []telegraf.Metric{}
actual := make([]telegraf.Metric, 0, len(acc.Metrics))
for _, m := range acc.Metrics {
actual = append(actual, testutil.FromTestMetric(m))
}

View File

@@ -1717,7 +1717,7 @@ func TestParse_TimingsMultipleFieldsWithTemplate(t *testing.T) {
// In this case the behaviour should be the same as normal behaviour
func TestParse_TimingsMultipleFieldsWithoutTemplate(t *testing.T) {
s := NewTestStatsd()
s.Templates = []string{}
s.Templates = make([]string, 0)
s.Percentiles = []Number{90.0}
acc := &testutil.Accumulator{}

View File

@@ -103,9 +103,7 @@ func TestShort_SampleData(t *testing.T) {
for _, tC := range testCases {
t.Run(tC.desc, func(t *testing.T) {
s := &Supervisor{
Server: "http://example.org:9001/RPC2",
MetricsInc: []string{},
MetricsExc: []string{},
Server: "http://example.org:9001/RPC2",
}
status := supervisorInfo{
StateCode: tC.supervisorData.StateCode,
@@ -152,9 +150,7 @@ func TestIntegration_BasicGathering(t *testing.T) {
require.NoError(t, err, "failed to start container")
defer ctr.Terminate()
s := &Supervisor{
Server: "http://login:pass@" + testutil.GetLocalHost() + ":" + ctr.Ports[supervisorPort] + "/RPC2",
MetricsInc: []string{},
MetricsExc: []string{},
Server: "http://login:pass@" + testutil.GetLocalHost() + ":" + ctr.Ports[supervisorPort] + "/RPC2",
}
err = s.Init()
require.NoError(t, err, "failed to run Init function")

View File

@@ -146,7 +146,7 @@ func (s *Sysstat) Gather(acc telegraf.Accumulator) error {
// The above command collects system metrics during <collectInterval> and
// saves it in binary form to tmpFile.
func (s *Sysstat) collect(tempfile string) error {
options := []string{}
options := make([]string, 0, len(s.Activities))
for _, act := range s.Activities {
options = append(options, "-S", act)
}

View File

@@ -23,7 +23,6 @@ func TestUniqueUsers(t *testing.T) {
{
name: "empty entry",
expected: 0,
data: []host.UserStat{},
},
{
name: "all duplicates",

View File

@@ -408,7 +408,6 @@ func init() {
UseSudo: false,
InstanceName: "",
Timeout: defaultTimeout,
Regexps: []string{},
}
})
}

View File

@@ -208,7 +208,7 @@ func NewClient(ctx context.Context, vSphereURL *url.URL, vs *VSphere) (*Client,
c.Timeout = time.Duration(vs.Timeout)
m := view.NewManager(c.Client)
v, err := m.CreateContainerView(ctx, c.ServiceContent.RootFolder, []string{}, true)
v, err := m.CreateContainerView(ctx, c.ServiceContent.RootFolder, make([]string, 0), true)
if err != nil {
return nil, err
}

View File

@@ -388,7 +388,7 @@ func (e *Endpoint) queryResyncSummary(ctx context.Context, vsanClient *soap.Clie
includeSummary := true
request := vsantypes.VsanQuerySyncingVsanObjects{
This: vsanSystemEx,
Uuids: []string{}, // We only need summary information.
Uuids: make([]string, 0), // We only need summary information.
Start: 0,
IncludeSummary: &includeSummary,
}

View File

@@ -291,19 +291,19 @@ func TestFinder(t *testing.T) {
require.Len(t, host, 1)
require.Equal(t, "DC0_H0", host[0].Name)
host = []mo.HostSystem{}
host = make([]mo.HostSystem, 0)
err = f.Find(ctx, "HostSystem", "/DC0/host/DC0_C0/DC0_C0_H0", &host)
require.NoError(t, err)
require.Len(t, host, 1)
require.Equal(t, "DC0_C0_H0", host[0].Name)
var resourcepool = []mo.ResourcePool{}
resourcepool := make([]mo.ResourcePool, 0)
err = f.Find(ctx, "ResourcePool", "/DC0/host/DC0_C0/Resources/DC0_C0_RP0", &resourcepool)
require.NoError(t, err)
require.Len(t, host, 1)
require.Equal(t, "DC0_C0_H0", host[0].Name)
host = []mo.HostSystem{}
host = make([]mo.HostSystem, 0)
err = f.Find(ctx, "HostSystem", "/DC0/host/DC0_C0/*", &host)
require.NoError(t, err)
require.Len(t, host, 3)
@@ -322,8 +322,8 @@ func TestFinder(t *testing.T) {
testLookupVM(ctx, t, &f, "/*/host/**/*DC*VM*", 8, "")
testLookupVM(ctx, t, &f, "/*/host/**/*DC*/*/*DC*", 4, "")
vm = []mo.VirtualMachine{}
err = f.FindAll(ctx, "VirtualMachine", []string{"/DC0/vm/DC0_H0*", "/DC0/vm/DC0_C0*"}, []string{}, &vm)
vm = make([]mo.VirtualMachine, 0)
err = f.FindAll(ctx, "VirtualMachine", []string{"/DC0/vm/DC0_H0*", "/DC0/vm/DC0_C0*"}, nil, &vm)
require.NoError(t, err)
require.Len(t, vm, 4)
@@ -333,7 +333,7 @@ func TestFinder(t *testing.T) {
excludePaths: []string{"/DC0/vm/DC0_H0_VM0"},
resType: "VirtualMachine",
}
vm = []mo.VirtualMachine{}
vm = make([]mo.VirtualMachine, 0)
require.NoError(t, rf.FindAll(ctx, &vm))
require.Len(t, vm, 3)
@@ -343,7 +343,7 @@ func TestFinder(t *testing.T) {
excludePaths: []string{"/**"},
resType: "VirtualMachine",
}
vm = []mo.VirtualMachine{}
vm = make([]mo.VirtualMachine, 0)
require.NoError(t, rf.FindAll(ctx, &vm))
require.Empty(t, vm)
@@ -353,7 +353,7 @@ func TestFinder(t *testing.T) {
excludePaths: []string{"/**"},
resType: "VirtualMachine",
}
vm = []mo.VirtualMachine{}
vm = make([]mo.VirtualMachine, 0)
require.NoError(t, rf.FindAll(ctx, &vm))
require.Empty(t, vm)
@@ -363,7 +363,7 @@ func TestFinder(t *testing.T) {
excludePaths: []string{"/this won't match anything"},
resType: "VirtualMachine",
}
vm = []mo.VirtualMachine{}
vm = make([]mo.VirtualMachine, 0)
require.NoError(t, rf.FindAll(ctx, &vm))
require.Len(t, vm, 8)
@@ -373,7 +373,7 @@ func TestFinder(t *testing.T) {
excludePaths: []string{"/**/*VM0"},
resType: "VirtualMachine",
}
vm = []mo.VirtualMachine{}
vm = make([]mo.VirtualMachine, 0)
require.NoError(t, rf.FindAll(ctx, &vm))
require.Len(t, vm, 4)
}
@@ -428,7 +428,7 @@ func TestVsanCmmds(t *testing.T) {
f := Finder{c}
var clusters []mo.ClusterComputeResource
err = f.FindAll(ctx, "ClusterComputeResource", []string{"/**"}, []string{}, &clusters)
err = f.FindAll(ctx, "ClusterComputeResource", []string{"/**"}, nil, &clusters)
require.NoError(t, err)
clusterObj := object.NewClusterComputeResource(c.Client.Client, clusters[0].Reference())

View File

@@ -116,7 +116,7 @@ func (wg *Wireguard) gatherDevicePeerMetrics(acc telegraf.Accumulator, device *w
}
if len(peer.AllowedIPs) > 0 {
cidrs := []string{}
cidrs := make([]string, 0, len(peer.AllowedIPs))
for _, ip := range peer.AllowedIPs {
cidrs = append(cidrs, ip.String())
}

View File

@@ -367,25 +367,24 @@ func TestGetTags(t *testing.T) {
expected map[string]string
}{
{
"no pools",
[]poolInfo{},
map[string]string{"pools": ""},
name: "no pools",
expected: map[string]string{"pools": ""},
},
{
"single pool",
[]poolInfo{
name: "single pool",
pools: []poolInfo{
{"data", "/proc/spl/kstat/zfs/data/objset-0x9288", v2},
},
map[string]string{"pools": "data"},
expected: map[string]string{"pools": "data"},
},
{
"duplicate pool names",
[]poolInfo{
name: "duplicate pool names",
pools: []poolInfo{
{"pool", "/proc/spl/kstat/zfs/pool/objset-0x23ce1", v2},
{"pool", "/proc/spl/kstat/zfs/pool/objset-0x2e", v2},
{"data", "/proc/spl/kstat/zfs/data/objset-0x9288", v2},
},
map[string]string{"pools": "pool::data"},
expected: map[string]string{"pools": "pool::data"},
},
}

View File

@@ -96,10 +96,8 @@ func Test_minMax(t *testing.T) {
},
},
{
name: "No Annotations",
span: &MockSpan{
Anno: []Annotation{},
},
name: "No Annotations",
span: &MockSpan{},
wantMin: time.Unix(2, 0).UTC(),
wantMax: time.Unix(2, 0).UTC(),
now: func() time.Time {
@@ -459,8 +457,8 @@ func TestNewTrace(t *testing.T) {
trace.Span{
ServiceName: "unknown",
Timestamp: time.Unix(0, 0).UTC(),
Annotations: []trace.Annotation{},
BinaryAnnotations: []trace.BinaryAnnotation{},
Annotations: make([]trace.Annotation, 0),
BinaryAnnotations: make([]trace.BinaryAnnotation, 0),
},
},
},
@@ -515,7 +513,7 @@ func TestNewTrace(t *testing.T) {
ServiceName: "myname",
},
},
BinaryAnnotations: []trace.BinaryAnnotation{},
BinaryAnnotations: make([]trace.BinaryAnnotation, 0),
},
},
},

View File

@@ -111,7 +111,7 @@ func TestUnmarshalThrift(t *testing.T) {
ParentID: addr(22964302721410078),
Timestamp: addr(1498688360851331),
Duration: addr(53106),
Annotations: []*zipkincore.Annotation{},
Annotations: make([]*zipkincore.Annotation, 0),
BinaryAnnotations: []*zipkincore.BinaryAnnotation{
{
Key: "lc",
@@ -131,7 +131,7 @@ func TestUnmarshalThrift(t *testing.T) {
ParentID: addr(22964302721410078),
Timestamp: addr(1498688360904552),
Duration: addr(50410),
Annotations: []*zipkincore.Annotation{},
Annotations: make([]*zipkincore.Annotation, 0),
BinaryAnnotations: []*zipkincore.BinaryAnnotation{
{
Key: "lc",

View File

@@ -40,7 +40,6 @@ func TestLineProtocolConverter_Record(t *testing.T) {
Timestamp: time.Unix(0, 1498688360851331000).UTC(),
Duration: time.Duration(53106) * time.Microsecond,
ServiceName: "trivial",
Annotations: []trace.Annotation{},
BinaryAnnotations: []trace.BinaryAnnotation{
{
Key: "lc",
@@ -58,7 +57,6 @@ func TestLineProtocolConverter_Record(t *testing.T) {
Timestamp: time.Unix(0, 1498688360904552000).UTC(),
Duration: time.Duration(50410) * time.Microsecond,
ServiceName: "trivial",
Annotations: []trace.Annotation{},
BinaryAnnotations: []trace.BinaryAnnotation{
{
Key: "lc",
@@ -287,7 +285,6 @@ func TestLineProtocolConverter_Record(t *testing.T) {
ServiceName: "go-zipkin-testclient",
},
},
BinaryAnnotations: []trace.BinaryAnnotation{},
},
},
},
@@ -336,7 +333,7 @@ func TestLineProtocolConverter_Record(t *testing.T) {
if err := l.Record(tt.args.t); (err != nil) != tt.wantErr {
t.Errorf("LineProtocolConverter.Record() error = %v, wantErr %v", err, tt.wantErr)
}
got := []testutil.Metric{}
got := make([]testutil.Metric, 0, len(mockAcc.Metrics))
for _, metric := range mockAcc.Metrics {
got = append(got, *metric)
}

View File

@@ -63,7 +63,7 @@ func TestSpanHandler(t *testing.T) {
Timestamp: time.Unix(0, 1498688360851331*int64(time.Microsecond)).UTC(),
Duration: time.Duration(53106) * time.Microsecond,
ServiceName: "trivial",
Annotations: []trace.Annotation{},
Annotations: make([]trace.Annotation, 0),
BinaryAnnotations: []trace.BinaryAnnotation{
{
Key: "lc",
@@ -81,7 +81,7 @@ func TestSpanHandler(t *testing.T) {
Timestamp: time.Unix(0, 1498688360904552*int64(time.Microsecond)).UTC(),
Duration: time.Duration(50410) * time.Microsecond,
ServiceName: "trivial",
Annotations: []trace.Annotation{},
Annotations: make([]trace.Annotation, 0),
BinaryAnnotations: []trace.BinaryAnnotation{
{
Key: "lc",