chore: fix linter findings for makezero (part3) (#12371)

Paweł Żak, 2022-12-12 15:05:33 +01:00, committed by GitHub
parent 6fb840085d
commit c6663aca4f
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
15 changed files with 90 additions and 89 deletions
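
For context, every hunk below applies the same transformation: the makezero linter flags slices that are allocated with a non-zero length and then grown with append, because make([]T, n) already holds n zero-value elements, so appending afterwards leaves leading zero entries or duplicates. The fix used throughout this commit is to allocate with zero length and a pre-sized capacity, then build the slice with append. A minimal sketch of the before/after pattern (the names ids and names are illustrative, not taken from the diff):

package main

import "fmt"

func main() {
	ids := []int{1, 2, 3} // hypothetical input, not from the diff

	// Before: the pattern makezero flags. The slice already has len(ids)
	// zero-value elements; mixing this style with append would duplicate data.
	names := make([]string, len(ids))
	for i, id := range ids {
		names[i] = fmt.Sprintf("node-%d", id)
	}

	// After: the pattern this commit switches to — zero length,
	// pre-sized capacity, grown with append.
	fixed := make([]string, 0, len(ids))
	for _, id := range ids {
		fixed = append(fixed, fmt.Sprintf("node-%d", id))
	}

	fmt.Println(names, fixed)
}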

View File

@@ -245,17 +245,20 @@ func (s *Subscription) buildSubscription() (*gnmiLib.Subscription, error) {
 // Create a new gNMI SubscribeRequest
 func (c *GNMI) newSubscribeRequest() (*gnmiLib.SubscribeRequest, error) {
 	// Create subscription objects
-	var err error
-	subscriptions := make([]*gnmiLib.Subscription, len(c.Subscriptions)+len(c.TagSubscriptions))
-	for i, subscription := range c.TagSubscriptions {
-		if subscriptions[i], err = subscription.buildSubscription(); err != nil {
+	subscriptions := make([]*gnmiLib.Subscription, 0, len(c.Subscriptions)+len(c.TagSubscriptions))
+	for _, subscription := range c.TagSubscriptions {
+		sub, err := subscription.buildSubscription()
+		if err != nil {
 			return nil, err
 		}
+		subscriptions = append(subscriptions, sub)
 	}
-	for i, subscription := range c.Subscriptions {
-		if subscriptions[i+len(c.TagSubscriptions)], err = subscription.buildSubscription(); err != nil {
+	for _, subscription := range c.Subscriptions {
+		sub, err := subscription.buildSubscription()
+		if err != nil {
 			return nil, err
 		}
+		subscriptions = append(subscriptions, sub)
 	}
 
 	// Construct subscribe request

View File

@@ -83,23 +83,23 @@ func BenchmarkInfluxDBListener_serveWrite(b *testing.B) {
 }
 
 func lines(lines, numTags, numFields int) string {
-	lp := make([]string, lines)
+	lp := make([]string, 0, lines)
 	for i := 0; i < lines; i++ {
-		tags := make([]string, numTags)
+		tags := make([]string, 0, numTags)
 		for j := 0; j < numTags; j++ {
-			tags[j] = fmt.Sprintf("t%d=v%d", j, j)
+			tags = append(tags, fmt.Sprintf("t%d=v%d", j, j))
 		}
-		fields := make([]string, numFields)
+		fields := make([]string, 0, numFields)
 		for k := 0; k < numFields; k++ {
-			fields[k] = fmt.Sprintf("f%d=%d", k, k)
+			fields = append(fields, fmt.Sprintf("f%d=%d", k, k))
 		}
-		lp[i] = fmt.Sprintf("m%d,%s %s",
+		lp = append(lp, fmt.Sprintf("m%d,%s %s",
 			i,
 			strings.Join(tags, ","),
 			strings.Join(fields, ","),
-		)
+		))
 	}
 	return strings.Join(lp, "\n")

View File

@@ -83,23 +83,23 @@ func BenchmarkInfluxDBV2Listener_serveWrite(b *testing.B) {
 }
 
 func lines(lines, numTags, numFields int) string {
-	lp := make([]string, lines)
+	lp := make([]string, 0, lines)
 	for i := 0; i < lines; i++ {
-		tags := make([]string, numTags)
+		tags := make([]string, 0, numTags)
 		for j := 0; j < numTags; j++ {
-			tags[j] = fmt.Sprintf("t%d=v%d", j, j)
+			tags = append(tags, fmt.Sprintf("t%d=v%d", j, j))
 		}
-		fields := make([]string, numFields)
+		fields := make([]string, 0, numFields)
 		for k := 0; k < numFields; k++ {
-			fields[k] = fmt.Sprintf("f%d=%d", k, k)
+			fields = append(fields, fmt.Sprintf("f%d=%d", k, k))
 		}
-		lp[i] = fmt.Sprintf("m%d,%s %s",
+		lp = append(lp, fmt.Sprintf("m%d,%s %s",
 			i,
 			strings.Join(tags, ","),
 			strings.Join(fields, ","),
-		)
+		))
 	}
 	return strings.Join(lp, "\n")

View File

@@ -606,12 +606,14 @@ func (m *Mysql) gatherSlaveStatuses(db *sql.DB, serv string, acc telegraf.Accumu
 	if err != nil {
 		return err
 	}
-	vals := make([]sql.RawBytes, len(cols))
-	valPtrs := make([]interface{}, len(cols))
+	vals := make([]sql.RawBytes, 0, len(cols))
+	valPtrs := make([]interface{}, 0, len(cols))
 	// fill the array with sql.Rawbytes
-	for i := range vals {
-		vals[i] = sql.RawBytes{}
-		valPtrs[i] = &vals[i]
+	for range cols {
+		rawBytes := sql.RawBytes{}
+		vals = append(vals, rawBytes)
+		valPtrs = append(valPtrs, &rawBytes)
 	}
 	if err = rows.Scan(valPtrs...); err != nil {
 		return err

View File

@@ -57,10 +57,10 @@ func (o *ReadClient) Connect() error {
 		return err
 	}
-	readValueIds := make([]*ua.ReadValueID, len(o.NodeIDs))
+	readValueIds := make([]*ua.ReadValueID, 0, len(o.NodeIDs))
 	if o.Workarounds.UseUnregisteredReads {
-		for i, nid := range o.NodeIDs {
-			readValueIds[i] = &ua.ReadValueID{NodeID: nid}
+		for _, nid := range o.NodeIDs {
+			readValueIds = append(readValueIds, &ua.ReadValueID{NodeID: nid})
 		}
 	} else {
 		regResp, err := o.Client.RegisterNodes(&ua.RegisterNodesRequest{
@@ -70,8 +70,8 @@ func (o *ReadClient) Connect() error {
 			return fmt.Errorf("registerNodes failed: %v", err)
 		}
-		for i, v := range regResp.RegisteredNodeIDs {
-			readValueIds[i] = &ua.ReadValueID{NodeID: v}
+		for _, v := range regResp.RegisteredNodeIDs {
+			readValueIds = append(readValueIds, &ua.ReadValueID{NodeID: v})
 		}
 	}

View File

@@ -87,9 +87,9 @@ func (pg *NativeFinder) FastProcessList() ([]*process.Process, error) {
 		return nil, err
 	}
-	result := make([]*process.Process, len(pids))
-	for i, pid := range pids {
-		result[i] = &process.Process{Pid: pid}
+	result := make([]*process.Process, 0, len(pids))
+	for _, pid := range pids {
+		result = append(result, &process.Process{Pid: pid})
 	}
 	return result, nil
 }

View File

@@ -16,6 +16,7 @@ import (
 	"time"
 
 	"github.com/go-redis/redis/v8"
 	"github.com/influxdata/telegraf"
 	"github.com/influxdata/telegraf/plugins/common/tls"
 	"github.com/influxdata/telegraf/plugins/inputs"
@@ -228,9 +229,8 @@ func (r *Redis) connect() error {
 		r.Servers = []string{"tcp://localhost:6379"}
 	}
-	r.clients = make([]Client, len(r.Servers))
-
-	for i, serv := range r.Servers {
+	r.clients = make([]Client, 0, len(r.Servers))
+	for _, serv := range r.Servers {
 		if !strings.HasPrefix(serv, "tcp://") && !strings.HasPrefix(serv, "unix://") {
 			r.Log.Warn("Server URL found without scheme; please update your configuration file")
 			serv = "tcp://" + serv
@@ -288,10 +288,10 @@ func (r *Redis) connect() error {
 			tags["port"] = u.Port()
 		}
-		r.clients[i] = &RedisClient{
+		r.clients = append(r.clients, &RedisClient{
 			client: client,
 			tags:   tags,
-		}
+		})
 	}
 
 	r.connected = true

View File

@@ -53,14 +53,13 @@ func (r *RedisSentinel) Init() error {
 		r.Servers = []string{"tcp://localhost:26379"}
 	}
-	r.clients = make([]*RedisSentinelClient, len(r.Servers))
-
 	tlsConfig, err := r.ClientConfig.TLSConfig()
 	if err != nil {
 		return err
 	}
-	for i, serv := range r.Servers {
+	r.clients = make([]*RedisSentinelClient, 0, len(r.Servers))
+	for _, serv := range r.Servers {
 		u, err := url.Parse(serv)
 		if err != nil {
 			return fmt.Errorf("unable to parse to address %q: %v", serv, err)
@@ -96,10 +95,10 @@ func (r *RedisSentinel) Init() error {
 			},
 		)
-		r.clients[i] = &RedisSentinelClient{
+		r.clients = append(r.clients, &RedisSentinelClient{
 			sentinel: sentinel,
 			tags:     tags,
-		}
+		})
 	}
 
 	return nil

View File

@@ -18,12 +18,11 @@ import (
 func pwgen(n int) string {
 	charset := []byte("abcdedfghijklmnopqrstABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
 	nchars := len(charset)
-	buffer := make([]byte, n)
-	for i := range buffer {
-		buffer[i] = charset[rand.Intn(nchars)]
+	buffer := make([]byte, 0, n)
+	for i := 0; i < n; i++ {
+		buffer = append(buffer, charset[rand.Intn(nchars)])
 	}
 	return string(buffer)

View File

@@ -270,15 +270,15 @@ func (s *Stackdriver) newListTimeSeriesFilter(metricType string) string {
 	var valueFmt string
 	if len(s.Filter.ResourceLabels) > 0 {
-		resourceLabelsFilter := make([]string, len(s.Filter.ResourceLabels))
-		for i, resourceLabel := range s.Filter.ResourceLabels {
+		resourceLabelsFilter := make([]string, 0, len(s.Filter.ResourceLabels))
+		for _, resourceLabel := range s.Filter.ResourceLabels {
 			// check if resource label value contains function
 			if includeExcludeHelper(resourceLabel.Value, functions, nil) {
 				valueFmt = `resource.labels.%s = %s`
 			} else {
 				valueFmt = `resource.labels.%s = "%s"`
 			}
-			resourceLabelsFilter[i] = fmt.Sprintf(valueFmt, resourceLabel.Key, resourceLabel.Value)
+			resourceLabelsFilter = append(resourceLabelsFilter, fmt.Sprintf(valueFmt, resourceLabel.Key, resourceLabel.Value))
 		}
 		if len(resourceLabelsFilter) == 1 {
 			filterString += fmt.Sprintf(" AND %s", resourceLabelsFilter[0])
@@ -288,15 +288,15 @@ func (s *Stackdriver) newListTimeSeriesFilter(metricType string) string {
 	}
 	if len(s.Filter.MetricLabels) > 0 {
-		metricLabelsFilter := make([]string, len(s.Filter.MetricLabels))
-		for i, metricLabel := range s.Filter.MetricLabels {
+		metricLabelsFilter := make([]string, 0, len(s.Filter.MetricLabels))
+		for _, metricLabel := range s.Filter.MetricLabels {
 			// check if metric label value contains function
 			if includeExcludeHelper(metricLabel.Value, functions, nil) {
 				valueFmt = `metric.labels.%s = %s`
 			} else {
 				valueFmt = `metric.labels.%s = "%s"`
 			}
-			metricLabelsFilter[i] = fmt.Sprintf(valueFmt, metricLabel.Key, metricLabel.Value)
+			metricLabelsFilter = append(metricLabelsFilter, fmt.Sprintf(valueFmt, metricLabel.Key, metricLabel.Value))
 		}
 		if len(metricLabelsFilter) == 1 {
 			filterString += fmt.Sprintf(" AND %s", metricLabelsFilter[0])
@@ -429,9 +429,9 @@ func (s *Stackdriver) newListMetricDescriptorsFilters() []string {
 		return nil
 	}
-	metricTypeFilters := make([]string, len(s.MetricTypePrefixInclude))
-	for i, metricTypePrefix := range s.MetricTypePrefixInclude {
-		metricTypeFilters[i] = fmt.Sprintf(`metric.type = starts_with(%q)`, metricTypePrefix)
+	metricTypeFilters := make([]string, 0, len(s.MetricTypePrefixInclude))
+	for _, metricTypePrefix := range s.MetricTypePrefixInclude {
+		metricTypeFilters = append(metricTypeFilters, fmt.Sprintf(`metric.type = starts_with(%q)`, metricTypePrefix))
 	}
 	return metricTypeFilters
 }

View File

@@ -551,11 +551,9 @@ func (e *Endpoint) simpleMetadataSelect(ctx context.Context, client *Client, res
 func (e *Endpoint) complexMetadataSelect(ctx context.Context, res *resourceKind, objects objectMap) {
 	// We're only going to get metadata from maxMetadataSamples resources. If we have
 	// more resources than that, we pick maxMetadataSamples samples at random.
-	sampledObjects := make([]*objectRef, len(objects))
-	i := 0
+	sampledObjects := make([]*objectRef, 0, len(objects))
 	for _, obj := range objects {
-		sampledObjects[i] = obj
-		i++
+		sampledObjects = append(sampledObjects, obj)
 	}
 	n := len(sampledObjects)
 	if n > maxMetadataSamples {

View File

@@ -91,8 +91,8 @@ func (v *VSphere) Start(_ telegraf.Accumulator) error {
 	v.cancel = cancel
 
 	// Create endpoints, one for each vCenter we're monitoring
-	v.endpoints = make([]*Endpoint, len(v.Vcenters))
-	for i, rawURL := range v.Vcenters {
+	v.endpoints = make([]*Endpoint, 0, len(v.Vcenters))
+	for _, rawURL := range v.Vcenters {
 		u, err := soap.ParseURL(rawURL)
 		if err != nil {
 			return err
@@ -101,7 +101,7 @@ func (v *VSphere) Start(_ telegraf.Accumulator) error {
 		if err != nil {
 			return err
 		}
-		v.endpoints[i] = ep
+		v.endpoints = append(v.endpoints, ep)
 	}
 	return nil
 }

View File

@@ -164,14 +164,14 @@ func createSim(folders int) (*simulator.Model, *simulator.Server, error) {
 func testAlignUniform(t *testing.T, n int) {
 	now := time.Now().Truncate(60 * time.Second)
-	info := make([]types.PerfSampleInfo, n)
-	values := make([]int64, n)
+	info := make([]types.PerfSampleInfo, 0, n)
+	values := make([]int64, 0, n)
 	for i := 0; i < n; i++ {
-		info[i] = types.PerfSampleInfo{
+		info = append(info, types.PerfSampleInfo{
 			Timestamp: now.Add(time.Duration(20*i) * time.Second),
 			Interval:  20,
-		}
-		values[i] = 1
+		})
+		values = append(values, 1)
 	}
 	e := Endpoint{log: testutil.Logger{}}
 	newInfo, newValues := e.alignSamples(info, values, 60*time.Second)
@@ -190,14 +190,14 @@ func TestAlignMetrics(t *testing.T) {
 	// 20s to 60s of 1,2,3,1,2,3... (should average to 2)
 	n := 30
 	now := time.Now().Truncate(60 * time.Second)
-	info := make([]types.PerfSampleInfo, n)
-	values := make([]int64, n)
+	info := make([]types.PerfSampleInfo, 0, n)
+	values := make([]int64, 0, n)
 	for i := 0; i < n; i++ {
-		info[i] = types.PerfSampleInfo{
+		info = append(info, types.PerfSampleInfo{
 			Timestamp: now.Add(time.Duration(20*i) * time.Second),
 			Interval:  20,
-		}
-		values[i] = int64(i%3 + 1)
+		})
+		values = append(values, int64(i%3+1))
 	}
 	e := Endpoint{log: testutil.Logger{}}
 	newInfo, newValues := e.alignSamples(info, values, 60*time.Second)

View File

@@ -105,14 +105,14 @@ func NewTrace(spans []Span) (trace.Trace, error) {
 // NewAnnotations converts a slice of Annotation into a slice of new Annotations
 func NewAnnotations(annotations []Annotation, endpoint Endpoint) []trace.Annotation {
-	formatted := make([]trace.Annotation, len(annotations))
-	for i, annotation := range annotations {
-		formatted[i] = trace.Annotation{
+	formatted := make([]trace.Annotation, 0, len(annotations))
+	for _, annotation := range annotations {
+		formatted = append(formatted, trace.Annotation{
 			Host:        endpoint.Host(),
 			ServiceName: endpoint.Name(),
 			Timestamp:   annotation.Timestamp(),
 			Value:       annotation.Value(),
-		}
+		})
 	}
 	return formatted
@@ -121,14 +121,14 @@ func NewAnnotations(annotations []Annotation, endpoint Endpoint) []trace.Annotat
 // NewBinaryAnnotations is very similar to NewAnnotations, but it
 // converts BinaryAnnotations instead of the normal Annotation
 func NewBinaryAnnotations(annotations []BinaryAnnotation, endpoint Endpoint) []trace.BinaryAnnotation {
-	formatted := make([]trace.BinaryAnnotation, len(annotations))
-	for i, annotation := range annotations {
-		formatted[i] = trace.BinaryAnnotation{
+	formatted := make([]trace.BinaryAnnotation, 0, len(annotations))
+	for _, annotation := range annotations {
+		formatted = append(formatted, trace.BinaryAnnotation{
 			Host:        endpoint.Host(),
 			ServiceName: endpoint.Name(),
 			Key:         annotation.Key(),
 			Value:       annotation.Value(),
-		}
+		})
 	}
 	return formatted
 }

View File

@@ -21,12 +21,12 @@ func (j *JSON) Decode(octets []byte) ([]codec.Span, error) {
 		return nil, err
 	}
-	res := make([]codec.Span, len(spans))
+	res := make([]codec.Span, 0, len(spans))
 	for i := range spans {
 		if err := spans[i].Validate(); err != nil {
 			return nil, err
 		}
-		res[i] = &spans[i]
+		res = append(res, &spans[i])
 	}
 	return res, nil
 }
@@ -89,23 +89,23 @@ func (s *span) Name() string {
 }
 
 func (s *span) Annotations() []codec.Annotation {
-	res := make([]codec.Annotation, len(s.Anno))
+	res := make([]codec.Annotation, 0, len(s.Anno))
 	for i := range s.Anno {
-		res[i] = &s.Anno[i]
+		res = append(res, &s.Anno[i])
 	}
 	return res
 }
 
 func (s *span) BinaryAnnotations() ([]codec.BinaryAnnotation, error) {
-	res := make([]codec.BinaryAnnotation, len(s.BAnno))
+	res := make([]codec.BinaryAnnotation, 0, len(s.BAnno))
 	for i, a := range s.BAnno {
 		if a.Key() != "" && a.Value() == "" {
-			return nil, fmt.Errorf("No value for key %s at binaryAnnotations[%d]", a.K, i)
+			return nil, fmt.Errorf("no value for key %s at binaryAnnotations[%d]", a.K, i)
 		}
 		if a.Value() != "" && a.Key() == "" {
-			return nil, fmt.Errorf("No key at binaryAnnotations[%d]", i)
+			return nil, fmt.Errorf("no key at binaryAnnotations[%d]", i)
 		}
-		res[i] = &s.BAnno[i]
+		res = append(res, &s.BAnno[i])
 	}
 	return res, nil
 }