chore: Fix linter findings for makezero (part4) (#12410)

Paweł Żak authored on 2022-12-20 11:42:09 +01:00, committed by GitHub
parent 9b24184730
commit 86cd0c0c24
17 changed files with 94 additions and 101 deletions

View File

@@ -17,6 +17,7 @@ linters:
     - ineffassign
     - interfacebloat
     - lll
+    - makezero
     - nakedret
     - nilerr
     - prealloc

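For context, makezero reports slices that are allocated with a non-zero length and then grown with append: the zero-valued elements stay in place and the slice ends up twice as long as intended. The fix applied throughout this commit is to allocate with length zero plus a capacity hint, then append. A minimal, self-contained sketch of the before/after pattern (the names below are illustrative, not taken from the Telegraf code):

package main

import "fmt"

func main() {
    names := []string{"a", "b"}

    // Before: non-zero length plus append doubles the slice and keeps two
    // leading "" elements -- this is the pattern makezero reports.
    buggy := make([]string, len(names))
    for _, n := range names {
        buggy = append(buggy, "host="+n)
    }
    fmt.Println(len(buggy), buggy) // 4 [  host=a host=b]

    // After: length zero with a capacity hint, then append.
    fixed := make([]string, 0, len(names))
    for _, n := range names {
        fixed = append(fixed, "host="+n)
    }
    fmt.Println(len(fixed), fixed) // 2 [host=a host=b]
}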
View File

@@ -336,13 +336,13 @@ func (o *OpcUAInputClient) InitNodeMetricMapping() error {
 }
 func (o *OpcUAInputClient) initNodeIDs() error {
-    o.NodeIDs = make([]*ua.NodeID, len(o.NodeMetricMapping))
-    for i, node := range o.NodeMetricMapping {
+    o.NodeIDs = make([]*ua.NodeID, 0, len(o.NodeMetricMapping))
+    for _, node := range o.NodeMetricMapping {
         nid, err := ua.ParseNodeID(node.Tag.NodeID())
         if err != nil {
             return err
         }
-        o.NodeIDs[i] = nid
+        o.NodeIDs = append(o.NodeIDs, nid)
     }
     return nil

View File

@@ -308,15 +308,15 @@ func (s *Stackdriver) newListTimeSeriesFilter(metricType string) string {
     }
     if len(s.Filter.UserLabels) > 0 {
-        userLabelsFilter := make([]string, len(s.Filter.UserLabels))
-        for i, metricLabel := range s.Filter.UserLabels {
+        userLabelsFilter := make([]string, 0, len(s.Filter.UserLabels))
+        for _, metricLabel := range s.Filter.UserLabels {
             // check if metric label value contains function
             if includeExcludeHelper(metricLabel.Value, functions, nil) {
                 valueFmt = `metadata.user_labels."%s" = %s`
             } else {
                 valueFmt = `metadata.user_labels."%s" = "%s"`
             }
-            userLabelsFilter[i] = fmt.Sprintf(valueFmt, metricLabel.Key, metricLabel.Value)
+            userLabelsFilter = append(userLabelsFilter, fmt.Sprintf(valueFmt, metricLabel.Key, metricLabel.Value))
         }
         if len(userLabelsFilter) == 1 {
             filterString += fmt.Sprintf(" AND %s", userLabelsFilter[0])
@@ -326,15 +326,15 @@ func (s *Stackdriver) newListTimeSeriesFilter(metricType string) string {
     }
     if len(s.Filter.SystemLabels) > 0 {
-        systemLabelsFilter := make([]string, len(s.Filter.SystemLabels))
-        for i, metricLabel := range s.Filter.SystemLabels {
+        systemLabelsFilter := make([]string, 0, len(s.Filter.SystemLabels))
+        for _, metricLabel := range s.Filter.SystemLabels {
             // check if metric label value contains function
             if includeExcludeHelper(metricLabel.Value, functions, nil) {
                 valueFmt = `metadata.system_labels."%s" = %s`
             } else {
                 valueFmt = `metadata.system_labels."%s" = "%s"`
             }
-            systemLabelsFilter[i] = fmt.Sprintf(valueFmt, metricLabel.Key, metricLabel.Value)
+            systemLabelsFilter = append(systemLabelsFilter, fmt.Sprintf(valueFmt, metricLabel.Key, metricLabel.Value))
         }
         if len(systemLabelsFilter) == 1 {
             filterString += fmt.Sprintf(" AND %s", systemLabelsFilter[0])

View File

@@ -26,13 +26,13 @@ func UnmarshalThrift(body []byte) ([]*zipkincore.Span, error) {
         return nil, err
     }
-    spans := make([]*zipkincore.Span, size)
+    spans := make([]*zipkincore.Span, 0, size)
     for i := 0; i < size; i++ {
         zs := &zipkincore.Span{}
         if err = zs.Read(context.Background(), transport); err != nil {
             return nil, err
         }
-        spans[i] = zs
+        spans = append(spans, zs)
     }
     if err = transport.ReadListEnd(context.Background()); err != nil {
@@ -51,9 +51,9 @@ func (t *Thrift) Decode(octets []byte) ([]codec.Span, error) {
         return nil, err
     }
-    res := make([]codec.Span, len(spans))
-    for i, s := range spans {
-        res[i] = &span{s}
+    res := make([]codec.Span, 0, len(spans))
+    for _, s := range spans {
+        res = append(res, &span{s})
     }
     return res, nil
 }
@@ -171,17 +171,17 @@ func (s *span) Name() string {
 }
 func (s *span) Annotations() []codec.Annotation {
-    res := make([]codec.Annotation, len(s.Span.Annotations))
-    for i := range s.Span.Annotations {
-        res[i] = &annotation{s.Span.Annotations[i]}
+    res := make([]codec.Annotation, 0, len(s.Span.Annotations))
+    for _, ann := range s.Span.Annotations {
+        res = append(res, &annotation{ann})
     }
     return res
 }
 func (s *span) BinaryAnnotations() ([]codec.BinaryAnnotation, error) {
-    res := make([]codec.BinaryAnnotation, len(s.Span.BinaryAnnotations))
-    for i := range s.Span.BinaryAnnotations {
-        res[i] = &binaryAnnotation{s.Span.BinaryAnnotations[i]}
+    res := make([]codec.BinaryAnnotation, 0, len(s.Span.BinaryAnnotations))
+    for _, ann := range s.Span.BinaryAnnotations {
+        res = append(res, &binaryAnnotation{ann})
     }
     return res, nil
 }

View File

@@ -89,9 +89,9 @@ func (ps *PubSub) Write(metrics []telegraf.Metric) error {
     cctx, cancel := context.WithCancel(context.Background())
     // Publish all messages - each call to Publish returns a future.
-    ps.publishResults = make([]publishResult, len(msgs))
-    for i, m := range msgs {
-        ps.publishResults[i] = ps.t.Publish(cctx, m)
+    ps.publishResults = make([]publishResult, 0, len(msgs))
+    for _, m := range msgs {
+        ps.publishResults = append(ps.publishResults, ps.t.Publish(cctx, m))
     }
     // topic.Stop() forces all published messages to be sent, even
@@ -180,8 +180,8 @@ func (ps *PubSub) toMessages(metrics []telegraf.Metric) ([]*pubsub.Message, erro
         return []*pubsub.Message{msg}, nil
     }
-    msgs := make([]*pubsub.Message, len(metrics))
-    for i, m := range metrics {
+    msgs := make([]*pubsub.Message, 0, len(metrics))
+    for _, m := range metrics {
         b, err := ps.serializer.Serialize(m)
         if err != nil {
             ps.Log.Debugf("Could not serialize metric: %v", err)
@@ -193,12 +193,13 @@ func (ps *PubSub) toMessages(metrics []telegraf.Metric) ([]*pubsub.Message, erro
             b = []byte(encoded)
         }
-        msgs[i] = &pubsub.Message{
+        msg := &pubsub.Message{
             Data: b,
         }
         if ps.Attributes != nil {
-            msgs[i].Attributes = ps.Attributes
+            msg.Attributes = ps.Attributes
         }
+        msgs = append(msgs, msg)
     }
     return msgs, nil

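The toMessages change above is the one spot in this commit where the fix is more than a mechanical swap: because the loop keeps configuring the element after it is created, the rewritten code builds a local value and appends it once per iteration, when it is complete. A rough sketch of that shape, using a made-up message type rather than the real pubsub.Message:

package main

import "fmt"

// message stands in for pubsub.Message; only the fields needed for the sketch.
type message struct {
    Data       []byte
    Attributes map[string]string
}

func buildMessages(payloads [][]byte, attrs map[string]string) []*message {
    msgs := make([]*message, 0, len(payloads))
    for _, b := range payloads {
        // Build the element locally and finish configuring it...
        msg := &message{Data: b}
        if attrs != nil {
            msg.Attributes = attrs
        }
        // ...then append it exactly once at the end of the iteration.
        msgs = append(msgs, msg)
    }
    return msgs
}

func main() {
    out := buildMessages([][]byte{[]byte("a"), []byte("b")}, map[string]string{"env": "test"})
    fmt.Println(len(out), string(out[0].Data), out[1].Attributes["env"])
}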
View File

@@ -64,15 +64,15 @@ type (
 func getTestResources(tT *testing.T, settings pubsub.PublishSettings, testM []testMetric) (*PubSub, *stubTopic, []telegraf.Metric) {
     s := serializers.NewInfluxSerializer()
-    metrics := make([]telegraf.Metric, len(testM))
+    metrics := make([]telegraf.Metric, 0, len(testM))
     t := &stubTopic{
         T: tT,
         ReturnErr: make(map[string]bool),
         published: make(map[string]*pubsub.Message),
     }
-    for i, tm := range testM {
-        metrics[i] = tm.m
+    for _, tm := range testM {
+        metrics = append(metrics, tm.m)
         if tm.returnErr {
             v, _ := tm.m.GetField("value")
             t.ReturnErr[v.(string)] = true
@@ -196,10 +196,10 @@ func (t *stubTopic) parseIDs(msg *pubsub.Message) []string {
         }
     }
-    ids := make([]string, len(metrics))
-    for i, met := range metrics {
+    ids := make([]string, 0, len(metrics))
+    for _, met := range metrics {
         id, _ := met.GetField("value")
-        ids[i] = id.(string)
+        ids = append(ids, id.(string))
     }
     return ids
 }

View File

@@ -23,11 +23,9 @@ func TestBuildDimensions(t *testing.T) {
     testPoint := testutil.TestMetric(1)
     dimensions := BuildDimensions(testPoint.Tags())
-    tagKeys := make([]string, len(testPoint.Tags()))
-    i := 0
+    tagKeys := make([]string, 0, len(testPoint.Tags()))
     for k := range testPoint.Tags() {
-        tagKeys[i] = k
-        i++
+        tagKeys = append(tagKeys, k)
     }
     sort.Strings(tagKeys)

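This test, like the CrateDB, Datadog, and OpenTSDB changes further down, shows a second variant of the fix: the old code kept a separate index counter because a map cannot be ranged over with positional indices, and switching to append removes the counter entirely. A small illustrative sketch (names are invented, not from the Telegraf sources):

package main

import (
    "fmt"
    "sort"
)

func sortedKeys(tags map[string]string) []string {
    // No manual index counter: start at length zero with capacity len(tags)
    // and let append track the position.
    keys := make([]string, 0, len(tags))
    for k := range tags {
        keys = append(keys, k)
    }
    sort.Strings(keys) // map iteration order is random, so sort for stable output
    return keys
}

func main() {
    fmt.Println(sortedKeys(map[string]string{"host": "a", "region": "b", "dc": "c"}))
}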
View File

@@ -82,8 +82,8 @@ func (c *CrateDB) Write(metrics []telegraf.Metric) error {
 }
 func insertSQL(table string, keyReplacement string, metrics []telegraf.Metric) (string, error) {
-    rows := make([]string, len(metrics))
-    for i, m := range metrics {
+    rows := make([]string, 0, len(metrics))
+    for _, m := range metrics {
         cols := []interface{}{
             hashID(m),
             m.Time().UTC(),
@@ -92,15 +92,15 @@ func insertSQL(table string, keyReplacement string, metrics []telegraf.Metric) (
             m.Fields(),
         }
-        escapedCols := make([]string, len(cols))
-        for i, col := range cols {
+        escapedCols := make([]string, 0, len(cols))
+        for _, col := range cols {
             escaped, err := escapeValue(col, keyReplacement)
             if err != nil {
                 return "", err
             }
-            escapedCols[i] = escaped
+            escapedCols = append(escapedCols, escaped)
         }
-        rows[i] = `(` + strings.Join(escapedCols, ", ") + `)`
+        rows = append(rows, `(`+strings.Join(escapedCols, ", ")+`)`)
     }
     query := `INSERT INTO ` + table + ` ("hash_id", "timestamp", "name", "tags", "fields")
 VALUES
@@ -204,11 +204,9 @@ func hashID(m telegraf.Metric) int64 {
     h := sha512.New()
     h.Write([]byte(m.Name())) //nolint:revive // from hash.go: "It never returns an error"
     tags := m.Tags()
-    tmp := make([]string, len(tags))
-    i := 0
+    tmp := make([]string, 0, len(tags))
     for k, v := range tags {
-        tmp[i] = k + v
-        i++
+        tmp = append(tmp, k+v)
     }
     sort.Strings(tmp)

View File

@@ -201,11 +201,9 @@ func buildMetrics(m telegraf.Metric) (map[string]Point, error) {
 }
 func buildTags(tagList []*telegraf.Tag) []string {
-    tags := make([]string, len(tagList))
-    index := 0
+    tags := make([]string, 0, len(tagList))
     for _, tag := range tagList {
-        tags[index] = fmt.Sprintf("%s:%s", tag.Key, tag.Value)
-        index++
+        tags = append(tags, fmt.Sprintf("%s:%s", tag.Key, tag.Value))
     }
     return tags
 }

View File

@@ -581,14 +581,14 @@ func createTestMetrics(
     count uint32,
     serializer serializers.Serializer,
 ) ([]telegraf.Metric, [][]byte) {
-    metrics := make([]telegraf.Metric, count)
-    metricsData := make([][]byte, count)
+    metrics := make([]telegraf.Metric, 0, count)
+    metricsData := make([][]byte, 0, count)
     for i := uint32(0); i < count; i++ {
         name := fmt.Sprintf("metric%d", i)
         metric, data := createTestMetric(t, name, serializer)
-        metrics[i] = metric
-        metricsData[i] = data
+        metrics = append(metrics, metric)
+        metricsData = append(metricsData, data)
     }
     return metrics, metricsData
@@ -597,14 +597,13 @@ func createTestMetrics(
 func createPutRecordsRequestEntries(
     metricsData [][]byte,
 ) []types.PutRecordsRequestEntry {
-    count := len(metricsData)
-    records := make([]types.PutRecordsRequestEntry, count)
+    records := make([]types.PutRecordsRequestEntry, 0, len(metricsData))
-    for i := 0; i < count; i++ {
-        records[i] = types.PutRecordsRequestEntry{
+    for _, data := range metricsData {
+        records = append(records, types.PutRecordsRequestEntry{
             PartitionKey: aws.String(testPartitionKey),
-            Data: metricsData[i],
-        }
+            Data: data,
+        })
     }
     return records

View File

@@ -48,11 +48,9 @@ type OpenTSDB struct {
 }
 func ToLineFormat(tags map[string]string) string {
-    tagsArray := make([]string, len(tags))
-    index := 0
+    tagsArray := make([]string, 0, len(tags))
     for k, v := range tags {
-        tagsArray[index] = fmt.Sprintf("%s=%s", k, v)
-        index++
+        tagsArray = append(tagsArray, fmt.Sprintf("%s=%s", k, v))
     }
     sort.Strings(tagsArray)
     return strings.Join(tagsArray, " ")

View File

@@ -128,9 +128,9 @@ func TestSanitize(t *testing.T) {
 func BenchmarkHttpSend(b *testing.B) {
     const batchSize = 50
     const metricsCount = 4 * batchSize
-    metrics := make([]telegraf.Metric, metricsCount)
+    metrics := make([]telegraf.Metric, 0, metricsCount)
     for i := 0; i < metricsCount; i++ {
-        metrics[i] = testutil.TestMetric(1.0)
+        metrics = append(metrics, testutil.TestMetric(1.0))
     }
     ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {

View File

@@ -61,19 +61,19 @@ type batchGeneratorArgs struct {
 // tagCardinality counts all the tag keys & values as one element. fieldCardinality counts all the field keys (not values) as one element.
 func batchGenerator(args batchGeneratorArgs) <-chan []telegraf.Metric {
-    tagSets := make([]MSS, args.tagCardinality)
+    tagSets := make([]MSS, 0, args.tagCardinality)
     for i := 0; i < args.tagCardinality; i++ {
         tags := MSS{}
         for j := 0; j < args.numTags; j++ {
             tags[fmt.Sprintf("tag_%d", j)] = fmt.Sprintf("%d", rand.Int())
         }
-        tagSets[i] = tags
+        tagSets = append(tagSets, tags)
     }
     metricChan := make(chan []telegraf.Metric, 32)
     go func() {
         for {
-            batch := make([]telegraf.Metric, args.batchSize)
+            batch := make([]telegraf.Metric, 0, args.batchSize)
             for i := 0; i < args.batchSize; i++ {
                 tableName := args.b.Name() + "_" + strconv.Itoa(rand.Intn(args.numTables))
@@ -88,7 +88,7 @@ func batchGenerator(args batchGeneratorArgs) <-chan []telegraf.Metric {
                 }
                 m.AddField("f"+strconv.Itoa(rand.Intn(args.fieldCardinality)), rand.Int())
-                batch[i] = m
+                batch = append(batch, m)
             }
             select {

View File

@@ -261,9 +261,9 @@ func (tc Column) IsField() bool {
 type Columns []Column
 func NewColumns(cols []utils.Column) Columns {
-    tcols := make(Columns, len(cols))
-    for i, col := range cols {
-        tcols[i] = Column(col)
+    tcols := make(Columns, 0, len(cols))
+    for _, col := range cols {
+        tcols = append(tcols, Column(col))
     }
     return tcols
 }
@@ -275,36 +275,36 @@ func (cols Columns) List() []Column {
 // Definitions returns the list of column definitions.
 func (cols Columns) Definitions() []string {
-    defs := make([]string, len(cols))
-    for i, tc := range cols {
-        defs[i] = tc.Definition()
+    defs := make([]string, 0, len(cols))
+    for _, tc := range cols {
+        defs = append(defs, tc.Definition())
     }
     return defs
 }
 // Identifiers returns the list of quoted column identifiers.
 func (cols Columns) Identifiers() []string {
-    idents := make([]string, len(cols))
-    for i, tc := range cols {
-        idents[i] = tc.Identifier()
+    idents := make([]string, 0, len(cols))
+    for _, tc := range cols {
+        idents = append(idents, tc.Identifier())
     }
     return idents
 }
 // Selectors returns the list of column selectors.
 func (cols Columns) Selectors() []string {
-    selectors := make([]string, len(cols))
-    for i, tc := range cols {
-        selectors[i] = tc.Selector()
+    selectors := make([]string, 0, len(cols))
+    for _, tc := range cols {
+        selectors = append(selectors, tc.Selector())
     }
     return selectors
 }
 // String returns the comma delimited list of column identifiers.
 func (cols Columns) String() string {
-    colStrs := make([]string, len(cols))
-    for i, tc := range cols {
-        colStrs[i] = tc.String()
+    colStrs := make([]string, 0, len(cols))
+    for _, tc := range cols {
+        colStrs = append(colStrs, tc.String())
     }
     return strings.Join(colStrs, ", ")
 }

View File

@@ -93,12 +93,12 @@ func (tm *TableManager) MatchSource(ctx context.Context, db dbh, rowSource *Tabl
         }
         if len(missingCols) > 0 {
-            colDefs := make([]string, len(missingCols))
-            for i, col := range missingCols {
+            colDefs := make([]string, 0, len(missingCols))
+            for _, col := range missingCols {
                 if err := rowSource.DropColumn(col); err != nil {
                     return fmt.Errorf("metric/table mismatch: Unable to omit field/column from \"%s\": %w", tagTable.name, err)
                 }
-                colDefs[i] = col.Name + " " + col.Type
+                colDefs = append(colDefs, col.Name+" "+col.Type)
             }
             tm.Logger.Errorf("table '%s' is missing tag columns (dropping metrics): %s",
                 tagTable.name,
@@ -124,12 +124,12 @@ func (tm *TableManager) MatchSource(ctx context.Context, db dbh, rowSource *Tabl
         }
         if len(missingCols) > 0 {
-            colDefs := make([]string, len(missingCols))
-            for i, col := range missingCols {
+            colDefs := make([]string, 0, len(missingCols))
+            for _, col := range missingCols {
                 if err := rowSource.DropColumn(col); err != nil {
                     return fmt.Errorf("metric/table mismatch: Unable to omit field/column from \"%s\": %w", metricTable.name, err)
                 }
-                colDefs[i] = col.Name + " " + col.Type
+                colDefs = append(colDefs, col.Name+" "+col.Type)
             }
             tm.Logger.Errorf("table '%s' is missing columns (omitting fields): %s",
                 metricTable.name,

View File

@@ -179,9 +179,9 @@ func (tsrc *TableSource) TagTableColumns() []utils.Column {
 func (tsrc *TableSource) ColumnNames() []string {
     cols := tsrc.MetricTableColumns()
-    names := make([]string, len(cols))
-    for i, col := range cols {
-        names[i] = col.Name
+    names := make([]string, 0, len(cols))
+    for _, col := range cols {
+        names = append(names, col.Name)
     }
     return names
 }
@@ -370,9 +370,9 @@ func (ttsrc *TagTableSource) cacheTouch(tagID int64) {
 func (ttsrc *TagTableSource) ColumnNames() []string {
     cols := ttsrc.TagTableColumns()
-    names := make([]string, len(cols))
-    for i, col := range cols {
-        names[i] = col.Name
+    names := make([]string, 0, len(cols))
+    for _, col := range cols {
+        names = append(names, col.Name)
     }
     return names
 }

View File

@@ -235,9 +235,9 @@ func BenchmarkReader(b *testing.B) {
         },
         time.Unix(0, 1517620624000000000),
     )
-    metrics := make([]telegraf.Metric, 1000)
-    for i := range metrics {
-        metrics[i] = m
+    metrics := make([]telegraf.Metric, 0, 1000)
+    for i := 0; i < 1000; i++ {
+        metrics = append(metrics, m)
     }
     b.ResetTimer()
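One detail worth noting in this last hunk: the loop could not stay as for i := range metrics, because after the slice is created with length zero there is nothing to range over, so it becomes a counted loop. A minimal sketch of that pitfall, using an illustrative constant rather than the benchmark's real setup:

package main

import "fmt"

func main() {
    const n = 5

    // Ranging over a zero-length slice runs zero iterations,
    // so this version silently produces an empty result.
    wrong := make([]int, 0, n)
    for i := range wrong {
        wrong = append(wrong, i)
    }

    // A counted loop (or ranging over the source data) is needed instead.
    right := make([]int, 0, n)
    for i := 0; i < n; i++ {
        right = append(right, i)
    }

    fmt.Println(len(wrong), len(right)) // 0 5
}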