chore: Fix linter findings for `revive:enforce-map-style` in `plugins/aggregators`, `plugins/common`, `plugins/parsers`, `plugins/processors`, `plugins/secretstores` and `plugins/serializers` (#16039)
parent 3951d894ee
commit 39a5ca27ef
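The `enforce-map-style` rule from revive, as configured for this repository, flags empty map composite literals and asks for `make` instead. A minimal sketch of the before/after pattern this commit applies across the plugin trees (the `fields` name and element type are illustrative, not taken from any single plugin):

```go
package main

import "fmt"

func main() {
	// Flagged by revive:enforce-map-style — an empty map composite literal:
	//   fields := map[string]interface{}{}

	// Preferred form: allocate the empty map with make.
	fields := make(map[string]interface{})
	fields["value_count"] = 42

	fmt.Println(fields)
}
```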
@@ -165,7 +165,7 @@ func (b *BasicStats) Add(in telegraf.Metric) {

 func (b *BasicStats) Push(acc telegraf.Accumulator) {
 	for _, aggregate := range b.cache {
-		fields := map[string]interface{}{}
+		fields := make(map[string]interface{})
 		for k, v := range aggregate.fields {
 			if b.statsConfig.count {
 				fields[k+"_count"] = v.count
@@ -3,4 +3,4 @@ package aggregators
 import "github.com/influxdata/telegraf"

 // Deprecations lists the deprecated plugins
-var Deprecations = map[string]telegraf.DeprecationInfo{}
+var Deprecations = make(map[string]telegraf.DeprecationInfo)
@@ -69,7 +69,7 @@ func (m *Final) Push(acc telegraf.Accumulator) {
 		if m.KeepOriginalFieldNames {
 			fields = metric.Fields()
 		} else {
-			fields = map[string]any{}
+			fields = make(map[string]any, len(metric.FieldList()))
 			for _, field := range metric.FieldList() {
 				fields[field.Key+"_final"] = field.Value
 			}
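Where the eventual size of the map is already known at the allocation site, the replacement also passes a capacity hint to `make`, as in the hunk above. A small self-contained sketch of that variant (the `field` type and sample data are illustrative stand-ins for telegraf's field list):

```go
package main

import "fmt"

// field is a stand-in for telegraf's metric field type; illustrative only.
type field struct {
	Key   string
	Value interface{}
}

func main() {
	fieldList := []field{{"load", 0.42}, {"uptime", 3600}}

	// When the final size is known, pass it as a capacity hint so the map
	// is allocated once instead of growing as entries are added.
	fields := make(map[string]interface{}, len(fieldList))
	for _, f := range fieldList {
		fields[f.Key+"_final"] = f.Value
	}

	fmt.Println(fields)
}
```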
@@ -297,7 +297,7 @@ func convert(in interface{}) (float64, bool) {

 // copyTags copies tags
 func copyTags(tags map[string]string) map[string]string {
-	copiedTags := map[string]string{}
+	copiedTags := make(map[string]string, len(tags))
 	for key, val := range tags {
 		copiedTags[key] = val
 	}
@@ -322,7 +322,7 @@ func isTagsIdentical(originalTags, checkedTags map[string]string) bool {

 // makeFieldsWithCount assigns count value to all metric fields
 func makeFieldsWithCount(fieldsWithCountIn map[string]int64) map[string]interface{} {
-	fieldsWithCountOut := map[string]interface{}{}
+	fieldsWithCountOut := make(map[string]interface{}, len(fieldsWithCountIn))
 	for field, count := range fieldsWithCountIn {
 		fieldsWithCountOut[field+"_bucket"] = count
 	}
@@ -81,7 +81,7 @@ func (m *MinMax) Add(in telegraf.Metric) {

 func (m *MinMax) Push(acc telegraf.Accumulator) {
 	for _, aggregate := range m.cache {
-		fields := map[string]interface{}{}
+		fields := make(map[string]interface{}, len(aggregate.fields))
 		for k, v := range aggregate.fields {
 			fields[k+"_min"] = v.min
 			fields[k+"_max"] = v.max
@@ -78,7 +78,7 @@ func (q *Quantile) Add(in telegraf.Metric) {

 func (q *Quantile) Push(acc telegraf.Accumulator) {
 	for _, aggregate := range q.cache {
-		fields := map[string]interface{}{}
+		fields := make(map[string]interface{}, len(aggregate.fields)*len(q.Quantiles))
 		for k, algo := range aggregate.fields {
 			for i, qtl := range q.Quantiles {
 				fields[k+q.suffixes[i]] = algo.Quantile(qtl)
@@ -4,7 +4,7 @@ import "github.com/influxdata/telegraf"

 type Creator func() telegraf.Aggregator

-var Aggregators = map[string]Creator{}
+var Aggregators = make(map[string]Creator)

 func Add(name string, creator Creator) {
 	Aggregators[name] = creator
@@ -65,8 +65,7 @@ func (vc *ValueCounter) Add(in telegraf.Metric) {
 // Push emits the counters
 func (vc *ValueCounter) Push(acc telegraf.Accumulator) {
 	for _, agg := range vc.cache {
-		fields := map[string]interface{}{}
-
+		fields := make(map[string]interface{}, len(agg.fieldCount))
 		for field, count := range agg.fieldCount {
 			fields[field] = count
 		}
@@ -308,7 +308,7 @@ func validateNodeToAdd(existing map[metricParts]struct{}, nmm *NodeMetricMapping

 // InitNodeMetricMapping builds nodes from the configuration
 func (o *OpcUAInputClient) InitNodeMetricMapping() error {
-	existing := map[metricParts]struct{}{}
+	existing := make(map[metricParts]struct{}, len(o.Config.RootNodes))
 	for _, node := range o.Config.RootNodes {
 		nmm, err := NewNodeMetricMapping(o.Config.MetricName, node, make(map[string]string))
 		if err != nil {
@@ -147,9 +147,9 @@ func createPluginsWithTomlConfig(md toml.MetaData, conf config) (loadedConfig, e
 // without having to define a config dead easy.
 func DefaultImportedPlugins() config {
 	conf := config{
-		Inputs:     map[string][]toml.Primitive{},
-		Processors: map[string][]toml.Primitive{},
-		Outputs:    map[string][]toml.Primitive{},
+		Inputs:     make(map[string][]toml.Primitive, len(inputs.Inputs)),
+		Processors: make(map[string][]toml.Primitive, len(processors.Processors)),
+		Outputs:    make(map[string][]toml.Primitive, len(outputs.Outputs)),
 	}
 	for name := range inputs.Inputs {
 		log.Println("No config found. Loading default config for plugin", name)
@@ -83,7 +83,7 @@ func (record metadataPattern) Less(i, j int) bool {

 func (p *Parser) initializeMetadataSeparators() error {
 	// initialize metadata
-	p.metadataTags = map[string]string{}
+	p.metadataTags = make(map[string]string)

 	if p.MetadataRows <= 0 {
 		return nil
@@ -94,7 +94,7 @@ func (p *Parser) initializeMetadataSeparators() error {
 	}

 	p.metadataSeparatorList = make(metadataPattern, 0, len(p.MetadataSeparators))
-	patternList := map[string]bool{}
+	patternList := make(map[string]bool, len(p.MetadataSeparators))
 	for _, pattern := range p.MetadataSeparators {
 		if patternList[pattern] {
 			// Ignore further, duplicated entries
@@ -3,4 +3,4 @@ package parsers
 import "github.com/influxdata/telegraf"

 // Deprecations lists the deprecated plugins
-var Deprecations = map[string]telegraf.DeprecationInfo{}
+var Deprecations = make(map[string]telegraf.DeprecationInfo)
@@ -196,7 +196,7 @@ func (p *Parser) readDWMetrics(metricType string, dwms interface{}, metrics []te
 		parsed, err := p.seriesParser.Parse([]byte(measurementName))
 		var m telegraf.Metric
 		if err != nil || len(parsed) != 1 {
-			m = metric.New(measurementName, map[string]string{}, map[string]interface{}{}, tm)
+			m = metric.New(measurementName, make(map[string]string), make(map[string]interface{}), tm)
 		} else {
 			m = parsed[0]
 			m.SetTime(tm)
@@ -24,7 +24,7 @@ func (c *Config) Validate() error {

 func (c *Config) validateTemplates() error {
 	// map to keep track of filters we see
-	filters := map[string]struct{}{}
+	filters := make(map[string]struct{}, len(c.Templates))

 	for i, template := range c.Templates {
 		parts := strings.Fields(template)
@@ -111,7 +111,7 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
 		return nil, fmt.Errorf(`field %q value: %w`, fields[0], err)
 	}

-	fieldValues := map[string]interface{}{}
+	fieldValues := make(map[string]interface{}, 1)
 	if field != "" {
 		fieldValues[field] = v
 	} else {
@@ -260,8 +260,8 @@ func (p *Parser) processMetric(input []byte, data []DataSet, tag bool, timestamp
 			Tag: tag,
 			Metric: metric.New(
 				p.measurementName,
-				map[string]string{},
-				map[string]interface{}{},
+				make(map[string]string),
+				make(map[string]interface{}),
 				timestamp,
 			),
 			Result: result,
@@ -341,8 +341,8 @@ func (p *Parser) expandArray(result metricNode, timestamp time.Time) ([]telegraf
 		result.ForEach(func(_, val gjson.Result) bool {
 			m := metric.New(
 				p.measurementName,
-				map[string]string{},
-				map[string]interface{}{},
+				make(map[string]string),
+				make(map[string]interface{}),
 				timestamp,
 			)
 			if val.IsObject() {
@@ -519,8 +519,8 @@ func (p *Parser) processObjects(input []byte, objects []Object, timestamp time.T
 		rootObject := metricNode{
 			Metric: metric.New(
 				p.measurementName,
-				map[string]string{},
-				map[string]interface{}{},
+				make(map[string]string),
+				make(map[string]interface{}),
 				timestamp,
 			),
 			Result: result,
@@ -21,8 +21,7 @@ func mapValueType(mt dto.MetricType) telegraf.ValueType {
 }

 func getTagsFromLabels(m *dto.Metric, defaultTags map[string]string) map[string]string {
-	result := map[string]string{}
-
+	result := make(map[string]string, len(defaultTags)+len(m.Label))
 	for key, value := range defaultTags {
 		result[key] = value
 	}
@@ -30,7 +30,7 @@ func (p *Parser) Parse(buf []byte) ([]telegraf.Metric, error) {
 	now := time.Now()

 	for _, ts := range req.Timeseries {
-		tags := map[string]string{}
+		tags := make(map[string]string, len(p.DefaultTags)+len(ts.Labels))
 		for key, value := range p.DefaultTags {
 			tags[key] = value
 		}
@@ -8,7 +8,7 @@ import (
 type Creator func(defaultMetricName string) telegraf.Parser

 // Parsers contains the registry of all known parsers (following the new style)
-var Parsers = map[string]Creator{}
+var Parsers = make(map[string]Creator)

 // Add adds a parser to the registry. Usually this function is called in the plugin's init function
 func Add(name string, creator Creator) {
@@ -3,4 +3,4 @@ package processors
 import "github.com/influxdata/telegraf"

 // Deprecations lists the deprecated plugins
-var Deprecations = map[string]telegraf.DeprecationInfo{}
+var Deprecations = make(map[string]telegraf.DeprecationInfo)
@@ -13,7 +13,7 @@ type HasUnwrap interface {

 // all processors are streaming processors.
 // telegraf.Processor processors are upgraded to telegraf.StreamingProcessor
-var Processors = map[string]StreamingCreator{}
+var Processors = make(map[string]StreamingCreator)

 // Add adds a telegraf.Processor processor
 func Add(name string, creator Creator) {
@@ -68,7 +68,7 @@ func NewReverseDNSCache(ttl, lookupTimeout time.Duration, workerPoolSize int) *R
 	d := &ReverseDNSCache{
 		ttl:                 ttl,
 		lookupTimeout:       lookupTimeout,
-		cache:               map[string]*dnslookup{},
+		cache:               make(map[string]*dnslookup),
 		maxWorkers:          workerPoolSize,
 		sem:                 semaphore.NewWeighted(int64(workerPoolSize)),
 		cancelCleanupWorker: cancel,
@@ -3,4 +3,4 @@ package secretstores
 import "github.com/influxdata/telegraf"

 // Deprecations lists the deprecated plugins
-var Deprecations = map[string]telegraf.DeprecationInfo{}
+var Deprecations = make(map[string]telegraf.DeprecationInfo)
@@ -8,7 +8,7 @@ import (
 type Creator func(id string) telegraf.SecretStore

 // SecretStores contains the registry of all known secret-stores
-var SecretStores = map[string]Creator{}
+var SecretStores = make(map[string]Creator)

 // Add adds a secret-store to the registry. Usually this function is called in the plugin's init function
 func Add(name string, creator Creator) {
@@ -3,4 +3,4 @@ package serializers
 import "github.com/influxdata/telegraf"

 // Deprecations lists the deprecated plugins
-var Deprecations = map[string]telegraf.DeprecationInfo{}
+var Deprecations = make(map[string]telegraf.DeprecationInfo)
@@ -114,9 +114,7 @@ func (s *Serializer) createObject(metric telegraf.Metric) OIMetrics {
 		oimetric.Value = field.Value

 		if oimetric.Node != "" {
-			cimapping := map[string]string{}
-			cimapping["node"] = oimetric.Node
-			oimetric.CiMapping = cimapping
+			oimetric.CiMapping = map[string]string{"node": oimetric.Node}
 		}

 		allmetrics = append(allmetrics, oimetric)
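The hunk above shows the other side of the rule as applied in this commit: a map whose entries are all known up front can stay a composite literal, so the three-line build-up collapses into a single initializer. A minimal sketch (the `node` key mirrors the hunk; the surrounding variables are illustrative):

```go
package main

import "fmt"

func main() {
	node := "web-01"

	// A non-empty map literal is kept as-is; the style fixes in this commit
	// only rewrite empty literals into make.
	ciMapping := map[string]string{"node": node}

	fmt.Println(ciMapping)
}
```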
@@ -11,7 +11,7 @@ import (
 type Creator func() Serializer

 // Serializers contains the registry of all known serializers (following the new style)
-var Serializers = map[string]Creator{}
+var Serializers = make(map[string]Creator)

 // Add adds a serializer to the registry. Usually this function is called in the plugin's init function
 func Add(name string, creator Creator) {
@@ -165,7 +165,7 @@ func (s *Serializer) createObject(metric telegraf.Metric) ([]byte, error) {
 	// The tags are common to all events in this timeseries
 	commonTags := CommonTags{}

-	commonTags.Fields = map[string]interface{}{}
+	commonTags.Fields = make(map[string]interface{}, len(metric.Tags()))

 	// Break tags out into key(n)=value(t) pairs
 	for n, t := range metric.Tags() {