chore: Fix linter findings for `revive:unused-receiver` in `plugins/`: `aggregators`, `common`, `parsers`, `processors`, `secretstores` and `serializers` (#16339)

Commit d26479b58e (parent 2a66067df6)
Authored by Paweł Żak on 2025-01-14 09:24:13 +01:00, committed via GitHub
43 changed files with 143 additions and 166 deletions
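
The commit applies the two standard fixes for revive's `unused-receiver` rule, both visible in the hunks below: where a method must keep its signature (typically to satisfy a Telegraf interface), the unused receiver name is dropped and only the type remains; where the receiver is not needed at all, the method is demoted to a package-level function and its call sites are updated. A minimal sketch of both patterns, using a hypothetical `Sampler` type that does not appear anywhere in the commit:

```go
package example

import "fmt"

// Sampler is a hypothetical type used only to illustrate the two fixes;
// none of these names come from the Telegraf code base.
type Sampler struct {
	values []float64
}

// Pattern 1: the method must keep its receiver (for example, to satisfy an
// interface), so the unused receiver name is dropped and only the type stays.
// Before: func (s *Sampler) SampleConfig() string { ... }
func (*Sampler) SampleConfig() string {
	return ""
}

// Pattern 2: the receiver was never referenced at all, so the method becomes
// a plain package-level function and callers are updated accordingly.
// Before: func (s *Sampler) formatValue(v float64) string { ... }
func formatValue(v float64) string {
	return fmt.Sprintf("%.2f", v)
}
```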

View File

@ -80,7 +80,7 @@ func (m *Final) Push(acc telegraf.Accumulator) {
}
}
func (m *Final) Reset() {
func (*Final) Reset() {
}
func init() {

View File

@ -184,12 +184,12 @@ func (h *HistogramAggregator) groupFieldsByBuckets(
}
sum += count
h.groupField(metricsWithGroupedFields, name, field, sum, copyTags(tags))
groupField(metricsWithGroupedFields, name, field, sum, copyTags(tags))
}
}
// groupField groups field by count value
func (h *HistogramAggregator) groupField(metricsWithGroupedFields *[]groupedByCountFields, name, field string, count int64, tags map[string]string) {
func groupField(metricsWithGroupedFields *[]groupedByCountFields, name, field string, count int64, tags map[string]string) {
for key, metric := range *metricsWithGroupedFields {
if name == metric.name && isTagsIdentical(tags, metric.tags) {
(*metricsWithGroupedFields)[key].fieldsWithCount[field] = count

View File

@ -54,7 +54,7 @@ func (g *Gatherer) gatherResponses(responses []ReadResponse, tags map[string]str
points = make([]point, 0)
}
responsePoints, responseErrors := g.generatePoints(metric, responses)
responsePoints, responseErrors := generatePoints(metric, responses)
points = append(points, responsePoints...)
for _, err := range responseErrors {
acc.AddError(err)
@ -71,9 +71,8 @@ func (g *Gatherer) gatherResponses(responses []ReadResponse, tags map[string]str
}
}
// generatePoints creates points for the supplied metric from the ReadResponse
// objects returned by the Jolokia client.
func (g *Gatherer) generatePoints(metric Metric, responses []ReadResponse) ([]point, []error) {
// generatePoints creates points for the supplied metric from the ReadResponse objects returned by the Jolokia client.
func generatePoints(metric Metric, responses []ReadResponse) ([]point, []error) {
points := make([]point, 0)
errors := make([]error, 0)

View File

@ -16,11 +16,11 @@ var (
type debugLogger struct{}
func (l *debugLogger) Print(v ...interface{}) {
func (*debugLogger) Print(v ...interface{}) {
log.Trace(v...)
}
func (l *debugLogger) Printf(format string, v ...interface{}) {
func (*debugLogger) Printf(format string, v ...interface{}) {
log.Tracef(format, v...)
}

View File

@ -24,12 +24,12 @@ func InstallHook() {
})
}
func (h *LogHook) Fire(entry *logrus.Entry) error {
func (*LogHook) Fire(entry *logrus.Entry) error {
msg := strings.ReplaceAll(entry.Message, "\n", " ")
log.Print("D! [logrus] ", msg)
return nil
}
func (h *LogHook) Levels() []logrus.Level {
func (*LogHook) Levels() []logrus.Level {
return logrus.AllLevels
}

View File

@ -150,12 +150,12 @@ func (m *mqttv5Client) Publish(topic string, body []byte) error {
return err
}
func (m *mqttv5Client) SubscribeMultiple(filters map[string]byte, callback paho.MessageHandler) error {
func (*mqttv5Client) SubscribeMultiple(filters map[string]byte, callback paho.MessageHandler) error {
_, _ = filters, callback
panic("not implemented")
}
func (m *mqttv5Client) AddRoute(topic string, callback paho.MessageHandler) {
func (*mqttv5Client) AddRoute(topic string, callback paho.MessageHandler) {
_, _ = topic, callback
panic("not implemented")
}

View File

@ -66,14 +66,11 @@ type testDurationInput struct {
Hex int64 `toml:"hex"`
}
func (i *testDurationInput) SampleConfig() string {
func (*testDurationInput) SampleConfig() string {
return ""
}
func (i *testDurationInput) Description() string {
return ""
}
func (i *testDurationInput) Gather(_ telegraf.Accumulator) error {
func (*testDurationInput) Gather(telegraf.Accumulator) error {
return nil
}
@ -81,13 +78,10 @@ type testConfigProcessor struct {
Loaded string `toml:"loaded"`
}
func (p *testConfigProcessor) SampleConfig() string {
func (*testConfigProcessor) SampleConfig() string {
return ""
}
func (p *testConfigProcessor) Description() string {
return ""
}
func (p *testConfigProcessor) Apply(metrics ...telegraf.Metric) []telegraf.Metric {
func (*testConfigProcessor) Apply(metrics ...telegraf.Metric) []telegraf.Metric {
return metrics
}

View File

@ -64,7 +64,7 @@ func New() *Shim {
}
}
func (s *Shim) watchForShutdown(cancel context.CancelFunc) {
func (*Shim) watchForShutdown(cancel context.CancelFunc) {
quit := make(chan os.Signal, 1)
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
go func() {
@ -130,12 +130,12 @@ func (s *Shim) writeProcessedMetrics() error {
}
// LogName satisfies the MetricMaker interface
func (s *Shim) LogName() string {
func (*Shim) LogName() string {
return ""
}
// MakeMetric satisfies the MetricMaker interface
func (s *Shim) MakeMetric(m telegraf.Metric) telegraf.Metric {
func (*Shim) MakeMetric(m telegraf.Metric) telegraf.Metric {
return m // don't need to do anything to it.
}

View File

@ -61,18 +61,18 @@ func runErroringInputPlugin(t *testing.T, interval time.Duration, stdin io.Reade
type erroringInput struct {
}
func (i *erroringInput) SampleConfig() string {
func (*erroringInput) SampleConfig() string {
return ""
}
func (i *erroringInput) Gather(acc telegraf.Accumulator) error {
func (*erroringInput) Gather(acc telegraf.Accumulator) error {
acc.AddError(errors.New("intentional"))
return nil
}
func (i *erroringInput) Start(_ telegraf.Accumulator) error {
func (*erroringInput) Start(telegraf.Accumulator) error {
return nil
}
func (i *erroringInput) Stop() {
func (*erroringInput) Stop() {
}

View File

@ -87,14 +87,10 @@ type testInput struct {
metricProcessed chan bool
}
func (i *testInput) SampleConfig() string {
func (*testInput) SampleConfig() string {
return ""
}
func (i *testInput) Description() string {
return "test"
}
func (i *testInput) Gather(acc telegraf.Accumulator) error {
acc.AddFields("measurement",
map[string]interface{}{
@ -107,11 +103,11 @@ func (i *testInput) Gather(acc telegraf.Accumulator) error {
return nil
}
func (i *testInput) Start(_ telegraf.Accumulator) error {
func (*testInput) Start(telegraf.Accumulator) error {
return nil
}
func (i *testInput) Stop() {
func (*testInput) Stop() {
}
type serviceInput struct {
@ -120,15 +116,11 @@ type serviceInput struct {
SecretValue string `toml:"secret_value"`
}
func (i *serviceInput) SampleConfig() string {
func (*serviceInput) SampleConfig() string {
return ""
}
func (i *serviceInput) Description() string {
return ""
}
func (i *serviceInput) Gather(acc telegraf.Accumulator) error {
func (*serviceInput) Gather(acc telegraf.Accumulator) error {
acc.AddFields("measurement",
map[string]interface{}{
"field": 1,
@ -140,9 +132,9 @@ func (i *serviceInput) Gather(acc telegraf.Accumulator) error {
return nil
}
func (i *serviceInput) Start(_ telegraf.Accumulator) error {
func (*serviceInput) Start(telegraf.Accumulator) error {
return nil
}
func (i *serviceInput) Stop() {
func (*serviceInput) Stop() {
}

View File

@ -65,10 +65,10 @@ type testOutput struct {
MetricsWritten []telegraf.Metric
}
func (o *testOutput) Connect() error {
func (*testOutput) Connect() error {
return nil
}
func (o *testOutput) Close() error {
func (*testOutput) Close() error {
return nil
}
func (o *testOutput) Write(metrics []telegraf.Metric) error {
@ -76,10 +76,6 @@ func (o *testOutput) Write(metrics []telegraf.Metric) error {
return nil
}
func (o *testOutput) SampleConfig() string {
return ""
}
func (o *testOutput) Description() string {
func (*testOutput) SampleConfig() string {
return ""
}

View File

@ -108,10 +108,6 @@ func (p *testProcessor) Apply(in ...telegraf.Metric) []telegraf.Metric {
return in
}
func (p *testProcessor) SampleConfig() string {
return ""
}
func (p *testProcessor) Description() string {
func (*testProcessor) SampleConfig() string {
return ""
}

View File

@ -33,7 +33,7 @@ func (d FieldDict) String() string {
return buf.String()
}
func (d FieldDict) Type() string {
func (FieldDict) Type() string {
return "Fields"
}
@ -49,12 +49,12 @@ func (d FieldDict) Truth() starlark.Bool {
return len(d.metric.FieldList()) != 0
}
func (d FieldDict) Hash() (uint32, error) {
func (FieldDict) Hash() (uint32, error) {
return 0, errors.New("not hashable")
}
// AttrNames implements the starlark.HasAttrs interface.
func (d FieldDict) AttrNames() []string {
func (FieldDict) AttrNames() []string {
return builtinAttrNames(FieldDictMethods)
}

View File

@ -56,7 +56,7 @@ func (m *Metric) String() string {
return buf.String()
}
func (m *Metric) Type() string {
func (*Metric) Type() string {
return "Metric"
}
@ -64,16 +64,16 @@ func (m *Metric) Freeze() {
m.frozen = true
}
func (m *Metric) Truth() starlark.Bool {
func (*Metric) Truth() starlark.Bool {
return true
}
func (m *Metric) Hash() (uint32, error) {
func (*Metric) Hash() (uint32, error) {
return 0, errors.New("not hashable")
}
// AttrNames implements the starlark.HasAttrs interface.
func (m *Metric) AttrNames() []string {
func (*Metric) AttrNames() []string {
return []string{"name", "tags", "fields", "time"}
}

View File

@ -31,7 +31,7 @@ func (d TagDict) String() string {
return buf.String()
}
func (d TagDict) Type() string {
func (TagDict) Type() string {
return "Tags"
}
@ -47,12 +47,12 @@ func (d TagDict) Truth() starlark.Bool {
return len(d.metric.TagList()) != 0
}
func (d TagDict) Hash() (uint32, error) {
func (TagDict) Hash() (uint32, error) {
return 0, errors.New("not hashable")
}
// AttrNames implements the starlark.HasAttrs interface.
func (d TagDict) AttrNames() []string {
func (TagDict) AttrNames() []string {
return builtinAttrNames(TagDictMethods)
}

View File

@ -98,7 +98,7 @@ func (p *Parser) Parse(buf []byte) ([]telegraf.Metric, error) {
}
// ParseLine is not supported by the dropwizard format
func (p *Parser) ParseLine(_ string) (telegraf.Metric, error) {
func (*Parser) ParseLine(string) (telegraf.Metric, error) {
return nil, errors.New("parsing line is not supported by the dropwizard format")
}

View File

@ -34,7 +34,7 @@ func (p Parser) Parse(buf []byte) ([]telegraf.Metric, error) {
}
tags := p.extractTags(values)
fields := p.parseFields(values)
fields := parseFields(values)
for key, value := range p.DefaultTags {
tags[key] = value
@ -80,7 +80,7 @@ func (p Parser) extractTags(values url.Values) map[string]string {
return tags
}
func (p Parser) parseFields(values url.Values) map[string]interface{} {
func parseFields(values url.Values) map[string]interface{} {
fields := make(map[string]interface{})
for key, value := range values {

View File

@ -59,7 +59,7 @@ func (c *Config) validateTemplates() error {
}
// Validate the template has one and only one measurement
if err := c.validateTemplate(template); err != nil {
if err := validateTemplate(template); err != nil {
return err
}
@ -71,7 +71,7 @@ func (c *Config) validateTemplates() error {
if filter != "" {
// Validate filter expression is valid
if err := c.validateFilter(filter); err != nil {
if err := validateFilter(filter); err != nil {
return err
}
}
@ -79,7 +79,7 @@ func (c *Config) validateTemplates() error {
if tags != "" {
// Validate tags
for _, tagStr := range strings.Split(tags, ",") {
if err := c.validateTag(tagStr); err != nil {
if err := validateTag(tagStr); err != nil {
return err
}
}
@ -88,7 +88,7 @@ func (c *Config) validateTemplates() error {
return nil
}
func (c *Config) validateTemplate(template string) error {
func validateTemplate(template string) error {
hasMeasurement := false
for _, p := range strings.Split(template, ".") {
if p == "measurement" || p == "measurement*" {
@ -103,7 +103,7 @@ func (c *Config) validateTemplate(template string) error {
return nil
}
func (c *Config) validateFilter(filter string) error {
func validateFilter(filter string) error {
for _, p := range strings.Split(filter, ".") {
if p == "" {
return fmt.Errorf("filter contains blank section: %s", filter)
@ -116,7 +116,7 @@ func (c *Config) validateFilter(filter string) error {
return nil
}
func (c *Config) validateTag(keyValue string) error {
func validateTag(keyValue string) error {
parts := strings.Split(keyValue, "=")
if len(parts) != 2 {
return fmt.Errorf("invalid template tags: %q", keyValue)

View File

@ -156,35 +156,35 @@ func (h *TestingHandler) Results() []Result {
type BenchmarkingHandler struct {
}
func (h *BenchmarkingHandler) SetMeasurement(_ []byte) error {
func (*BenchmarkingHandler) SetMeasurement([]byte) error {
return nil
}
func (h *BenchmarkingHandler) AddTag(_, _ []byte) error {
func (*BenchmarkingHandler) AddTag(_, _ []byte) error {
return nil
}
func (h *BenchmarkingHandler) AddInt(_, _ []byte) error {
func (*BenchmarkingHandler) AddInt(_, _ []byte) error {
return nil
}
func (h *BenchmarkingHandler) AddUint(_, _ []byte) error {
func (*BenchmarkingHandler) AddUint(_, _ []byte) error {
return nil
}
func (h *BenchmarkingHandler) AddFloat(_, _ []byte) error {
func (*BenchmarkingHandler) AddFloat(_, _ []byte) error {
return nil
}
func (h *BenchmarkingHandler) AddString(_, _ []byte) error {
func (*BenchmarkingHandler) AddString(_, _ []byte) error {
return nil
}
func (h *BenchmarkingHandler) AddBool(_, _ []byte) error {
func (*BenchmarkingHandler) AddBool(_, _ []byte) error {
return nil
}
func (h *BenchmarkingHandler) SetTimestamp(_ []byte) error {
func (*BenchmarkingHandler) SetTimestamp([]byte) error {
return nil
}

View File

@ -435,7 +435,7 @@ func (p *Parser) expandArray(result metricNode, timestamp time.Time) ([]telegraf
if result.Tag {
desiredType = "string"
}
v, err := p.convertType(result.Result, desiredType, result.SetName)
v, err := convertType(result.Result, desiredType, result.SetName)
if err != nil {
return nil, err
}
@ -648,7 +648,7 @@ func (p *Parser) isExcluded(key string) bool {
return false
}
func (p *Parser) ParseLine(_ string) (telegraf.Metric, error) {
func (*Parser) ParseLine(string) (telegraf.Metric, error) {
return nil, errors.New("parsing line is not supported by JSON format")
}
@ -657,7 +657,7 @@ func (p *Parser) SetDefaultTags(tags map[string]string) {
}
// convertType will convert the value parsed from the input JSON to the specified type in the config
func (p *Parser) convertType(input gjson.Result, desiredType, name string) (interface{}, error) {
func convertType(input gjson.Result, desiredType, name string) (interface{}, error) {
switch inputType := input.Value().(type) {
case string:
switch desiredType {

View File

@ -30,7 +30,7 @@ type loopedParser struct {
wsParser *whiteSpaceParser
}
func (ep *nameParser) parse(p *PointParser, pt *Point) error {
func (*nameParser) parse(p *PointParser, pt *Point) error {
// Valid characters are: a-z, A-Z, 0-9, hyphen ("-"), underscore ("_"), dot (".").
// Forward slash ("/") and comma (",") are allowed if metricName is enclosed in double quotes.
// Delta (U+2206) is allowed as the first character of the
@ -44,7 +44,7 @@ func (ep *nameParser) parse(p *PointParser, pt *Point) error {
return nil
}
func (ep *valueParser) parse(p *PointParser, pt *Point) error {
func (*valueParser) parse(p *PointParser, pt *Point) error {
tok, lit := p.scan()
if tok == EOF {
return fmt.Errorf("found %q, expected number", lit)
@ -137,7 +137,7 @@ func (ep *loopedParser) parse(p *PointParser, pt *Point) error {
return nil
}
func (ep *tagParser) parse(p *PointParser, pt *Point) error {
func (*tagParser) parse(p *PointParser, pt *Point) error {
k, err := parseLiteral(p)
if err != nil {
if k == "" {

View File

@ -11,11 +11,11 @@ import (
type cborDocument struct{}
func (d *cborDocument) Parse(buf []byte) (dataNode, error) {
func (*cborDocument) Parse(buf []byte) (dataNode, error) {
return cborquery.Parse(strings.NewReader(string(buf)))
}
func (d *cborDocument) QueryAll(node dataNode, expr string) ([]dataNode, error) {
func (*cborDocument) QueryAll(node dataNode, expr string) ([]dataNode, error) {
// If this panics it's a programming error as we changed the document type while processing
native, err := cborquery.QueryAll(node.(*cborquery.Node), expr)
if err != nil {
@ -29,7 +29,7 @@ func (d *cborDocument) QueryAll(node dataNode, expr string) ([]dataNode, error)
return nodes, nil
}
func (d *cborDocument) CreateXPathNavigator(node dataNode) path.NodeNavigator {
func (*cborDocument) CreateXPathNavigator(node dataNode) path.NodeNavigator {
// If this panics it's a programming error as we changed the document type while processing
return cborquery.CreateXPathNavigator(node.(*cborquery.Node))
}
@ -87,12 +87,12 @@ func (d *cborDocument) GetNodeName(node dataNode, sep string, withParent bool) s
return name
}
func (d *cborDocument) OutputXML(node dataNode) string {
func (*cborDocument) OutputXML(node dataNode) string {
native := node.(*cborquery.Node)
return native.OutputXML()
}
func (d *cborDocument) index(node *cborquery.Node) string {
func (*cborDocument) index(node *cborquery.Node) string {
idx := 0
for n := node; n.PrevSibling != nil; n = n.PrevSibling {

View File

@ -11,11 +11,11 @@ import (
type jsonDocument struct{}
func (d *jsonDocument) Parse(buf []byte) (dataNode, error) {
func (*jsonDocument) Parse(buf []byte) (dataNode, error) {
return jsonquery.Parse(strings.NewReader(string(buf)))
}
func (d *jsonDocument) QueryAll(node dataNode, expr string) ([]dataNode, error) {
func (*jsonDocument) QueryAll(node dataNode, expr string) ([]dataNode, error) {
// If this panics it's a programming error as we changed the document type while processing
native, err := jsonquery.QueryAll(node.(*jsonquery.Node), expr)
if err != nil {
@ -29,7 +29,7 @@ func (d *jsonDocument) QueryAll(node dataNode, expr string) ([]dataNode, error)
return nodes, nil
}
func (d *jsonDocument) CreateXPathNavigator(node dataNode) path.NodeNavigator {
func (*jsonDocument) CreateXPathNavigator(node dataNode) path.NodeNavigator {
// If this panics it's a programming error as we changed the document type while processing
return jsonquery.CreateXPathNavigator(node.(*jsonquery.Node))
}
@ -87,12 +87,12 @@ func (d *jsonDocument) GetNodeName(node dataNode, sep string, withParent bool) s
return name
}
func (d *jsonDocument) OutputXML(node dataNode) string {
func (*jsonDocument) OutputXML(node dataNode) string {
native := node.(*jsonquery.Node)
return native.OutputXML()
}
func (d *jsonDocument) index(node *jsonquery.Node) string {
func (*jsonDocument) index(node *jsonquery.Node) string {
idx := 0
for n := node; n.PrevSibling != nil; n = n.PrevSibling {

View File

@ -11,7 +11,7 @@ import (
type msgpackDocument jsonDocument
func (d *msgpackDocument) Parse(buf []byte) (dataNode, error) {
func (*msgpackDocument) Parse(buf []byte) (dataNode, error) {
var json bytes.Buffer
// Unmarshal the message-pack binary message to JSON and proceed with the jsonquery class

View File

@ -113,7 +113,7 @@ func (d *protobufDocument) Parse(buf []byte) (dataNode, error) {
return protobufquery.Parse(msg)
}
func (d *protobufDocument) QueryAll(node dataNode, expr string) ([]dataNode, error) {
func (*protobufDocument) QueryAll(node dataNode, expr string) ([]dataNode, error) {
// If this panics it's a programming error as we changed the document type while processing
native, err := protobufquery.QueryAll(node.(*protobufquery.Node), expr)
if err != nil {
@ -127,7 +127,7 @@ func (d *protobufDocument) QueryAll(node dataNode, expr string) ([]dataNode, err
return nodes, nil
}
func (d *protobufDocument) CreateXPathNavigator(node dataNode) path.NodeNavigator {
func (*protobufDocument) CreateXPathNavigator(node dataNode) path.NodeNavigator {
// If this panics it's a programming error as we changed the document type while processing
return protobufquery.CreateXPathNavigator(node.(*protobufquery.Node))
}
@ -194,12 +194,12 @@ func (d *protobufDocument) GetNodeName(node dataNode, sep string, withParent boo
return name
}
func (d *protobufDocument) OutputXML(node dataNode) string {
func (*protobufDocument) OutputXML(node dataNode) string {
native := node.(*protobufquery.Node)
return native.OutputXML()
}
func (d *protobufDocument) index(node *protobufquery.Node) string {
func (*protobufDocument) index(node *protobufquery.Node) string {
idx := 0
for n := node; n.PrevSibling != nil; n = n.PrevSibling {

View File

@ -9,11 +9,11 @@ import (
type xmlDocument struct{}
func (d *xmlDocument) Parse(buf []byte) (dataNode, error) {
func (*xmlDocument) Parse(buf []byte) (dataNode, error) {
return xmlquery.Parse(strings.NewReader(string(buf)))
}
func (d *xmlDocument) QueryAll(node dataNode, expr string) ([]dataNode, error) {
func (*xmlDocument) QueryAll(node dataNode, expr string) ([]dataNode, error) {
// If this panics it's a programming error as we changed the document type while processing
native, err := xmlquery.QueryAll(node.(*xmlquery.Node), expr)
if err != nil {
@ -27,7 +27,7 @@ func (d *xmlDocument) QueryAll(node dataNode, expr string) ([]dataNode, error) {
return nodes, nil
}
func (d *xmlDocument) CreateXPathNavigator(node dataNode) path.NodeNavigator {
func (*xmlDocument) CreateXPathNavigator(node dataNode) path.NodeNavigator {
// If this panics it's a programming error as we changed the document type while processing
return xmlquery.CreateXPathNavigator(node.(*xmlquery.Node))
}
@ -60,14 +60,14 @@ func (d *xmlDocument) GetNodePath(node, relativeTo dataNode, sep string) string
return nodepath[:len(nodepath)-1]
}
func (d *xmlDocument) GetNodeName(node dataNode, _ string, _ bool) string {
func (*xmlDocument) GetNodeName(node dataNode, _ string, _ bool) string {
// If this panics it's a programming error as we changed the document type while processing
nativeNode := node.(*xmlquery.Node)
return nativeNode.Data
}
func (d *xmlDocument) OutputXML(node dataNode) string {
func (*xmlDocument) OutputXML(node dataNode) string {
native := node.(*xmlquery.Node)
return native.OutputXML(false)
}

View File

@ -39,7 +39,7 @@ type RelOpts struct {
}
// applyFunc applies the specified function to the metric
func (o *Options) applyFunc(bo BaseOpts, fn ProcessorFunc, metric telegraf.Metric) {
func applyFunc(bo BaseOpts, fn ProcessorFunc, metric telegraf.Metric) {
if bo.Tag != "" {
if v, ok := metric.GetTag(bo.Tag); ok {
targetTag := bo.Tag
@ -75,15 +75,15 @@ func stemFilePath(path string) string {
func (o *Options) processMetric(metric telegraf.Metric) {
// Stem
for _, v := range o.Stem {
o.applyFunc(v, stemFilePath, metric)
applyFunc(v, stemFilePath, metric)
}
// Basename
for _, v := range o.BaseName {
o.applyFunc(v, filepath.Base, metric)
applyFunc(v, filepath.Base, metric)
}
// Rel
for _, v := range o.Rel {
o.applyFunc(v.BaseOpts, func(s string) string {
applyFunc(v.BaseOpts, func(s string) string {
relPath, err := filepath.Rel(v.BasePath, s)
if err != nil {
o.Log.Errorf("filepath processor failed to process relative filepath %s: %v", s, err)
@ -94,15 +94,15 @@ func (o *Options) processMetric(metric telegraf.Metric) {
}
// Dirname
for _, v := range o.DirName {
o.applyFunc(v, filepath.Dir, metric)
applyFunc(v, filepath.Dir, metric)
}
// Clean
for _, v := range o.Clean {
o.applyFunc(v, filepath.Clean, metric)
applyFunc(v, filepath.Clean, metric)
}
// ToSlash
for _, v := range o.ToSlash {
o.applyFunc(v, filepath.ToSlash, metric)
applyFunc(v, filepath.ToSlash, metric)
}
}

View File

@ -132,11 +132,11 @@ func (d *IfName) invalidate(agent string) {
func (d *IfName) Start(acc telegraf.Accumulator) error {
var err error
d.ifTable, err = d.makeTable("1.3.6.1.2.1.2.2.1.2")
d.ifTable, err = makeTable("1.3.6.1.2.1.2.2.1.2")
if err != nil {
return fmt.Errorf("preparing ifTable: %w", err)
}
d.ifXTable, err = d.makeTable("1.3.6.1.2.1.31.1.1.1.1")
d.ifXTable, err = makeTable("1.3.6.1.2.1.31.1.1.1.1")
if err != nil {
return fmt.Errorf("preparing ifXTable: %w", err)
}
@ -246,11 +246,11 @@ func (d *IfName) getMapRemoteNoMock(agent string) (nameMap, error) {
// try ifXtable and ifName first. if that fails, fall back to
// ifTable and ifDescr
var m nameMap
if m, err = d.buildMap(gs, d.ifXTable); err == nil {
if m, err = buildMap(gs, d.ifXTable); err == nil {
return m, nil
}
if m, err = d.buildMap(gs, d.ifTable); err == nil {
if m, err = buildMap(gs, d.ifTable); err == nil {
return m, nil
}
@ -271,7 +271,7 @@ func init() {
})
}
func (d *IfName) makeTable(oid string) (*snmp.Table, error) {
func makeTable(oid string) (*snmp.Table, error) {
var err error
tab := snmp.Table{
Name: "ifTable",
@ -290,7 +290,7 @@ func (d *IfName) makeTable(oid string) (*snmp.Table, error) {
return &tab, nil
}
func (d *IfName) buildMap(gs snmp.GosnmpWrapper, tab *snmp.Table) (nameMap, error) {
func buildMap(gs snmp.GosnmpWrapper, tab *snmp.Table) (nameMap, error) {
var err error
rtab, err := tab.Build(gs, true)

View File

@ -31,7 +31,7 @@ func TestTableIntegration(t *testing.T) {
d := IfName{}
err := d.Init()
require.NoError(t, err)
tab, err := d.makeTable("1.3.6.1.2.1.2.2.1.2")
tab, err := makeTable("1.3.6.1.2.1.2.2.1.2")
require.NoError(t, err)
gs, err := snmp.NewWrapper(*snmp.DefaultClientConfig())
@ -43,7 +43,7 @@ func TestTableIntegration(t *testing.T) {
require.NoError(t, err)
// Could use ifIndex but oid index is always the same
m, err := d.buildMap(gs, tab)
m, err := buildMap(gs, tab)
require.NoError(t, err)
require.NotEmpty(t, m)
}

View File

@ -69,7 +69,7 @@ func (p *Parser) Apply(metrics ...telegraf.Metric) []telegraf.Metric {
continue
}
value, err := p.toBytes(field.Value)
value, err := toBytes(field.Value)
if err != nil {
p.Log.Errorf("could not convert field %s: %v; skipping", field.Key, err)
continue
@ -181,7 +181,7 @@ func (p *Parser) parseValue(value string) ([]telegraf.Metric, error) {
return p.parser.Parse([]byte(value))
}
func (p *Parser) toBytes(value interface{}) ([]byte, error) {
func toBytes(value interface{}) ([]byte, error) {
if v, ok := value.(string); ok {
return []byte(v), nil
}

View File

@ -194,7 +194,7 @@ func (pn *PortName) Apply(metrics ...telegraf.Metric) []telegraf.Metric {
return metrics
}
func (pn *PortName) Init() error {
func (*PortName) Init() error {
services = make(sMap)
readServicesFile()
return nil

View File

@ -125,13 +125,13 @@ func TestLookupTimeout(t *testing.T) {
type timeoutResolver struct{}
func (r *timeoutResolver) LookupAddr(_ context.Context, _ string) (names []string, err error) {
func (*timeoutResolver) LookupAddr(context.Context, string) (names []string, err error) {
return nil, errors.New("timeout")
}
type localResolver struct{}
func (r *localResolver) LookupAddr(_ context.Context, _ string) (names []string, err error) {
func (*localResolver) LookupAddr(context.Context, string) (names []string, err error) {
return []string{"localhost"}, nil
}

View File

@ -22,11 +22,11 @@ type testSNMPConnection struct {
calls atomic.Uint64
}
func (tsc *testSNMPConnection) Host() string {
func (*testSNMPConnection) Host() string {
return "127.0.0.1"
}
func (tsc *testSNMPConnection) Get(_ []string) (*gosnmp.SnmpPacket, error) {
func (*testSNMPConnection) Get([]string) (*gosnmp.SnmpPacket, error) {
return &gosnmp.SnmpPacket{}, errors.New("not implemented")
}
@ -48,7 +48,7 @@ func (tsc *testSNMPConnection) Walk(oid string, wf gosnmp.WalkFunc) error {
return nil
}
func (tsc *testSNMPConnection) Reconnect() error {
func (*testSNMPConnection) Reconnect() error {
return errors.New("not implemented")
}

View File

@ -42,7 +42,7 @@ func (s *Starlark) Init() error {
return nil
}
func (s *Starlark) Start(_ telegraf.Accumulator) error {
func (*Starlark) Start(telegraf.Accumulator) error {
return nil
}
@ -120,7 +120,7 @@ func (s *Starlark) Add(origMetric telegraf.Metric, acc telegraf.Accumulator) err
return nil
}
func (s *Starlark) Stop() {}
func (*Starlark) Stop() {}
func containsMetric(metrics []telegraf.Metric, target telegraf.Metric) bool {
for _, m := range metrics {

View File

@ -36,7 +36,7 @@ func (sp *streamingProcessor) Add(m telegraf.Metric, acc telegraf.Accumulator) e
return nil
}
func (sp *streamingProcessor) Stop() {
func (*streamingProcessor) Stop() {
}
// Make the streamingProcessor of type Initializer to be able

View File

@ -71,7 +71,7 @@ func (d *Docker) List() ([]string, error) {
return secrets, nil
}
func (d *Docker) Set(_, _ string) error {
func (*Docker) Set(_, _ string) error {
return errors.New("secret-store does not support creating secrets")
}

View File

@ -44,7 +44,7 @@ type HTTP struct {
decrypter Decrypter
}
func (h *HTTP) SampleConfig() string {
func (*HTTP) SampleConfig() string {
return sampleConfig
}
@ -106,7 +106,7 @@ func (h *HTTP) Get(key string) ([]byte, error) {
}
// Set sets the given secret for the given key
func (h *HTTP) Set(_, _ string) error {
func (*HTTP) Set(_, _ string) error {
return errors.New("setting secrets not supported")
}

View File

@ -170,7 +170,7 @@ func (o *OAuth2) Get(key string) ([]byte, error) {
}
// Set sets the given secret for the given key
func (o *OAuth2) Set(_, _ string) error {
func (*OAuth2) Set(_, _ string) error {
return errors.New("not supported")
}

View File

@ -101,7 +101,7 @@ func (s *Systemd) List() ([]string, error) {
return secrets, nil
}
func (s *Systemd) Set(_, _ string) error {
func (*Systemd) Set(_, _ string) error {
return errors.New("secret-store does not support creating secrets")
}

View File

@ -59,25 +59,25 @@ func (e *Entry) fillDefaults() error {
case "":
return errors.New("missing data format")
case "float64":
e.converter = e.convertToFloat64
e.converter = convertToFloat64
case "float32":
e.converter = e.convertToFloat32
e.converter = convertToFloat32
case "uint64":
e.converter = e.convertToUint64
e.converter = convertToUint64
case "uint32":
e.converter = e.convertToUint32
e.converter = convertToUint32
case "uint16":
e.converter = e.convertToUint16
e.converter = convertToUint16
case "uint8":
e.converter = e.convertToUint8
e.converter = convertToUint8
case "int64":
e.converter = e.convertToInt64
e.converter = convertToInt64
case "int32":
e.converter = e.convertToInt32
e.converter = convertToInt32
case "int16":
e.converter = e.convertToInt16
e.converter = convertToInt16
case "int8":
e.converter = e.convertToInt8
e.converter = convertToInt8
case "string":
switch e.StringTerminator {
case "", "null":

View File

@ -27,7 +27,7 @@ func (e *Entry) convertToString(value interface{}, _ binary.ByteOrder) ([]byte,
return buf, nil
}
func (e *Entry) convertToUint64(value interface{}, order binary.ByteOrder) ([]byte, error) {
func convertToUint64(value interface{}, order binary.ByteOrder) ([]byte, error) {
buf := make([]byte, 8)
v, err := internal.ToUint64(value)
@ -35,7 +35,7 @@ func (e *Entry) convertToUint64(value interface{}, order binary.ByteOrder) ([]by
return buf, err
}
func (e *Entry) convertToUint32(value interface{}, order binary.ByteOrder) ([]byte, error) {
func convertToUint32(value interface{}, order binary.ByteOrder) ([]byte, error) {
buf := make([]byte, 4)
v, err := internal.ToUint32(value)
@ -43,7 +43,7 @@ func (e *Entry) convertToUint32(value interface{}, order binary.ByteOrder) ([]by
return buf, err
}
func (e *Entry) convertToUint16(value interface{}, order binary.ByteOrder) ([]byte, error) {
func convertToUint16(value interface{}, order binary.ByteOrder) ([]byte, error) {
buf := make([]byte, 2)
v, err := internal.ToUint16(value)
@ -51,12 +51,12 @@ func (e *Entry) convertToUint16(value interface{}, order binary.ByteOrder) ([]by
return buf, err
}
func (e *Entry) convertToUint8(value interface{}, _ binary.ByteOrder) ([]byte, error) {
func convertToUint8(value interface{}, _ binary.ByteOrder) ([]byte, error) {
v, err := internal.ToUint8(value)
return []byte{v}, err
}
func (e *Entry) convertToInt64(value interface{}, order binary.ByteOrder) ([]byte, error) {
func convertToInt64(value interface{}, order binary.ByteOrder) ([]byte, error) {
buf := make([]byte, 8)
v, err := internal.ToInt64(value)
@ -64,7 +64,7 @@ func (e *Entry) convertToInt64(value interface{}, order binary.ByteOrder) ([]byt
return buf, err
}
func (e *Entry) convertToInt32(value interface{}, order binary.ByteOrder) ([]byte, error) {
func convertToInt32(value interface{}, order binary.ByteOrder) ([]byte, error) {
buf := make([]byte, 4)
v, err := internal.ToInt32(value)
@ -72,7 +72,7 @@ func (e *Entry) convertToInt32(value interface{}, order binary.ByteOrder) ([]byt
return buf, err
}
func (e *Entry) convertToInt16(value interface{}, order binary.ByteOrder) ([]byte, error) {
func convertToInt16(value interface{}, order binary.ByteOrder) ([]byte, error) {
buf := make([]byte, 2)
v, err := internal.ToInt16(value)
@ -80,12 +80,12 @@ func (e *Entry) convertToInt16(value interface{}, order binary.ByteOrder) ([]byt
return buf, err
}
func (e *Entry) convertToInt8(value interface{}, _ binary.ByteOrder) ([]byte, error) {
func convertToInt8(value interface{}, _ binary.ByteOrder) ([]byte, error) {
v, err := internal.ToInt8(value)
return []byte{uint8(v)}, err
}
func (e *Entry) convertToFloat64(value interface{}, order binary.ByteOrder) ([]byte, error) {
func convertToFloat64(value interface{}, order binary.ByteOrder) ([]byte, error) {
v, err := internal.ToFloat64(value)
if err != nil {
return nil, err
@ -97,7 +97,7 @@ func (e *Entry) convertToFloat64(value interface{}, order binary.ByteOrder) ([]b
return buf, nil
}
func (e *Entry) convertToFloat32(value interface{}, order binary.ByteOrder) ([]byte, error) {
func convertToFloat32(value interface{}, order binary.ByteOrder) ([]byte, error) {
v, err := internal.ToFloat32(value)
if err != nil {
return nil, err

View File

@ -19,13 +19,13 @@ func marshalMetric(buf []byte, metric telegraf.Metric) ([]byte, error) {
// Serialize implements serializers.Serializer.Serialize
// github.com/influxdata/telegraf/plugins/serializers/Serializer
func (s *Serializer) Serialize(metric telegraf.Metric) ([]byte, error) {
func (*Serializer) Serialize(metric telegraf.Metric) ([]byte, error) {
return marshalMetric(nil, metric)
}
// SerializeBatch implements serializers.Serializer.SerializeBatch
// github.com/influxdata/telegraf/plugins/serializers/Serializer
func (s *Serializer) SerializeBatch(metrics []telegraf.Metric) ([]byte, error) {
func (*Serializer) SerializeBatch(metrics []telegraf.Metric) ([]byte, error) {
buf := make([]byte, 0)
for _, m := range metrics {
var err error

View File

@ -41,7 +41,7 @@ func (s *Serializer) Init() error {
}
func (s *Serializer) Serialize(metric telegraf.Metric) (out []byte, err error) {
m := s.createObject(metric)
m := createObject(metric)
if s.Format == "jsonv2" {
obj := OIMetricsObj{Records: m}
@ -53,7 +53,7 @@ func (s *Serializer) Serialize(metric telegraf.Metric) (out []byte, err error) {
func (s *Serializer) SerializeBatch(metrics []telegraf.Metric) (out []byte, err error) {
objects := make([]OIMetric, 0)
for _, metric := range metrics {
objects = append(objects, s.createObject(metric)...)
objects = append(objects, createObject(metric)...)
}
if s.Format == "jsonv2" {
@ -64,7 +64,7 @@ func (s *Serializer) SerializeBatch(metrics []telegraf.Metric) (out []byte, err
return json.Marshal(objects)
}
func (s *Serializer) createObject(metric telegraf.Metric) OIMetrics {
func createObject(metric telegraf.Metric) OIMetrics {
/* ServiceNow Operational Intelligence supports an array of JSON objects.
** Following elements accepted in the request body:
** metric_type: The name of the metric