chore(linters): Fix non-input/output plugin errcheck warnings (#15472)

Dane Strandboge authored 2024-06-12 08:52:42 -05:00; committed by GitHub
parent d097a91298
commit 28543bae7c
GPG Key ID: B5690EEEBB952194
46 changed files with 259 additions and 127 deletions

View File

@@ -339,9 +339,13 @@ issues:
     - path: cmd/telegraf/(main|printer|cmd_plugins).go
       text: "Error return value of `outputBuffer.Write` is not checked" #errcheck
-    - path: plugins/*
+    # temporary disabling of errcheck as this linter is gradually being applied across the codebase
+    - path: plugins/inputs/*
       linters:
-        - errcheck # temporary as this linter is gradually being applied across the codebase
+        - errcheck
+    - path: plugins/outputs/*
+      linters:
+        - errcheck
    - path: _test\.go
      text: "Potential hardcoded credentials" #gosec:G101

View File

@@ -96,7 +96,8 @@ func TestTwoFullEventsWithoutParameter(t *testing.T) {
 	require.NoError(t, err)
 	startTime := time.Now()
-	duration, _ := time.ParseDuration("2s")
+	duration, err := time.ParseDuration("2s")
+	require.NoError(t, err)
 	endTime := startTime.Add(duration)
 	first := metric.New("One Field",
@@ -273,7 +274,8 @@ func TestMergesDifferentMetricsWithSameHash(t *testing.T) {
 	require.NoError(t, err)
 	startTime := time.Now()
-	duration, _ := time.ParseDuration("2s")
+	duration, err := time.ParseDuration("2s")
+	require.NoError(t, err)
 	endTime := startTime.Add(duration)
 	part1 := metric.New("TestMetric",
 		map[string]string{"state": "full"},
@@ -365,11 +367,11 @@ func TestAddMetricsResetsRollOver(t *testing.T) {
 func TestCalculatesCorrectDerivativeOnTwoConsecutivePeriods(t *testing.T) {
 	acc := testutil.Accumulator{}
-	period, _ := time.ParseDuration("10s")
+	period, err := time.ParseDuration("10s")
+	require.NoError(t, err)
 	derivative := NewDerivative()
 	derivative.Log = testutil.Logger{}
-	err := derivative.Init()
-	require.NoError(t, err)
+	require.NoError(t, derivative.Init())
 	startTime := time.Now()
 	first := metric.New("One Field",

View File

@@ -62,9 +62,11 @@ func (q *Quantile) Add(in telegraf.Metric) {
 	}
 	for k, field := range in.Fields() {
 		if v, isconvertible := convert(field); isconvertible {
-			// This should never error out as we tested it in Init()
-			algo, _ := q.newAlgorithm(q.Compression)
-			err := algo.Add(v)
+			algo, err := q.newAlgorithm(q.Compression)
+			if err != nil {
+				q.Log.Errorf("generating algorithm %s: %v", k, err)
+			}
+			err = algo.Add(v)
 			if err != nil {
 				q.Log.Errorf("adding field %s: %v", k, err)
 			}
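
One caveat with the new error handling: if newAlgorithm ever did fail, algo would be nil and the following Add call would panic. A defensive variant (a hypothetical alternative, not what the commit does) would skip the field instead; this fragment slots into the same loop:

algo, err := q.newAlgorithm(q.Compression)
if err != nil {
	q.Log.Errorf("generating algorithm %s: %v", k, err)
	continue // do not call Add on a nil algorithm
}
if err := algo.Add(v); err != nil {
	q.Log.Errorf("adding field %s: %v", k, err)
}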

View File

@@ -49,7 +49,8 @@ var m2 = metric.New("m1",
 )
 func BenchmarkApply(b *testing.B) {
-	minmax, _ := newMinMax()
+	minmax, err := newMinMax()
+	require.NoError(b, err)
 	for n := 0; n < b.N; n++ {
 		minmax.Add(m1)

View File

@@ -87,7 +87,8 @@ func newFakeServer(t *testing.T) fakeServer {
 				w.WriteHeader(http.StatusForbidden)
 				return
 			}
-			_, _ = w.Write([]byte("good test response"))
+			_, err := w.Write([]byte("good test response"))
+			require.NoError(t, err)
 		}
 	})),
 	int32: &c,

View File

@@ -330,13 +330,14 @@ func TestValidateNodeToAdd(t *testing.T) {
 			name: "valid",
 			existing: map[metricParts]struct{}{},
 			nmm: func() *NodeMetricMapping {
-				nmm, _ := NewNodeMetricMapping("testmetric", NodeSettings{
+				nmm, err := NewNodeMetricMapping("testmetric", NodeSettings{
 					FieldName: "f",
 					Namespace: "2",
 					IdentifierType: "s",
 					Identifier: "hf",
 					TagsSlice: [][]string{},
 				}, map[string]string{})
+				require.NoError(t, err)
 				return nmm
 			}(),
 			err: nil,
@@ -345,13 +346,14 @@ func TestValidateNodeToAdd(t *testing.T) {
 			name: "empty field name not allowed",
 			existing: map[metricParts]struct{}{},
 			nmm: func() *NodeMetricMapping {
-				nmm, _ := NewNodeMetricMapping("testmetric", NodeSettings{
+				nmm, err := NewNodeMetricMapping("testmetric", NodeSettings{
 					FieldName: "",
 					Namespace: "2",
 					IdentifierType: "s",
 					Identifier: "hf",
 					TagsSlice: [][]string{},
 				}, map[string]string{})
+				require.NoError(t, err)
 				return nmm
 			}(),
 			err: errors.New(`empty name in ""`),
@@ -360,13 +362,14 @@ func TestValidateNodeToAdd(t *testing.T) {
 			name: "empty namespace not allowed",
 			existing: map[metricParts]struct{}{},
 			nmm: func() *NodeMetricMapping {
-				nmm, _ := NewNodeMetricMapping("testmetric", NodeSettings{
+				nmm, err := NewNodeMetricMapping("testmetric", NodeSettings{
 					FieldName: "f",
 					Namespace: "",
 					IdentifierType: "s",
 					Identifier: "hf",
 					TagsSlice: [][]string{},
 				}, map[string]string{})
+				require.NoError(t, err)
 				return nmm
 			}(),
 			err: errors.New("empty node namespace not allowed"),
@@ -375,13 +378,14 @@ func TestValidateNodeToAdd(t *testing.T) {
 			name: "empty identifier type not allowed",
 			existing: map[metricParts]struct{}{},
 			nmm: func() *NodeMetricMapping {
-				nmm, _ := NewNodeMetricMapping("testmetric", NodeSettings{
+				nmm, err := NewNodeMetricMapping("testmetric", NodeSettings{
 					FieldName: "f",
 					Namespace: "2",
 					IdentifierType: "",
 					Identifier: "hf",
 					TagsSlice: [][]string{},
 				}, map[string]string{})
+				require.NoError(t, err)
 				return nmm
 			}(),
 			err: errors.New(`invalid identifier type "" in "f"`),
@@ -390,13 +394,14 @@ func TestValidateNodeToAdd(t *testing.T) {
 			name: "invalid identifier type not allowed",
 			existing: map[metricParts]struct{}{},
 			nmm: func() *NodeMetricMapping {
-				nmm, _ := NewNodeMetricMapping("testmetric", NodeSettings{
+				nmm, err := NewNodeMetricMapping("testmetric", NodeSettings{
 					FieldName: "f",
 					Namespace: "2",
 					IdentifierType: "j",
 					Identifier: "hf",
 					TagsSlice: [][]string{},
 				}, map[string]string{})
+				require.NoError(t, err)
 				return nmm
 			}(),
 			err: errors.New(`invalid identifier type "j" in "f"`),
@@ -407,13 +412,14 @@ func TestValidateNodeToAdd(t *testing.T) {
 				{metricName: "testmetric", fieldName: "f", tags: "t1=v1, t2=v2"}: {},
 			},
 			nmm: func() *NodeMetricMapping {
-				nmm, _ := NewNodeMetricMapping("testmetric", NodeSettings{
+				nmm, err := NewNodeMetricMapping("testmetric", NodeSettings{
 					FieldName: "f",
 					Namespace: "2",
 					IdentifierType: "s",
 					Identifier: "hf",
 					TagsSlice: [][]string{{"t1", "v1"}, {"t2", "v2"}},
 				}, map[string]string{})
+				require.NoError(t, err)
 				return nmm
 			}(),
 			err: errors.New(`name "f" is duplicated (metric name "testmetric", tags "t1=v1, t2=v2")`),
@@ -422,13 +428,14 @@ func TestValidateNodeToAdd(t *testing.T) {
 			name: "identifier type mismatch",
 			existing: map[metricParts]struct{}{},
 			nmm: func() *NodeMetricMapping {
-				nmm, _ := NewNodeMetricMapping("testmetric", NodeSettings{
+				nmm, err := NewNodeMetricMapping("testmetric", NodeSettings{
 					FieldName: "f",
 					Namespace: "2",
 					IdentifierType: "i",
 					Identifier: "hf",
 					TagsSlice: [][]string{},
 				}, map[string]string{})
+				require.NoError(t, err)
 				return nmm
 			}(),
 			err: errors.New(`identifier type "i" does not match the type of identifier "hf"`),
@@ -449,13 +456,14 @@ func TestValidateNodeToAdd(t *testing.T) {
 				name: "identifier type " + idT + " allowed",
 				existing: map[metricParts]struct{}{},
 				nmm: func() *NodeMetricMapping {
-					nmm, _ := NewNodeMetricMapping("testmetric", NodeSettings{
+					nmm, err := NewNodeMetricMapping("testmetric", NodeSettings{
 						FieldName: "f",
 						Namespace: "2",
 						IdentifierType: idT,
 						Identifier: idV,
 						TagsSlice: [][]string{},
 					}, map[string]string{})
+					require.NoError(t, err)
 					return nmm
 				}(),
 				err: nil,
@@ -771,7 +779,8 @@ func TestUpdateNodeValue(t *testing.T) {
 		t.Run(tt.testname, func(t *testing.T) {
 			o.LastReceivedData = make([]NodeValue, 2)
 			for i, step := range tt.steps {
-				v, _ := ua.NewVariant(step.value)
+				v, err := ua.NewVariant(step.value)
+				require.NoError(t, err)
 				o.UpdateNodeValue(0, &ua.DataValue{
 					Value: v,
 					Status: step.status,

View File

@@ -32,7 +32,10 @@ func newTempDir() (string, error) {
 }
 func generateCert(host string, rsaBits int, certFile, keyFile string, dur time.Duration) (cert string, key string, err error) {
-	dir, _ := newTempDir()
+	dir, err := newTempDir()
+	if err != nil {
+		return "", "", fmt.Errorf("failed to create certificate: %w", err)
+	}
 	if len(host) == 0 {
 		return "", "", errors.New("missing required host parameter")

View File

@@ -47,7 +47,8 @@ func TestInputShimStdinSignalingWorks(t *testing.T) {
 	err = stdinWriter.Close()
 	require.NoError(t, err)
 	go func() {
-		_, _ = io.ReadAll(r)
+		_, err = io.ReadAll(r)
+		require.NoError(t, err)
 	}()
 	// check that it exits cleanly
 	<-exited
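
A caveat on this pattern: require.NoError calls t.FailNow, which the testing package documents as safe only from the goroutine running the test, so asserting inside a spawned goroutine is fragile. A channel-based alternative (illustrative, not part of the commit) would report the error back to the test goroutine:

readErr := make(chan error, 1)
go func() {
	_, err := io.ReadAll(r)
	readErr <- err
}()
<-exited
require.NoError(t, <-readErr) // asserted on the test goroutine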

View File

@@ -88,7 +88,8 @@ func testSendAndReceive(t *testing.T, fieldKey string, fieldValue string) {
 	require.True(t, ok)
 	require.Equal(t, fieldValue, val2)
 	go func() {
-		_, _ = io.ReadAll(r)
+		_, err = io.ReadAll(r)
+		require.NoError(t, err)
 	}()
 	wg.Wait()
 }

View File

@@ -488,7 +488,7 @@ func TestClosingConnections(t *testing.T) {
 	sock.Close()
 	// Verify that plugin.Stop() closed the client's connection
-	_ = client.SetReadDeadline(time.Now().Add(time.Second))
+	require.NoError(t, client.SetReadDeadline(time.Now().Add(time.Second)))
 	buf := []byte{1}
 	_, err = client.Read(buf)
 	require.Equal(t, err, io.EOF)

View File

@@ -102,11 +102,17 @@ func (l *streamListener) setupVsock(u *url.URL) error {
 	}
 	// Parse CID and port number from address string both being 32-bit
 	// source: https://man7.org/linux/man-pages/man7/vsock.7.html
-	cid, _ := strconv.ParseUint(addrTuple[0], 10, 32)
+	cid, err := strconv.ParseUint(addrTuple[0], 10, 32)
+	if err != nil {
+		return fmt.Errorf("failed to parse CID %s: %w", addrTuple[0], err)
+	}
 	if (cid >= uint64(math.Pow(2, 32))-1) && (cid <= 0) {
 		return fmt.Errorf("CID %d is out of range", cid)
 	}
-	port, _ := strconv.ParseUint(addrTuple[1], 10, 32)
+	port, err := strconv.ParseUint(addrTuple[1], 10, 32)
+	if err != nil {
+		return fmt.Errorf("failed to parse port number %s: %w", addrTuple[1], err)
+	}
 	if (port >= uint64(math.Pow(2, 32))-1) && (port <= 0) {
 		return fmt.Errorf("port number %d is out of range", port)
 	}
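
Note that strconv.ParseUint with bitSize 32 already rejects anything outside the 32-bit range, so once its error is handled the explicit range checks that follow are effectively unreachable (a value can never satisfy `x >= 2^32-1 && x <= 0` anyway). A quick demonstration:

package main

import (
	"fmt"
	"strconv"
)

func main() {
	// bitSize 32 caps the accepted range at math.MaxUint32.
	_, err := strconv.ParseUint("4294967296", 10, 32) // 2^32, one past the max
	fmt.Println(err) // strconv.ParseUint: parsing "4294967296": value out of range
}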

View File

@@ -248,8 +248,8 @@ func (d *Decoder) DecodePathElement(origin, path string, value interface{}) (int
 	}
 	for _, root := range rootNodes {
-		node, _ := yang.FindNode(root, path)
-		if node == nil {
+		node, err := yang.FindNode(root, path)
+		if node == nil || err != nil {
 			// The path does not exist in this root node
 			continue
 		}

View File

@@ -115,7 +115,8 @@ func BenchmarkParsing(b *testing.B) {
 	b.ResetTimer()
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkData)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkData)
 	}
 }
@@ -176,6 +177,7 @@ func BenchmarkParsingBinary(b *testing.B) {
 	require.NoError(b, err)
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkData)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkData)
 	}
 }
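
This pattern repeats across the benchmark changes: the return values are deliberately dropped because only throughput is being measured, and the //nolint:errcheck directive on the line above suppresses the linter for exactly that one statement while the trailing comment records the justification. A minimal sketch:

package bench

import (
	"strconv"
	"testing"
)

func BenchmarkAtoi(b *testing.B) {
	for n := 0; n < b.N; n++ {
		//nolint:errcheck // benchmarking, so the result and error are deliberately dropped
		strconv.Atoi("42")
	}
}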

View File

@@ -1608,6 +1608,7 @@ func BenchmarkParsing(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkData[n%2])
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkData[n%2])
 	}
 }

View File

@@ -405,6 +405,7 @@ func BenchmarkParsing(b *testing.B) {
 	b.ResetTimer()
 	for n := 0; n < b.N; n++ {
-		_, _ = parser.Parse(bytes)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		parser.Parse(bytes)
 	}
 }

View File

@@ -1574,6 +1574,7 @@ func BenchmarkParsing(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }

View File

@@ -47,11 +47,26 @@ func (p *Parser) Parse(buf []byte) ([]telegraf.Metric, error) {
 		return nil, err
 	}
-	metrics = p.readDWMetrics("counter", dwr["counters"], metrics, metricTime)
-	metrics = p.readDWMetrics("meter", dwr["meters"], metrics, metricTime)
-	metrics = p.readDWMetrics("gauge", dwr["gauges"], metrics, metricTime)
-	metrics = p.readDWMetrics("histogram", dwr["histograms"], metrics, metricTime)
-	metrics = p.readDWMetrics("timer", dwr["timers"], metrics, metricTime)
+	metrics, err = p.readDWMetrics("counter", dwr["counters"], metrics, metricTime)
+	if err != nil {
+		return nil, err
+	}
+	metrics, err = p.readDWMetrics("meter", dwr["meters"], metrics, metricTime)
+	if err != nil {
+		return nil, err
+	}
+	metrics, err = p.readDWMetrics("gauge", dwr["gauges"], metrics, metricTime)
+	if err != nil {
+		return nil, err
+	}
+	metrics, err = p.readDWMetrics("histogram", dwr["histograms"], metrics, metricTime)
+	if err != nil {
+		return nil, err
+	}
+	metrics, err = p.readDWMetrics("timer", dwr["timers"], metrics, metricTime)
+	if err != nil {
+		return nil, err
+	}
 	jsonTags := p.readTags(buf)
@@ -160,14 +175,18 @@ func (p *Parser) unmarshalMetrics(buf []byte) (map[string]interface{}, error) {
 	return jsonOut, nil
 }
-func (p *Parser) readDWMetrics(metricType string, dwms interface{}, metrics []telegraf.Metric, tm time.Time) []telegraf.Metric {
+func (p *Parser) readDWMetrics(metricType string, dwms interface{}, metrics []telegraf.Metric, tm time.Time) ([]telegraf.Metric, error) {
 	if dwmsTyped, ok := dwms.(map[string]interface{}); ok {
 		for dwmName, dwmFields := range dwmsTyped {
 			measurementName := dwmName
 			tags := make(map[string]string)
 			fieldPrefix := ""
 			if p.templateEngine != nil {
-				measurementName, tags, fieldPrefix, _ = p.templateEngine.Apply(dwmName)
+				var err error
+				measurementName, tags, fieldPrefix, err = p.templateEngine.Apply(dwmName)
+				if err != nil {
+					return nil, fmt.Errorf("failed to apply template for type %s: %w", metricType, err)
+				}
 				if len(fieldPrefix) > 0 {
 					fieldPrefix = fmt.Sprintf("%s%s", fieldPrefix, p.Separator)
 				}
@@ -202,7 +221,7 @@ func (p *Parser) readDWMetrics(metricType string, dwms interface{}, metrics []te
 		}
 	}
-	return metrics
+	return metrics, nil
 }
 func (p *Parser) Init() error {
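
The five call sites differ only in the metric type and its registry key; a table-driven variant (a hypothetical refactor, not what the commit does) would collapse the repetition inside Parse:

for _, mt := range []string{"counter", "meter", "gauge", "histogram", "timer"} {
	var err error
	// the Dropwizard registry keys are the pluralized type names
	metrics, err = p.readDWMetrics(mt, dwr[mt+"s"], metrics, metricTime)
	if err != nil {
		return nil, err
	}
}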

View File

@@ -89,7 +89,8 @@ const validEmbeddedCounterJSON = `
 func TestParseValidEmbeddedCounterJSON(t *testing.T) {
 	timeFormat := "2006-01-02T15:04:05Z07:00"
-	metricTime, _ := time.Parse(timeFormat, "2017-02-22T15:33:03.662+03:00")
+	metricTime, err := time.Parse(timeFormat, "2017-02-22T15:33:03.662+03:00")
+	require.NoError(t, err)
 	parser := &Parser{
 		MetricRegistryPath: "metrics",
 		TagsPath: "tags",
@@ -646,6 +647,7 @@ func BenchmarkParsing(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }

View File

@@ -210,6 +210,7 @@ func BenchmarkParsing(b *testing.B) {
 	}
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }

View File

@@ -128,7 +128,8 @@ func TestTemplateApply(t *testing.T) {
 		}
 		require.NoError(t, err)
-		measurement, tags, _, _ := tmpl.Apply(test.input, DefaultSeparator)
+		measurement, tags, _, err := tmpl.Apply(test.input, DefaultSeparator)
+		require.NoError(t, err)
 		require.Equal(t, test.measurement, measurement)
 		require.Len(t, tags, len(test.tags))
 		for k, v := range test.tags {
@@ -759,7 +760,8 @@ func TestApplyTemplateSpecific(t *testing.T) {
 	}
 	require.NoError(t, p.Init())
-	measurement, tags, _, _ := p.ApplyTemplate("current.users.facebook")
+	measurement, tags, _, err := p.ApplyTemplate("current.users.facebook")
+	require.NoError(t, err)
 	require.Equal(t, "current_users", measurement)
 	service, ok := tags["service"]
@@ -776,7 +778,8 @@ func TestApplyTemplateTags(t *testing.T) {
 	}
 	require.NoError(t, p.Init())
-	measurement, tags, _, _ := p.ApplyTemplate("current.users")
+	measurement, tags, _, err := p.ApplyTemplate("current.users")
+	require.NoError(t, err)
 	require.Equal(t, "current_users", measurement)
 	region, ok := tags["region"]

View File

@@ -186,7 +186,7 @@ func (p *Parser) Compile() error {
 	p.loc, err = time.LoadLocation(p.Timezone)
 	if err != nil {
 		p.Log.Warnf("Improper timezone supplied (%s), setting loc to UTC", p.Timezone)
-		p.loc, _ = time.LoadLocation("UTC")
+		p.loc = time.UTC
 	}
 	if p.timeFunc == nil {
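
time.LoadLocation special-cases the name "UTC" and cannot actually fail for it, but it still returns an error value the caller has to deal with; assigning the package-level time.UTC location directly removes the dead error path. For illustration:

package main

import (
	"fmt"
	"time"
)

func main() {
	loc, err := time.LoadLocation("UTC") // always succeeds for "UTC", yet returns an error value
	fmt.Println(loc, err)                // UTC <nil>
	fmt.Println(time.UTC)                // the same location, with no error to check
}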

View File

@@ -269,7 +269,8 @@ func TestCompileErrorsOnInvalidPattern(t *testing.T) {
 	}
 	require.Error(t, p.Compile())
-	metricA, _ := p.ParseLine(`1.25 200 192.168.1.1 5.432µs`)
+	metricA, err := p.ParseLine(`1.25 200 192.168.1.1 5.432µs`)
+	require.Error(t, err)
 	require.Nil(t, metricA)
 }
@@ -1236,6 +1237,7 @@ func BenchmarkParsing(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }

View File

@@ -1065,6 +1065,7 @@ func BenchmarkParsing(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }

View File

@@ -1032,6 +1032,7 @@ func BenchmarkParsing(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }

View File

@@ -1453,7 +1453,8 @@ func BenchmarkParsingSequential(b *testing.B) {
 	// Do the benchmarking
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }
@@ -1468,7 +1469,8 @@ func BenchmarkParsingParallel(b *testing.B) {
 	// Do the benchmarking
 	b.RunParallel(func(p *testing.PB) {
 		for p.Next() {
-			_, _ = plugin.Parse([]byte(benchmarkData))
+			//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+			plugin.Parse([]byte(benchmarkData))
 		}
 	})
 }
@@ -1489,6 +1491,7 @@ func FuzzParserJSON(f *testing.F) {
 	require.NoError(f, parser.Init())
 	f.Fuzz(func(_ *testing.T, input []byte) {
-		_, _ = parser.Parse(input)
+		//nolint:errcheck // fuzz testing can give lots of errors, but we just want to test for crashes
+		parser.Parse(input)
 	})
 }

View File

@@ -125,7 +125,8 @@ func BenchmarkParsingSequential(b *testing.B) {
 	// Do the benchmarking
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(input)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(input)
 	}
 }
@@ -155,7 +156,8 @@ func BenchmarkParsingParallel(b *testing.B) {
 	// Do the benchmarking
 	b.RunParallel(func(p *testing.PB) {
 		for p.Next() {
-			_, _ = plugin.Parse(input)
+			//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+			plugin.Parse(input)
 		}
 	})
 }

View File

@@ -333,6 +333,7 @@ func BenchmarkParsing(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }

View File

@@ -571,6 +571,7 @@ func BenchmarkParsing(b *testing.B) {
 	plugin := &Parser{}
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }

View File

@@ -159,7 +159,8 @@ func BenchmarkParsingMetricVersion1(b *testing.B) {
 	require.NotEmpty(b, benchmarkData)
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkData)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkData)
 	}
 }
@@ -171,6 +172,7 @@ func BenchmarkParsingMetricVersion2(b *testing.B) {
 	require.NotEmpty(b, benchmarkData)
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkData)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkData)
 	}
 }

View File

@@ -351,6 +351,7 @@ func BenchmarkParsing(b *testing.B) {
 	plugin := &Parser{}
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }

View File

@@ -69,6 +69,7 @@ func BenchmarkParsing(b *testing.B) {
 	b.ResetTimer()
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkData)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkData)
 	}
 }

View File

@@ -159,7 +159,8 @@ func BenchmarkParsingMetricVersion1(b *testing.B) {
 	require.NotEmpty(b, benchmarkData)
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkData)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkData)
 	}
 }
@@ -171,6 +172,7 @@ func BenchmarkParsingMetricVersion2(b *testing.B) {
 	require.NotEmpty(b, benchmarkData)
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkData)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkData)
 	}
 }

View File

@@ -341,6 +341,7 @@ func BenchmarkParsing(b *testing.B) {
 	b.ResetTimer()
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkData)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkData)
 	}
 }

View File

@@ -327,6 +327,7 @@ func BenchmarkParsing(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }

View File

@@ -336,6 +336,7 @@ func BenchmarkParsing(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkData))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkData))
 	}
 }

View File

@@ -1345,9 +1345,10 @@ func TestTestCases(t *testing.T) {
 			require.NoError(t, err)
 			// Get the expectations
-			expectedOutputs, err := testutil.ParseMetricsFrom(header, "Expected Output:", parser)
-			require.NoError(t, err)
+			//nolint:errcheck // these may not be set by the testcase, in which case it would error correctly
+			expectedOutputs, _ := testutil.ParseMetricsFrom(header, "Expected Output:", parser)
+			//nolint:errcheck // these may not be set by the testcase, in which case it would error correctly
 			expectedErrors, _ := testutil.ParseRawLinesFrom(header, "Expected Error:")
 			// Setup the parser and run it.
@@ -1578,7 +1579,8 @@ func BenchmarkParsingXML(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkDataXML))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkDataXML))
 	}
 }
@@ -1640,7 +1642,8 @@ func BenchmarkParsingJSON(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse([]byte(benchmarkDataJSON))
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse([]byte(benchmarkDataJSON))
 	}
 }
@@ -1675,7 +1678,8 @@ func BenchmarkParsingProtobuf(b *testing.B) {
 	require.NoError(b, err)
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkData)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkData)
 	}
 }
@@ -1778,7 +1782,8 @@ func BenchmarkParsingMsgPack(b *testing.B) {
 	require.NoError(b, plugin.Init())
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkDataMsgPack[n%2])
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkDataMsgPack[n%2])
 	}
 }
@@ -1810,6 +1815,7 @@ func BenchmarkParsingCBOR(b *testing.B) {
 	require.NoError(b, err)
 	for n := 0; n < b.N; n++ {
-		_, _ = plugin.Parse(benchmarkData)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		plugin.Parse(benchmarkData)
 	}
 }

View File

@@ -20,6 +20,7 @@ type Dedup struct {
 	DedupInterval config.Duration `toml:"dedup_interval"`
 	FlushTime time.Time
 	Cache map[uint64]telegraf.Metric
+	Log telegraf.Logger `toml:"-"`
 }
 // Remove expired items from cache
@@ -126,7 +127,10 @@ func (d *Dedup) GetState() interface{} {
 	for _, value := range d.Cache {
 		v = append(v, value)
 	}
-	state, _ := s.SerializeBatch(v)
+	state, err := s.SerializeBatch(v)
+	if err != nil {
+		d.Log.Errorf("dedup processor failed to serialize metric batch: %v", err)
+	}
 	return state
 }

View File

@@ -167,7 +167,8 @@ func runCountMultiplierProgram() {
 	fieldName := os.Getenv("FIELD_NAME")
 	parser := influx.NewStreamParser(os.Stdin)
 	serializer := &influxSerializer.Serializer{}
-	_ = serializer.Init() // this should always succeed
+	//nolint:errcheck // this should always succeed
+	serializer.Init()
 	for {
 		m, err := parser.Next()

View File

@@ -20,6 +20,8 @@ type Options struct {
 	Clean []BaseOpts
 	Rel []RelOpts
 	ToSlash []BaseOpts `toml:"toslash"`
+
+	Log telegraf.Logger `toml:"-"`
 }
 type ProcessorFunc func(s string) string
@@ -82,7 +84,11 @@ func (o *Options) processMetric(metric telegraf.Metric) {
 	// Rel
 	for _, v := range o.Rel {
 		o.applyFunc(v.BaseOpts, func(s string) string {
-			relPath, _ := filepath.Rel(v.BasePath, s)
+			relPath, err := filepath.Rel(v.BasePath, s)
+			if err != nil {
+				o.Log.Errorf("filepath processor failed to process relative filepath %s: %v", s, err)
+				return v.BasePath
+			}
 			return relPath
 		}, metric)
 	}
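
filepath.Rel fails when the target cannot be expressed relative to the base path, for instance when one is absolute and the other relative, which is why the result now has to be checked. A small demonstration:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Works: both paths are absolute and share a prefix.
	rel, err := filepath.Rel("/var/log", "/var/log/telegraf/metrics.out")
	fmt.Println(rel, err) // telegraf/metrics.out <nil>

	// Fails: a relative target cannot be made relative to an absolute base.
	_, err = filepath.Rel("/var/log", "telegraf/metrics.out")
	fmt.Println(err) // Rel: can't make telegraf/metrics.out relative to /var/log
}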

View File

@@ -62,7 +62,8 @@ func TestCases(t *testing.T) {
 			server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 				if r.URL.Path == "/secrets" {
-					_, _ = w.Write(input)
+					_, err = w.Write(input)
+					require.NoError(t, err)
 				} else {
 					w.WriteHeader(http.StatusNotFound)
 				}
@@ -155,7 +156,8 @@ func TestGetErrors(t *testing.T) {
 func TestResolver(t *testing.T) {
 	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
-		_, _ = w.Write([]byte(`{"test": "aedMZXaLR246OHHjVtJKXQ=="}`))
+		_, err := w.Write([]byte(`{"test": "aedMZXaLR246OHHjVtJKXQ=="}`))
+		require.NoError(t, err)
 	}))
 	defer server.Close()
@@ -196,7 +198,8 @@ func TestGetResolverErrors(t *testing.T) {
 	dummy.Close()
 	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
-		_, _ = w.Write([]byte(`[{"test": "aedMZXaLR246OHHjVtJKXQ=="}]`))
+		_, err = w.Write([]byte(`[{"test": "aedMZXaLR246OHHjVtJKXQ=="}]`))
+		require.NoError(t, err)
 	}))
 	defer server.Close()
@@ -229,7 +232,8 @@ func TestInvalidServerResponse(t *testing.T) {
 	defer dummy.Close()
 	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
-		_, _ = w.Write([]byte(`[somerandomebytes`))
+		_, err = w.Write([]byte(`[somerandomebytes`))
+		require.NoError(t, err)
 	}))
 	defer server.Close()
@@ -263,7 +267,8 @@ func TestAdditionalHeaders(t *testing.T) {
 		if r.Host != "" {
 			actual.Add("host", r.Host)
 		}
-		_, _ = w.Write([]byte(`{"test": "aedMZXaLR246OHHjVtJKXQ=="}`))
+		_, err = w.Write([]byte(`{"test": "aedMZXaLR246OHHjVtJKXQ=="}`))
+		require.NoError(t, err)
 	}))
 	defer server.Close()
@@ -305,12 +310,14 @@ func TestServerReturnCodes(t *testing.T) {
 	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		switch r.URL.Path {
 		case "/", "/200":
-			_, _ = w.Write([]byte(`{}`))
+			_, err = w.Write([]byte(`{}`))
+			require.NoError(t, err)
 		case "/201":
 			w.WriteHeader(201)
 		case "/300":
 			w.WriteHeader(300)
-			_, _ = w.Write([]byte(`{}`))
+			_, err = w.Write([]byte(`{}`))
+			require.NoError(t, err)
 		case "/401":
 			w.WriteHeader(401)
 		default:
@@ -350,7 +357,8 @@ func TestAuthenticationBasic(t *testing.T) {
 	var header http.Header
 	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		header = r.Header
-		_, _ = w.Write([]byte(`{}`))
+		_, err = w.Write([]byte(`{}`))
+		require.NoError(t, err)
 	}))
 	defer server.Close()
@@ -377,7 +385,8 @@ func TestAuthenticationToken(t *testing.T) {
 	var header http.Header
 	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		header = r.Header
-		_, _ = w.Write([]byte(`{}`))
+		_, err = w.Write([]byte(`{}`))
+		require.NoError(t, err)
 	}))
 	defer server.Close()

View File

@@ -158,7 +158,8 @@ func TestGet(t *testing.T) {
 		func(w http.ResponseWriter, r *http.Request) {
 			body, err := io.ReadAll(r.Body)
 			if err != nil {
-				_, _ = w.Write([]byte(err.Error()))
+				_, err := w.Write([]byte(err.Error()))
+				require.NoError(t, err)
 				w.WriteHeader(http.StatusInternalServerError)
 				return
 			}
@@ -198,7 +199,8 @@ func TestGetMultipleTimes(t *testing.T) {
 		func(w http.ResponseWriter, r *http.Request) {
 			body, err := io.ReadAll(r.Body)
 			if err != nil {
-				_, _ = w.Write([]byte(err.Error()))
+				_, err := w.Write([]byte(err.Error()))
+				require.NoError(t, err)
 				w.WriteHeader(http.StatusInternalServerError)
 				return
 			}
@@ -244,7 +246,8 @@ func TestGetExpired(t *testing.T) {
 		func(w http.ResponseWriter, r *http.Request) {
 			body, err := io.ReadAll(r.Body)
 			if err != nil {
-				_, _ = w.Write([]byte(err.Error()))
+				_, err := w.Write([]byte(err.Error()))
+				require.NoError(t, err)
 				w.WriteHeader(http.StatusInternalServerError)
 				return
 			}
@@ -285,7 +288,8 @@ func TestGetRefresh(t *testing.T) {
 		func(w http.ResponseWriter, r *http.Request) {
 			body, err := io.ReadAll(r.Body)
 			if err != nil {
-				_, _ = w.Write([]byte(err.Error()))
+				_, err := w.Write([]byte(err.Error()))
+				require.NoError(t, err)
 				w.WriteHeader(http.StatusInternalServerError)
 				return
 			}

View File

@@ -1052,7 +1052,8 @@ func TestCleanWithTagsSupport(t *testing.T) {
 			require.NoError(t, s.Init())
 			m := metric.New(tt.metricName, tt.tags, tt.fields, now)
-			actual, _ := s.Serialize(m)
+			actual, err := s.Serialize(m)
+			require.NoError(t, err)
 			require.Equal(t, tt.expected, string(actual))
 		})
 	}
@@ -1149,7 +1150,8 @@ func TestCleanWithTagsSupportCompatibleSanitize(t *testing.T) {
 			require.NoError(t, s.Init())
 			m := metric.New(tt.metricName, tt.tags, tt.fields, now)
-			actual, _ := s.Serialize(m)
+			actual, err := s.Serialize(m)
+			require.NoError(t, err)
 			require.Equal(t, tt.expected, string(actual))
 		})
 	}
@@ -1179,7 +1181,8 @@ func TestSerializeBatch(t *testing.T) {
 			require.NoError(t, s.Init())
 			m := metric.New(tt.metricName, tt.tags, tt.fields, now)
-			actual, _ := s.SerializeBatch([]telegraf.Metric{m, m})
+			actual, err := s.SerializeBatch([]telegraf.Metric{m, m})
+			require.NoError(t, err)
 			require.Equal(t, tt.expected, string(actual))
 		})
 	}
@@ -1212,7 +1215,8 @@ func TestSerializeBatchWithTagsSupport(t *testing.T) {
 			require.NoError(t, s.Init())
 			m := metric.New(tt.metricName, tt.tags, tt.fields, now)
-			actual, _ := s.SerializeBatch([]telegraf.Metric{m, m})
+			actual, err := s.SerializeBatch([]telegraf.Metric{m, m})
+			require.NoError(t, err)
 			require.Equal(t, tt.expected, string(actual))
 		})
 	}

View File

@@ -81,57 +81,71 @@ func TestMsgPackTimeEdgeCases(t *testing.T) {
 	// Unix epoch. Begin of 4bytes dates
 	// Nanoseconds: 0x00000000, Seconds: 0x0000000000000000
-	ts, _ := time.Parse(time.RFC3339, "1970-01-01T00:00:00Z")
-	bs, _ := hex.DecodeString("d6ff00000000")
+	ts, err := time.Parse(time.RFC3339, "1970-01-01T00:00:00Z")
+	require.NoError(t, err)
+	bs, err := hex.DecodeString("d6ff00000000")
+	require.NoError(t, err)
 	times = append(times, ts)
 	expected = append(expected, bs)
 	// End of 4bytes dates
 	// Nanoseconds: 0x00000000, Seconds: 0x00000000ffffffff
-	ts, _ = time.Parse(time.RFC3339, "2106-02-07T06:28:15Z")
-	bs, _ = hex.DecodeString("d6ffffffffff")
+	ts, err = time.Parse(time.RFC3339, "2106-02-07T06:28:15Z")
+	require.NoError(t, err)
+	bs, err = hex.DecodeString("d6ffffffffff")
+	require.NoError(t, err)
 	times = append(times, ts)
 	expected = append(expected, bs)
 	// Begin of 8bytes dates
 	// Nanoseconds: 0x00000000, Seconds: 0x0000000100000000
-	ts, _ = time.Parse(time.RFC3339, "2106-02-07T06:28:16Z")
-	bs, _ = hex.DecodeString("d7ff0000000100000000")
+	ts, err = time.Parse(time.RFC3339, "2106-02-07T06:28:16Z")
+	require.NoError(t, err)
+	bs, err = hex.DecodeString("d7ff0000000100000000")
+	require.NoError(t, err)
 	times = append(times, ts)
 	expected = append(expected, bs)
 	// Just after Unix epoch. Non zero nanoseconds
 	// Nanoseconds: 0x00000001, Seconds: 0x0000000000000000
-	ts, _ = time.Parse(time.RFC3339Nano, "1970-01-01T00:00:00.000000001Z")
-	bs, _ = hex.DecodeString("d7ff0000000400000000")
+	ts, err = time.Parse(time.RFC3339Nano, "1970-01-01T00:00:00.000000001Z")
+	require.NoError(t, err)
+	bs, err = hex.DecodeString("d7ff0000000400000000")
+	require.NoError(t, err)
 	times = append(times, ts)
 	expected = append(expected, bs)
 	// End of 8bytes dates
 	// Nanoseconds: 0x00000000, Seconds: 0x00000003ffffffff
-	ts, _ = time.Parse(time.RFC3339Nano, "2514-05-30T01:53:03.000000000Z")
-	bs, _ = hex.DecodeString("d7ff00000003ffffffff")
+	ts, err = time.Parse(time.RFC3339Nano, "2514-05-30T01:53:03.000000000Z")
+	require.NoError(t, err)
+	bs, err = hex.DecodeString("d7ff00000003ffffffff")
+	require.NoError(t, err)
 	times = append(times, ts)
 	expected = append(expected, bs)
 	// Begin of 12bytes date
 	// Nanoseconds: 0x00000000, Seconds: 0x0000000400000000
-	ts, _ = time.Parse(time.RFC3339Nano, "2514-05-30T01:53:04.000000000Z")
-	bs, _ = hex.DecodeString("c70cff000000000000000400000000")
+	ts, err = time.Parse(time.RFC3339Nano, "2514-05-30T01:53:04.000000000Z")
+	require.NoError(t, err)
+	bs, err = hex.DecodeString("c70cff000000000000000400000000")
+	require.NoError(t, err)
 	times = append(times, ts)
 	expected = append(expected, bs)
 	// Zero value, 0001-01-01T00:00:00Z
 	// Nanoseconds: 0x00000000, Seconds: 0xfffffff1886e0900
 	ts = time.Time{}
-	bs, _ = hex.DecodeString("c70cff00000000fffffff1886e0900")
+	bs, err = hex.DecodeString("c70cff00000000fffffff1886e0900")
+	require.NoError(t, err)
 	times = append(times, ts)
 	expected = append(expected, bs)
 	// Max value
 	// Nanoseconds: 0x3b9ac9ff, Seconds: 0x7fffffffffffffff
 	ts = time.Unix(math.MaxInt64, 999_999_999).UTC()
-	bs, _ = hex.DecodeString("c70cff3b9ac9ff7fffffffffffffff")
+	bs, err = hex.DecodeString("c70cff3b9ac9ff7fffffffffffffff")
+	require.NoError(t, err)
 	times = append(times, ts)
 	expected = append(expected, bs)
@@ -141,7 +155,8 @@ func TestMsgPackTimeEdgeCases(t *testing.T) {
 		m := Metric{Time: t1}
 		buf = buf[:0]
-		buf, _ = m.MarshalMsg(buf)
+		buf, err = m.MarshalMsg(buf)
+		require.NoError(t, err)
 		require.Equal(t, expected[i], buf[12:len(buf)-14])
 	}
 }

View File

@@ -35,7 +35,8 @@ func BenchmarkRemoteWrite(b *testing.B) {
 	}
 	s := &Serializer{}
 	for n := 0; n < b.N; n++ {
-		_, _ = s.SerializeBatch(batch)
+		//nolint:errcheck // Benchmarking so skip the error check to avoid the unnecessary operations
+		s.SerializeBatch(batch)
 	}
 }

View File

@@ -32,14 +32,17 @@ type HECTimeSeries struct {
 }
 func (s *Serializer) Serialize(metric telegraf.Metric) ([]byte, error) {
-	return s.createObject(metric), nil
+	return s.createObject(metric)
 }
 func (s *Serializer) SerializeBatch(metrics []telegraf.Metric) ([]byte, error) {
 	var serialized []byte
 	for _, metric := range metrics {
-		m := s.createObject(metric)
+		m, err := s.createObject(metric)
+		if err != nil {
+			return nil, err
+		}
 		if m != nil {
 			serialized = append(serialized, m...)
 		}
@@ -148,7 +151,7 @@ func (s *Serializer) createSingle(metric telegraf.Metric, dataGroup HECTimeSerie
 	return metricGroup, nil
 }
-func (s *Serializer) createObject(metric telegraf.Metric) (metricGroup []byte) {
+func (s *Serializer) createObject(metric telegraf.Metric) ([]byte, error) {
 	/* Splunk supports one metric json object, and does _not_ support an array of JSON objects.
 	** Splunk has the following required names for the metric store:
 	** metric_name: The name of the metric
@@ -177,15 +180,10 @@ func (s *Serializer) createObject(metric telegraf.Metric) (metricGroup []byte) {
 		}
 	}
 	commonTags.Time = float64(metric.Time().UnixNano()) / float64(1000000000)
-	switch s.MultiMetric {
-	case true:
-		metricGroup, _ = s.createMulti(metric, dataGroup, commonTags)
-	default:
-		metricGroup, _ = s.createSingle(metric, dataGroup, commonTags)
+	if s.MultiMetric {
+		return s.createMulti(metric, dataGroup, commonTags)
 	}
-	// Return the metric group regardless of if it's multimetric or single metric.
-	return metricGroup
+	return s.createSingle(metric, dataGroup, commonTags)
 }
 func verifyValue(v interface{}) (value interface{}, valid bool) {
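
Beyond propagating the errors, the refactor replaces a switch over a boolean with a plain if, the more idiomatic shape for a two-way branch, and returns the helper results directly instead of stashing them in a named return. The general idiom, with illustrative names:

// Before: a switch on a boolean obscures a simple two-way branch
// and forces the error to be assigned (or dropped) in each arm.
switch multi {
case true:
	out, _ = buildMulti(m)
default:
	out, _ = buildSingle(m)
}

// After: the branch is explicit and errors flow straight to the caller.
if multi {
	return buildMulti(m)
}
return buildSingle(m)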

View File

@@ -181,7 +181,8 @@ func TestSerializeMetricFloat(t *testing.T) {
 	m := metric.New("cpu", tags, fields, now)
 	s := &Serializer{}
-	buf, _ := s.Serialize(m)
+	buf, err := s.Serialize(m)
+	require.NoError(t, err)
 	mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 	expS := []string{fmt.Sprintf("\"cpu.usage.idle\" 91.500000 %d source=\"realHost\" \"cpu\"=\"cpu0\"", now.UnixNano()/1000000000)}
@@ -200,7 +201,8 @@ func TestSerializeMetricInt(t *testing.T) {
 	m := metric.New("cpu", tags, fields, now)
 	s := &Serializer{}
-	buf, _ := s.Serialize(m)
+	buf, err := s.Serialize(m)
+	require.NoError(t, err)
 	mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 	expS := []string{fmt.Sprintf("\"cpu.usage.idle\" 91.000000 %d source=\"realHost\" \"cpu\"=\"cpu0\"", now.UnixNano()/1000000000)}
@@ -219,7 +221,8 @@ func TestSerializeMetricBoolTrue(t *testing.T) {
 	m := metric.New("cpu", tags, fields, now)
 	s := &Serializer{}
-	buf, _ := s.Serialize(m)
+	buf, err := s.Serialize(m)
+	require.NoError(t, err)
 	mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 	expS := []string{fmt.Sprintf("\"cpu.usage.idle\" 1.000000 %d source=\"realHost\" \"cpu\"=\"cpu0\"", now.UnixNano()/1000000000)}
@@ -238,7 +241,8 @@ func TestSerializeMetricBoolFalse(t *testing.T) {
 	m := metric.New("cpu", tags, fields, now)
 	s := &Serializer{}
-	buf, _ := s.Serialize(m)
+	buf, err := s.Serialize(m)
+	require.NoError(t, err)
 	mS := strings.Split(strings.TrimSpace(string(buf)), "\n")
 	expS := []string{fmt.Sprintf("\"cpu.usage.idle\" 0.000000 %d source=\"realHost\" \"cpu\"=\"cpu0\"", now.UnixNano()/1000000000)}