chore: Resolve linter issues for ineffassign, nilerr, gosimple... (#11966)

Paweł Żak 2022-10-12 21:08:03 +02:00 committed by GitHub
parent beba64e006
commit 08c1ce9cb6
12 changed files with 95 additions and 120 deletions

View File

@@ -41,10 +41,9 @@ func TestAlignedTicker(t *testing.T) {
clk.Add(10 * time.Second)
for !clk.Now().After(until) {
select {
case tm := <-ticker.Elapsed():
actual = append(actual, tm.UTC())
}
tm := <-ticker.Elapsed()
actual = append(actual, tm.UTC())
clk.Add(10 * time.Second)
}
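The ticker-test hunk above is the gosimple S1000 cleanup: a select statement with exactly one case and no default is just a verbose channel receive. A minimal, self-contained sketch of the before/after shapes (hypothetical channel, not the ticker code):

package main

import "fmt"

func main() {
	elapsed := make(chan int, 1)

	// Before: a single-case select with no default — valid Go, but
	// gosimple (S1000) reports it because it adds nothing over a receive.
	elapsed <- 1
	select {
	case v := <-elapsed:
		fmt.Println("got", v)
	}

	// After: the plain receive the commit switches to.
	elapsed <- 2
	v := <-elapsed
	fmt.Println("got", v)
}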

View File

@@ -88,7 +88,7 @@ type AutoDecoder struct {
identity *IdentityDecoder
}
func (a *AutoDecoder) SetEnconding(encoding string) {
func (a *AutoDecoder) SetEncoding(encoding string) {
a.encoding = encoding
}
@@ -199,7 +199,7 @@ func (*IdentityEncoder) Encode(data []byte) ([]byte, error) {
// ContentDecoder removes a wrapper encoding from byte buffers.
type ContentDecoder interface {
SetEnconding(string)
SetEncoding(string)
Decode([]byte) ([]byte, error)
}
@@ -216,13 +216,16 @@ func NewGzipDecoder() (*GzipDecoder, error) {
}, nil
}
func (*GzipDecoder) SetEnconding(string) {}
func (*GzipDecoder) SetEncoding(string) {}
func (d *GzipDecoder) Decode(data []byte) ([]byte, error) {
d.reader.Reset(bytes.NewBuffer(data))
err := d.reader.Reset(bytes.NewBuffer(data))
if err != nil {
return nil, err
}
d.buf.Reset()
_, err := d.buf.ReadFrom(d.reader)
_, err = d.buf.ReadFrom(d.reader)
if err != nil && err != io.EOF {
return nil, err
}
@@ -243,7 +246,7 @@ func NewZlibDecoder() (*ZlibDecoder, error) {
}, nil
}
func (*ZlibDecoder) SetEnconding(string) {}
func (*ZlibDecoder) SetEncoding(string) {}
func (d *ZlibDecoder) Decode(data []byte) ([]byte, error) {
d.buf.Reset()
@@ -271,7 +274,7 @@ func NewIdentityDecoder() *IdentityDecoder {
return &IdentityDecoder{}
}
func (*IdentityDecoder) SetEnconding(string) {}
func (*IdentityDecoder) SetEncoding(string) {}
func (*IdentityDecoder) Decode(data []byte) ([]byte, error) {
return data, nil
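The GzipDecoder hunk above stops discarding the error returned by (*gzip.Reader).Reset and only then reuses err for ReadFrom. A standalone sketch of that decode pattern with plain stdlib types; the struct's field types aren't shown in the hunk, so they are assumed here:

package main

import (
	"bytes"
	"compress/gzip"
	"fmt"
	"io"
)

// gzipDecoder mirrors the assumed shape of the decoder in the diff:
// a reusable *gzip.Reader plus a scratch buffer.
type gzipDecoder struct {
	reader *gzip.Reader
	buf    *bytes.Buffer
}

func (d *gzipDecoder) decode(data []byte) ([]byte, error) {
	// Reset can fail (e.g. on a bad gzip header), so its error is
	// checked instead of being dropped as in the pre-fix code.
	if err := d.reader.Reset(bytes.NewBuffer(data)); err != nil {
		return nil, err
	}
	d.buf.Reset()
	if _, err := d.buf.ReadFrom(d.reader); err != nil && err != io.EOF {
		return nil, err
	}
	return d.buf.Bytes(), nil
}

func main() {
	var compressed bytes.Buffer
	zw := gzip.NewWriter(&compressed)
	_, _ = zw.Write([]byte("hello gzip"))
	_ = zw.Close()

	zr, err := gzip.NewReader(bytes.NewReader(compressed.Bytes()))
	if err != nil {
		panic(err)
	}
	d := &gzipDecoder{reader: zr, buf: new(bytes.Buffer)}
	out, err := d.decode(compressed.Bytes())
	fmt.Println(string(out), err)
}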

View File

@@ -70,18 +70,7 @@ func ProductToken() string {
}
// ReadLines reads contents from a file and splits them by new lines.
// A convenience wrapper to ReadLinesOffsetN(filename, 0, -1).
func ReadLines(filename string) ([]string, error) {
return ReadLinesOffsetN(filename, 0, -1)
}
// ReadLines reads contents from file and splits them by new line.
// The offset tells at which line number to start.
// The count determines the number of lines to read (starting from offset):
//
// n >= 0: at most n lines
// n < 0: whole file
func ReadLinesOffsetN(filename string, offset uint, n int) ([]string, error) {
f, err := os.Open(filename)
if err != nil {
return []string{""}, err
@@ -89,26 +78,18 @@ func ReadLinesOffsetN(filename string, offset uint, n int) ([]string, error) {
defer f.Close()
var ret []string
r := bufio.NewReader(f)
for i := 0; i < n+int(offset) || n < 0; i++ {
line, err := r.ReadString('\n')
if err != nil {
break
}
if i < int(offset) {
continue
}
ret = append(ret, strings.Trim(line, "\n"))
scanner := bufio.NewScanner(f)
for scanner.Scan() {
ret = append(ret, scanner.Text())
}
return ret, nil
}
// RandomString returns a random string of alpha-numeric characters
// RandomString returns a random string of alphanumeric characters
func RandomString(n int) string {
var bytes = make([]byte, n)
rand.Read(bytes)
rand.Read(bytes) //nolint:revive // from math/rand/rand.go: "It always returns len(p) and a nil error."
for i, b := range bytes {
bytes[i] = alphanum[b%byte(len(alphanum))]
}
@@ -249,7 +230,7 @@ func CompressWithGzip(data io.Reader) (io.ReadCloser, error) {
// The format can be one of "unix", "unix_ms", "unix_us", "unix_ns", or a Go
// time layout suitable for time.Parse.
//
// When using the "unix" format, a optional fractional component is allowed.
// When using the "unix" format, an optional fractional component is allowed.
// Specific unix time precisions cannot have a fractional component.
//
// Unix times may be an int64, float64, or string. When using a Go format
@@ -344,17 +325,17 @@ func timeFromFraction(f *big.Rat, factor int64) time.Time {
// sanitizeTimestamp removes thousand separators and uses dot as
// decimal separator. Returns also a boolean indicating success.
func sanitizeTimestamp(timestamp string, decimalSeparartor []string) string {
func sanitizeTimestamp(timestamp string, decimalSeparator []string) string {
// Remove thousand-separators that are not used for decimal separation
sanitized := timestamp
for _, s := range []string{" ", ",", "."} {
if !choice.Contains(s, decimalSeparartor) {
if !choice.Contains(s, decimalSeparator) {
sanitized = strings.ReplaceAll(sanitized, s, "")
}
}
// Replace decimal separators by dot to have a standard, parsable format
for _, s := range decimalSeparartor {
for _, s := range decimalSeparator {
// Make sure we replace only the first occurrence of any separator.
if strings.Contains(sanitized, s) {
return strings.Replace(sanitized, s, ".", 1)
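Earlier in this file's hunk, the hand-rolled ReadString('\n') loop with offset bookkeeping is collapsed into a bufio.Scanner. A minimal standalone version of that reading pattern; unlike the diff it also surfaces scanner.Err(), which is an addition in this sketch, not part of the commit:

package main

import (
	"bufio"
	"fmt"
	"os"
)

// readLines returns all lines of a file. Scanner.Text already strips the
// trailing newline, so the old strings.Trim call is unnecessary.
func readLines(filename string) ([]string, error) {
	f, err := os.Open(filename)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	var lines []string
	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		lines = append(lines, scanner.Text())
	}
	return lines, scanner.Err()
}

func main() {
	lines, err := readLines("/etc/hosts") // any readable text file
	fmt.Println(len(lines), err)
}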

View File

@@ -11,7 +11,6 @@ import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -61,9 +60,9 @@ func TestRunTimeout(t *testing.T) {
err := RunTimeout(cmd, time.Millisecond*20)
elapsed := time.Since(start)
assert.Equal(t, ErrTimeout, err)
require.Equal(t, ErrTimeout, err)
// Verify that command gets killed in 20ms, with some breathing room
assert.True(t, elapsed < time.Millisecond*75)
require.True(t, elapsed < time.Millisecond*75)
}
// Verifies behavior of a command that doesn't get killed.
@@ -83,7 +82,7 @@ func TestRunTimeoutFastExit(t *testing.T) {
require.NoError(t, err)
// Verify that command gets killed in 20ms, with some breathing room
assert.True(t, elapsed < time.Millisecond*75)
require.True(t, elapsed < time.Millisecond*75)
// Verify "process already finished" log doesn't occur.
time.Sleep(time.Millisecond * 75)
@@ -101,9 +100,9 @@ func TestCombinedOutputTimeout(t *testing.T) {
_, err := CombinedOutputTimeout(cmd, time.Millisecond*20)
elapsed := time.Since(start)
assert.Equal(t, ErrTimeout, err)
require.Equal(t, ErrTimeout, err)
// Verify that command gets killed in 20ms, with some breathing room
assert.True(t, elapsed < time.Millisecond*75)
require.True(t, elapsed < time.Millisecond*75)
}
func TestCombinedOutput(t *testing.T) {
@@ -113,8 +112,8 @@ func TestCombinedOutput(t *testing.T) {
cmd := exec.Command(echobin, "foo")
out, err := CombinedOutputTimeout(cmd, time.Second)
assert.NoError(t, err)
assert.Equal(t, "foo\n", string(out))
require.NoError(t, err)
require.Equal(t, "foo\n", string(out))
}
// test that CombinedOutputTimeout and exec.Cmd.CombinedOutput return
@@ -125,12 +124,13 @@ func TestCombinedOutputError(t *testing.T) {
}
cmd := exec.Command(shell, "-c", "false")
expected, err := cmd.CombinedOutput()
require.Error(t, err)
cmd2 := exec.Command(shell, "-c", "false")
actual, err := CombinedOutputTimeout(cmd2, time.Second)
assert.Error(t, err)
assert.Equal(t, expected, actual)
require.Error(t, err)
require.Equal(t, expected, actual)
}
func TestRunError(t *testing.T) {
@@ -140,7 +140,7 @@ func TestRunError(t *testing.T) {
cmd := exec.Command(shell, "-c", "false")
err := RunTimeout(cmd, time.Second)
assert.Error(t, err)
require.Error(t, err)
}
func TestRandomSleep(t *testing.T) {
@@ -150,13 +150,13 @@ func TestRandomSleep(t *testing.T) {
s := time.Now()
RandomSleep(time.Duration(0), make(chan struct{}))
elapsed := time.Since(s)
assert.True(t, elapsed < time.Millisecond)
require.True(t, elapsed < time.Millisecond)
// test that max sleep is respected
s = time.Now()
RandomSleep(time.Millisecond*50, make(chan struct{}))
elapsed = time.Since(s)
assert.True(t, elapsed < time.Millisecond*100)
require.True(t, elapsed < time.Millisecond*100)
// test that shutdown is respected
s = time.Now()
@@ -167,7 +167,7 @@ func TestRandomSleep(t *testing.T) {
}()
RandomSleep(time.Second, shutdown)
elapsed = time.Since(s)
assert.True(t, elapsed < time.Millisecond*150)
require.True(t, elapsed < time.Millisecond*150)
}
func TestCompressWithGzip(t *testing.T) {
@@ -175,16 +175,16 @@ func TestCompressWithGzip(t *testing.T) {
inputBuffer := bytes.NewBuffer([]byte(testData))
outputBuffer, err := CompressWithGzip(inputBuffer)
assert.NoError(t, err)
require.NoError(t, err)
gzipReader, err := gzip.NewReader(outputBuffer)
assert.NoError(t, err)
require.NoError(t, err)
defer gzipReader.Close()
output, err := io.ReadAll(gzipReader)
assert.NoError(t, err)
require.NoError(t, err)
assert.Equal(t, testData, string(output))
require.Equal(t, testData, string(output))
}
type mockReader struct {
@@ -200,23 +200,23 @@ func TestCompressWithGzipEarlyClose(t *testing.T) {
mr := &mockReader{}
rc, err := CompressWithGzip(mr)
assert.NoError(t, err)
require.NoError(t, err)
n, err := io.CopyN(io.Discard, rc, 10000)
assert.NoError(t, err)
assert.Equal(t, int64(10000), n)
require.NoError(t, err)
require.Equal(t, int64(10000), n)
r1 := mr.readN
err = rc.Close()
assert.NoError(t, err)
require.NoError(t, err)
n, err = io.CopyN(io.Discard, rc, 10000)
assert.Error(t, io.EOF, err)
assert.Equal(t, int64(0), n)
require.Error(t, io.EOF, err)
require.Equal(t, int64(0), n)
r2 := mr.readN
// no more read to the source after closing
assert.Equal(t, r1, r2)
require.Equal(t, r1, r2)
}
func TestAlignDuration(t *testing.T) {
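Most of this test file's changes swap testify's assert calls for require. The behavioral difference worth knowing: assert marks the test failed and keeps executing, while require calls t.FailNow so nothing after a failed check runs. A small illustrative test, not taken from the diff:

package example

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestAssertVersusRequire(t *testing.T) {
	err := errors.New("command timed out")

	// assert.Error records a failure but the test keeps running,
	// which can produce a cascade of follow-on failures.
	assert.Error(t, err)

	// require.Error stops the test immediately on failure, so later
	// checks never run against broken state -- the style this commit
	// standardizes on.
	require.Error(t, err)
	require.Equal(t, "command timed out", err.Error())
}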

View File

@@ -46,7 +46,7 @@ func TestRestartingRebindsPipes(t *testing.T) {
time.Sleep(1 * time.Millisecond)
}
syscall.Kill(p.Pid(), syscall.SIGKILL)
require.NoError(t, syscall.Kill(p.Pid(), syscall.SIGKILL))
for atomic.LoadInt64(&linesRead) < 2 {
time.Sleep(1 * time.Millisecond)
@@ -74,7 +74,7 @@ func TestMain(m *testing.M) {
// cleanly.
func externalProcess() {
wait := make(chan int)
fmt.Fprintln(os.Stdout, "started")
_, _ = fmt.Fprintln(os.Stdout, "started")
<-wait
os.Exit(2)
}
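This process-test hunk checks the error from syscall.Kill via require.NoError and explicitly discards fmt.Fprintln's results rather than ignoring them silently. A short sketch of the explicit-discard idiom on its own (the exact linter driving it isn't named in the hunk):

package main

import (
	"fmt"
	"os"
)

func main() {
	// fmt.Fprintln returns (bytes written, error); assigning both to the
	// blank identifier documents that this best-effort write is allowed
	// to fail without consequence.
	_, _ = fmt.Fprintln(os.Stdout, "started")

	// When the result does matter, check it instead of discarding it.
	if _, err := fmt.Fprintln(os.Stderr, "stopping"); err != nil {
		fmt.Println("write to stderr failed:", err)
	}
}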

View File

@@ -26,12 +26,12 @@ func (f Framing) String() string {
}
// UnmarshalTOML implements ability to unmarshal framing from TOML files.
func (f *Framing) UnmarshalTOML(data []byte) (err error) {
func (f *Framing) UnmarshalTOML(data []byte) error {
return f.UnmarshalText(data)
}
// UnmarshalText implements encoding.TextUnmarshaler
func (f *Framing) UnmarshalText(data []byte) (err error) {
func (f *Framing) UnmarshalText(data []byte) error {
s := string(data)
switch strings.ToUpper(s) {
case `OCTET-COUNTING`:
@@ -40,21 +40,20 @@ func (f *Framing) UnmarshalText(data []byte) (err error) {
fallthrough
case `'OCTET-COUNTING'`:
*f = OctetCounting
return
return nil
case `NON-TRANSPARENT`:
fallthrough
case `"NON-TRANSPARENT"`:
fallthrough
case `'NON-TRANSPARENT'`:
*f = NonTransparent
return
return nil
}
*f = -1
return fmt.Errorf("unknown framing")
}
// MarshalText implements encoding.TextMarshaler
// MarshalText implements encoding.TextMarshaller
func (f Framing) MarshalText() ([]byte, error) {
s := f.String()
if s != "" {
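The UnmarshalTOML/UnmarshalText hunks above drop the named (err error) result and its bare return statements in favor of explicit return nil. A minimal before/after sketch of that pattern (the specific linter behind this change isn't shown in the hunk):

package framingsketch

import "fmt"

// before: named result plus bare returns; the reader has to track what
// err currently holds at each return site.
func parseOld(s string) (err error) {
	if s == "" {
		err = fmt.Errorf("unknown framing")
		return
	}
	return
}

// after: unnamed result with explicit returns, as in the diff.
func parseNew(s string) error {
	if s == "" {
		return fmt.Errorf("unknown framing")
	}
	return nil
}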

View File

@@ -19,7 +19,9 @@ type Engine struct {
// Apply extracts the template fields from the given line and returns the measurement
// name, tags and field name
func (e *Engine) Apply(line string) (string, map[string]string, string, error) {
//
//nolint:revive //function-result-limit conditionally 4 return results allowed
func (e *Engine) Apply(line string) (measurementName string, tags map[string]string, field string, err error) {
return e.matcher.match(line).Apply(line, e.joiner)
}

View File

@@ -5,7 +5,7 @@ import (
"strings"
)
// Template represents a pattern and tags to map a metric string to a influxdb Point
// Template represents a pattern and tags to map a metric string to an influxdb Point
type Template struct {
separator string
parts []string
@@ -14,19 +14,21 @@ type Template struct {
greedyMeasurement bool
}
// apply extracts the template fields from the given line and returns the measurement
// Apply extracts the template fields from the given line and returns the measurement
// name, tags and field name
func (t *Template) Apply(line string, joiner string) (string, map[string]string, string, error) {
fields := strings.Split(line, t.separator)
//
//nolint:revive //function-result-limit conditionally 4 return results allowed
func (t *Template) Apply(line string, joiner string) (measurementName string, tags map[string]string, field string, err error) {
allFields := strings.Split(line, t.separator)
var (
measurement []string
tags = make(map[string][]string)
field []string
measurements []string
tagsMap = make(map[string][]string)
fields []string
)
// Set any default tags
for k, v := range t.defaultTags {
tags[k] = append(tags[k], v)
tagsMap[k] = append(tagsMap[k], v)
}
// See if an invalid combination has been specified in the template:
@@ -45,7 +47,7 @@ func (t *Template) Apply(line string, joiner string) (string, map[string]string,
}
for i, tag := range t.parts {
if i >= len(fields) {
if i >= len(allFields) {
continue
}
if tag == "" {
@@ -54,25 +56,25 @@ func (t *Template) Apply(line string, joiner string) (string, map[string]string,
switch tag {
case "measurement":
measurement = append(measurement, fields[i])
measurements = append(measurements, allFields[i])
case "field":
field = append(field, fields[i])
fields = append(fields, allFields[i])
case "field*":
field = append(field, fields[i:]...)
fields = append(fields, allFields[i:]...)
case "measurement*":
measurement = append(measurement, fields[i:]...)
measurements = append(measurements, allFields[i:]...)
default:
tags[tag] = append(tags[tag], fields[i])
tagsMap[tag] = append(tagsMap[tag], allFields[i])
}
}
// Convert to map of strings.
outtags := make(map[string]string)
for k, values := range tags {
outtags[k] = strings.Join(values, joiner)
tags = make(map[string]string)
for k, values := range tagsMap {
tags[k] = strings.Join(values, joiner)
}
return strings.Join(measurement, joiner), outtags, strings.Join(field, joiner), nil
return strings.Join(measurements, joiner), tags, strings.Join(fields, joiner), nil
}
func NewDefaultTemplateWithPattern(pattern string) (*Template, error) {

View File

@@ -4,9 +4,9 @@ import (
"testing"
"time"
"github.com/influxdata/telegraf"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/influxdata/telegraf"
)
func TestNewMetric(t *testing.T) {
@@ -78,17 +78,17 @@ func TestRemoveTagNoEffectOnMissingTags(t *testing.T) {
func TestGetTag(t *testing.T) {
m := baseMetric()
value, ok := m.GetTag("host")
_, ok := m.GetTag("host")
require.False(t, ok)
m.AddTag("host", "localhost")
value, ok = m.GetTag("host")
value, ok := m.GetTag("host")
require.True(t, ok)
require.Equal(t, "localhost", value)
m.RemoveTag("host")
value, ok = m.GetTag("host")
_, ok = m.GetTag("host")
require.False(t, ok)
}
@@ -143,17 +143,17 @@ func TestRemoveFieldNoEffectOnMissingFields(t *testing.T) {
func TestGetField(t *testing.T) {
m := baseMetric()
value, ok := m.GetField("foo")
_, ok := m.GetField("foo")
require.False(t, ok)
m.AddField("foo", "bar")
value, ok = m.GetField("foo")
value, ok := m.GetField("foo")
require.True(t, ok)
require.Equal(t, "bar", value)
m.RemoveTag("foo")
value, ok = m.GetTag("foo")
_, ok = m.GetTag("foo")
require.False(t, ok)
}
@@ -218,20 +218,20 @@ func TestHashID(t *testing.T) {
// adding a field doesn't change the hash:
m.AddField("foo", int64(100))
assert.Equal(t, hash, m.HashID())
require.Equal(t, hash, m.HashID())
// removing a non-existent tag doesn't change the hash:
m.RemoveTag("no-op")
assert.Equal(t, hash, m.HashID())
require.Equal(t, hash, m.HashID())
// adding a tag does change it:
m.AddTag("foo", "bar")
assert.NotEqual(t, hash, m.HashID())
require.NotEqual(t, hash, m.HashID())
hash = m.HashID()
// removing a tag also changes it:
m.RemoveTag("mytag")
assert.NotEqual(t, hash, m.HashID())
require.NotEqual(t, hash, m.HashID())
}
func TestHashID_Consistency(t *testing.T) {
@@ -261,10 +261,10 @@ func TestHashID_Consistency(t *testing.T) {
},
time.Now(),
)
assert.Equal(t, hash, m2.HashID())
require.Equal(t, hash, m2.HashID())
m3 := m.Copy()
assert.Equal(t, m2.HashID(), m3.HashID())
require.Equal(t, m2.HashID(), m3.HashID())
}
func TestHashID_Delimiting(t *testing.T) {
@@ -290,7 +290,7 @@ func TestHashID_Delimiting(t *testing.T) {
},
time.Now(),
)
assert.NotEqual(t, m1.HashID(), m2.HashID())
require.NotEqual(t, m1.HashID(), m2.HashID())
}
func TestSetName(t *testing.T) {
@@ -324,5 +324,5 @@ func TestValueType(t *testing.T) {
}
m := New("cpu", tags, fields, now, telegraf.Gauge)
assert.Equal(t, telegraf.Gauge, m.Type())
require.Equal(t, telegraf.Gauge, m.Type())
}
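The GetTag/GetField edits in this file replace results that were assigned but never read with the blank identifier, which is the pattern ineffassign reports. A standalone sketch using a plain map rather than the telegraf metric API:

package main

import "fmt"

func main() {
	tags := map[string]string{}

	// The old test wrote `value, ok := ...` here even though value was
	// never read before the next lookup; ineffassign flags that, so the
	// first result is discarded instead.
	_, ok := tags["host"]
	fmt.Println(ok) // false

	tags["host"] = "localhost"
	value, ok := tags["host"]
	fmt.Println(value, ok) // localhost true
}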

View File

@@ -1,7 +1,6 @@
package metric
import (
"log"
"runtime"
"sync/atomic"
@@ -24,10 +23,6 @@ func WithGroupTracking(metric []telegraf.Metric, fn NotifyFunc) ([]telegraf.Metr
return newTrackingMetricGroup(metric, fn)
}
func EnableDebugFinalizer() {
finalizer = debugFinalizer
}
var (
lastID uint64
finalizer func(*trackingData)
@@ -37,13 +32,6 @@ func newTrackingID() telegraf.TrackingID {
return telegraf.TrackingID(atomic.AddUint64(&lastID, 1))
}
func debugFinalizer(d *trackingData) {
rc := atomic.LoadInt32(&d.rc)
if rc != 0 {
log.Fatalf("E! [agent] metric collected with non-zero reference count rc: %d", rc)
}
}
type trackingData struct {
id telegraf.TrackingID
rc int32

View File

@@ -395,7 +395,7 @@ func (a *AMQPConsumer) onMessage(acc telegraf.TrackingAccumulator, d amqp.Delive
}
}
a.decoder.SetEnconding(d.ContentEncoding)
a.decoder.SetEncoding(d.ContentEncoding)
body, err := a.decoder.Decode(d.Body)
if err != nil {
onError()

View File

@@ -50,6 +50,7 @@ func TestOpenTelemetry(t *testing.T) {
meter := mp.Meter("library-name")
counter, err := meter.SyncInt64().Counter("measurement-counter")
require.NoError(t, err)
counter.Add(ctx, 7)
// write metrics through the telegraf OpenTelemetry input plugin