chore(linters): Fix findings found by testifylint: len (#14203)
parent e919884588
commit 34ae468a8c
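
For context on the pattern touched throughout this diff: the testifylint `len` checker flags assertions that compare a count through `require.Equal(t, n, len(x))`, or the reversed `require.Equal(t, len(x), n)`, and this commit rewrites them to `require.Len(t, x, n)`, carrying any message arguments over unchanged. The Go sketch below is illustrative only; the package, test name, slice, and message are made up and are not part of the diff.

package example

import (
	"testing"

	"github.com/stretchr/testify/require"
)

// TestLenAssertion shows the rewrite applied across this commit: the
// Equal-with-len form (what testifylint's "len" checker flags) next to the
// direct Len assertion it is replaced with.
func TestLenAssertion(t *testing.T) {
	metrics := []string{"cpu", "mem"}

	// Before: the length is computed and compared with require.Equal.
	require.Equal(t, 2, len(metrics))

	// After: the same check via require.Len; the container comes first and
	// the expected length second, and optional message arguments pass
	// through unchanged.
	require.Len(t, metrics, 2, "expected exactly two metrics")
}

On failure, require.Len reports the container it inspected rather than just two integers, which is the usual motivation for preferring it.
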
@@ -36,7 +36,7 @@ func TestAgent_LoadPlugin(t *testing.T) {
 err := c.LoadConfig("../config/testdata/telegraf-agent.toml")
 require.NoError(t, err)
 a := NewAgent(c)
-require.Equal(t, 1, len(a.Config.Inputs))
+require.Len(t, a.Config.Inputs, 1)

 c = config.NewConfig()
 c.InputFilters = []string{"foo"}
@@ -50,21 +50,21 @@ func TestAgent_LoadPlugin(t *testing.T) {
 err = c.LoadConfig("../config/testdata/telegraf-agent.toml")
 require.NoError(t, err)
 a = NewAgent(c)
-require.Equal(t, 1, len(a.Config.Inputs))
+require.Len(t, a.Config.Inputs, 1)

 c = config.NewConfig()
 c.InputFilters = []string{"mysql", "redis"}
 err = c.LoadConfig("../config/testdata/telegraf-agent.toml")
 require.NoError(t, err)
 a = NewAgent(c)
-require.Equal(t, 2, len(a.Config.Inputs))
+require.Len(t, a.Config.Inputs, 2)

 c = config.NewConfig()
 c.InputFilters = []string{"mysql", "foo", "redis", "bar"}
 err = c.LoadConfig("../config/testdata/telegraf-agent.toml")
 require.NoError(t, err)
 a = NewAgent(c)
-require.Equal(t, 2, len(a.Config.Inputs))
+require.Len(t, a.Config.Inputs, 2)
 }

 func TestAgent_LoadOutput(t *testing.T) {
@@ -73,21 +73,21 @@ func TestAgent_LoadOutput(t *testing.T) {
 err := c.LoadConfig("../config/testdata/telegraf-agent.toml")
 require.NoError(t, err)
 a := NewAgent(c)
-require.Equal(t, 2, len(a.Config.Outputs))
+require.Len(t, a.Config.Outputs, 2)

 c = config.NewConfig()
 c.OutputFilters = []string{"kafka"}
 err = c.LoadConfig("../config/testdata/telegraf-agent.toml")
 require.NoError(t, err)
 a = NewAgent(c)
-require.Equal(t, 1, len(a.Config.Outputs))
+require.Len(t, a.Config.Outputs, 1)

 c = config.NewConfig()
 c.OutputFilters = []string{}
 err = c.LoadConfig("../config/testdata/telegraf-agent.toml")
 require.NoError(t, err)
 a = NewAgent(c)
-require.Equal(t, 3, len(a.Config.Outputs))
+require.Len(t, a.Config.Outputs, 3)

 c = config.NewConfig()
 c.OutputFilters = []string{"foo"}
@@ -101,22 +101,22 @@ func TestAgent_LoadOutput(t *testing.T) {
 err = c.LoadConfig("../config/testdata/telegraf-agent.toml")
 require.NoError(t, err)
 a = NewAgent(c)
-require.Equal(t, 2, len(a.Config.Outputs))
+require.Len(t, a.Config.Outputs, 2)

 c = config.NewConfig()
 c.OutputFilters = []string{"influxdb", "kafka"}
 err = c.LoadConfig("../config/testdata/telegraf-agent.toml")
 require.NoError(t, err)
-require.Equal(t, 3, len(c.Outputs))
+require.Len(t, c.Outputs, 3)
 a = NewAgent(c)
-require.Equal(t, 3, len(a.Config.Outputs))
+require.Len(t, a.Config.Outputs, 3)

 c = config.NewConfig()
 c.OutputFilters = []string{"influxdb", "foo", "kafka", "bar"}
 err = c.LoadConfig("../config/testdata/telegraf-agent.toml")
 require.NoError(t, err)
 a = NewAgent(c)
-require.Equal(t, 3, len(a.Config.Outputs))
+require.Len(t, a.Config.Outputs, 3)
 }

 func TestWindow(t *testing.T) {
@@ -20,7 +20,7 @@ func TestFileWriter_NoRotation(t *testing.T) {
 _, err = writer.Write([]byte("Hello World 2"))
 require.NoError(t, err)
 files, _ := os.ReadDir(tempDir)
-require.Equal(t, 1, len(files))
+require.Len(t, files, 1)
 }

 func TestFileWriter_TimeRotation(t *testing.T) {
@@ -36,7 +36,7 @@ func TestFileWriter_TimeRotation(t *testing.T) {
 _, err = writer.Write([]byte("Hello World 2"))
 require.NoError(t, err)
 files, _ := os.ReadDir(tempDir)
-require.Equal(t, 2, len(files))
+require.Len(t, files, 2)
 }

 func TestFileWriter_ReopenTimeRotation(t *testing.T) {
@@ -51,7 +51,7 @@ func TestFileWriter_ReopenTimeRotation(t *testing.T) {
 t.Cleanup(func() { require.NoError(t, writer.Close()) })

 files, _ := os.ReadDir(tempDir)
-require.Equal(t, 2, len(files))
+require.Len(t, files, 2)
 }

 func TestFileWriter_SizeRotation(t *testing.T) {
@@ -66,7 +66,7 @@ func TestFileWriter_SizeRotation(t *testing.T) {
 _, err = writer.Write([]byte("World 2"))
 require.NoError(t, err)
 files, _ := os.ReadDir(tempDir)
-require.Equal(t, 2, len(files))
+require.Len(t, files, 2)
 }

 func TestFileWriter_ReopenSizeRotation(t *testing.T) {
@@ -82,7 +82,7 @@ func TestFileWriter_ReopenSizeRotation(t *testing.T) {
 _, err = writer.Write([]byte("Hello World Again"))
 require.NoError(t, err)
 files, _ := os.ReadDir(tempDir)
-require.Equal(t, 2, len(files))
+require.Len(t, files, 2)
 }

 func TestFileWriter_DeleteArchives(t *testing.T) {
@@ -109,7 +109,7 @@ func TestFileWriter_DeleteArchives(t *testing.T) {
 require.NoError(t, err)

 files, _ := os.ReadDir(tempDir)
-require.Equal(t, 3, len(files))
+require.Len(t, files, 3)

 for _, tempFile := range files {
 var bytes []byte
@@ -136,6 +136,6 @@ func TestFileWriter_CloseDoesNotRotate(t *testing.T) {
 require.NoError(t, writer.Close())

 files, _ := os.ReadDir(tempDir)
-require.Equal(t, 1, len(files))
+require.Len(t, files, 1)
 require.Regexp(t, "^test.log$", files[0].Name())
 }
@@ -119,7 +119,7 @@ func TestWriteToFileInRotation(t *testing.T) {
 log.Printf("I! TEST 1") // Writes 31 bytes, will rotate
 log.Printf("I! TEST") // Writes 29 byes, no rotation expected
 files, _ := os.ReadDir(tempDir)
-require.Equal(t, 2, len(files))
+require.Len(t, files, 2)
 }

 func TestLogTargetSettings(t *testing.T) {
@@ -25,7 +25,7 @@ func TestNewMetric(t *testing.T) {
 require.Equal(t, "cpu", m.Name())
 require.Equal(t, tags, m.Tags())
 require.Equal(t, fields, m.Fields())
-require.Equal(t, 2, len(m.FieldList()))
+require.Len(t, m.FieldList(), 2)
 require.Equal(t, now, m.Time())
 }

@@ -60,7 +60,7 @@ func TestAddTagOverwrites(t *testing.T) {
 value, ok := m.GetTag("host")
 require.True(t, ok)
 require.Equal(t, "example.org", value)
-require.Equal(t, 1, len(m.TagList()))
+require.Len(t, m.TagList(), 1)
 }

 func TestRemoveTagNoEffectOnMissingTags(t *testing.T) {
@@ -108,7 +108,7 @@ func TestAddFieldOverwrites(t *testing.T) {
 m.AddField("value", 1.0)
 m.AddField("value", 42.0)

-require.Equal(t, 1, len(m.FieldList()))
+require.Len(t, m.FieldList(), 1)

 value, ok := m.GetField("value")
 require.True(t, ok)
@@ -121,7 +121,7 @@ func TestAddFieldChangesType(t *testing.T) {
 m.AddField("value", 1.0)
 m.AddField("value", "xyzzy")

-require.Equal(t, 1, len(m.FieldList()))
+require.Len(t, m.FieldList(), 1)

 value, ok := m.GetField("value")
 require.True(t, ok)
@@ -4,9 +4,10 @@ import (
 "testing"
 "time"

+"github.com/stretchr/testify/require"
+
 "github.com/influxdata/telegraf"
 "github.com/influxdata/telegraf/testutil"
-"github.com/stretchr/testify/require"
 )

 func TestAdd(t *testing.T) {
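
Besides the assertion rewrite, the import hunk above and the similar import hunks further down in this diff also move the testify packages into the third-party group ahead of the github.com/influxdata/telegraf packages. The sketch below only illustrates the resulting grouping; the package name, test name, and file are hypothetical and not part of the diff.

// Hypothetical test file header showing the import grouping these hunks
// converge on: standard library first, other third-party modules second,
// and github.com/influxdata/telegraf packages last, with blank lines
// separating the groups.
package example_test

import (
	"testing"
	"time"

	"github.com/stretchr/testify/require"

	"github.com/influxdata/telegraf/testutil"
)

func TestImportGrouping(t *testing.T) {
	require.NotZero(t, time.Now().Unix())
	_ = testutil.Logger{}
}
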
@@ -34,7 +35,7 @@ func TestAdd(t *testing.T) {
 require.False(t, ra.Add(m))
 ra.Push(&acc)

-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 require.Equal(t, int64(101), acc.Metrics[0].Fields["sum"])
 }

@@ -84,7 +85,7 @@ func TestAddMetricsOutsideCurrentPeriod(t *testing.T) {
 require.False(t, ra.Add(m))

 ra.Push(&acc)
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 require.Equal(t, int64(101), acc.Metrics[0].Fields["sum"])
 }

@@ -146,7 +147,7 @@ func TestAddMetricsOutsideCurrentPeriodWithGrace(t *testing.T) {
 require.False(t, ra.Add(m))

 ra.Push(&acc)
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 require.Equal(t, int64(203), acc.Metrics[0].Fields["sum"])
 }

@@ -52,7 +52,7 @@ func TestAurora(t *testing.T) {
 },
 check: func(t *testing.T, err error, acc *testutil.Accumulator) {
 require.NoError(t, err)
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 acc.AssertContainsTaggedFields(t,
 "aurora",
 map[string]interface{}{
@@ -140,7 +140,7 @@ func TestHttpJsonJavaMultiValue(t *testing.T) {
 err := acc.GatherError(cassandra.Gather)

 require.NoError(t, err)
-require.Equal(t, 2, len(acc.Metrics))
+require.Len(t, acc.Metrics, 2)

 fields := map[string]interface{}{
 "HeapMemoryUsage_init": 67108864.0,
@@ -169,7 +169,7 @@ func TestHttpJsonJavaMultiType(t *testing.T) {
 err := acc.GatherError(cassandra.Gather)

 require.NoError(t, err)
-require.Equal(t, 2, len(acc.Metrics))
+require.Len(t, acc.Metrics, 2)

 fields := map[string]interface{}{
 "CollectionCount": 1.0,
@@ -202,7 +202,7 @@ func TestHttpJsonCassandraMultiValue(t *testing.T) {
 err := acc.GatherError(cassandra.Gather)

 require.NoError(t, err)
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)

 fields := map[string]interface{}{
 "ReadLatency_999thPercentile": 20.0,
@@ -234,7 +234,7 @@ func TestHttpJsonCassandraNestedMultiValue(t *testing.T) {
 err := acc.GatherError(cassandra.Gather)

 require.NoError(t, err)
-require.Equal(t, 2, len(acc.Metrics))
+require.Len(t, acc.Metrics, 2)

 fields1 := map[string]interface{}{
 "ReadLatency_999thPercentile": 1.0,
@@ -358,7 +358,7 @@ func TestSelectMetrics(t *testing.T) {
 filtered, err := getFilteredMetrics(c)
 // We've asked for 2 (out of 4) metrics, over all 3 load balancers in all 2
 // AZs. We should get 12 metrics.
-require.Equal(t, 12, len(filtered[0].metrics))
+require.Len(t, filtered[0].metrics, 12)
 require.NoError(t, err)
 }

@@ -139,8 +139,8 @@ func TestGatherDetailedBucketMetrics(t *testing.T) {
 acc.AddFields("couchbase_bucket", fields, nil)

 // Ensure we gathered only one metric (the one that we configured).
-require.Equal(t, len(acc.Metrics), 1)
-require.Equal(t, len(acc.Metrics[0].Fields), 1)
+require.Len(t, acc.Metrics, 1)
+require.Len(t, acc.Metrics[0].Fields, 1)
 })
 }
 }
@@ -167,7 +167,7 @@ func TestGatherNodeOnly(t *testing.T) {
 require.NoError(t, cb.gatherServer(&acc, faker.URL))

 require.Empty(t, acc.Errors)
-require.Equal(t, 7, len(acc.Metrics))
+require.Len(t, acc.Metrics, 7)
 acc.AssertDoesNotContainMeasurement(t, "couchbase_bucket")
 }

@@ -198,7 +198,7 @@ func TestGatherFailover(t *testing.T) {
 var acc testutil.Accumulator
 require.NoError(t, cb.gatherServer(&acc, faker.URL))
 require.Empty(t, acc.Errors)
-require.Equal(t, 8, len(acc.Metrics))
+require.Len(t, acc.Metrics, 8)

 var metric *testutil.Metric
 for _, m := range acc.Metrics {
@@ -89,7 +89,7 @@ func TestCSVGZImport(t *testing.T) {
 r.Stop()

 // Verify that we read both files once.
-require.Equal(t, len(acc.Metrics), 6)
+require.Len(t, acc.Metrics, 6)

 // File should have gone back to the test directory, as we configured.
 _, err = os.Stat(filepath.Join(finishedDirectory, testCsvFile))
@@ -160,7 +160,7 @@ func TestCSVGZImportWithHeader(t *testing.T) {
 r.Stop()

 // Verify that we read both files once.
-require.Equal(t, len(acc.Metrics), 6)
+require.Len(t, acc.Metrics, 6)

 // File should have gone back to the test directory, as we configured.
 _, err = os.Stat(filepath.Join(finishedDirectory, testCsvFile))
@@ -217,7 +217,7 @@ func TestMultipleJSONFileImports(t *testing.T) {
 r.Stop()

 // Verify that we read each JSON line once to a single metric.
-require.Equal(t, len(acc.Metrics), 5)
+require.Len(t, acc.Metrics, 5)
 }

 func TestFileTag(t *testing.T) {
@@ -264,7 +264,7 @@ func TestFileTag(t *testing.T) {
 r.Stop()

 // Verify that we read each JSON line once to a single metric.
-require.Equal(t, len(acc.Metrics), 1)
+require.Len(t, acc.Metrics, 1)
 for _, m := range acc.Metrics {
 for key, value := range m.Tags {
 require.Equal(t, r.FileTag, key)
@@ -328,7 +328,7 @@ hello,80,test_name2`
 r.Stop()

 // Verify that we read both files once.
-require.Equal(t, len(acc.Metrics), 1)
+require.Len(t, acc.Metrics, 1)

 // File should have gone back to the test directory, as we configured.
 _, err = os.Stat(filepath.Join(finishedDirectory, testCsvFile))
@@ -399,7 +399,7 @@ hello,80,test_name2`
 r.Stop()

 // Verify that we read both files once.
-require.Equal(t, len(acc.Metrics), 1)
+require.Len(t, acc.Metrics, 1)

 // File should have gone back to the test directory, as we configured.
 _, err = os.Stat(filepath.Join(finishedDirectory, testCsvFile))
@@ -468,7 +468,7 @@ hello,80,test_name2`
 r.Stop()

 // Verify that we read both files once.
-require.Equal(t, len(acc.Metrics), 1)
+require.Len(t, acc.Metrics, 1)

 // File should have gone back to the test directory, as we configured.
 _, err = os.Stat(filepath.Join(finishedDirectory, testCsvFile))
@@ -3,10 +3,11 @@ package diskio
 import (
 "testing"

-"github.com/influxdata/telegraf/plugins/inputs/system"
-"github.com/influxdata/telegraf/testutil"
 "github.com/shirou/gopsutil/v3/disk"
 "github.com/stretchr/testify/require"
+
+"github.com/influxdata/telegraf/plugins/inputs/system"
+"github.com/influxdata/telegraf/testutil"
 )

 func TestDiskIO(t *testing.T) {
@@ -121,7 +122,7 @@ func TestDiskIO(t *testing.T) {
 "missing point: diskio %v %q: %v", metric.tags, k, v)
 }
 }
-require.Equal(t, len(tt.metrics), int(acc.NMetrics()), "unexpected number of metrics")
+require.Len(t, tt.metrics, int(acc.NMetrics()), "unexpected number of metrics")
 require.True(t, mps.AssertExpectations(t))
 })
 }
@@ -145,11 +145,11 @@ func Test_validateCommands(t *testing.T) {
 "/test", "/test",
 },
 }
-require.Equal(t, 2, len(dpdk.AdditionalCommands))
+require.Len(t, dpdk.AdditionalCommands, 2)

 err := dpdk.validateCommands()

-require.Equal(t, 1, len(dpdk.AdditionalCommands))
+require.Len(t, dpdk.AdditionalCommands, 1)
 require.NoError(t, err)
 })
 }
@@ -188,7 +188,7 @@ func Test_processCommand(t *testing.T) {

 dpdk.processCommand(mockAcc, "/")

-require.Equal(t, 1, len(mockAcc.Errors))
+require.Len(t, mockAcc.Errors, 1)
 require.Contains(t, mockAcc.Errors[0].Error(), "invalid character")
 })

@@ -201,7 +201,7 @@ func Test_processCommand(t *testing.T) {

 dpdk.processCommand(mockAcc, "/")

-require.Equal(t, 1, len(mockAcc.Errors))
+require.Len(t, mockAcc.Errors, 1)
 require.Contains(t, mockAcc.Errors[0].Error(), "deadline exceeded")
 })

@@ -213,7 +213,7 @@ func Test_processCommand(t *testing.T) {

 dpdk.processCommand(mockAcc, "/test,param")

-require.Equal(t, 1, len(mockAcc.Errors))
+require.Len(t, mockAcc.Errors, 1)
 require.Contains(t, mockAcc.Errors[0].Error(), "got empty json on")
 })
 }
@@ -229,7 +229,7 @@ func Test_appendCommandsWithParams(t *testing.T) {
 result, err := dpdk.appendCommandsWithParamsFromList("/testendpoint", []string{"/action1", "/action2"})

 require.NoError(t, err)
-require.Equal(t, 4, len(result))
+require.Len(t, result, 4)
 require.ElementsMatch(t, result, expectedCommands)
 })
 }
@@ -297,7 +297,7 @@ func Test_getCommandsAndParamsCombinations(t *testing.T) {
 commands := dpdk.gatherCommands(mockAcc)

 require.Empty(t, commands)
-require.Equal(t, 1, len(mockAcc.Errors))
+require.Len(t, mockAcc.Errors, 1)
 })
 }

@@ -34,7 +34,7 @@ func TestRefreshFilePaths(t *testing.T) {

 err = r.refreshFilePaths()
 require.NoError(t, err)
-require.Equal(t, 2, len(r.filenames))
+require.Len(t, r.filenames, 2)
 }

 func TestFileTag(t *testing.T) {
@@ -79,7 +79,7 @@ func TestJSONParserCompile(t *testing.T) {

 require.NoError(t, r.Gather(&acc))
 require.Equal(t, map[string]string{"parent_ignored_child": "hi"}, acc.Metrics[0].Tags)
-require.Equal(t, 5, len(acc.Metrics[0].Fields))
+require.Len(t, acc.Metrics[0].Fields, 5)
 }

 func TestGrokParser(t *testing.T) {
@@ -116,7 +116,7 @@ func TestRunGatherOneIteration(t *testing.T) {

 require.NoError(t, gcs.Gather(acc))

-require.Equal(t, 3, len(acc.Metrics))
+require.Len(t, acc.Metrics, 3)
 }

 func TestRunGatherIteratiosnWithLimit(t *testing.T) {
@@ -140,13 +140,13 @@ func TestRunGatherIteratiosnWithLimit(t *testing.T) {

 require.NoError(t, gcs.Gather(acc))

-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 require.NoError(t, gcs.Gather(acc))

-require.Equal(t, 2, len(acc.Metrics))
+require.Len(t, acc.Metrics, 2)
 require.NoError(t, gcs.Gather(acc))

-require.Equal(t, 3, len(acc.Metrics))
+require.Len(t, acc.Metrics, 3)
 }

 func TestRunGatherIterationWithPages(t *testing.T) {
@@ -169,7 +169,7 @@ func TestRunGatherIterationWithPages(t *testing.T) {

 require.NoError(t, gcs.Gather(acc))

-require.Equal(t, 4, len(acc.Metrics))
+require.Len(t, acc.Metrics, 4)
 require.True(t, gcs.offSet.isPresent())
 require.Equal(t, "prefix/1604148850994", gcs.offSet.OffSet)

@@ -777,7 +777,7 @@ func TestWriteWithPrecision(t *testing.T) {
 require.EqualValues(t, 204, resp.StatusCode)

 acc.Wait(1)
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 // When timestamp is provided, the precision parameter is
 // overloaded to specify the timestamp's unit
 require.Equal(t, time.Unix(0, 1422568543000000000), acc.Metrics[0].Time)
@@ -807,7 +807,7 @@ func TestWriteWithPrecisionNoTimestamp(t *testing.T) {
 require.EqualValues(t, 204, resp.StatusCode)

 acc.Wait(1)
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 // When timestamp is omitted, the precision parameter actually
 // specifies the precision. The timestamp is set to the greatest
 // integer unit less than the provided timestamp (floor).
@@ -593,7 +593,7 @@ func TestWriteWithPrecisionNoTimestamp(t *testing.T) {
 require.EqualValues(t, 204, resp.StatusCode)

 acc.Wait(1)
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 // When timestamp is omitted, the precision parameter actually
 // specifies the precision. The timestamp is set to the greatest
 // integer unit less than the provided timestamp (floor).
@@ -127,7 +127,7 @@ func TestGather(t *testing.T) {
 require.NoError(t, power.Gather(&acc))
 // Number of global metrics : 3
 // Number of per core metrics : 7
-require.Equal(t, 3*len(packageIDs)+7*len(coreIDs), len(acc.GetTelegrafMetrics()))
+require.Len(t, acc.GetTelegrafMetrics(), 3*len(packageIDs)+7*len(coreIDs))
 }

 func TestAddGlobalMetricsNegative(t *testing.T) {
@@ -159,7 +159,7 @@ func TestAddGlobalMetricsNegative(t *testing.T) {
 On("getConstraintMaxPowerWatts", mock.Anything).Return(12313851.5, nil).Twice()

 power.addGlobalMetrics(&acc)
-require.Equal(t, 3, len(acc.GetTelegrafMetrics()))
+require.Len(t, acc.GetTelegrafMetrics(), 3)
 }

 func TestAddGlobalMetricsPositive(t *testing.T) {
@@ -178,7 +178,7 @@ func TestAddGlobalMetricsPositive(t *testing.T) {
 On("getCurrentDramPowerConsumption", mock.Anything).Return(dramCurrentEnergy)

 power.addGlobalMetrics(&acc)
-require.Equal(t, 6, len(acc.GetTelegrafMetrics()))
+require.Len(t, acc.GetTelegrafMetrics(), 6)

 expectedResults := getGlobalMetrics(maxPower, socketCurrentEnergy, dramCurrentEnergy)
 for _, test := range expectedResults {
@@ -222,7 +222,7 @@ func TestAddCPUFrequencyMetric(t *testing.T) {
 mockServices.msr.On("retrieveCPUFrequencyForCore", mock.Anything).Return(frequency, nil).Once()

 power.addCPUFrequencyMetric(cpuID, &acc)
-require.Equal(t, 1, len(acc.GetTelegrafMetrics()))
+require.Len(t, acc.GetTelegrafMetrics(), 1)

 expectedFrequency := roundFloatToNearestTwoDecimalPlaces(frequency)
 expectedMetric := getPowerCoreMetric("cpu_frequency_mhz", expectedFrequency, coreID, packageID, cpuID)
@@ -257,7 +257,7 @@ func TestReadUncoreFreq(t *testing.T) {
 power.readUncoreFreq("current", packageID, die, &acc)
 power.readUncoreFreq("initial", packageID, die, &acc)

-require.Equal(t, 2, len(acc.GetTelegrafMetrics()))
+require.Len(t, acc.GetTelegrafMetrics(), 2)

 expectedMetric := getPowerUncoreFreqMetric("initial", float64(500), float64(1200), nil, packageID, die)
 acc.AssertContainsTaggedFields(t, "powerstat_package", expectedMetric.fields, expectedMetric.tags)
@@ -278,7 +278,7 @@ func TestAddCoreCPUTemperatureMetric(t *testing.T) {

 mockServices.msr.On("getCPUCoresData").Return(preparedData).Once()
 power.addCPUTemperatureMetric(cpuID, &acc)
-require.Equal(t, 1, len(acc.GetTelegrafMetrics()))
+require.Len(t, acc.GetTelegrafMetrics(), 1)

 expectedMetric := getPowerCoreMetric("cpu_temperature_celsius", expectedTemp, coreID, packageID, cpuID)
 acc.AssertContainsTaggedFields(t, "powerstat_core", expectedMetric.fields, expectedMetric.tags)
@@ -297,7 +297,7 @@ func TestAddC6StateResidencyMetric(t *testing.T) {

 mockServices.msr.On("getCPUCoresData").Return(preparedData).Twice()
 power.addCPUC6StateResidencyMetric(cpuID, &acc)
-require.Equal(t, 1, len(acc.GetTelegrafMetrics()))
+require.Len(t, acc.GetTelegrafMetrics(), 1)

 expectedMetric := getPowerCoreMetric("cpu_c6_state_residency_percent", expectedC6, coreID, packageID, cpuID)
 acc.AssertContainsTaggedFields(t, "powerstat_core", expectedMetric.fields, expectedMetric.tags)
@@ -323,7 +323,7 @@ func TestAddC0StateResidencyMetric(t *testing.T) {
 mockServices.msr.On("getCPUCoresData").Return(preparedData).Twice()
 power.cpuBusyCycles, power.cpuC0StateResidency = true, true
 power.addCPUC0StateResidencyMetric(cpuID, &acc)
-require.Equal(t, 2, len(acc.GetTelegrafMetrics()))
+require.Len(t, acc.GetTelegrafMetrics(), 2)

 expectedMetric := getPowerCoreMetric("cpu_c0_state_residency_percent", expectedBusyCycles, coreID, packageID, cpuID)
 acc.AssertContainsTaggedFields(t, "powerstat_core", expectedMetric.fields, expectedMetric.tags)
@@ -350,7 +350,7 @@ func TestAddProcessorBusyFrequencyMetric(t *testing.T) {

 mockServices.msr.On("getCPUCoresData").Return(preparedData).Twice()
 power.addCPUBusyFrequencyMetric(cpuID, &acc)
-require.Equal(t, 1, len(acc.GetTelegrafMetrics()))
+require.Len(t, acc.GetTelegrafMetrics(), 1)

 acc.ClearMetrics()
 preparedData[cpuID].mperfDelta = 0
@@ -373,7 +373,7 @@ func TestAddC1StateResidencyMetric(t *testing.T) {
 mockServices.msr.On("getCPUCoresData").Return(preparedData).Twice()

 power.addCPUC1StateResidencyMetric(cpuID, &acc)
-require.Equal(t, 1, len(acc.GetTelegrafMetrics()))
+require.Len(t, acc.GetTelegrafMetrics(), 1)

 expectedMetric := getPowerCoreMetric("cpu_c1_state_residency_percent", expectedC1, coreID, packageID, cpuID)
 acc.AssertContainsTaggedFields(t, "powerstat_core", expectedMetric.fields, expectedMetric.tags)
@@ -398,7 +398,7 @@ func TestAddThermalDesignPowerMetric(t *testing.T) {
 require.Empty(t, acc.GetTelegrafMetrics())

 power.addThermalDesignPowerMetric(sockets[0], &acc)
-require.Equal(t, 1, len(acc.GetTelegrafMetrics()))
+require.Len(t, acc.GetTelegrafMetrics(), 1)

 expectedTDP := roundFloatToNearestTwoDecimalPlaces(maxPower)
 expectedMetric := getPowerGlobalMetric("thermal_design_power_watts", expectedTDP, sockets[0])
@@ -411,7 +411,7 @@ func TestCalculateTurboRatioGroup(t *testing.T) {
 turboRatioLimitGroups := make(map[int]uint64)

 calculateTurboRatioGroup(coreCounts, msr, turboRatioLimitGroups)
-require.Equal(t, 8, len(turboRatioLimitGroups))
+require.Len(t, turboRatioLimitGroups, 8)
 require.Equal(t, uint64(100), turboRatioLimitGroups[1])
 require.Equal(t, uint64(200), turboRatioLimitGroups[2])
 require.Equal(t, uint64(300), turboRatioLimitGroups[3])
@@ -423,7 +423,7 @@ func TestCalculateTurboRatioGroup(t *testing.T) {

 coreCounts = uint64(0x100e0c0a08060402)
 calculateTurboRatioGroup(coreCounts, msr, turboRatioLimitGroups)
-require.Equal(t, 16, len(turboRatioLimitGroups))
+require.Len(t, turboRatioLimitGroups, 16)
 require.Equal(t, uint64(100), turboRatioLimitGroups[1])
 require.Equal(t, uint64(100), turboRatioLimitGroups[2])
 require.Equal(t, uint64(200), turboRatioLimitGroups[3])
@@ -443,14 +443,14 @@ func TestCalculateTurboRatioGroup(t *testing.T) {
 coreCounts = uint64(0x1211)
 msr = uint64(0xfffe)
 calculateTurboRatioGroup(coreCounts, msr, turboRatioLimitGroups)
-require.Equal(t, 18, len(turboRatioLimitGroups))
+require.Len(t, turboRatioLimitGroups, 18)
 require.Equal(t, uint64(25400), turboRatioLimitGroups[17])
 require.Equal(t, uint64(25500), turboRatioLimitGroups[18])

 coreCounts = uint64(0x1201)
 msr = uint64(0x0202)
 calculateTurboRatioGroup(coreCounts, msr, turboRatioLimitGroups)
-require.Equal(t, 18, len(turboRatioLimitGroups))
+require.Len(t, turboRatioLimitGroups, 18)
 require.Equal(t, uint64(200), turboRatioLimitGroups[1])
 require.Equal(t, uint64(200), turboRatioLimitGroups[2])
 require.Equal(t, uint64(200), turboRatioLimitGroups[3])
@@ -474,7 +474,7 @@ func TestCalculateTurboRatioGroup(t *testing.T) {
 msr = uint64(0xfffe)
 turboRatioLimitGroups = make(map[int]uint64)
 calculateTurboRatioGroup(coreCounts, msr, turboRatioLimitGroups)
-require.Equal(t, 2, len(turboRatioLimitGroups))
+require.Len(t, turboRatioLimitGroups, 2)
 require.Equal(t, uint64(25400), turboRatioLimitGroups[17])
 require.Equal(t, uint64(25500), turboRatioLimitGroups[18])
 }
@@ -101,7 +101,7 @@ func TestGostats(t *testing.T) {

 require.NotNil(t, metric)
 require.Equal(t, metric.Measurement, "internal_gostats")
-require.Equal(t, len(metric.Tags), 1)
+require.Len(t, metric.Tags, 1)
 require.Contains(t, metric.Tags, "go_version")

 for name, value := range metric.Fields {
@@ -146,7 +146,7 @@ func TestHttpJsonMultiValue(t *testing.T) {
 err := acc.GatherError(jolokia.Gather)

 require.NoError(t, err)
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)

 fields := map[string]interface{}{
 "heap_memory_usage_init": 67108864.0,
@@ -170,7 +170,7 @@ func TestHttpJsonBulkResponse(t *testing.T) {
 err := jolokia.Gather(&acc)

 require.NoError(t, err)
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)

 fields := map[string]interface{}{
 "heap_memory_usage_init": 67108864.0,
@@ -198,7 +198,7 @@ func TestHttpJsonThreeLevelMultiValue(t *testing.T) {
 err := acc.GatherError(jolokia.Gather)

 require.NoError(t, err)
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)

 fields := map[string]interface{}{
 "heap_memory_usage_java.lang:type=Memory_ObjectPendingFinalizationCount": 0.0,
@@ -218,7 +218,7 @@ func TestKinesisConsumer_onMessage(t *testing.T) {
 t.Errorf("onMessage() error = %v, wantErr %v", err, tt.wantErr)
 }

-require.Equal(t, tt.expected.numberOfMetrics, len(acc.Metrics))
+require.Len(t, acc.Metrics, tt.expected.numberOfMetrics)

 for _, metric := range acc.Metrics {
 if logEventMessage, ok := metric.Fields["message"]; ok {
@@ -394,7 +394,7 @@ func TestErrorWithHostNamePingGather(t *testing.T) {
 },
 }
 require.Error(t, acc.GatherError(p.Gather))
-require.Equal(t, 1, len(acc.Errors))
+require.Len(t, acc.Errors, 1)
 require.Contains(t, acc.Errors[0].Error(), param.error.Error())
 }
 }
@@ -331,7 +331,7 @@ func TestAccRow(t *testing.T) {
 }
 for _, tt := range tests {
 require.NoError(t, p.accRow("pgTEST", tt.fields, &acc, columns))
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 metric := acc.Metrics[0]
 require.Equal(t, tt.dbName, metric.Tags["db"])
 require.Equal(t, tt.server, metric.Tags["server"])
@@ -419,7 +419,7 @@ func TestProcstatLookupMetric(t *testing.T) {
 var acc testutil.Accumulator
 err := acc.GatherError(p.Gather)
 require.NoError(t, err)
-require.Equal(t, len(p.procs)+1, len(acc.Metrics))
+require.Len(t, acc.Metrics, len(p.procs)+1)
 }

 func TestGather_SameTimestamps(t *testing.T) {
@@ -134,7 +134,7 @@ func TestAddPod(t *testing.T) {
 p := pod()
 p.Annotations = map[string]string{"prometheus.io/scrape": "true"}
 registerPod(p, prom)
-require.Equal(t, 1, len(prom.kubernetesPods))
+require.Len(t, prom.kubernetesPods, 1)
 }

 func TestAddPodScrapeConfig(t *testing.T) {
@@ -144,7 +144,7 @@ func TestAddPodScrapeConfig(t *testing.T) {
 p := pod()
 p.Annotations = map[string]string{}
 registerPod(p, prom)
-require.Equal(t, 1, len(prom.kubernetesPods))
+require.Len(t, prom.kubernetesPods, 1)
 }

 func TestAddMultipleDuplicatePods(t *testing.T) {
@@ -157,7 +157,7 @@ func TestAddMultipleDuplicatePods(t *testing.T) {
 registerPod(p, prom)

 urls, _ := prom.GetAllURLs()
-require.Equal(t, 1, len(urls))
+require.Len(t, urls, 1)
 }

 func TestAddMultiplePods(t *testing.T) {
@@ -169,7 +169,7 @@ func TestAddMultiplePods(t *testing.T) {
 p.Name = "Pod2"
 p.Status.PodIP = "127.0.0.2"
 registerPod(p, prom)
-require.Equal(t, 2, len(prom.kubernetesPods))
+require.Len(t, prom.kubernetesPods, 2)
 }

 func TestDeletePods(t *testing.T) {
@@ -17,7 +17,7 @@ func TestUpdateCounters(t *testing.T) {
 ras.updateCounters(&testData[i])
 }

-require.Equal(t, 1, len(ras.cpuSocketCounters), "Should contain counters only for single socket")
+require.Len(t, ras.cpuSocketCounters, 1, "Should contain counters only for single socket")

 for metric, value := range ras.cpuSocketCounters[0] {
 if metric == processorBase {
@@ -97,7 +97,7 @@ func TestMultipleSockets(t *testing.T) {
 for i := range testData {
 ras.updateCounters(&testData[i])
 }
-require.Equal(t, 4, len(ras.cpuSocketCounters), "Should contain counters for four sockets")
+require.Len(t, ras.cpuSocketCounters, 4, "Should contain counters for four sockets")

 for _, metricData := range ras.cpuSocketCounters {
 for metric, value := range metricData {
@@ -121,8 +121,8 @@ func TestMissingDatabase(t *testing.T) {
 func TestEmptyDatabase(t *testing.T) {
 ras := newRas()

-require.Equal(t, 1, len(ras.cpuSocketCounters), "Should contain default counters for one socket")
-require.Equal(t, 2, len(ras.serverCounters), "Should contain default counters for server")
+require.Len(t, ras.cpuSocketCounters, 1, "Should contain default counters for one socket")
+require.Len(t, ras.serverCounters, 2, "Should contain default counters for server")

 for metric, value := range ras.cpuSocketCounters[0] {
 require.Equal(t, int64(0), value, fmt.Sprintf("%s should have value of 0", metric))
@@ -21,7 +21,7 @@ func TestRawPacketHeaderFlowData(t *testing.T) {
 require.NotNil(t, fields)
 require.NotNil(t, tags)
 require.Contains(t, tags, "header_protocol")
-require.Equal(t, 1, len(tags))
+require.Len(t, tags, 1)
 }

 // process a raw ethernet packet without any encapsulated protocol
@@ -4,9 +4,10 @@ import (
 "os"
 "testing"

+"github.com/stretchr/testify/require"
+
 "github.com/influxdata/telegraf/config"
 "github.com/influxdata/telegraf/testutil"
-"github.com/stretchr/testify/require"
 )

 func TestAzureSQLIntegration_ArcManaged_DatabaseIO_Query(t *testing.T) {
@@ -94,7 +95,7 @@ func TestAzureSQLIntegration_ArcManaged_ServerProperties_Query(t *testing.T) {
 require.True(t, acc.HasTag("sqlserver_server_properties", "replica_updateability"))

 // This query should only return one row
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 server.Stop()
 }

@@ -52,7 +52,7 @@ func TestAzureSQLIntegration_Database_ResourceStats_Query(t *testing.T) {
 require.True(t, acc.HasTag("sqlserver_azure_db_resource_stats", "replica_updateability"))

 // This query should only return one row
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 server.Stop()
 }

@@ -241,7 +241,7 @@ func TestAzureSQLIntegration_Database_ServerProperties_Query(t *testing.T) {
 require.True(t, acc.HasTag("sqlserver_server_properties", "replica_updateability"))

 // This query should only return one row
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 server.Stop()
 }

@@ -4,9 +4,10 @@ import (
 "os"
 "testing"

+"github.com/stretchr/testify/require"
+
 "github.com/influxdata/telegraf/config"
 "github.com/influxdata/telegraf/testutil"
-"github.com/stretchr/testify/require"
 )

 func TestAzureSQLIntegration_Managed_ResourceStats_Query(t *testing.T) {
@@ -39,7 +40,7 @@ func TestAzureSQLIntegration_Managed_ResourceStats_Query(t *testing.T) {
 require.True(t, acc.HasTag("sqlserver_azure_db_resource_stats", "replica_updateability"))

 // This query should only return one row
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 server.Stop()
 }

@@ -172,7 +173,7 @@ func TestAzureSQLIntegration_Managed_ServerProperties_Query(t *testing.T) {
 require.True(t, acc.HasTag("sqlserver_server_properties", "replica_updateability"))

 // This query should only return one row
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 server.Stop()
 }

@@ -4,9 +4,10 @@ import (
 "os"
 "testing"

+"github.com/stretchr/testify/require"
+
 "github.com/influxdata/telegraf/config"
 "github.com/influxdata/telegraf/testutil"
-"github.com/stretchr/testify/require"
 )

 func TestAzureSQLIntegration_ElasticPool_ResourceStats_Query(t *testing.T) {
@@ -48,7 +49,7 @@ func TestAzureSQLIntegration_ElasticPool_ResourceStats_Query(t *testing.T) {
 require.True(t, acc.HasFloatField("sqlserver_pool_resource_stats", "avg_allocated_storage_percent"))

 // This query should only return one row
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 server.Stop()
 }

@@ -112,7 +113,7 @@ func TestAzureSQLIntegration_ElasticPool_ResourceGovernance_Query(t *testing.T)
 require.True(t, acc.HasInt64Field("sqlserver_pool_resource_governance", "volume_type_pfs_iops"))

 // This query should only return one row
-require.Equal(t, 1, len(acc.Metrics))
+require.Len(t, acc.Metrics, 1)
 server.Stop()
 }

@@ -37,7 +37,7 @@ func TestSqlServer_QueriesInclusionExclusion(t *testing.T) {
 Log: testutil.Logger{},
 }
 require.NoError(t, s.initQueries())
-require.Equal(t, len(s.queries), test["queriesTotal"].(int))
+require.Len(t, s.queries, test["queriesTotal"].(int))
 for _, query := range test["queries"].([]string) {
 require.Contains(t, s.queries, query)
 }
@@ -157,7 +157,7 @@ func TestInvalidJSON(t *testing.T) {
 }
 var acc testutil.Accumulator
 require.NoError(t, plugin.Gather(&acc))
-require.Equal(t, 1, len(acc.Errors))
+require.Len(t, acc.Errors, 1)
 }

 func TestHttpError(t *testing.T) {
@@ -167,7 +167,7 @@ func TestHttpError(t *testing.T) {
 }
 var acc testutil.Accumulator
 require.NoError(t, plugin.Gather(&acc))
-require.Equal(t, 1, len(acc.Errors))
+require.Len(t, acc.Errors, 1)
 }

 func TestTcpError(t *testing.T) {
@@ -176,7 +176,7 @@ func TestTcpError(t *testing.T) {
 }
 var acc testutil.Accumulator
 require.NoError(t, plugin.Gather(&acc))
-require.Equal(t, 1, len(acc.Errors))
+require.Len(t, acc.Errors, 1)
 }

 func TestUnixSocketError(t *testing.T) {
@@ -185,5 +185,5 @@ func TestUnixSocketError(t *testing.T) {
 }
 var acc testutil.Accumulator
 require.NoError(t, plugin.Gather(&acc))
-require.Equal(t, 1, len(acc.Errors))
+require.Len(t, acc.Errors, 1)
 }
@@ -48,7 +48,7 @@ func TestParseFullOutput(t *testing.T) {
 acc.HasMeasurement("varnish")
 flat := flatten(acc.Metrics)
 require.Len(t, acc.Metrics, 6)
-require.Equal(t, 293, len(flat))
+require.Len(t, flat, 293)
 }

 func TestFilterSomeStats(t *testing.T) {
@@ -62,7 +62,7 @@ func TestFilterSomeStats(t *testing.T) {
 acc.HasMeasurement("varnish")
 flat := flatten(acc.Metrics)
 require.Len(t, acc.Metrics, 2)
-require.Equal(t, 16, len(flat))
+require.Len(t, flat, 16)
 }

 func TestFieldConfig(t *testing.T) {
@@ -83,7 +83,7 @@ func TestFieldConfig(t *testing.T) {

 acc.HasMeasurement("varnish")
 flat := flatten(acc.Metrics)
-require.Equal(t, expected, len(flat))
+require.Len(t, flat, expected)
 }
 }

@@ -555,7 +555,7 @@ func TestVersions(t *testing.T) {
 require.NoError(t, err)
 err = server.processMetricsV2(c.activeReloadPrefix, acc, bytes.NewBuffer(output))
 require.NoError(t, err)
-require.Equal(t, c.size, len(acc.Metrics))
+require.Len(t, acc.Metrics, c.size)
 for _, m := range acc.Metrics {
 require.NotEmpty(t, m.Fields)
 require.Equal(t, m.Measurement, "varnish")
@@ -175,8 +175,8 @@ func testAlignUniform(t *testing.T, n int) {
 }
 e := Endpoint{log: testutil.Logger{}}
 newInfo, newValues := e.alignSamples(info, values, 60*time.Second)
-require.Equal(t, n/3, len(newInfo), "Aligned infos have wrong size")
-require.Equal(t, n/3, len(newValues), "Aligned values have wrong size")
+require.Len(t, newInfo, n/3, "Aligned infos have wrong size")
+require.Len(t, newValues, n/3, "Aligned values have wrong size")
 for _, v := range newValues {
 require.Equal(t, 1.0, v, "Aligned value should be 1")
 }
@@ -201,8 +201,8 @@ func TestAlignMetrics(t *testing.T) {
 }
 e := Endpoint{log: testutil.Logger{}}
 newInfo, newValues := e.alignSamples(info, values, 60*time.Second)
-require.Equal(t, n/3, len(newInfo), "Aligned infos have wrong size")
-require.Equal(t, n/3, len(newValues), "Aligned values have wrong size")
+require.Len(t, newInfo, n/3, "Aligned infos have wrong size")
+require.Len(t, newValues, n/3, "Aligned values have wrong size")
 for _, v := range newValues {
 require.Equal(t, 2.0, v, "Aligned value should be 2")
 }
@@ -251,7 +251,7 @@ func testLookupVM(ctx context.Context, t *testing.T, f *Finder, path string, exp
 var vm []mo.VirtualMachine
 err := f.Find(ctx, "VirtualMachine", path, &vm)
 require.NoError(t, err)
-require.Equal(t, expected, len(vm))
+require.Len(t, vm, expected)
 if expectedName != "" {
 require.Equal(t, expectedName, vm[0].Name)
 }
@@ -281,31 +281,31 @@ func TestFinder(t *testing.T) {
 var dc []mo.Datacenter
 err = f.Find(ctx, "Datacenter", "/DC0", &dc)
 require.NoError(t, err)
-require.Equal(t, 1, len(dc))
+require.Len(t, dc, 1)
 require.Equal(t, "DC0", dc[0].Name)

 var host []mo.HostSystem
 err = f.Find(ctx, "HostSystem", "/DC0/host/DC0_H0/DC0_H0", &host)
 require.NoError(t, err)
-require.Equal(t, 1, len(host))
+require.Len(t, host, 1)
 require.Equal(t, "DC0_H0", host[0].Name)

 host = []mo.HostSystem{}
 err = f.Find(ctx, "HostSystem", "/DC0/host/DC0_C0/DC0_C0_H0", &host)
 require.NoError(t, err)
-require.Equal(t, 1, len(host))
+require.Len(t, host, 1)
 require.Equal(t, "DC0_C0_H0", host[0].Name)

 var resourcepool = []mo.ResourcePool{}
 err = f.Find(ctx, "ResourcePool", "/DC0/host/DC0_C0/Resources/DC0_C0_RP0", &resourcepool)
 require.NoError(t, err)
-require.Equal(t, 1, len(host))
+require.Len(t, host, 1)
 require.Equal(t, "DC0_C0_H0", host[0].Name)

 host = []mo.HostSystem{}
 err = f.Find(ctx, "HostSystem", "/DC0/host/DC0_C0/*", &host)
 require.NoError(t, err)
-require.Equal(t, 3, len(host))
+require.Len(t, host, 3)

 var vm []mo.VirtualMachine
 testLookupVM(ctx, t, &f, "/DC0/vm/DC0_H0_VM0", 1, "")
@@ -324,7 +324,7 @@ func TestFinder(t *testing.T) {
 vm = []mo.VirtualMachine{}
 err = f.FindAll(ctx, "VirtualMachine", []string{"/DC0/vm/DC0_H0*", "/DC0/vm/DC0_C0*"}, []string{}, &vm)
 require.NoError(t, err)
-require.Equal(t, 4, len(vm))
+require.Len(t, vm, 4)

 rf := ResourceFilter{
 finder: &f,
@@ -334,7 +334,7 @@ func TestFinder(t *testing.T) {
 }
 vm = []mo.VirtualMachine{}
 require.NoError(t, rf.FindAll(ctx, &vm))
-require.Equal(t, 3, len(vm))
+require.Len(t, vm, 3)

 rf = ResourceFilter{
 finder: &f,
@@ -364,7 +364,7 @@ func TestFinder(t *testing.T) {
 }
 vm = []mo.VirtualMachine{}
 require.NoError(t, rf.FindAll(ctx, &vm))
-require.Equal(t, 8, len(vm))
+require.Len(t, vm, 8)

 rf = ResourceFilter{
 finder: &f,
@@ -374,7 +374,7 @@ func TestFinder(t *testing.T) {
 }
 vm = []mo.VirtualMachine{}
 require.NoError(t, rf.FindAll(ctx, &vm))
-require.Equal(t, 4, len(vm))
+require.Len(t, vm, 4)
 }

 func TestFolders(t *testing.T) {
@@ -399,13 +399,13 @@ func TestFolders(t *testing.T) {
 var folder []mo.Folder
 err = f.Find(ctx, "Folder", "/F0", &folder)
 require.NoError(t, err)
-require.Equal(t, 1, len(folder))
+require.Len(t, folder, 1)
 require.Equal(t, "F0", folder[0].Name)

 var dc []mo.Datacenter
 err = f.Find(ctx, "Datacenter", "/F0/DC1", &dc)
 require.NoError(t, err)
-require.Equal(t, 1, len(dc))
+require.Len(t, dc, 1)
 require.Equal(t, "DC1", dc[0].Name)

 testLookupVM(ctx, t, &f, "/F0/DC0/vm/**/F*", 0, "")
@@ -448,11 +448,11 @@ func TestVsanTags(t *testing.T) {
 host: {UUID: host, Type: "HOSTNAME", Owner: host, Content: CmmdsContent{Hostname: hostname}},
 }
 tags := populateCMMDSTags(make(map[string]string), "capacity-disk", disk, cmmds)
-require.Equal(t, 2, len(tags))
+require.Len(t, tags, 2)
 tags = populateCMMDSTags(make(map[string]string), "cache-disk", ssdDisk, cmmds)
-require.Equal(t, 3, len(tags))
+require.Len(t, tags, 3)
 tags = populateCMMDSTags(make(map[string]string), "host-domclient", host, cmmds)
-require.Equal(t, 1, len(tags))
+require.Len(t, tags, 1)
 }

 func TestCollectionNoClusterMetrics(t *testing.T) {
@@ -431,8 +431,8 @@ func TestSourcesToURLs(t *testing.T) {
 for _, p := range m.locations {
 actual = append(actual, p.String())
 }
-require.Equal(t, len(m.globpaths), 5)
-require.Equal(t, len(m.locations), 3)
+require.Len(t, m.globpaths, 5)
+require.Len(t, m.locations, 3)
 require.ElementsMatch(t, expected, actual)
 }

@@ -31,7 +31,7 @@ func TestBuildDimensions(t *testing.T) {
 sort.Strings(tagKeys)

 if len(testPoint.Tags()) >= maxDimensions {
-require.Equal(t, maxDimensions, len(dimensions), "Number of dimensions should be less than MaxDimensions")
+require.Len(t, dimensions, maxDimensions, "Number of dimensions should be less than MaxDimensions")
 } else {
 require.Equal(t, len(testPoint.Tags()), len(dimensions), "Number of dimensions should be equal to number of tags")
 }
@@ -68,7 +68,7 @@ func TestBuildMetricDatums(t *testing.T) {
 }
 for _, point := range validMetrics {
 datums := BuildMetricDatum(false, false, point)
-require.Equal(t, 1, len(datums), fmt.Sprintf("Valid point should create a Datum {value: %v}", point))
+require.Len(t, datums, 1, fmt.Sprintf("Valid point should create a Datum {value: %v}", point))
 }
 for _, point := range invalidMetrics {
 datums := BuildMetricDatum(false, false, point)
@@ -82,7 +82,7 @@ func TestBuildMetricDatums(t *testing.T) {
 time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
 )
 datums := BuildMetricDatum(true, false, statisticMetric)
-require.Equal(t, 1, len(datums), fmt.Sprintf("Valid point should create a Datum {value: %v}", statisticMetric))
+require.Len(t, datums, 1, fmt.Sprintf("Valid point should create a Datum {value: %v}", statisticMetric))

 multiFieldsMetric := metric.New(
 "test1",
@@ -91,7 +91,7 @@ func TestBuildMetricDatums(t *testing.T) {
 time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
 )
 datums = BuildMetricDatum(true, false, multiFieldsMetric)
-require.Equal(t, 4, len(datums), fmt.Sprintf("Each field should create a Datum {value: %v}", multiFieldsMetric))
+require.Len(t, datums, 4, fmt.Sprintf("Each field should create a Datum {value: %v}", multiFieldsMetric))

 multiStatisticMetric := metric.New(
 "test1",
@@ -105,7 +105,7 @@ func TestBuildMetricDatums(t *testing.T) {
 time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC),
 )
 datums = BuildMetricDatum(true, false, multiStatisticMetric)
-require.Equal(t, 7, len(datums), fmt.Sprintf("Valid point should create a Datum {value: %v}", multiStatisticMetric))
+require.Len(t, datums, 7, fmt.Sprintf("Valid point should create a Datum {value: %v}", multiStatisticMetric))
 }

 func TestMetricDatumResolution(t *testing.T) {
@@ -573,7 +573,7 @@ func TestWrite(t *testing.T) {
 mockCwl.Init(tt.logStreamName)
 plugin.svc = mockCwl
 require.NoError(t, plugin.Write(tt.metrics))
-require.Equal(t, tt.expectedMetricsCount, len(mockCwl.pushedLogEvents))
+require.Len(t, mockCwl.pushedLogEvents, tt.expectedMetricsCount)

 for index, elem := range mockCwl.pushedLogEvents {
 require.Equal(t, *elem.Message, tt.metrics[tt.expectedMetricsOrder[index]].Fields()["message"])
@@ -214,7 +214,7 @@ func TestSendSingleMetricWithUnorderedTags(t *testing.T) {
 require.NoError(t, err)
 bodyString := string(bodyBytes)
 // use regex because dimension order isn't guaranteed
-require.Equal(t, len(bodyString), 94)
+require.Len(t, bodyString, 94)
 require.Regexp(t, regexp.MustCompile(`^mymeasurement\.myfield`), bodyString)
 require.Regexp(t, regexp.MustCompile(`a=test`), bodyString)
 require.Regexp(t, regexp.MustCompile(`b=test`), bodyString)
@@ -302,7 +302,7 @@ func TestSendMetricWithUpperCaseTagKeys(t *testing.T) {
 bodyString := string(bodyBytes)

 // use regex because dimension order isn't guaranteed
-require.Equal(t, len(bodyString), 100)
+require.Len(t, bodyString, 100)
 require.Regexp(t, regexp.MustCompile(`^mymeasurement\.myfield`), bodyString)
 require.Regexp(t, regexp.MustCompile(`aaa=test`), bodyString)
 require.Regexp(t, regexp.MustCompile(`b_b=test`), bodyString)
@@ -348,7 +348,7 @@ func TestSendBooleanMetricWithoutTags(t *testing.T) {
 require.NoError(t, err)
 bodyString := string(bodyBytes)
 // use regex because field order isn't guaranteed
-require.Equal(t, len(bodyString), 132)
+require.Len(t, bodyString, 132)
 require.Contains(t, bodyString, "mymeasurement.yes,dt.metrics.source=telegraf gauge,1 1289430000000")
 require.Contains(t, bodyString, "mymeasurement.no,dt.metrics.source=telegraf gauge,0 1289430000000")
 err = json.NewEncoder(w).Encode(`{"linesOk":1,"linesInvalid":0,"error":null}`)
@@ -389,7 +389,7 @@ func TestSendMetricWithDefaultDimensions(t *testing.T) {
 require.NoError(t, err)
 bodyString := string(bodyBytes)
 // use regex because field order isn't guaranteed
-require.Equal(t, len(bodyString), 78)
+require.Len(t, bodyString, 78)
 require.Regexp(t, regexp.MustCompile("^mymeasurement.value"), bodyString)
 require.Regexp(t, regexp.MustCompile("dt.metrics.source=telegraf"), bodyString)
 require.Regexp(t, regexp.MustCompile("dim=value"), bodyString)
@@ -432,7 +432,7 @@ func TestMetricDimensionsOverrideDefault(t *testing.T) {
 require.NoError(t, err)
 bodyString := string(bodyBytes)
 // use regex because field order isn't guaranteed
-require.Equal(t, len(bodyString), 80)
+require.Len(t, bodyString, 80)
 require.Regexp(t, regexp.MustCompile("^mymeasurement.value"), bodyString)
 require.Regexp(t, regexp.MustCompile("dt.metrics.source=telegraf"), bodyString)
 require.Regexp(t, regexp.MustCompile("dim=metric"), bodyString)
@@ -475,7 +475,7 @@ func TestStaticDimensionsOverrideMetric(t *testing.T) {
 require.NoError(t, err)
 bodyString := string(bodyBytes)
 // use regex because field order isn't guaranteed
-require.Equal(t, len(bodyString), 53)
+require.Len(t, bodyString, 53)
 require.Regexp(t, regexp.MustCompile("^mymeasurement.value"), bodyString)
 require.Regexp(t, regexp.MustCompile("dim=static"), bodyString)
 require.Regexp(t, regexp.MustCompile("gauge,32 1289430000000$"), bodyString)
@@ -9,11 +9,12 @@ import (
 "time"

 eventhub "github.com/Azure/azure-event-hubs-go/v3"
+"github.com/stretchr/testify/mock"
+"github.com/stretchr/testify/require"
+
 "github.com/influxdata/telegraf/config"
 "github.com/influxdata/telegraf/plugins/serializers/json"
 "github.com/influxdata/telegraf/testutil"
-"github.com/stretchr/testify/mock"
-"github.com/stretchr/testify/require"
 )

 /*
@@ -157,5 +158,5 @@ wait:
 }

 // Make sure received == sent
-require.Equal(t, received, len(metrics))
+require.Len(t, metrics, received)
 }
@@ -171,7 +171,7 @@ func TestTruncate(t *testing.T) {
 for _, tt := range tests {
 t.Run(tt.name, func(t *testing.T) {
 s := c.truncate(*tt.buf)
-require.Equal(t, tt.len, len(s))
+require.Len(t, s, tt.len)
 })
 }
 }
@@ -166,7 +166,7 @@ func TestRoutingKey(t *testing.T) {
 return m
 }(),
 check: func(t *testing.T, routingKey string) {
-require.Equal(t, 36, len(routingKey))
+require.Len(t, routingKey, 36)
 },
 },
 }
@@ -662,7 +662,7 @@ func TestGetStackdriverLabels(t *testing.T) {
 }

 labels := s.getStackdriverLabels(tags)
-require.Equal(t, QuotaLabelsPerMetricDescriptor, len(labels))
+require.Len(t, labels, QuotaLabelsPerMetricDescriptor)
 }

 func TestGetStackdriverIntervalEndpoints(t *testing.T) {
@@ -262,7 +262,7 @@ func TestWriteMultiMeasuresSingleTableMode(t *testing.T) {
 // validate multi-record generation
 result := plugin.TransformMetrics(inputs)
 // 'inputs' has a total of 101 metrics transformed to 2 writeRecord calls to TS
-require.Equal(t, 2, len(result), "Expected 2 WriteRecordsInput requests")
+require.Len(t, result, 2, "Expected 2 WriteRecordsInput requests")

 var transformedRecords []types.Record
 for _, r := range result {
@@ -271,7 +271,7 @@ func TestWriteMultiMeasuresSingleTableMode(t *testing.T) {
 require.Equal(t, *r.Records[0].MeasureName, "multi_measure_name")
 }
 // Expected 101 records
-require.Equal(t, recordCount+1, len(transformedRecords), "Expected 101 records after transforming")
+require.Len(t, transformedRecords, recordCount+1, "Expected 101 records after transforming")
 // validate write to TS
 err := plugin.Write(inputs)
 require.NoError(t, err, "Write to Timestream failed")
@@ -320,7 +320,7 @@ func TestWriteMultiMeasuresMultiTableMode(t *testing.T) {
 // validate multi-record generation
 result := plugin.TransformMetrics(inputs)
 // 'inputs' has a total of 101 metrics transformed to 2 writeRecord calls to TS
-require.Equal(t, 1, len(result), "Expected 1 WriteRecordsInput requests")
+require.Len(t, result, 1, "Expected 1 WriteRecordsInput requests")

 // Assert that we use measure name from config
 require.Equal(t, *result[0].Records[0].MeasureName, "config-multi-measure-name")
@@ -330,7 +330,7 @@ func TestWriteMultiMeasuresMultiTableMode(t *testing.T) {
 transformedRecords = append(transformedRecords, r.Records...)
 }
 // Expected 100 records
-require.Equal(t, recordCount, len(transformedRecords), "Expected 100 records after transforming")
+require.Len(t, transformedRecords, recordCount, "Expected 100 records after transforming")

 for _, input := range inputs {
 fmt.Println("Input", input)
@@ -397,7 +397,7 @@ func TestBuildMultiMeasuresInSingleAndMultiTableMode(t *testing.T) {

 // validate multi-record generation with MappingModeMultiTable
 result := plugin.TransformMetrics([]telegraf.Metric{input1, input2, input3, input4})
-require.Equal(t, 1, len(result), "Expected 1 WriteRecordsInput requests")
+require.Len(t, result, 1, "Expected 1 WriteRecordsInput requests")

 require.EqualValues(t, result[0], expectedResultMultiTable)

@@ -422,7 +422,7 @@ func TestBuildMultiMeasuresInSingleAndMultiTableMode(t *testing.T) {

 // validate multi-record generation with MappingModeSingleTable
 result = plugin.TransformMetrics([]telegraf.Metric{input1, input2, input3, input4})
-require.Equal(t, 1, len(result), "Expected 1 WriteRecordsInput requests")
+require.Len(t, result, 1, "Expected 1 WriteRecordsInput requests")

 require.EqualValues(t, result[0], expectedResultSingleTable)

@@ -408,7 +408,7 @@ func TestMakeAuthOptions(t *testing.T) {
 cspAPIWavefront.AuthCSPAPIToken = config.NewSecret([]byte("fake-app-token"))
 options, err := cspAPIWavefront.makeAuthOptions()
 require.NoError(t, err)
-require.Equal(t, 1, len(options))
+require.Len(t, options, 1)

 cspClientCredsWavefront := outputs.Outputs["wavefront"]().(*Wavefront)
 cspClientCredsWavefront.AuthCSPClientCredentials = &authCSPClientCredentials{
@@ -417,7 +417,7 @@ func TestMakeAuthOptions(t *testing.T) {
 }
 options, err = cspClientCredsWavefront.makeAuthOptions()
 require.NoError(t, err)
-require.Equal(t, 1, len(options))
+require.Len(t, options, 1)

 orgID := "org-id"
 cspClientCredsWithOrgIDWavefront := outputs.Outputs["wavefront"]().(*Wavefront)
@@ -428,13 +428,13 @@ func TestMakeAuthOptions(t *testing.T) {
 }
 options, err = cspClientCredsWithOrgIDWavefront.makeAuthOptions()
 require.NoError(t, err)
-require.Equal(t, 1, len(options))
+require.Len(t, options, 1)

 apiTokenWavefront := outputs.Outputs["wavefront"]().(*Wavefront)
 apiTokenWavefront.AuthCSPAPIToken = config.NewSecret([]byte("fake-wavefront-api-token"))
 options, err = apiTokenWavefront.makeAuthOptions()
 require.NoError(t, err)
-require.Equal(t, 1, len(options))
+require.Len(t, options, 1)

 noAuthOptionsWavefront := outputs.Outputs["wavefront"]().(*Wavefront)
 options, err = noAuthOptionsWavefront.makeAuthOptions()
@@ -146,7 +146,7 @@ func TestParseMultiValueSplit(t *testing.T) {
 metrics, err := parser.Parse(bytes)
 require.NoError(t, err)

-require.Equal(t, 2, len(metrics))
+require.Len(t, metrics, 2)
 }

 func TestParseMultiValueJoin(t *testing.T) {
@@ -160,7 +160,7 @@ func TestParseMultiValueJoin(t *testing.T) {
 metrics, err := parser.Parse(bytes)
 require.NoError(t, err)

-require.Equal(t, 1, len(metrics))
+require.Len(t, metrics, 1)
 }

 func TestParse_DefaultTags(t *testing.T) {
|
|
|||
|
|
@ -580,7 +580,7 @@ func TestJSONParseNestedArray(t *testing.T) {
|
|||
actual, err := parser.Parse([]byte(testString))
|
||||
require.Len(t, actual, 1)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 3, len(actual[0].Tags()))
|
||||
require.Len(t, actual[0].Tags(), 3)
|
||||
}
|
||||
|
||||
func TestJSONQueryErrorOnArray(t *testing.T) {
|
||||
|
|
@ -643,7 +643,7 @@ func TestArrayOfObjects(t *testing.T) {
|
|||
|
||||
actual, err := parser.Parse([]byte(testString))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 3, len(actual))
|
||||
require.Len(t, actual, 3)
|
||||
}
|
||||
|
||||
func TestUseCaseJSONQuery(t *testing.T) {
|
||||
|
|
@ -671,7 +671,7 @@ func TestUseCaseJSONQuery(t *testing.T) {
|
|||
|
||||
actual, err := parser.Parse([]byte(testString))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 3, len(actual))
|
||||
require.Len(t, actual, 3)
|
||||
require.Equal(t, actual[0].Fields()["last"], "Murphy")
|
||||
}
|
||||
|
||||
|
|
@ -706,7 +706,7 @@ func TestTimeParser(t *testing.T) {
|
|||
|
||||
actual, err := parser.Parse([]byte(testString))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 2, len(actual))
|
||||
require.Len(t, actual, 2)
|
||||
require.NotEqual(t, actual[0].Time(), actual[1].Time())
|
||||
}
|
||||
|
||||
|
|
@ -725,7 +725,7 @@ func TestTimeParserWithTimezone(t *testing.T) {
|
|||
|
||||
actual, err := parser.Parse([]byte(testString))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(actual))
|
||||
require.Len(t, actual, 1)
|
||||
require.EqualValues(t, int64(1136405040000000000), actual[0].Time().UnixNano())
|
||||
}
|
||||
|
||||
|
|
@ -760,7 +760,7 @@ func TestUnixTimeParser(t *testing.T) {
|
|||
|
||||
actual, err := parser.Parse([]byte(testString))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 2, len(actual))
|
||||
require.Len(t, actual, 2)
|
||||
require.NotEqual(t, actual[0].Time(), actual[1].Time())
|
||||
}
|
||||
|
||||
|
|
@ -795,7 +795,7 @@ func TestUnixMsTimeParser(t *testing.T) {
|
|||
|
||||
actual, err := parser.Parse([]byte(testString))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 2, len(actual))
|
||||
require.Len(t, actual, 2)
|
||||
require.NotEqual(t, actual[0].Time(), actual[1].Time())
|
||||
}
|
||||
|
||||
|
|
@ -849,7 +849,7 @@ func TestShareTimestamp(t *testing.T) {
|
|||
|
||||
actual, err := parser.Parse([]byte(validJSONArrayMultiple))
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 2, len(actual))
|
||||
require.Len(t, actual, 2)
|
||||
require.Equal(t, actual[0].Time(), actual[1].Time())
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ func TestAddTags(t *testing.T) {
|
|||
value, present := tags["added_tag"]
|
||||
require.True(t, present, "Additional Tag of metric was not present")
|
||||
require.Equal(t, "from_config", value, "Value of Tag was changed")
|
||||
require.Equal(t, 3, len(tags), "Should have one previous and two added tags.")
|
||||
require.Len(t, tags, 3, "Should have one previous and two added tags.")
|
||||
}
|
||||
|
||||
func TestOverwritesPresentTagValues(t *testing.T) {
|
||||
|
|
@ -52,7 +52,7 @@ func TestOverwritesPresentTagValues(t *testing.T) {
|
|||
|
||||
value, present := tags["metric_tag"]
|
||||
require.True(t, present, "Tag of metric was not present")
|
||||
require.Equal(t, 1, len(tags), "Should only have one tag.")
|
||||
require.Len(t, tags, 1, "Should only have one tag.")
|
||||
require.Equal(t, "from_config", value, "Value of Tag was not changed")
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -42,7 +42,7 @@ func TestAddTags(t *testing.T) {
|
|||
value, present := tags["added_tag"]
|
||||
require.True(t, present, "Additional Tag of metric was not present")
|
||||
require.Equal(t, "from_config", value, "Value of Tag was changed")
|
||||
require.Equal(t, 3, len(tags), "Should have one previous and two added tags.")
|
||||
require.Len(t, tags, 3, "Should have one previous and two added tags.")
|
||||
}
|
||||
|
||||
func TestOverwritesPresentTagValues(t *testing.T) {
|
||||
|
|
@ -52,7 +52,7 @@ func TestOverwritesPresentTagValues(t *testing.T) {
|
|||
|
||||
value, present := tags["metric_tag"]
|
||||
require.True(t, present, "Tag of metric was not present")
|
||||
require.Equal(t, 1, len(tags), "Should only have one tag.")
|
||||
require.Len(t, tags, 1, "Should only have one tag.")
|
||||
require.Equal(t, "from_config", value, "Value of Tag was not changed")
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -81,7 +81,7 @@ func TestTrim(t *testing.T) {
|
|||
limitApply := tagLimitConfig.Apply(m1, m2)
|
||||
require.Equal(t, threeTags, limitApply[0].Tags(), "three tags")
|
||||
trimmedTags := limitApply[1].Tags()
|
||||
require.Equal(t, 3, len(trimmedTags), "ten tags")
|
||||
require.Len(t, trimmedTags, 3, "ten tags")
|
||||
require.Equal(t, "foo", trimmedTags["a"], "preserved: a")
|
||||
require.Equal(t, "bar", trimmedTags["b"], "preserved: b")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -131,7 +131,7 @@ func TestMetricMissingTagsIsNotLost(t *testing.T) {
|
|||
|
||||
// assert
|
||||
// make sure no metrics are lost when a template process fails
|
||||
require.Equal(t, 2, len(actual), "Number of metrics input should equal number of metrics output")
|
||||
require.Len(t, actual, 2, "Number of metrics input should equal number of metrics output")
|
||||
}
|
||||
|
||||
func TestTagAndFieldConcatenate(t *testing.T) {
|
||||
|
|
|
|||
|
|
@ -6,9 +6,10 @@ import (
|
|||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/influxdata/telegraf"
|
||||
"github.com/influxdata/telegraf/metric"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestReader(t *testing.T) {
|
||||
|
|
@ -148,7 +149,7 @@ func TestReader(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
}
|
||||
require.Equal(t, tt.expected, data.Bytes())
|
||||
require.Equal(t, len(tt.expected), total)
|
||||
require.Len(t, tt.expected, total)
|
||||
})
|
||||
}
|
||||
}
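
For context, a minimal sketch (not part of this commit; the test name and metrics slice are made up) of the pattern testifylint's `len` checker enforces throughout these hunks: assert a collection's length with require.Len instead of comparing against len().

package example

import (
	"testing"

	"github.com/stretchr/testify/require"
)

func TestLenAssertion(t *testing.T) {
	metrics := []string{"cpu", "mem"}

	// Flagged by testifylint: on failure this only reports the two mismatched numbers.
	require.Equal(t, 2, len(metrics))

	// Preferred form: on failure this reports the object's contents along with its length.
	require.Len(t, metrics, 2)
}

require.Len takes the object first and the expected length second, which is why each rewritten assertion swaps the argument order relative to the old require.Equal call.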