chore: Fix linter findings for `revive:enforce-slice-style` in `plugins/inputs/[e-j]*` (#16088)
This commit is contained in:
parent 7c0fe8a3e6
commit f8b2b0a914
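Note: the `revive` rule `enforce-slice-style` flags empty-slice literals such as `[]string{}`. In the hunks below they are handled in three ways: the literal is dropped when the type's zero value (a nil slice) is enough, replaced with `nil` when it was only a placeholder argument, or replaced with `make([]T, 0, n)` when elements are appended afterwards. A minimal, hypothetical Go sketch of the three shapes (the names are illustrative, not taken from the Telegraf code):

package main

import "fmt"

type pluginConfig struct {
	Tags []string // zero value is nil; no need to write Tags: []string{}
}

func main() {
	// 1. Omit the literal: a nil slice behaves like an empty one for len, range and append.
	cfg := pluginConfig{}
	fmt.Println(len(cfg.Tags), cfg.Tags == nil) // 0 true

	// 2. Pre-size with make when the element count is known, so append never re-allocates.
	opts := make([]string, 0, 2)
	opts = append(opts, "a", "b")

	// 3. Pass nil where an empty literal was only a placeholder argument.
	fmt.Println(append([]string(nil), opts...)) // [a b]
}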
@@ -331,7 +331,6 @@ var testEsAggregationData = []esAggregationQueryTest{
 FilterQuery: "response: 200",
 DateField: "@timestamp",
 QueryPeriod: queryPeriod,
-Tags: []string{},
 mapMetricFields: map[string]string{},
 },
 nil,

@@ -357,7 +356,6 @@ var testEsAggregationData = []esAggregationQueryTest{
 MetricFunction: "max",
 DateField: "@timestamp",
 QueryPeriod: queryPeriod,
-Tags: []string{},
 mapMetricFields: map[string]string{"size": "long"},
 },
 []aggregationQueryData{

@@ -388,7 +386,6 @@ var testEsAggregationData = []esAggregationQueryTest{
 MetricFunction: "average",
 DateField: "@timestamp",
 QueryPeriod: queryPeriod,
-Tags: []string{},
 mapMetricFields: map[string]string{"size": "long"},
 },
 nil,

@@ -405,7 +402,6 @@ var testEsAggregationData = []esAggregationQueryTest{
 MetricFields: []string{"none"},
 DateField: "@timestamp",
 QueryPeriod: queryPeriod,
-Tags: []string{},
 mapMetricFields: map[string]string{},
 },
 nil,

@@ -421,7 +417,6 @@ var testEsAggregationData = []esAggregationQueryTest{
 MeasurementName: "measurement11",
 DateField: "@timestamp",
 QueryPeriod: queryPeriod,
-Tags: []string{},
 mapMetricFields: map[string]string{},
 },
 nil,

@@ -439,7 +434,6 @@ var testEsAggregationData = []esAggregationQueryTest{
 MetricFunction: "avg",
 DateField: "@notatimestamp",
 QueryPeriod: queryPeriod,
-Tags: []string{},
 mapMetricFields: map[string]string{"size": "long"},
 },
 []aggregationQueryData{

@@ -496,7 +490,6 @@ var testEsAggregationData = []esAggregationQueryTest{
 DateField: "@timestamp",
 DateFieldFormat: "yyyy",
 QueryPeriod: queryPeriod,
-Tags: []string{},
 mapMetricFields: map[string]string{},
 },
 nil,
@@ -360,11 +360,7 @@ func (c *commandEthtool) interfaces(includeNamespaces bool) ([]namespacedInterfa
 func init() {
 inputs.Add(pluginName, func() telegraf.Input {
 return &Ethtool{
-InterfaceInclude: []string{},
-InterfaceExclude: []string{},
-NamespaceInclude: []string{},
-NamespaceExclude: []string{},
-command: newCommandEthtool(),
+command: newCommandEthtool(),
 }
 })
 }

@@ -580,10 +580,8 @@ func setup() {
 
 c := &commandEthtoolMock{interfaceMap}
 eth = &Ethtool{
-InterfaceInclude: []string{},
-InterfaceExclude: []string{},
-DownInterfaces: "expose",
-command: c,
+DownInterfaces: "expose",
+command: c,
 }
 }
 
@@ -932,7 +930,6 @@ func TestNormalizedKeys(t *testing.T) {
 },
 },
 {
-normalization: []string{},
 stats: map[string]interface{}{
 " Port RX ": uint64(1),
 " Port_tx": uint64(0),

@@ -946,7 +943,6 @@ func TestNormalizedKeys(t *testing.T) {
 },
 },
 {
-normalization: []string{},
 stats: map[string]interface{}{
 " Port RX ": uint64(1),
 " Port_tx": uint64(0),

@@ -972,10 +968,8 @@ func TestNormalizedKeys(t *testing.T) {
 
 cmd := &commandEthtoolMock{interfaceMap}
 eth = &Ethtool{
-InterfaceInclude: []string{},
-InterfaceExclude: []string{},
-NormalizeKeys: c.normalization,
-command: cmd,
+NormalizeKeys: c.normalization,
+command: cmd,
 }
 
 err := eth.Init()
@@ -81,7 +81,7 @@ func (e *EventHub) Init() (err error) {
 }
 
 // Set hub options
-hubOpts := []eventhub.HubOption{}
+hubOpts := make([]eventhub.HubOption, 0, 2)
 
 if e.PersistenceDir != "" {
 persister, err := persist.NewFilePersister(e.PersistenceDir)

@@ -162,7 +162,7 @@ func (e *EventHub) Stop() {
 }
 
 func (e *EventHub) configureReceiver() []eventhub.ReceiveOption {
-receiveOpts := []eventhub.ReceiveOption{}
+receiveOpts := make([]eventhub.ReceiveOption, 0, 4)
 
 if e.ConsumerGroup != "" {
 receiveOpts = append(receiveOpts, eventhub.ReceiveWithConsumerGroup(e.ConsumerGroup))
@@ -304,8 +304,7 @@ func getEnv(key string) string {
 }
 
 func loadConfigIntoInputs(md toml.MetaData, inputConfigs map[string][]toml.Primitive) ([]telegraf.Input, error) {
-renderedInputs := []telegraf.Input{}
-
+renderedInputs := make([]telegraf.Input, 0, len(inputConfigs))
 for name, primitives := range inputConfigs {
 inputCreator, ok := inputs.Inputs[name]
 if !ok {
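The hunks above swap `[]T{}` for `make([]T, 0, n)` where the slice is then filled by a bounded number of appends, so the backing array is allocated once. A small standalone sketch of the same pattern (the option strings here are made up for illustration, not the plugin's real options):

package main

import "fmt"

// buildReceiveOpts mirrors the make-with-capacity pattern: capacity 2 covers the
// maximum number of appends below, so append never has to grow the slice.
func buildReceiveOpts(consumerGroup, offset string) []string {
	opts := make([]string, 0, 2)
	if consumerGroup != "" {
		opts = append(opts, "consumer-group="+consumerGroup)
	}
	if offset != "" {
		opts = append(opts, "offset="+offset)
	}
	return opts
}

func main() {
	fmt.Println(buildReceiveOpts("$Default", "latest"))
}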
@@ -285,8 +285,9 @@ func (fc *FileCount) getDirs() []string {
 }
 
 func (fc *FileCount) initGlobPaths(acc telegraf.Accumulator) {
-fc.globPaths = []globpath.GlobPath{}
-for _, directory := range fc.getDirs() {
+dirs := fc.getDirs()
+fc.globPaths = make([]globpath.GlobPath, 0, len(dirs))
+for _, directory := range dirs {
 glob, err := globpath.Compile(directory)
 if err != nil {
 acc.AddError(err)

@@ -299,7 +300,6 @@ func (fc *FileCount) initGlobPaths(acc telegraf.Accumulator) {
 func newFileCount() *FileCount {
 return &FileCount{
 Directory: "",
-Directories: []string{},
 Name: "*",
 Recursive: true,
 RegularOnly: true,
@@ -330,9 +330,10 @@ func guessPrefixFromUpdate(fields []updateField) string {
 if len(fields) == 1 {
 return fields[0].path.dir()
 }
+segments := make([]segment, 0, len(fields[0].path.segments))
 commonPath := &pathInfo{
 origin: fields[0].path.origin,
-segments: append([]segment{}, fields[0].path.segments...),
+segments: append(segments, fields[0].path.segments...),
 }
 for _, f := range fields[1:] {
 commonPath.keepCommonPart(f.path)

@@ -103,10 +103,11 @@ func (pi *pathInfo) empty() bool {
 
 func (pi *pathInfo) append(paths ...*gnmi.Path) *pathInfo {
 // Copy the existing info
+segments := make([]segment, 0, len(pi.segments))
 path := &pathInfo{
 origin: pi.origin,
 target: pi.target,
-segments: append([]segment{}, pi.segments...),
+segments: append(segments, pi.segments...),
 keyValues: make([]keySegment, 0, len(pi.keyValues)),
 }
 for _, elem := range pi.keyValues {

@@ -150,10 +151,11 @@ func (pi *pathInfo) append(paths ...*gnmi.Path) *pathInfo {
 
 func (pi *pathInfo) appendSegments(segments ...string) *pathInfo {
 // Copy the existing info
+seg := make([]segment, 0, len(segments))
 path := &pathInfo{
 origin: pi.origin,
 target: pi.target,
-segments: append([]segment{}, pi.segments...),
+segments: append(seg, pi.segments...),
 keyValues: make([]keySegment, 0, len(pi.keyValues)),
 }
 for _, elem := range pi.keyValues {
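In the gnmi hunks above, copying a path's segments changes from `append([]segment{}, src...)` to appending into a destination pre-sized with `make`, which keeps the copy semantics while avoiding append's growth path. A self-contained sketch of that idea (this `segment` type is a stand-in, not the plugin's):

package main

import "fmt"

type segment struct{ name string }

// copySegments returns an independent copy of src; sizing the destination up
// front lets append copy all elements in a single step.
func copySegments(src []segment) []segment {
	dst := make([]segment, 0, len(src))
	return append(dst, src...)
}

func main() {
	orig := []segment{{"interfaces"}, {"interface"}, {"state"}}
	cp := copySegments(orig)
	cp[0].name = "changed"
	fmt.Println(orig[0].name, cp[0].name) // interfaces changed
}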
@@ -315,7 +315,7 @@ func stateFullGCSServer(t *testing.T) *httptest.Server {
 } else if pageToken == "page4" {
 objListing["items"] = []interface{}{fourthElement}
 } else if offset == "prefix/1604148850994" {
-objListing["items"] = []interface{}{}
+objListing["items"] = make([]interface{}, 0)
 } else {
 objListing["items"] = []interface{}{firstElement}
 objListing["nextPageToken"] = "page2"
@@ -25,7 +25,7 @@ func TestInit(t *testing.T) {
 })
 
 t.Run("when empty hugepages types is provided then plugin should fail to initialize", func(t *testing.T) {
-h := Hugepages{Types: []string{}}
+h := Hugepages{Types: make([]string, 0)}
 err := h.Init()
 
 require.Error(t, err)
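Several of the test fixtures in this commit keep an explicitly empty, non-nil slice via `make([]T, 0)` rather than dropping the value, presumably because a nil slice and an empty slice are not interchangeable under deep equality (the comparison testify's `require.Equal` also relies on via `reflect.DeepEqual`). A short illustration of the distinction:

package main

import (
	"fmt"
	"reflect"
)

func main() {
	var nilSlice []string           // nil slice
	emptySlice := make([]string, 0) // non-nil, zero-length slice

	fmt.Println(len(nilSlice), len(emptySlice))          // 0 0
	fmt.Println(reflect.DeepEqual(nilSlice, emptySlice)) // false: nil vs empty matters to DeepEqual
}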
@@ -30,7 +30,6 @@ func TestIcinga2Default(t *testing.T) {
 func TestIcinga2DeprecatedHostConfig(t *testing.T) {
 icinga2 := &Icinga2{
 ObjectType: "hosts", // deprecated
-Objects: []string{},
 }
 require.NoError(t, icinga2.Init())
 

@@ -40,7 +39,6 @@ func TestIcinga2DeprecatedHostConfig(t *testing.T) {
 func TestIcinga2DeprecatedServicesConfig(t *testing.T) {
 icinga2 := &Icinga2{
 ObjectType: "services", // deprecated
-Objects: []string{},
 }
 require.NoError(t, icinga2.Init())
 
@@ -149,7 +149,7 @@ func TestDLB_writeReadSocketMessage(t *testing.T) {
 connection: mockConn,
 Log: testutil.Logger{},
 }
-mockConn.On("Write", []byte{}).Return(0, errors.New("write error")).Once().
+mockConn.On("Write", make([]byte, 0)).Return(0, errors.New("write error")).Once().
 On("Close").Return(nil).Once()
 
 _, _, err := dlb.writeReadSocketMessage("")

@@ -180,7 +180,7 @@ func TestDLB_writeReadSocketMessage(t *testing.T) {
 connection: mockConn,
 Log: testutil.Logger{},
 }
-mockConn.On("Write", []byte{}).Return(0, nil).Once().
+mockConn.On("Write", make([]byte, 0)).Return(0, nil).Once().
 On("Read", mock.Anything).Return(0, nil).
 On("Close").Return(nil).Once()
 
@@ -348,7 +348,7 @@ func TestGather(t *testing.T) {
 },
 },
 files: []testFile{
-{guid: "test-guid", content: []byte{}},
+{guid: "test-guid"},
 },
 wantErr: true,
 },

@@ -472,7 +472,7 @@ func TestAddFiles(t *testing.T) {
 mError := errors.New("mock error")
 
 t.Run("no paths", func(t *testing.T) {
-err := checkFiles([]string{}, mFileInfo)
+err := checkFiles(nil, mFileInfo)
 require.Error(t, err)
 require.Contains(t, err.Error(), "no paths were given")
 })
@@ -42,8 +42,8 @@ func TestParsePackageMetrics(t *testing.T) {
 },
 {
 name: "EmptySlice",
-metrics: []packageMetricType{},
-parsed: []packageMetricType{},
+metrics: make([]packageMetricType, 0),
+parsed: make([]packageMetricType, 0),
 },
 {
 name: "HasDuplicates",

@@ -84,8 +84,8 @@ func TestParseCPUMetrics(t *testing.T) {
 },
 {
 name: "EmptySlice",
-metrics: []cpuMetricType{},
-parsed: []cpuMetricType{},
+metrics: make([]cpuMetricType, 0),
+parsed: make([]cpuMetricType, 0),
 },
 {
 name: "HasDuplicates",

@@ -121,8 +121,8 @@ func TestParseCPUTimeRelatedMsrMetrics(t *testing.T) {
 testCases := []parseCPUMetricTestCase{
 {
 name: "EmptySlice",
-metrics: []cpuMetricType{},
-parsed: []cpuMetricType{},
+metrics: make([]cpuMetricType, 0),
+parsed: make([]cpuMetricType, 0),
 },
 {
 name: "NotFound",

@@ -138,7 +138,7 @@ func TestParseCPUTimeRelatedMsrMetrics(t *testing.T) {
 cpuC0SubstateC02Percent,
 cpuC0SubstateC0WaitPercent,
 },
-parsed: []cpuMetricType{},
+parsed: make([]cpuMetricType, 0),
 },
 {
 name: "Found",

@@ -190,8 +190,8 @@ func TestParseCPUPerfMetrics(t *testing.T) {
 testCases := []parseCPUMetricTestCase{
 {
 name: "EmptySlice",
-metrics: []cpuMetricType{},
-parsed: []cpuMetricType{},
+metrics: make([]cpuMetricType, 0),
+parsed: make([]cpuMetricType, 0),
 },
 {
 name: "NotFound",
@@ -207,7 +207,7 @@ func TestParseCPUPerfMetrics(t *testing.T) {
 cpuC6StateResidency,
 cpuBusyFrequency,
 },
-parsed: []cpuMetricType{},
+parsed: make([]cpuMetricType, 0),
 },
 {
 name: "Found",

@@ -252,8 +252,8 @@ func TestParsePackageRaplMetrics(t *testing.T) {
 testCases := []parsePackageMetricTestCase{
 {
 name: "EmptySlice",
-metrics: []packageMetricType{},
-parsed: []packageMetricType{},
+metrics: make([]packageMetricType, 0),
+parsed: make([]packageMetricType, 0),
 },
 {
 name: "NotFound",

@@ -263,7 +263,7 @@ func TestParsePackageRaplMetrics(t *testing.T) {
 packageCPUBaseFrequency,
 packageUncoreFrequency,
 },
-parsed: []packageMetricType{},
+parsed: make([]packageMetricType, 0),
 },
 {
 name: "Found",

@@ -302,8 +302,8 @@ func TestParsePackageMsrMetrics(t *testing.T) {
 testCases := []parsePackageMetricTestCase{
 {
 name: "EmptySlice",
-metrics: []packageMetricType{},
-parsed: []packageMetricType{},
+metrics: make([]packageMetricType, 0),
+parsed: make([]packageMetricType, 0),
 },
 {
 name: "NotFound",

@@ -313,7 +313,7 @@ func TestParsePackageMsrMetrics(t *testing.T) {
 packageCurrentDramPowerConsumption,
 packageThermalDesignPower,
 },
-parsed: []packageMetricType{},
+parsed: make([]packageMetricType, 0),
 },
 {
 name: "Found",

@@ -503,12 +503,12 @@ func TestParseCores(t *testing.T) {
 {
 name: "CoresIsNil",
 coreGroups: nil,
-cores: []int{},
+cores: make([]int, 0),
 },
 {
 name: "CoresIsEmpty",
-coreGroups: []string{},
-cores: []int{},
+coreGroups: make([]string, 0),
+cores: make([]int, 0),
 },
 {
 name: "Ok",
@@ -598,7 +598,7 @@ func TestParseConfig(t *testing.T) {
 t.Run("NoMetricsProvided", func(t *testing.T) {
 p := &PowerStat{
 // Disable default package metrics.
-PackageMetrics: []packageMetricType{},
+PackageMetrics: make([]packageMetricType, 0),
 }
 
 require.ErrorContains(t, p.parseConfig(), "no metrics were found in the configuration file")

@@ -610,7 +610,7 @@ func TestParseConfig(t *testing.T) {
 cpuBusyFrequency,
 },
 // Disable default package metrics.
-PackageMetrics: []packageMetricType{},
+PackageMetrics: make([]packageMetricType, 0),
 }
 
 require.NoError(t, p.parseConfig())

@@ -798,7 +798,7 @@ func TestGather(t *testing.T) {
 acc := &testutil.Accumulator{}
 
 p := &PowerStat{
-PackageMetrics: []packageMetricType{},
+PackageMetrics: make([]packageMetricType, 0),
 }
 
 require.NoError(t, p.Gather(acc))

@@ -918,7 +918,7 @@ func TestGather(t *testing.T) {
 
 p := &PowerStat{
 // Disables package metrics
-PackageMetrics: []packageMetricType{},
+PackageMetrics: make([]packageMetricType, 0),
 CPUMetrics: []cpuMetricType{
 cpuFrequency,
 cpuTemperature,

@@ -974,7 +974,7 @@ func TestGather(t *testing.T) {
 
 p := &PowerStat{
 // Disables package metrics
-PackageMetrics: []packageMetricType{},
+PackageMetrics: make([]packageMetricType, 0),
 CPUMetrics: []cpuMetricType{
 cpuC0SubstateC01Percent,
 cpuC0SubstateC02Percent,

@@ -1042,7 +1042,7 @@ func TestGather(t *testing.T) {
 
 p := &PowerStat{
 // Disables package metrics
-PackageMetrics: []packageMetricType{},
+PackageMetrics: make([]packageMetricType, 0),
 CPUMetrics: []cpuMetricType{
 cpuC0SubstateC01Percent,
 cpuC0SubstateC02Percent,
@@ -1299,7 +1299,7 @@ func TestDisableUnsupportedMetrics(t *testing.T) {
 // Metrics not relying on dts flag
 cpuBusyFrequency,
 },
-PackageMetrics: []packageMetricType{},
+PackageMetrics: make([]packageMetricType, 0),
 
 Log: logger,
 }

@@ -1592,7 +1592,7 @@ func (m *fetcherMock) GetMaxTurboFreqList(packageID int) ([]ptel.MaxTurboFreq, e
 
 func TestAddCPUMetrics(t *testing.T) {
 // Disable package metrics when parseConfig method is called.
-packageMetrics := []packageMetricType{}
+packageMetrics := make([]packageMetricType, 0)
 
 t.Run("NoAvailableCPUs", func(t *testing.T) {
 acc := &testutil.Accumulator{}

@@ -1900,7 +1900,7 @@ func TestAddCPUMetrics(t *testing.T) {
 
 func TestAddPerCPUMsrMetrics(t *testing.T) {
 // Disable package metrics when parseConfig method is called.
-packageMetrics := []packageMetricType{}
+packageMetrics := make([]packageMetricType, 0)
 
 t.Run("WithoutMsrMetrics", func(t *testing.T) {
 cpuID := 0

@@ -2233,7 +2233,7 @@ func TestAddCPUTimeRelatedMsrMetrics(t *testing.T) {
 cpuC6StateResidency,
 cpuBusyCycles,
 },
-PackageMetrics: []packageMetricType{},
+PackageMetrics: make([]packageMetricType, 0),
 EventDefinitions: "./testdata/sapphirerapids_core.json",
 
 fetcher: mFetcher,

@@ -2326,7 +2326,7 @@ func TestAddCPUTimeRelatedMsrMetrics(t *testing.T) {
 
 func TestAddCPUPerfMetrics(t *testing.T) {
 // Disable package metrics when parseConfig method is called.
-packageMetrics := []packageMetricType{}
+packageMetrics := make([]packageMetricType, 0)
 
 t.Run("FailedToReadPerfEvents", func(t *testing.T) {
 acc := &testutil.Accumulator{}

@@ -2402,7 +2402,7 @@ func TestAddCPUPerfMetrics(t *testing.T) {
 cpuC0SubstateC02Percent,
 cpuC0SubstateC0WaitPercent,
 },
-PackageMetrics: []packageMetricType{},
+PackageMetrics: make([]packageMetricType, 0),
 EventDefinitions: "./testdata/sapphirerapids_core.json",
 
 fetcher: mFetcher,

@@ -2449,7 +2449,6 @@ func TestAddCPUPerfMetrics(t *testing.T) {
 cpuC0SubstateC02Percent,
 cpuC0SubstateC0WaitPercent,
 },
-PackageMetrics: []packageMetricType{},
 EventDefinitions: "./testdata/sapphirerapids_core.json",
 
 fetcher: mFetcher,

@@ -2659,7 +2658,7 @@ func TestAddPerCPUPerfMetrics(t *testing.T) {
 cpuC0SubstateC01Percent,
 cpuC0SubstateC02Percent,
 },
-PackageMetrics: []packageMetricType{},
+PackageMetrics: make([]packageMetricType, 0),
 EventDefinitions: "./testdata/sapphirerapids_core.json",
 
 fetcher: mFetcher,
@@ -135,7 +135,7 @@ func reportMetrics(measurement string, irqs []irq, acc telegraf.Accumulator, cpu
 }
 
 func newIRQ(id string) *irq {
-return &irq{id: id, cpus: []int64{}}
+return &irq{id: id}
 }
 
 func init() {
@@ -928,9 +928,7 @@ func TestSanitizeIPMICmd(t *testing.T) {
 },
 },
 {
 name: "empty args",
-args: []string{},
-expected: []string{},
 },
 }
 for _, tt := range tests {
@@ -151,8 +151,6 @@ func TestIptables_Gather(t *testing.T) {
 a a ACCEPT all -- * * 1.3.5.7 0.0.0.0/0 /* test */
 a a CLASSIFY all -- * * 1.3.5.7 0.0.0.0/0 /* test2 */ CLASSIFY set 1:4
 `},
-tags: []map[string]string{},
-fields: [][]map[string]interface{}{},
 },
 { // 11 - all target and ports
 table: "all_recv",
@@ -244,9 +244,8 @@ func (j *Jenkins) gatherJobs(acc telegraf.Accumulator) {
 go func(name string, wg *sync.WaitGroup, acc telegraf.Accumulator) {
 defer wg.Done()
 if err := j.getJobDetail(jobRequest{
 name: name,
-parents: []string{},
-layer: 0,
+layer: 0,
 }, acc); err != nil {
 acc.AddError(err)
 }

@@ -152,9 +152,7 @@ func TestGatherNodeData(t *testing.T) {
 },
 },
 },
-output: &testutil.Accumulator{
-Metrics: []*testutil.Metric{},
-},
+output: &testutil.Accumulator{},
 },
 {
 name: "filtered nodes (excluded)",

@@ -797,9 +795,7 @@ func TestGatherJobs(t *testing.T) {
 {Name: "ignore-1"},
 },
 },
-"/job/ignore-1/api/json": &jobResponse{
-Jobs: []innerJob{},
-},
+"/job/ignore-1/api/json": &jobResponse{},
 "/job/apps/api/json": &jobResponse{
 Jobs: []innerJob{
 {Name: "k8s-cloud"},
@@ -99,7 +99,6 @@ func (ja *JolokiaAgent) createClient(url string) (*common.Client, error) {
 func init() {
 inputs.Add("jolokia2_agent", func() telegraf.Input {
 return &JolokiaAgent{
-Metrics: []common.MetricConfig{},
 DefaultFieldSeparator: ".",
 }
 })

@@ -914,7 +914,6 @@ func setupPlugin(t *testing.T, conf string) telegraf.Input {
 object := table.Fields[name]
 if name == "jolokia2_agent" {
 plugin := jolokia2_agent.JolokiaAgent{
-Metrics: []common.MetricConfig{},
 DefaultFieldSeparator: ".",
 }
 

@@ -99,7 +99,6 @@ func (jp *JolokiaProxy) createClient() (*common.Client, error) {
 func init() {
 inputs.Add("jolokia2_proxy", func() telegraf.Input {
 return &JolokiaProxy{
-Metrics: []common.MetricConfig{},
 DefaultFieldSeparator: ".",
 }
 })

@@ -179,7 +179,6 @@ func setupPlugin(t *testing.T, conf string) telegraf.Input {
 object := table.Fields[name]
 if name == "jolokia2_proxy" {
 plugin := jolokia2_proxy.JolokiaProxy{
-Metrics: []common.MetricConfig{},
 DefaultFieldSeparator: ".",
 }
 
@@ -217,8 +217,7 @@ func (m *OpenConfigTelemetry) extractData(r *telemetry.OpenConfigData, grpcServe
 // Use empty prefix. We will update this when we iterate over key-value pairs
 prefix := ""
 
-dgroups := []dataGroup{}
-
+dgroups := make([]dataGroup, 0, 5*len(r.Kv))
 for _, v := range r.Kv {
 kv := make(map[string]interface{})
 