package config

import (
	"bytes"
	"fmt"
	"net/http"
	"net/http/httptest"
	"os"
	"os/exec"
	"path/filepath"
	"reflect"
	"runtime"
	"strings"
	"sync"
	"testing"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
	"github.com/stretchr/testify/require"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/models"
	"github.com/influxdata/telegraf/plugins/common/tls"
	"github.com/influxdata/telegraf/plugins/inputs"
	"github.com/influxdata/telegraf/plugins/outputs"
	"github.com/influxdata/telegraf/plugins/parsers"
	_ "github.com/influxdata/telegraf/plugins/parsers/all" // Blank import to have all parsers for testing
	"github.com/influxdata/telegraf/plugins/parsers/json"
	"github.com/influxdata/telegraf/plugins/processors"
)

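// TestReadBinaryFile builds the custom_builder tool to obtain a throwaway binary
// and verifies that passing it to LoadConfig is rejected as a non-TOML config.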
func TestReadBinaryFile(t *testing.T) {
	// Create a temporary binary file using the Telegraf tool custom_builder to pass as a config
	wd, err := os.Getwd()
	require.NoError(t, err)
	t.Cleanup(func() {
		err := os.Chdir(wd)
		require.NoError(t, err)
	})

	err = os.Chdir("../")
	require.NoError(t, err)
	tmpdir := t.TempDir()
	binaryFile := filepath.Join(tmpdir, "custom_builder")
	cmd := exec.Command("go", "build", "-o", binaryFile, "./tools/custom_builder")
	var outb, errb bytes.Buffer
	cmd.Stdout = &outb
	cmd.Stderr = &errb
	err = cmd.Run()

	require.NoError(t, err, fmt.Sprintf("stdout: %s, stderr: %s", outb.String(), errb.String()))
	c := NewConfig()
	err = c.LoadConfig(binaryFile)
	require.Error(t, err)
	require.ErrorContains(t, err, "provided config is not a TOML file")
}

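// TestConfig_LoadSingleInputWithEnvVars verifies that environment variables
// referenced in the config (server address and interval) are substituted
// before the plugin and its filters are constructed.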
func TestConfig_LoadSingleInputWithEnvVars(t *testing.T) {
	c := NewConfig()
	require.NoError(t, os.Setenv("MY_TEST_SERVER", "192.168.1.1"))
	require.NoError(t, os.Setenv("TEST_INTERVAL", "10s"))
	require.NoError(t, c.LoadConfig("./testdata/single_plugin_env_vars.toml"))

	input := inputs.Inputs["memcached"]().(*MockupInputPlugin)
	input.Servers = []string{"192.168.1.1"}

	filter := models.Filter{
		NameDrop:  []string{"metricname2"},
		NamePass:  []string{"metricname1", "ip_192.168.1.1_name"},
		FieldDrop: []string{"other", "stuff"},
		FieldPass: []string{"some", "strings"},
		TagDropFilters: []models.TagFilter{
			{
				Name:   "badtag",
				Values: []string{"othertag"},
			},
		},
		TagPassFilters: []models.TagFilter{
			{
				Name:   "goodtag",
				Values: []string{"mytag"},
			},
		},
	}
	require.NoError(t, filter.Compile())
	inputConfig := &models.InputConfig{
		Name:     "memcached",
		Filter:   filter,
		Interval: 10 * time.Second,
	}
	inputConfig.Tags = make(map[string]string)

	// Ignore Log and Parser
	c.Inputs[0].Input.(*MockupInputPlugin).Log = nil
	c.Inputs[0].Input.(*MockupInputPlugin).parser = nil
	require.Equal(t, input, c.Inputs[0].Input, "Testdata did not produce a correct mockup struct.")
	require.Equal(t, inputConfig, c.Inputs[0].Config, "Testdata did not produce correct input metadata.")
}

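// TestConfig_LoadSingleInput verifies that a single input plugin and its
// filter, tags, and interval are loaded correctly from a TOML file.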
func TestConfig_LoadSingleInput(t *testing.T) {
	c := NewConfig()
	require.NoError(t, c.LoadConfig("./testdata/single_plugin.toml"))

	input := inputs.Inputs["memcached"]().(*MockupInputPlugin)
	input.Servers = []string{"localhost"}

	filter := models.Filter{
		NameDrop:  []string{"metricname2"},
		NamePass:  []string{"metricname1"},
		FieldDrop: []string{"other", "stuff"},
		FieldPass: []string{"some", "strings"},
		TagDropFilters: []models.TagFilter{
			{
				Name:   "badtag",
				Values: []string{"othertag"},
			},
		},
		TagPassFilters: []models.TagFilter{
			{
				Name:   "goodtag",
				Values: []string{"mytag"},
			},
		},
	}
	require.NoError(t, filter.Compile())
	inputConfig := &models.InputConfig{
		Name:     "memcached",
		Filter:   filter,
		Interval: 5 * time.Second,
	}
	inputConfig.Tags = make(map[string]string)

	// Ignore Log and Parser
	c.Inputs[0].Input.(*MockupInputPlugin).Log = nil
	c.Inputs[0].Input.(*MockupInputPlugin).parser = nil
	require.Equal(t, input, c.Inputs[0].Input, "Testdata did not produce a correct memcached struct.")
	require.Equal(t, inputConfig, c.Inputs[0].Config, "Testdata did not produce correct memcached metadata.")
}

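// TestConfig_LoadDirectory verifies that a single config file combined with a
// config directory produces all expected plugins, parsers, and metadata in order.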
func TestConfig_LoadDirectory(t *testing.T) {
	c := NewConfig()

	files, err := WalkDirectory("./testdata/subconfig")
	files = append([]string{"./testdata/single_plugin.toml"}, files...)
	require.NoError(t, err)
	require.NoError(t, c.LoadAll(files...))

	// Create the expected data
	expectedPlugins := make([]*MockupInputPlugin, 4)
	expectedConfigs := make([]*models.InputConfig, 4)

	expectedPlugins[0] = inputs.Inputs["memcached"]().(*MockupInputPlugin)
	expectedPlugins[0].Servers = []string{"localhost"}

	filterMockup := models.Filter{
		NameDrop:  []string{"metricname2"},
		NamePass:  []string{"metricname1"},
		FieldDrop: []string{"other", "stuff"},
		FieldPass: []string{"some", "strings"},
		TagDropFilters: []models.TagFilter{
			{
				Name:   "badtag",
				Values: []string{"othertag"},
			},
		},
		TagPassFilters: []models.TagFilter{
			{
				Name:   "goodtag",
				Values: []string{"mytag"},
			},
		},
	}
	require.NoError(t, filterMockup.Compile())
	expectedConfigs[0] = &models.InputConfig{
		Name:     "memcached",
		Filter:   filterMockup,
		Interval: 5 * time.Second,
	}
	expectedConfigs[0].Tags = make(map[string]string)

	expectedPlugins[1] = inputs.Inputs["exec"]().(*MockupInputPlugin)
	parser := &json.Parser{
		MetricName: "exec",
		Strict:     true,
	}
	require.NoError(t, parser.Init())

	expectedPlugins[1].SetParser(parser)
	expectedPlugins[1].Command = "/usr/bin/myothercollector --foo=bar"
	expectedConfigs[1] = &models.InputConfig{
		Name:              "exec",
		MeasurementSuffix: "_myothercollector",
	}
	expectedConfigs[1].Tags = make(map[string]string)

	expectedPlugins[2] = inputs.Inputs["memcached"]().(*MockupInputPlugin)
	expectedPlugins[2].Servers = []string{"192.168.1.1"}

	filterMemcached := models.Filter{
		NameDrop:  []string{"metricname2"},
		NamePass:  []string{"metricname1"},
		FieldDrop: []string{"other", "stuff"},
		FieldPass: []string{"some", "strings"},
		TagDropFilters: []models.TagFilter{
			{
				Name:   "badtag",
				Values: []string{"othertag"},
			},
		},
		TagPassFilters: []models.TagFilter{
			{
				Name:   "goodtag",
				Values: []string{"mytag"},
			},
		},
	}
	require.NoError(t, filterMemcached.Compile())
	expectedConfigs[2] = &models.InputConfig{
		Name:     "memcached",
		Filter:   filterMemcached,
		Interval: 5 * time.Second,
	}
	expectedConfigs[2].Tags = make(map[string]string)

	expectedPlugins[3] = inputs.Inputs["procstat"]().(*MockupInputPlugin)
	expectedPlugins[3].PidFile = "/var/run/grafana-server.pid"
	expectedConfigs[3] = &models.InputConfig{Name: "procstat"}
	expectedConfigs[3].Tags = make(map[string]string)

	// Check the generated plugins
	require.Len(t, c.Inputs, len(expectedPlugins))
	require.Len(t, c.Inputs, len(expectedConfigs))
	for i, plugin := range c.Inputs {
		input := plugin.Input.(*MockupInputPlugin)
		// Check the logger and ignore it for comparison
		require.NotNil(t, input.Log)
		input.Log = nil

		// Check the parsers if any
		if expectedPlugins[i].parser != nil {
			runningParser, ok := input.parser.(*models.RunningParser)
			require.True(t, ok)

			// We only use the JSON parser here
			parser, ok := runningParser.Parser.(*json.Parser)
			require.True(t, ok)

			// Prepare parser for comparison
			require.NoError(t, parser.Init())
			parser.Log = nil

			// Compare the parser
			require.Equalf(t, expectedPlugins[i].parser, parser, "Plugin %d: incorrect parser produced", i)
		}

		// Ignore the parsers for further comparisons
		input.parser = nil
		expectedPlugins[i].parser = nil

		require.Equalf(t, expectedPlugins[i], plugin.Input, "Plugin %d: incorrect struct produced", i)
		require.Equalf(t, expectedConfigs[i], plugin.Config, "Plugin %d: incorrect config produced", i)
	}
}

func TestConfig_WrongCertPath(t *testing.T) {
	c := NewConfig()
	require.Error(t, c.LoadConfig("./testdata/wrong_cert_path.toml"))
}

func TestConfig_DefaultParser(t *testing.T) {
	c := NewConfig()
	require.NoError(t, c.LoadConfig("./testdata/default_parser.toml"))
}

func TestConfig_DefaultExecParser(t *testing.T) {
	c := NewConfig()
	require.NoError(t, c.LoadConfig("./testdata/default_parser_exec.toml"))
}

func TestConfig_LoadSpecialTypes(t *testing.T) {
	c := NewConfig()
	require.NoError(t, c.LoadConfig("./testdata/special_types.toml"))
	require.Len(t, c.Inputs, 1)

	input, ok := c.Inputs[0].Input.(*MockupInputPlugin)
	require.True(t, ok)
	// Tests telegraf duration parsing.
	require.Equal(t, Duration(time.Second), input.WriteTimeout)
	// Tests telegraf size parsing.
	require.Equal(t, Size(1024*1024), input.MaxBodySize)
	// Tests toml multiline basic strings on single line.
	require.Equal(t, "./testdata/special_types.pem", input.TLSCert)
	// Tests toml multiline basic strings on single line.
	require.Equal(t, "./testdata/special_types.key", input.TLSKey)
	// Tests toml multiline basic strings on multiple lines.
	require.Equal(t, "/path/", strings.TrimRight(input.Paths[0], "\r\n"))
}

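// TestConfig_FieldNotDefined verifies that fields not recognized by a plugin
// or its parser table are reported as unused for inputs and processors alike.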
func TestConfig_FieldNotDefined(t *testing.T) {
	tests := []struct {
		name     string
		filename string
		expected string
	}{
		{
			name:     "in input plugin without parser",
			filename: "./testdata/invalid_field.toml",
			expected: `line 1: configuration specified the fields ["not_a_field"], but they weren't used`,
		},
		{
			name:     "in input plugin with parser",
			filename: "./testdata/invalid_field_with_parser.toml",
			expected: `line 1: configuration specified the fields ["not_a_field"], but they weren't used`,
		},
		{
			name:     "in input plugin with parser func",
			filename: "./testdata/invalid_field_with_parserfunc.toml",
			expected: `line 1: configuration specified the fields ["not_a_field"], but they weren't used`,
		},
		{
			name:     "in parser of input plugin",
			filename: "./testdata/invalid_field_in_parser_table.toml",
			expected: `line 1: configuration specified the fields ["not_a_field"], but they weren't used`,
		},
		{
			name:     "in parser of input plugin with parser-func",
			filename: "./testdata/invalid_field_in_parserfunc_table.toml",
			expected: `line 1: configuration specified the fields ["not_a_field"], but they weren't used`,
		},
		{
			name:     "in processor plugin without parser",
			filename: "./testdata/invalid_field_processor.toml",
			expected: `line 1: configuration specified the fields ["not_a_field"], but they weren't used`,
		},
		{
			name:     "in processor plugin with parser",
			filename: "./testdata/invalid_field_processor_with_parser.toml",
			expected: `line 1: configuration specified the fields ["not_a_field"], but they weren't used`,
		},
		{
			name:     "in processor plugin with parser func",
			filename: "./testdata/invalid_field_processor_with_parserfunc.toml",
			expected: `line 1: configuration specified the fields ["not_a_field"], but they weren't used`,
		},
		{
			name:     "in parser of processor plugin",
			filename: "./testdata/invalid_field_processor_in_parser_table.toml",
			expected: `line 1: configuration specified the fields ["not_a_field"], but they weren't used`,
		},
		{
			name:     "in parser of processor plugin with parser-func",
			filename: "./testdata/invalid_field_processor_in_parserfunc_table.toml",
			expected: `line 1: configuration specified the fields ["not_a_field"], but they weren't used`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			c := NewConfig()
			err := c.LoadConfig(tt.filename)
			require.ErrorContains(t, err, tt.expected)
		})
	}
}

func TestConfig_WrongFieldType(t *testing.T) {
	c := NewConfig()
	err := c.LoadConfig("./testdata/wrong_field_type.toml")
	require.Error(t, err, "invalid field type")
	require.Equal(
		t,
		"error loading config file ./testdata/wrong_field_type.toml: error parsing http_listener_v2, line 2: "+
			"(config.MockupInputPlugin.Port) cannot unmarshal TOML string into int",
		err.Error(),
	)

	c = NewConfig()
	err = c.LoadConfig("./testdata/wrong_field_type2.toml")
	require.Error(t, err, "invalid field type2")
	require.Equal(
		t,
		"error loading config file ./testdata/wrong_field_type2.toml: error parsing http_listener_v2, line 2: "+
			"(config.MockupInputPlugin.Methods) cannot unmarshal TOML string into []string",
		err.Error(),
	)
}

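// TestConfig_InlineTables verifies that TOML inline tables such as the headers
// map are parsed correctly (regression test for #4098).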
func TestConfig_InlineTables(t *testing.T) {
	// #4098
	c := NewConfig()
	require.NoError(t, c.LoadConfig("./testdata/inline_table.toml"))
	require.Len(t, c.Outputs, 2)

	output, ok := c.Outputs[1].Output.(*MockupOuputPlugin)
	require.True(t, ok)
	require.Equal(t, map[string]string{"Authorization": "Token $TOKEN", "Content-Type": "application/json"}, output.Headers)
	require.Equal(t, []string{"org_id"}, c.Outputs[0].Config.Filter.TagInclude)
}

func TestConfig_SliceComment(t *testing.T) {
	t.Skipf("Skipping until #3642 is resolved")

	c := NewConfig()
	require.NoError(t, c.LoadConfig("./testdata/slice_comment.toml"))
	require.Len(t, c.Outputs, 1)

	output, ok := c.Outputs[0].Output.(*MockupOuputPlugin)
	require.True(t, ok)
	require.Equal(t, []string{"test"}, output.Scopes)
}

func TestConfig_BadOrdering(t *testing.T) {
	// #3444: when not using inline tables, care has to be taken so subsequent configuration
	// doesn't become part of the table. This is not a bug, but TOML syntax.
	c := NewConfig()
	err := c.LoadConfig("./testdata/non_slice_slice.toml")
	require.Error(t, err, "bad ordering")
	require.Equal(
		t,
		"error loading config file ./testdata/non_slice_slice.toml: error parsing http array, line 4: cannot unmarshal TOML array into string (need slice)",
		err.Error(),
	)
}

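// TestConfig_AzureMonitorNamespacePrefix verifies that both a custom and an
// empty namespace prefix are accepted (regression test for #8256).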
func TestConfig_AzureMonitorNamespacePrefix(t *testing.T) {
	// #8256 Cannot use empty string as the namespace prefix
	c := NewConfig()
	require.NoError(t, c.LoadConfig("./testdata/azure_monitor.toml"))
	require.Len(t, c.Outputs, 2)

	expectedPrefix := []string{"Telegraf/", ""}
	for i, plugin := range c.Outputs {
		output, ok := plugin.Output.(*MockupOuputPlugin)
		require.True(t, ok)
		require.Equal(t, expectedPrefix[i], output.NamespacePrefix)
	}
}

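// TestConfig_URLRetries3Fails verifies that fetching a remote config gives up
// after the configured retries and reports the final 404 response.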
func TestConfig_URLRetries3Fails(t *testing.T) {
	httpLoadConfigRetryInterval = 0 * time.Second
	responseCounter := 0
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusNotFound)
		responseCounter++
	}))
	defer ts.Close()

	expected := fmt.Sprintf("error loading config file %s: retry 3 of 3 failed to retrieve remote config: 404 Not Found", ts.URL)

	c := NewConfig()
	err := c.LoadConfig(ts.URL)
	require.Error(t, err)
	require.Equal(t, expected, err.Error())
	require.Equal(t, 4, responseCounter)
}

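// TestConfig_URLRetries3FailsThenPasses verifies that fetching a remote config
// succeeds once the server starts answering within the retry budget.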
func TestConfig_URLRetries3FailsThenPasses(t *testing.T) {
	httpLoadConfigRetryInterval = 0 * time.Second
	responseCounter := 0
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if responseCounter <= 2 {
			w.WriteHeader(http.StatusNotFound)
		} else {
			w.WriteHeader(http.StatusOK)
		}
		responseCounter++
	}))
	defer ts.Close()

	c := NewConfig()
	require.NoError(t, c.LoadConfig(ts.URL))
	require.Equal(t, 4, responseCounter)
}

func TestConfig_getDefaultConfigPathFromEnvURL(t *testing.T) {
	ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	}))
	defer ts.Close()

	c := NewConfig()
	t.Setenv("TELEGRAF_CONFIG_PATH", ts.URL)
	configPath, err := getDefaultConfigPath()
	require.NoError(t, err)
	require.Equal(t, []string{ts.URL}, configPath)
	err = c.LoadConfig("")
	require.NoError(t, err)
}

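// TestConfig_URLLikeFileName verifies that a name that merely looks like a URL
// is treated as a local path and yields the platform-specific not-found error.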
func TestConfig_URLLikeFileName(t *testing.T) {
	c := NewConfig()
	err := c.LoadConfig("http:##www.example.com.conf")
	require.Error(t, err)

	if runtime.GOOS == "windows" {
		// The file-not-found error message is different on Windows
		require.Equal(
			t,
			"error loading config file http:##www.example.com.conf: open http:##www.example.com.conf: The system cannot find the file specified.",
			err.Error(),
		)
	} else {
		require.Equal(t, "error loading config file http:##www.example.com.conf: open http:##www.example.com.conf: no such file or directory", err.Error())
	}
}

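// TestConfig_ParserInterfaceNewFormat verifies that inputs using the new
// telegraf.Parser interface receive the expected parser for every registered
// data format, both via SetParser() and via SetParserFunc().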
func TestConfig_ParserInterfaceNewFormat(t *testing.T) {
	formats := []string{
		"collectd",
		"csv",
		"dropwizard",
		"form_urlencoded",
		"graphite",
		"grok",
		"influx",
		"json",
		"json_v2",
		"logfmt",
		"nagios",
		"prometheus",
		"prometheusremotewrite",
		"value",
		"wavefront",
		"xml", "xpath_json", "xpath_msgpack", "xpath_protobuf",
	}

	c := NewConfig()
	require.NoError(t, c.LoadConfig("./testdata/parsers_new.toml"))
	require.Len(t, c.Inputs, len(formats))

	cfg := parsers.Config{
		CSVHeaderRowCount:     42,
		DropwizardTagPathsMap: make(map[string]string),
		GrokPatterns:          []string{"%{COMBINED_LOG_FORMAT}"},
		JSONStrict:            true,
		MetricName:            "parser_test_new",
	}

	override := map[string]struct {
		param map[string]interface{}
		mask  []string
	}{
		"csv": {
			param: map[string]interface{}{
				"HeaderRowCount": cfg.CSVHeaderRowCount,
			},
			mask: []string{"TimeFunc", "ResetMode"},
		},
		"xpath_protobuf": {
			param: map[string]interface{}{
				"ProtobufMessageDef":  "testdata/addressbook.proto",
				"ProtobufMessageType": "addressbook.AddressBook",
			},
		},
	}

	expected := make([]telegraf.Parser, 0, len(formats))
	for _, format := range formats {
		formatCfg := &cfg
		formatCfg.DataFormat = format

		logger := models.NewLogger("parsers", format, cfg.MetricName)

		creator, found := parsers.Parsers[format]
		require.Truef(t, found, "No parser for format %q", format)

		parser := creator(formatCfg.MetricName)
		if settings, found := override[format]; found {
			s := reflect.Indirect(reflect.ValueOf(parser))
			for key, value := range settings.param {
				v := reflect.ValueOf(value)
				s.FieldByName(key).Set(v)
			}
		}
		models.SetLoggerOnPlugin(parser, logger)
		if p, ok := parser.(telegraf.Initializer); ok {
			require.NoError(t, p.Init())
		}
		expected = append(expected, parser)
	}
	require.Len(t, expected, len(formats))

	actual := make([]interface{}, 0)
	generated := make([]interface{}, 0)
	for _, plugin := range c.Inputs {
		input, ok := plugin.Input.(*MockupInputPluginParserNew)
		require.True(t, ok)
		// Get the parser set with 'SetParser()'
		if p, ok := input.Parser.(*models.RunningParser); ok {
			actual = append(actual, p.Parser)
		} else {
			actual = append(actual, input.Parser)
		}
		// Get the parser set with 'SetParserFunc()'
		g, err := input.ParserFunc()
		require.NoError(t, err)
		if rp, ok := g.(*models.RunningParser); ok {
			generated = append(generated, rp.Parser)
		} else {
			generated = append(generated, g)
		}
	}
	require.Len(t, actual, len(formats))

	for i, format := range formats {
		// Determine the underlying type of the parser
		stype := reflect.Indirect(reflect.ValueOf(expected[i])).Interface()
		// Ignore all unexported fields and fields not relevant for functionality
		options := []cmp.Option{
			cmpopts.IgnoreUnexported(stype),
			cmpopts.IgnoreTypes(sync.Mutex{}),
			cmpopts.IgnoreInterfaces(struct{ telegraf.Logger }{}),
		}
		if settings, found := override[format]; found {
			options = append(options, cmpopts.IgnoreFields(stype, settings.mask...))
		}

		// Do a manual comparison as require.EqualValues will also work on unexported fields
		// that cannot be cleared or ignored.
		diff := cmp.Diff(expected[i], actual[i], options...)
		require.Emptyf(t, diff, "Difference in SetParser() for %q", format)
		diff = cmp.Diff(expected[i], generated[i], options...)
		require.Emptyf(t, diff, "Difference in SetParserFunc() for %q", format)
	}
}

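// TestConfig_ParserInterfaceOldFormat is the counterpart of the test above for
// inputs still using the old parsers.Parser interface.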
func TestConfig_ParserInterfaceOldFormat(t *testing.T) {
	formats := []string{
		"collectd",
		"csv",
		"dropwizard",
		"form_urlencoded",
		"graphite",
		"grok",
		"influx",
		"json",
		"json_v2",
		"logfmt",
		"nagios",
		"prometheus",
		"prometheusremotewrite",
		"value",
		"wavefront",
		"xml", "xpath_json", "xpath_msgpack", "xpath_protobuf",
	}

	c := NewConfig()
	require.NoError(t, c.LoadConfig("./testdata/parsers_old.toml"))
	require.Len(t, c.Inputs, len(formats))

	cfg := parsers.Config{
		CSVHeaderRowCount:     42,
		DropwizardTagPathsMap: make(map[string]string),
		GrokPatterns:          []string{"%{COMBINED_LOG_FORMAT}"},
		JSONStrict:            true,
		MetricName:            "parser_test_old",
	}

	override := map[string]struct {
		param map[string]interface{}
		mask  []string
	}{
		"csv": {
			param: map[string]interface{}{
				"HeaderRowCount": cfg.CSVHeaderRowCount,
			},
			mask: []string{"TimeFunc", "ResetMode"},
		},
		"xpath_protobuf": {
			param: map[string]interface{}{
				"ProtobufMessageDef":  "testdata/addressbook.proto",
				"ProtobufMessageType": "addressbook.AddressBook",
			},
		},
	}

	expected := make([]telegraf.Parser, 0, len(formats))
	for _, format := range formats {
		formatCfg := &cfg
		formatCfg.DataFormat = format

		logger := models.NewLogger("parsers", format, cfg.MetricName)

		creator, found := parsers.Parsers[format]
		require.Truef(t, found, "No parser for format %q", format)

		parser := creator(formatCfg.MetricName)
		if settings, found := override[format]; found {
			s := reflect.Indirect(reflect.ValueOf(parser))
			for key, value := range settings.param {
				v := reflect.ValueOf(value)
				s.FieldByName(key).Set(v)
			}
		}
		models.SetLoggerOnPlugin(parser, logger)
		if p, ok := parser.(telegraf.Initializer); ok {
			require.NoError(t, p.Init())
		}
		expected = append(expected, parser)
	}
	require.Len(t, expected, len(formats))

	actual := make([]interface{}, 0)
	generated := make([]interface{}, 0)
	for _, plugin := range c.Inputs {
		input, ok := plugin.Input.(*MockupInputPluginParserOld)
		require.True(t, ok)
		// Get the parser set with 'SetParser()'
		if p, ok := input.Parser.(*models.RunningParser); ok {
			actual = append(actual, p.Parser)
		} else {
			actual = append(actual, input.Parser)
		}
		// Get the parser set with 'SetParserFunc()'
		g, err := input.ParserFunc()
		require.NoError(t, err)
		if rp, ok := g.(*models.RunningParser); ok {
			generated = append(generated, rp.Parser)
		} else {
			generated = append(generated, g)
		}
	}
	require.Len(t, actual, len(formats))

	for i, format := range formats {
		// Determine the underlying type of the parser
		stype := reflect.Indirect(reflect.ValueOf(expected[i])).Interface()
		// Ignore all unexported fields and fields not relevant for functionality
		options := []cmp.Option{
			cmpopts.IgnoreUnexported(stype),
			cmpopts.IgnoreTypes(sync.Mutex{}),
			cmpopts.IgnoreInterfaces(struct{ telegraf.Logger }{}),
		}
		if settings, found := override[format]; found {
			options = append(options, cmpopts.IgnoreFields(stype, settings.mask...))
		}

		// Do a manual comparison as require.EqualValues will also work on unexported fields
		// that cannot be cleared or ignored.
		diff := cmp.Diff(expected[i], actual[i], options...)
		require.Emptyf(t, diff, "Difference in SetParser() for %q", format)
		diff = cmp.Diff(expected[i], generated[i], options...)
		require.Emptyf(t, diff, "Difference in SetParserFunc() for %q", format)
	}
}

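// TestConfig_MultipleProcessorsOrder verifies the resulting processor order for
// configs with and without an explicit 'order' setting, including the case of
// loading multiple configuration files.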
func TestConfig_MultipleProcessorsOrder(t *testing.T) {
	tests := []struct {
		name          string
		filename      []string
		expectedOrder []string
	}{
		{
			name:     "Test the order of multiple unique processors",
			filename: []string{"multiple_processors.toml"},
			expectedOrder: []string{
				"processor",
				"parser_test",
				"processor_parser",
				"processor_parserfunc",
			},
		},
		{
			name:     "Test using a single 'order' configuration",
			filename: []string{"multiple_processors_simple_order.toml"},
			expectedOrder: []string{
				"parser_test",
				"processor_parser",
				"processor_parserfunc",
				"processor",
			},
		},
		{
			name:     "Test using multiple 'order' configurations",
			filename: []string{"multiple_processors_messy_order.toml"},
			expectedOrder: []string{
				"parser_test",
				"processor_parserfunc",
				"processor",
				"processor_parser",
				"processor_parser",
				"processor_parserfunc",
			},
		},
		{
			name: "Test loading multiple configuration files",
			filename: []string{
				"multiple_processors.toml",
				"multiple_processors_simple_order.toml",
			},
			expectedOrder: []string{
				"processor",
				"parser_test",
				"processor_parser",
				"processor_parserfunc",
				"parser_test",
				"processor_parser",
				"processor_parserfunc",
				"processor",
			},
		},
		{
			name: "Test loading multiple configuration files both with order",
			filename: []string{
				"multiple_processors_simple_order.toml",
				"multiple_processors_messy_order.toml",
			},
			expectedOrder: []string{
				"parser_test",
				"processor_parser",
				"processor_parserfunc",
				"parser_test",
				"processor_parserfunc",
				"processor",
				"processor",
				"processor_parser",
				"processor_parser",
				"processor_parserfunc",
			},
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			c := NewConfig()
			filenames := make([]string, 0, len(test.filename))
			for _, fn := range test.filename {
				filenames = append(filenames, filepath.Join("./testdata/processor_order", fn))
			}
			require.NoError(t, c.LoadAll(filenames...))

			require.Equal(t, len(test.expectedOrder), len(c.Processors))

			var order []string
			for _, p := range c.Processors {
				order = append(order, p.Config.Name)
			}

			require.Equal(t, test.expectedOrder, order)
		})
	}
}

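// TestConfig_ProcessorsWithParsers verifies that processors requesting a parser
// receive the expected parser instance for every registered data format.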
func TestConfig_ProcessorsWithParsers(t *testing.T) {
	formats := []string{
		"collectd",
		"csv",
		"dropwizard",
		"form_urlencoded",
		"graphite",
		"grok",
		"influx",
		"json",
		"json_v2",
		"logfmt",
		"nagios",
		"prometheus",
		"prometheusremotewrite",
		"value",
		"wavefront",
		"xml", "xpath_json", "xpath_msgpack", "xpath_protobuf",
	}

	c := NewConfig()
	require.NoError(t, c.LoadAll("./testdata/processors_with_parsers.toml"))
	require.Len(t, c.Processors, len(formats))

	override := map[string]struct {
		param map[string]interface{}
		mask  []string
	}{
		"csv": {
			param: map[string]interface{}{
				"HeaderRowCount": 42,
			},
			mask: []string{"TimeFunc", "ResetMode"},
		},
		"xpath_protobuf": {
			param: map[string]interface{}{
				"ProtobufMessageDef":  "testdata/addressbook.proto",
				"ProtobufMessageType": "addressbook.AddressBook",
			},
		},
	}

	expected := make([]telegraf.Parser, 0, len(formats))
	for _, format := range formats {
		logger := models.NewLogger("parsers", format, "processors_with_parsers")

		creator, found := parsers.Parsers[format]
		require.Truef(t, found, "No parser for format %q", format)

		parser := creator("parser_test")
		if settings, found := override[format]; found {
			s := reflect.Indirect(reflect.ValueOf(parser))
			for key, value := range settings.param {
				v := reflect.ValueOf(value)
				s.FieldByName(key).Set(v)
			}
		}
		models.SetLoggerOnPlugin(parser, logger)
		if p, ok := parser.(telegraf.Initializer); ok {
			require.NoError(t, p.Init())
		}
		expected = append(expected, parser)
	}
	require.Len(t, expected, len(formats))

	actual := make([]interface{}, 0)
	generated := make([]interface{}, 0)
	for _, plugin := range c.Processors {
		var processorIF telegraf.Processor
		if p, ok := plugin.Processor.(unwrappable); ok {
			processorIF = p.Unwrap()
		} else {
			processorIF = plugin.Processor.(telegraf.Processor)
		}
		require.NotNil(t, processorIF)

		processor, ok := processorIF.(*MockupProcessorPluginParser)
		require.True(t, ok)

		// Get the parser set with 'SetParser()'
		if p, ok := processor.Parser.(*models.RunningParser); ok {
			actual = append(actual, p.Parser)
		} else {
			actual = append(actual, processor.Parser)
		}
		// Get the parser set with 'SetParserFunc()'
		if processor.ParserFunc != nil {
			g, err := processor.ParserFunc()
			require.NoError(t, err)
			if rp, ok := g.(*models.RunningParser); ok {
				generated = append(generated, rp.Parser)
			} else {
				generated = append(generated, g)
			}
		} else {
			generated = append(generated, nil)
		}
	}
	require.Len(t, actual, len(formats))

	for i, format := range formats {
		// Determine the underlying type of the parser
		stype := reflect.Indirect(reflect.ValueOf(expected[i])).Interface()
		// Ignore all unexported fields and fields not relevant for functionality
		options := []cmp.Option{
			cmpopts.IgnoreUnexported(stype),
			cmpopts.IgnoreTypes(sync.Mutex{}),
			cmpopts.IgnoreInterfaces(struct{ telegraf.Logger }{}),
		}
		if settings, found := override[format]; found {
			options = append(options, cmpopts.IgnoreFields(stype, settings.mask...))
		}

		// Do a manual comparison as require.EqualValues will also work on unexported fields
		// that cannot be cleared or ignored.
		diff := cmp.Diff(expected[i], actual[i], options...)
		require.Emptyf(t, diff, "Difference in SetParser() for %q", format)
		diff = cmp.Diff(expected[i], generated[i], options...)
		require.Emptyf(t, diff, "Difference in SetParserFunc() for %q", format)
	}
}

/*** Mockup INPUT plugin for (old) parser testing to avoid cyclic dependencies ***/
type MockupInputPluginParserOld struct {
	Parser     parsers.Parser
	ParserFunc parsers.ParserFunc
}

func (m *MockupInputPluginParserOld) SampleConfig() string {
	return "Mockup old parser test plugin"
}
func (m *MockupInputPluginParserOld) Gather(_ telegraf.Accumulator) error {
	return nil
}
func (m *MockupInputPluginParserOld) SetParser(parser parsers.Parser) {
	m.Parser = parser
}
func (m *MockupInputPluginParserOld) SetParserFunc(f parsers.ParserFunc) {
	m.ParserFunc = f
}

/*** Mockup INPUT plugin for (new) parser testing to avoid cyclic dependencies ***/
type MockupInputPluginParserNew struct {
	Parser     telegraf.Parser
	ParserFunc telegraf.ParserFunc
}

func (m *MockupInputPluginParserNew) SampleConfig() string {
	return "Mockup new parser test plugin"
}
func (m *MockupInputPluginParserNew) Gather(_ telegraf.Accumulator) error {
	return nil
}
func (m *MockupInputPluginParserNew) SetParser(parser telegraf.Parser) {
	m.Parser = parser
}
func (m *MockupInputPluginParserNew) SetParserFunc(f telegraf.ParserFunc) {
	m.ParserFunc = f
}

/*** Mockup INPUT plugin for testing to avoid cyclic dependencies ***/
type MockupInputPlugin struct {
	Servers      []string `toml:"servers"`
	Methods      []string `toml:"methods"`
	Timeout      Duration `toml:"timeout"`
	ReadTimeout  Duration `toml:"read_timeout"`
	WriteTimeout Duration `toml:"write_timeout"`
	MaxBodySize  Size     `toml:"max_body_size"`
	Paths        []string `toml:"paths"`
	Port         int      `toml:"port"`
	Command      string
	Files        []string
	PidFile      string
	Log          telegraf.Logger `toml:"-"`
	tls.ServerConfig

	parser telegraf.Parser
}

func (m *MockupInputPlugin) SampleConfig() string {
	return "Mockup test input plugin"
}
func (m *MockupInputPlugin) Gather(_ telegraf.Accumulator) error {
	return nil
}
func (m *MockupInputPlugin) SetParser(parser telegraf.Parser) {
	m.parser = parser
}

/*** Mockup INPUT plugin with ParserFunc interface ***/
type MockupInputPluginParserFunc struct {
	parserFunc telegraf.ParserFunc
}

func (m *MockupInputPluginParserFunc) SampleConfig() string {
	return "Mockup test input plugin"
}
func (m *MockupInputPluginParserFunc) Gather(_ telegraf.Accumulator) error {
	return nil
}
func (m *MockupInputPluginParserFunc) SetParserFunc(pf telegraf.ParserFunc) {
	m.parserFunc = pf
}

/*** Mockup INPUT plugin without ParserFunc interface ***/
type MockupInputPluginParserOnly struct {
	parser telegraf.Parser
}

func (m *MockupInputPluginParserOnly) SampleConfig() string {
	return "Mockup test input plugin"
}
func (m *MockupInputPluginParserOnly) Gather(_ telegraf.Accumulator) error {
	return nil
}
func (m *MockupInputPluginParserOnly) SetParser(p telegraf.Parser) {
	m.parser = p
}

/*** Mockup PROCESSOR plugin for testing to avoid cyclic dependencies ***/
type MockupProcessorPluginParser struct {
	Parser     telegraf.Parser
	ParserFunc telegraf.ParserFunc
}

func (m *MockupProcessorPluginParser) Start(_ telegraf.Accumulator) error {
	return nil
}
func (m *MockupProcessorPluginParser) Stop() {
}
func (m *MockupProcessorPluginParser) SampleConfig() string {
	return "Mockup test processor plugin with parser"
}
func (m *MockupProcessorPluginParser) Apply(_ ...telegraf.Metric) []telegraf.Metric {
	return nil
}
func (m *MockupProcessorPluginParser) Add(_ telegraf.Metric, _ telegraf.Accumulator) error {
	return nil
}
func (m *MockupProcessorPluginParser) SetParser(parser telegraf.Parser) {
	m.Parser = parser
}
func (m *MockupProcessorPluginParser) SetParserFunc(f telegraf.ParserFunc) {
	m.ParserFunc = f
}

/*** Mockup PROCESSOR plugin without parser ***/
type MockupProcessorPlugin struct{}

func (m *MockupProcessorPlugin) Start(_ telegraf.Accumulator) error {
	return nil
}
func (m *MockupProcessorPlugin) Stop() {
}
func (m *MockupProcessorPlugin) SampleConfig() string {
	return "Mockup test processor plugin without parser"
}
func (m *MockupProcessorPlugin) Apply(_ ...telegraf.Metric) []telegraf.Metric {
	return nil
}
func (m *MockupProcessorPlugin) Add(_ telegraf.Metric, _ telegraf.Accumulator) error {
	return nil
}

/*** Mockup PROCESSOR plugin with parser ***/
type MockupProcessorPluginParserOnly struct {
	Parser telegraf.Parser
}

func (m *MockupProcessorPluginParserOnly) Start(_ telegraf.Accumulator) error {
	return nil
}
func (m *MockupProcessorPluginParserOnly) Stop() {
}
func (m *MockupProcessorPluginParserOnly) SampleConfig() string {
	return "Mockup test processor plugin with parser"
}
func (m *MockupProcessorPluginParserOnly) Apply(_ ...telegraf.Metric) []telegraf.Metric {
	return nil
}
func (m *MockupProcessorPluginParserOnly) Add(_ telegraf.Metric, _ telegraf.Accumulator) error {
	return nil
}
func (m *MockupProcessorPluginParserOnly) SetParser(parser telegraf.Parser) {
	m.Parser = parser
}

/*** Mockup PROCESSOR plugin with parser-function ***/
type MockupProcessorPluginParserFunc struct {
	Parser telegraf.ParserFunc
}

func (m *MockupProcessorPluginParserFunc) Start(_ telegraf.Accumulator) error {
	return nil
}
func (m *MockupProcessorPluginParserFunc) Stop() {
}
func (m *MockupProcessorPluginParserFunc) SampleConfig() string {
	return "Mockup test processor plugin with parser"
}
func (m *MockupProcessorPluginParserFunc) Apply(_ ...telegraf.Metric) []telegraf.Metric {
	return nil
}
func (m *MockupProcessorPluginParserFunc) Add(_ telegraf.Metric, _ telegraf.Accumulator) error {
	return nil
}
func (m *MockupProcessorPluginParserFunc) SetParserFunc(pf telegraf.ParserFunc) {
	m.Parser = pf
}

/*** Mockup OUTPUT plugin for testing to avoid cyclic dependencies ***/
type MockupOuputPlugin struct {
	URL             string            `toml:"url"`
	Headers         map[string]string `toml:"headers"`
	Scopes          []string          `toml:"scopes"`
	NamespacePrefix string            `toml:"namespace_prefix"`
	Log             telegraf.Logger   `toml:"-"`
	tls.ClientConfig
}

func (m *MockupOuputPlugin) Connect() error {
	return nil
}
func (m *MockupOuputPlugin) Close() error {
	return nil
}
func (m *MockupOuputPlugin) SampleConfig() string {
	return "Mockup test output plugin"
}
func (m *MockupOuputPlugin) Write(_ []telegraf.Metric) error {
	return nil
}

// Register the mockup plugin on loading
func init() {
	// Register the mockup input plugin for the required names
	inputs.Add("parser_test_new", func() telegraf.Input {
		return &MockupInputPluginParserNew{}
	})
	inputs.Add("parser_test_old", func() telegraf.Input {
		return &MockupInputPluginParserOld{}
	})
	inputs.Add("parser", func() telegraf.Input {
		return &MockupInputPluginParserOnly{}
	})
	inputs.Add("parser_func", func() telegraf.Input {
		return &MockupInputPluginParserFunc{}
	})
	inputs.Add("exec", func() telegraf.Input {
		return &MockupInputPlugin{Timeout: Duration(time.Second * 5)}
	})
	inputs.Add("file", func() telegraf.Input {
		return &MockupInputPlugin{}
	})
	inputs.Add("http_listener_v2", func() telegraf.Input {
		return &MockupInputPlugin{}
	})
	inputs.Add("memcached", func() telegraf.Input {
		return &MockupInputPlugin{}
	})
	inputs.Add("procstat", func() telegraf.Input {
		return &MockupInputPlugin{}
	})

	// Register the mockup processor plugin for the required names
	processors.Add("parser_test", func() telegraf.Processor {
		return &MockupProcessorPluginParser{}
	})
	processors.Add("processor", func() telegraf.Processor {
		return &MockupProcessorPlugin{}
	})
	processors.Add("processor_parser", func() telegraf.Processor {
		return &MockupProcessorPluginParserOnly{}
	})
	processors.Add("processor_parserfunc", func() telegraf.Processor {
		return &MockupProcessorPluginParserFunc{}
	})

	// Register the mockup output plugin for the required names
	outputs.Add("azure_monitor", func() telegraf.Output {
		return &MockupOuputPlugin{NamespacePrefix: "Telegraf/"}
	})
	outputs.Add("http", func() telegraf.Output {
		return &MockupOuputPlugin{}
	})
}