feat(config): Deprecate `fieldpass` and `fielddrop` modifiers (#14012)

Thomas Casteleyn 2023-12-04 16:22:12 +01:00 committed by GitHub
parent ce64421419
commit 193479a988
28 changed files with 231 additions and 153 deletions

View File

@ -1289,7 +1289,7 @@ func (c *Config) buildAggregator(name string, tbl *ast.Table) (*models.Aggregato
}
var err error
conf.Filter, err = c.buildFilter(tbl)
conf.Filter, err = c.buildFilter("aggregators."+name, tbl)
if err != nil {
return conf, err
}
@ -1313,7 +1313,7 @@ func (c *Config) buildProcessor(category, name string, tbl *ast.Table) (*models.
}
var err error
conf.Filter, err = c.buildFilter(tbl)
conf.Filter, err = c.buildFilter(category+"."+name, tbl)
if err != nil {
return conf, err
}
@ -1324,10 +1324,10 @@ func (c *Config) buildProcessor(category, name string, tbl *ast.Table) (*models.
}
// buildFilter builds a Filter
// (tagpass/tagdrop/namepass/namedrop/fieldpass/fielddrop) to
// (tags, fields, namepass, namedrop, metricpass) to
// be inserted into the models.OutputConfig/models.InputConfig
// to be used for glob filtering on tags and measurements
func (c *Config) buildFilter(tbl *ast.Table) (models.Filter, error) {
func (c *Config) buildFilter(plugin string, tbl *ast.Table) (models.Filter, error) {
f := models.Filter{}
c.getFieldStringSlice(tbl, "namepass", &f.NamePass)
@ -1335,11 +1335,49 @@ func (c *Config) buildFilter(tbl *ast.Table) (models.Filter, error) {
c.getFieldStringSlice(tbl, "namedrop", &f.NameDrop)
c.getFieldString(tbl, "namedrop_separator", &f.NameDropSeparators)
c.getFieldStringSlice(tbl, "pass", &f.FieldPass)
c.getFieldStringSlice(tbl, "fieldpass", &f.FieldPass)
var oldPass []string
c.getFieldStringSlice(tbl, "pass", &oldPass)
if len(oldPass) > 0 {
models.PrintOptionDeprecationNotice(telegraf.Warn, plugin, "pass", telegraf.DeprecationInfo{
Since: "0.10.4",
RemovalIn: "2.0.0",
Notice: "use 'fieldinclude' instead",
})
f.FieldInclude = append(f.FieldInclude, oldPass...)
}
var oldFieldPass []string
c.getFieldStringSlice(tbl, "fieldpass", &oldFieldPass)
if len(oldFieldPass) > 0 {
models.PrintOptionDeprecationNotice(telegraf.Warn, plugin, "fieldpass", telegraf.DeprecationInfo{
Since: "1.29.0",
RemovalIn: "2.0.0",
Notice: "use 'fieldinclude' instead",
})
f.FieldInclude = append(f.FieldInclude, oldFieldPass...)
}
c.getFieldStringSlice(tbl, "fieldinclude", &f.FieldInclude)
c.getFieldStringSlice(tbl, "drop", &f.FieldDrop)
c.getFieldStringSlice(tbl, "fielddrop", &f.FieldDrop)
var oldDrop []string
c.getFieldStringSlice(tbl, "drop", &oldDrop)
if len(oldDrop) > 0 {
models.PrintOptionDeprecationNotice(telegraf.Warn, plugin, "drop", telegraf.DeprecationInfo{
Since: "0.10.4",
RemovalIn: "2.0.0",
Notice: "use 'fieldexclude' instead",
})
f.FieldExclude = append(f.FieldExclude, oldDrop...)
}
var oldFieldDrop []string
c.getFieldStringSlice(tbl, "fielddrop", &oldFieldDrop)
if len(oldFieldDrop) > 0 {
models.PrintOptionDeprecationNotice(telegraf.Warn, plugin, "fielddrop", telegraf.DeprecationInfo{
Since: "1.29.0",
RemovalIn: "2.0.0",
Notice: "use 'fieldexclude' instead",
})
f.FieldExclude = append(f.FieldExclude, oldFieldDrop...)
}
c.getFieldStringSlice(tbl, "fieldexclude", &f.FieldExclude)
c.getFieldTagFilter(tbl, "tagpass", &f.TagPassFilters)
c.getFieldTagFilter(tbl, "tagdrop", &f.TagDropFilters)
@ -1392,7 +1430,7 @@ func (c *Config) buildInput(name string, tbl *ast.Table) (*models.InputConfig, e
}
var err error
cp.Filter, err = c.buildFilter(tbl)
cp.Filter, err = c.buildFilter("inputs."+name, tbl)
if err != nil {
return cp, err
}
@ -1407,7 +1445,7 @@ func (c *Config) buildInput(name string, tbl *ast.Table) (*models.InputConfig, e
// models.OutputConfig to be inserted into models.RunningInput
// Note: error exists in the return for future calls that might require error
func (c *Config) buildOutput(name string, tbl *ast.Table) (*models.OutputConfig, error) {
filter, err := c.buildFilter(tbl)
filter, err := c.buildFilter("outputs."+name, tbl)
if err != nil {
return nil, err
}
@ -1443,7 +1481,7 @@ func (c *Config) missingTomlField(_ reflect.Type, key string) error {
case "alias", "always_include_local_tags",
"collection_jitter", "collection_offset",
"data_format", "delay", "drop", "drop_original",
"fielddrop", "fieldpass", "flush_interval", "flush_jitter",
"fielddrop", "fieldexclude", "fieldinclude", "fieldpass", "flush_interval", "flush_jitter",
"grace",
"interval",
"lvm", // What is this used for?

View File

@ -76,10 +76,10 @@ func TestConfig_LoadSingleInputWithEnvVars(t *testing.T) {
# is unique`
filter := models.Filter{
NameDrop: []string{"metricname2"},
NamePass: []string{"metricname1", "ip_192.168.1.1_name"},
FieldDrop: []string{"other", "stuff"},
FieldPass: []string{"some", "strings"},
NameDrop: []string{"metricname2"},
NamePass: []string{"metricname1", "ip_192.168.1.1_name"},
FieldExclude: []string{"other", "stuff"},
FieldInclude: []string{"some", "strings"},
TagDropFilters: []models.TagFilter{
{
Name: "badtag",
@ -117,10 +117,10 @@ func TestConfig_LoadSingleInput(t *testing.T) {
input.Servers = []string{"localhost"}
filter := models.Filter{
NameDrop: []string{"metricname2"},
NamePass: []string{"metricname1"},
FieldDrop: []string{"other", "stuff"},
FieldPass: []string{"some", "strings"},
NameDrop: []string{"metricname2"},
NamePass: []string{"metricname1"},
FieldExclude: []string{"other", "stuff"},
FieldInclude: []string{"some", "strings"},
TagDropFilters: []models.TagFilter{
{
Name: "badtag",
@ -162,8 +162,8 @@ func TestConfig_LoadSingleInput_WithSeparators(t *testing.T) {
NameDropSeparators: ".",
NamePass: []string{"metricname1"},
NamePassSeparators: ".",
FieldDrop: []string{"other", "stuff"},
FieldPass: []string{"some", "strings"},
FieldExclude: []string{"other", "stuff"},
FieldInclude: []string{"some", "strings"},
TagDropFilters: []models.TagFilter{
{
Name: "badtag",
@ -209,10 +209,10 @@ func TestConfig_LoadDirectory(t *testing.T) {
expectedPlugins[0].Servers = []string{"localhost"}
filterMockup := models.Filter{
NameDrop: []string{"metricname2"},
NamePass: []string{"metricname1"},
FieldDrop: []string{"other", "stuff"},
FieldPass: []string{"some", "strings"},
NameDrop: []string{"metricname2"},
NamePass: []string{"metricname1"},
FieldExclude: []string{"other", "stuff"},
FieldInclude: []string{"some", "strings"},
TagDropFilters: []models.TagFilter{
{
Name: "badtag",
@ -253,10 +253,10 @@ func TestConfig_LoadDirectory(t *testing.T) {
expectedPlugins[2].Servers = []string{"192.168.1.1"}
filterMemcached := models.Filter{
NameDrop: []string{"metricname2"},
NamePass: []string{"metricname1"},
FieldDrop: []string{"other", "stuff"},
FieldPass: []string{"some", "strings"},
NameDrop: []string{"metricname2"},
NamePass: []string{"metricname1"},
FieldExclude: []string{"other", "stuff"},
FieldInclude: []string{"some", "strings"},
TagDropFilters: []models.TagFilter{
{
Name: "badtag",
@ -355,6 +355,15 @@ func TestConfig_LoadSpecialTypes(t *testing.T) {
require.Equal(t, "/path/", strings.TrimRight(input.Paths[0], "\r\n"))
}
func TestConfig_DeprecatedFilters(t *testing.T) {
c := config.NewConfig()
require.NoError(t, c.LoadConfig("./testdata/deprecated_field_filter.toml"))
require.Len(t, c.Inputs, 1)
require.Equal(t, []string{"foo", "bar", "baz"}, c.Inputs[0].Config.Filter.FieldInclude)
require.Equal(t, []string{"foo", "bar", "baz"}, c.Inputs[0].Config.Filter.FieldExclude)
}
func TestConfig_FieldNotDefined(t *testing.T) {
tests := []struct {
name string

View File

@ -0,0 +1,8 @@
[[inputs.file]]
pass = ["foo"]
fieldpass = ["bar"]
fieldinclude = ["baz"]
drop = ["foo"]
fielddrop = ["bar"]
fieldexclude = ["baz"]
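Reading this sample together with the new `buildFilter` code and `TestConfig_DeprecatedFilters`: the deprecated keys are appended before the current ones, so the loader merges all three spellings in order. A commented sketch of the same file (values as above):

```toml
[[inputs.file]]
  pass         = ["foo"]  # deprecated since 0.10.4, merged first
  fieldpass    = ["bar"]  # deprecated since 1.29.0, merged second
  fieldinclude = ["baz"]  # current key, merged last
  # effective fieldinclude: ["foo", "bar", "baz"]

  drop         = ["foo"]  # deprecated since 0.10.4, merged first
  fielddrop    = ["bar"]  # deprecated since 1.29.0, merged second
  fieldexclude = ["baz"]  # current key, merged last
  # effective fieldexclude: ["foo", "bar", "baz"]
```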

View File

@ -2,8 +2,8 @@
servers = ["localhost"]
namepass = ["metricname1"]
namedrop = ["metricname2"]
fieldpass = ["some", "strings"]
fielddrop = ["other", "stuff"]
fieldinclude = ["some", "strings"]
fieldexclude = ["other", "stuff"]
interval = "5s"
[inputs.memcached.tagpass]
goodtag = ["mytag"]

View File

@ -18,8 +18,8 @@
servers = ["$MY_TEST_SERVER"]
namepass = ["metricname1", "ip_${MY_TEST_SERVER}_name"] # this comment will be ignored as well
namedrop = ["metricname2"]
fieldpass = ["some", "strings"]
fielddrop = ["other", "stuff"]
fieldinclude = ["some", "strings"]
fieldexclude = ["other", "stuff"]
interval = "$TEST_INTERVAL"
##### this input is provided to test multiline strings
command = """

View File

@ -4,8 +4,8 @@
namepass_separator = "."
namedrop = ["metricname2"]
namedrop_separator = "."
fieldpass = ["some", "strings"]
fielddrop = ["other", "stuff"]
fieldinclude = ["some", "strings"]
fieldexclude = ["other", "stuff"]
interval = "5s"
[inputs.memcached.tagpass]
goodtag = ["mytag"]

View File

@ -457,7 +457,7 @@ avoid measurement collisions when defining multiple plugins:
percpu = true
totalcpu = false
name_override = "percpu_usage"
fielddrop = ["cpu_time*"]
fieldexclude = ["cpu_time*"]
```
### Output Plugins
@ -582,7 +582,7 @@ the originals.
```toml
[[inputs.system]]
fieldpass = ["load1"] # collects system load1 metric.
fieldinclude = ["load1"] # collects system load1 metric.
[[aggregators.minmax]]
period = "30s" # send & clear the aggregate every 30s.
@ -600,7 +600,7 @@ to the `namepass` parameter.
[[inputs.swap]]
[[inputs.system]]
fieldpass = ["load1"] # collects system load1 metric.
fieldinclude = ["load1"] # collects system load1 metric.
[[aggregators.minmax]]
period = "30s" # send & clear the aggregate every 30s.
@ -688,14 +688,14 @@ removed the metric is removed. Tags and fields are modified before a metric is
passed to a processor, aggregator, or output plugin. When used with an input
plugin the filter applies after the input runs.
- **fieldpass**:
- **fieldinclude**:
An array of [glob pattern][] strings. Only fields whose field key matches a
pattern in this list are emitted.
- **fielddrop**:
The inverse of `fieldpass`. Fields with a field key matching one of the
- **fieldexclude**:
The inverse of `fieldinclude`. Fields with a field key matching one of the
patterns will be discarded from the metric. This is tested on metrics after
they have passed the `fieldpass` test.
they have passed the `fieldinclude` test.
- **taginclude**:
An array of [glob pattern][] strings. Only tags with a tag key matching one of
@ -717,7 +717,7 @@ tags and the agent `host` tag.
[[inputs.cpu]]
percpu = true
totalcpu = false
fielddrop = ["cpu_time"]
fieldexclude = ["cpu_time"]
# Don't collect CPU data for cpu6 & cpu7
[inputs.cpu.tagdrop]
cpu = [ "cpu6", "cpu7" ]
@ -746,18 +746,18 @@ tags and the agent `host` tag.
instance = ["isatap*", "Local*"]
```
#### Using fieldpass and fielddrop
#### Using fieldinclude and fieldexclude
```toml
# Drop all metrics for guest & steal CPU usage
[[inputs.cpu]]
percpu = false
totalcpu = true
fielddrop = ["usage_guest", "usage_steal"]
fieldexclude = ["usage_guest", "usage_steal"]
# Only store inode related metrics for disks
[[inputs.disk]]
fieldpass = ["inodes*"]
fieldinclude = ["inodes*"]
```
#### Using namepass and namedrop

View File

@ -89,5 +89,5 @@ Add filtering to the sample config, leave it commented out.
```toml
[[inputs.system]]
## Uncomment to remove deprecated metrics.
# fielddrop = ["uptime_format"]
# fieldexclude = ["uptime_format"]
```

View File

@ -5,8 +5,10 @@ type FilterOptions struct {
NameDrop []string `toml:"namedrop,omitempty"`
FieldPassOld []string `toml:"pass,omitempty"`
FieldPass []string `toml:"fieldpass,omitempty"`
FieldInclude []string `toml:"fieldinclude,omitempty"`
FieldDropOld []string `toml:"drop,omitempty"`
FieldDrop []string `toml:"fielddrop,omitempty"`
FieldExclude []string `toml:"fieldexclude,omitempty"`
TagPassFilters map[string][]string `toml:"tagpass,omitempty"`
TagDropFilters map[string][]string `toml:"tagdrop,omitempty"`
TagExclude []string `toml:"tagexclude,omitempty"`

View File

@ -17,11 +17,27 @@ type InputOptions struct {
NameDrop []string `toml:"namedrop,omitempty"`
FieldPassOld []string `toml:"pass,omitempty"`
FieldPass []string `toml:"fieldpass,omitempty"`
FieldInclude []string `toml:"fieldinclude,omitempty"`
FieldDropOld []string `toml:"drop,omitempty"`
FieldDrop []string `toml:"fielddrop,omitempty"`
FieldExclude []string `toml:"fieldexclude,omitempty"`
TagPassFilters map[string][]string `toml:"tagpass,omitempty"`
TagDropFilters map[string][]string `toml:"tagdrop,omitempty"`
TagExclude []string `toml:"tagexclude,omitempty"`
TagInclude []string `toml:"taginclude,omitempty"`
MetricPass string `toml:"metricpass,omitempty"`
}
func (io *InputOptions) Migrate() {
io.FieldInclude = append(io.FieldInclude, io.FieldPassOld...)
io.FieldInclude = append(io.FieldInclude, io.FieldPass...)
io.FieldPassOld = nil
io.FieldPass = nil
io.FieldExclude = append(io.FieldExclude, io.FieldDropOld...)
io.FieldExclude = append(io.FieldExclude, io.FieldDrop...)
io.FieldDropOld = nil
io.FieldDrop = nil
}
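A minimal sketch of what `Migrate` is meant to accomplish when a plugin migration round-trips these common options: the deprecated keys are folded into the new ones (old `pass`/`drop` first, then `fieldpass`/`fielddrop`) and dropped from the re-marshalled output. The `inputs.example` table and the patterns are illustrative, not a real plugin:

```toml
# Options handed to a migration (deprecated spellings):
[[inputs.example]]
  pass      = ["load1"]
  fieldpass = ["load5"]
  fielddrop = ["uptime_format"]

# After InputOptions.Migrate() the re-marshalled options carry
# only the new keys, with values appended in the order above:
[[inputs.example]]
  fieldinclude = ["load1", "load5"]
  fieldexclude = ["uptime_format"]
```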

View File

@ -9,11 +9,27 @@ type OutputOptions struct {
NameDrop []string `toml:"namedrop,omitempty"`
FieldPassOld []string `toml:"pass,omitempty"`
FieldPass []string `toml:"fieldpass,omitempty"`
FieldInclude []string `toml:"fieldinclude,omitempty"`
FieldDropOld []string `toml:"drop,omitempty"`
FieldDrop []string `toml:"fielddrop,omitempty"`
FieldExclude []string `toml:"fieldexclude,omitempty"`
TagPassFilters map[string][]string `toml:"tagpass,omitempty"`
TagDropFilters map[string][]string `toml:"tagdrop,omitempty"`
TagExclude []string `toml:"tagexclude,omitempty"`
TagInclude []string `toml:"taginclude,omitempty"`
MetricPass string `toml:"metricpass,omitempty"`
}
func (oo *OutputOptions) Migrate() {
oo.FieldInclude = append(oo.FieldInclude, oo.FieldPassOld...)
oo.FieldInclude = append(oo.FieldInclude, oo.FieldPass...)
oo.FieldPassOld = nil
oo.FieldPass = nil
oo.FieldExclude = append(oo.FieldExclude, oo.FieldDropOld...)
oo.FieldExclude = append(oo.FieldExclude, oo.FieldDrop...)
oo.FieldDropOld = nil
oo.FieldDrop = nil
}

View File

@ -48,8 +48,8 @@ type jolokiaAgent struct {
NamePass []string `toml:"namepass,omitempty"`
NameDrop []string `toml:"namedrop,omitempty"`
FieldPass []string `toml:"fieldpass,omitempty"`
FieldDrop []string `toml:"fielddrop,omitempty"`
FieldInclude []string `toml:"fieldinclude,omitempty"`
FieldExclude []string `toml:"fieldexclude,omitempty"`
TagPassFilters map[string][]string `toml:"tagpass,omitempty"`
TagDropFilters map[string][]string `toml:"tagdrop,omitempty"`
TagExclude []string `toml:"tagexclude,omitempty"`
@ -191,6 +191,8 @@ func migrate(tbl *ast.Table) ([]byte, string, error) {
}
func (j *jolokiaAgent) fillCommon(o common.InputOptions) {
o.Migrate()
j.Interval = o.Interval
j.Precision = o.Precision
j.CollectionJitter = o.CollectionJitter
@ -212,13 +214,11 @@ func (j *jolokiaAgent) fillCommon(o common.InputOptions) {
if len(o.NameDrop) > 0 {
j.NameDrop = append(j.NameDrop, o.NameDrop...)
}
if len(o.FieldPass) > 0 || len(o.FieldDropOld) > 0 {
j.FieldPass = append(j.FieldPass, o.FieldPass...)
j.FieldPass = append(j.FieldPass, o.FieldPassOld...)
if len(o.FieldInclude) > 0 {
j.FieldInclude = append(j.FieldInclude, o.FieldInclude...)
}
if len(o.FieldDrop) > 0 || len(o.FieldDropOld) > 0 {
j.FieldDrop = append(j.FieldDrop, o.FieldDrop...)
j.FieldDrop = append(j.FieldDrop, o.FieldDropOld...)
if len(o.FieldExclude) > 0 {
j.FieldExclude = append(j.FieldExclude, o.FieldExclude...)
}
if len(o.TagPassFilters) > 0 {
j.TagPassFilters = make(map[string][]string, len(o.TagPassFilters))

View File

@ -40,6 +40,7 @@ func migrate(tbl *ast.Table) ([]byte, string, error) {
// Fill common options
plugin := make(map[string]interface{})
old.InputOptions.Migrate()
general, err := toml.Marshal(old.InputOptions)
if err != nil {
return nil, "", fmt.Errorf("marshalling general options failed: %w", err)

View File

@ -0,0 +1,8 @@
[[inputs.http]]
data_format = "json"
fieldinclude = ["a"]
name_override = "httpjson"
urls = ["http://localhost:9999/stats/"]
[inputs.http.tags]
foo = "bar"

View File

@ -0,0 +1,6 @@
[[inputs.httpjson]]
servers = ["http://localhost:9999/stats/"]
fieldpass = ["a"]
[inputs.httpjson.tags]
foo = "bar"

View File

@ -60,8 +60,8 @@ type jolokiaAgent struct {
NamePass []string `toml:"namepass,omitempty"`
NameDrop []string `toml:"namedrop,omitempty"`
FieldPass []string `toml:"fieldpass,omitempty"`
FieldDrop []string `toml:"fielddrop,omitempty"`
FieldInclude []string `toml:"fieldinclude,omitempty"`
FieldExclude []string `toml:"fieldexclude,omitempty"`
TagPassFilters map[string][]string `toml:"tagpass,omitempty"`
TagDropFilters map[string][]string `toml:"tagdrop,omitempty"`
TagExclude []string `toml:"tagexclude,omitempty"`
@ -91,8 +91,8 @@ type jolokiaProxy struct {
NamePass []string `toml:"namepass,omitempty"`
NameDrop []string `toml:"namedrop,omitempty"`
FieldPass []string `toml:"fieldpass,omitempty"`
FieldDrop []string `toml:"fielddrop,omitempty"`
FieldInclude []string `toml:"fieldinclude,omitempty"`
FieldExclude []string `toml:"fieldexclude,omitempty"`
TagPassFilters map[string][]string `toml:"tagpass,omitempty"`
TagDropFilters map[string][]string `toml:"tagdrop,omitempty"`
TagExclude []string `toml:"tagexclude,omitempty"`
@ -238,6 +238,8 @@ func migrate(tbl *ast.Table) ([]byte, string, error) {
}
func (j *jolokiaAgent) fillCommon(o common.InputOptions) {
o.Migrate()
j.Interval = o.Interval
j.Precision = o.Precision
j.CollectionJitter = o.CollectionJitter
@ -263,13 +265,11 @@ func (j *jolokiaAgent) fillCommon(o common.InputOptions) {
if len(o.NameDrop) > 0 {
j.NameDrop = append(j.NameDrop, o.NameDrop...)
}
if len(o.FieldPass) > 0 || len(o.FieldDropOld) > 0 {
j.FieldPass = append(j.FieldPass, o.FieldPass...)
j.FieldPass = append(j.FieldPass, o.FieldPassOld...)
if len(o.FieldInclude) > 0 {
j.FieldInclude = append(j.FieldInclude, o.FieldInclude...)
}
if len(o.FieldDrop) > 0 || len(o.FieldDropOld) > 0 {
j.FieldDrop = append(j.FieldDrop, o.FieldDrop...)
j.FieldDrop = append(j.FieldDrop, o.FieldDropOld...)
if len(o.FieldExclude) > 0 {
j.FieldExclude = append(j.FieldExclude, o.FieldExclude...)
}
if len(o.TagPassFilters) > 0 {
j.TagPassFilters = make(map[string][]string, len(o.TagPassFilters))
@ -293,6 +293,8 @@ func (j *jolokiaAgent) fillCommon(o common.InputOptions) {
}
func (j *jolokiaProxy) fillCommon(o common.InputOptions) {
o.Migrate()
j.Interval = o.Interval
j.Precision = o.Precision
j.CollectionJitter = o.CollectionJitter
@ -318,13 +320,11 @@ func (j *jolokiaProxy) fillCommon(o common.InputOptions) {
if len(o.NameDrop) > 0 {
j.NameDrop = append(j.NameDrop, o.NameDrop...)
}
if len(o.FieldPass) > 0 || len(o.FieldDropOld) > 0 {
j.FieldPass = append(j.FieldPass, o.FieldPass...)
j.FieldPass = append(j.FieldPass, o.FieldPassOld...)
if len(o.FieldInclude) > 0 {
j.FieldInclude = append(j.FieldInclude, o.FieldInclude...)
}
if len(o.FieldDrop) > 0 || len(o.FieldDropOld) > 0 {
j.FieldDrop = append(j.FieldDrop, o.FieldDrop...)
j.FieldDrop = append(j.FieldDrop, o.FieldDropOld...)
if len(o.FieldExclude) > 0 {
j.FieldExclude = append(j.FieldExclude, o.FieldExclude...)
}
if len(o.TagPassFilters) > 0 {
j.TagPassFilters = make(map[string][]string, len(o.TagPassFilters))

View File

@ -25,8 +25,8 @@ type riemann struct {
Alias string `toml:"alias,omitempty"`
NamePass []string `toml:"namepass,omitempty"`
NameDrop []string `toml:"namedrop,omitempty"`
FieldPass []string `toml:"fieldpass,omitempty"`
FieldDrop []string `toml:"fielddrop,omitempty"`
FieldInclude []string `toml:"fieldinclude,omitempty"`
FieldExclude []string `toml:"fieldexclude,omitempty"`
TagPassFilters map[string][]string `toml:"tagpass,omitempty"`
TagDropFilters map[string][]string `toml:"tagdrop,omitempty"`
TagExclude []string `toml:"tagexclude,omitempty"`
@ -63,6 +63,8 @@ func migrate(tbl *ast.Table) ([]byte, string, error) {
}
func (j *riemann) fillCommon(o common.OutputOptions) {
o.Migrate()
j.Alias = o.Alias
if len(o.NamePass) > 0 {
@ -71,13 +73,11 @@ func (j *riemann) fillCommon(o common.OutputOptions) {
if len(o.NameDrop) > 0 {
j.NameDrop = append(j.NameDrop, o.NameDrop...)
}
if len(o.FieldPass) > 0 || len(o.FieldDropOld) > 0 {
j.FieldPass = append(j.FieldPass, o.FieldPass...)
j.FieldPass = append(j.FieldPass, o.FieldPassOld...)
if len(o.FieldInclude) > 0 {
j.FieldInclude = append(j.FieldInclude, o.FieldInclude...)
}
if len(o.FieldDrop) > 0 || len(o.FieldDropOld) > 0 {
j.FieldDrop = append(j.FieldDrop, o.FieldDrop...)
j.FieldDrop = append(j.FieldDrop, o.FieldDropOld...)
if len(o.FieldExclude) > 0 {
j.FieldExclude = append(j.FieldExclude, o.FieldExclude...)
}
if len(o.TagPassFilters) > 0 {
j.TagPassFilters = make(map[string][]string, len(o.TagPassFilters))

View File

@ -2,4 +2,4 @@
url = "udp://localhost:5555"
separator = "."
namepass = ["foo"]
fieldpass = ["motor_*"]
fieldinclude = ["motor_*"]

View File

@ -31,7 +31,7 @@ func (tf *TagFilter) Compile() error {
return nil
}
// Filter containing drop/pass and tagdrop/tagpass rules
// Filter containing drop/pass and include/exclude rules
type Filter struct {
NameDrop []string
NameDropSeparators string
@ -40,10 +40,10 @@ type Filter struct {
NamePassSeparators string
namePassFilter filter.Filter
FieldDrop []string
fieldDropFilter filter.Filter
FieldPass []string
fieldPassFilter filter.Filter
FieldExclude []string
fieldExcludeFilter filter.Filter
FieldInclude []string
fieldIncludeFilter filter.Filter
TagDropFilters []TagFilter
TagPassFilters []TagFilter
@ -69,7 +69,7 @@ func (f *Filter) Compile() error {
f.selectActive = f.selectActive || len(f.TagPassFilters) > 0 || len(f.TagDropFilters) > 0
f.selectActive = f.selectActive || f.MetricPass != ""
f.modifyActive = len(f.FieldPass) > 0 || len(f.FieldDrop) > 0
f.modifyActive = len(f.FieldInclude) > 0 || len(f.FieldExclude) > 0
f.modifyActive = f.modifyActive || len(f.TagInclude) > 0 || len(f.TagExclude) > 0
f.isActive = f.selectActive || f.modifyActive
@ -103,13 +103,13 @@ func (f *Filter) Compile() error {
if f.modifyActive {
var err error
f.fieldDropFilter, err = filter.Compile(f.FieldDrop)
f.fieldExcludeFilter, err = filter.Compile(f.FieldExclude)
if err != nil {
return fmt.Errorf("error compiling 'fielddrop', %w", err)
return fmt.Errorf("error compiling 'fieldexclude', %w", err)
}
f.fieldPassFilter, err = filter.Compile(f.FieldPass)
f.fieldIncludeFilter, err = filter.Compile(f.FieldInclude)
if err != nil {
return fmt.Errorf("error compiling 'fieldpass', %w", err)
return fmt.Errorf("error compiling 'fieldinclude', %w", err)
}
f.tagExcludeFilter, err = filter.Compile(f.TagExclude)
@ -161,9 +161,9 @@ func (f *Filter) Select(metric telegraf.Metric) (bool, error) {
}
// Modify removes any tags and fields from the metric according to the
// fieldpass/fielddrop and taginclude/tagexclude filters.
// fieldinclude/fieldexclude and taginclude/tagexclude filters.
func (f *Filter) Modify(metric telegraf.Metric) {
if !f.isActive {
if !f.modifyActive {
return
}
@ -198,23 +198,17 @@ func (f *Filter) shouldNamePass(key string) bool {
return true
}
// shouldFieldPass returns true if the metric should pass, false if it should drop
// based on the drop/pass filter parameters
func (f *Filter) shouldFieldPass(key string) bool {
return ShouldPassFilters(f.fieldPassFilter, f.fieldDropFilter, key)
}
// shouldTagsPass returns true if the metric should pass, false if it should drop
// based on the tagdrop/tagpass filter parameters
func (f *Filter) shouldTagsPass(tags []*telegraf.Tag) bool {
return ShouldTagsPass(f.TagPassFilters, f.TagDropFilters, tags)
}
// filterFields removes fields according to fieldpass/fielddrop.
// filterFields removes fields according to fieldinclude/fieldexclude.
func (f *Filter) filterFields(metric telegraf.Metric) {
filterKeys := []string{}
for _, field := range metric.FieldList() {
if !f.shouldFieldPass(field.Key) {
if !ShouldPassFilters(f.fieldIncludeFilter, f.fieldExcludeFilter, field.Key) {
filterKeys = append(filterKeys, field.Key)
}
}
@ -227,24 +221,12 @@ func (f *Filter) filterFields(metric telegraf.Metric) {
// filterTags removes tags according to taginclude/tagexclude.
func (f *Filter) filterTags(metric telegraf.Metric) {
filterKeys := []string{}
if f.tagIncludeFilter != nil {
for _, tag := range metric.TagList() {
if !f.tagIncludeFilter.Match(tag.Key) {
filterKeys = append(filterKeys, tag.Key)
}
for _, tag := range metric.TagList() {
if !ShouldPassFilters(f.tagIncludeFilter, f.tagExcludeFilter, tag.Key) {
filterKeys = append(filterKeys, tag.Key)
}
}
for _, key := range filterKeys {
metric.RemoveTag(key)
}
if f.tagExcludeFilter != nil {
for _, tag := range metric.TagList() {
if f.tagExcludeFilter.Match(tag.Key) {
filterKeys = append(filterKeys, tag.Key)
}
}
}
for _, key := range filterKeys {
metric.RemoveTag(key)
}

View File

@ -50,7 +50,7 @@ func TestFilter_ApplyTagsDontPass(t *testing.T) {
func TestFilter_ApplyDeleteFields(t *testing.T) {
f := Filter{
FieldDrop: []string{"value"},
FieldExclude: []string{"value"},
}
require.NoError(t, f.Compile())
require.NoError(t, f.Compile())
@ -72,7 +72,7 @@ func TestFilter_ApplyDeleteFields(t *testing.T) {
func TestFilter_ApplyDeleteAllFields(t *testing.T) {
f := Filter{
FieldDrop: []string{"value*"},
FieldExclude: []string{"value*"},
}
require.NoError(t, f.Compile())
require.NoError(t, f.Compile())
@ -106,7 +106,7 @@ func TestFilter_Empty(t *testing.T) {
}
for _, measurement := range measurements {
if !f.shouldFieldPass(measurement) {
if !f.shouldNamePass(measurement) {
t.Errorf("Expected measurement %s to pass", measurement)
}
}
@ -252,9 +252,9 @@ func TestFilter_NameDrop_WithSeparator(t *testing.T) {
}
}
func TestFilter_FieldPass(t *testing.T) {
func TestFilter_FieldInclude(t *testing.T) {
f := Filter{
FieldPass: []string{"foo*", "cpu_usage_idle"},
FieldInclude: []string{"foo*", "cpu_usage_idle"},
}
require.NoError(t, f.Compile())
@ -273,22 +273,18 @@ func TestFilter_FieldPass(t *testing.T) {
"cpu_usage_busy",
}
for _, measurement := range passes {
if !f.shouldFieldPass(measurement) {
t.Errorf("Expected measurement %s to pass", measurement)
}
for _, field := range passes {
require.Truef(t, ShouldPassFilters(f.fieldIncludeFilter, f.fieldExcludeFilter, field), "Expected field %s to pass", field)
}
for _, measurement := range drops {
if f.shouldFieldPass(measurement) {
t.Errorf("Expected measurement %s to drop", measurement)
}
for _, field := range drops {
require.Falsef(t, ShouldPassFilters(f.fieldIncludeFilter, f.fieldExcludeFilter, field), "Expected field %s to drop", field)
}
}
func TestFilter_FieldDrop(t *testing.T) {
func TestFilter_FieldExclude(t *testing.T) {
f := Filter{
FieldDrop: []string{"foo*", "cpu_usage_idle"},
FieldExclude: []string{"foo*", "cpu_usage_idle"},
}
require.NoError(t, f.Compile())
@ -307,16 +303,12 @@ func TestFilter_FieldDrop(t *testing.T) {
"cpu_usage_busy",
}
for _, measurement := range passes {
if !f.shouldFieldPass(measurement) {
t.Errorf("Expected measurement %s to pass", measurement)
}
for _, field := range passes {
require.Truef(t, ShouldPassFilters(f.fieldIncludeFilter, f.fieldExcludeFilter, field), "Expected field %s to pass", field)
}
for _, measurement := range drops {
if f.shouldFieldPass(measurement) {
t.Errorf("Expected measurement %s to drop", measurement)
}
for _, field := range drops {
require.Falsef(t, ShouldPassFilters(f.fieldIncludeFilter, f.fieldExcludeFilter, field), "Expected field %s to drop", field)
}
}
@ -491,22 +483,22 @@ func TestFilter_FilterNamePassAndDrop(t *testing.T) {
}
}
// TestFilter_FilterFieldPassAndDrop used for check case when
// TestFilter_FieldIncludeAndExclude used for check case when
// both parameters were defined
// see: https://github.com/influxdata/telegraf/issues/2860
func TestFilter_FilterFieldPassAndDrop(t *testing.T) {
func TestFilter_FieldIncludeAndExclude(t *testing.T) {
inputData := []string{"field1", "field2", "field3", "field4"}
expectedResult := []bool{false, true, false, false}
f := Filter{
FieldPass: []string{"field1", "field2"},
FieldDrop: []string{"field1", "field3"},
FieldInclude: []string{"field1", "field2"},
FieldExclude: []string{"field1", "field3"},
}
require.NoError(t, f.Compile())
for i, field := range inputData {
require.Equal(t, f.shouldFieldPass(field), expectedResult[i])
require.Equal(t, ShouldPassFilters(f.fieldIncludeFilter, f.fieldExcludeFilter, field), expectedResult[i])
}
}

View File

@ -217,7 +217,7 @@ func TestAddDoesNotModifyMetric(t *testing.T) {
ra := NewRunningAggregator(&TestAggregator{}, &AggregatorConfig{
Name: "TestRunningAggregator",
Filter: Filter{
FieldPass: []string{"a"},
FieldInclude: []string{"a"},
},
DropOriginal: true,
})

View File

@ -3,7 +3,7 @@
name_prefix = "kafka_"
## If you intend to use "non_negative_derivative(1s)" with "*.count" fields, you don't need precalculated fields.
# fielddrop = [
# fieldexclude = [
# "*.EventType",
# "*.FifteenMinuteRate",
# "*.FiveMinuteRate",

View File

@ -108,7 +108,7 @@ See the [CONFIGURATION.md][CONFIGURATION.md] for more details.
# insecure_skip_verify = false
## Uncomment to remove deprecated metrics.
# fielddrop = ["terminated_reason"]
# fieldexclude = ["terminated_reason"]
```
## Kubernetes Permissions

View File

@ -60,4 +60,4 @@
# insecure_skip_verify = false
## Uncomment to remove deprecated metrics.
# fielddrop = ["terminated_reason"]
# fieldexclude = ["terminated_reason"]

View File

@ -39,7 +39,7 @@ See the [CONFIGURATION.md][CONFIGURATION.md] for more details.
# expect = "ssh"
## Uncomment to remove deprecated fields; recommended for new deploys
# fielddrop = ["result_type", "string_found"]
# fieldexclude = ["result_type", "string_found"]
```
## Metrics

View File

@ -22,4 +22,4 @@
# expect = "ssh"
## Uncomment to remove deprecated fields; recommended for new deploys
# fielddrop = ["result_type", "string_found"]
# fieldexclude = ["result_type", "string_found"]

View File

@ -154,8 +154,8 @@ plugin will deterministically dropped any dimensions that exceed the 10
dimension limit.
To convert only a subset of string-typed fields as dimensions, enable
`strings_as_dimensions` and use the [`fieldpass` or `fielddrop`
processors][conf-processor] to limit the string-typed fields that are sent to
`strings_as_dimensions` and use the [`fieldinclude` or `fieldexclude`
modifiers][conf-modifiers] to limit the string-typed fields that are sent to
the plugin.
[conf-processor]: https://docs.influxdata.com/telegraf/v1.7/administration/configuration/#processor-configuration
[conf-modifiers]: ../../../docs/CONFIGURATION.md#modifiers

View File

@ -3,7 +3,7 @@
#[[inputs.diskio]]
# alias = "diskio1s"
# interval = "1s"
# fieldpass = ["reads", "writes"]
# fieldinclude = ["reads", "writes"]
# name_suffix = "1s"
#
# Example Input: