diff --git a/config/config.go b/config/config.go
index 4fd65139e..1071ffb45 100644
--- a/config/config.go
+++ b/config/config.go
@@ -2,15 +2,14 @@ package config
 import (
 	"bytes"
-	"errors"
 	"fmt"
 	"io/ioutil"
 	"log"
-	"math"
 	"net/http"
 	"net/url"
 	"os"
 	"path/filepath"
+	"reflect"
 	"regexp"
 	"runtime"
 	"sort"
@@ -56,6 +55,10 @@ var (
 // will be logging to, as well as all the plugins that the user has
 // specified
 type Config struct {
+	toml         *toml.Config
+	errs         []error // config load errors.
+	UnusedFields map[string]bool
+
 	Tags          map[string]string
 	InputFilters  []string
 	OutputFilters []string
@@ -69,8 +72,13 @@ type Config struct {
 	AggProcessors models.RunningProcessors
 }
 
+// NewConfig creates a new struct to hold the Telegraf config.
+// For historical reasons, it holds the actual instances of the running plugins
+// once the configuration is parsed.
 func NewConfig() *Config {
 	c := &Config{
+		UnusedFields: map[string]bool{},
+
 		// Agent defaults:
 		Agent: &AgentConfig{
 			Interval: internal.Duration{Duration: 10 * time.Second},
@@ -88,9 +96,18 @@ func NewConfig() *Config {
 		InputFilters:  make([]string, 0),
 		OutputFilters: make([]string, 0),
 	}
+
+	tomlCfg := &toml.Config{
+		NormFieldName: toml.DefaultConfig.NormFieldName,
+		FieldToKey:    toml.DefaultConfig.FieldToKey,
+		MissingField:  c.missingTomlField,
+	}
+	c.toml = tomlCfg
+
 	return c
 }
 
+// AgentConfig defines configuration that will be used by the Telegraf agent
 type AgentConfig struct {
 	// Interval at which to gather information
 	Interval internal.Duration
@@ -623,7 +640,7 @@ func PrintInputConfig(name string) error {
 	if creator, ok := inputs.Inputs[name]; ok {
 		printConfig(name, creator(), "inputs", false)
 	} else {
-		return errors.New(fmt.Sprintf("Input %s not found", name))
+		return fmt.Errorf("Input %s not found", name)
 	}
 	return nil
 }
@@ -633,11 +650,12 @@ func PrintOutputConfig(name string) error {
 	if creator, ok := outputs.Outputs[name]; ok {
 		printConfig(name, creator(), "outputs", false)
 	} else {
-		return errors.New(fmt.Sprintf("Output %s not found", name))
+		return fmt.Errorf("Output %s not found", name)
 	}
 	return nil
 }
 
+// LoadDirectory loads all toml config files found in the specified path, recursively.
func (c *Config) LoadDirectory(path string) error { walkfn := func(thispath string, info os.FileInfo, _ error) error { if info == nil { @@ -727,8 +745,8 @@ func (c *Config) LoadConfigData(data []byte) error { if !ok { return fmt.Errorf("invalid configuration, bad table name %q", tableName) } - if err = toml.UnmarshalTable(subTable, c.Tags); err != nil { - return fmt.Errorf("error parsing table name %q: %w", tableName, err) + if err = c.toml.UnmarshalTable(subTable, c.Tags); err != nil { + return fmt.Errorf("error parsing table name %q: %s", tableName, err) } } } @@ -739,8 +757,8 @@ func (c *Config) LoadConfigData(data []byte) error { if !ok { return fmt.Errorf("invalid configuration, error parsing agent table") } - if err = toml.UnmarshalTable(subTable, c.Agent); err != nil { - return fmt.Errorf("error parsing agent table: %w", err) + if err = c.toml.UnmarshalTable(subTable, c.Agent); err != nil { + return fmt.Errorf("error parsing [agent]: %w", err) } } @@ -757,6 +775,10 @@ func (c *Config) LoadConfigData(data []byte) error { c.Tags["host"] = c.Agent.Hostname } + if len(c.UnusedFields) > 0 { + return fmt.Errorf("line %d: configuration specified the fields %q, but they weren't used", tbl.Line, keys(c.UnusedFields)) + } + // Parse all the rest of the plugins: for name, val := range tbl.Fields { subTable, ok := val.(*ast.Table) @@ -772,18 +794,21 @@ func (c *Config) LoadConfigData(data []byte) error { // legacy [outputs.influxdb] support case *ast.Table: if err = c.addOutput(pluginName, pluginSubTable); err != nil { - return fmt.Errorf("Error parsing %s, %s", pluginName, err) + return fmt.Errorf("error parsing %s, %w", pluginName, err) } case []*ast.Table: for _, t := range pluginSubTable { if err = c.addOutput(pluginName, t); err != nil { - return fmt.Errorf("Error parsing %s array, %s", pluginName, err) + return fmt.Errorf("error parsing %s array, %w", pluginName, err) } } default: - return fmt.Errorf("Unsupported config format: %s", + return fmt.Errorf("unsupported config format: %s", pluginName) } + if len(c.UnusedFields) > 0 { + return fmt.Errorf("plugin %s.%s: line %d: configuration specified the fields %q, but they weren't used", name, pluginName, subTable.Line, keys(c.UnusedFields)) + } } case "inputs", "plugins": for pluginName, pluginVal := range subTable.Fields { @@ -791,18 +816,21 @@ func (c *Config) LoadConfigData(data []byte) error { // legacy [inputs.cpu] support case *ast.Table: if err = c.addInput(pluginName, pluginSubTable); err != nil { - return fmt.Errorf("Error parsing %s, %s", pluginName, err) + return fmt.Errorf("error parsing %s, %w", pluginName, err) } case []*ast.Table: for _, t := range pluginSubTable { if err = c.addInput(pluginName, t); err != nil { - return fmt.Errorf("Error parsing %s, %s", pluginName, err) + return fmt.Errorf("error parsing %s, %w", pluginName, err) } } default: return fmt.Errorf("Unsupported config format: %s", pluginName) } + if len(c.UnusedFields) > 0 { + return fmt.Errorf("plugin %s.%s: line %d: configuration specified the fields %q, but they weren't used", name, pluginName, subTable.Line, keys(c.UnusedFields)) + } } case "processors": for pluginName, pluginVal := range subTable.Fields { @@ -810,13 +838,16 @@ func (c *Config) LoadConfigData(data []byte) error { case []*ast.Table: for _, t := range pluginSubTable { if err = c.addProcessor(pluginName, t); err != nil { - return fmt.Errorf("Error parsing %s, %s", pluginName, err) + return fmt.Errorf("error parsing %s, %w", pluginName, err) } } default: return fmt.Errorf("Unsupported config 
format: %s", pluginName) } + if len(c.UnusedFields) > 0 { + return fmt.Errorf("plugin %s.%s: line %d: configuration specified the fields %q, but they weren't used", name, pluginName, subTable.Line, keys(c.UnusedFields)) + } } case "aggregators": for pluginName, pluginVal := range subTable.Fields { @@ -831,6 +862,9 @@ func (c *Config) LoadConfigData(data []byte) error { return fmt.Errorf("Unsupported config format: %s", pluginName) } + if len(c.UnusedFields) > 0 { + return fmt.Errorf("plugin %s.%s: line %d: configuration specified the fields %q, but they weren't used", name, pluginName, subTable.Line, keys(c.UnusedFields)) + } } // Assume it's an input input for legacy config file support if no other // identifiers are present @@ -912,19 +946,19 @@ func parseConfig(contents []byte) (*ast.Table, error) { continue } - var env_var []byte + var envVar []byte if parameter[1] != nil { - env_var = parameter[1] + envVar = parameter[1] } else if parameter[2] != nil { - env_var = parameter[2] + envVar = parameter[2] } else { continue } - env_val, ok := os.LookupEnv(strings.TrimPrefix(string(env_var), "$")) + envVal, ok := os.LookupEnv(strings.TrimPrefix(string(envVar), "$")) if ok { - env_val = escapeEnv(env_val) - contents = bytes.Replace(contents, parameter[0], []byte(env_val), 1) + envVal = escapeEnv(envVal) + contents = bytes.Replace(contents, parameter[0], []byte(envVal), 1) } } @@ -938,12 +972,12 @@ func (c *Config) addAggregator(name string, table *ast.Table) error { } aggregator := creator() - conf, err := buildAggregator(name, table) + conf, err := c.buildAggregator(name, table) if err != nil { return err } - if err := toml.UnmarshalTable(table, aggregator); err != nil { + if err := c.toml.UnmarshalTable(table, aggregator); err != nil { return err } @@ -957,7 +991,7 @@ func (c *Config) addProcessor(name string, table *ast.Table) error { return fmt.Errorf("Undefined but requested processor: %s", name) } - processorConfig, err := buildProcessor(name, table) + processorConfig, err := c.buildProcessor(name, table) if err != nil { return err } @@ -987,11 +1021,11 @@ func (c *Config) newRunningProcessor( processor := creator() if p, ok := processor.(unwrappable); ok { - if err := toml.UnmarshalTable(table, p.Unwrap()); err != nil { + if err := c.toml.UnmarshalTable(table, p.Unwrap()); err != nil { return nil, err } } else { - if err := toml.UnmarshalTable(table, processor); err != nil { + if err := c.toml.UnmarshalTable(table, processor); err != nil { return nil, err } } @@ -1014,19 +1048,19 @@ func (c *Config) addOutput(name string, table *ast.Table) error { // arbitrary types of output, so build the serializer and set it. switch t := output.(type) { case serializers.SerializerOutput: - serializer, err := buildSerializer(name, table) + serializer, err := c.buildSerializer(name, table) if err != nil { return err } t.SetSerializer(serializer) } - outputConfig, err := buildOutput(name, table) + outputConfig, err := c.buildOutput(name, table) if err != nil { return err } - if err := toml.UnmarshalTable(table, output); err != nil { + if err := c.toml.UnmarshalTable(table, output); err != nil { return err } @@ -1054,7 +1088,7 @@ func (c *Config) addInput(name string, table *ast.Table) error { // If the input has a SetParser function, then this means it can accept // arbitrary types of input, so build the parser and set it. 
if t, ok := input.(parsers.ParserInput); ok { - parser, err := buildParser(name, table) + parser, err := c.buildParser(name, table) if err != nil { return err } @@ -1062,7 +1096,7 @@ func (c *Config) addInput(name string, table *ast.Table) error { } if t, ok := input.(parsers.ParserFuncInput); ok { - config, err := getParserConfig(name, table) + config, err := c.getParserConfig(name, table) if err != nil { return err } @@ -1071,12 +1105,12 @@ func (c *Config) addInput(name string, table *ast.Table) error { }) } - pluginConfig, err := buildInput(name, table) + pluginConfig, err := c.buildInput(name, table) if err != nil { return err } - if err := toml.UnmarshalTable(table, input); err != nil { + if err := c.toml.UnmarshalTable(table, input); err != nil { return err } @@ -1089,7 +1123,7 @@ func (c *Config) addInput(name string, table *ast.Table) error { // buildAggregator parses Aggregator specific items from the ast.Table, // builds the filter and returns a // models.AggregatorConfig to be inserted into models.RunningAggregator -func buildAggregator(name string, tbl *ast.Table) (*models.AggregatorConfig, error) { +func (c *Config) buildAggregator(name string, tbl *ast.Table) (*models.AggregatorConfig, error) { conf := &models.AggregatorConfig{ Name: name, Delay: time.Millisecond * 100, @@ -1097,79 +1131,30 @@ func buildAggregator(name string, tbl *ast.Table) (*models.AggregatorConfig, err Grace: time.Second * 0, } - if err := getConfigDuration(tbl, "period", &conf.Period); err != nil { - return nil, err - } - - if err := getConfigDuration(tbl, "delay", &conf.Delay); err != nil { - return nil, err - } - - if err := getConfigDuration(tbl, "grace", &conf.Grace); err != nil { - return nil, err - } - - if node, ok := tbl.Fields["drop_original"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Boolean); ok { - var err error - conf.DropOriginal, err = strconv.ParseBool(b.Value) - if err != nil { - return nil, fmt.Errorf("error parsing boolean value for %s: %s", name, err) - } - } - } - } - - if node, ok := tbl.Fields["name_prefix"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - conf.MeasurementPrefix = str.Value - } - } - } - - if node, ok := tbl.Fields["name_suffix"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - conf.MeasurementSuffix = str.Value - } - } - } - - if node, ok := tbl.Fields["name_override"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - conf.NameOverride = str.Value - } - } - } - - if node, ok := tbl.Fields["alias"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - conf.Alias = str.Value - } - } - } + c.getFieldDuration(tbl, "period", &conf.Period) + c.getFieldDuration(tbl, "delay", &conf.Delay) + c.getFieldDuration(tbl, "grace", &conf.Grace) + c.getFieldBool(tbl, "drop_original", &conf.DropOriginal) + c.getFieldString(tbl, "name_prefix", &conf.MeasurementPrefix) + c.getFieldString(tbl, "name_suffix", &conf.MeasurementSuffix) + c.getFieldString(tbl, "name_override", &conf.NameOverride) + c.getFieldString(tbl, "alias", &conf.Alias) conf.Tags = make(map[string]string) if node, ok := tbl.Fields["tags"]; ok { if subtbl, ok := node.(*ast.Table); ok { - if err := toml.UnmarshalTable(subtbl, conf.Tags); err != nil { + if err := c.toml.UnmarshalTable(subtbl, conf.Tags); err != nil { return nil, fmt.Errorf("could not parse tags for input %s", name) } } } - delete(tbl.Fields, 
"drop_original") - delete(tbl.Fields, "name_prefix") - delete(tbl.Fields, "name_suffix") - delete(tbl.Fields, "name_override") - delete(tbl.Fields, "alias") - delete(tbl.Fields, "tags") + if c.hasErrs() { + return nil, c.firstErr() + } + var err error - conf.Filter, err = buildFilter(tbl) + conf.Filter, err = c.buildFilter(tbl) if err != nil { return conf, err } @@ -1179,33 +1164,18 @@ func buildAggregator(name string, tbl *ast.Table) (*models.AggregatorConfig, err // buildProcessor parses Processor specific items from the ast.Table, // builds the filter and returns a // models.ProcessorConfig to be inserted into models.RunningProcessor -func buildProcessor(name string, tbl *ast.Table) (*models.ProcessorConfig, error) { +func (c *Config) buildProcessor(name string, tbl *ast.Table) (*models.ProcessorConfig, error) { conf := &models.ProcessorConfig{Name: name} - if node, ok := tbl.Fields["order"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Integer); ok { - var err error - conf.Order, err = strconv.ParseInt(b.Value, 10, 64) - if err != nil { - return nil, fmt.Errorf("error parsing int value for %s: %s", name, err) - } - } - } + c.getFieldInt64(tbl, "order", &conf.Order) + c.getFieldString(tbl, "alias", &conf.Alias) + + if c.hasErrs() { + return nil, c.firstErr() } - if node, ok := tbl.Fields["alias"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - conf.Alias = str.Value - } - } - } - - delete(tbl.Fields, "alias") - delete(tbl.Fields, "order") var err error - conf.Filter, err = buildFilter(tbl) + conf.Filter, err = c.buildFilter(tbl) if err != nil { return conf, err } @@ -1216,205 +1186,63 @@ func buildProcessor(name string, tbl *ast.Table) (*models.ProcessorConfig, error // (tagpass/tagdrop/namepass/namedrop/fieldpass/fielddrop) to // be inserted into the models.OutputConfig/models.InputConfig // to be used for glob filtering on tags and measurements -func buildFilter(tbl *ast.Table) (models.Filter, error) { +func (c *Config) buildFilter(tbl *ast.Table) (models.Filter, error) { f := models.Filter{} - if node, ok := tbl.Fields["namepass"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - f.NamePass = append(f.NamePass, str.Value) - } - } - } - } + c.getFieldStringSlice(tbl, "namepass", &f.NamePass) + c.getFieldStringSlice(tbl, "namedrop", &f.NameDrop) + + c.getFieldStringSlice(tbl, "pass", &f.FieldPass) + c.getFieldStringSlice(tbl, "fieldpass", &f.FieldPass) + + c.getFieldStringSlice(tbl, "drop", &f.FieldDrop) + c.getFieldStringSlice(tbl, "fielddrop", &f.FieldDrop) + + c.getFieldTagFilter(tbl, "tagpass", &f.TagPass) + c.getFieldTagFilter(tbl, "tagdrop", &f.TagDrop) + + c.getFieldStringSlice(tbl, "tagexclude", &f.TagExclude) + c.getFieldStringSlice(tbl, "taginclude", &f.TagInclude) + + if c.hasErrs() { + return f, c.firstErr() } - if node, ok := tbl.Fields["namedrop"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - f.NameDrop = append(f.NameDrop, str.Value) - } - } - } - } - } - - fields := []string{"pass", "fieldpass"} - for _, field := range fields { - if node, ok := tbl.Fields[field]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - f.FieldPass = 
append(f.FieldPass, str.Value) - } - } - } - } - } - } - - fields = []string{"drop", "fielddrop"} - for _, field := range fields { - if node, ok := tbl.Fields[field]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - f.FieldDrop = append(f.FieldDrop, str.Value) - } - } - } - } - } - } - - if node, ok := tbl.Fields["tagpass"]; ok { - if subtbl, ok := node.(*ast.Table); ok { - for name, val := range subtbl.Fields { - if kv, ok := val.(*ast.KeyValue); ok { - tagfilter := &models.TagFilter{Name: name} - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - tagfilter.Filter = append(tagfilter.Filter, str.Value) - } - } - } - f.TagPass = append(f.TagPass, *tagfilter) - } - } - } - } - - if node, ok := tbl.Fields["tagdrop"]; ok { - if subtbl, ok := node.(*ast.Table); ok { - for name, val := range subtbl.Fields { - if kv, ok := val.(*ast.KeyValue); ok { - tagfilter := &models.TagFilter{Name: name} - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - tagfilter.Filter = append(tagfilter.Filter, str.Value) - } - } - } - f.TagDrop = append(f.TagDrop, *tagfilter) - } - } - } - } - - if node, ok := tbl.Fields["tagexclude"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - f.TagExclude = append(f.TagExclude, str.Value) - } - } - } - } - } - - if node, ok := tbl.Fields["taginclude"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - f.TagInclude = append(f.TagInclude, str.Value) - } - } - } - } - } if err := f.Compile(); err != nil { return f, err } - delete(tbl.Fields, "namedrop") - delete(tbl.Fields, "namepass") - delete(tbl.Fields, "fielddrop") - delete(tbl.Fields, "fieldpass") - delete(tbl.Fields, "drop") - delete(tbl.Fields, "pass") - delete(tbl.Fields, "tagdrop") - delete(tbl.Fields, "tagpass") - delete(tbl.Fields, "tagexclude") - delete(tbl.Fields, "taginclude") return f, nil } // buildInput parses input specific items from the ast.Table, // builds the filter and returns a // models.InputConfig to be inserted into models.RunningInput -func buildInput(name string, tbl *ast.Table) (*models.InputConfig, error) { +func (c *Config) buildInput(name string, tbl *ast.Table) (*models.InputConfig, error) { cp := &models.InputConfig{Name: name} - - if err := getConfigDuration(tbl, "interval", &cp.Interval); err != nil { - return nil, err - } - - if err := getConfigDuration(tbl, "precision", &cp.Precision); err != nil { - return nil, err - } - - if err := getConfigDuration(tbl, "collection_jitter", &cp.CollectionJitter); err != nil { - return nil, err - } - - if node, ok := tbl.Fields["name_prefix"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - cp.MeasurementPrefix = str.Value - } - } - } - - if node, ok := tbl.Fields["name_suffix"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - cp.MeasurementSuffix = str.Value - } - } - } - - if node, ok := tbl.Fields["name_override"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - cp.NameOverride = str.Value - } - } - } - - if node, ok := 
tbl.Fields["alias"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - cp.Alias = str.Value - } - } - } + c.getFieldDuration(tbl, "interval", &cp.Interval) + c.getFieldDuration(tbl, "precision", &cp.Precision) + c.getFieldDuration(tbl, "collection_jitter", &cp.CollectionJitter) + c.getFieldString(tbl, "name_prefix", &cp.MeasurementPrefix) + c.getFieldString(tbl, "name_suffix", &cp.MeasurementSuffix) + c.getFieldString(tbl, "name_override", &cp.NameOverride) + c.getFieldString(tbl, "alias", &cp.Alias) cp.Tags = make(map[string]string) if node, ok := tbl.Fields["tags"]; ok { if subtbl, ok := node.(*ast.Table); ok { - if err := toml.UnmarshalTable(subtbl, cp.Tags); err != nil { - return nil, fmt.Errorf("could not parse tags for input %s\n", name) + if err := c.toml.UnmarshalTable(subtbl, cp.Tags); err != nil { + return nil, fmt.Errorf("could not parse tags for input %s", name) } } } - delete(tbl.Fields, "name_prefix") - delete(tbl.Fields, "name_suffix") - delete(tbl.Fields, "name_override") - delete(tbl.Fields, "alias") - delete(tbl.Fields, "tags") + if c.hasErrs() { + return nil, c.firstErr() + } + var err error - cp.Filter, err = buildFilter(tbl) + cp.Filter, err = c.buildFilter(tbl) if err != nil { return cp, err } @@ -1424,707 +1252,134 @@ func buildInput(name string, tbl *ast.Table) (*models.InputConfig, error) { // buildParser grabs the necessary entries from the ast.Table for creating // a parsers.Parser object, and creates it, which can then be added onto // an Input object. -func buildParser(name string, tbl *ast.Table) (parsers.Parser, error) { - config, err := getParserConfig(name, tbl) +func (c *Config) buildParser(name string, tbl *ast.Table) (parsers.Parser, error) { + config, err := c.getParserConfig(name, tbl) if err != nil { return nil, err } return parsers.NewParser(config) } -func getParserConfig(name string, tbl *ast.Table) (*parsers.Config, error) { - c := &parsers.Config{ +func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config, error) { + pc := &parsers.Config{ JSONStrict: true, } - if node, ok := tbl.Fields["data_format"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.DataFormat = str.Value - } - } - } + c.getFieldString(tbl, "data_format", &pc.DataFormat) // Legacy support, exec plugin originally parsed JSON by default. 
- if name == "exec" && c.DataFormat == "" { - c.DataFormat = "json" - } else if c.DataFormat == "" { - c.DataFormat = "influx" + if name == "exec" && pc.DataFormat == "" { + pc.DataFormat = "json" + } else if pc.DataFormat == "" { + pc.DataFormat = "influx" } - if node, ok := tbl.Fields["separator"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.Separator = str.Value - } - } - } + c.getFieldString(tbl, "separator", &pc.Separator) - if node, ok := tbl.Fields["templates"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.Templates = append(c.Templates, str.Value) - } - } - } - } - } + c.getFieldStringSlice(tbl, "templates", &pc.Templates) + c.getFieldStringSlice(tbl, "tag_keys", &pc.TagKeys) + c.getFieldStringSlice(tbl, "json_string_fields", &pc.JSONStringFields) + c.getFieldString(tbl, "json_name_key", &pc.JSONNameKey) + c.getFieldString(tbl, "json_query", &pc.JSONQuery) + c.getFieldString(tbl, "json_time_key", &pc.JSONTimeKey) + c.getFieldString(tbl, "json_time_format", &pc.JSONTimeFormat) + c.getFieldString(tbl, "json_timezone", &pc.JSONTimezone) + c.getFieldBool(tbl, "json_strict", &pc.JSONStrict) + c.getFieldString(tbl, "data_type", &pc.DataType) + c.getFieldString(tbl, "collectd_auth_file", &pc.CollectdAuthFile) + c.getFieldString(tbl, "collectd_security_level", &pc.CollectdSecurityLevel) + c.getFieldString(tbl, "collectd_parse_multivalue", &pc.CollectdSplit) - if node, ok := tbl.Fields["tag_keys"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.TagKeys = append(c.TagKeys, str.Value) - } - } - } - } - } + c.getFieldStringSlice(tbl, "collectd_typesdb", &pc.CollectdTypesDB) - if node, ok := tbl.Fields["json_string_fields"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.JSONStringFields = append(c.JSONStringFields, str.Value) - } - } - } - } - } - - if node, ok := tbl.Fields["json_name_key"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.JSONNameKey = str.Value - } - } - } - - if node, ok := tbl.Fields["json_query"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.JSONQuery = str.Value - } - } - } - - if node, ok := tbl.Fields["json_time_key"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.JSONTimeKey = str.Value - } - } - } - - if node, ok := tbl.Fields["json_time_format"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.JSONTimeFormat = str.Value - } - } - } - - if node, ok := tbl.Fields["json_timezone"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.JSONTimezone = str.Value - } - } - } - - if node, ok := tbl.Fields["json_strict"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Boolean); ok { - var err error - c.JSONStrict, err = b.Boolean() - if err != nil { - return nil, err - } - } - } - } - - if node, ok := tbl.Fields["data_type"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.DataType = str.Value - } - } - } - - if node, ok := 
tbl.Fields["collectd_auth_file"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.CollectdAuthFile = str.Value - } - } - } - - if node, ok := tbl.Fields["collectd_security_level"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.CollectdSecurityLevel = str.Value - } - } - } - - if node, ok := tbl.Fields["collectd_parse_multivalue"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.CollectdSplit = str.Value - } - } - } - - if node, ok := tbl.Fields["collectd_typesdb"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.CollectdTypesDB = append(c.CollectdTypesDB, str.Value) - } - } - } - } - } - - if node, ok := tbl.Fields["dropwizard_metric_registry_path"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.DropwizardMetricRegistryPath = str.Value - } - } - } - if node, ok := tbl.Fields["dropwizard_time_path"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.DropwizardTimePath = str.Value - } - } - } - if node, ok := tbl.Fields["dropwizard_time_format"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.DropwizardTimeFormat = str.Value - } - } - } - if node, ok := tbl.Fields["dropwizard_tags_path"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.DropwizardTagsPath = str.Value - } - } - } - c.DropwizardTagPathsMap = make(map[string]string) - if node, ok := tbl.Fields["dropwizard_tag_paths"]; ok { - if subtbl, ok := node.(*ast.Table); ok { - for name, val := range subtbl.Fields { - if kv, ok := val.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.DropwizardTagPathsMap[name] = str.Value - } - } - } - } - } + c.getFieldString(tbl, "dropwizard_metric_registry_path", &pc.DropwizardMetricRegistryPath) + c.getFieldString(tbl, "dropwizard_time_path", &pc.DropwizardTimePath) + c.getFieldString(tbl, "dropwizard_time_format", &pc.DropwizardTimeFormat) + c.getFieldString(tbl, "dropwizard_tags_path", &pc.DropwizardTagsPath) + c.getFieldStringMap(tbl, "dropwizard_tag_paths", &pc.DropwizardTagPathsMap) //for grok data_format - if node, ok := tbl.Fields["grok_named_patterns"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.GrokNamedPatterns = append(c.GrokNamedPatterns, str.Value) - } - } - } - } - } - - if node, ok := tbl.Fields["grok_patterns"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.GrokPatterns = append(c.GrokPatterns, str.Value) - } - } - } - } - } - - if node, ok := tbl.Fields["grok_custom_patterns"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.GrokCustomPatterns = str.Value - } - } - } - - if node, ok := tbl.Fields["grok_custom_pattern_files"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.GrokCustomPatternFiles = append(c.GrokCustomPatternFiles, str.Value) - } - } - } - } - } - - if node, ok := 
tbl.Fields["grok_timezone"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.GrokTimezone = str.Value - } - } - } - - if node, ok := tbl.Fields["grok_unique_timestamp"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.GrokUniqueTimestamp = str.Value - } - } - } + c.getFieldStringSlice(tbl, "grok_named_patterns", &pc.GrokNamedPatterns) + c.getFieldStringSlice(tbl, "grok_patterns", &pc.GrokPatterns) + c.getFieldString(tbl, "grok_custom_patterns", &pc.GrokCustomPatterns) + c.getFieldStringSlice(tbl, "grok_custom_pattern_files", &pc.GrokCustomPatternFiles) + c.getFieldString(tbl, "grok_timezone", &pc.GrokTimezone) + c.getFieldString(tbl, "grok_unique_timestamp", &pc.GrokUniqueTimestamp) //for csv parser - if node, ok := tbl.Fields["csv_column_names"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.CSVColumnNames = append(c.CSVColumnNames, str.Value) - } - } - } - } + c.getFieldStringSlice(tbl, "csv_column_names", &pc.CSVColumnNames) + c.getFieldStringSlice(tbl, "csv_column_types", &pc.CSVColumnTypes) + c.getFieldStringSlice(tbl, "csv_tag_columns", &pc.CSVTagColumns) + c.getFieldString(tbl, "csv_timezone", &pc.CSVTimezone) + c.getFieldString(tbl, "csv_delimiter", &pc.CSVDelimiter) + c.getFieldString(tbl, "csv_comment", &pc.CSVComment) + c.getFieldString(tbl, "csv_measurement_column", &pc.CSVMeasurementColumn) + c.getFieldString(tbl, "csv_timestamp_column", &pc.CSVTimestampColumn) + c.getFieldString(tbl, "csv_timestamp_format", &pc.CSVTimestampFormat) + c.getFieldInt(tbl, "csv_header_row_count", &pc.CSVHeaderRowCount) + c.getFieldInt(tbl, "csv_skip_rows", &pc.CSVSkipRows) + c.getFieldInt(tbl, "csv_skip_columns", &pc.CSVSkipColumns) + c.getFieldBool(tbl, "csv_trim_space", &pc.CSVTrimSpace) + + c.getFieldStringSlice(tbl, "form_urlencoded_tag_keys", &pc.FormUrlencodedTagKeys) + + pc.MetricName = name + + if c.hasErrs() { + return nil, c.firstErr() } - if node, ok := tbl.Fields["csv_column_types"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.CSVColumnTypes = append(c.CSVColumnTypes, str.Value) - } - } - } - } - } - - if node, ok := tbl.Fields["csv_tag_columns"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.CSVTagColumns = append(c.CSVTagColumns, str.Value) - } - } - } - } - } - - if node, ok := tbl.Fields["csv_delimiter"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.CSVDelimiter = str.Value - } - } - } - - if node, ok := tbl.Fields["csv_comment"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.CSVComment = str.Value - } - } - } - - if node, ok := tbl.Fields["csv_measurement_column"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.CSVMeasurementColumn = str.Value - } - } - } - - if node, ok := tbl.Fields["csv_timestamp_column"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.CSVTimestampColumn = str.Value - } - } - } - - if node, ok := tbl.Fields["csv_timestamp_format"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := 
kv.Value.(*ast.String); ok { - c.CSVTimestampFormat = str.Value - } - } - } - - if node, ok := tbl.Fields["csv_timezone"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.CSVTimezone = str.Value - } - } - } - - if node, ok := tbl.Fields["csv_header_row_count"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if integer, ok := kv.Value.(*ast.Integer); ok { - v, err := integer.Int() - if err != nil { - return nil, err - } - c.CSVHeaderRowCount = int(v) - } - } - } - - if node, ok := tbl.Fields["csv_skip_rows"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if integer, ok := kv.Value.(*ast.Integer); ok { - v, err := integer.Int() - if err != nil { - return nil, err - } - c.CSVSkipRows = int(v) - } - } - } - - if node, ok := tbl.Fields["csv_skip_columns"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if integer, ok := kv.Value.(*ast.Integer); ok { - v, err := integer.Int() - if err != nil { - return nil, err - } - c.CSVSkipColumns = int(v) - } - } - } - - if node, ok := tbl.Fields["csv_trim_space"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.Boolean); ok { - //for config with no quotes - val, err := strconv.ParseBool(str.Value) - c.CSVTrimSpace = val - if err != nil { - return nil, fmt.Errorf("E! parsing to bool: %v", err) - } - } - } - } - - if node, ok := tbl.Fields["form_urlencoded_tag_keys"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.FormUrlencodedTagKeys = append(c.FormUrlencodedTagKeys, str.Value) - } - } - } - } - } - - c.MetricName = name - - delete(tbl.Fields, "data_format") - delete(tbl.Fields, "separator") - delete(tbl.Fields, "templates") - delete(tbl.Fields, "tag_keys") - delete(tbl.Fields, "json_name_key") - delete(tbl.Fields, "json_query") - delete(tbl.Fields, "json_string_fields") - delete(tbl.Fields, "json_time_format") - delete(tbl.Fields, "json_time_key") - delete(tbl.Fields, "json_timezone") - delete(tbl.Fields, "json_strict") - delete(tbl.Fields, "data_type") - delete(tbl.Fields, "collectd_auth_file") - delete(tbl.Fields, "collectd_security_level") - delete(tbl.Fields, "collectd_typesdb") - delete(tbl.Fields, "collectd_parse_multivalue") - delete(tbl.Fields, "dropwizard_metric_registry_path") - delete(tbl.Fields, "dropwizard_time_path") - delete(tbl.Fields, "dropwizard_time_format") - delete(tbl.Fields, "dropwizard_tags_path") - delete(tbl.Fields, "dropwizard_tag_paths") - delete(tbl.Fields, "grok_named_patterns") - delete(tbl.Fields, "grok_patterns") - delete(tbl.Fields, "grok_custom_patterns") - delete(tbl.Fields, "grok_custom_pattern_files") - delete(tbl.Fields, "grok_timezone") - delete(tbl.Fields, "grok_unique_timestamp") - delete(tbl.Fields, "csv_column_names") - delete(tbl.Fields, "csv_column_types") - delete(tbl.Fields, "csv_comment") - delete(tbl.Fields, "csv_delimiter") - delete(tbl.Fields, "csv_field_columns") - delete(tbl.Fields, "csv_header_row_count") - delete(tbl.Fields, "csv_measurement_column") - delete(tbl.Fields, "csv_skip_columns") - delete(tbl.Fields, "csv_skip_rows") - delete(tbl.Fields, "csv_tag_columns") - delete(tbl.Fields, "csv_timestamp_column") - delete(tbl.Fields, "csv_timestamp_format") - delete(tbl.Fields, "csv_timezone") - delete(tbl.Fields, "csv_trim_space") - delete(tbl.Fields, "form_urlencoded_tag_keys") - - return c, nil + return pc, nil } // buildSerializer grabs the necessary entries from the ast.Table for creating // a 
serializers.Serializer object, and creates it, which can then be added onto // an Output object. -func buildSerializer(name string, tbl *ast.Table) (serializers.Serializer, error) { - c := &serializers.Config{TimestampUnits: time.Duration(1 * time.Second)} +func (c *Config) buildSerializer(name string, tbl *ast.Table) (serializers.Serializer, error) { + sc := &serializers.Config{TimestampUnits: time.Duration(1 * time.Second)} - if node, ok := tbl.Fields["data_format"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.DataFormat = str.Value - } - } + c.getFieldString(tbl, "data_format", &sc.DataFormat) + + if sc.DataFormat == "" { + sc.DataFormat = "influx" } - if c.DataFormat == "" { - c.DataFormat = "influx" + c.getFieldString(tbl, "prefix", &sc.Prefix) + c.getFieldString(tbl, "template", &sc.Template) + c.getFieldStringSlice(tbl, "templates", &sc.Templates) + c.getFieldString(tbl, "carbon2_format", &sc.Carbon2Format) + c.getFieldInt(tbl, "influx_max_line_bytes", &sc.InfluxMaxLineBytes) + + c.getFieldBool(tbl, "influx_sort_fields", &sc.InfluxSortFields) + c.getFieldBool(tbl, "influx_uint_support", &sc.InfluxUintSupport) + c.getFieldBool(tbl, "graphite_tag_support", &sc.GraphiteTagSupport) + c.getFieldString(tbl, "graphite_separator", &sc.GraphiteSeparator) + + c.getFieldDuration(tbl, "json_timestamp_units", &sc.TimestampUnits) + + c.getFieldBool(tbl, "splunkmetric_hec_routing", &sc.HecRouting) + c.getFieldBool(tbl, "splunkmetric_multimetric", &sc.SplunkmetricMultiMetric) + + c.getFieldStringSlice(tbl, "wavefront_source_override", &sc.WavefrontSourceOverride) + c.getFieldBool(tbl, "wavefront_use_strict", &sc.WavefrontUseStrict) + + c.getFieldBool(tbl, "prometheus_export_timestamp", &sc.PrometheusExportTimestamp) + c.getFieldBool(tbl, "prometheus_sort_metrics", &sc.PrometheusSortMetrics) + c.getFieldBool(tbl, "prometheus_string_as_label", &sc.PrometheusStringAsLabel) + + if c.hasErrs() { + return nil, c.firstErr() } - if node, ok := tbl.Fields["prefix"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.Prefix = str.Value - } - } - } - - if node, ok := tbl.Fields["template"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.Template = str.Value - } - } - } - - if node, ok := tbl.Fields["templates"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.Templates = append(c.Templates, str.Value) - } - } - } - } - } - - if node, ok := tbl.Fields["carbon2_format"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.Carbon2Format = str.Value - } - } - } - - if node, ok := tbl.Fields["influx_max_line_bytes"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if integer, ok := kv.Value.(*ast.Integer); ok { - v, err := integer.Int() - if err != nil { - return nil, err - } - c.InfluxMaxLineBytes = int(v) - } - } - } - - if node, ok := tbl.Fields["influx_sort_fields"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Boolean); ok { - var err error - c.InfluxSortFields, err = b.Boolean() - if err != nil { - return nil, err - } - } - } - } - - if node, ok := tbl.Fields["influx_uint_support"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Boolean); ok { - var err error - c.InfluxUintSupport, err = b.Boolean() - if err != nil { - return nil, err - } - 
} - } - } - - if node, ok := tbl.Fields["graphite_tag_support"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Boolean); ok { - var err error - c.GraphiteTagSupport, err = b.Boolean() - if err != nil { - return nil, err - } - } - } - } - - if node, ok := tbl.Fields["graphite_separator"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - c.GraphiteSeparator = str.Value - } - } - } - - if node, ok := tbl.Fields["json_timestamp_units"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - timestampVal, err := time.ParseDuration(str.Value) - if err != nil { - return nil, fmt.Errorf("Unable to parse json_timestamp_units as a duration, %s", err) - } - // now that we have a duration, truncate it to the nearest - // power of ten (just in case) - nearest_exponent := int64(math.Log10(float64(timestampVal.Nanoseconds()))) - new_nanoseconds := int64(math.Pow(10.0, float64(nearest_exponent))) - c.TimestampUnits = time.Duration(new_nanoseconds) - } - } - } - - if node, ok := tbl.Fields["splunkmetric_hec_routing"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Boolean); ok { - var err error - c.HecRouting, err = b.Boolean() - if err != nil { - return nil, err - } - } - } - } - - if node, ok := tbl.Fields["splunkmetric_multimetric"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Boolean); ok { - var err error - c.SplunkmetricMultiMetric, err = b.Boolean() - if err != nil { - return nil, err - } - } - } - } - - if node, ok := tbl.Fields["wavefront_source_override"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if ary, ok := kv.Value.(*ast.Array); ok { - for _, elem := range ary.Value { - if str, ok := elem.(*ast.String); ok { - c.WavefrontSourceOverride = append(c.WavefrontSourceOverride, str.Value) - } - } - } - } - } - - if node, ok := tbl.Fields["wavefront_use_strict"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Boolean); ok { - var err error - c.WavefrontUseStrict, err = b.Boolean() - if err != nil { - return nil, err - } - } - } - } - - if node, ok := tbl.Fields["prometheus_export_timestamp"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Boolean); ok { - var err error - c.PrometheusExportTimestamp, err = b.Boolean() - if err != nil { - return nil, err - } - } - } - } - - if node, ok := tbl.Fields["prometheus_sort_metrics"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Boolean); ok { - var err error - c.PrometheusSortMetrics, err = b.Boolean() - if err != nil { - return nil, err - } - } - } - } - - if node, ok := tbl.Fields["prometheus_string_as_label"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if b, ok := kv.Value.(*ast.Boolean); ok { - var err error - c.PrometheusStringAsLabel, err = b.Boolean() - if err != nil { - return nil, err - } - } - } - } - - delete(tbl.Fields, "carbon2_format") - delete(tbl.Fields, "influx_max_line_bytes") - delete(tbl.Fields, "influx_sort_fields") - delete(tbl.Fields, "influx_uint_support") - delete(tbl.Fields, "graphite_tag_support") - delete(tbl.Fields, "graphite_separator") - delete(tbl.Fields, "data_format") - delete(tbl.Fields, "prefix") - delete(tbl.Fields, "template") - delete(tbl.Fields, "templates") - delete(tbl.Fields, "json_timestamp_units") - delete(tbl.Fields, "splunkmetric_hec_routing") - delete(tbl.Fields, "splunkmetric_multimetric") - delete(tbl.Fields, "wavefront_source_override") 
- delete(tbl.Fields, "wavefront_use_strict") - delete(tbl.Fields, "prometheus_export_timestamp") - delete(tbl.Fields, "prometheus_sort_metrics") - delete(tbl.Fields, "prometheus_string_as_label") - return serializers.NewSerializer(c) + return serializers.NewSerializer(sc) } // buildOutput parses output specific items from the ast.Table, // builds the filter and returns an // models.OutputConfig to be inserted into models.RunningInput // Note: error exists in the return for future calls that might require error -func buildOutput(name string, tbl *ast.Table) (*models.OutputConfig, error) { - filter, err := buildFilter(tbl) +func (c *Config) buildOutput(name string, tbl *ast.Table) (*models.OutputConfig, error) { + filter, err := c.buildFilter(tbl) if err != nil { return nil, err } @@ -2133,108 +1388,191 @@ func buildOutput(name string, tbl *ast.Table) (*models.OutputConfig, error) { Filter: filter, } - // TODO - // Outputs don't support FieldDrop/FieldPass, so set to NameDrop/NamePass - if len(oc.Filter.FieldDrop) > 0 { - oc.Filter.NameDrop = oc.Filter.FieldDrop - } - if len(oc.Filter.FieldPass) > 0 { - oc.Filter.NamePass = oc.Filter.FieldPass - } + // TODO: support FieldPass/FieldDrop on outputs - if err := getConfigDuration(tbl, "flush_interval", &oc.FlushInterval); err != nil { - return nil, err - } + c.getFieldDuration(tbl, "flush_interval", &oc.FlushInterval) + c.getFieldDuration(tbl, "flush_jitter", oc.FlushJitter) - if err := getConfigDuration(tbl, "flush_jitter", &oc.FlushJitter); err != nil { - return nil, err - } + c.getFieldInt(tbl, "metric_buffer_limit", &oc.MetricBufferLimit) + c.getFieldInt(tbl, "metric_batch_size", &oc.MetricBatchSize) + c.getFieldString(tbl, "alias", &oc.Alias) + c.getFieldString(tbl, "name_override", &oc.NameOverride) + c.getFieldString(tbl, "name_suffix", &oc.NameSuffix) + c.getFieldString(tbl, "name_prefix", &oc.NamePrefix) - if node, ok := tbl.Fields["metric_buffer_limit"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if integer, ok := kv.Value.(*ast.Integer); ok { - v, err := integer.Int() - if err != nil { - return nil, err - } - oc.MetricBufferLimit = int(v) - } - } + if c.hasErrs() { + return nil, c.firstErr() } - if node, ok := tbl.Fields["metric_batch_size"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if integer, ok := kv.Value.(*ast.Integer); ok { - v, err := integer.Int() - if err != nil { - return nil, err - } - oc.MetricBatchSize = int(v) - } - } - } - - if node, ok := tbl.Fields["alias"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - oc.Alias = str.Value - } - } - } - - if node, ok := tbl.Fields["name_override"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - oc.NameOverride = str.Value - } - } - } - - if node, ok := tbl.Fields["name_suffix"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - oc.NameSuffix = str.Value - } - } - } - - if node, ok := tbl.Fields["name_prefix"]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - oc.NamePrefix = str.Value - } - } - } - - delete(tbl.Fields, "metric_buffer_limit") - delete(tbl.Fields, "metric_batch_size") - delete(tbl.Fields, "alias") - delete(tbl.Fields, "name_override") - delete(tbl.Fields, "name_suffix") - delete(tbl.Fields, "name_prefix") - return oc, nil } +func (c *Config) missingTomlField(typ reflect.Type, key string) error { + switch key { + case "interval", "tagpass", "tagdrop", "namepass", "namedrop", 
"name_suffix", + "fieldpass", "fielddrop", "pass", "drop", "taginclude", "tagexclude", "data_format": + // ignore fields that are common to all plugins. + default: + c.UnusedFields[key] = true + } + return nil +} + +func (c *Config) getFieldString(tbl *ast.Table, fieldName string, target *string) { + if node, ok := tbl.Fields[fieldName]; ok { + if kv, ok := node.(*ast.KeyValue); ok { + if str, ok := kv.Value.(*ast.String); ok { + *target = str.Value + } + } + } +} + +func (c *Config) getFieldDuration(tbl *ast.Table, fieldName string, target interface{}) { + if node, ok := tbl.Fields[fieldName]; ok { + if kv, ok := node.(*ast.KeyValue); ok { + if str, ok := kv.Value.(*ast.String); ok { + d, err := time.ParseDuration(str.Value) + if err != nil { + c.addError(tbl, fmt.Errorf("error parsing duration: %w", err)) + return + } + targetVal := reflect.ValueOf(target).Elem() + targetVal.Set(reflect.ValueOf(d)) + } + } + } +} + +func (c *Config) getFieldBool(tbl *ast.Table, fieldName string, target *bool) { + var err error + if node, ok := tbl.Fields[fieldName]; ok { + if kv, ok := node.(*ast.KeyValue); ok { + switch t := kv.Value.(type) { + case *ast.Boolean: + *target, err = t.Boolean() + if err != nil { + c.addError(tbl, fmt.Errorf("unknown boolean value type %q, expecting boolean", kv.Value)) + return + } + case *ast.String: + *target, err = strconv.ParseBool(t.Value) + if err != nil { + c.addError(tbl, fmt.Errorf("unknown boolean value type %q, expecting boolean", kv.Value)) + return + } + default: + c.addError(tbl, fmt.Errorf("unknown boolean value type %q, expecting boolean", kv.Value.Source())) + return + } + } + } +} + +func (c *Config) getFieldInt(tbl *ast.Table, fieldName string, target *int) { + if node, ok := tbl.Fields[fieldName]; ok { + if kv, ok := node.(*ast.KeyValue); ok { + if iAst, ok := kv.Value.(*ast.Integer); ok { + i, err := iAst.Int() + if err != nil { + c.addError(tbl, fmt.Errorf("unexpected int type %q, expecting int", iAst.Value)) + return + } + *target = int(i) + } + } + } +} + +func (c *Config) getFieldInt64(tbl *ast.Table, fieldName string, target *int64) { + if node, ok := tbl.Fields[fieldName]; ok { + if kv, ok := node.(*ast.KeyValue); ok { + if iAst, ok := kv.Value.(*ast.Integer); ok { + i, err := iAst.Int() + if err != nil { + c.addError(tbl, fmt.Errorf("unexpected int type %q, expecting int", iAst.Value)) + return + } + *target = i + } + } + } +} + +func (c *Config) getFieldStringSlice(tbl *ast.Table, fieldName string, target *[]string) { + if node, ok := tbl.Fields[fieldName]; ok { + if kv, ok := node.(*ast.KeyValue); ok { + if ary, ok := kv.Value.(*ast.Array); ok { + for _, elem := range ary.Value { + if str, ok := elem.(*ast.String); ok { + *target = append(*target, str.Value) + } + } + } + } + } +} +func (c *Config) getFieldTagFilter(tbl *ast.Table, fieldName string, target *[]models.TagFilter) { + if node, ok := tbl.Fields[fieldName]; ok { + if subtbl, ok := node.(*ast.Table); ok { + for name, val := range subtbl.Fields { + if kv, ok := val.(*ast.KeyValue); ok { + tagfilter := models.TagFilter{Name: name} + if ary, ok := kv.Value.(*ast.Array); ok { + for _, elem := range ary.Value { + if str, ok := elem.(*ast.String); ok { + tagfilter.Filter = append(tagfilter.Filter, str.Value) + } + } + } + *target = append(*target, tagfilter) + } + } + } + } +} + +func (c *Config) getFieldStringMap(tbl *ast.Table, fieldName string, target *map[string]string) { + *target = map[string]string{} + if node, ok := tbl.Fields[fieldName]; ok { + if subtbl, ok := node.(*ast.Table); 
ok { + for name, val := range subtbl.Fields { + if kv, ok := val.(*ast.KeyValue); ok { + if str, ok := kv.Value.(*ast.String); ok { + (*target)[name] = str.Value + } + } + } + } + } +} + +func keys(m map[string]bool) []string { + result := []string{} + for k := range m { + result = append(result, k) + } + return result +} + +func (c *Config) hasErrs() bool { + return len(c.errs) > 0 +} + +func (c *Config) firstErr() error { + if len(c.errs) == 0 { + return nil + } + return c.errs[0] +} + +func (c *Config) addError(tbl *ast.Table, err error) { + c.errs = append(c.errs, fmt.Errorf("line %d:%d: %w", tbl.Line, tbl.Position, err)) +} + // unwrappable lets you retrieve the original telegraf.Processor from the // StreamingProcessor. This is necessary because the toml Unmarshaller won't // look inside composed types. type unwrappable interface { Unwrap() telegraf.Processor } - -func getConfigDuration(tbl *ast.Table, key string, target *time.Duration) error { - if node, ok := tbl.Fields[key]; ok { - if kv, ok := node.(*ast.KeyValue); ok { - if str, ok := kv.Value.(*ast.String); ok { - d, err := time.ParseDuration(str.Value) - if err != nil { - return err - } - delete(tbl.Fields, key) - *target = d - } - } - } - return nil -} diff --git a/config/config_test.go b/config/config_test.go index 42aefff15..5543c60e7 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -208,20 +208,19 @@ func TestConfig_FieldNotDefined(t *testing.T) { c := NewConfig() err := c.LoadConfig("./testdata/invalid_field.toml") require.Error(t, err, "invalid field name") - assert.Equal(t, "Error loading config file ./testdata/invalid_field.toml: Error parsing http_listener_v2, line 2: field corresponding to `not_a_field' is not defined in http_listener_v2.HTTPListenerV2", err.Error()) - + assert.Equal(t, "Error loading config file ./testdata/invalid_field.toml: plugin inputs.http_listener_v2: line 1: configuration specified the fields [\"not_a_field\"], but they weren't used", err.Error()) } func TestConfig_WrongFieldType(t *testing.T) { c := NewConfig() err := c.LoadConfig("./testdata/wrong_field_type.toml") require.Error(t, err, "invalid field type") - assert.Equal(t, "Error loading config file ./testdata/wrong_field_type.toml: Error parsing http_listener_v2, line 2: (http_listener_v2.HTTPListenerV2.Port) cannot unmarshal TOML string into int", err.Error()) + assert.Equal(t, "Error loading config file ./testdata/wrong_field_type.toml: error parsing http_listener_v2, line 2: (http_listener_v2.HTTPListenerV2.Port) cannot unmarshal TOML string into int", err.Error()) c = NewConfig() err = c.LoadConfig("./testdata/wrong_field_type2.toml") require.Error(t, err, "invalid field type2") - assert.Equal(t, "Error loading config file ./testdata/wrong_field_type2.toml: Error parsing http_listener_v2, line 2: (http_listener_v2.HTTPListenerV2.Methods) cannot unmarshal TOML string into []string", err.Error()) + assert.Equal(t, "Error loading config file ./testdata/wrong_field_type2.toml: error parsing http_listener_v2, line 2: (http_listener_v2.HTTPListenerV2.Methods) cannot unmarshal TOML string into []string", err.Error()) } func TestConfig_InlineTables(t *testing.T) { @@ -256,7 +255,7 @@ func TestConfig_BadOrdering(t *testing.T) { c := NewConfig() err := c.LoadConfig("./testdata/non_slice_slice.toml") require.Error(t, err, "bad ordering") - assert.Equal(t, "Error loading config file ./testdata/non_slice_slice.toml: Error parsing http array, line 4: cannot unmarshal TOML array into string (need slice)", err.Error()) + 
assert.Equal(t, "Error loading config file ./testdata/non_slice_slice.toml: error parsing http array, line 4: cannot unmarshal TOML array into string (need slice)", err.Error()) } func TestConfig_AzureMonitorNamespacePrefix(t *testing.T) {