feat: Migrate json_v2 parser to new style (#11343)
parent a049175e58
commit 3f114e0921
@@ -30,7 +30,8 @@ import (
"github.com/influxdata/telegraf/plugins/inputs"
"github.com/influxdata/telegraf/plugins/outputs"
"github.com/influxdata/telegraf/plugins/parsers"
"github.com/influxdata/telegraf/plugins/parsers/json_v2"
"github.com/influxdata/telegraf/plugins/parsers/temporary/json_v2"
"github.com/influxdata/telegraf/plugins/parsers/temporary/xpath"
"github.com/influxdata/telegraf/plugins/processors"
"github.com/influxdata/telegraf/plugins/serializers"
"github.com/influxdata/toml"
@@ -1526,7 +1527,7 @@ func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config,
// for influx parser
c.getFieldString(tbl, "influx_parser_type", &pc.InfluxParserType)

//for XPath parser family
// for XPath parser family
if choice.Contains(pc.DataFormat, []string{"xml", "xpath_json", "xpath_msgpack", "xpath_protobuf"}) {
c.getFieldString(tbl, "xpath_protobuf_file", &pc.XPathProtobufFile)
c.getFieldString(tbl, "xpath_protobuf_type", &pc.XPathProtobufType)
@@ -1541,7 +1542,7 @@ func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config,
}
if xpathOK {
if subtbls, ok := node.([]*ast.Table); ok {
pc.XPathConfig = make([]parsers.XPathConfig, len(subtbls))
pc.XPathConfig = make([]xpath.Config, len(subtbls))
for i, subtbl := range subtbls {
subcfg := pc.XPathConfig[i]
c.getFieldString(subtbl, "metric_name", &subcfg.MetricQuery)
@@ -1565,10 +1566,10 @@ func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config,
}
}

//for JSONPath parser
// for JSON_v2 parser
if node, ok := tbl.Fields["json_v2"]; ok {
if metricConfigs, ok := node.([]*ast.Table); ok {
pc.JSONV2Config = make([]parsers.JSONV2Config, len(metricConfigs))
pc.JSONV2Config = make([]json_v2.Config, len(metricConfigs))
for i, metricConfig := range metricConfigs {
mc := pc.JSONV2Config[i]
c.getFieldString(metricConfig, "measurement_name", &mc.MeasurementName)
@@ -1586,7 +1587,7 @@ func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config,
if objectconfigs, ok := metricConfig.Fields["object"]; ok {
if objectconfigs, ok := objectconfigs.([]*ast.Table); ok {
for _, objectConfig := range objectconfigs {
var o json_v2.JSONObject
var o json_v2.Object
c.getFieldString(objectConfig, "path", &o.Path)
c.getFieldBool(objectConfig, "optional", &o.Optional)
c.getFieldString(objectConfig, "timestamp_key", &o.TimestampKey)
@@ -1744,25 +1745,43 @@ func (c *Config) buildOutput(name string, tbl *ast.Table) (*models.OutputConfig,

func (c *Config) missingTomlField(_ reflect.Type, key string) error {
switch key {
case "alias", "carbon2_format", "carbon2_sanitize_replace_char", "collectd_auth_file",
"collectd_parse_multivalue", "collectd_security_level", "collectd_typesdb", "collection_jitter",
"collection_offset",
"csv_separator", "csv_header", "csv_column_prefix", "csv_timestamp_format",
"data_format", "data_type", "delay", "drop", "drop_original", "dropwizard_metric_registry_path",
"dropwizard_tag_paths", "dropwizard_tags_path", "dropwizard_time_format", "dropwizard_time_path",
"fielddrop", "fieldpass", "flush_interval", "flush_jitter", "form_urlencoded_tag_keys",
"grace", "graphite_separator", "graphite_tag_sanitize_mode", "graphite_tag_support",
"grok_custom_pattern_files", "grok_custom_patterns", "grok_named_patterns", "grok_patterns",
"grok_timezone", "grok_unique_timestamp", "influx_max_line_bytes", "influx_parser_type", "influx_sort_fields",
"influx_uint_support", "interval", "json_timestamp_units", "json_v2",
"lvm", "metric_batch_size", "metric_buffer_limit", "name_override", "name_prefix",
"name_suffix", "namedrop", "namepass", "order", "pass", "period", "precision",
"prefix", "prometheus_export_timestamp", "prometheus_ignore_timestamp", "prometheus_sort_metrics", "prometheus_string_as_label",
"separator", "splunkmetric_hec_routing", "splunkmetric_multimetric", "tag_keys",
"tagdrop", "tagexclude", "taginclude", "tagpass", "tags", "template", "templates",
"value_field_name", "wavefront_source_override", "wavefront_use_strict", "wavefront_disable_prefix_conversion":
// General options to ignore
case "alias",
"collection_jitter", "collection_offset",
"data_format", "delay", "drop", "drop_original",
"fielddrop", "fieldpass", "flush_interval", "flush_jitter",
"grace",
"interval",
"lvm", // What is this used for?
"metric_batch_size", "metric_buffer_limit",
"name_override", "name_prefix", "name_suffix", "namedrop", "namepass",
"order",
"pass", "period", "precision",
"tagdrop", "tagexclude", "taginclude", "tagpass", "tags":

// ignore fields that are common to all plugins.
// Parser options to ignore
case "data_type", "separator", "tag_keys",
// "templates", // shared with serializers
"collectd_auth_file", "collectd_parse_multivalue", "collectd_security_level", "collectd_typesdb",
"dropwizard_metric_registry_path", "dropwizard_tags_path", "dropwizard_tag_paths",
"dropwizard_time_format", "dropwizard_time_path",
"form_urlencoded_tag_keys",
"grok_custom_pattern_files", "grok_custom_patterns", "grok_named_patterns", "grok_patterns",
"grok_timezone", "grok_unique_timestamp",
"influx_parser_type",
"prometheus_ignore_timestamp", // not used anymore?
"value_field_name":

// Serializer options to ignore
case "prefix", "template", "templates",
"carbon2_format", "carbon2_sanitize_replace_char",
"csv_column_prefix", "csv_header", "csv_separator", "csv_timestamp_format",
"graphite_tag_sanitize_mode", "graphite_tag_support", "graphite_separator",
"influx_max_line_bytes", "influx_sort_fields", "influx_uint_support",
"json_timestamp_format", "json_timestamp_units",
"prometheus_export_timestamp", "prometheus_sort_metrics", "prometheus_string_as_label",
"splunkmetric_hec_routing", "splunkmetric_multimetric",
"wavefront_disable_prefix_conversion", "wavefront_source_override", "wavefront_use_strict":
default:
c.unusedFieldsMutex.Lock()
c.UnusedFields[key] = true
@@ -4,5 +4,6 @@ import (
//Blank imports for plugins to register themselves
_ "github.com/influxdata/telegraf/plugins/parsers/csv"
_ "github.com/influxdata/telegraf/plugins/parsers/json"
_ "github.com/influxdata/telegraf/plugins/parsers/json_v2"
_ "github.com/influxdata/telegraf/plugins/parsers/xpath"
)
@@ -9,15 +9,17 @@ import (
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/internal"
"github.com/influxdata/telegraf/metric"
"github.com/influxdata/telegraf/plugins/parsers"
"github.com/influxdata/telegraf/plugins/parsers/temporary/json_v2"
"github.com/tidwall/gjson"
)

// Parser adheres to the parser interface, contains the parser configuration, and data required to parse JSON
type Parser struct {
// These struct fields are common for a parser
Configs []Config
DefaultTags map[string]string
Log telegraf.Logger
Configs []json_v2.Config `toml:"json_v2"`
DefaultMetricName string `toml:"-"`
DefaultTags map[string]string `toml:"-"`
Log telegraf.Logger `toml:"-"`

// **** The struct fields bellow this comment are used for processing indvidual configs ****

@@ -30,48 +32,13 @@ type Parser struct {
// iterateObjects dictates if ExpandArray function will handle objects
iterateObjects bool
// objectConfig contains the config for an object, some info is needed while iterating over the gjson results
objectConfig JSONObject
objectConfig json_v2.Object
}

type PathResult struct {
result gjson.Result
tag bool
DataSet
}

type Config struct {
MeasurementName string `toml:"measurement_name"` // OPTIONAL
MeasurementNamePath string `toml:"measurement_name_path"` // OPTIONAL
TimestampPath string `toml:"timestamp_path"` // OPTIONAL
TimestampFormat string `toml:"timestamp_format"` // OPTIONAL, but REQUIRED when timestamp_path is defined
TimestampTimezone string `toml:"timestamp_timezone"` // OPTIONAL, but REQUIRES timestamp_path

Fields []DataSet
Tags []DataSet
JSONObjects []JSONObject
}

type DataSet struct {
Path string `toml:"path"` // REQUIRED
Type string `toml:"type"` // OPTIONAL, can't be set for tags they will always be a string
Rename string `toml:"rename"`
Optional bool `toml:"optional"` // Will suppress errors if there isn't a match with Path
}

type JSONObject struct {
Path string `toml:"path"` // REQUIRED
Optional bool `toml:"optional"` // Will suppress errors if there isn't a match with Path
TimestampKey string `toml:"timestamp_key"`
TimestampFormat string `toml:"timestamp_format"` // OPTIONAL, but REQUIRED when timestamp_path is defined
TimestampTimezone string `toml:"timestamp_timezone"` // OPTIONAL, but REQUIRES timestamp_path
Renames map[string]string `toml:"renames"`
Fields map[string]string `toml:"fields"`
Tags []string `toml:"tags"`
IncludedKeys []string `toml:"included_keys"`
ExcludedKeys []string `toml:"excluded_keys"`
DisablePrependKeys bool `toml:"disable_prepend_keys"`
FieldPaths []DataSet
TagPaths []DataSet
json_v2.DataSet
}

type MetricNode struct {
@@ -90,6 +57,16 @@ type MetricNode struct {
gjson.Result
}

func (p *Parser) Init() error {
// Propagate the default metric name to the configs in case it is not set there
for i, cfg := range p.Configs {
if cfg.MeasurementName == "" {
p.Configs[i].MeasurementName = p.DefaultMetricName
}
}
return nil
}

func (p *Parser) Parse(input []byte) ([]telegraf.Metric, error) {
// Only valid JSON is supported
if !gjson.Valid(string(input)) {
@@ -168,7 +145,7 @@ func (p *Parser) Parse(input []byte) ([]telegraf.Metric, error) {
// processMetric will iterate over all 'field' or 'tag' configs and create metrics for each
// A field/tag can either be a single value or an array of values, each resulting in its own metric
// For multiple configs, a set of metrics is created from the cartesian product of each separate config
func (p *Parser) processMetric(input []byte, data []DataSet, tag bool, timestamp time.Time) ([]telegraf.Metric, error) {
func (p *Parser) processMetric(input []byte, data []json_v2.DataSet, tag bool, timestamp time.Time) ([]telegraf.Metric, error) {
if len(data) == 0 {
return nil, nil
}
@@ -410,7 +387,7 @@ func (p *Parser) existsInpathResults(index int) *PathResult {
}

// processObjects will iterate over all 'object' configs and create metrics for each
func (p *Parser) processObjects(input []byte, objects []JSONObject, timestamp time.Time) ([]telegraf.Metric, error) {
func (p *Parser) processObjects(input []byte, objects []json_v2.Object, timestamp time.Time) ([]telegraf.Metric, error) {
p.iterateObjects = true
var t []telegraf.Metric
for _, c := range objects {
@@ -678,3 +655,26 @@ func (p *Parser) checkResult(result gjson.Result, path string, optional bool) (b

return false, nil
}

func init() {
// Register all variants
parsers.Add("json_v2",
func(defaultMetricName string) telegraf.Parser {
return &Parser{DefaultMetricName: defaultMetricName}
},
)
}

// InitFromConfig is a compatibility function to construct the parser the old way
func (p *Parser) InitFromConfig(config *parsers.Config) error {
p.DefaultMetricName = config.MetricName
p.DefaultTags = config.DefaultTags

// Convert the config formats which is a one-to-one copy
if len(config.JSONV2Config) > 0 {
p.Configs = make([]json_v2.Config, 0, len(config.JSONV2Config))
p.Configs = append(p.Configs, config.JSONV2Config...)
}

return p.Init()
}
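The init() registration above is the core of the "new style": instead of being wired into NewParser, the json_v2 parser now adds a creator to the parsers registry. A minimal, hypothetical sketch of consuming that registration (variable names and the printed output are illustrative, not part of the commit); in the plugin framework the returned parser would additionally have Init() run before the first Parse():

package main

import (
	"fmt"

	"github.com/influxdata/telegraf/plugins/parsers"
	// Blank import runs the init() shown above and registers "json_v2".
	_ "github.com/influxdata/telegraf/plugins/parsers/json_v2"
)

func main() {
	// The registry lookup mirrors the default branch of NewParser below.
	creator, found := parsers.Parsers["json_v2"]
	if !found {
		panic("json_v2 parser is not registered")
	}
	// The creator only receives the default metric name; Init() later copies it
	// into every config entry that has no measurement_name of its own.
	parser := creator("example")
	fmt.Printf("%T\n", parser) // *json_v2.Parser
}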
@@ -11,11 +11,12 @@ import (
"github.com/influxdata/telegraf/plugins/parsers/grok"
"github.com/influxdata/telegraf/plugins/parsers/influx"
"github.com/influxdata/telegraf/plugins/parsers/influx/influx_upstream"
"github.com/influxdata/telegraf/plugins/parsers/json_v2"
"github.com/influxdata/telegraf/plugins/parsers/logfmt"
"github.com/influxdata/telegraf/plugins/parsers/nagios"
"github.com/influxdata/telegraf/plugins/parsers/prometheus"
"github.com/influxdata/telegraf/plugins/parsers/prometheusremotewrite"
"github.com/influxdata/telegraf/plugins/parsers/temporary/json_v2"
"github.com/influxdata/telegraf/plugins/parsers/temporary/xpath"
"github.com/influxdata/telegraf/plugins/parsers/value"
"github.com/influxdata/telegraf/plugins/parsers/wavefront"
)
@@ -182,15 +183,15 @@ type Config struct {
ValueFieldName string `toml:"value_field_name"`

// XPath configuration
XPathPrintDocument bool `toml:"xpath_print_document"`
XPathProtobufFile string `toml:"xpath_protobuf_file"`
XPathProtobufType string `toml:"xpath_protobuf_type"`
XPathProtobufImportPaths []string `toml:"xpath_protobuf_import_paths"`
XPathAllowEmptySelection bool `toml:"xpath_allow_empty_selection"`
XPathConfig []XPathConfig `toml:"xpath"`
XPathPrintDocument bool `toml:"xpath_print_document"`
XPathProtobufFile string `toml:"xpath_protobuf_file"`
XPathProtobufType string `toml:"xpath_protobuf_type"`
XPathProtobufImportPaths []string `toml:"xpath_protobuf_import_paths"`
XPathAllowEmptySelection bool `toml:"xpath_allow_empty_selection"`
XPathConfig []xpath.Config `toml:"xpath"`

// JSONPath configuration
JSONV2Config []JSONV2Config `toml:"json_v2"`
JSONV2Config []json_v2.Config `toml:"json_v2"`

// Influx configuration
InfluxParserType string `toml:"influx_parser_type"`
@@ -199,34 +200,6 @@ type Config struct {
LogFmtTagKeys []string `toml:"logfmt_tag_keys"`
}

// XPathConfig definition for backward compatibitlity ONLY.
// We need this here to avoid cyclic dependencies. However, we need
// to move this to plugins/parsers/xpath once we deprecate parser
// construction via `NewParser()`.
type XPathConfig struct {
MetricQuery string `toml:"metric_name"`
Selection string `toml:"metric_selection"`
Timestamp string `toml:"timestamp"`
TimestampFmt string `toml:"timestamp_format"`
Tags map[string]string `toml:"tags"`
Fields map[string]string `toml:"fields"`
FieldsInt map[string]string `toml:"fields_int"`

FieldSelection string `toml:"field_selection"`
FieldNameQuery string `toml:"field_name"`
FieldValueQuery string `toml:"field_value"`
FieldNameExpand bool `toml:"field_name_expansion"`

TagSelection string `toml:"tag_selection"`
TagNameQuery string `toml:"tag_name"`
TagValueQuery string `toml:"tag_value"`
TagNameExpand bool `toml:"tag_name_expansion"`
}

type JSONV2Config struct {
json_v2.Config
}

// NewParser returns a Parser interface based on the given config.
func NewParser(config *Config) (Parser, error) {
var err error
@@ -285,8 +258,6 @@ func NewParser(config *Config) (Parser, error) {
)
case "prometheusremotewrite":
parser, err = NewPrometheusRemoteWriteParser(config.DefaultTags)
case "json_v2":
parser, err = NewJSONPathParser(config.JSONV2Config)
default:
creator, found := Parsers[config.DataFormat]
if !found {
@@ -422,23 +393,3 @@ func NewPrometheusRemoteWriteParser(defaultTags map[string]string) (Parser, erro
DefaultTags: defaultTags,
}, nil
}

func NewJSONPathParser(jsonv2config []JSONV2Config) (Parser, error) {
configs := make([]json_v2.Config, len(jsonv2config))
for i, cfg := range jsonv2config {
configs[i].MeasurementName = cfg.MeasurementName
configs[i].MeasurementNamePath = cfg.MeasurementNamePath

configs[i].TimestampPath = cfg.TimestampPath
configs[i].TimestampFormat = cfg.TimestampFormat
configs[i].TimestampTimezone = cfg.TimestampTimezone

configs[i].Fields = cfg.Fields
configs[i].Tags = cfg.Tags

configs[i].JSONObjects = cfg.JSONObjects
}
return &json_v2.Parser{
Configs: configs,
}, nil
}
@@ -0,0 +1,40 @@
package json_v2

// Config definition for backward compatibility ONLY.
// We need this here to avoid cyclic dependencies. However, we need
// to move this to plugins/parsers/json_v2 once we deprecate parser
// construction via `NewParser()`.
type Config struct {
MeasurementName string `toml:"measurement_name"` // OPTIONAL
MeasurementNamePath string `toml:"measurement_name_path"` // OPTIONAL
TimestampPath string `toml:"timestamp_path"` // OPTIONAL
TimestampFormat string `toml:"timestamp_format"` // OPTIONAL, but REQUIRED when timestamp_path is defined
TimestampTimezone string `toml:"timestamp_timezone"` // OPTIONAL, but REQUIRES timestamp_path

Fields []DataSet `toml:"field"`
Tags []DataSet `toml:"tag"`
JSONObjects []Object `toml:"object"`
}

type DataSet struct {
Path string `toml:"path"` // REQUIRED
Type string `toml:"type"` // OPTIONAL, can't be set for tags they will always be a string
Rename string `toml:"rename"`
Optional bool `toml:"optional"` // Will suppress errors if there isn't a match with Path
}

type Object struct {
Path string `toml:"path"` // REQUIRED
Optional bool `toml:"optional"` // Will suppress errors if there isn't a match with Path
TimestampKey string `toml:"timestamp_key"`
TimestampFormat string `toml:"timestamp_format"` // OPTIONAL, but REQUIRED when timestamp_path is defined
TimestampTimezone string `toml:"timestamp_timezone"` // OPTIONAL, but REQUIRES timestamp_path
Renames map[string]string `toml:"renames"`
Fields map[string]string `toml:"fields"`
Tags []string `toml:"tags"`
IncludedKeys []string `toml:"included_keys"`
ExcludedKeys []string `toml:"excluded_keys"`
DisablePrependKeys bool `toml:"disable_prepend_keys"`
FieldPaths []DataSet `toml:"field"`
TagPaths []DataSet `toml:"tag"`
}
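For illustration only (not part of the commit): a sketch of driving the migrated parser directly with the temporary config type defined above. The import alias, the gjson paths, and the sample JSON are assumptions.

package main

import (
	"fmt"

	"github.com/influxdata/telegraf/plugins/parsers/json_v2"
	temporary "github.com/influxdata/telegraf/plugins/parsers/temporary/json_v2"
	"github.com/influxdata/telegraf/testutil"
)

func main() {
	parser := &json_v2.Parser{
		// Copied by Init() into every config without an explicit measurement_name.
		DefaultMetricName: "example",
		Log:               testutil.Logger{Name: "parsers.json_v2"},
		Configs: []temporary.Config{{
			Fields: []temporary.DataSet{{Path: "device.temperature", Type: "float"}},
			Tags:   []temporary.DataSet{{Path: "device.name"}},
		}},
	}
	if err := parser.Init(); err != nil {
		panic(err)
	}
	metrics, err := parser.Parse([]byte(`{"device": {"name": "sensor-1", "temperature": 22.5}}`))
	if err != nil {
		panic(err)
	}
	fmt.Println(metrics[0].Name(), metrics[0].Tags(), metrics[0].Fields())
}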
@@ -0,0 +1,25 @@
package xpath

// Config definition for backward compatibility ONLY.
// We need this here to avoid cyclic dependencies. However, we need
// to move this to plugins/parsers/xpath once we deprecate parser
// construction via `NewParser()`.
type Config struct {
MetricQuery string `toml:"metric_name"`
Selection string `toml:"metric_selection"`
Timestamp string `toml:"timestamp"`
TimestampFmt string `toml:"timestamp_format"`
Tags map[string]string `toml:"tags"`
Fields map[string]string `toml:"fields"`
FieldsInt map[string]string `toml:"fields_int"`

FieldSelection string `toml:"field_selection"`
FieldNameQuery string `toml:"field_name"`
FieldValueQuery string `toml:"field_value"`
FieldNameExpand bool `toml:"field_name_expansion"`

TagSelection string `toml:"tag_selection"`
TagNameQuery string `toml:"tag_name"`
TagValueQuery string `toml:"tag_value"`
TagNameExpand bool `toml:"tag_name_expansion"`
}
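Similarly, a hypothetical sketch (not from the commit) of handing the temporary xpath.Config above to the xpath parser, much like the test code further down does; the package alias, XML input, and queries are assumptions.

package main

import (
	"fmt"

	"github.com/influxdata/telegraf/plugins/parsers/temporary/xpath"
	xpathparser "github.com/influxdata/telegraf/plugins/parsers/xpath"
	"github.com/influxdata/telegraf/testutil"
)

func main() {
	parser := &xpathparser.Parser{
		Format:            "xml",
		DefaultMetricName: "example",
		Log:               testutil.Logger{Name: "parsers.xml"},
		Configs: []xpath.Config{{
			Selection: "/Device",
			Fields:    map[string]string{"value": "number(Value)"},
		}},
	}
	if err := parser.Init(); err != nil {
		panic(err)
	}
	metrics, err := parser.Parse([]byte(`<Device><Value>42</Value></Device>`))
	if err != nil {
		panic(err)
	}
	fmt.Println(metrics[0].Name(), metrics[0].Fields())
}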
@@ -13,6 +13,7 @@ import (
"github.com/influxdata/telegraf/internal"
"github.com/influxdata/telegraf/metric"
"github.com/influxdata/telegraf/plugins/parsers"
"github.com/influxdata/telegraf/plugins/parsers/temporary/xpath"
)

type dataNode interface{}
@@ -32,7 +33,7 @@ type Parser struct {
ProtobufImportPaths []string `toml:"xpath_protobuf_import_paths"`
PrintDocument bool `toml:"xpath_print_document"`
AllowEmptySelection bool `toml:"xpath_allow_empty_selection"`
Configs []Config `toml:"xpath"`
Configs []xpath.Config `toml:"xpath"`
DefaultMetricName string `toml:"-"`
DefaultTags map[string]string `toml:"-"`
Log telegraf.Logger `toml:"-"`
@@ -40,12 +41,6 @@ type Parser struct {
document dataDocument
}

// Config definition
// This should be replaced by the actual definition once
// the compatibitlity-code is removed.
// Please check plugins/parsers/registry.go for now.
type Config parsers.XPathConfig

func (p *Parser) Init() error {
switch p.Format {
case "", "xml":
@@ -139,7 +134,7 @@ func (p *Parser) SetDefaultTags(tags map[string]string) {
p.DefaultTags = tags
}

func (p *Parser) parseQuery(starttime time.Time, doc, selected dataNode, config Config) (telegraf.Metric, error) {
func (p *Parser) parseQuery(starttime time.Time, doc, selected dataNode, config xpath.Config) (telegraf.Metric, error) {
var timestamp time.Time
var metricname string

@@ -552,11 +547,8 @@ func (p *Parser) InitFromConfig(config *parsers.Config) error {

// Convert the config formats which is a one-to-one copy
if len(config.XPathConfig) > 0 {
p.Configs = make([]Config, 0, len(config.XPathConfig))
for _, cfg := range config.XPathConfig {
config := Config(cfg)
p.Configs = append(p.Configs, config)
}
p.Configs = make([]xpath.Config, 0, len(config.XPathConfig))
p.Configs = append(p.Configs, config.XPathConfig...)
}

return p.Init()
@@ -10,6 +10,7 @@ import (

"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/plugins/parsers/influx"
"github.com/influxdata/telegraf/plugins/parsers/temporary/xpath"
"github.com/influxdata/telegraf/testutil"
"github.com/influxdata/toml"

@@ -106,14 +107,14 @@ func TestParseInvalidXML(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
defaultTags map[string]string
expectedError string
}{
{
name: "invalid XML (missing close tag)",
input: invalidXML,
configs: []Config{
configs: []xpath.Config{
{
MetricQuery: "test",
Timestamp: "/Device_1/Timestamp_unix",
@@ -145,14 +146,14 @@ func TestInvalidTypeQueriesFail(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
defaultTags map[string]string
expectedError string
}{
{
name: "invalid field (int) type",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
FieldsInt: map[string]string{
@@ -186,14 +187,14 @@ func TestInvalidTypeQueries(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
defaultTags map[string]string
expected telegraf.Metric
}{
{
name: "invalid field type (number)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
Fields: map[string]string{
@@ -214,7 +215,7 @@ func TestInvalidTypeQueries(t *testing.T) {
{
name: "invalid field type (boolean)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
Fields: map[string]string{
@@ -256,14 +257,14 @@ func TestParseTimestamps(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
defaultTags map[string]string
expected telegraf.Metric
}{
{
name: "parse timestamp (no fmt)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
},
@@ -279,7 +280,7 @@ func TestParseTimestamps(t *testing.T) {
{
name: "parse timestamp (unix)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
TimestampFmt: "unix",
@@ -296,7 +297,7 @@ func TestParseTimestamps(t *testing.T) {
{
name: "parse timestamp (unix_ms)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix_ms",
TimestampFmt: "unix_ms",
@@ -313,7 +314,7 @@ func TestParseTimestamps(t *testing.T) {
{
name: "parse timestamp (unix_us)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix_us",
TimestampFmt: "unix_us",
@@ -330,7 +331,7 @@ func TestParseTimestamps(t *testing.T) {
{
name: "parse timestamp (unix_us)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix_ns",
TimestampFmt: "unix_ns",
@@ -347,7 +348,7 @@ func TestParseTimestamps(t *testing.T) {
{
name: "parse timestamp (RFC3339)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_iso",
TimestampFmt: "2006-01-02T15:04:05Z",
@@ -385,14 +386,14 @@ func TestParseSingleValues(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
defaultTags map[string]string
expected telegraf.Metric
}{
{
name: "parse scalar values as string fields",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
Fields: map[string]string{
@@ -419,7 +420,7 @@ func TestParseSingleValues(t *testing.T) {
{
name: "parse scalar values as typed fields (w/o int)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
Fields: map[string]string{
@@ -446,7 +447,7 @@ func TestParseSingleValues(t *testing.T) {
{
name: "parse values as typed fields (w/ int)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
Fields: map[string]string{
@@ -475,7 +476,7 @@ func TestParseSingleValues(t *testing.T) {
{
name: "parse substring values",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
Fields: map[string]string{
@@ -498,7 +499,7 @@ func TestParseSingleValues(t *testing.T) {
{
name: "parse substring values (typed)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
Fields: map[string]string{
@@ -521,7 +522,7 @@ func TestParseSingleValues(t *testing.T) {
{
name: "parse substring values (typed int)",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
FieldsInt: map[string]string{
@@ -544,7 +545,7 @@ func TestParseSingleValues(t *testing.T) {
{
name: "parse tags",
input: singleMetricValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix",
Tags: map[string]string{
@@ -588,14 +589,14 @@ func TestParseSingleAttributes(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
defaultTags map[string]string
expected telegraf.Metric
}{
{
name: "parse attr timestamp (unix)",
input: singleMetricAttributesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix/@value",
},
@@ -611,7 +612,7 @@ func TestParseSingleAttributes(t *testing.T) {
{
name: "parse attr timestamp (RFC3339)",
input: singleMetricAttributesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_iso/@value",
TimestampFmt: "2006-01-02T15:04:05Z",
@@ -628,7 +629,7 @@ func TestParseSingleAttributes(t *testing.T) {
{
name: "parse attr as string fields",
input: singleMetricAttributesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix/@value",
Fields: map[string]string{
@@ -655,7 +656,7 @@ func TestParseSingleAttributes(t *testing.T) {
{
name: "parse attr as typed fields (w/o int)",
input: singleMetricAttributesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix/@value",
Fields: map[string]string{
@@ -682,7 +683,7 @@ func TestParseSingleAttributes(t *testing.T) {
{
name: "parse attr as typed fields (w/ int)",
input: singleMetricAttributesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix/@value",
Fields: map[string]string{
@@ -711,7 +712,7 @@ func TestParseSingleAttributes(t *testing.T) {
{
name: "parse attr substring",
input: singleMetricAttributesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix/@value",
Fields: map[string]string{
@@ -732,7 +733,7 @@ func TestParseSingleAttributes(t *testing.T) {
{
name: "parse attr tags",
input: singleMetricAttributesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix/@value",
Tags: map[string]string{
@@ -755,7 +756,7 @@ func TestParseSingleAttributes(t *testing.T) {
{
name: "parse attr bool",
input: singleMetricAttributesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Device_1/Timestamp_unix/@value",
Fields: map[string]string{
@@ -797,14 +798,14 @@ func TestParseMultiValues(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
defaultTags map[string]string
expected telegraf.Metric
}{
{
name: "select values (float)",
input: singleMetricMultiValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Timestamp/@value",
Fields: map[string]string{
@@ -835,7 +836,7 @@ func TestParseMultiValues(t *testing.T) {
{
name: "select values (int)",
input: singleMetricMultiValuesXML,
configs: []Config{
configs: []xpath.Config{
{
Timestamp: "/Timestamp/@value",
FieldsInt: map[string]string{
@@ -887,14 +888,14 @@ func TestParseMultiNodes(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
defaultTags map[string]string
expected []telegraf.Metric
}{
{
name: "select all devices",
input: multipleNodesXML,
configs: []Config{
configs: []xpath.Config{
{
Selection: "/Device",
Timestamp: "/Timestamp/@value",
@@ -1004,14 +1005,14 @@ func TestParseMetricQuery(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
defaultTags map[string]string
expected telegraf.Metric
}{
{
name: "parse metric name query",
input: metricNameQueryXML,
configs: []Config{
configs: []xpath.Config{
{
MetricQuery: "name(/Device_1/Metric/@*[1])",
Timestamp: "/Device_1/Timestamp_unix",
@@ -1033,7 +1034,7 @@ func TestParseMetricQuery(t *testing.T) {
{
name: "parse metric name constant",
input: metricNameQueryXML,
configs: []Config{
configs: []xpath.Config{
{
MetricQuery: "'the_metric'",
Timestamp: "/Device_1/Timestamp_unix",
@@ -1076,13 +1077,13 @@ func TestParseErrors(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
expected string
}{
{
name: "string metric name query",
input: metricNameQueryXML,
configs: []Config{
configs: []xpath.Config{
{
MetricQuery: "arbitrary",
Timestamp: "/Device_1/Timestamp_unix",
@@ -1116,12 +1117,12 @@ func TestEmptySelection(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
}{
{
name: "empty path",
input: multipleNodesXML,
configs: []Config{
configs: []xpath.Config{
{
Selection: "/Device/NonExisting",
Fields: map[string]string{"value": "number(Value)"},
@@ -1133,7 +1134,7 @@ func TestEmptySelection(t *testing.T) {
{
name: "empty pattern",
input: multipleNodesXML,
configs: []Config{
configs: []xpath.Config{
{
Selection: "//NonExisting",
Fields: map[string]string{"value": "number(Value)"},
@@ -1145,7 +1146,7 @@ func TestEmptySelection(t *testing.T) {
{
name: "empty axis",
input: multipleNodesXML,
configs: []Config{
configs: []xpath.Config{
{
Selection: "/Device/child::NonExisting",
Fields: map[string]string{"value": "number(Value)"},
@@ -1157,7 +1158,7 @@ func TestEmptySelection(t *testing.T) {
{
name: "empty predicate",
input: multipleNodesXML,
configs: []Config{
configs: []xpath.Config{
{
Selection: "/Device[@NonExisting=true]",
Fields: map[string]string{"value": "number(Value)"},
@@ -1189,12 +1190,12 @@ func TestEmptySelectionAllowed(t *testing.T) {
var tests = []struct {
name string
input string
configs []Config
configs []xpath.Config
}{
{
name: "empty path",
input: multipleNodesXML,
configs: []Config{
configs: []xpath.Config{
{
Selection: "/Device/NonExisting",
Fields: map[string]string{"value": "number(Value)"},
@@ -1206,7 +1207,7 @@ func TestEmptySelectionAllowed(t *testing.T) {
{
name: "empty pattern",
input: multipleNodesXML,
configs: []Config{
configs: []xpath.Config{
{
Selection: "//NonExisting",
Fields: map[string]string{"value": "number(Value)"},
@@ -1218,7 +1219,7 @@ func TestEmptySelectionAllowed(t *testing.T) {
{
name: "empty axis",
input: multipleNodesXML,
configs: []Config{
configs: []xpath.Config{
{
Selection: "/Device/child::NonExisting",
Fields: map[string]string{"value": "number(Value)"},
@@ -1230,7 +1231,7 @@ func TestEmptySelectionAllowed(t *testing.T) {
{
name: "empty predicate",
input: multipleNodesXML,
configs: []Config{
configs: []xpath.Config{
{
Selection: "/Device[@NonExisting=true]",
Fields: map[string]string{"value": "number(Value)"},
@@ -1354,7 +1355,7 @@ func TestTestCases(t *testing.T) {
Format: fileformat,
ProtobufMessageDef: pbmsgdef,
ProtobufMessageType: pbmsgtype,
Configs: []Config{*cfg},
Configs: []xpath.Config{*cfg},
Log: testutil.Logger{Name: "parsers.xml"},
}
require.NoError(t, parser.Init())
@@ -1380,13 +1381,13 @@ func TestProtobufImporting(t *testing.T) {
ProtobufMessageDef: "person.proto",
ProtobufMessageType: "importtest.Person",
ProtobufImportPaths: []string{"testcases/protos"},
Configs: []Config{},
Configs: []xpath.Config{},
Log: testutil.Logger{Name: "parsers.protobuf"},
}
require.NoError(t, parser.Init())
}

func loadTestConfiguration(filename string) (*Config, []string, error) {
func loadTestConfiguration(filename string) (*xpath.Config, []string, error) {
buf, err := os.ReadFile(filename)
if err != nil {
return nil, nil, err
@@ -1399,7 +1400,7 @@ func loadTestConfiguration(filename string) (*Config, []string, error) {
header = append(header, line)
}
}
cfg := Config{}
cfg := xpath.Config{}
err = toml.Unmarshal(buf, &cfg)
return &cfg, header, err
}