feat: Migrate json_v2 parser to new style (#11343)

parent a049175e58
commit 3f114e0921
@@ -30,7 +30,8 @@ import (
 	"github.com/influxdata/telegraf/plugins/inputs"
 	"github.com/influxdata/telegraf/plugins/outputs"
 	"github.com/influxdata/telegraf/plugins/parsers"
-	"github.com/influxdata/telegraf/plugins/parsers/json_v2"
+	"github.com/influxdata/telegraf/plugins/parsers/temporary/json_v2"
+	"github.com/influxdata/telegraf/plugins/parsers/temporary/xpath"
 	"github.com/influxdata/telegraf/plugins/processors"
 	"github.com/influxdata/telegraf/plugins/serializers"
 	"github.com/influxdata/toml"
@@ -1526,7 +1527,7 @@ func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config,
 	// for influx parser
 	c.getFieldString(tbl, "influx_parser_type", &pc.InfluxParserType)
 
-	//for XPath parser family
+	// for XPath parser family
 	if choice.Contains(pc.DataFormat, []string{"xml", "xpath_json", "xpath_msgpack", "xpath_protobuf"}) {
 		c.getFieldString(tbl, "xpath_protobuf_file", &pc.XPathProtobufFile)
 		c.getFieldString(tbl, "xpath_protobuf_type", &pc.XPathProtobufType)
@@ -1541,7 +1542,7 @@ func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config,
 		}
 		if xpathOK {
 			if subtbls, ok := node.([]*ast.Table); ok {
-				pc.XPathConfig = make([]parsers.XPathConfig, len(subtbls))
+				pc.XPathConfig = make([]xpath.Config, len(subtbls))
 				for i, subtbl := range subtbls {
 					subcfg := pc.XPathConfig[i]
 					c.getFieldString(subtbl, "metric_name", &subcfg.MetricQuery)
@@ -1565,10 +1566,10 @@ func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config,
 		}
 	}
 
-	//for JSONPath parser
+	// for JSON_v2 parser
 	if node, ok := tbl.Fields["json_v2"]; ok {
 		if metricConfigs, ok := node.([]*ast.Table); ok {
-			pc.JSONV2Config = make([]parsers.JSONV2Config, len(metricConfigs))
+			pc.JSONV2Config = make([]json_v2.Config, len(metricConfigs))
 			for i, metricConfig := range metricConfigs {
 				mc := pc.JSONV2Config[i]
 				c.getFieldString(metricConfig, "measurement_name", &mc.MeasurementName)
@@ -1586,7 +1587,7 @@ func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config,
 				if objectconfigs, ok := metricConfig.Fields["object"]; ok {
 					if objectconfigs, ok := objectconfigs.([]*ast.Table); ok {
 						for _, objectConfig := range objectconfigs {
-							var o json_v2.JSONObject
+							var o json_v2.Object
 							c.getFieldString(objectConfig, "path", &o.Path)
 							c.getFieldBool(objectConfig, "optional", &o.Optional)
 							c.getFieldString(objectConfig, "timestamp_key", &o.TimestampKey)
@@ -1744,25 +1745,43 @@ func (c *Config) buildOutput(name string, tbl *ast.Table) (*models.OutputConfig,
 
 func (c *Config) missingTomlField(_ reflect.Type, key string) error {
 	switch key {
-	case "alias", "carbon2_format", "carbon2_sanitize_replace_char", "collectd_auth_file",
-		"collectd_parse_multivalue", "collectd_security_level", "collectd_typesdb", "collection_jitter",
-		"collection_offset",
-		"csv_separator", "csv_header", "csv_column_prefix", "csv_timestamp_format",
-		"data_format", "data_type", "delay", "drop", "drop_original", "dropwizard_metric_registry_path",
-		"dropwizard_tag_paths", "dropwizard_tags_path", "dropwizard_time_format", "dropwizard_time_path",
-		"fielddrop", "fieldpass", "flush_interval", "flush_jitter", "form_urlencoded_tag_keys",
-		"grace", "graphite_separator", "graphite_tag_sanitize_mode", "graphite_tag_support",
-		"grok_custom_pattern_files", "grok_custom_patterns", "grok_named_patterns", "grok_patterns",
-		"grok_timezone", "grok_unique_timestamp", "influx_max_line_bytes", "influx_parser_type", "influx_sort_fields",
-		"influx_uint_support", "interval", "json_timestamp_units", "json_v2",
-		"lvm", "metric_batch_size", "metric_buffer_limit", "name_override", "name_prefix",
-		"name_suffix", "namedrop", "namepass", "order", "pass", "period", "precision",
-		"prefix", "prometheus_export_timestamp", "prometheus_ignore_timestamp", "prometheus_sort_metrics", "prometheus_string_as_label",
-		"separator", "splunkmetric_hec_routing", "splunkmetric_multimetric", "tag_keys",
-		"tagdrop", "tagexclude", "taginclude", "tagpass", "tags", "template", "templates",
-		"value_field_name", "wavefront_source_override", "wavefront_use_strict", "wavefront_disable_prefix_conversion":
-		// ignore fields that are common to all plugins.
+	// General options to ignore
+	case "alias",
+		"collection_jitter", "collection_offset",
+		"data_format", "delay", "drop", "drop_original",
+		"fielddrop", "fieldpass", "flush_interval", "flush_jitter",
+		"grace",
+		"interval",
+		"lvm", // What is this used for?
+		"metric_batch_size", "metric_buffer_limit",
+		"name_override", "name_prefix", "name_suffix", "namedrop", "namepass",
+		"order",
+		"pass", "period", "precision",
+		"tagdrop", "tagexclude", "taginclude", "tagpass", "tags":
+	// Parser options to ignore
+	case "data_type", "separator", "tag_keys",
+		// "templates", // shared with serializers
+		"collectd_auth_file", "collectd_parse_multivalue", "collectd_security_level", "collectd_typesdb",
+		"dropwizard_metric_registry_path", "dropwizard_tags_path", "dropwizard_tag_paths",
+		"dropwizard_time_format", "dropwizard_time_path",
+		"form_urlencoded_tag_keys",
+		"grok_custom_pattern_files", "grok_custom_patterns", "grok_named_patterns", "grok_patterns",
+		"grok_timezone", "grok_unique_timestamp",
+		"influx_parser_type",
+		"prometheus_ignore_timestamp", // not used anymore?
+		"value_field_name":
+
+	// Serializer options to ignore
+	case "prefix", "template", "templates",
+		"carbon2_format", "carbon2_sanitize_replace_char",
+		"csv_column_prefix", "csv_header", "csv_separator", "csv_timestamp_format",
+		"graphite_tag_sanitize_mode", "graphite_tag_support", "graphite_separator",
+		"influx_max_line_bytes", "influx_sort_fields", "influx_uint_support",
+		"json_timestamp_format", "json_timestamp_units",
+		"prometheus_export_timestamp", "prometheus_sort_metrics", "prometheus_string_as_label",
+		"splunkmetric_hec_routing", "splunkmetric_multimetric",
+		"wavefront_disable_prefix_conversion", "wavefront_source_override", "wavefront_use_strict":
 	default:
 		c.unusedFieldsMutex.Lock()
 		c.UnusedFields[key] = true
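
Note: the reorganized switch above only groups the keys the config loader deliberately ignores; any other key still falls through to the default branch and is recorded for a later "unused field" warning. A minimal standalone sketch of that pattern, using stand-in types rather than Telegraf's actual Config:

package main

import (
	"fmt"
	"sync"
)

// unusedTracker mimics the pattern above: known option names are silently
// ignored, anything else is recorded so it can be reported to the user later.
type unusedTracker struct {
	mu     sync.Mutex
	unused map[string]bool
}

func (t *unusedTracker) missingTomlField(key string) error {
	switch key {
	// General options to ignore
	case "alias", "interval", "name_override":
	// Parser options to ignore
	case "data_type", "tag_keys":
	// Serializer options to ignore
	case "prefix", "template":
	default:
		t.mu.Lock()
		t.unused[key] = true
		t.mu.Unlock()
	}
	return nil
}

func main() {
	t := &unusedTracker{unused: map[string]bool{}}
	t.missingTomlField("alias")    // known general option: ignored
	t.missingTomlField("typo_key") // unknown key: recorded
	fmt.Println(t.unused) // map[typo_key:true]
}
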
@@ -4,5 +4,6 @@ import (
 	//Blank imports for plugins to register themselves
 	_ "github.com/influxdata/telegraf/plugins/parsers/csv"
 	_ "github.com/influxdata/telegraf/plugins/parsers/json"
+	_ "github.com/influxdata/telegraf/plugins/parsers/json_v2"
 	_ "github.com/influxdata/telegraf/plugins/parsers/xpath"
 )
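
Note: the blank import matters because new-style parsers register themselves from an init() function (see the parsers.Add call further down); importing the package purely for its side effects is what makes the format available. A self-contained sketch of that mechanism with stand-in names, not Telegraf's real registry:

package main

import "fmt"

// creators is a stand-in for the parser registry; the real one maps a
// data-format name to a function producing a telegraf.Parser.
var creators = map[string]func(defaultMetricName string) fmt.Stringer{}

type demoParser struct{ metricName string }

func (p demoParser) String() string { return "parser for " + p.metricName }

// In Telegraf this init() lives in the parser package itself, so a blank
// "_" import of that package is enough to run it and register the format.
func init() {
	creators["json_v2_demo"] = func(name string) fmt.Stringer {
		return demoParser{metricName: name}
	}
}

func main() {
	fmt.Println(creators["json_v2_demo"]("file")) // parser for file
}
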
@@ -9,15 +9,17 @@ import (
 	"github.com/influxdata/telegraf"
 	"github.com/influxdata/telegraf/internal"
 	"github.com/influxdata/telegraf/metric"
+	"github.com/influxdata/telegraf/plugins/parsers"
+	"github.com/influxdata/telegraf/plugins/parsers/temporary/json_v2"
 	"github.com/tidwall/gjson"
 )
 
 // Parser adheres to the parser interface, contains the parser configuration, and data required to parse JSON
 type Parser struct {
-	// These struct fields are common for a parser
-	Configs     []Config
-	DefaultTags map[string]string
-	Log         telegraf.Logger
+	Configs           []json_v2.Config  `toml:"json_v2"`
+	DefaultMetricName string            `toml:"-"`
+	DefaultTags       map[string]string `toml:"-"`
+	Log               telegraf.Logger   `toml:"-"`
 
 	// **** The struct fields bellow this comment are used for processing indvidual configs ****
 
@@ -30,48 +32,13 @@ type Parser struct {
 	// iterateObjects dictates if ExpandArray function will handle objects
 	iterateObjects bool
 	// objectConfig contains the config for an object, some info is needed while iterating over the gjson results
-	objectConfig JSONObject
+	objectConfig json_v2.Object
 }
 
 type PathResult struct {
 	result gjson.Result
 	tag    bool
-	DataSet
-}
-
-type Config struct {
-	MeasurementName     string `toml:"measurement_name"`      // OPTIONAL
-	MeasurementNamePath string `toml:"measurement_name_path"` // OPTIONAL
-	TimestampPath       string `toml:"timestamp_path"`        // OPTIONAL
-	TimestampFormat     string `toml:"timestamp_format"`      // OPTIONAL, but REQUIRED when timestamp_path is defined
-	TimestampTimezone   string `toml:"timestamp_timezone"`    // OPTIONAL, but REQUIRES timestamp_path
-
-	Fields      []DataSet
-	Tags        []DataSet
-	JSONObjects []JSONObject
-}
-
-type DataSet struct {
-	Path     string `toml:"path"` // REQUIRED
-	Type     string `toml:"type"` // OPTIONAL, can't be set for tags they will always be a string
-	Rename   string `toml:"rename"`
-	Optional bool   `toml:"optional"` // Will suppress errors if there isn't a match with Path
-}
-
-type JSONObject struct {
-	Path               string            `toml:"path"`     // REQUIRED
-	Optional           bool              `toml:"optional"` // Will suppress errors if there isn't a match with Path
-	TimestampKey       string            `toml:"timestamp_key"`
-	TimestampFormat    string            `toml:"timestamp_format"`   // OPTIONAL, but REQUIRED when timestamp_path is defined
-	TimestampTimezone  string            `toml:"timestamp_timezone"` // OPTIONAL, but REQUIRES timestamp_path
-	Renames            map[string]string `toml:"renames"`
-	Fields             map[string]string `toml:"fields"`
-	Tags               []string          `toml:"tags"`
-	IncludedKeys       []string          `toml:"included_keys"`
-	ExcludedKeys       []string          `toml:"excluded_keys"`
-	DisablePrependKeys bool              `toml:"disable_prepend_keys"`
-	FieldPaths         []DataSet
-	TagPaths           []DataSet
-}
+	json_v2.DataSet
 }
 
 type MetricNode struct {
@@ -90,6 +57,16 @@ type MetricNode struct {
 	gjson.Result
 }
 
+func (p *Parser) Init() error {
+	// Propagate the default metric name to the configs in case it is not set there
+	for i, cfg := range p.Configs {
+		if cfg.MeasurementName == "" {
+			p.Configs[i].MeasurementName = p.DefaultMetricName
+		}
+	}
+	return nil
+}
+
 func (p *Parser) Parse(input []byte) ([]telegraf.Metric, error) {
 	// Only valid JSON is supported
 	if !gjson.Valid(string(input)) {
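
Note: Init() only has to fill in measurement names that the configuration left empty. A small stand-in sketch of that propagation logic (local types only, not the real telegraf interfaces):

package main

import "fmt"

// cfg mirrors just the field Init() touches; everything else is omitted.
type cfg struct{ MeasurementName string }

type parser struct {
	DefaultMetricName string
	Configs           []cfg
}

// initConfigs copies the behaviour shown above: configs without an explicit
// measurement_name inherit the plugin's default metric name.
func (p *parser) initConfigs() {
	for i, c := range p.Configs {
		if c.MeasurementName == "" {
			p.Configs[i].MeasurementName = p.DefaultMetricName
		}
	}
}

func main() {
	p := &parser{
		DefaultMetricName: "file",
		Configs:           []cfg{{MeasurementName: "explicit"}, {}},
	}
	p.initConfigs()
	fmt.Println(p.Configs) // [{explicit} {file}]
}
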
@@ -168,7 +145,7 @@ func (p *Parser) Parse(input []byte) ([]telegraf.Metric, error) {
 // processMetric will iterate over all 'field' or 'tag' configs and create metrics for each
 // A field/tag can either be a single value or an array of values, each resulting in its own metric
 // For multiple configs, a set of metrics is created from the cartesian product of each separate config
-func (p *Parser) processMetric(input []byte, data []DataSet, tag bool, timestamp time.Time) ([]telegraf.Metric, error) {
+func (p *Parser) processMetric(input []byte, data []json_v2.DataSet, tag bool, timestamp time.Time) ([]telegraf.Metric, error) {
 	if len(data) == 0 {
 		return nil, nil
 	}
@@ -410,7 +387,7 @@ func (p *Parser) existsInpathResults(index int) *PathResult {
 }
 
 // processObjects will iterate over all 'object' configs and create metrics for each
-func (p *Parser) processObjects(input []byte, objects []JSONObject, timestamp time.Time) ([]telegraf.Metric, error) {
+func (p *Parser) processObjects(input []byte, objects []json_v2.Object, timestamp time.Time) ([]telegraf.Metric, error) {
 	p.iterateObjects = true
 	var t []telegraf.Metric
 	for _, c := range objects {
@@ -678,3 +655,26 @@ func (p *Parser) checkResult(result gjson.Result, path string, optional bool) (b
 
 	return false, nil
 }
+
+func init() {
+	// Register all variants
+	parsers.Add("json_v2",
+		func(defaultMetricName string) telegraf.Parser {
+			return &Parser{DefaultMetricName: defaultMetricName}
+		},
+	)
+}
+
+// InitFromConfig is a compatibility function to construct the parser the old way
+func (p *Parser) InitFromConfig(config *parsers.Config) error {
+	p.DefaultMetricName = config.MetricName
+	p.DefaultTags = config.DefaultTags
+
+	// Convert the config formats which is a one-to-one copy
+	if len(config.JSONV2Config) > 0 {
+		p.Configs = make([]json_v2.Config, 0, len(config.JSONV2Config))
+		p.Configs = append(p.Configs, config.JSONV2Config...)
+	}
+
+	return p.Init()
+}
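
Note: the init() registration and InitFromConfig together give the parser two construction paths during the transition: callers using the registry get a ready-to-use Parser from the creator function, while code still building the legacy parsers.Config goes through the compatibility shim. A standalone sketch of the two paths with stand-in types (not the actual telegraf.Parser interface):

package main

import "fmt"

// oldConfig stands in for parsers.Config, the legacy "bag of options" struct.
type oldConfig struct {
	MetricName string
	Configs    []string // stand-in for []json_v2.Config
}

// parser stands in for the json_v2 Parser with its new-style fields.
type parser struct {
	DefaultMetricName string
	Configs           []string
}

func (p *parser) Init() error { return nil }

// New path: the registry creator only needs the default metric name.
func newStyle(defaultMetricName string) *parser {
	return &parser{DefaultMetricName: defaultMetricName}
}

// Old path: copy the relevant fields out of the legacy config, then Init().
func (p *parser) initFromConfig(c *oldConfig) error {
	p.DefaultMetricName = c.MetricName
	p.Configs = append(p.Configs, c.Configs...)
	return p.Init()
}

func main() {
	a := newStyle("file")
	b := &parser{}
	_ = b.initFromConfig(&oldConfig{MetricName: "file", Configs: []string{"cfg1"}})
	fmt.Println(a.DefaultMetricName, b.DefaultMetricName, b.Configs)
}
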
@@ -11,11 +11,12 @@ import (
 	"github.com/influxdata/telegraf/plugins/parsers/grok"
 	"github.com/influxdata/telegraf/plugins/parsers/influx"
 	"github.com/influxdata/telegraf/plugins/parsers/influx/influx_upstream"
-	"github.com/influxdata/telegraf/plugins/parsers/json_v2"
 	"github.com/influxdata/telegraf/plugins/parsers/logfmt"
 	"github.com/influxdata/telegraf/plugins/parsers/nagios"
 	"github.com/influxdata/telegraf/plugins/parsers/prometheus"
 	"github.com/influxdata/telegraf/plugins/parsers/prometheusremotewrite"
+	"github.com/influxdata/telegraf/plugins/parsers/temporary/json_v2"
+	"github.com/influxdata/telegraf/plugins/parsers/temporary/xpath"
 	"github.com/influxdata/telegraf/plugins/parsers/value"
 	"github.com/influxdata/telegraf/plugins/parsers/wavefront"
 )
@@ -182,15 +183,15 @@ type Config struct {
 	ValueFieldName string `toml:"value_field_name"`
 
 	// XPath configuration
 	XPathPrintDocument       bool     `toml:"xpath_print_document"`
 	XPathProtobufFile        string   `toml:"xpath_protobuf_file"`
 	XPathProtobufType        string   `toml:"xpath_protobuf_type"`
 	XPathProtobufImportPaths []string `toml:"xpath_protobuf_import_paths"`
 	XPathAllowEmptySelection bool     `toml:"xpath_allow_empty_selection"`
-	XPathConfig              []XPathConfig `toml:"xpath"`
+	XPathConfig              []xpath.Config `toml:"xpath"`
 
 	// JSONPath configuration
-	JSONV2Config []JSONV2Config `toml:"json_v2"`
+	JSONV2Config []json_v2.Config `toml:"json_v2"`
 
 	// Influx configuration
 	InfluxParserType string `toml:"influx_parser_type"`
@@ -199,34 +200,6 @@ type Config struct {
 	LogFmtTagKeys []string `toml:"logfmt_tag_keys"`
 }
 
-// XPathConfig definition for backward compatibitlity ONLY.
-// We need this here to avoid cyclic dependencies. However, we need
-// to move this to plugins/parsers/xpath once we deprecate parser
-// construction via `NewParser()`.
-type XPathConfig struct {
-	MetricQuery  string            `toml:"metric_name"`
-	Selection    string            `toml:"metric_selection"`
-	Timestamp    string            `toml:"timestamp"`
-	TimestampFmt string            `toml:"timestamp_format"`
-	Tags         map[string]string `toml:"tags"`
-	Fields       map[string]string `toml:"fields"`
-	FieldsInt    map[string]string `toml:"fields_int"`
-
-	FieldSelection  string `toml:"field_selection"`
-	FieldNameQuery  string `toml:"field_name"`
-	FieldValueQuery string `toml:"field_value"`
-	FieldNameExpand bool   `toml:"field_name_expansion"`
-
-	TagSelection  string `toml:"tag_selection"`
-	TagNameQuery  string `toml:"tag_name"`
-	TagValueQuery string `toml:"tag_value"`
-	TagNameExpand bool   `toml:"tag_name_expansion"`
-}
-
-type JSONV2Config struct {
-	json_v2.Config
-}
-
 // NewParser returns a Parser interface based on the given config.
 func NewParser(config *Config) (Parser, error) {
 	var err error
@@ -285,8 +258,6 @@ func NewParser(config *Config) (Parser, error) {
 		)
 	case "prometheusremotewrite":
 		parser, err = NewPrometheusRemoteWriteParser(config.DefaultTags)
-	case "json_v2":
-		parser, err = NewJSONPathParser(config.JSONV2Config)
 	default:
 		creator, found := Parsers[config.DataFormat]
 		if !found {
@@ -422,23 +393,3 @@ func NewPrometheusRemoteWriteParser(defaultTags map[string]string) (Parser, erro
 		DefaultTags: defaultTags,
 	}, nil
 }
-
-func NewJSONPathParser(jsonv2config []JSONV2Config) (Parser, error) {
-	configs := make([]json_v2.Config, len(jsonv2config))
-	for i, cfg := range jsonv2config {
-		configs[i].MeasurementName = cfg.MeasurementName
-		configs[i].MeasurementNamePath = cfg.MeasurementNamePath
-
-		configs[i].TimestampPath = cfg.TimestampPath
-		configs[i].TimestampFormat = cfg.TimestampFormat
-		configs[i].TimestampTimezone = cfg.TimestampTimezone
-
-		configs[i].Fields = cfg.Fields
-		configs[i].Tags = cfg.Tags
-
-		configs[i].JSONObjects = cfg.JSONObjects
-	}
-	return &json_v2.Parser{
-		Configs: configs,
-	}, nil
-}
@@ -0,0 +1,40 @@
+package json_v2
+
+// Config definition for backward compatibility ONLY.
+// We need this here to avoid cyclic dependencies. However, we need
+// to move this to plugins/parsers/json_v2 once we deprecate parser
+// construction via `NewParser()`.
+type Config struct {
+	MeasurementName     string `toml:"measurement_name"`      // OPTIONAL
+	MeasurementNamePath string `toml:"measurement_name_path"` // OPTIONAL
+	TimestampPath       string `toml:"timestamp_path"`        // OPTIONAL
+	TimestampFormat     string `toml:"timestamp_format"`      // OPTIONAL, but REQUIRED when timestamp_path is defined
+	TimestampTimezone   string `toml:"timestamp_timezone"`    // OPTIONAL, but REQUIRES timestamp_path
+
+	Fields      []DataSet `toml:"field"`
+	Tags        []DataSet `toml:"tag"`
+	JSONObjects []Object  `toml:"object"`
+}
+
+type DataSet struct {
+	Path     string `toml:"path"` // REQUIRED
+	Type     string `toml:"type"` // OPTIONAL, can't be set for tags they will always be a string
+	Rename   string `toml:"rename"`
+	Optional bool   `toml:"optional"` // Will suppress errors if there isn't a match with Path
+}
+
+type Object struct {
+	Path               string            `toml:"path"`     // REQUIRED
+	Optional           bool              `toml:"optional"` // Will suppress errors if there isn't a match with Path
+	TimestampKey       string            `toml:"timestamp_key"`
+	TimestampFormat    string            `toml:"timestamp_format"`   // OPTIONAL, but REQUIRED when timestamp_path is defined
+	TimestampTimezone  string            `toml:"timestamp_timezone"` // OPTIONAL, but REQUIRES timestamp_path
+	Renames            map[string]string `toml:"renames"`
+	Fields             map[string]string `toml:"fields"`
+	Tags               []string          `toml:"tags"`
+	IncludedKeys       []string          `toml:"included_keys"`
+	ExcludedKeys       []string          `toml:"excluded_keys"`
+	DisablePrependKeys bool              `toml:"disable_prepend_keys"`
+	FieldPaths         []DataSet         `toml:"field"`
+	TagPaths           []DataSet         `toml:"tag"`
+}
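
Note: the toml:"field", toml:"tag" and toml:"object" tags are what tie this struct to the corresponding sub-tables of a json_v2 parser configuration. A hedged sketch of that mapping using the influxdata/toml package this commit already imports, but with local stand-in structs and only a couple of fields; it assumes the library unmarshals array-of-tables into struct slices the same way here:

package main

import (
	"fmt"

	"github.com/influxdata/toml"
)

// dataSet and config are local stand-ins mirroring the toml tags above.
type dataSet struct {
	Path   string `toml:"path"`
	Rename string `toml:"rename"`
}

type config struct {
	MeasurementName string    `toml:"measurement_name"`
	Fields          []dataSet `toml:"field"`
	Tags            []dataSet `toml:"tag"`
}

func main() {
	// A fragment shaped like the body of one json_v2 config table.
	doc := []byte(`
measurement_name = "device"

[[field]]
  path = "status.temperature"
  rename = "temp"

[[tag]]
  path = "status.id"
`)
	var cfg config
	if err := toml.Unmarshal(doc, &cfg); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", cfg)
}
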
@@ -0,0 +1,25 @@
+package xpath
+
+// Config definition for backward compatibility ONLY.
+// We need this here to avoid cyclic dependencies. However, we need
+// to move this to plugins/parsers/xpath once we deprecate parser
+// construction via `NewParser()`.
+type Config struct {
+	MetricQuery  string            `toml:"metric_name"`
+	Selection    string            `toml:"metric_selection"`
+	Timestamp    string            `toml:"timestamp"`
+	TimestampFmt string            `toml:"timestamp_format"`
+	Tags         map[string]string `toml:"tags"`
+	Fields       map[string]string `toml:"fields"`
+	FieldsInt    map[string]string `toml:"fields_int"`
+
+	FieldSelection  string `toml:"field_selection"`
+	FieldNameQuery  string `toml:"field_name"`
+	FieldValueQuery string `toml:"field_value"`
+	FieldNameExpand bool   `toml:"field_name_expansion"`
+
+	TagSelection  string `toml:"tag_selection"`
+	TagNameQuery  string `toml:"tag_name"`
+	TagValueQuery string `toml:"tag_value"`
+	TagNameExpand bool   `toml:"tag_name_expansion"`
+}
@@ -13,6 +13,7 @@ import (
 	"github.com/influxdata/telegraf/internal"
 	"github.com/influxdata/telegraf/metric"
 	"github.com/influxdata/telegraf/plugins/parsers"
+	"github.com/influxdata/telegraf/plugins/parsers/temporary/xpath"
 )
 
 type dataNode interface{}
@@ -32,7 +33,7 @@ type Parser struct {
 	ProtobufImportPaths []string          `toml:"xpath_protobuf_import_paths"`
 	PrintDocument       bool              `toml:"xpath_print_document"`
 	AllowEmptySelection bool              `toml:"xpath_allow_empty_selection"`
-	Configs             []Config          `toml:"xpath"`
+	Configs             []xpath.Config    `toml:"xpath"`
 	DefaultMetricName   string            `toml:"-"`
 	DefaultTags         map[string]string `toml:"-"`
 	Log                 telegraf.Logger   `toml:"-"`
@@ -40,12 +41,6 @@ type Parser struct {
 	document dataDocument
 }
 
-// Config definition
-// This should be replaced by the actual definition once
-// the compatibitlity-code is removed.
-// Please check plugins/parsers/registry.go for now.
-type Config parsers.XPathConfig
-
 func (p *Parser) Init() error {
 	switch p.Format {
 	case "", "xml":
@@ -139,7 +134,7 @@ func (p *Parser) SetDefaultTags(tags map[string]string) {
 	p.DefaultTags = tags
 }
 
-func (p *Parser) parseQuery(starttime time.Time, doc, selected dataNode, config Config) (telegraf.Metric, error) {
+func (p *Parser) parseQuery(starttime time.Time, doc, selected dataNode, config xpath.Config) (telegraf.Metric, error) {
 	var timestamp time.Time
 	var metricname string
 
@@ -552,11 +547,8 @@ func (p *Parser) InitFromConfig(config *parsers.Config) error {
 
 	// Convert the config formats which is a one-to-one copy
 	if len(config.XPathConfig) > 0 {
-		p.Configs = make([]Config, 0, len(config.XPathConfig))
-		for _, cfg := range config.XPathConfig {
-			config := Config(cfg)
-			p.Configs = append(p.Configs, config)
-		}
+		p.Configs = make([]xpath.Config, 0, len(config.XPathConfig))
+		p.Configs = append(p.Configs, config.XPathConfig...)
 	}
 
 	return p.Init()
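
Note: the per-element conversion loop can be dropped because the registry's XPathConfig field and the parser's Configs now share the same element type (xpath.Config), so a single append with the spread operator copies the slice. A tiny standalone illustration of why the loop was needed before and is not anymore:

package main

import "fmt"

type config struct{ Selection string }

// alias has the same underlying type but is a distinct named type, which is
// roughly the situation before this change (two separate Config types).
type alias config

func main() {
	src := []alias{{Selection: "/Device"}}

	// Before: distinct element types force an element-wise conversion.
	oldStyle := make([]config, 0, len(src))
	for _, c := range src {
		oldStyle = append(oldStyle, config(c))
	}

	// After: identical element types allow a direct spread append.
	shared := []config{{Selection: "/Device"}}
	newStyle := make([]config, 0, len(shared))
	newStyle = append(newStyle, shared...)

	fmt.Println(oldStyle, newStyle)
}
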
@@ -10,6 +10,7 @@ import (
 
 	"github.com/influxdata/telegraf"
 	"github.com/influxdata/telegraf/plugins/parsers/influx"
+	"github.com/influxdata/telegraf/plugins/parsers/temporary/xpath"
 	"github.com/influxdata/telegraf/testutil"
 	"github.com/influxdata/toml"
 
@@ -106,14 +107,14 @@ func TestParseInvalidXML(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 		defaultTags map[string]string
 		expectedError string
 	}{
 		{
 			name: "invalid XML (missing close tag)",
 			input: invalidXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					MetricQuery: "test",
 					Timestamp: "/Device_1/Timestamp_unix",
@@ -145,14 +146,14 @@ func TestInvalidTypeQueriesFail(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 		defaultTags map[string]string
 		expectedError string
 	}{
 		{
 			name: "invalid field (int) type",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 					FieldsInt: map[string]string{
@@ -186,14 +187,14 @@ func TestInvalidTypeQueries(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 		defaultTags map[string]string
 		expected telegraf.Metric
 	}{
 		{
 			name: "invalid field type (number)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 					Fields: map[string]string{
@@ -214,7 +215,7 @@
 		{
 			name: "invalid field type (boolean)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 					Fields: map[string]string{
@@ -256,14 +257,14 @@ func TestParseTimestamps(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 		defaultTags map[string]string
 		expected telegraf.Metric
 	}{
 		{
 			name: "parse timestamp (no fmt)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 				},
@@ -279,7 +280,7 @@
 		{
 			name: "parse timestamp (unix)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 					TimestampFmt: "unix",
@@ -296,7 +297,7 @@
 		{
 			name: "parse timestamp (unix_ms)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix_ms",
 					TimestampFmt: "unix_ms",
@@ -313,7 +314,7 @@
 		{
 			name: "parse timestamp (unix_us)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix_us",
 					TimestampFmt: "unix_us",
@@ -330,7 +331,7 @@
 		{
 			name: "parse timestamp (unix_us)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix_ns",
 					TimestampFmt: "unix_ns",
@@ -347,7 +348,7 @@
 		{
 			name: "parse timestamp (RFC3339)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_iso",
 					TimestampFmt: "2006-01-02T15:04:05Z",
@@ -385,14 +386,14 @@ func TestParseSingleValues(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 		defaultTags map[string]string
 		expected telegraf.Metric
 	}{
 		{
 			name: "parse scalar values as string fields",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 					Fields: map[string]string{
@@ -419,7 +420,7 @@
 		{
 			name: "parse scalar values as typed fields (w/o int)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 					Fields: map[string]string{
@@ -446,7 +447,7 @@
 		{
 			name: "parse values as typed fields (w/ int)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 					Fields: map[string]string{
@@ -475,7 +476,7 @@
 		{
 			name: "parse substring values",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 					Fields: map[string]string{
@@ -498,7 +499,7 @@
 		{
 			name: "parse substring values (typed)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 					Fields: map[string]string{
@@ -521,7 +522,7 @@
 		{
 			name: "parse substring values (typed int)",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 					FieldsInt: map[string]string{
@@ -544,7 +545,7 @@
 		{
 			name: "parse tags",
 			input: singleMetricValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix",
 					Tags: map[string]string{
@@ -588,14 +589,14 @@ func TestParseSingleAttributes(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 		defaultTags map[string]string
 		expected telegraf.Metric
 	}{
 		{
 			name: "parse attr timestamp (unix)",
 			input: singleMetricAttributesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix/@value",
 				},
@@ -611,7 +612,7 @@
 		{
 			name: "parse attr timestamp (RFC3339)",
 			input: singleMetricAttributesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_iso/@value",
 					TimestampFmt: "2006-01-02T15:04:05Z",
@@ -628,7 +629,7 @@
 		{
 			name: "parse attr as string fields",
 			input: singleMetricAttributesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix/@value",
 					Fields: map[string]string{
@@ -655,7 +656,7 @@
 		{
 			name: "parse attr as typed fields (w/o int)",
 			input: singleMetricAttributesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix/@value",
 					Fields: map[string]string{
@@ -682,7 +683,7 @@
 		{
 			name: "parse attr as typed fields (w/ int)",
 			input: singleMetricAttributesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix/@value",
 					Fields: map[string]string{
@@ -711,7 +712,7 @@
 		{
 			name: "parse attr substring",
 			input: singleMetricAttributesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix/@value",
 					Fields: map[string]string{
@@ -732,7 +733,7 @@
 		{
 			name: "parse attr tags",
 			input: singleMetricAttributesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix/@value",
 					Tags: map[string]string{
@@ -755,7 +756,7 @@
 		{
 			name: "parse attr bool",
 			input: singleMetricAttributesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Device_1/Timestamp_unix/@value",
 					Fields: map[string]string{
@@ -797,14 +798,14 @@ func TestParseMultiValues(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 		defaultTags map[string]string
 		expected telegraf.Metric
 	}{
 		{
 			name: "select values (float)",
 			input: singleMetricMultiValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Timestamp/@value",
 					Fields: map[string]string{
@@ -835,7 +836,7 @@
 		{
 			name: "select values (int)",
 			input: singleMetricMultiValuesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Timestamp: "/Timestamp/@value",
 					FieldsInt: map[string]string{
@@ -887,14 +888,14 @@ func TestParseMultiNodes(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 		defaultTags map[string]string
 		expected []telegraf.Metric
 	}{
 		{
 			name: "select all devices",
 			input: multipleNodesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Selection: "/Device",
 					Timestamp: "/Timestamp/@value",
@@ -1004,14 +1005,14 @@ func TestParseMetricQuery(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 		defaultTags map[string]string
 		expected telegraf.Metric
 	}{
 		{
 			name: "parse metric name query",
 			input: metricNameQueryXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					MetricQuery: "name(/Device_1/Metric/@*[1])",
 					Timestamp: "/Device_1/Timestamp_unix",
@@ -1033,7 +1034,7 @@
 		{
 			name: "parse metric name constant",
 			input: metricNameQueryXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					MetricQuery: "'the_metric'",
 					Timestamp: "/Device_1/Timestamp_unix",
@@ -1076,13 +1077,13 @@ func TestParseErrors(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 		expected string
 	}{
 		{
 			name: "string metric name query",
 			input: metricNameQueryXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					MetricQuery: "arbitrary",
 					Timestamp: "/Device_1/Timestamp_unix",
@@ -1116,12 +1117,12 @@ func TestEmptySelection(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 	}{
 		{
 			name: "empty path",
 			input: multipleNodesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Selection: "/Device/NonExisting",
 					Fields: map[string]string{"value": "number(Value)"},
@@ -1133,7 +1134,7 @@
 		{
 			name: "empty pattern",
 			input: multipleNodesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Selection: "//NonExisting",
 					Fields: map[string]string{"value": "number(Value)"},
@@ -1145,7 +1146,7 @@
 		{
 			name: "empty axis",
 			input: multipleNodesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Selection: "/Device/child::NonExisting",
 					Fields: map[string]string{"value": "number(Value)"},
@@ -1157,7 +1158,7 @@
 		{
 			name: "empty predicate",
 			input: multipleNodesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Selection: "/Device[@NonExisting=true]",
 					Fields: map[string]string{"value": "number(Value)"},
@@ -1189,12 +1190,12 @@ func TestEmptySelectionAllowed(t *testing.T) {
 	var tests = []struct {
 		name string
 		input string
-		configs []Config
+		configs []xpath.Config
 	}{
 		{
 			name: "empty path",
 			input: multipleNodesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Selection: "/Device/NonExisting",
 					Fields: map[string]string{"value": "number(Value)"},
@@ -1206,7 +1207,7 @@
 		{
 			name: "empty pattern",
 			input: multipleNodesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Selection: "//NonExisting",
 					Fields: map[string]string{"value": "number(Value)"},
@@ -1218,7 +1219,7 @@
 		{
 			name: "empty axis",
 			input: multipleNodesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Selection: "/Device/child::NonExisting",
 					Fields: map[string]string{"value": "number(Value)"},
@@ -1230,7 +1231,7 @@
 		{
 			name: "empty predicate",
 			input: multipleNodesXML,
-			configs: []Config{
+			configs: []xpath.Config{
 				{
 					Selection: "/Device[@NonExisting=true]",
 					Fields: map[string]string{"value": "number(Value)"},
@@ -1354,7 +1355,7 @@ func TestTestCases(t *testing.T) {
 				Format: fileformat,
 				ProtobufMessageDef: pbmsgdef,
 				ProtobufMessageType: pbmsgtype,
-				Configs: []Config{*cfg},
+				Configs: []xpath.Config{*cfg},
 				Log: testutil.Logger{Name: "parsers.xml"},
 			}
 			require.NoError(t, parser.Init())
@@ -1380,13 +1381,13 @@ func TestProtobufImporting(t *testing.T) {
 		ProtobufMessageDef: "person.proto",
 		ProtobufMessageType: "importtest.Person",
 		ProtobufImportPaths: []string{"testcases/protos"},
-		Configs: []Config{},
+		Configs: []xpath.Config{},
 		Log: testutil.Logger{Name: "parsers.protobuf"},
 	}
 	require.NoError(t, parser.Init())
 }
 
-func loadTestConfiguration(filename string) (*Config, []string, error) {
+func loadTestConfiguration(filename string) (*xpath.Config, []string, error) {
 	buf, err := os.ReadFile(filename)
 	if err != nil {
 		return nil, nil, err
@@ -1399,7 +1400,7 @@ func loadTestConfiguration(filename string) (*Config, []string, error) {
 			header = append(header, line)
 		}
 	}
-	cfg := Config{}
+	cfg := xpath.Config{}
 	err = toml.Unmarshal(buf, &cfg)
 	return &cfg, header, err
 }