package xpath

import (
	"errors"
	"fmt"
	"strconv"
	"strings"
	"time"

	"github.com/antchfx/jsonquery"
	path "github.com/antchfx/xpath"
	"github.com/doclambda/protobufquery"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/internal"
	"github.com/influxdata/telegraf/metric"
	"github.com/influxdata/telegraf/models"
	"github.com/influxdata/telegraf/plugins/parsers"
	"github.com/influxdata/telegraf/plugins/parsers/temporary/xpath"
)
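
// dataNode is an opaque handle to a node of a parsed document; the concrete
// type depends on the dataDocument implementation that produced it.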
type dataNode interface{}
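
// dataDocument abstracts the supported document formats (XML, JSON,
// MessagePack and protocol-buffers) behind common parse, query and
// navigation primitives.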
type dataDocument interface {
	Parse(buf []byte) (dataNode, error)
	QueryAll(node dataNode, expr string) ([]dataNode, error)
	CreateXPathNavigator(node dataNode) path.NodeNavigator
	GetNodePath(node, relativeTo dataNode, sep string) string
	OutputXML(node dataNode) string
}
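
// Parser is the common xpath-style parser behind the "xml", "xpath_json",
// "xpath_msgpack" and "xpath_protobuf" data-formats. A minimal programmatic
// setup might look like the sketch below; only the Selection field of
// xpath.Config is used here, and logger and payload are placeholders:
//
//	p := &Parser{
//		Format:            "xml",
//		DefaultMetricName: "example",
//		Configs:           []xpath.Config{{Selection: "//measurement"}},
//		Log:               logger,
//	}
//	if err := p.Init(); err != nil {
//		// handle error
//	}
//	metrics, err := p.Parse(payload)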
type Parser struct {
	Format              string            `toml:"-"`
	ProtobufMessageDef  string            `toml:"xpath_protobuf_file"`
	ProtobufMessageType string            `toml:"xpath_protobuf_type"`
	ProtobufImportPaths []string          `toml:"xpath_protobuf_import_paths"`
	PrintDocument       bool              `toml:"xpath_print_document"`
	AllowEmptySelection bool              `toml:"xpath_allow_empty_selection"`
	NativeTypes         bool              `toml:"xpath_native_types"`
	Configs             []xpath.Config    `toml:"xpath"`
	DefaultMetricName   string            `toml:"-"`
	DefaultTags         map[string]string `toml:"-"`
	Log                 telegraf.Logger   `toml:"-"`

	// Required for backward compatibility
	ConfigsXML     []xpath.Config `toml:"xml" deprecated:"1.23.1;use 'xpath' instead"`
	ConfigsJSON    []xpath.Config `toml:"xpath_json"`
	ConfigsMsgPack []xpath.Config `toml:"xpath_msgpack"`
	ConfigsProto   []xpath.Config `toml:"xpath_protobuf"`

	document dataDocument
}
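
// Init sets up the backing document implementation for the configured
// data-format, migrates the deprecated per-format config tables into Configs
// and fills in default values.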
func (p *Parser) Init() error {
	switch p.Format {
	case "", "xml":
		p.document = &xmlDocument{}

		// Required for backward compatibility
		if len(p.ConfigsXML) > 0 {
			p.Configs = append(p.Configs, p.ConfigsXML...)
			models.PrintOptionDeprecationNotice(telegraf.Warn, "parsers.xpath", "xml", telegraf.DeprecationInfo{
				Since:     "1.23.1",
				RemovalIn: "2.0.0",
				Notice:    "use 'xpath' instead",
			})
		}
	case "xpath_json":
		p.document = &jsonDocument{}

		// Required for backward compatibility
		if len(p.ConfigsJSON) > 0 {
			p.Configs = append(p.Configs, p.ConfigsJSON...)
			models.PrintOptionDeprecationNotice(telegraf.Warn, "parsers.xpath", "xpath_json", telegraf.DeprecationInfo{
				Since:     "1.23.1",
				RemovalIn: "2.0.0",
				Notice:    "use 'xpath' instead",
			})
		}
	case "xpath_msgpack":
		p.document = &msgpackDocument{}

		// Required for backward compatibility
		if len(p.ConfigsMsgPack) > 0 {
			p.Configs = append(p.Configs, p.ConfigsMsgPack...)
			models.PrintOptionDeprecationNotice(telegraf.Warn, "parsers.xpath", "xpath_msgpack", telegraf.DeprecationInfo{
				Since:     "1.23.1",
				RemovalIn: "2.0.0",
				Notice:    "use 'xpath' instead",
			})
		}
	case "xpath_protobuf":
		pbdoc := protobufDocument{
			MessageDefinition: p.ProtobufMessageDef,
			MessageType:       p.ProtobufMessageType,
			ImportPaths:       p.ProtobufImportPaths,
			Log:               p.Log,
		}
		if err := pbdoc.Init(); err != nil {
			return err
		}
		p.document = &pbdoc

		// Required for backward compatibility
		if len(p.ConfigsProto) > 0 {
			p.Configs = append(p.Configs, p.ConfigsProto...)
			models.PrintOptionDeprecationNotice(telegraf.Warn, "parsers.xpath", "xpath_protobuf", telegraf.DeprecationInfo{
				Since:     "1.23.1",
				RemovalIn: "2.0.0",
				Notice:    "use 'xpath' instead",
			})
		}
	default:
		return fmt.Errorf("unknown data-format %q for xpath parser", p.Format)
	}

	// Make sure we do have a metric name
	if p.DefaultMetricName == "" {
		return errors.New("missing default metric name")
	}

	// Update the configs with default values
	for i, config := range p.Configs {
		if config.Selection == "" {
			config.Selection = "/"
		}
		if config.TimestampFmt == "" {
			config.TimestampFmt = "unix"
		}
		p.Configs[i] = config
	}

	return nil
}
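
// Parse parses the given payload and creates one metric per node selected by
// each configured xpath config.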
func (p *Parser) Parse(buf []byte) ([]telegraf.Metric, error) {
	t := time.Now()

	// Parse the input document
	doc, err := p.document.Parse(buf)
	if err != nil {
		return nil, err
	}
	if p.PrintDocument {
		p.Log.Debugf("XML document equivalent: %q", p.document.OutputXML(doc))
	}

	// Queries
	metrics := make([]telegraf.Metric, 0)
	p.Log.Debugf("Number of configs: %d", len(p.Configs))
	for _, config := range p.Configs {
		selectedNodes, err := p.document.QueryAll(doc, config.Selection)
		if err != nil {
			return nil, err
		}
		if (len(selectedNodes) < 1 || selectedNodes[0] == nil) && !p.AllowEmptySelection {
			p.debugEmptyQuery("metric selection", doc, config.Selection)
			return metrics, errors.New("cannot parse with empty selection node")
		}
		p.Log.Debugf("Number of selected metric nodes: %d", len(selectedNodes))

		for _, selected := range selectedNodes {
			m, err := p.parseQuery(t, doc, selected, config)
			if err != nil {
				return metrics, err
			}

			metrics = append(metrics, m)
		}
	}

	return metrics, nil
}
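
// ParseLine parses a single line of input; it expects the result to contain
// at most one metric and reports an error otherwise.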
func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
	metrics, err := p.Parse([]byte(line))
	if err != nil {
		return nil, err
	}

	switch len(metrics) {
	case 0:
		return nil, nil
	case 1:
		return metrics[0], nil
	default:
		return metrics[0], fmt.Errorf("cannot parse line with multiple (%d) metrics", len(metrics))
	}
}

func (p *Parser) SetDefaultTags(tags map[string]string) {
	p.DefaultTags = tags
}
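
// parseQuery creates a single metric from one selected node by resolving the
// metric name, timestamp, tags and fields defined in the given config.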
func (p *Parser) parseQuery(starttime time.Time, doc, selected dataNode, config xpath.Config) (telegraf.Metric, error) {
	var timestamp time.Time
	var metricname string

	// Determine the metric name. If a query was specified use its result,
	// otherwise fall back to the default metric name.
	metricname = p.DefaultMetricName
	if len(config.MetricQuery) > 0 {
		v, err := p.executeQuery(doc, selected, config.MetricQuery)
		if err != nil {
			return nil, fmt.Errorf("failed to query metric name: %v", err)
		}
		var ok bool
		if metricname, ok = v.(string); !ok {
			if v == nil {
				p.Log.Infof("Hint: Empty metric-name-node. If you wanted to set a constant please use `metric_name = \"'name'\"`.")
			}
			return nil, fmt.Errorf("failed to query metric name: query result is of type %T not 'string'", v)
		}
	}

	// By default take the time the parser was invoked and override the value
	// with the queried timestamp if an expression was specified.
	timestamp = starttime
	if len(config.Timestamp) > 0 {
		v, err := p.executeQuery(doc, selected, config.Timestamp)
		if err != nil {
			return nil, fmt.Errorf("failed to query timestamp: %v", err)
		}
		if v != nil {
			timestamp, err = internal.ParseTimestamp(config.TimestampFmt, v, "")
			if err != nil {
				return nil, fmt.Errorf("failed to parse timestamp: %w", err)
			}
		}
	}

	// Query tags and add default ones
	tags := make(map[string]string)
	for name, query := range config.Tags {
		// Execute the query and cast the returned values into strings
		v, err := p.executeQuery(doc, selected, query)
		if err != nil {
			return nil, fmt.Errorf("failed to query tag '%s': %v", name, err)
		}
		switch v := v.(type) {
		case string:
			tags[name] = v
		case bool:
			tags[name] = strconv.FormatBool(v)
		case float64:
			tags[name] = strconv.FormatFloat(v, 'G', -1, 64)
		case nil:
			continue
		default:
			return nil, fmt.Errorf("unknown format '%T' for tag '%s'", v, name)
		}
	}

	// Handle the tag batch definitions if any.
	if len(config.TagSelection) > 0 {
		tagnamequery := "name()"
		tagvaluequery := "."
		if len(config.TagNameQuery) > 0 {
			tagnamequery = config.TagNameQuery
		}
		if len(config.TagValueQuery) > 0 {
			tagvaluequery = config.TagValueQuery
		}

		// Query all tags
		selectedTagNodes, err := p.document.QueryAll(selected, config.TagSelection)
		if err != nil {
			return nil, err
		}
		p.Log.Debugf("Number of selected tag nodes: %d", len(selectedTagNodes))
		if len(selectedTagNodes) > 0 && selectedTagNodes[0] != nil {
			for _, selectedtag := range selectedTagNodes {
				n, err := p.executeQuery(doc, selectedtag, tagnamequery)
				if err != nil {
					return nil, fmt.Errorf("failed to query tag name with query '%s': %v", tagnamequery, err)
				}
				name, ok := n.(string)
				if !ok {
					return nil, fmt.Errorf("failed to query tag name with query '%s': result is not a string (%v)", tagnamequery, n)
				}
				v, err := p.executeQuery(doc, selectedtag, tagvaluequery)
				if err != nil {
					return nil, fmt.Errorf("failed to query tag value for '%s': %v", name, err)
				}

				if config.TagNameExpand {
					p := p.document.GetNodePath(selectedtag, selected, "_")
					if len(p) > 0 {
						name = p + "_" + name
					}
				}

				// Check if the tag name already exists and if so, append an index number.
				if _, ok := tags[name]; ok {
					for i := 1; ; i++ {
						p := name + "_" + strconv.Itoa(i)
						if _, ok := tags[p]; !ok {
							name = p
							break
						}
					}
				}

				// Convert the tag to be a string
				s, err := internal.ToString(v)
				if err != nil {
					return nil, fmt.Errorf("failed to convert tag value for '%s' to string (%v)", name, v)
				}
				tags[name] = s
			}
		} else {
			p.debugEmptyQuery("tag selection", selected, config.TagSelection)
		}
	}

	for name, v := range p.DefaultTags {
		tags[name] = v
	}

	// Query fields
	fields := make(map[string]interface{})
	for name, query := range config.FieldsInt {
		// Execute the query and cast the returned values into integers
		v, err := p.executeQuery(doc, selected, query)
		if err != nil {
			return nil, fmt.Errorf("failed to query field (int) '%s': %v", name, err)
		}
		switch v := v.(type) {
		case string:
			fields[name], err = strconv.ParseInt(v, 10, 64)
			if err != nil {
				return nil, fmt.Errorf("failed to parse field (int) '%s': %v", name, err)
			}
		case bool:
			fields[name] = int64(0)
			if v {
				fields[name] = int64(1)
			}
		case float64:
			fields[name] = int64(v)
		case nil:
			continue
		default:
			return nil, fmt.Errorf("unknown format '%T' for field (int) '%s'", v, name)
		}
	}

	for name, query := range config.Fields {
		// Execute the query and store the result in fields
		v, err := p.executeQuery(doc, selected, query)
		if err != nil {
			return nil, fmt.Errorf("failed to query field '%s': %v", name, err)
		}
		fields[name] = v
	}

	// Handle the field batch definitions if any.
	if len(config.FieldSelection) > 0 {
		fieldnamequery := "name()"
		fieldvaluequery := "."
		if len(config.FieldNameQuery) > 0 {
			fieldnamequery = config.FieldNameQuery
		}
		if len(config.FieldValueQuery) > 0 {
			fieldvaluequery = config.FieldValueQuery
		}

		// Query all fields
		selectedFieldNodes, err := p.document.QueryAll(selected, config.FieldSelection)
		if err != nil {
			return nil, err
		}
		p.Log.Debugf("Number of selected field nodes: %d", len(selectedFieldNodes))
		if len(selectedFieldNodes) > 0 && selectedFieldNodes[0] != nil {
			for _, selectedfield := range selectedFieldNodes {
				n, err := p.executeQuery(doc, selectedfield, fieldnamequery)
				if err != nil {
					return nil, fmt.Errorf("failed to query field name with query '%s': %v", fieldnamequery, err)
				}
				name, ok := n.(string)
				if !ok {
					return nil, fmt.Errorf("failed to query field name with query '%s': result is not a string (%v)", fieldnamequery, n)
				}
				v, err := p.executeQuery(doc, selectedfield, fieldvaluequery)
				if err != nil {
					return nil, fmt.Errorf("failed to query field value for '%s': %v", name, err)
				}

				if config.FieldNameExpand {
					p := p.document.GetNodePath(selectedfield, selected, "_")
					if len(p) > 0 {
						name = p + "_" + name
					}
				}

				// Check if the field name already exists and if so, append an index number.
				if _, ok := fields[name]; ok {
					for i := 1; ; i++ {
						p := name + "_" + strconv.Itoa(i)
						if _, ok := fields[p]; !ok {
							name = p
							break
						}
					}
				}

				fields[name] = v
			}
		} else {
			p.debugEmptyQuery("field selection", selected, config.FieldSelection)
		}
	}

	return metric.New(metricname, tags, fields, timestamp), nil
}
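
// executeQuery compiles and evaluates a single XPath expression. Queries
// starting with "/" are evaluated against the document root, all other
// queries against the currently selected node, e.g. (illustrative queries
// only):
//
//	p.executeQuery(doc, selected, "/device/name") // absolute: rooted at the document
//	p.executeQuery(doc, selected, "name")         // relative: rooted at the selected node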
func (p *Parser) executeQuery(doc, selected dataNode, query string) (r interface{}, err error) {
	// Check if the query is relative or absolute and set the root for the query
	root := selected
	if strings.HasPrefix(query, "/") {
		root = doc
	}

	// Compile the query
	expr, err := path.Compile(query)
	if err != nil {
		return nil, fmt.Errorf("failed to compile query '%s': %v", query, err)
	}

	// Evaluate the compiled expression and handle returned node-iterators
	// separately. Those iterators will be returned for queries directly
	// referencing a node (value or attribute).
	n := expr.Evaluate(p.document.CreateXPathNavigator(root))
	iter, ok := n.(*path.NodeIterator)
	if !ok {
		return n, nil
	}
	// We got an iterator, so take the first match and get the referenced
	// property. This will always be a string.
	if iter.MoveNext() {
		current := iter.Current()
		// If the data-format supports native types and native-type support is
		// enabled, return the native type of the data
		if p.NativeTypes {
			switch nn := current.(type) {
			case *jsonquery.NodeNavigator:
				return nn.GetValue(), nil
			case *protobufquery.NodeNavigator:
				return nn.GetValue(), nil
			}
		}
		// Fall back to the string representation of the value
		return iter.Current().Value(), nil
	}

	return nil, nil
}
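
// splitLastPathElement splits the query into the last path element and the
// remaining path, each returned as a full path. For example (results traced
// from the logic below):
//
//	splitLastPathElement("/a/b")            // -> ["/a"]
//	splitLastPathElement("/root/device[1]") // -> ["/root", "/root/device"]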
func splitLastPathElement(query string) []string {
	// This is a rudimentary xpath-parser that splits the path
	// into the last path element and the remaining path-part.
	// The last path element is then further split into
	// parts such as attributes or selectors. Each returned
	// element is a full path!

	// Nothing left
	if query == "" || query == "/" || query == "//" || query == "." {
		return []string{}
	}

	separatorIdx := strings.LastIndex(query, "/")
	if separatorIdx < 0 {
		query = "./" + query
		separatorIdx = 1
	}

	// For double slash we want to split at the first slash
	if separatorIdx > 0 && query[separatorIdx-1] == byte('/') {
		separatorIdx--
	}

	base := query[:separatorIdx]
	if base == "" {
		base = "/"
	}

	elements := make([]string, 1)
	elements[0] = base

	offset := separatorIdx
	if i := strings.Index(query[offset:], "::"); i >= 0 {
		// Check for axis operator
		offset += i
		elements = append(elements, query[:offset]+"::*")
	}

	if i := strings.Index(query[offset:], "["); i >= 0 {
		// Check for predicates
		offset += i
		elements = append(elements, query[:offset])
	} else if i := strings.Index(query[offset:], "@"); i >= 0 {
		// Check for attributes
		offset += i
		elements = append(elements, query[:offset])
	}

	return elements
}
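
// debugEmptyQuery logs why a selection returned no nodes by progressively
// stripping the last path element from the query and reporting how many
// nodes each shortened query matches.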
func (p *Parser) debugEmptyQuery(operation string, root dataNode, initialquery string) {
	if p.Log == nil {
		return
	}

	query := initialquery

	// We already know that the initial query returned no nodes
	p.Log.Debugf("got 0 nodes for query %q in %s", query, operation)
	for {
		parts := splitLastPathElement(query)
		if len(parts) < 1 {
			return
		}
		for i := len(parts) - 1; i >= 0; i-- {
			q := parts[i]
			nodes, err := p.document.QueryAll(root, q)
			if err != nil {
				p.Log.Debugf("executing query %q in %s failed: %v", q, operation, err)
				return
			}
			p.Log.Debugf("got %d nodes for query %q in %s", len(nodes), q, operation)
			if len(nodes) > 0 && nodes[0] != nil {
				return
			}
			query = parts[0]
		}
	}
}
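
// init registers the parser under all supported data-format names so it can
// be selected via `data_format` in a plugin configuration.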
func init() {
	// Register all variants
	parsers.Add("xml",
		func(defaultMetricName string) telegraf.Parser {
			return &Parser{
				Format:            "xml",
				DefaultMetricName: defaultMetricName,
			}
		},
	)
	parsers.Add("xpath_json",
		func(defaultMetricName string) telegraf.Parser {
			return &Parser{
				Format:            "xpath_json",
				DefaultMetricName: defaultMetricName,
			}
		},
	)
	parsers.Add("xpath_msgpack",
		func(defaultMetricName string) telegraf.Parser {
			return &Parser{
				Format:            "xpath_msgpack",
				DefaultMetricName: defaultMetricName,
			}
		},
	)
	parsers.Add("xpath_protobuf",
		func(defaultMetricName string) telegraf.Parser {
			return &Parser{
				Format:            "xpath_protobuf",
				DefaultMetricName: defaultMetricName,
			}
		},
	)
}

// InitFromConfig is a compatibility function to construct the parser the old way
func (p *Parser) InitFromConfig(config *parsers.Config) error {
	p.Format = config.DataFormat
	if p.Format == "xpath_protobuf" {
		p.ProtobufMessageDef = config.XPathProtobufFile
		p.ProtobufMessageType = config.XPathProtobufType
	}
	p.PrintDocument = config.XPathPrintDocument
	p.DefaultMetricName = config.MetricName
	p.DefaultTags = config.DefaultTags

	// Convert the xpath configs; this is a one-to-one copy
	if len(config.XPathConfig) > 0 {
		p.Configs = make([]xpath.Config, 0, len(config.XPathConfig))
		p.Configs = append(p.Configs, config.XPathConfig...)
	}

	return p.Init()
}