feat (parser.json_v2): Support defining field/tag tables within an object table (#9449)

Sebastian Spaink 2021-10-04 11:19:06 -07:00 committed by GitHub
parent c1f51b0645
commit df5c19c17e
17 changed files with 618 additions and 87 deletions


@ -1421,28 +1421,8 @@ func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config,
c.getFieldString(metricConfig, "timestamp_format", &mc.TimestampFormat)
c.getFieldString(metricConfig, "timestamp_timezone", &mc.TimestampTimezone)
if fieldConfigs, ok := metricConfig.Fields["field"]; ok {
if fieldConfigs, ok := fieldConfigs.([]*ast.Table); ok {
for _, fieldconfig := range fieldConfigs {
var f json_v2.DataSet
c.getFieldString(fieldconfig, "path", &f.Path)
c.getFieldString(fieldconfig, "rename", &f.Rename)
c.getFieldString(fieldconfig, "type", &f.Type)
mc.Fields = append(mc.Fields, f)
}
}
}
if fieldConfigs, ok := metricConfig.Fields["tag"]; ok {
if fieldConfigs, ok := fieldConfigs.([]*ast.Table); ok {
for _, fieldconfig := range fieldConfigs {
var t json_v2.DataSet
c.getFieldString(fieldconfig, "path", &t.Path)
c.getFieldString(fieldconfig, "rename", &t.Rename)
t.Type = "string"
mc.Tags = append(mc.Tags, t)
}
}
}
mc.Fields = getFieldSubtable(c, metricConfig)
mc.Tags = getTagSubtable(c, metricConfig)
if objectconfigs, ok := metricConfig.Fields["object"]; ok {
if objectconfigs, ok := objectconfigs.([]*ast.Table); ok {
@ -1458,6 +1438,10 @@ func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config,
c.getFieldStringSlice(objectConfig, "tags", &o.Tags)
c.getFieldStringMap(objectConfig, "renames", &o.Renames)
c.getFieldStringMap(objectConfig, "fields", &o.Fields)
o.FieldPaths = getFieldSubtable(c, objectConfig)
o.TagPaths = getTagSubtable(c, objectConfig)
mc.JSONObjects = append(mc.JSONObjects, o)
}
}
@ -1477,6 +1461,42 @@ func (c *Config) getParserConfig(name string, tbl *ast.Table) (*parsers.Config,
return pc, nil
}
func getFieldSubtable(c *Config, metricConfig *ast.Table) []json_v2.DataSet {
var fields []json_v2.DataSet
if fieldConfigs, ok := metricConfig.Fields["field"]; ok {
if fieldConfigs, ok := fieldConfigs.([]*ast.Table); ok {
for _, fieldconfig := range fieldConfigs {
var f json_v2.DataSet
c.getFieldString(fieldconfig, "path", &f.Path)
c.getFieldString(fieldconfig, "rename", &f.Rename)
c.getFieldString(fieldconfig, "type", &f.Type)
fields = append(fields, f)
}
}
}
return fields
}
func getTagSubtable(c *Config, metricConfig *ast.Table) []json_v2.DataSet {
var tags []json_v2.DataSet
if fieldConfigs, ok := metricConfig.Fields["tag"]; ok {
if fieldConfigs, ok := fieldConfigs.([]*ast.Table); ok {
for _, fieldconfig := range fieldConfigs {
var t json_v2.DataSet
c.getFieldString(fieldconfig, "path", &t.Path)
c.getFieldString(fieldconfig, "rename", &t.Rename)
t.Type = "string"
tags = append(tags, t)
}
}
}
return tags
}
// buildSerializer grabs the necessary entries from the ast.Table for creating
// a serializers.Serializer object, and creates it, which can then be added onto
// an Output object.

go.mod

@ -246,7 +246,7 @@ require (
github.com/stretchr/testify v1.7.0
github.com/tbrandon/mbserver v0.0.0-20170611213546-993e1772cc62
github.com/testcontainers/testcontainers-go v0.11.1
github.com/tidwall/gjson v1.8.0
github.com/tidwall/gjson v1.9.0
github.com/tidwall/match v1.0.3 // indirect
github.com/tidwall/pretty v1.1.0 // indirect
github.com/tinylib/msgp v1.1.6

go.sum

@ -1535,8 +1535,8 @@ github.com/tchap/go-patricia v2.2.6+incompatible/go.mod h1:bmLyhP68RS6kStMGxByiQ
github.com/tedsuo/ifrit v0.0.0-20180802180643-bea94bb476cc/go.mod h1:eyZnKCc955uh98WQvzOm0dgAeLnf2O0Rz0LPoC5ze+0=
github.com/testcontainers/testcontainers-go v0.11.1 h1:FiYsB83LSGbiawoV8TpAZGfcCUbtaeeg1SXqEKUxh08=
github.com/testcontainers/testcontainers-go v0.11.1/go.mod h1:/V0UVq+1e7NWYoqTPog179clf0Qp9TOyp4EcXaEFQz8=
github.com/tidwall/gjson v1.8.0 h1:Qt+orfosKn0rbNTZqHYDqBrmm3UDA4KRkv70fDzG+PQ=
github.com/tidwall/gjson v1.8.0/go.mod h1:5/xDoumyyDNerp2U36lyolv46b3uF/9Bu6OfyQ9GImk=
github.com/tidwall/gjson v1.9.0 h1:+Od7AE26jAaMgVC31cQV/Ope5iKXulNMflrlB7k+F9E=
github.com/tidwall/gjson v1.9.0/go.mod h1:5/xDoumyyDNerp2U36lyolv46b3uF/9Bu6OfyQ9GImk=
github.com/tidwall/match v1.0.3 h1:FQUVvBImDutD8wJLN6c5eMzWtjgONK9MwIBCOrUJKeE=
github.com/tidwall/match v1.0.3/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=


@ -1,10 +1,10 @@
# JSON Parser - Version 2
This parser takes valid JSON input and turns it into metrics. The query syntax supported is [GJSON Path Syntax](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md), you can go to this playground to test out your GJSON path here: https://gjson.dev/. You can find multiple examples under the `testdata` folder.
This parser takes valid JSON input and turns it into line protocol. The query syntax supported is [GJSON Path Syntax](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md), you can go to this playground to test out your GJSON path here: https://gjson.dev/. You can find multiple examples under the `testdata` folder.
## Configuration
You configure this parser by describing the metric you want by defining the fields and tags from the input. The configuration is divided into config sub-tables called `field`, `tag`, and `object`. In the example below you can see all the possible configuration keys you can define for each config table. In the sections that follow these configuration keys are defined in more detail.
You configure this parser by describing the line protocol you want by defining the fields and tags from the input. The configuration is divided into config sub-tables called `field`, `tag`, and `object`. In the example below you can see all the possible configuration keys you can define for each config table. In the sections that follow these configuration keys are defined in more detail.
**Example configuration:**
@ -19,27 +19,45 @@ You configure this parser by describing the metric you want by defining the fiel
timestamp_format = "" # A string with a valid timestamp format (see below for possible values)
timestamp_timezone = "" # A string with a valid timezone (see below for possible values)
[[inputs.file.json_v2.tag]]
path = "" # A string with valid GJSON path syntax
path = "" # A string with valid GJSON path syntax to a non-array/non-object value
rename = "new name" # A string with a new name for the tag key
[[inputs.file.json_v2.field]]
path = "" # A string with valid GJSON path syntax
path = "" # A string with valid GJSON path syntax to a non-array/non-object value
rename = "new name" # A string with a new name for the tag key
type = "int" # A string specifying the type (int,uint,float,string,bool)
[[inputs.file.json_v2.object]]
path = "" # A string with valid GJSON path syntax
path = "" # A string with valid GJSON path syntax, can include array's and object's
## Configuration to define what JSON keys should be used as timestamps ##
timestamp_key = "" # A JSON key (for a nested key, prepend the parent keys with underscores) to a valid timestamp
timestamp_format = "" # A string with a valid timestamp format (see below for possible values)
timestamp_timezone = "" # A string with a valid timezone (see below for possible values)
disable_prepend_keys = false (or true, just not both)
### Configuration to define what JSON keys should be included and how (field/tag) ###
tags = [] # List of JSON keys (for a nested key, prepend the parent keys with underscores) to be a tag instead of a field, when adding a JSON key in this list you don't have to define it in the included_keys list
included_keys = [] # List of JSON keys (for a nested key, prepend the parent keys with underscores) that should be only included in result
excluded_keys = [] # List of JSON keys (for a nested key, prepend the parent keys with underscores) that shouldn't be included in result
tags = [] # List of JSON keys (for a nested key, prepend the parent keys with underscores) to be a tag instead of a field
# When tag/field sub-tables are defined, they will be the only fields/tags included, along with any keys defined in the included_keys list.
# If the resulting values aren't included in the object/array returned by the root object path, they won't be included.
# You can define as many tag/field sub-tables as you want.
[[inputs.file.json_v2.object.tag]]
path = "" # # A string with valid GJSON path syntax, can include array's and object's
rename = "new name" # A string with a new name for the tag key
[[inputs.file.json_v2.object.field]]
path = "" # # A string with valid GJSON path syntax, can include array's and object's
rename = "new name" # A string with a new name for the tag key
type = "int" # A string specifying the type (int,uint,float,string,bool)
### Configuration to modify the resulting line protocol ###
disable_prepend_keys = false (or true, just not both)
[inputs.file.json_v2.object.renames] # A map of JSON keys (for a nested key, prepend the parent keys with underscores) with a new name for the tag key
key = "new name"
[inputs.file.json_v2.object.fields] # A map of JSON keys (for a nested key, prepend the parent keys with underscores) with a type (int,uint,float,string,bool)
key = "int"
```
---
### root config options
* **measurement_name (OPTIONAL)**: Will set the measurement name to the provided string.
@ -56,7 +74,7 @@ such as `America/New_York`, to `Local` to utilize the system timezone, or to `UT
### `field` and `tag` config options
`field` and `tag` represent the elements of [line protocol](https://docs.influxdata.com/influxdb/v2.0/reference/syntax/line-protocol/), which is used to define a `metric`. You can use the `field` and `tag` config tables to gather a single value or an array of values that all share the same type and name. With this you can add a field or tag to a metric from data stored anywhere in your JSON. If you define the GJSON path to return a single value then you will get a single resulting metric that contains the field/tag. If you define the GJSON path to return an array of values, then each field/tag will be put into a separate metric (you use the # character to retrieve JSON arrays, find examples [here](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md#arrays)).
`field` and `tag` represent the elements of [line protocol](https://docs.influxdata.com/influxdb/v2.0/reference/syntax/line-protocol/). You can use the `field` and `tag` config tables to gather a single value or an array of values that all share the same type and name. With this you can add a field or tag to a line protocol from data stored anywhere in your JSON. If you define the GJSON path to return a single value then you will get a single resulting line protocol that contains the field/tag. If you define the GJSON path to return an array of values, then each field/tag will be put into a separate line protocol (you use the # character to retrieve JSON arrays, find examples [here](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md#arrays)).
Note that objects are handled separately; therefore, if you provide a path that returns an object it will be ignored. You will need to use the `object` config table to parse objects, because `field` and `tag` don't handle relationships between data. Each `field` and `tag` you define is handled as a separate data point.
@ -70,26 +88,34 @@ The notable difference between `field` and `tag`, is that `tag` values will alwa
#### **field**
* **path (REQUIRED)**: You must define the path query that gathers the object with [GJSON Path Syntax](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md).
Using this field configuration you can gather non-array/non-object values. Note this acts as a global field when used with the `object` configuration: if you gather an array of values using `object`, then the field gathered will be added to each resulting line protocol without acknowledging its location in the original JSON. This is defined in TOML as an array table using double brackets.
* **path (REQUIRED)**: A string with valid GJSON path syntax to a non-array/non-object value
* **rename (OPTIONAL)**: You can define a string value to set the field name. If not defined it will use the trailing word from the provided query.
* **type (OPTIONAL)**: You can define a string value to set the desired type (float, int, uint, string, bool). If not defined it won't enforce a type and will default to the original type defined in the JSON (bool, float, or string).
#### **tag**
* **path (REQUIRED)**: You must define the path query that gathers the object with [GJSON Path Syntax](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md).
Using this tag configuration you can gather non-array/non-object values. Note this acts as a global tag when used with the `object` configuration: if you gather an array of values using `object`, then the tag gathered will be added to each resulting line protocol without acknowledging its location in the original JSON. This is defined in TOML as an array table using double brackets.
* **path (REQUIRED)**: A string with valid GJSON path syntax to a non-array/non-object value
* **rename (OPTIONAL)**: You can define a string value to set the tag name. If not defined it will use the trailing word from the provided query.
For good examples of using `field` and `tag` you can reference the following example configs:
* [fields_and_tags](testdata/fields_and_tags/telegraf.conf)
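
The following is a minimal sketch of a standalone `field`/`tag` configuration; the file name `example.json` and the JSON keys `name` and `temperature` are hypothetical and only illustrate the shape of the config:

```toml
[[inputs.file]]
  files = ["example.json"]          # hypothetical input file
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.tag]]
      path = "name"                 # a single non-array/non-object value gathered as a tag
    [[inputs.file.json_v2.field]]
      path = "temperature"          # a single value gathered as a field
      type = "float"                # enforce the float type
```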
---
### object
With the configuration section `object`, you can gather metrics from [JSON objects](https://www.w3schools.com/js/js_json_objects.asp).
With the configuration section `object`, you can gather values from [JSON objects](https://www.w3schools.com/js/js_json_objects.asp). This is defined in TOML as an array table using double brackets.
The following keys can be set for `object`:
#### The following keys can be set for `object`
* **path (REQUIRED)**: You must define the path query that gathers the object with [GJSON Path Syntax](https://github.com/tidwall/gjson/blob/v1.7.5/SYNTAX.md)
*Keys to define what JSON keys should be used as timestamps:*
* **timestamp_key (OPTIONAL)**: You can define a JSON key (for a nested key, prepend the parent keys with underscores) whose value will be set as the timestamp from the JSON input.
* **timestamp_format (OPTIONAL, but REQUIRED when timestamp_key is defined)**: Must be set to `unix`, `unix_ms`, `unix_us`, `unix_ns`, or
the Go "reference time" which is defined to be the specific time:
@ -97,22 +123,30 @@ the Go "reference time" which is defined to be the specific time:
* **timestamp_timezone (OPTIONAL, but REQUIRES timestamp_key)**: This option should be set to a
[Unix TZ value](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones),
such as `America/New_York`, to `Local` to utilize the system timezone, or to `UTC`. Defaults to `UTC`
* **disable_prepend_keys (OPTIONAL)**: Set to true to prevent resulting nested data from containing the parent key prepended to its key. **NOTE**: duplicate names can overwrite each other when this is enabled
* **included_keys (OPTIONAL)**: You can define a list of keys that should be the only data included in the metric; by default everything is included.
* **excluded_keys (OPTIONAL)**: You can define JSON keys to be excluded from the metric; for a nested key, prepend the parent keys with underscores
*Configuration to define what JSON keys should be included and how (field/tag):*
* **included_keys (OPTIONAL)**: You can define a list of keys that should be the only data included in the line protocol; by default everything is included.
* **excluded_keys (OPTIONAL)**: You can define JSON keys to be excluded from the line protocol; for a nested key, prepend the parent keys with underscores
* **tags (OPTIONAL)**: You can define JSON keys to be set as tags instead of fields; if you define a key that is an array or object then all nested values will become tags
* **renames (OPTIONAL)**: A table matching the JSON key with the desired name (as opposed to defaulting to the key); use names that include the prepended keys of its parent keys for nested results
* **fields (OPTIONAL)**: A table matching the JSON key with the desired type (int,string,bool,float); if you define a key that is an array or object then all nested values will become that type
* **field (OPTIONAL, defined in TOML as an array table using double brackets)**: Identical to the [field](#field) table you can define, but with two key differences: the path supports arrays and objects, and because it is defined under the object table it adheres to how the JSON is structured. Use this when you want a field to be added as it would be if it were in the included_keys list, but selected with GJSON path syntax.
* **tag (OPTIONAL, defined in TOML as an array table using double brackets)**: Identical to the [tag](#tag) table you can define, but with two key differences: the path supports arrays and objects, and because it is defined under the object table it adheres to how the JSON is structured. Use this when you want a tag to be added as it would be if it were in the included_keys list, but selected with GJSON path syntax (see the example after this list).
*Configuration to modify the resulting line protocol:*
* **disable_prepend_keys (OPTIONAL)**: Set to true to prevent resulting nested data from containing the parent key prepended to its key. **NOTE**: duplicate names can overwrite each other when this is enabled
* **renames (OPTIONAL, defined in TOML as a table using single brackets)**: A table matching the JSON key with the desired name (as opposed to defaulting to the key); use names that include the prepended keys of its parent keys for nested results
* **fields (OPTIONAL, defined in TOML as a table using single brackets)**: A table matching the JSON key with the desired type (int,string,bool,float); if you define a key that is an array or object then all nested values will become that type
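
As a concrete example, the `subfieldtag_in_object` test configuration added in this commit uses the new sub-tables to pull a field and several tags out of a nested array (the input JSON and expected output live under `testdata/subfieldtag_in_object`):

```toml
[[inputs.file]]
  files = ["./testdata/subfieldtag_in_object/input.json"]
  data_format = "json_v2"
  [[inputs.file.json_v2]]
    [[inputs.file.json_v2.object]]
      path = "root.station"                      # root object: an array of station objects
      [[inputs.file.json_v2.object.field]]
        path = "#.etd.0.estimate.0.minutes"      # nested array value gathered as a field
        type = "int"
      [[inputs.file.json_v2.object.tag]]
        path = "#.abbr"
        rename = "from_station"
      [[inputs.file.json_v2.object.tag]]
        path = "#.etd.0.abbreviation"
        rename = "to_station"
      [[inputs.file.json_v2.object.tag]]
        path = "#.etd.0.estimate.0.direction"
```

Against the input JSON in that folder this produces `file,from_station=COLM,to_station=ANTC,etd_estimate_direction=North etd_estimate_minutes=6i`; keys gathered through sub-tables still get their parent keys prepended unless they are renamed (as `from_station` and `to_station` are here) or `disable_prepend_keys` is set.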
## Arrays and Objects
The following describes the high-level approach when parsing arrays and objects:
**Array**: Every element in an array is treated as a *separate* metric
**Array**: Every element in an array is treated as a *separate* line protocol
**Object**: Every key/value in an object is treated as a *single* metric
**Object**: Every key/value in an object is treated as a *single* line protocol
When handling nested arrays and objects, the rules above continue to apply as the parser creates metrics. When an object has multiple arrays as values, the arrays will become separate metrics containing only non-array values from the object. Below you can see an example of this behavior, with an input JSON containing an array of book objects that has a nested array of characters.
When handling nested arrays and objects, the rules above continue to apply as the parser creates line protocol. When an object has multiple arrays as values, the arrays will become separate line protocol containing only non-array values from the object. Below you can see an example of this behavior, with an input JSON containing an array of book objects that has a nested array of characters.
Example JSON:
@ -157,7 +191,7 @@ Example configuration:
disable_prepend_keys = true
```
Expected metrics:
Expected line protocol:
```
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="A Long-expected Party"
@ -173,7 +207,7 @@ You can find more complicated examples under the folder `testdata`.
## Types
For each field you have the option to define the types for each metric. The following rules are in place for this configuration:
For each field you have the option to define the types. The following rules are in place for this configuration:
* If a type is explicitly defined, the parser will enforce this type and convert the data to the defined type if possible. If the type can't be converted then the parser will fail.
* If a type isn't defined, the parser will use the default type defined in the JSON (int, float, string)
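
For instance, a type can be enforced through the object-level `fields` map; a minimal sketch, where the key name `count` is hypothetical:

```toml
[[inputs.file.json_v2.object]]
  path = "@this"                   # parse the whole JSON document as a single object
  [inputs.file.json_v2.object.fields]
    # Explicitly typed: the value at "count" is converted to an integer field,
    # and parsing fails if the conversion is not possible.
    count = "int"
  # Keys not listed in the fields map keep the default type they have in the JSON.
```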


@ -13,6 +13,7 @@ import (
)
type Parser struct {
InputJSON []byte
Configs []Config
DefaultTags map[string]string
Log telegraf.Logger
@ -20,8 +21,16 @@ type Parser struct {
measurementName string
iterateObjects bool
iterateObjects bool
currentSettings JSONObject
pathResults []PathResult
}
type PathResult struct {
result gjson.Result
tag bool
DataSet
}
type Config struct {
@ -53,21 +62,30 @@ type JSONObject struct {
IncludedKeys []string `toml:"included_keys"` // OPTIONAL
ExcludedKeys []string `toml:"excluded_keys"` // OPTIONAL
DisablePrependKeys bool `toml:"disable_prepend_keys"` // OPTIONAL
FieldPaths []DataSet // OPTIONAL
TagPaths []DataSet // OPTIONAL
}
type MetricNode struct {
ParentIndex int
OutputName string
SetName string
Tag bool
DesiredType string // Can be "int", "uint", "float", "bool", "string"
/*
IncludeCollection is only used when processing objects and is responsible for containing the gjson results
found by the gjson paths provided in the FieldPaths and TagPaths configs.
*/
IncludeCollection *PathResult
Metric telegraf.Metric
gjson.Result
}
func (p *Parser) Parse(input []byte) ([]telegraf.Metric, error) {
p.InputJSON = input
// Only valid JSON is supported
if !gjson.Valid(string(input)) {
if !gjson.Valid(string(p.InputJSON)) {
return nil, fmt.Errorf("Invalid JSON provided, unable to parse")
}
@ -77,7 +95,7 @@ func (p *Parser) Parse(input []byte) ([]telegraf.Metric, error) {
// Measurement name configuration
p.measurementName = c.MeasurementName
if c.MeasurementNamePath != "" {
result := gjson.GetBytes(input, c.MeasurementNamePath)
result := gjson.GetBytes(p.InputJSON, c.MeasurementNamePath)
if !result.IsArray() && !result.IsObject() {
p.measurementName = result.String()
}
@ -86,7 +104,7 @@ func (p *Parser) Parse(input []byte) ([]telegraf.Metric, error) {
// Timestamp configuration
p.Timestamp = time.Now()
if c.TimestampPath != "" {
result := gjson.GetBytes(input, c.TimestampPath)
result := gjson.GetBytes(p.InputJSON, c.TimestampPath)
if !result.IsArray() && !result.IsObject() {
if c.TimestampFormat == "" {
err := fmt.Errorf("use of 'timestamp_query' requires 'timestamp_format'")
@ -101,17 +119,17 @@ func (p *Parser) Parse(input []byte) ([]telegraf.Metric, error) {
}
}
fields, err := p.processMetric(c.Fields, input, false)
fields, err := p.processMetric(c.Fields, false)
if err != nil {
return nil, err
}
tags, err := p.processMetric(c.Tags, input, true)
tags, err := p.processMetric(c.Tags, true)
if err != nil {
return nil, err
}
objects, err := p.processObjects(c.JSONObjects, input)
objects, err := p.processObjects(c.JSONObjects)
if err != nil {
return nil, err
}
@ -137,7 +155,7 @@ func (p *Parser) Parse(input []byte) ([]telegraf.Metric, error) {
// processMetric will iterate over all 'field' or 'tag' configs and create metrics for each
// A field/tag can either be a single value or an array of values, each resulting in its own metric
// For multiple configs, a set of metrics is created from the cartesian product of each separate config
func (p *Parser) processMetric(data []DataSet, input []byte, tag bool) ([]telegraf.Metric, error) {
func (p *Parser) processMetric(data []DataSet, tag bool) ([]telegraf.Metric, error) {
if len(data) == 0 {
return nil, nil
}
@ -149,7 +167,7 @@ func (p *Parser) processMetric(data []DataSet, input []byte, tag bool) ([]telegr
if c.Path == "" {
return nil, fmt.Errorf("GJSON path is required")
}
result := gjson.GetBytes(input, c.Path)
result := gjson.GetBytes(p.InputJSON, c.Path)
if result.IsObject() {
p.Log.Debugf("Found object in the path: %s, ignoring it please use 'object' to gather metrics from objects", c.Path)
@ -233,6 +251,9 @@ func (p *Parser) expandArray(result MetricNode) ([]telegraf.Metric, error) {
p.Log.Debugf("Found object in query ignoring it please use 'object' to gather metrics from objects")
return results, nil
}
if result.IncludeCollection == nil && (len(p.currentSettings.FieldPaths) > 0 || len(p.currentSettings.TagPaths) > 0) {
result.IncludeCollection = p.existsInpathResults(result.Index, result.Raw)
}
r, err := p.combineObject(result)
if err != nil {
return nil, err
@ -243,6 +264,9 @@ func (p *Parser) expandArray(result MetricNode) ([]telegraf.Metric, error) {
if result.IsArray() {
var err error
if result.IncludeCollection == nil && (len(p.currentSettings.FieldPaths) > 0 || len(p.currentSettings.TagPaths) > 0) {
result.IncludeCollection = p.existsInpathResults(result.Index, result.Raw)
}
result.ForEach(func(_, val gjson.Result) bool {
m := metric.New(
p.measurementName,
@ -250,13 +274,14 @@ func (p *Parser) expandArray(result MetricNode) ([]telegraf.Metric, error) {
map[string]interface{}{},
p.Timestamp,
)
if val.IsObject() {
if p.iterateObjects {
n := MetricNode{
SetName: result.SetName,
Metric: m,
Result: val,
n := result
n.ParentIndex += val.Index
n.Metric = m
n.Result = val
if n.IncludeCollection == nil && (len(p.currentSettings.FieldPaths) > 0 || len(p.currentSettings.TagPaths) > 0) {
n.IncludeCollection = p.existsInpathResults(n.Index, n.Raw)
}
r, err := p.combineObject(n)
if err != nil {
@ -281,13 +306,12 @@ func (p *Parser) expandArray(result MetricNode) ([]telegraf.Metric, error) {
for _, f := range result.Metric.TagList() {
m.AddTag(f.Key, f.Value)
}
n := MetricNode{
Tag: result.Tag,
DesiredType: result.DesiredType,
OutputName: result.OutputName,
SetName: result.SetName,
Metric: m,
Result: val,
n := result
n.ParentIndex += val.Index
n.Metric = m
n.Result = val
if n.IncludeCollection == nil && (len(p.currentSettings.FieldPaths) > 0 || len(p.currentSettings.TagPaths) > 0) {
n.IncludeCollection = p.existsInpathResults(n.Index, n.Raw)
}
r, err := p.expandArray(n)
if err != nil {
@ -314,17 +338,43 @@ func (p *Parser) expandArray(result MetricNode) ([]telegraf.Metric, error) {
switch result.Value().(type) {
case nil: // Ignore JSON values that are set as null
default:
if result.Tag {
result.DesiredType = "string"
outputName := result.OutputName
desiredType := result.DesiredType
if len(p.currentSettings.FieldPaths) > 0 || len(p.currentSettings.TagPaths) > 0 {
var pathResult *PathResult
// When IncludeCollection isn't nil, that means the current result is included in the collection.
if result.IncludeCollection != nil {
pathResult = result.IncludeCollection
} else {
// Verify that the result should be included based on the results of fieldpaths and tag paths
pathResult = p.existsInpathResults(result.ParentIndex, result.Raw)
}
if pathResult == nil {
return results, nil
}
if pathResult.tag {
result.Tag = true
}
if !pathResult.tag {
desiredType = pathResult.Type
}
if pathResult.Rename != "" {
outputName = pathResult.Rename
}
}
v, err := p.convertType(result.Result, result.DesiredType, result.SetName)
if result.Tag {
desiredType = "string"
}
v, err := p.convertType(result.Result, desiredType, result.SetName)
if err != nil {
return nil, err
}
if result.Tag {
result.Metric.AddTag(result.OutputName, v.(string))
result.Metric.AddTag(outputName, v.(string))
} else {
result.Metric.AddField(result.OutputName, v)
result.Metric.AddField(outputName, v)
}
}
}
@ -335,22 +385,55 @@ func (p *Parser) expandArray(result MetricNode) ([]telegraf.Metric, error) {
return results, nil
}
func (p *Parser) existsInpathResults(index int, raw string) *PathResult {
for _, f := range p.pathResults {
if f.result.Index == 0 {
for _, i := range f.result.Indexes {
if i == index {
return &f
}
}
} else if f.result.Index == index {
return &f
}
}
return nil
}
// processObjects will iterate over all 'object' configs and create metrics for each
func (p *Parser) processObjects(objects []JSONObject, input []byte) ([]telegraf.Metric, error) {
func (p *Parser) processObjects(objects []JSONObject) ([]telegraf.Metric, error) {
p.iterateObjects = true
var t []telegraf.Metric
for _, c := range objects {
p.currentSettings = c
if c.Path == "" {
return nil, fmt.Errorf("GJSON path is required")
}
result := gjson.GetBytes(input, c.Path)
result := gjson.GetBytes(p.InputJSON, c.Path)
scopedJSON := []byte(result.Raw)
for _, f := range c.FieldPaths {
var r PathResult
r.result = gjson.GetBytes(scopedJSON, f.Path)
r.DataSet = f
p.pathResults = append(p.pathResults, r)
}
for _, f := range c.TagPaths {
var r PathResult
r.result = gjson.GetBytes(scopedJSON, f.Path)
r.DataSet = f
r.tag = true
p.pathResults = append(p.pathResults, r)
}
if result.Type == gjson.Null {
return nil, fmt.Errorf("GJSON Path returned null")
}
rootObject := MetricNode{
ParentIndex: 0,
Metric: metric.New(
p.measurementName,
map[string]string{},
@ -401,14 +484,11 @@ func (p *Parser) combineObject(result MetricNode) ([]telegraf.Metric, error) {
}
}
arrayNode := MetricNode{
DesiredType: result.DesiredType,
Tag: result.Tag,
OutputName: outputName,
SetName: setName,
Metric: result.Metric,
Result: val,
}
arrayNode := result
arrayNode.ParentIndex += val.Index
arrayNode.OutputName = outputName
arrayNode.SetName = setName
arrayNode.Result = val
for k, t := range p.currentSettings.Fields {
if setName == k {
@ -455,8 +535,8 @@ func (p *Parser) isIncluded(key string, val gjson.Result) bool {
return true
}
// automatically adds tags to included_keys so it does NOT have to be repeated in the config
p.currentSettings.IncludedKeys = append(p.currentSettings.IncludedKeys, p.currentSettings.Tags...)
for _, i := range p.currentSettings.IncludedKeys {
allKeys := append(p.currentSettings.IncludedKeys, p.currentSettings.Tags...)
for _, i := range allKeys {
if i == key {
return true
}


@ -28,6 +28,18 @@ func TestData(t *testing.T) {
name: "Test having an array of objects",
test: "array_of_objects",
},
{
name: "Test having multiple JSON inputs",
test: "multiple_json_input",
},
{
name: "A second test when selecting with sub field and tags",
test: "subfieldtag_in_object_2",
},
{
name: "Test selecting with sub field and tags",
test: "subfieldtag_in_object",
},
{
name: "Test using just fields and tags",
test: "fields_and_tags",


@ -6,4 +6,3 @@ file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="The Shadow of th
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="The Shadow of the Past",name="Bilbo",species="hobbit",random=2
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="The Shadow of the Past",name="Frodo",species="hobbit",random=1
file,title=The\ Lord\ Of\ The\ Rings author="Tolkien",chapters="The Shadow of the Past",name="Frodo",species="hobbit",random=2


@ -0,0 +1,2 @@
file,from_station=COLM,to_station=ANTC,etd_estimate_direction=North minutes=2i
file,from_station=POWL,to_station=DALY,etd_estimate_direction=South minutes=6i


@ -0,0 +1,87 @@
{
"?xml": {
"@version": "1.0",
"@encoding": "utf-8"
},
"root": {
"@id": "1",
"uri": {
"#cdata-section": "http://api.bart.gov/api/etd.aspx?cmd=etd&orig=COLM&dir=n&json=y"
},
"date": "07/02/2021",
"time": "06:05:47 PM PDT",
"station": [
{
"name": "Colma",
"abbr": "COLM",
"etd": [
{
"destination": "Antioch",
"abbreviation": "ANTC",
"limited": "0",
"estimate": [
{
"minutes": "2",
"platform": "2",
"direction": "North",
"length": "10",
"color": "YELLOW",
"hexcolor": "#ffff33",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "16",
"platform": "2",
"direction": "North",
"length": "10",
"color": "YELLOW",
"hexcolor": "#ffff33",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "31",
"platform": "2",
"direction": "North",
"length": "10",
"color": "YELLOW",
"hexcolor": "#ffff33",
"bikeflag": "1",
"delay": "0"
}
]
},
{
"destination": "Richmond",
"abbreviation": "RICH",
"limited": "0",
"estimate": [
{
"minutes": "22",
"platform": "2",
"direction": "North",
"length": "10",
"color": "RED",
"hexcolor": "#ff0000",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "52",
"platform": "2",
"direction": "North",
"length": "10",
"color": "RED",
"hexcolor": "#ff0000",
"bikeflag": "1",
"delay": "0"
}
]
}
]
}
],
"message": ""
}
}


@ -0,0 +1,134 @@
{
"?xml": {
"@version": "1.0",
"@encoding": "utf-8"
},
"root": {
"@id": "1",
"uri": {
"#cdata-section": "http://api.bart.gov/api/etd.aspx?cmd=etd&orig=POWL&dir=s&json=y"
},
"date": "07/02/2021",
"time": "06:06:01 PM PDT",
"station": [
{
"name": "Powell St.",
"abbr": "POWL",
"etd": [
{
"destination": "Daly City",
"abbreviation": "DALY",
"limited": "0",
"estimate": [
{
"minutes": "6",
"platform": "1",
"direction": "South",
"length": "10",
"color": "GREEN",
"hexcolor": "#339933",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "26",
"platform": "1",
"direction": "South",
"length": "9",
"color": "BLUE",
"hexcolor": "#0099cc",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "36",
"platform": "1",
"direction": "South",
"length": "10",
"color": "GREEN",
"hexcolor": "#339933",
"bikeflag": "1",
"delay": "0"
}
]
},
{
"destination": "Millbrae",
"abbreviation": "MLBR",
"limited": "0",
"estimate": [
{
"minutes": "19",
"platform": "1",
"direction": "South",
"length": "10",
"color": "RED",
"hexcolor": "#ff0000",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "49",
"platform": "1",
"direction": "South",
"length": "10",
"color": "RED",
"hexcolor": "#ff0000",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "79",
"platform": "1",
"direction": "South",
"length": "10",
"color": "RED",
"hexcolor": "#ff0000",
"bikeflag": "1",
"delay": "0"
}
]
},
{
"destination": "SF Airport",
"abbreviation": "SFIA",
"limited": "0",
"estimate": [
{
"minutes": "7",
"platform": "1",
"direction": "South",
"length": "10",
"color": "YELLOW",
"hexcolor": "#ffff33",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "37",
"platform": "1",
"direction": "South",
"length": "10",
"color": "YELLOW",
"hexcolor": "#ffff33",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "67",
"platform": "1",
"direction": "South",
"length": "10",
"color": "YELLOW",
"hexcolor": "#ffff33",
"bikeflag": "1",
"delay": "0"
}
]
}
]
}
],
"message": ""
}
}


@ -0,0 +1,18 @@
[[inputs.file]]
files = ["./testdata/multiple_json_input/input_1.json", "./testdata/multiple_json_input/input_2.json"]
data_format = "json_v2"
[[inputs.file.json_v2]]
[[inputs.file.json_v2.object]]
path = "root.station"
[[inputs.file.json_v2.object.tag]]
path="#.abbr"
rename = "from_station"
[[inputs.file.json_v2.object.field]]
path = "#.etd.0.estimate.0.minutes"
rename = "minutes"
type = "int"
[[inputs.file.json_v2.object.tag]]
path = "#.etd.0.abbreviation"
rename = "to_station"
[[inputs.file.json_v2.object.tag]]
path = "#.etd.0.estimate.0.direction"


@ -0,0 +1 @@
file,from_station=COLM,to_station=ANTC,etd_estimate_direction=North etd_estimate_minutes=6i


@ -0,0 +1,97 @@
{
"?xml": {
"@version": "1.0",
"@encoding": "utf-8"
},
"root": {
"@id": "1",
"uri": {
"#cdata-section": "http://api.bart.gov/api/etd.aspx?cmd=etd&orig=COLM&dir=n&json=y"
},
"date": "06/25/2021",
"time": "05:01:31 PM PDT",
"station": [
{
"name": "Colma",
"abbr": "COLM",
"etd": [
{
"destination": "Antioch",
"abbreviation": "ANTC",
"limited": "0",
"estimate": [
{
"minutes": "6",
"platform": "2",
"direction": "North",
"length": "10",
"color": "YELLOW",
"hexcolor": "#ffff33",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "36",
"platform": "2",
"direction": "North",
"length": "10",
"color": "YELLOW",
"hexcolor": "#ffff33",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "51",
"platform": "2",
"direction": "North",
"length": "10",
"color": "YELLOW",
"hexcolor": "#ffff33",
"bikeflag": "1",
"delay": "0"
}
]
},
{
"destination": "Richmond",
"abbreviation": "RICH",
"limited": "0",
"estimate": [
{
"minutes": "12",
"platform": "2",
"direction": "North",
"length": "10",
"color": "RED",
"hexcolor": "#ff0000",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "26",
"platform": "2",
"direction": "North",
"length": "10",
"color": "RED",
"hexcolor": "#ff0000",
"bikeflag": "1",
"delay": "0"
},
{
"minutes": "41",
"platform": "2",
"direction": "North",
"length": "10",
"color": "RED",
"hexcolor": "#ff0000",
"bikeflag": "1",
"delay": "0"
}
]
}
]
}
],
"message": ""
}
}


@ -0,0 +1,17 @@
[[inputs.file]]
files = ["./testdata/subfieldtag_in_object/input.json"]
data_format = "json_v2"
[[inputs.file.json_v2]]
[[inputs.file.json_v2.object]]
path = "root.station"
[[inputs.file.json_v2.object.field]]
path = "#.etd.0.estimate.0.minutes"
type = "int"
[[inputs.file.json_v2.object.tag]]
path = "#.abbr"
rename = "from_station"
[[inputs.file.json_v2.object.tag]]
path = "#.etd.0.abbreviation"
rename = "to_station"
[[inputs.file.json_v2.object.tag]]
path = "#.etd.0.estimate.0.direction"


@ -0,0 +1,4 @@
file,data=3 cnt=23i,format=0i
file,data=7 cnt=23i,format=0i
file,data=10 cnt=23i,format=0i
file,data=23 cnt=23i,format=0i


@ -0,0 +1,10 @@
{
"cnt": 23,
"data": [
3,
7,
10,
23
],
"format": 0
}


@ -0,0 +1,16 @@
# Example taken from: https://github.com/influxdata/telegraf/issues/5940
[[inputs.file]]
files = ["./testdata/subfieldtag_in_object_2/input.json"]
data_format = "json_v2"
[[inputs.file.json_v2]]
[[inputs.file.json_v2.object]]
path = "@this"
[[inputs.file.json_v2.object.tag]]
path = "data"
[[inputs.file.json_v2.object.field]]
path = "cnt"
type = "int"
[[inputs.file.json_v2.object.field]]
path = "format"
type = "int"