package parsers

import (
	"fmt"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/plugins/parsers/collectd"
	"github.com/influxdata/telegraf/plugins/parsers/csv"
	"github.com/influxdata/telegraf/plugins/parsers/dropwizard"
	"github.com/influxdata/telegraf/plugins/parsers/form_urlencoded"
	"github.com/influxdata/telegraf/plugins/parsers/graphite"
	"github.com/influxdata/telegraf/plugins/parsers/grok"
	"github.com/influxdata/telegraf/plugins/parsers/influx"
	"github.com/influxdata/telegraf/plugins/parsers/json"
	"github.com/influxdata/telegraf/plugins/parsers/json_v2"
	"github.com/influxdata/telegraf/plugins/parsers/logfmt"
	"github.com/influxdata/telegraf/plugins/parsers/nagios"
	"github.com/influxdata/telegraf/plugins/parsers/prometheus"
	"github.com/influxdata/telegraf/plugins/parsers/prometheusremotewrite"
	"github.com/influxdata/telegraf/plugins/parsers/value"
	"github.com/influxdata/telegraf/plugins/parsers/wavefront"
	"github.com/influxdata/telegraf/plugins/parsers/xpath"
)

// ParserFunc is a factory function that returns a new instance of a Parser,
// or an error if the parser could not be created.
type ParserFunc func() (Parser, error)

// ParserInput is an interface for input plugins that are able to parse
// arbitrary data formats.
type ParserInput interface {
	// SetParser sets the parser for the interface
	SetParser(parser Parser)
}
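
// A minimal sketch (not taken from any specific plugin) of how an input
// plugin typically satisfies ParserInput: it stores the parser handed to it
// by the config loader and uses it when data arrives, e.g.
//
//	type myInput struct {
//		parser Parser
//	}
//
//	func (m *myInput) SetParser(parser Parser) { m.parser = parser }
//
// The stored parser is then used to turn raw payloads into telegraf.Metric
// values inside the plugin's gather/listen loop.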

// ParserFuncInput is an interface for input plugins that are able to parse
// arbitrary data formats.
type ParserFuncInput interface {
	// SetParserFunc sets the factory function used to create a new parser.
	SetParserFunc(fn ParserFunc)
}
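
// By contrast, a plugin that needs a fresh parser per stream or per file can
// implement ParserFuncInput and call the stored factory on demand. A rough
// sketch (names are illustrative only):
//
//	type myStreamInput struct {
//		parserFunc ParserFunc
//	}
//
//	func (m *myStreamInput) SetParserFunc(fn ParserFunc) { m.parserFunc = fn }
//
//	// later, for each new stream:
//	//	parser, err := m.parserFunc()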

// Parser is an interface defining functions that a parser plugin must satisfy.
type Parser interface {
	// Parse takes a byte buffer separated by newlines,
	// e.g. `cpu.usage.idle 90\ncpu.usage.busy 10`,
	// and parses it into telegraf metrics.
	//
	// Must be thread-safe.
	Parse(buf []byte) ([]telegraf.Metric, error)

	// ParseLine takes a single string metric,
	// e.g. "cpu.usage.idle 90",
	// and parses it into a telegraf metric.
	//
	// Must be thread-safe.
	// This function is only called by plugins that expect line-based protocols.
	// It doesn't need to be implemented by non-line-based parsers (e.g. json, xml).
	ParseLine(line string) (telegraf.Metric, error)

	// SetDefaultTags tells the parser to add all of the given tags
	// to each parsed metric.
	// NOTE: do _not_ modify the map after you've passed it here!!
	SetDefaultTags(tags map[string]string)
}
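
// To illustrate the difference between the two parse methods (using the
// graphite-style examples from the comments above): Parse handles a whole
// buffer and may return several metrics, while ParseLine handles exactly one
// record. A hedged sketch, assuming a parser for a line-based format:
//
//	metrics, err := parser.Parse([]byte("cpu.usage.idle 90\ncpu.usage.busy 10"))
//	// metrics would contain two telegraf.Metric values
//
//	metric, err := parser.ParseLine("cpu.usage.idle 90")
//	// metric is a single telegraf.Metric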

// Config is a struct that covers the data types needed for all parser types,
// and can be used to instantiate _any_ of the parsers.
type Config struct {
	// DataFormat can be one of: json, influx, graphite, value, nagios
	DataFormat string `toml:"data_format"`

	// Separator only applies to Graphite data.
	Separator string `toml:"separator"`
	// Templates only apply to Graphite data.
	Templates []string `toml:"templates"`

	// TagKeys only apply to JSON data
	TagKeys []string `toml:"tag_keys"`
	// Array of glob patterns for keys that should be added as string fields.
	JSONStringFields []string `toml:"json_string_fields"`

	JSONNameKey string `toml:"json_name_key"`
	// MetricName applies to JSON & value. This will be the name of the measurement.
	MetricName string `toml:"metric_name"`

	// holds a gjson path for the json parser
	JSONQuery string `toml:"json_query"`

	// key of the time field
	JSONTimeKey string `toml:"json_time_key"`

	// format of the time field
	JSONTimeFormat string `toml:"json_time_format"`

	// default timezone
	JSONTimezone string `toml:"json_timezone"`

	// Whether to continue if a JSON object can't be coerced
	JSONStrict bool `toml:"json_strict"`

	// Authentication file for collectd
	CollectdAuthFile string `toml:"collectd_auth_file"`
	// One of none (default), sign, or encrypt
	CollectdSecurityLevel string `toml:"collectd_security_level"`
	// Dataset specification for collectd
	CollectdTypesDB []string `toml:"collectd_types_db"`

	// whether to split or join multivalue metrics
	CollectdSplit string `toml:"collectd_split"`

	// DataType only applies to value; this will be the type to parse the value to
	DataType string `toml:"data_type"`

	// DefaultTags are the default tags that will be added to all parsed metrics.
	DefaultTags map[string]string `toml:"default_tags"`

	// an optional json path containing the metric registry object
	// if left empty, the whole json object is parsed as a metric registry
	DropwizardMetricRegistryPath string `toml:"dropwizard_metric_registry_path"`
	// an optional json path containing the default time of the metrics
	// if left empty, the processing time is used
	DropwizardTimePath string `toml:"dropwizard_time_path"`
	// time format to use for parsing the time field
	// defaults to time.RFC3339
	DropwizardTimeFormat string `toml:"dropwizard_time_format"`
	// an optional json path pointing to a json object with tag key/value pairs
	// takes precedence over DropwizardTagPathsMap
	DropwizardTagsPath string `toml:"dropwizard_tags_path"`
	// an optional map containing tag names as keys and json paths to retrieve the tag values from as values
	// used if TagsPath is empty or doesn't return any tags
	DropwizardTagPathsMap map[string]string `toml:"dropwizard_tag_paths_map"`

	// grok patterns
	GrokPatterns           []string `toml:"grok_patterns"`
	GrokNamedPatterns      []string `toml:"grok_named_patterns"`
	GrokCustomPatterns     string   `toml:"grok_custom_patterns"`
	GrokCustomPatternFiles []string `toml:"grok_custom_pattern_files"`
	GrokTimezone           string   `toml:"grok_timezone"`
	GrokUniqueTimestamp    string   `toml:"grok_unique_timestamp"`

	// csv configuration
	CSVColumnNames       []string `toml:"csv_column_names"`
	CSVColumnTypes       []string `toml:"csv_column_types"`
	CSVComment           string   `toml:"csv_comment"`
	CSVDelimiter         string   `toml:"csv_delimiter"`
	CSVHeaderRowCount    int      `toml:"csv_header_row_count"`
	CSVMeasurementColumn string   `toml:"csv_measurement_column"`
	CSVSkipColumns       int      `toml:"csv_skip_columns"`
	CSVSkipRows          int      `toml:"csv_skip_rows"`
	CSVTagColumns        []string `toml:"csv_tag_columns"`
	CSVTimestampColumn   string   `toml:"csv_timestamp_column"`
	CSVTimestampFormat   string   `toml:"csv_timestamp_format"`
	CSVTimezone          string   `toml:"csv_timezone"`
	CSVTrimSpace         bool     `toml:"csv_trim_space"`
	CSVSkipValues        []string `toml:"csv_skip_values"`
	CSVSkipErrors        bool     `toml:"csv_skip_errors"`

	// FormData configuration
	FormUrlencodedTagKeys []string `toml:"form_urlencoded_tag_keys"`

	// Prometheus configuration
	PrometheusIgnoreTimestamp bool `toml:"prometheus_ignore_timestamp"`

	// Value configuration
	ValueFieldName string `toml:"value_field_name"`

	// XPath configuration
	XPathPrintDocument bool   `toml:"xpath_print_document"`
	XPathProtobufFile  string `toml:"xpath_protobuf_file"`
	XPathProtobufType  string `toml:"xpath_protobuf_type"`
	XPathConfig        []XPathConfig

	// JSONPath configuration
	JSONV2Config []JSONV2Config `toml:"json_v2"`
}
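
// Only the fields relevant to the chosen DataFormat are consulted; the rest
// are ignored. As a hedged example (the field values are made up), a CSV
// input might be configured roughly like this:
//
//	cfg := &Config{
//		DataFormat:         "csv",
//		MetricName:         "cpu_report",
//		CSVHeaderRowCount:  1,
//		CSVTimestampColumn: "time",
//		CSVTimestampFormat: "unix",
//		CSVTagColumns:      []string{"host"},
//	}
//
// NewParser (below) maps these CSV* fields onto csv.Config before
// constructing the parser.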

type XPathConfig xpath.Config

type JSONV2Config struct {
	json_v2.Config
}

// NewParser returns a Parser interface based on the given config.
func NewParser(config *Config) (Parser, error) {
	var err error
	var parser Parser
	switch config.DataFormat {
	case "json":
		parser, err = json.New(
			&json.Config{
				MetricName:   config.MetricName,
				TagKeys:      config.TagKeys,
				NameKey:      config.JSONNameKey,
				StringFields: config.JSONStringFields,
				Query:        config.JSONQuery,
				TimeKey:      config.JSONTimeKey,
				TimeFormat:   config.JSONTimeFormat,
				Timezone:     config.JSONTimezone,
				DefaultTags:  config.DefaultTags,
				Strict:       config.JSONStrict,
			},
		)
	case "value":
		parser, err = NewValueParser(config.MetricName,
			config.DataType, config.ValueFieldName, config.DefaultTags)
	case "influx":
		parser, err = NewInfluxParser()
	case "nagios":
		parser, err = NewNagiosParser()
	case "graphite":
		parser, err = NewGraphiteParser(config.Separator,
			config.Templates, config.DefaultTags)
	case "collectd":
		parser, err = NewCollectdParser(config.CollectdAuthFile,
			config.CollectdSecurityLevel, config.CollectdTypesDB, config.CollectdSplit)
	case "dropwizard":
		parser, err = NewDropwizardParser(
			config.DropwizardMetricRegistryPath,
			config.DropwizardTimePath,
			config.DropwizardTimeFormat,
			config.DropwizardTagsPath,
			config.DropwizardTagPathsMap,
			config.DefaultTags,
			config.Separator,
			config.Templates)
	case "wavefront":
		parser, err = NewWavefrontParser(config.DefaultTags)
	case "grok":
		parser, err = newGrokParser(
			config.MetricName,
			config.GrokPatterns,
			config.GrokNamedPatterns,
			config.GrokCustomPatterns,
			config.GrokCustomPatternFiles,
			config.GrokTimezone,
			config.GrokUniqueTimestamp)
	case "csv":
		config := &csv.Config{
			MetricName:        config.MetricName,
			HeaderRowCount:    config.CSVHeaderRowCount,
			SkipRows:          config.CSVSkipRows,
			SkipColumns:       config.CSVSkipColumns,
			Delimiter:         config.CSVDelimiter,
			Comment:           config.CSVComment,
			TrimSpace:         config.CSVTrimSpace,
			ColumnNames:       config.CSVColumnNames,
			ColumnTypes:       config.CSVColumnTypes,
			TagColumns:        config.CSVTagColumns,
			MeasurementColumn: config.CSVMeasurementColumn,
			TimestampColumn:   config.CSVTimestampColumn,
			TimestampFormat:   config.CSVTimestampFormat,
			Timezone:          config.CSVTimezone,
			DefaultTags:       config.DefaultTags,
			SkipValues:        config.CSVSkipValues,
			SkipErrors:        config.CSVSkipErrors,
		}

		return csv.NewParser(config)
	case "logfmt":
		parser, err = NewLogFmtParser(config.MetricName, config.DefaultTags)
	case "form_urlencoded":
		parser, err = NewFormUrlencodedParser(
			config.MetricName,
			config.DefaultTags,
			config.FormUrlencodedTagKeys,
		)
	case "prometheus":
		parser, err = NewPrometheusParser(
			config.DefaultTags,
			config.PrometheusIgnoreTimestamp,
		)
	case "prometheusremotewrite":
		parser, err = NewPrometheusRemoteWriteParser(config.DefaultTags)
	case "xml", "xpath_json", "xpath_msgpack", "xpath_protobuf":
		parser = &xpath.Parser{
			Format:              config.DataFormat,
			ProtobufMessageDef:  config.XPathProtobufFile,
			ProtobufMessageType: config.XPathProtobufType,
			PrintDocument:       config.XPathPrintDocument,
			DefaultTags:         config.DefaultTags,
			Configs:             NewXPathParserConfigs(config.MetricName, config.XPathConfig),
		}
	case "json_v2":
		parser, err = NewJSONPathParser(config.JSONV2Config)
	default:
		err = fmt.Errorf("invalid data format: %s", config.DataFormat)
	}
	return parser, err
}
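
// End-to-end, the registry is used roughly as follows (a sketch, not the
// exact call sites in Telegraf): build a Config, ask NewParser for the
// matching Parser, then feed it raw bytes.
//
//	parser, err := NewParser(&Config{DataFormat: "influx"})
//	if err != nil {
//		// handle the unknown-format or construction error
//	}
//	metrics, err := parser.Parse([]byte("cpu,host=a usage_idle=90\n"))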

func newGrokParser(metricName string,
	patterns []string, nPatterns []string,
	cPatterns string, cPatternFiles []string,
	tZone string, uniqueTimestamp string) (Parser, error) {
	parser := grok.Parser{
		Measurement:        metricName,
		Patterns:           patterns,
		NamedPatterns:      nPatterns,
		CustomPatterns:     cPatterns,
		CustomPatternFiles: cPatternFiles,
		Timezone:           tZone,
		UniqueTimestamp:    uniqueTimestamp,
	}

	err := parser.Compile()
	return &parser, err
}

func NewNagiosParser() (Parser, error) {
	return &nagios.NagiosParser{}, nil
}

func NewInfluxParser() (Parser, error) {
	handler := influx.NewMetricHandler()
	return influx.NewParser(handler), nil
}

func NewGraphiteParser(
	separator string,
	templates []string,
	defaultTags map[string]string,
) (Parser, error) {
	return graphite.NewGraphiteParser(separator, templates, defaultTags)
}

func NewValueParser(
	metricName string,
	dataType string,
	fieldName string,
	defaultTags map[string]string,
) (Parser, error) {
	return value.NewValueParser(metricName, dataType, fieldName, defaultTags), nil
}

func NewCollectdParser(
	authFile string,
	securityLevel string,
	typesDB []string,
	split string,
) (Parser, error) {
	return collectd.NewCollectdParser(authFile, securityLevel, typesDB, split)
}

func NewDropwizardParser(
	metricRegistryPath string,
	timePath string,
	timeFormat string,
	tagsPath string,
	tagPathsMap map[string]string,
	defaultTags map[string]string,
	separator string,
	templates []string,
) (Parser, error) {
	parser := dropwizard.NewParser()
	parser.MetricRegistryPath = metricRegistryPath
	parser.TimePath = timePath
	parser.TimeFormat = timeFormat
	parser.TagsPath = tagsPath
	parser.TagPathsMap = tagPathsMap
	parser.DefaultTags = defaultTags
	err := parser.SetTemplates(separator, templates)
	if err != nil {
		return nil, err
	}
	return parser, err
}

// NewLogFmtParser returns a logfmt parser with the default options.
func NewLogFmtParser(metricName string, defaultTags map[string]string) (Parser, error) {
	return logfmt.NewParser(metricName, defaultTags), nil
}

func NewWavefrontParser(defaultTags map[string]string) (Parser, error) {
	return wavefront.NewWavefrontParser(defaultTags), nil
}

func NewFormUrlencodedParser(
	metricName string,
	defaultTags map[string]string,
	tagKeys []string,
) (Parser, error) {
	return &form_urlencoded.Parser{
		MetricName:  metricName,
		DefaultTags: defaultTags,
		TagKeys:     tagKeys,
	}, nil
}

func NewPrometheusParser(defaultTags map[string]string, ignoreTimestamp bool) (Parser, error) {
	return &prometheus.Parser{
		DefaultTags:     defaultTags,
		IgnoreTimestamp: ignoreTimestamp,
	}, nil
}

func NewPrometheusRemoteWriteParser(defaultTags map[string]string) (Parser, error) {
	return &prometheusremotewrite.Parser{
		DefaultTags: defaultTags,
	}, nil
}

func NewXPathParserConfigs(metricName string, cfgs []XPathConfig) []xpath.Config {
	// Convert the config format; this is a one-to-one copy
	configs := make([]xpath.Config, 0, len(cfgs))
	for _, cfg := range cfgs {
		config := xpath.Config(cfg)
		config.MetricDefaultName = metricName
		configs = append(configs, config)
	}
	return configs
}

func NewJSONPathParser(jsonv2config []JSONV2Config) (Parser, error) {
	configs := make([]json_v2.Config, len(jsonv2config))
	for i, cfg := range jsonv2config {
		configs[i].MeasurementName = cfg.MeasurementName
		configs[i].MeasurementNamePath = cfg.MeasurementNamePath

		configs[i].TimestampPath = cfg.TimestampPath
		configs[i].TimestampFormat = cfg.TimestampFormat
		configs[i].TimestampTimezone = cfg.TimestampTimezone

		configs[i].Fields = cfg.Fields
		configs[i].Tags = cfg.Tags

		configs[i].JSONObjects = cfg.JSONObjects
	}
	return &json_v2.Parser{
		Configs: configs,
	}, nil
}