fix: Linter fixes for plugins/parsers/[a-z]* (#10145)
commit 8e85a67ee1
parent 7d3531a29b
@@ -10,13 +10,14 @@ import (
     "testing"
     "time"
 
+    "github.com/stretchr/testify/require"
+
     "github.com/influxdata/telegraf"
     "github.com/influxdata/telegraf/models"
     "github.com/influxdata/telegraf/plugins/common/tls"
     "github.com/influxdata/telegraf/plugins/inputs"
     "github.com/influxdata/telegraf/plugins/outputs"
     "github.com/influxdata/telegraf/plugins/parsers"
-    "github.com/stretchr/testify/require"
 )
 
 func TestConfig_LoadSingleInputWithEnvVars(t *testing.T) {
@@ -140,12 +141,17 @@ func TestConfig_LoadDirectory(t *testing.T) {
     expectedConfigs[0].Tags = make(map[string]string)
 
     expectedPlugins[1] = inputs.Inputs["exec"]().(*MockupInputPlugin)
-    p, err := parsers.NewParser(&parsers.Config{
+    parserConfig := &parsers.Config{
         MetricName: "exec",
         DataFormat: "json",
         JSONStrict: true,
-    })
+    }
+    p, err := parsers.NewParser(parserConfig)
     require.NoError(t, err)
+
+    // Inject logger to have proper struct for comparison
+    models.SetLoggerOnPlugin(p, models.NewLogger("parsers", parserConfig.DataFormat, parserConfig.MetricName))
+
     expectedPlugins[1].SetParser(p)
     expectedPlugins[1].Command = "/usr/bin/myothercollector --foo=bar"
     expectedConfigs[1] = &models.InputConfig{

@@ -3,7 +3,6 @@ package collectd
 import (
     "errors"
     "fmt"
-    "log"
     "os"
 
     "collectd.org/api"
@@ -24,6 +23,7 @@ type CollectdParser struct {
     //whether or not to split multi value metric into multiple metrics
     //default value is split
     ParseMultiValue string
+    Log telegraf.Logger `toml:"-"`
     popts network.ParseOpts
 }
 
@@ -81,7 +81,7 @@ func (p *CollectdParser) Parse(buf []byte) ([]telegraf.Metric, error) {
 
     metrics := []telegraf.Metric{}
     for _, valueList := range valueLists {
-        metrics = append(metrics, UnmarshalValueList(valueList, p.ParseMultiValue)...)
+        metrics = append(metrics, p.unmarshalValueList(valueList)...)
     }
 
     if len(p.DefaultTags) > 0 {
@@ -115,12 +115,13 @@ func (p *CollectdParser) SetDefaultTags(tags map[string]string) {
     p.DefaultTags = tags
 }
 
-// UnmarshalValueList translates a ValueList into a Telegraf metric.
-func UnmarshalValueList(vl *api.ValueList, multiValue string) []telegraf.Metric {
+// unmarshalValueList translates a ValueList into a Telegraf metric.
+func (p *CollectdParser) unmarshalValueList(vl *api.ValueList) []telegraf.Metric {
     timestamp := vl.Time.UTC()
 
     var metrics []telegraf.Metric
 
+    var multiValue = p.ParseMultiValue
     //set multiValue to default "split" if nothing is specified
     if multiValue == "" {
         multiValue = "split"
@@ -192,7 +193,7 @@ func UnmarshalValueList(vl *api.ValueList, multiValue string) []telegraf.Metric
 
         metrics = append(metrics, m)
     default:
-        log.Printf("parse-multi-value config can only be 'split' or 'join'")
+        p.Log.Info("parse-multi-value config can only be 'split' or 'join'")
     }
     return metrics
 }

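The collectd hunks above swap the package-level log calls for a plugin-scoped telegraf.Logger and fold the exported UnmarshalValueList helper into a method, so it can read ParseMultiValue and the logger from the receiver. A minimal sketch of that pattern on a made-up parser; ExampleParser, SplitMode and normalizeSplitMode are illustrative names, not part of this commit:

package example

import "github.com/influxdata/telegraf"

// ExampleParser mirrors the pattern used above: the logger is injected by
// Telegraf at load time and excluded from TOML via the `toml:"-"` tag.
type ExampleParser struct {
	SplitMode string

	Log telegraf.Logger `toml:"-"`
}

// normalizeSplitMode reads configuration from the receiver instead of taking
// it as a parameter, so callers no longer thread the value through.
func (p *ExampleParser) normalizeSplitMode() string {
	mode := p.SplitMode
	if mode == "" {
		mode = "split"
	}
	if mode != "split" && mode != "join" {
		// Plugin-scoped logging replaces the standard library's log.Printf.
		p.Log.Infof("split mode can only be 'split' or 'join', got %q", mode)
	}
	return mode
}

Telegraf fills the Log field when the plugin is loaded (see the models.SetLoggerOnPlugin call in the config test hunk above), which is why the field carries the `toml:"-"` tag and never appears in configuration files.
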
@@ -6,7 +6,6 @@ import (
 
     "collectd.org/api"
     "collectd.org/network"
-    "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
 
     "github.com/influxdata/telegraf"
@@ -144,7 +143,7 @@ func TestParseMultiValueSplit(t *testing.T) {
     metrics, err := parser.Parse(bytes)
     require.NoError(t, err)
 
-    assert.Equal(t, 2, len(metrics))
+    require.Equal(t, 2, len(metrics))
 }
 
 func TestParse_DefaultTags(t *testing.T) {
@@ -215,7 +214,7 @@ func TestParse_SignSecurityLevel(t *testing.T) {
     bytes, err = buf.Bytes()
     require.NoError(t, err)
 
-    metrics, err = parser.Parse(bytes)
+    _, err = parser.Parse(bytes)
     require.Error(t, err)
 }
 
@@ -270,7 +269,7 @@ func TestParse_EncryptSecurityLevel(t *testing.T) {
     bytes, err = buf.Bytes()
     require.NoError(t, err)
 
-    metrics, err = parser.Parse(bytes)
+    _, err = parser.Parse(bytes)
     require.Error(t, err)
 }
 

@@ -6,10 +6,11 @@ import (
     "testing"
     "time"
 
+    "github.com/stretchr/testify/require"
+
     "github.com/influxdata/telegraf"
     "github.com/influxdata/telegraf/metric"
     "github.com/influxdata/telegraf/testutil"
-    "github.com/stretchr/testify/require"
 )
 
 var DefaultTime = func() time.Time {
@@ -100,6 +101,8 @@ func TestTimestamp(t *testing.T) {
             TimeFunc: DefaultTime,
         },
     )
+    require.NoError(t, err)
+
     testCSV := `line1,line2,line3
 23/05/09 04:05:06 PM,70,test_name
 07/11/09 04:05:06 PM,80,test_name2`
@@ -121,6 +124,8 @@ func TestTimestampYYYYMMDDHHmm(t *testing.T) {
             TimeFunc: DefaultTime,
         },
     )
+    require.NoError(t, err)
+
     testCSV := `line1,line2,line3
 200905231605,70,test_name
 200907111605,80,test_name2`

@@ -3,14 +3,14 @@ package dropwizard
 import (
     "encoding/json"
     "fmt"
-    "log"
     "time"
 
+    "github.com/tidwall/gjson"
+
     "github.com/influxdata/telegraf"
     "github.com/influxdata/telegraf/internal/templating"
     "github.com/influxdata/telegraf/metric"
     "github.com/influxdata/telegraf/plugins/parsers/influx"
-    "github.com/tidwall/gjson"
 )
 
 type TimeFunc func() time.Time
@@ -42,6 +42,8 @@ type parser struct {
     // an optional map of default tags to use for metrics
     DefaultTags map[string]string
 
+    Log telegraf.Logger `toml:"-"`
+
     separator string
     templateEngine *templating.Engine
 
@@ -152,7 +154,7 @@ func (p *parser) readTags(buf []byte) map[string]string {
     var tags map[string]string
     err := json.Unmarshal(tagsBytes, &tags)
     if err != nil {
-        log.Printf("W! failed to parse tags from JSON path '%s': %s\n", p.TagsPath, err)
+        p.Log.Warnf("Failed to parse tags from JSON path '%s': %s\n", p.TagsPath, err)
     } else if len(tags) > 0 {
         return tags
     }

@@ -1,16 +1,15 @@
 package dropwizard
 
 import (
+    "fmt"
+    "github.com/influxdata/telegraf/testutil"
     "testing"
+    "time"
 
+    "github.com/stretchr/testify/require"
+
     "github.com/influxdata/telegraf"
     "github.com/influxdata/telegraf/metric"
-
-    "fmt"
-    "time"
-
-    "github.com/stretchr/testify/assert"
-    "github.com/stretchr/testify/require"
 )
 
 var testTimeFunc = func() time.Time {
@@ -34,8 +33,8 @@ func TestParseValidEmptyJSON(t *testing.T) {
 
     // Most basic vanilla test
     metrics, err := parser.Parse([]byte(validEmptyJSON))
-    assert.NoError(t, err)
-    assert.Len(t, metrics, 0)
+    require.NoError(t, err)
+    require.Len(t, metrics, 0)
 }
 
 // validCounterJSON is a valid dropwizard json document containing one counter
@@ -58,13 +57,13 @@ func TestParseValidCounterJSON(t *testing.T) {
     parser := NewParser()
 
     metrics, err := parser.Parse([]byte(validCounterJSON))
-    assert.NoError(t, err)
-    assert.Len(t, metrics, 1)
-    assert.Equal(t, "measurement", metrics[0].Name())
-    assert.Equal(t, map[string]interface{}{
+    require.NoError(t, err)
+    require.Len(t, metrics, 1)
+    require.Equal(t, "measurement", metrics[0].Name())
+    require.Equal(t, map[string]interface{}{
         "count": float64(1),
     }, metrics[0].Fields())
-    assert.Equal(t, map[string]string{"metric_type": "counter"}, metrics[0].Tags())
+    require.Equal(t, map[string]string{"metric_type": "counter"}, metrics[0].Tags())
 }
 
 // validEmbeddedCounterJSON is a valid json document containing separate fields for dropwizard metrics, tags and time override.
@@ -99,19 +98,19 @@ func TestParseValidEmbeddedCounterJSON(t *testing.T) {
     parser.TimePath = "time"
 
     metrics, err := parser.Parse([]byte(validEmbeddedCounterJSON))
-    assert.NoError(t, err)
-    assert.Len(t, metrics, 1)
-    assert.Equal(t, "measurement", metrics[0].Name())
-    assert.Equal(t, map[string]interface{}{
+    require.NoError(t, err)
+    require.Len(t, metrics, 1)
+    require.Equal(t, "measurement", metrics[0].Name())
+    require.Equal(t, map[string]interface{}{
         "count": float64(1),
     }, metrics[0].Fields())
-    assert.Equal(t, map[string]string{
+    require.Equal(t, map[string]string{
         "metric_type": "counter",
         "tag1": "green",
         "tag2": "yellow",
         "tag3 space,comma=equals": "red ,=",
     }, metrics[0].Tags())
-    assert.True(t, metricTime.Equal(metrics[0].Time()), fmt.Sprintf("%s should be equal to %s", metrics[0].Time(), metricTime))
+    require.True(t, metricTime.Equal(metrics[0].Time()), fmt.Sprintf("%s should be equal to %s", metrics[0].Time(), metricTime))
 
     // now test json tags through TagPathsMap
     parser2 := NewParser()
@@ -119,8 +118,8 @@ func TestParseValidEmbeddedCounterJSON(t *testing.T) {
     parser2.TagPathsMap = map[string]string{"tag1": "tags.tag1"}
     parser2.TimePath = "time"
     metrics2, err2 := parser2.Parse([]byte(validEmbeddedCounterJSON))
-    assert.NoError(t, err2)
-    assert.Equal(t, map[string]string{"metric_type": "counter", "tag1": "green"}, metrics2[0].Tags())
+    require.NoError(t, err2)
+    require.Equal(t, map[string]string{"metric_type": "counter", "tag1": "green"}, metrics2[0].Tags())
 }
 
 // validMeterJSON1 is a valid dropwizard json document containing one meter
@@ -148,10 +147,10 @@ func TestParseValidMeterJSON1(t *testing.T) {
     parser := NewParser()
 
     metrics, err := parser.Parse([]byte(validMeterJSON1))
-    assert.NoError(t, err)
-    assert.Len(t, metrics, 1)
-    assert.Equal(t, "measurement1", metrics[0].Name())
-    assert.Equal(t, map[string]interface{}{
+    require.NoError(t, err)
+    require.Len(t, metrics, 1)
+    require.Equal(t, "measurement1", metrics[0].Name())
+    require.Equal(t, map[string]interface{}{
         "count": float64(1),
         "m15_rate": float64(1),
         "m1_rate": float64(1),
@@ -160,7 +159,7 @@ func TestParseValidMeterJSON1(t *testing.T) {
         "units": "events/second",
     }, metrics[0].Fields())
 
-    assert.Equal(t, map[string]string{"metric_type": "meter"}, metrics[0].Tags())
+    require.Equal(t, map[string]string{"metric_type": "meter"}, metrics[0].Tags())
 }
 
 // validMeterJSON2 is a valid dropwizard json document containing one meter with one tag
@@ -188,10 +187,10 @@ func TestParseValidMeterJSON2(t *testing.T) {
     parser := NewParser()
 
     metrics, err := parser.Parse([]byte(validMeterJSON2))
-    assert.NoError(t, err)
-    assert.Len(t, metrics, 1)
-    assert.Equal(t, "measurement2", metrics[0].Name())
-    assert.Equal(t, map[string]interface{}{
+    require.NoError(t, err)
+    require.Len(t, metrics, 1)
+    require.Equal(t, "measurement2", metrics[0].Name())
+    require.Equal(t, map[string]interface{}{
         "count": float64(2),
         "m15_rate": float64(2),
         "m1_rate": float64(2),
@@ -199,7 +198,7 @@ func TestParseValidMeterJSON2(t *testing.T) {
         "mean_rate": float64(2),
         "units": "events/second",
     }, metrics[0].Fields())
-    assert.Equal(t, map[string]string{"metric_type": "meter", "key": "value"}, metrics[0].Tags())
+    require.Equal(t, map[string]string{"metric_type": "meter", "key": "value"}, metrics[0].Tags())
 }
 
 // validGaugeJSON is a valid dropwizard json document containing one gauge
@@ -222,13 +221,13 @@ func TestParseValidGaugeJSON(t *testing.T) {
     parser := NewParser()
 
     metrics, err := parser.Parse([]byte(validGaugeJSON))
-    assert.NoError(t, err)
-    assert.Len(t, metrics, 1)
-    assert.Equal(t, "measurement", metrics[0].Name())
-    assert.Equal(t, map[string]interface{}{
+    require.NoError(t, err)
+    require.Len(t, metrics, 1)
+    require.Equal(t, "measurement", metrics[0].Name())
+    require.Equal(t, map[string]interface{}{
         "value": true,
     }, metrics[0].Fields())
-    assert.Equal(t, map[string]string{"metric_type": "gauge"}, metrics[0].Tags())
+    require.Equal(t, map[string]string{"metric_type": "gauge"}, metrics[0].Tags())
 }
 
 // validHistogramJSON is a valid dropwizard json document containing one histogram
@@ -261,10 +260,10 @@ func TestParseValidHistogramJSON(t *testing.T) {
     parser := NewParser()
 
     metrics, err := parser.Parse([]byte(validHistogramJSON))
-    assert.NoError(t, err)
-    assert.Len(t, metrics, 1)
-    assert.Equal(t, "measurement", metrics[0].Name())
-    assert.Equal(t, map[string]interface{}{
+    require.NoError(t, err)
+    require.Len(t, metrics, 1)
+    require.Equal(t, "measurement", metrics[0].Name())
+    require.Equal(t, map[string]interface{}{
         "count": float64(1),
         "max": float64(2),
         "mean": float64(3),
@@ -277,7 +276,7 @@ func TestParseValidHistogramJSON(t *testing.T) {
         "p999": float64(10),
         "stddev": float64(11),
     }, metrics[0].Fields())
-    assert.Equal(t, map[string]string{"metric_type": "histogram"}, metrics[0].Tags())
+    require.Equal(t, map[string]string{"metric_type": "histogram"}, metrics[0].Tags())
 }
 
 // validTimerJSON is a valid dropwizard json document containing one timer
@@ -316,10 +315,10 @@ func TestParseValidTimerJSON(t *testing.T) {
     parser := NewParser()
 
     metrics, err := parser.Parse([]byte(validTimerJSON))
-    assert.NoError(t, err)
-    assert.Len(t, metrics, 1)
-    assert.Equal(t, "measurement", metrics[0].Name())
-    assert.Equal(t, map[string]interface{}{
+    require.NoError(t, err)
+    require.Len(t, metrics, 1)
+    require.Equal(t, "measurement", metrics[0].Name())
+    require.Equal(t, map[string]interface{}{
         "count": float64(1),
         "max": float64(2),
         "mean": float64(3),
@@ -338,7 +337,7 @@ func TestParseValidTimerJSON(t *testing.T) {
         "duration_units": "seconds",
         "rate_units": "calls/second",
     }, metrics[0].Fields())
-    assert.Equal(t, map[string]string{"metric_type": "timer"}, metrics[0].Tags())
+    require.Equal(t, map[string]string{"metric_type": "timer"}, metrics[0].Tags())
 }
 
 // validAllJSON is a valid dropwizard json document containing one metric of each type
@@ -367,8 +366,8 @@ func TestParseValidAllJSON(t *testing.T) {
     parser := NewParser()
 
     metrics, err := parser.Parse([]byte(validAllJSON))
-    assert.NoError(t, err)
-    assert.Len(t, metrics, 5)
+    require.NoError(t, err)
+    require.Len(t, metrics, 5)
 }
 
 func TestTagParsingProblems(t *testing.T) {
@@ -376,20 +375,22 @@ func TestTagParsingProblems(t *testing.T) {
     parser1 := NewParser()
     parser1.MetricRegistryPath = "metrics"
     parser1.TagsPath = "tags1"
+    parser1.Log = testutil.Logger{}
     metrics1, err1 := parser1.Parse([]byte(validEmbeddedCounterJSON))
-    assert.NoError(t, err1)
-    assert.Len(t, metrics1, 1)
-    assert.Equal(t, map[string]string{"metric_type": "counter"}, metrics1[0].Tags())
+    require.NoError(t, err1)
+    require.Len(t, metrics1, 1)
+    require.Equal(t, map[string]string{"metric_type": "counter"}, metrics1[0].Tags())
 
     // giving a wrong TagsPath falls back to TagPathsMap
     parser2 := NewParser()
     parser2.MetricRegistryPath = "metrics"
     parser2.TagsPath = "tags1"
     parser2.TagPathsMap = map[string]string{"tag1": "tags.tag1"}
+    parser2.Log = testutil.Logger{}
     metrics2, err2 := parser2.Parse([]byte(validEmbeddedCounterJSON))
-    assert.NoError(t, err2)
-    assert.Len(t, metrics2, 1)
-    assert.Equal(t, map[string]string{"metric_type": "counter", "tag1": "green"}, metrics2[0].Tags())
+    require.NoError(t, err2)
+    require.Len(t, metrics2, 1)
+    require.Equal(t, map[string]string{"metric_type": "counter", "tag1": "green"}, metrics2[0].Tags())
 }
 
 // sampleTemplateJSON is a sample json document containing metrics to be tested against the templating engine.

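The dropwizard test hunks follow the same two moves as the rest of this commit: every assert.* call becomes require.*, so a failed check stops the test immediately instead of letting later assertions run against bad data, and parsers that now log through the injected logger get a testutil.Logger{} so that logger is never nil. A small self-contained sketch of why the require variant is preferred here; parseJSON is a stand-in, not the dropwizard API:

package example

import (
	"encoding/json"
	"testing"

	"github.com/stretchr/testify/require"
)

// parseJSON is a stand-in for a parser under test.
func parseJSON(buf []byte) (map[string]interface{}, error) {
	var out map[string]interface{}
	err := json.Unmarshal(buf, &out)
	return out, err
}

func TestRequireStopsOnFailure(t *testing.T) {
	fields, err := parseJSON([]byte(`{"count": 1}`))
	// require fails the test and returns right here on error, so the map
	// access below can never run against a nil result; assert would continue.
	require.NoError(t, err)
	require.Len(t, fields, 1)
	require.Equal(t, float64(1), fields["count"])
}
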
@@ -20,7 +20,6 @@ var (
     MaxDate = time.Date(2038, 1, 19, 0, 0, 0, 0, time.UTC)
 )
 
-// Parser encapsulates a Graphite Parser.
 type GraphiteParser struct {
     Separator string
     Templates []string
@@ -77,9 +76,9 @@ func (p *GraphiteParser) Parse(buf []byte) ([]telegraf.Metric, error) {
             line = bytes.TrimSpace(buf) // last line
         }
         if len(line) != 0 {
-            metric, err := p.ParseLine(string(line))
+            m, err := p.ParseLine(string(line))
             if err == nil {
-                metrics = append(metrics, metric)
+                metrics = append(metrics, m)
             } else {
                 errs = append(errs, err.Error())
             }
@@ -95,7 +94,7 @@ func (p *GraphiteParser) Parse(buf []byte) ([]telegraf.Metric, error) {
     return metrics, nil
 }
 
-// Parse performs Graphite parsing of a single line.
+// ParseLine performs Graphite parsing of a single line.
 func (p *GraphiteParser) ParseLine(line string) (telegraf.Metric, error) {
     // Break into 3 fields (name, value, timestamp).
     fields := strings.Fields(line)

@@ -6,11 +6,11 @@ import (
     "testing"
     "time"
 
+    "github.com/stretchr/testify/require"
+
     "github.com/influxdata/telegraf/internal/templating"
     "github.com/influxdata/telegraf/metric"
     "github.com/influxdata/telegraf/testutil"
-    "github.com/stretchr/testify/assert"
-    "github.com/stretchr/testify/require"
 )
 
 func BenchmarkParse(b *testing.B) {
@@ -30,7 +30,8 @@ func BenchmarkParse(b *testing.B) {
     }
 
     for i := 0; i < b.N; i++ {
-        p.Parse([]byte("servers.localhost.cpu.load 11 1435077219"))
+        _, err := p.Parse([]byte("servers.localhost.cpu.load 11 1435077219"))
+        require.NoError(b, err)
     }
 }
 
@@ -285,7 +286,7 @@ func TestParseLine(t *testing.T) {
             t.Fatalf("unexpected error creating graphite parser: %v", err)
         }
 
-        metric, err := p.ParseLine(test.input)
+        m, err := p.ParseLine(test.input)
         if errstr(err) != test.err {
             t.Fatalf("err does not match. expected %v, got %v", test.err, err)
         }
@@ -293,22 +294,22 @@ func TestParseLine(t *testing.T) {
             // If we erred out,it was intended and the following tests won't work
             continue
         }
-        if metric.Name() != test.measurement {
+        if m.Name() != test.measurement {
             t.Fatalf("name parse failer. expected %v, got %v",
-                test.measurement, metric.Name())
+                test.measurement, m.Name())
         }
-        if len(metric.Tags()) != len(test.tags) {
+        if len(m.Tags()) != len(test.tags) {
             t.Fatalf("tags len mismatch. expected %d, got %d",
-                len(test.tags), len(metric.Tags()))
+                len(test.tags), len(m.Tags()))
         }
-        f := metric.Fields()["value"].(float64)
+        f := m.Fields()["value"].(float64)
         if f != test.value {
             t.Fatalf("floatValue value mismatch. expected %v, got %v",
                 test.value, f)
         }
-        if metric.Time().UnixNano()/1000000 != test.time.UnixNano()/1000000 {
+        if m.Time().UnixNano()/1000000 != test.time.UnixNano()/1000000 {
             t.Fatalf("time value mismatch. expected %v, got %v",
-                test.time.UnixNano(), metric.Time().UnixNano())
+                test.time.UnixNano(), m.Time().UnixNano())
         }
     }
 }
@@ -478,9 +479,9 @@ func TestFilterMatchDefault(t *testing.T) {
         time.Unix(1435077219, 0))
 
     m, err := p.ParseLine("miss.servers.localhost.cpu_load 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
-    assert.Equal(t, exp, m)
+    require.Equal(t, exp, m)
 }
 
 func TestFilterMatchMultipleMeasurement(t *testing.T) {
@@ -495,9 +496,9 @@ func TestFilterMatchMultipleMeasurement(t *testing.T) {
         time.Unix(1435077219, 0))
 
     m, err := p.ParseLine("servers.localhost.cpu.cpu_load.10 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
-    assert.Equal(t, exp, m)
+    require.Equal(t, exp, m)
 }
 
 func TestFilterMatchMultipleMeasurementSeparator(t *testing.T) {
@@ -505,7 +506,7 @@ func TestFilterMatchMultipleMeasurementSeparator(t *testing.T) {
         []string{"servers.localhost .host.measurement.measurement*"},
         nil,
     )
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     exp := metric.New("cpu_cpu_load_10",
         map[string]string{"host": "localhost"},
@@ -513,9 +514,9 @@ func TestFilterMatchMultipleMeasurementSeparator(t *testing.T) {
         time.Unix(1435077219, 0))
 
     m, err := p.ParseLine("servers.localhost.cpu.cpu_load.10 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
-    assert.Equal(t, exp, m)
+    require.Equal(t, exp, m)
 }
 
 func TestFilterMatchSingle(t *testing.T) {
@@ -530,9 +531,9 @@ func TestFilterMatchSingle(t *testing.T) {
         time.Unix(1435077219, 0))
 
     m, err := p.ParseLine("servers.localhost.cpu_load 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
-    assert.Equal(t, exp, m)
+    require.Equal(t, exp, m)
 }
 
 func TestParseNoMatch(t *testing.T) {
@@ -547,9 +548,9 @@ func TestParseNoMatch(t *testing.T) {
         time.Unix(1435077219, 0))
 
     m, err := p.ParseLine("servers.localhost.memory.VmallocChunk 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
-    assert.Equal(t, exp, m)
+    require.Equal(t, exp, m)
 }
 
 func TestFilterMatchWildcard(t *testing.T) {
@@ -564,9 +565,9 @@ func TestFilterMatchWildcard(t *testing.T) {
         time.Unix(1435077219, 0))
 
     m, err := p.ParseLine("servers.localhost.cpu_load 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
-    assert.Equal(t, exp, m)
+    require.Equal(t, exp, m)
 }
 
 func TestFilterMatchExactBeforeWildcard(t *testing.T) {
@@ -583,9 +584,9 @@ func TestFilterMatchExactBeforeWildcard(t *testing.T) {
         time.Unix(1435077219, 0))
 
     m, err := p.ParseLine("servers.localhost.cpu_load 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
-    assert.Equal(t, exp, m)
+    require.Equal(t, exp, m)
 }
 
 func TestFilterMatchMostLongestFilter(t *testing.T) {
@@ -602,7 +603,7 @@ func TestFilterMatchMostLongestFilter(t *testing.T) {
     }
 
     m, err := p.ParseLine("servers.localhost.cpu.cpu_load 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     value, ok := m.GetTag("host")
     require.True(t, ok)
@@ -631,9 +632,9 @@ func TestFilterMatchMultipleWildcards(t *testing.T) {
         time.Unix(1435077219, 0))
 
     m, err := p.ParseLine("servers.server01.cpu_load 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
-    assert.Equal(t, exp, m)
+    require.Equal(t, exp, m)
 }
 
 func TestParseDefaultTags(t *testing.T) {
@@ -647,7 +648,7 @@ func TestParseDefaultTags(t *testing.T) {
     }
 
     m, err := p.ParseLine("servers.localhost.cpu_load 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     value, ok := m.GetTag("host")
     require.True(t, ok)
@@ -672,7 +673,7 @@ func TestParseDefaultTemplateTags(t *testing.T) {
     }
 
     m, err := p.ParseLine("servers.localhost.cpu_load 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     value, ok := m.GetTag("host")
     require.True(t, ok)
@@ -698,7 +699,7 @@ func TestParseDefaultTemplateTagsOverridGlobal(t *testing.T) {
 
     m, err := p.ParseLine("servers.localhost.cpu_load 11 1435077219")
     _ = m
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     value, ok := m.GetTag("host")
     require.True(t, ok)
@@ -725,7 +726,7 @@ func TestParseTemplateWhitespace(t *testing.T) {
     }
 
     m, err := p.ParseLine("servers.localhost.cpu_load 11 1435077219")
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     value, ok := m.GetTag("host")
     require.True(t, ok)
@@ -745,10 +746,11 @@ func TestApplyTemplate(t *testing.T) {
     p, err := NewGraphiteParser("_",
         []string{"current.* measurement.measurement"},
         nil)
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
-    measurement, _, _, _ := p.ApplyTemplate("current.users")
-    assert.Equal(t, "current_users", measurement)
+    measurement, _, _, err := p.ApplyTemplate("current.users")
+    require.NoError(t, err)
+    require.Equal(t, "current_users", measurement)
 }
 
 // Test basic functionality of ApplyTemplate
@@ -756,10 +758,11 @@ func TestApplyTemplateNoMatch(t *testing.T) {
     p, err := NewGraphiteParser(".",
         []string{"foo.bar measurement.measurement"},
         nil)
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
-    measurement, _, _, _ := p.ApplyTemplate("current.users")
-    assert.Equal(t, "current.users", measurement)
+    measurement, _, _, err := p.ApplyTemplate("current.users")
+    require.NoError(t, err)
+    require.Equal(t, "current.users", measurement)
 }
 
 // Test that most specific template is chosen
@@ -769,10 +772,10 @@ func TestApplyTemplateSpecific(t *testing.T) {
         "current.* measurement.measurement",
         "current.*.* measurement.measurement.service",
     }, nil)
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     measurement, tags, _, _ := p.ApplyTemplate("current.users.facebook")
-    assert.Equal(t, "current_users", measurement)
+    require.Equal(t, "current_users", measurement)
 
     service, ok := tags["service"]
     if !ok {
@@ -786,10 +789,10 @@ func TestApplyTemplateSpecific(t *testing.T) {
 func TestApplyTemplateTags(t *testing.T) {
     p, err := NewGraphiteParser("_",
         []string{"current.* measurement.measurement region=us-west"}, nil)
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     measurement, tags, _, _ := p.ApplyTemplate("current.users")
-    assert.Equal(t, "current_users", measurement)
+    require.Equal(t, "current_users", measurement)
 
     region, ok := tags["region"]
     if !ok {
@@ -803,11 +806,11 @@ func TestApplyTemplateTags(t *testing.T) {
 func TestApplyTemplateField(t *testing.T) {
     p, err := NewGraphiteParser("_",
         []string{"current.* measurement.measurement.field"}, nil)
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     measurement, _, field, err := p.ApplyTemplate("current.users.logged_in")
-
-    assert.Equal(t, "current_users", measurement)
+    require.NoError(t, err)
+    require.Equal(t, "current_users", measurement)
 
     if field != "logged_in" {
         t.Errorf("Parser.ApplyTemplate unexpected result. got %s, exp %s",
@@ -818,11 +821,11 @@ func TestApplyTemplateField(t *testing.T) {
 func TestApplyTemplateMultipleFieldsTogether(t *testing.T) {
     p, err := NewGraphiteParser("_",
         []string{"current.* measurement.measurement.field.field"}, nil)
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     measurement, _, field, err := p.ApplyTemplate("current.users.logged_in.ssh")
-
-    assert.Equal(t, "current_users", measurement)
+    require.NoError(t, err)
+    require.Equal(t, "current_users", measurement)
 
     if field != "logged_in_ssh" {
         t.Errorf("Parser.ApplyTemplate unexpected result. got %s, exp %s",
@@ -833,11 +836,11 @@ func TestApplyTemplateMultipleFieldsTogether(t *testing.T) {
 func TestApplyTemplateMultipleFieldsApart(t *testing.T) {
     p, err := NewGraphiteParser("_",
         []string{"current.* measurement.measurement.field.method.field"}, nil)
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     measurement, _, field, err := p.ApplyTemplate("current.users.logged_in.ssh.total")
-
-    assert.Equal(t, "current_users", measurement)
+    require.NoError(t, err)
+    require.Equal(t, "current_users", measurement)
 
     if field != "logged_in_total" {
         t.Errorf("Parser.ApplyTemplate unexpected result. got %s, exp %s",
@@ -848,11 +851,11 @@ func TestApplyTemplateMultipleFieldsApart(t *testing.T) {
 func TestApplyTemplateGreedyField(t *testing.T) {
     p, err := NewGraphiteParser("_",
         []string{"current.* measurement.measurement.field*"}, nil)
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     measurement, _, field, err := p.ApplyTemplate("current.users.logged_in")
-
-    assert.Equal(t, "current_users", measurement)
+    require.NoError(t, err)
+    require.Equal(t, "current_users", measurement)
 
     if field != "logged_in" {
         t.Errorf("Parser.ApplyTemplate unexpected result. got %s, exp %s",
@@ -868,11 +871,12 @@ func TestApplyTemplateOverSpecific(t *testing.T) {
         },
         nil,
     )
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     measurement, tags, _, err := p.ApplyTemplate("net.server001.a.b 2")
-    assert.Equal(t, "net", measurement)
-    assert.Equal(t,
+    require.NoError(t, err)
+    require.Equal(t, "net", measurement)
+    require.Equal(t,
         map[string]string{"host": "server001", "metric": "a.b"},
         tags)
 }
@@ -887,17 +891,19 @@ func TestApplyTemplateMostSpecificTemplate(t *testing.T) {
         },
         nil,
     )
-    assert.NoError(t, err)
+    require.NoError(t, err)
 
     measurement, tags, _, err := p.ApplyTemplate("net.server001.a.b.c 2")
-    assert.Equal(t, "net", measurement)
-    assert.Equal(t,
+    require.NoError(t, err)
+    require.Equal(t, "net", measurement)
+    require.Equal(t,
         map[string]string{"host": "server001", "metric": "a.b.c"},
         tags)
 
     measurement, tags, _, err = p.ApplyTemplate("net.server001.a.b 2")
-    assert.Equal(t, "net", measurement)
-    assert.Equal(t,
+    require.NoError(t, err)
+    require.Equal(t, "net", measurement)
+    require.Equal(t,
         map[string]string{"host": "server001", "metric": "a.b"},
         tags)
 }

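Beyond the assert-to-require swap, the graphite test hunks stop discarding return values that the linter flagged: the benchmark now checks the error from p.Parse, the ApplyTemplate tests capture err instead of a blank identifier, and the local variable that shadowed the metric package is renamed to m. A small illustrative sketch of the benchmark pattern; apply is a placeholder, not the graphite parser API:

package example

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/require"
)

// apply is a stand-in for a template-apply call that can fail.
func apply(template string) (string, error) {
	if template == "" {
		return "", errors.New("empty template")
	}
	return "current_users", nil
}

func BenchmarkApply(b *testing.B) {
	for i := 0; i < b.N; i++ {
		// Capture and check the error instead of dropping both return values.
		_, err := apply("current.* measurement.measurement")
		require.NoError(b, err)
	}
}
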
@@ -4,16 +4,16 @@ import (
     "bufio"
     "bytes"
     "fmt"
-    "log"
     "os"
     "regexp"
     "strconv"
     "strings"
     "time"
 
+    "github.com/vjeantet/grok"
+
     "github.com/influxdata/telegraf"
     "github.com/influxdata/telegraf/metric"
-    "github.com/vjeantet/grok"
 )
 
 var timeLayouts = map[string]string{
@@ -76,6 +76,7 @@ type Parser struct {
     CustomPatternFiles []string
     Measurement string
     DefaultTags map[string]string
+    Log telegraf.Logger `toml:"-"`
 
     // Timezone is an optional component to help render log dates to
     // your chosen zone.
@@ -107,13 +108,13 @@ type Parser struct {
     // }
     // }
     tsMap map[string]map[string]string
-    // patterns is a map of all of the parsed patterns from CustomPatterns
+    // patternsMap is a map of all of the parsed patterns from CustomPatterns
     // and CustomPatternFiles.
     // ie, {
     // "DURATION": "%{NUMBER}[nuµm]?s"
     // "RESPONSE_CODE": "%{NUMBER:rc:tag}"
     // }
-    patterns map[string]string
+    patternsMap map[string]string
     // foundTsLayouts is a slice of timestamp patterns that have been found
     // in the log lines. This slice gets updated if the user uses the generic
     // 'ts' modifier for timestamps. This slice is checked first for matches,
@@ -130,7 +131,7 @@ type Parser struct {
 func (p *Parser) Compile() error {
     p.typeMap = make(map[string]map[string]string)
     p.tsMap = make(map[string]map[string]string)
-    p.patterns = make(map[string]string)
+    p.patternsMap = make(map[string]string)
     p.tsModder = &tsModder{}
     var err error
     p.g, err = grok.NewWithConfig(&grok.Config{NamedCapturesOnly: true})
@@ -180,7 +181,7 @@ func (p *Parser) Compile() error {
 
     p.loc, err = time.LoadLocation(p.Timezone)
     if err != nil {
-        log.Printf("W! improper timezone supplied (%s), setting loc to UTC", p.Timezone)
+        p.Log.Warnf("Improper timezone supplied (%s), setting loc to UTC", p.Timezone)
         p.loc, _ = time.LoadLocation("UTC")
     }
 
@@ -209,7 +210,7 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
     }
 
     if len(values) == 0 {
-        log.Printf("D! Grok no match found for: %q", line)
+        p.Log.Debugf("Grok no match found for: %q", line)
         return nil, nil
     }
 
@@ -252,21 +253,21 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
         case Int:
             iv, err := strconv.ParseInt(v, 0, 64)
             if err != nil {
-                log.Printf("E! Error parsing %s to int: %s", v, err)
+                p.Log.Errorf("Error parsing %s to int: %s", v, err)
             } else {
                 fields[k] = iv
             }
         case Float:
             fv, err := strconv.ParseFloat(v, 64)
             if err != nil {
-                log.Printf("E! Error parsing %s to float: %s", v, err)
+                p.Log.Errorf("Error parsing %s to float: %s", v, err)
             } else {
                 fields[k] = fv
             }
         case Duration:
             d, err := time.ParseDuration(v)
             if err != nil {
-                log.Printf("E! Error parsing %s to duration: %s", v, err)
+                p.Log.Errorf("Error parsing %s to duration: %s", v, err)
             } else {
                 fields[k] = int64(d)
             }
@@ -277,13 +278,13 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
         case Epoch:
             parts := strings.SplitN(v, ".", 2)
             if len(parts) == 0 {
-                log.Printf("E! Error parsing %s to timestamp: %s", v, err)
+                p.Log.Errorf("Error parsing %s to timestamp: %s", v, err)
                 break
             }
 
             sec, err := strconv.ParseInt(parts[0], 10, 64)
             if err != nil {
-                log.Printf("E! Error parsing %s to timestamp: %s", v, err)
+                p.Log.Errorf("Error parsing %s to timestamp: %s", v, err)
                 break
             }
             ts := time.Unix(sec, 0)
@@ -293,7 +294,7 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
                 nsString := strings.Replace(padded[:9], " ", "0", -1)
                 nanosec, err := strconv.ParseInt(nsString, 10, 64)
                 if err != nil {
-                    log.Printf("E! Error parsing %s to timestamp: %s", v, err)
+                    p.Log.Errorf("Error parsing %s to timestamp: %s", v, err)
                     break
                 }
                 ts = ts.Add(time.Duration(nanosec) * time.Nanosecond)
@@ -302,14 +303,14 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
         case EpochMilli:
             ms, err := strconv.ParseInt(v, 10, 64)
             if err != nil {
-                log.Printf("E! Error parsing %s to int: %s", v, err)
+                p.Log.Errorf("Error parsing %s to int: %s", v, err)
             } else {
                 timestamp = time.Unix(0, ms*int64(time.Millisecond))
             }
         case EpochNano:
             iv, err := strconv.ParseInt(v, 10, 64)
             if err != nil {
-                log.Printf("E! Error parsing %s to int: %s", v, err)
+                p.Log.Errorf("Error parsing %s to int: %s", v, err)
             } else {
                 timestamp = time.Unix(0, iv)
             }
@@ -321,7 +322,7 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
                 }
                 timestamp = ts
             } else {
-                log.Printf("E! Error parsing %s to time layout [%s]: %s", v, t, err)
+                p.Log.Errorf("Error parsing %s to time layout [%s]: %s", v, t, err)
             }
         case GenericTimestamp:
             var foundTs bool
@@ -350,7 +351,7 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
             // if we still haven't found a timestamp layout, log it and we will
             // just use time.Now()
             if !foundTs {
-                log.Printf("E! Error parsing timestamp [%s], could not find any "+
+                p.Log.Errorf("Error parsing timestamp [%s], could not find any "+
                     "suitable time layouts.", v)
             }
         case Drop:
@@ -364,7 +365,7 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
                 }
                 timestamp = ts
             } else {
-                log.Printf("E! Error parsing %s to time layout [%s]: %s", v, t, err)
+                p.Log.Errorf("Error parsing %s to time layout [%s]: %s", v, t, err)
             }
         }
     }
@@ -405,7 +406,7 @@ func (p *Parser) addCustomPatterns(scanner *bufio.Scanner) {
         line := strings.TrimSpace(scanner.Text())
         if len(line) > 0 && line[0] != '#' {
             names := strings.SplitN(line, " ", 2)
-            p.patterns[names[0]] = names[1]
+            p.patternsMap[names[0]] = names[1]
         }
     }
 }
@@ -415,30 +416,30 @@ func (p *Parser) compileCustomPatterns() error {
     // check if the pattern contains a subpattern that is already defined
     // replace it with the subpattern for modifier inheritance.
     for i := 0; i < 2; i++ {
-        for name, pattern := range p.patterns {
+        for name, pattern := range p.patternsMap {
             subNames := patternOnlyRe.FindAllStringSubmatch(pattern, -1)
             for _, subName := range subNames {
-                if subPattern, ok := p.patterns[subName[1]]; ok {
+                if subPattern, ok := p.patternsMap[subName[1]]; ok {
                     pattern = strings.Replace(pattern, subName[0], subPattern, 1)
                 }
             }
-            p.patterns[name] = pattern
+            p.patternsMap[name] = pattern
         }
     }
 
     // check if pattern contains modifiers. Parse them out if it does.
-    for name, pattern := range p.patterns {
+    for name, pattern := range p.patternsMap {
         if modifierRe.MatchString(pattern) {
             // this pattern has modifiers, so parse out the modifiers
             pattern, err = p.parseTypedCaptures(name, pattern)
             if err != nil {
                 return err
             }
-            p.patterns[name] = pattern
+            p.patternsMap[name] = pattern
         }
     }
 
-    return p.g.AddPatternsFromMap(p.patterns)
+    return p.g.AddPatternsFromMap(p.patternsMap)
 }
 
 // parseTypedCaptures parses the capture modifiers, and then deletes the

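In the grok parser the same logger injection appears (a Log telegraf.Logger field with the `toml:"-"` tag replaces the log package), and the unexported patterns map is renamed patternsMap, presumably to keep it clearly distinct from the exported Patterns option. A rough sketch of how such a parser can be wired up in a test, assuming illustrative names (Parser here is a cut-down stand-in, not the real grok type); the test hunks in this commit assign testutil.Logger{} the same way:

package example

import (
	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/testutil"
)

// Parser is a cut-down stand-in for the grok parser above.
type Parser struct {
	Patterns    []string
	patternsMap map[string]string

	Log telegraf.Logger `toml:"-"`
}

// Compile fills the internal map from the exported option.
func (p *Parser) Compile() error {
	p.patternsMap = make(map[string]string)
	for _, pat := range p.Patterns {
		p.patternsMap[pat] = pat // placeholder bookkeeping only
	}
	return nil
}

// newTestParser shows the test-side wiring: testutil.Logger{} satisfies the
// telegraf.Logger interface and routes output through the test log.
func newTestParser() *Parser {
	return &Parser{
		Patterns: []string{"%{COMMON_LOG_FORMAT}"},
		Log:      testutil.Logger{},
	}
}
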
@ -5,9 +5,9 @@ import (
"testing"
"time"

-"github.com/influxdata/telegraf/testutil"
-"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
+
+"github.com/influxdata/telegraf/testutil"
)

func TestGrokParse(t *testing.T) {
@ -15,9 +15,11 @@ func TestGrokParse(t *testing.T) {
Measurement: "t_met",
Patterns: []string{"%{COMMON_LOG_FORMAT}"},
}
-parser.Compile()
-_, err := parser.Parse([]byte(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`))
-assert.NoError(t, err)
+err := parser.Compile()
+require.NoError(t, err)
+_, err = parser.Parse([]byte(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`))
+require.NoError(t, err)
}

// Verify that patterns with a regex lookahead fail at compile time.
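Most of the remaining test hunks replace `assert.*` calls with their `require.*` equivalents. A self-contained sketch (the `parseLine` helper is invented for illustration) of why the swap matters when later assertions dereference the result:

```go
package example

import (
	"errors"
	"strings"
	"testing"

	"github.com/stretchr/testify/require"
)

// parseLine is a stand-in for the parser under test; it only exists to make
// the example self-contained.
func parseLine(line string) (map[string]string, error) {
	key, value, found := strings.Cut(line, "=")
	if !found {
		return nil, errors.New("no key=value pair found")
	}
	return map[string]string{key: value}, nil
}

func TestRequireStopsOnFailure(t *testing.T) {
	fields, err := parseLine("status=200")

	// assert.NoError would record a failure and keep running, so a nil map
	// could still be indexed below; require calls t.FailNow and stops the
	// test at the first failed check instead.
	require.NoError(t, err)
	require.NotNil(t, fields)
	require.Equal(t, "200", fields["status"])
}
```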
@ -29,23 +31,23 @@ func TestParsePatternsWithLookahead(t *testing.T) {
|
||||||
MYLOG %{NUMBER:num:int} %{NOBOT:client}
|
MYLOG %{NUMBER:num:int} %{NOBOT:client}
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
_, err := p.ParseLine(`1466004605359052000 bot`)
|
_, err := p.ParseLine(`1466004605359052000 bot`)
|
||||||
assert.Error(t, err)
|
require.Error(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestMeasurementName(t *testing.T) {
|
func TestMeasurementName(t *testing.T) {
|
||||||
p := &Parser{
|
p := &Parser{
|
||||||
Patterns: []string{"%{COMMON_LOG_FORMAT}"},
|
Patterns: []string{"%{COMMON_LOG_FORMAT}"},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
// Parse an influxdb POST request
|
// Parse an influxdb POST request
|
||||||
m, err := p.ParseLine(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`)
|
m, err := p.ParseLine(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`)
|
||||||
require.NotNil(t, m)
|
require.NotNil(t, m)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"resp_bytes": int64(2326),
|
"resp_bytes": int64(2326),
|
||||||
"auth": "frank",
|
"auth": "frank",
|
||||||
|
|
@ -55,19 +57,19 @@ func TestMeasurementName(t *testing.T) {
|
||||||
"request": "/apache_pb.gif",
|
"request": "/apache_pb.gif",
|
||||||
},
|
},
|
||||||
m.Fields())
|
m.Fields())
|
||||||
assert.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
require.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCLF_IPv6(t *testing.T) {
|
func TestCLF_IPv6(t *testing.T) {
|
||||||
p := &Parser{
|
p := &Parser{
|
||||||
Patterns: []string{"%{COMMON_LOG_FORMAT}"},
|
Patterns: []string{"%{COMMON_LOG_FORMAT}"},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
m, err := p.ParseLine(`2001:0db8:85a3:0000:0000:8a2e:0370:7334 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`)
|
m, err := p.ParseLine(`2001:0db8:85a3:0000:0000:8a2e:0370:7334 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`)
|
||||||
require.NotNil(t, m)
|
require.NotNil(t, m)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"resp_bytes": int64(2326),
|
"resp_bytes": int64(2326),
|
||||||
"auth": "frank",
|
"auth": "frank",
|
||||||
|
|
@ -77,12 +79,12 @@ func TestCLF_IPv6(t *testing.T) {
|
||||||
"request": "/apache_pb.gif",
|
"request": "/apache_pb.gif",
|
||||||
},
|
},
|
||||||
m.Fields())
|
m.Fields())
|
||||||
assert.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
require.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
||||||
|
|
||||||
m, err = p.ParseLine(`::1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`)
|
m, err = p.ParseLine(`::1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`)
|
||||||
require.NotNil(t, m)
|
require.NotNil(t, m)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"resp_bytes": int64(2326),
|
"resp_bytes": int64(2326),
|
||||||
"auth": "frank",
|
"auth": "frank",
|
||||||
|
|
@ -92,20 +94,20 @@ func TestCLF_IPv6(t *testing.T) {
|
||||||
"request": "/apache_pb.gif",
|
"request": "/apache_pb.gif",
|
||||||
},
|
},
|
||||||
m.Fields())
|
m.Fields())
|
||||||
assert.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
require.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCustomInfluxdbHttpd(t *testing.T) {
|
func TestCustomInfluxdbHttpd(t *testing.T) {
|
||||||
p := &Parser{
|
p := &Parser{
|
||||||
Patterns: []string{`\[httpd\] %{COMBINED_LOG_FORMAT} %{UUID:uuid:drop} %{NUMBER:response_time_us:int}`},
|
Patterns: []string{`\[httpd\] %{COMBINED_LOG_FORMAT} %{UUID:uuid:drop} %{NUMBER:response_time_us:int}`},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
// Parse an influxdb POST request
|
// Parse an influxdb POST request
|
||||||
m, err := p.ParseLine(`[httpd] ::1 - - [14/Jun/2016:11:33:29 +0100] "POST /write?consistency=any&db=telegraf&precision=ns&rp= HTTP/1.1" 204 0 "-" "InfluxDBClient" 6f61bc44-321b-11e6-8050-000000000000 2513`)
|
m, err := p.ParseLine(`[httpd] ::1 - - [14/Jun/2016:11:33:29 +0100] "POST /write?consistency=any&db=telegraf&precision=ns&rp= HTTP/1.1" 204 0 "-" "InfluxDBClient" 6f61bc44-321b-11e6-8050-000000000000 2513`)
|
||||||
require.NotNil(t, m)
|
require.NotNil(t, m)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"resp_bytes": int64(0),
|
"resp_bytes": int64(0),
|
||||||
"auth": "-",
|
"auth": "-",
|
||||||
|
|
@ -118,13 +120,13 @@ func TestCustomInfluxdbHttpd(t *testing.T) {
|
||||||
"agent": "InfluxDBClient",
|
"agent": "InfluxDBClient",
|
||||||
},
|
},
|
||||||
m.Fields())
|
m.Fields())
|
||||||
assert.Equal(t, map[string]string{"verb": "POST", "resp_code": "204"}, m.Tags())
|
require.Equal(t, map[string]string{"verb": "POST", "resp_code": "204"}, m.Tags())
|
||||||
|
|
||||||
// Parse an influxdb GET request
|
// Parse an influxdb GET request
|
||||||
m, err = p.ParseLine(`[httpd] ::1 - - [14/Jun/2016:12:10:02 +0100] "GET /query?db=telegraf&q=SELECT+bytes%2Cresponse_time_us+FROM+logparser_grok+WHERE+http_method+%3D+%27GET%27+AND+response_time_us+%3E+0+AND+time+%3E+now%28%29+-+1h HTTP/1.1" 200 578 "http://localhost:8083/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.84 Safari/537.36" 8a3806f1-3220-11e6-8006-000000000000 988`)
|
m, err = p.ParseLine(`[httpd] ::1 - - [14/Jun/2016:12:10:02 +0100] "GET /query?db=telegraf&q=SELECT+bytes%2Cresponse_time_us+FROM+logparser_grok+WHERE+http_method+%3D+%27GET%27+AND+response_time_us+%3E+0+AND+time+%3E+now%28%29+-+1h HTTP/1.1" 200 578 "http://localhost:8083/" "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.84 Safari/537.36" 8a3806f1-3220-11e6-8006-000000000000 988`)
|
||||||
require.NotNil(t, m)
|
require.NotNil(t, m)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"resp_bytes": int64(578),
|
"resp_bytes": int64(578),
|
||||||
"auth": "-",
|
"auth": "-",
|
||||||
|
|
@ -137,7 +139,7 @@ func TestCustomInfluxdbHttpd(t *testing.T) {
|
||||||
"agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.84 Safari/537.36",
|
"agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.84 Safari/537.36",
|
||||||
},
|
},
|
||||||
m.Fields())
|
m.Fields())
|
||||||
assert.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
require.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
// common log format
|
// common log format
|
||||||
|
|
@ -146,13 +148,13 @@ func TestBuiltinCommonLogFormat(t *testing.T) {
|
||||||
p := &Parser{
|
p := &Parser{
|
||||||
Patterns: []string{"%{COMMON_LOG_FORMAT}"},
|
Patterns: []string{"%{COMMON_LOG_FORMAT}"},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
// Parse an influxdb POST request
|
// Parse an influxdb POST request
|
||||||
m, err := p.ParseLine(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`)
|
m, err := p.ParseLine(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`)
|
||||||
require.NotNil(t, m)
|
require.NotNil(t, m)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"resp_bytes": int64(2326),
|
"resp_bytes": int64(2326),
|
||||||
"auth": "frank",
|
"auth": "frank",
|
||||||
|
|
@ -162,7 +164,7 @@ func TestBuiltinCommonLogFormat(t *testing.T) {
|
||||||
"request": "/apache_pb.gif",
|
"request": "/apache_pb.gif",
|
||||||
},
|
},
|
||||||
m.Fields())
|
m.Fields())
|
||||||
assert.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
require.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
// common log format
|
// common log format
|
||||||
|
|
@ -171,13 +173,13 @@ func TestBuiltinCommonLogFormatWithNumbers(t *testing.T) {
|
||||||
p := &Parser{
|
p := &Parser{
|
||||||
Patterns: []string{"%{COMMON_LOG_FORMAT}"},
|
Patterns: []string{"%{COMMON_LOG_FORMAT}"},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
// Parse an influxdb POST request
|
// Parse an influxdb POST request
|
||||||
m, err := p.ParseLine(`127.0.0.1 user1234 frank1234 [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`)
|
m, err := p.ParseLine(`127.0.0.1 user1234 frank1234 [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326`)
|
||||||
require.NotNil(t, m)
|
require.NotNil(t, m)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"resp_bytes": int64(2326),
|
"resp_bytes": int64(2326),
|
||||||
"auth": "frank1234",
|
"auth": "frank1234",
|
||||||
|
|
@ -187,7 +189,7 @@ func TestBuiltinCommonLogFormatWithNumbers(t *testing.T) {
|
||||||
"request": "/apache_pb.gif",
|
"request": "/apache_pb.gif",
|
||||||
},
|
},
|
||||||
m.Fields())
|
m.Fields())
|
||||||
assert.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
require.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
// combined log format
|
// combined log format
|
||||||
|
|
@ -196,13 +198,13 @@ func TestBuiltinCombinedLogFormat(t *testing.T) {
|
||||||
p := &Parser{
|
p := &Parser{
|
||||||
Patterns: []string{"%{COMBINED_LOG_FORMAT}"},
|
Patterns: []string{"%{COMBINED_LOG_FORMAT}"},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
// Parse an influxdb POST request
|
// Parse an influxdb POST request
|
||||||
m, err := p.ParseLine(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326 "-" "Mozilla"`)
|
m, err := p.ParseLine(`127.0.0.1 user-identifier frank [10/Oct/2000:13:55:36 -0700] "GET /apache_pb.gif HTTP/1.0" 200 2326 "-" "Mozilla"`)
|
||||||
require.NotNil(t, m)
|
require.NotNil(t, m)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"resp_bytes": int64(2326),
|
"resp_bytes": int64(2326),
|
||||||
"auth": "frank",
|
"auth": "frank",
|
||||||
|
|
@ -214,7 +216,7 @@ func TestBuiltinCombinedLogFormat(t *testing.T) {
|
||||||
"agent": "Mozilla",
|
"agent": "Mozilla",
|
||||||
},
|
},
|
||||||
m.Fields())
|
m.Fields())
|
||||||
assert.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
require.Equal(t, map[string]string{"verb": "GET", "resp_code": "200"}, m.Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCompileStringAndParse(t *testing.T) {
|
func TestCompileStringAndParse(t *testing.T) {
|
||||||
|
|
@ -227,19 +229,19 @@ func TestCompileStringAndParse(t *testing.T) {
|
||||||
TEST_LOG_A %{NUMBER:myfloat:float} %{RESPONSE_CODE} %{IPORHOST:clientip} %{RESPONSE_TIME}
|
TEST_LOG_A %{NUMBER:myfloat:float} %{RESPONSE_CODE} %{IPORHOST:clientip} %{RESPONSE_TIME}
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`1.25 200 192.168.1.1 5.432µs`)
|
metricA, err := p.ParseLine(`1.25 200 192.168.1.1 5.432µs`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"clientip": "192.168.1.1",
|
"clientip": "192.168.1.1",
|
||||||
"myfloat": float64(1.25),
|
"myfloat": float64(1.25),
|
||||||
"response_time": int64(5432),
|
"response_time": int64(5432),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
require.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCompileErrorsOnInvalidPattern(t *testing.T) {
|
func TestCompileErrorsOnInvalidPattern(t *testing.T) {
|
||||||
|
|
@ -252,7 +254,7 @@ func TestCompileErrorsOnInvalidPattern(t *testing.T) {
|
||||||
TEST_LOG_A %{NUMBER:myfloat:float} %{RESPONSE_CODE} %{IPORHOST:clientip} %{RESPONSE_TIME}
|
TEST_LOG_A %{NUMBER:myfloat:float} %{RESPONSE_CODE} %{IPORHOST:clientip} %{RESPONSE_TIME}
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
assert.Error(t, p.Compile())
|
require.Error(t, p.Compile())
|
||||||
|
|
||||||
metricA, _ := p.ParseLine(`1.25 200 192.168.1.1 5.432µs`)
|
metricA, _ := p.ParseLine(`1.25 200 192.168.1.1 5.432µs`)
|
||||||
require.Nil(t, metricA)
|
require.Nil(t, metricA)
|
||||||
|
|
@ -262,19 +264,19 @@ func TestParsePatternsWithoutCustom(t *testing.T) {
|
||||||
p := &Parser{
|
p := &Parser{
|
||||||
Patterns: []string{"%{POSINT:ts:ts-epochnano} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}"},
|
Patterns: []string{"%{POSINT:ts:ts-epochnano} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}"},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`1466004605359052000 response_time=20821 mymetric=10890.645`)
|
metricA, err := p.ParseLine(`1466004605359052000 response_time=20821 mymetric=10890.645`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"response_time": int64(20821),
|
"response_time": int64(20821),
|
||||||
"metric": float64(10890.645),
|
"metric": float64(10890.645),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricA.Tags())
|
require.Equal(t, map[string]string{}, metricA.Tags())
|
||||||
assert.Equal(t, time.Unix(0, 1466004605359052000), metricA.Time())
|
require.Equal(t, time.Unix(0, 1466004605359052000), metricA.Time())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseEpochMilli(t *testing.T) {
|
func TestParseEpochMilli(t *testing.T) {
|
||||||
|
|
@ -284,19 +286,19 @@ func TestParseEpochMilli(t *testing.T) {
|
||||||
MYAPP %{POSINT:ts:ts-epochmilli} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}
|
MYAPP %{POSINT:ts:ts-epochmilli} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`1568540909963 response_time=20821 mymetric=10890.645`)
|
metricA, err := p.ParseLine(`1568540909963 response_time=20821 mymetric=10890.645`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"response_time": int64(20821),
|
"response_time": int64(20821),
|
||||||
"metric": float64(10890.645),
|
"metric": float64(10890.645),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricA.Tags())
|
require.Equal(t, map[string]string{}, metricA.Tags())
|
||||||
assert.Equal(t, time.Unix(0, 1568540909963000000), metricA.Time())
|
require.Equal(t, time.Unix(0, 1568540909963000000), metricA.Time())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseEpochNano(t *testing.T) {
|
func TestParseEpochNano(t *testing.T) {
|
||||||
|
|
@ -306,19 +308,19 @@ func TestParseEpochNano(t *testing.T) {
|
||||||
MYAPP %{POSINT:ts:ts-epochnano} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}
|
MYAPP %{POSINT:ts:ts-epochnano} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`1466004605359052000 response_time=20821 mymetric=10890.645`)
|
metricA, err := p.ParseLine(`1466004605359052000 response_time=20821 mymetric=10890.645`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"response_time": int64(20821),
|
"response_time": int64(20821),
|
||||||
"metric": float64(10890.645),
|
"metric": float64(10890.645),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricA.Tags())
|
require.Equal(t, map[string]string{}, metricA.Tags())
|
||||||
assert.Equal(t, time.Unix(0, 1466004605359052000), metricA.Time())
|
require.Equal(t, time.Unix(0, 1466004605359052000), metricA.Time())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseEpoch(t *testing.T) {
|
func TestParseEpoch(t *testing.T) {
|
||||||
|
|
@ -328,19 +330,19 @@ func TestParseEpoch(t *testing.T) {
|
||||||
MYAPP %{POSINT:ts:ts-epoch} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}
|
MYAPP %{POSINT:ts:ts-epoch} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`1466004605 response_time=20821 mymetric=10890.645`)
|
metricA, err := p.ParseLine(`1466004605 response_time=20821 mymetric=10890.645`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"response_time": int64(20821),
|
"response_time": int64(20821),
|
||||||
"metric": float64(10890.645),
|
"metric": float64(10890.645),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricA.Tags())
|
require.Equal(t, map[string]string{}, metricA.Tags())
|
||||||
assert.Equal(t, time.Unix(1466004605, 0), metricA.Time())
|
require.Equal(t, time.Unix(1466004605, 0), metricA.Time())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseEpochDecimal(t *testing.T) {
|
func TestParseEpochDecimal(t *testing.T) {
|
||||||
|
|
@ -395,7 +397,7 @@ func TestParseEpochDecimal(t *testing.T) {
|
||||||
parser := &Parser{
|
parser := &Parser{
|
||||||
Patterns: []string{"%{NUMBER:ts:ts-epoch} value=%{NUMBER:value:int}"},
|
Patterns: []string{"%{NUMBER:ts:ts-epoch} value=%{NUMBER:value:int}"},
|
||||||
}
|
}
|
||||||
assert.NoError(t, parser.Compile())
|
require.NoError(t, parser.Compile())
|
||||||
m, err := parser.ParseLine(tt.line)
|
m, err := parser.ParseLine(tt.line)
|
||||||
|
|
||||||
if tt.noMatch {
|
if tt.noMatch {
|
||||||
|
|
@ -420,71 +422,74 @@ func TestParseEpochErrors(t *testing.T) {
|
||||||
CustomPatterns: `
|
CustomPatterns: `
|
||||||
MYAPP %{WORD:ts:ts-epoch} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}
|
MYAPP %{WORD:ts:ts-epoch} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}
|
||||||
`,
|
`,
|
||||||
|
Log: testutil.Logger{},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
_, err := p.ParseLine(`foobar response_time=20821 mymetric=10890.645`)
|
_, err := p.ParseLine(`foobar response_time=20821 mymetric=10890.645`)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
p = &Parser{
|
p = &Parser{
|
||||||
Patterns: []string{"%{MYAPP}"},
|
Patterns: []string{"%{MYAPP}"},
|
||||||
CustomPatterns: `
|
CustomPatterns: `
|
||||||
MYAPP %{WORD:ts:ts-epochnano} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}
|
MYAPP %{WORD:ts:ts-epochnano} response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}
|
||||||
`,
|
`,
|
||||||
|
Log: testutil.Logger{},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
_, err = p.ParseLine(`foobar response_time=20821 mymetric=10890.645`)
|
_, err = p.ParseLine(`foobar response_time=20821 mymetric=10890.645`)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseGenericTimestamp(t *testing.T) {
|
func TestParseGenericTimestamp(t *testing.T) {
|
||||||
p := &Parser{
|
p := &Parser{
|
||||||
Patterns: []string{`\[%{HTTPDATE:ts:ts}\] response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}`},
|
Patterns: []string{`\[%{HTTPDATE:ts:ts}\] response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}`},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`[09/Jun/2016:03:37:03 +0000] response_time=20821 mymetric=10890.645`)
|
metricA, err := p.ParseLine(`[09/Jun/2016:03:37:03 +0000] response_time=20821 mymetric=10890.645`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"response_time": int64(20821),
|
"response_time": int64(20821),
|
||||||
"metric": float64(10890.645),
|
"metric": float64(10890.645),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricA.Tags())
|
require.Equal(t, map[string]string{}, metricA.Tags())
|
||||||
assert.Equal(t, time.Unix(1465443423, 0).UTC(), metricA.Time().UTC())
|
require.Equal(t, time.Unix(1465443423, 0).UTC(), metricA.Time().UTC())
|
||||||
|
|
||||||
metricB, err := p.ParseLine(`[09/Jun/2016:03:37:04 +0000] response_time=20821 mymetric=10890.645`)
|
metricB, err := p.ParseLine(`[09/Jun/2016:03:37:04 +0000] response_time=20821 mymetric=10890.645`)
|
||||||
require.NotNil(t, metricB)
|
require.NotNil(t, metricB)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"response_time": int64(20821),
|
"response_time": int64(20821),
|
||||||
"metric": float64(10890.645),
|
"metric": float64(10890.645),
|
||||||
},
|
},
|
||||||
metricB.Fields())
|
metricB.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricB.Tags())
|
require.Equal(t, map[string]string{}, metricB.Tags())
|
||||||
assert.Equal(t, time.Unix(1465443424, 0).UTC(), metricB.Time().UTC())
|
require.Equal(t, time.Unix(1465443424, 0).UTC(), metricB.Time().UTC())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseGenericTimestampNotFound(t *testing.T) {
|
func TestParseGenericTimestampNotFound(t *testing.T) {
|
||||||
p := &Parser{
|
p := &Parser{
|
||||||
Patterns: []string{`\[%{NOTSPACE:ts:ts}\] response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}`},
|
Patterns: []string{`\[%{NOTSPACE:ts:ts}\] response_time=%{POSINT:response_time:int} mymetric=%{NUMBER:metric:float}`},
|
||||||
|
Log: testutil.Logger{},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`[foobar] response_time=20821 mymetric=10890.645`)
|
metricA, err := p.ParseLine(`[foobar] response_time=20821 mymetric=10890.645`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"response_time": int64(20821),
|
"response_time": int64(20821),
|
||||||
"metric": float64(10890.645),
|
"metric": float64(10890.645),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricA.Tags())
|
require.Equal(t, map[string]string{}, metricA.Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCompileFileAndParse(t *testing.T) {
|
func TestCompileFileAndParse(t *testing.T) {
|
||||||
|
|
@ -492,12 +497,12 @@ func TestCompileFileAndParse(t *testing.T) {
|
||||||
Patterns: []string{"%{TEST_LOG_A}", "%{TEST_LOG_B}"},
|
Patterns: []string{"%{TEST_LOG_A}", "%{TEST_LOG_B}"},
|
||||||
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"clientip": "192.168.1.1",
|
"clientip": "192.168.1.1",
|
||||||
"myfloat": float64(1.25),
|
"myfloat": float64(1.25),
|
||||||
|
|
@ -505,23 +510,23 @@ func TestCompileFileAndParse(t *testing.T) {
|
||||||
"myint": int64(101),
|
"myint": int64(101),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
require.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
time.Date(2016, time.June, 4, 12, 41, 45, 0, time.FixedZone("foo", 60*60)).Nanosecond(),
|
time.Date(2016, time.June, 4, 12, 41, 45, 0, time.FixedZone("foo", 60*60)).Nanosecond(),
|
||||||
metricA.Time().Nanosecond())
|
metricA.Time().Nanosecond())
|
||||||
|
|
||||||
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
||||||
require.NotNil(t, metricB)
|
require.NotNil(t, metricB)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"myfloat": 1.25,
|
"myfloat": 1.25,
|
||||||
"mystring": "mystring",
|
"mystring": "mystring",
|
||||||
"nomodifier": "nomodifier",
|
"nomodifier": "nomodifier",
|
||||||
},
|
},
|
||||||
metricB.Fields())
|
metricB.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricB.Tags())
|
require.Equal(t, map[string]string{}, metricB.Tags())
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
time.Date(2016, time.June, 4, 12, 41, 46, 0, time.FixedZone("foo", 60*60)).Nanosecond(),
|
time.Date(2016, time.June, 4, 12, 41, 46, 0, time.FixedZone("foo", 60*60)).Nanosecond(),
|
||||||
metricB.Time().Nanosecond())
|
metricB.Time().Nanosecond())
|
||||||
}
|
}
|
||||||
|
|
@ -534,19 +539,19 @@ func TestCompileNoModifiersAndParse(t *testing.T) {
|
||||||
TEST_LOG_C %{NUMBER:myfloat} %{NUMBER} %{IPORHOST:clientip} %{DURATION:rt}
|
TEST_LOG_C %{NUMBER:myfloat} %{NUMBER} %{IPORHOST:clientip} %{DURATION:rt}
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`1.25 200 192.168.1.1 5.432µs`)
|
metricA, err := p.ParseLine(`1.25 200 192.168.1.1 5.432µs`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"clientip": "192.168.1.1",
|
"clientip": "192.168.1.1",
|
||||||
"myfloat": "1.25",
|
"myfloat": "1.25",
|
||||||
"rt": "5.432µs",
|
"rt": "5.432µs",
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricA.Tags())
|
require.Equal(t, map[string]string{}, metricA.Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestCompileNoNamesAndParse(t *testing.T) {
|
func TestCompileNoNamesAndParse(t *testing.T) {
|
||||||
|
|
@ -556,24 +561,26 @@ func TestCompileNoNamesAndParse(t *testing.T) {
DURATION %{NUMBER}[nuµm]?s
TEST_LOG_C %{NUMBER} %{NUMBER} %{IPORHOST} %{DURATION}
`,
+Log: testutil.Logger{},
}
-assert.NoError(t, p.Compile())
+require.NoError(t, p.Compile())

metricA, err := p.ParseLine(`1.25 200 192.168.1.1 5.432µs`)
require.Nil(t, metricA)
-assert.NoError(t, err)
+require.NoError(t, err)
}

func TestParseNoMatch(t *testing.T) {
p := &Parser{
Patterns: []string{"%{TEST_LOG_A}", "%{TEST_LOG_B}"},
CustomPatternFiles: []string{"./testdata/test-patterns"},
+Log: testutil.Logger{},
}
-assert.NoError(t, p.Compile())
+require.NoError(t, p.Compile())

metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] notnumber 200 192.168.1.1 5.432µs 101`)
-assert.NoError(t, err)
+require.NoError(t, err)
-assert.Nil(t, metricA)
+require.Nil(t, metricA)
}

func TestCompileErrors(t *testing.T) {
@ -584,14 +591,14 @@ func TestCompileErrors(t *testing.T) {
|
||||||
TEST_LOG_A %{HTTPDATE:ts1:ts-httpd} %{HTTPDATE:ts2:ts-httpd} %{NUMBER:mynum:int}
|
TEST_LOG_A %{HTTPDATE:ts1:ts-httpd} %{HTTPDATE:ts2:ts-httpd} %{NUMBER:mynum:int}
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
assert.Error(t, p.Compile())
|
require.Error(t, p.Compile())
|
||||||
|
|
||||||
// Compile fails because file doesn't exist:
|
// Compile fails because file doesn't exist:
|
||||||
p = &Parser{
|
p = &Parser{
|
||||||
Patterns: []string{"%{TEST_LOG_A}", "%{TEST_LOG_B}"},
|
Patterns: []string{"%{TEST_LOG_A}", "%{TEST_LOG_B}"},
|
||||||
CustomPatternFiles: []string{"/tmp/foo/bar/baz"},
|
CustomPatternFiles: []string{"/tmp/foo/bar/baz"},
|
||||||
}
|
}
|
||||||
assert.Error(t, p.Compile())
|
require.Error(t, p.Compile())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseErrors_MissingPattern(t *testing.T) {
|
func TestParseErrors_MissingPattern(t *testing.T) {
|
||||||
|
|
@ -614,6 +621,7 @@ func TestParseErrors_WrongIntegerType(t *testing.T) {
CustomPatterns: `
TEST_LOG_A %{NUMBER:ts:ts-epoch} %{WORD:myword:int}
`,
+Log: testutil.Logger{},
}
require.NoError(t, p.Compile())
m, err := p.ParseLine(`0 notnumber`)

@ -630,6 +638,7 @@ func TestParseErrors_WrongFloatType(t *testing.T) {
CustomPatterns: `
TEST_LOG_A %{NUMBER:ts:ts-epoch} %{WORD:myword:float}
`,
+Log: testutil.Logger{},
}
require.NoError(t, p.Compile())
m, err := p.ParseLine(`0 notnumber`)

@ -646,6 +655,7 @@ func TestParseErrors_WrongDurationType(t *testing.T) {
CustomPatterns: `
TEST_LOG_A %{NUMBER:ts:ts-epoch} %{WORD:myword:duration}
`,
+Log: testutil.Logger{},
}
require.NoError(t, p.Compile())
m, err := p.ParseLine(`0 notnumber`)

@ -662,6 +672,7 @@ func TestParseErrors_WrongTimeLayout(t *testing.T) {
CustomPatterns: `
TEST_LOG_A %{NUMBER:ts:ts-epoch} %{WORD:myword:duration}
`,
+Log: testutil.Logger{},
}
require.NoError(t, p.Compile())
m, err := p.ParseLine(`0 notnumber`)
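These hunks also wire `Log: testutil.Logger{}` into each parser under test so the new `p.Log` calls have somewhere to go. A rough sketch of that wiring with an invented plugin type; it assumes only that `testutil.Logger` satisfies `telegraf.Logger`:

```go
package example

import (
	"testing"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/testutil"
	"github.com/stretchr/testify/require"
)

// loggingParser is a hypothetical plugin that logs through an injected
// telegraf.Logger, mirroring the fields added in the hunks above.
type loggingParser struct {
	Log telegraf.Logger `toml:"-"`
}

func (p *loggingParser) warnOnEmpty(s string) bool {
	if s == "" {
		p.Log.Warnf("got an empty value")
		return false
	}
	return true
}

func TestLoggerInjection(t *testing.T) {
	// testutil.Logger{} implements telegraf.Logger on top of the standard
	// logger, so p.Log is never nil while the test runs.
	p := &loggingParser{Log: testutil.Logger{}}
	require.True(t, p.warnOnEmpty("value"))
	require.False(t, p.warnOnEmpty(""))
}
```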
@ -680,12 +691,12 @@ func TestParseInteger_Base16(t *testing.T) {
|
||||||
TEST_LOG_C %{NUMBER:myfloat} %{BASE10OR16NUM:response_code:int} %{IPORHOST:clientip} %{DURATION:rt}
|
TEST_LOG_C %{NUMBER:myfloat} %{BASE10OR16NUM:response_code:int} %{IPORHOST:clientip} %{DURATION:rt}
|
||||||
`,
|
`,
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`1.25 0xc8 192.168.1.1 5.432µs`)
|
metricA, err := p.ParseLine(`1.25 0xc8 192.168.1.1 5.432µs`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"clientip": "192.168.1.1",
|
"clientip": "192.168.1.1",
|
||||||
"response_code": int64(200),
|
"response_code": int64(200),
|
||||||
|
|
@ -693,7 +704,7 @@ func TestParseInteger_Base16(t *testing.T) {
|
||||||
"rt": "5.432µs",
|
"rt": "5.432µs",
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricA.Tags())
|
require.Equal(t, map[string]string{}, metricA.Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTsModder(t *testing.T) {
|
func TestTsModder(t *testing.T) {
|
||||||
|
|
@ -701,47 +712,47 @@ func TestTsModder(t *testing.T) {
|
||||||
|
|
||||||
reftime := time.Date(2006, time.December, 1, 1, 1, 1, int(time.Millisecond), time.UTC)
|
reftime := time.Date(2006, time.December, 1, 1, 1, 1, int(time.Millisecond), time.UTC)
|
||||||
modt := tsm.tsMod(reftime)
|
modt := tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime, modt)
|
require.Equal(t, reftime, modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Microsecond*1), modt)
|
require.Equal(t, reftime.Add(time.Microsecond*1), modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Microsecond*2), modt)
|
require.Equal(t, reftime.Add(time.Microsecond*2), modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Microsecond*3), modt)
|
require.Equal(t, reftime.Add(time.Microsecond*3), modt)
|
||||||
|
|
||||||
reftime = time.Date(2006, time.December, 1, 1, 1, 1, int(time.Microsecond), time.UTC)
|
reftime = time.Date(2006, time.December, 1, 1, 1, 1, int(time.Microsecond), time.UTC)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime, modt)
|
require.Equal(t, reftime, modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Nanosecond*1), modt)
|
require.Equal(t, reftime.Add(time.Nanosecond*1), modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Nanosecond*2), modt)
|
require.Equal(t, reftime.Add(time.Nanosecond*2), modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Nanosecond*3), modt)
|
require.Equal(t, reftime.Add(time.Nanosecond*3), modt)
|
||||||
|
|
||||||
reftime = time.Date(2006, time.December, 1, 1, 1, 1, int(time.Microsecond)*999, time.UTC)
|
reftime = time.Date(2006, time.December, 1, 1, 1, 1, int(time.Microsecond)*999, time.UTC)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime, modt)
|
require.Equal(t, reftime, modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Nanosecond*1), modt)
|
require.Equal(t, reftime.Add(time.Nanosecond*1), modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Nanosecond*2), modt)
|
require.Equal(t, reftime.Add(time.Nanosecond*2), modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Nanosecond*3), modt)
|
require.Equal(t, reftime.Add(time.Nanosecond*3), modt)
|
||||||
|
|
||||||
reftime = time.Date(2006, time.December, 1, 1, 1, 1, 0, time.UTC)
|
reftime = time.Date(2006, time.December, 1, 1, 1, 1, 0, time.UTC)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime, modt)
|
require.Equal(t, reftime, modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Millisecond*1), modt)
|
require.Equal(t, reftime.Add(time.Millisecond*1), modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Millisecond*2), modt)
|
require.Equal(t, reftime.Add(time.Millisecond*2), modt)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime.Add(time.Millisecond*3), modt)
|
require.Equal(t, reftime.Add(time.Millisecond*3), modt)
|
||||||
|
|
||||||
reftime = time.Time{}
|
reftime = time.Time{}
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
assert.Equal(t, reftime, modt)
|
require.Equal(t, reftime, modt)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTsModder_Rollover(t *testing.T) {
|
func TestTsModder_Rollover(t *testing.T) {
|
||||||
|
|
@ -752,14 +763,14 @@ func TestTsModder_Rollover(t *testing.T) {
|
||||||
for i := 1; i < 1000; i++ {
|
for i := 1; i < 1000; i++ {
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
}
|
}
|
||||||
assert.Equal(t, reftime.Add(time.Microsecond*999+time.Nanosecond), modt)
|
require.Equal(t, reftime.Add(time.Microsecond*999+time.Nanosecond), modt)
|
||||||
|
|
||||||
reftime = time.Date(2006, time.December, 1, 1, 1, 1, int(time.Microsecond), time.UTC)
|
reftime = time.Date(2006, time.December, 1, 1, 1, 1, int(time.Microsecond), time.UTC)
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
for i := 1; i < 1001; i++ {
|
for i := 1; i < 1001; i++ {
|
||||||
modt = tsm.tsMod(reftime)
|
modt = tsm.tsMod(reftime)
|
||||||
}
|
}
|
||||||
assert.Equal(t, reftime.Add(time.Nanosecond*1000), modt)
|
require.Equal(t, reftime.Add(time.Nanosecond*1000), modt)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestShortPatternRegression(t *testing.T) {
|
func TestShortPatternRegression(t *testing.T) {
|
||||||
|
|
@ -788,12 +799,12 @@ func TestTimezoneEmptyCompileFileAndParse(t *testing.T) {
|
||||||
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
||||||
Timezone: "",
|
Timezone: "",
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"clientip": "192.168.1.1",
|
"clientip": "192.168.1.1",
|
||||||
"myfloat": float64(1.25),
|
"myfloat": float64(1.25),
|
||||||
|
|
@ -801,21 +812,21 @@ func TestTimezoneEmptyCompileFileAndParse(t *testing.T) {
|
||||||
"myint": int64(101),
|
"myint": int64(101),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
require.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
||||||
assert.Equal(t, int64(1465040505000000000), metricA.Time().UnixNano())
|
require.Equal(t, int64(1465040505000000000), metricA.Time().UnixNano())
|
||||||
|
|
||||||
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
||||||
require.NotNil(t, metricB)
|
require.NotNil(t, metricB)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"myfloat": 1.25,
|
"myfloat": 1.25,
|
||||||
"mystring": "mystring",
|
"mystring": "mystring",
|
||||||
"nomodifier": "nomodifier",
|
"nomodifier": "nomodifier",
|
||||||
},
|
},
|
||||||
metricB.Fields())
|
metricB.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricB.Tags())
|
require.Equal(t, map[string]string{}, metricB.Tags())
|
||||||
assert.Equal(t, int64(1465044106000000000), metricB.Time().UnixNano())
|
require.Equal(t, int64(1465044106000000000), metricB.Time().UnixNano())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTimezoneMalformedCompileFileAndParse(t *testing.T) {
|
func TestTimezoneMalformedCompileFileAndParse(t *testing.T) {
|
||||||
|
|
@ -823,13 +834,14 @@ func TestTimezoneMalformedCompileFileAndParse(t *testing.T) {
|
||||||
Patterns: []string{"%{TEST_LOG_A}", "%{TEST_LOG_B}"},
|
Patterns: []string{"%{TEST_LOG_A}", "%{TEST_LOG_B}"},
|
||||||
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
||||||
Timezone: "Something/Weird",
|
Timezone: "Something/Weird",
|
||||||
|
Log: testutil.Logger{},
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"clientip": "192.168.1.1",
|
"clientip": "192.168.1.1",
|
||||||
"myfloat": float64(1.25),
|
"myfloat": float64(1.25),
|
||||||
|
|
@ -837,21 +849,21 @@ func TestTimezoneMalformedCompileFileAndParse(t *testing.T) {
|
||||||
"myint": int64(101),
|
"myint": int64(101),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
require.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
||||||
assert.Equal(t, int64(1465040505000000000), metricA.Time().UnixNano())
|
require.Equal(t, int64(1465040505000000000), metricA.Time().UnixNano())
|
||||||
|
|
||||||
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
||||||
require.NotNil(t, metricB)
|
require.NotNil(t, metricB)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"myfloat": 1.25,
|
"myfloat": 1.25,
|
||||||
"mystring": "mystring",
|
"mystring": "mystring",
|
||||||
"nomodifier": "nomodifier",
|
"nomodifier": "nomodifier",
|
||||||
},
|
},
|
||||||
metricB.Fields())
|
metricB.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricB.Tags())
|
require.Equal(t, map[string]string{}, metricB.Tags())
|
||||||
assert.Equal(t, int64(1465044106000000000), metricB.Time().UnixNano())
|
require.Equal(t, int64(1465044106000000000), metricB.Time().UnixNano())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTimezoneEuropeCompileFileAndParse(t *testing.T) {
|
func TestTimezoneEuropeCompileFileAndParse(t *testing.T) {
|
||||||
|
|
@ -860,12 +872,12 @@ func TestTimezoneEuropeCompileFileAndParse(t *testing.T) {
|
||||||
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
||||||
Timezone: "Europe/Berlin",
|
Timezone: "Europe/Berlin",
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"clientip": "192.168.1.1",
|
"clientip": "192.168.1.1",
|
||||||
"myfloat": float64(1.25),
|
"myfloat": float64(1.25),
|
||||||
|
|
@ -873,21 +885,21 @@ func TestTimezoneEuropeCompileFileAndParse(t *testing.T) {
|
||||||
"myint": int64(101),
|
"myint": int64(101),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
require.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
||||||
assert.Equal(t, int64(1465040505000000000), metricA.Time().UnixNano())
|
require.Equal(t, int64(1465040505000000000), metricA.Time().UnixNano())
|
||||||
|
|
||||||
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
||||||
require.NotNil(t, metricB)
|
require.NotNil(t, metricB)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"myfloat": 1.25,
|
"myfloat": 1.25,
|
||||||
"mystring": "mystring",
|
"mystring": "mystring",
|
||||||
"nomodifier": "nomodifier",
|
"nomodifier": "nomodifier",
|
||||||
},
|
},
|
||||||
metricB.Fields())
|
metricB.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricB.Tags())
|
require.Equal(t, map[string]string{}, metricB.Tags())
|
||||||
assert.Equal(t, int64(1465036906000000000), metricB.Time().UnixNano())
|
require.Equal(t, int64(1465036906000000000), metricB.Time().UnixNano())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTimezoneAmericasCompileFileAndParse(t *testing.T) {
|
func TestTimezoneAmericasCompileFileAndParse(t *testing.T) {
|
||||||
|
|
@ -896,12 +908,12 @@ func TestTimezoneAmericasCompileFileAndParse(t *testing.T) {
|
||||||
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
||||||
Timezone: "Canada/Eastern",
|
Timezone: "Canada/Eastern",
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"clientip": "192.168.1.1",
|
"clientip": "192.168.1.1",
|
||||||
"myfloat": float64(1.25),
|
"myfloat": float64(1.25),
|
||||||
|
|
@ -909,21 +921,21 @@ func TestTimezoneAmericasCompileFileAndParse(t *testing.T) {
|
||||||
"myint": int64(101),
|
"myint": int64(101),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
require.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
||||||
assert.Equal(t, int64(1465040505000000000), metricA.Time().UnixNano())
|
require.Equal(t, int64(1465040505000000000), metricA.Time().UnixNano())
|
||||||
|
|
||||||
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
||||||
require.NotNil(t, metricB)
|
require.NotNil(t, metricB)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"myfloat": 1.25,
|
"myfloat": 1.25,
|
||||||
"mystring": "mystring",
|
"mystring": "mystring",
|
||||||
"nomodifier": "nomodifier",
|
"nomodifier": "nomodifier",
|
||||||
},
|
},
|
||||||
metricB.Fields())
|
metricB.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricB.Tags())
|
require.Equal(t, map[string]string{}, metricB.Tags())
|
||||||
assert.Equal(t, int64(1465058506000000000), metricB.Time().UnixNano())
|
require.Equal(t, int64(1465058506000000000), metricB.Time().UnixNano())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestTimezoneLocalCompileFileAndParse(t *testing.T) {
|
func TestTimezoneLocalCompileFileAndParse(t *testing.T) {
|
||||||
|
|
@ -932,12 +944,12 @@ func TestTimezoneLocalCompileFileAndParse(t *testing.T) {
|
||||||
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
CustomPatternFiles: []string{"./testdata/test-patterns"},
|
||||||
Timezone: "Local",
|
Timezone: "Local",
|
||||||
}
|
}
|
||||||
assert.NoError(t, p.Compile())
|
require.NoError(t, p.Compile())
|
||||||
|
|
||||||
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
metricA, err := p.ParseLine(`[04/Jun/2016:12:41:45 +0100] 1.25 200 192.168.1.1 5.432µs 101`)
|
||||||
require.NotNil(t, metricA)
|
require.NotNil(t, metricA)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"clientip": "192.168.1.1",
|
"clientip": "192.168.1.1",
|
||||||
"myfloat": float64(1.25),
|
"myfloat": float64(1.25),
|
||||||
|
|
@ -945,21 +957,21 @@ func TestTimezoneLocalCompileFileAndParse(t *testing.T) {
|
||||||
"myint": int64(101),
|
"myint": int64(101),
|
||||||
},
|
},
|
||||||
metricA.Fields())
|
metricA.Fields())
|
||||||
assert.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
require.Equal(t, map[string]string{"response_code": "200"}, metricA.Tags())
|
||||||
assert.Equal(t, int64(1465040505000000000), metricA.Time().UnixNano())
|
require.Equal(t, int64(1465040505000000000), metricA.Time().UnixNano())
|
||||||
|
|
||||||
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
metricB, err := p.ParseLine(`[04/06/2016--12:41:46] 1.25 mystring dropme nomodifier`)
|
||||||
require.NotNil(t, metricB)
|
require.NotNil(t, metricB)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t,
|
require.Equal(t,
|
||||||
map[string]interface{}{
|
map[string]interface{}{
|
||||||
"myfloat": 1.25,
|
"myfloat": 1.25,
|
||||||
"mystring": "mystring",
|
"mystring": "mystring",
|
||||||
"nomodifier": "nomodifier",
|
"nomodifier": "nomodifier",
|
||||||
},
|
},
|
||||||
metricB.Fields())
|
metricB.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metricB.Tags())
|
require.Equal(t, map[string]string{}, metricB.Tags())
|
||||||
assert.Equal(t, time.Date(2016, time.June, 4, 12, 41, 46, 0, time.Local).UnixNano(), metricB.Time().UnixNano())
|
require.Equal(t, time.Date(2016, time.June, 4, 12, 41, 46, 0, time.Local).UnixNano(), metricB.Time().UnixNano())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestNewlineInPatterns(t *testing.T) {
|
func TestNewlineInPatterns(t *testing.T) {
|
||||||
|
|
@ -1087,7 +1099,8 @@ func TestEmptyYearInTimestamp(t *testing.T) {
`,
}
require.NoError(t, p.Compile())
-p.ParseLine("Nov 6 13:57:03 generic iTunes[6504]: info> Scale factor of main display = 2.0")
+_, err := p.ParseLine("Nov 6 13:57:03 generic iTunes[6504]: info> Scale factor of main display = 2.0")
+require.NoError(t, err)
m, err := p.ParseLine("Nov 6 13:57:03 generic iTunes[6504]: objc[6504]: Object descriptor was null.")
require.NoError(t, err)
require.NotNil(t, m)
@ -7,8 +7,9 @@ import (
"io"
"testing"

-"github.com/influxdata/telegraf/plugins/parsers/influx"
"github.com/stretchr/testify/require"
+
+"github.com/influxdata/telegraf/plugins/parsers/influx"
)

type TestingHandler struct {

@ -1950,7 +1951,10 @@ type MockHandler struct {
}

func (h *MockHandler) SetMeasurement(name []byte) error {
-h.TestingHandler.SetMeasurement(name)
+err := h.TestingHandler.SetMeasurement(name)
+if err != nil {
+return err
+}
return h.SetMeasurementF(name)
}

@ -1963,8 +1967,7 @@ func (h *MockHandler) AddInt(name, value []byte) error {
if err != nil {
return err
}
-h.TestingHandler.AddInt(name, value)
-return nil
+return h.TestingHandler.AddInt(name, value)
}

func (h *MockHandler) AddUint(name, value []byte) error {

@ -1972,8 +1975,7 @@ func (h *MockHandler) AddUint(name, value []byte) error {
if err != nil {
return err
}
-h.TestingHandler.AddUint(name, value)
-return nil
+return h.TestingHandler.AddUint(name, value)
}

func (h *MockHandler) AddFloat(name, value []byte) error {
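The `MockHandler` hunks stop discarding the embedded handler's return value: either the error is checked before the callback runs, or the delegate call becomes the method's own return. A compact, self-contained sketch of both shapes with invented types:

```go
package example

import "fmt"

// delegate is a hypothetical embedded helper whose errors were previously ignored.
type delegate struct{ fail bool }

func (d delegate) record(name string) error {
	if d.fail {
		return fmt.Errorf("recording %q failed", name)
	}
	return nil
}

type mock struct {
	delegate delegate
	onRecord func(string) error
}

// Set checks the delegate's error before running the callback, mirroring the
// SetMeasurement change above.
func (m *mock) Set(name string) error {
	if err := m.delegate.record(name); err != nil {
		return err
	}
	if m.onRecord == nil {
		return nil
	}
	return m.onRecord(name)
}

// Add returns the delegate's error directly instead of "call, then return nil",
// mirroring the AddInt/AddUint change above.
func (m *mock) Add(name string) error {
	return m.delegate.record(name)
}
```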
@ -9,10 +9,11 @@ import (
"testing"
"time"

+"github.com/stretchr/testify/require"
+
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/metric"
"github.com/influxdata/telegraf/testutil"
-"github.com/stretchr/testify/require"
)

var DefaultTime = func() time.Time {

@ -849,7 +850,10 @@ func TestStreamParserProducesAllAvailableMetrics(t *testing.T) {
parser := NewStreamParser(r)
parser.SetTimeFunc(DefaultTime)

-go w.Write([]byte("metric value=1\nmetric2 value=1\n"))
+go func() {
+_, err := w.Write([]byte("metric value=1\nmetric2 value=1\n"))
+require.NoError(t, err)
+}()

_, err := parser.Next()
require.NoError(t, err)
@ -5,15 +5,15 @@ import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"log"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/tidwall/gjson"
|
||||||
|
|
||||||
"github.com/influxdata/telegraf"
|
"github.com/influxdata/telegraf"
|
||||||
"github.com/influxdata/telegraf/filter"
|
"github.com/influxdata/telegraf/filter"
|
||||||
"github.com/influxdata/telegraf/internal"
|
"github.com/influxdata/telegraf/internal"
|
||||||
"github.com/influxdata/telegraf/metric"
|
"github.com/influxdata/telegraf/metric"
|
||||||
"github.com/tidwall/gjson"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
|
|
@ -45,6 +45,8 @@ type Parser struct {
|
||||||
timezone string
|
timezone string
|
||||||
defaultTags map[string]string
|
defaultTags map[string]string
|
||||||
strict bool
|
strict bool
|
||||||
|
|
||||||
|
Log telegraf.Logger `toml:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func New(config *Config) (*Parser, error) {
|
func New(config *Config) (*Parser, error) {
|
||||||
|
|
@ -110,8 +112,7 @@ func (p *Parser) parseObject(data map[string]interface{}, timestamp time.Time) (
|
||||||
|
|
||||||
// checks if json_name_key is set
|
// checks if json_name_key is set
|
||||||
if p.nameKey != "" {
|
if p.nameKey != "" {
|
||||||
switch field := f.Fields[p.nameKey].(type) {
|
if field, ok := f.Fields[p.nameKey].(string); ok {
|
||||||
case string:
|
|
||||||
name = field
|
name = field
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -172,7 +173,7 @@ func (p *Parser) switchFieldToTag(tags map[string]string, fields map[string]inte
|
||||||
tags[name] = strconv.FormatFloat(t, 'f', -1, 64)
|
tags[name] = strconv.FormatFloat(t, 'f', -1, 64)
|
||||||
delete(fields, name)
|
delete(fields, name)
|
||||||
default:
|
default:
|
||||||
log.Printf("E! [parsers.json] Unrecognized type %T", value)
|
p.Log.Errorf("Unrecognized type %T", value)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -194,7 +195,7 @@ func (p *Parser) Parse(buf []byte) ([]telegraf.Metric, error) {
|
||||||
result := gjson.GetBytes(buf, p.query)
|
result := gjson.GetBytes(buf, p.query)
|
||||||
buf = []byte(result.Raw)
|
buf = []byte(result.Raw)
|
||||||
if !result.IsArray() && !result.IsObject() && result.Type != gjson.Null {
|
if !result.IsArray() && !result.IsObject() && result.Type != gjson.Null {
|
||||||
err := fmt.Errorf("E! Query path must lead to a JSON object, array of objects or null, but lead to: %v", result.Type)
|
err := fmt.Errorf("query path must lead to a JSON object, array of objects or null, but lead to: %v", result.Type)
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
if result.Type == gjson.Null {
|
if result.Type == gjson.Null {
|
||||||
|
|
@ -292,23 +293,21 @@ func (f *JSONFlattener) FullFlattenJSON(
|
||||||
}
|
}
|
||||||
err := f.FullFlattenJSON(fieldkey, v, convertString, convertBool)
|
err := f.FullFlattenJSON(fieldkey, v, convertString, convertBool)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
case float64:
|
case float64:
|
||||||
f.Fields[fieldname] = t
|
f.Fields[fieldname] = t
|
||||||
case string:
|
case string:
|
||||||
if convertString {
|
if !convertString {
|
||||||
f.Fields[fieldname] = v.(string)
|
|
||||||
} else {
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
f.Fields[fieldname] = v.(string)
|
||||||
case bool:
|
case bool:
|
||||||
if convertBool {
|
if !convertBool {
|
||||||
f.Fields[fieldname] = v.(bool)
|
|
||||||
} else {
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
f.Fields[fieldname] = v.(bool)
|
||||||
case nil:
|
case nil:
|
||||||
return nil
|
return nil
|
||||||
default:
|
default:
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,6 @@ import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"log"
|
|
||||||
"os/exec"
|
"os/exec"
|
||||||
"regexp"
|
"regexp"
|
||||||
"strconv"
|
"strconv"
|
||||||
|
|
@ -74,6 +73,7 @@ func TryAddState(runErr error, metrics []telegraf.Metric) ([]telegraf.Metric, er
|
||||||
type NagiosParser struct {
|
type NagiosParser struct {
|
||||||
MetricName string
|
MetricName string
|
||||||
DefaultTags map[string]string
|
DefaultTags map[string]string
|
||||||
|
Log telegraf.Logger `toml:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// Got from Alignak
|
// Got from Alignak
|
||||||
|
|
@ -111,12 +111,12 @@ func (p *NagiosParser) Parse(buf []byte) ([]telegraf.Metric, error) {
|
||||||
case 2:
|
case 2:
|
||||||
ms, err := parsePerfData(string(parts[1]), ts)
|
ms, err := parsePerfData(string(parts[1]), ts)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Printf("E! [parser.nagios] failed to parse performance data: %s\n", err.Error())
|
p.Log.Errorf("Failed to parse performance data: %s\n", err.Error())
|
||||||
}
|
}
|
||||||
metrics = append(metrics, ms...)
|
metrics = append(metrics, ms...)
|
||||||
fallthrough
|
fallthrough
|
||||||
case 1:
|
case 1:
|
||||||
msg.Write(bytes.TrimSpace(parts[0]))
|
msg.Write(bytes.TrimSpace(parts[0])) //nolint:revive // from buffer.go: "err is always nil"
|
||||||
default:
|
default:
|
||||||
return nil, errors.New("illegal output format")
|
return nil, errors.New("illegal output format")
|
||||||
}
|
}
|
||||||
|
|
@ -126,34 +126,34 @@ func (p *NagiosParser) Parse(buf []byte) ([]telegraf.Metric, error) {
|
||||||
if bytes.Contains(s.Bytes(), []byte{'|'}) {
|
if bytes.Contains(s.Bytes(), []byte{'|'}) {
|
||||||
parts := bytes.Split(s.Bytes(), []byte{'|'})
|
parts := bytes.Split(s.Bytes(), []byte{'|'})
|
||||||
if longmsg.Len() != 0 {
|
if longmsg.Len() != 0 {
|
||||||
longmsg.WriteByte('\n')
|
longmsg.WriteByte('\n') //nolint:revive // from buffer.go: "err is always nil"
|
||||||
}
|
}
|
||||||
longmsg.Write(bytes.TrimSpace(parts[0]))
|
longmsg.Write(bytes.TrimSpace(parts[0])) //nolint:revive // from buffer.go: "err is always nil"
|
||||||
|
|
||||||
ms, err := parsePerfData(string(parts[1]), ts)
|
ms, err := parsePerfData(string(parts[1]), ts)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Printf("E! [parser.nagios] failed to parse performance data: %s\n", err.Error())
|
p.Log.Errorf("Failed to parse performance data: %s\n", err.Error())
|
||||||
}
|
}
|
||||||
metrics = append(metrics, ms...)
|
metrics = append(metrics, ms...)
|
||||||
break
|
break
|
||||||
}
|
}
|
||||||
if longmsg.Len() != 0 {
|
if longmsg.Len() != 0 {
|
||||||
longmsg.WriteByte('\n')
|
longmsg.WriteByte('\n') //nolint:revive // from buffer.go: "err is always nil"
|
||||||
}
|
}
|
||||||
longmsg.Write(bytes.TrimSpace((s.Bytes())))
|
longmsg.Write(bytes.TrimSpace(s.Bytes())) //nolint:revive // from buffer.go: "err is always nil"
|
||||||
}
|
}
|
||||||
|
|
||||||
// Parse extra performance data.
|
// Parse extra performance data.
|
||||||
for s.Scan() {
|
for s.Scan() {
|
||||||
ms, err := parsePerfData(s.Text(), ts)
|
ms, err := parsePerfData(s.Text(), ts)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Printf("E! [parser.nagios] failed to parse performance data: %s\n", err.Error())
|
p.Log.Errorf("Failed to parse performance data: %s\n", err.Error())
|
||||||
}
|
}
|
||||||
metrics = append(metrics, ms...)
|
metrics = append(metrics, ms...)
|
||||||
}
|
}
|
||||||
|
|
||||||
if s.Err() != nil {
|
if s.Err() != nil {
|
||||||
log.Printf("D! [parser.nagios] unexpected io error: %s\n", s.Err())
|
p.Log.Debugf("Unexpected io error: %s\n", s.Err())
|
||||||
}
|
}
|
||||||
|
|
||||||
// Create nagios state.
|
// Create nagios state.
|
||||||
|
|
@ -291,5 +291,5 @@ func parseThreshold(threshold string) (min float64, max float64, err error) {
|
||||||
return 0, 0, ErrBadThresholdFormat
|
return 0, 0, ErrBadThresholdFormat
|
||||||
}
|
}
|
||||||
|
|
||||||
return
|
return min, max, err
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -5,7 +5,6 @@ import (
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
|
|
||||||
"github.com/influxdata/telegraf"
|
"github.com/influxdata/telegraf"
|
||||||
|
|
@ -195,8 +194,8 @@ func TestTryAddState(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
func assertNagiosState(t *testing.T, m telegraf.Metric, f map[string]interface{}) {
|
func assertNagiosState(t *testing.T, m telegraf.Metric, f map[string]interface{}) {
|
||||||
assert.Equal(t, map[string]string{}, m.Tags())
|
require.Equal(t, map[string]string{}, m.Tags())
|
||||||
assert.Equal(t, f, m.Fields())
|
require.Equal(t, f, m.Fields())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParse(t *testing.T) {
|
func TestParse(t *testing.T) {
|
||||||
|
|
@ -219,11 +218,11 @@ with three lines
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.Len(t, metrics, 3)
|
require.Len(t, metrics, 3)
|
||||||
// rta
|
// rta
|
||||||
assert.Equal(t, map[string]string{
|
require.Equal(t, map[string]string{
|
||||||
"unit": "ms",
|
"unit": "ms",
|
||||||
"perfdata": "rta",
|
"perfdata": "rta",
|
||||||
}, metrics[0].Tags())
|
}, metrics[0].Tags())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(0.298),
|
"value": float64(0.298),
|
||||||
"warning_lt": float64(0),
|
"warning_lt": float64(0),
|
||||||
"warning_gt": float64(4000),
|
"warning_gt": float64(4000),
|
||||||
|
|
@ -233,11 +232,11 @@ with three lines
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
|
|
||||||
// pl
|
// pl
|
||||||
assert.Equal(t, map[string]string{
|
require.Equal(t, map[string]string{
|
||||||
"unit": "%",
|
"unit": "%",
|
||||||
"perfdata": "pl",
|
"perfdata": "pl",
|
||||||
}, metrics[1].Tags())
|
}, metrics[1].Tags())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(0),
|
"value": float64(0),
|
||||||
"warning_lt": float64(0),
|
"warning_lt": float64(0),
|
||||||
"warning_gt": float64(80),
|
"warning_gt": float64(80),
|
||||||
|
|
@ -260,11 +259,11 @@ with three lines
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.Len(t, metrics, 2)
|
require.Len(t, metrics, 2)
|
||||||
// time
|
// time
|
||||||
assert.Equal(t, map[string]string{
|
require.Equal(t, map[string]string{
|
||||||
"unit": "s",
|
"unit": "s",
|
||||||
"perfdata": "time",
|
"perfdata": "time",
|
||||||
}, metrics[0].Tags())
|
}, metrics[0].Tags())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(0.008457),
|
"value": float64(0.008457),
|
||||||
"min": float64(0),
|
"min": float64(0),
|
||||||
"max": float64(10),
|
"max": float64(10),
|
||||||
|
|
@ -282,10 +281,10 @@ with three lines
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.Len(t, metrics, 2)
|
require.Len(t, metrics, 2)
|
||||||
// time
|
// time
|
||||||
assert.Equal(t, map[string]string{
|
require.Equal(t, map[string]string{
|
||||||
"perfdata": "time",
|
"perfdata": "time",
|
||||||
}, metrics[0].Tags())
|
}, metrics[0].Tags())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(0.008457),
|
"value": float64(0.008457),
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
|
|
||||||
|
|
@ -301,10 +300,10 @@ with three lines
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.Len(t, metrics, 4)
|
require.Len(t, metrics, 4)
|
||||||
// load1
|
// load1
|
||||||
assert.Equal(t, map[string]string{
|
require.Equal(t, map[string]string{
|
||||||
"perfdata": "load1",
|
"perfdata": "load1",
|
||||||
}, metrics[0].Tags())
|
}, metrics[0].Tags())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(0.00),
|
"value": float64(0.00),
|
||||||
"warning_lt": MinFloat64,
|
"warning_lt": MinFloat64,
|
||||||
"warning_gt": float64(4),
|
"warning_gt": float64(4),
|
||||||
|
|
@ -314,10 +313,10 @@ with three lines
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
|
|
||||||
// load5
|
// load5
|
||||||
assert.Equal(t, map[string]string{
|
require.Equal(t, map[string]string{
|
||||||
"perfdata": "load5",
|
"perfdata": "load5",
|
||||||
}, metrics[1].Tags())
|
}, metrics[1].Tags())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(0.01),
|
"value": float64(0.01),
|
||||||
"warning_gt": float64(3),
|
"warning_gt": float64(3),
|
||||||
"warning_lt": float64(0),
|
"warning_lt": float64(0),
|
||||||
|
|
@ -327,10 +326,10 @@ with three lines
|
||||||
}, metrics[1].Fields())
|
}, metrics[1].Fields())
|
||||||
|
|
||||||
// load15
|
// load15
|
||||||
assert.Equal(t, map[string]string{
|
require.Equal(t, map[string]string{
|
||||||
"perfdata": "load15",
|
"perfdata": "load15",
|
||||||
}, metrics[2].Tags())
|
}, metrics[2].Tags())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(0.05),
|
"value": float64(0.05),
|
||||||
"warning_lt": float64(0),
|
"warning_lt": float64(0),
|
||||||
"warning_gt": float64(2),
|
"warning_gt": float64(2),
|
||||||
|
|
@ -382,11 +381,11 @@ with three lines
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
require.Len(t, metrics, 5)
|
require.Len(t, metrics, 5)
|
||||||
// /=2643MB;5948;5958;0;5968
|
// /=2643MB;5948;5958;0;5968
|
||||||
assert.Equal(t, map[string]string{
|
require.Equal(t, map[string]string{
|
||||||
"unit": "MB",
|
"unit": "MB",
|
||||||
"perfdata": "/",
|
"perfdata": "/",
|
||||||
}, metrics[0].Tags())
|
}, metrics[0].Tags())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(2643),
|
"value": float64(2643),
|
||||||
"warning_lt": float64(0),
|
"warning_lt": float64(0),
|
||||||
"warning_gt": float64(5948),
|
"warning_gt": float64(5948),
|
||||||
|
|
@ -397,11 +396,11 @@ with three lines
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
|
|
||||||
// /boot=68MB;88;93;0;98
|
// /boot=68MB;88;93;0;98
|
||||||
assert.Equal(t, map[string]string{
|
require.Equal(t, map[string]string{
|
||||||
"unit": "MB",
|
"unit": "MB",
|
||||||
"perfdata": "/boot",
|
"perfdata": "/boot",
|
||||||
}, metrics[1].Tags())
|
}, metrics[1].Tags())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(68),
|
"value": float64(68),
|
||||||
"warning_lt": float64(0),
|
"warning_lt": float64(0),
|
||||||
"warning_gt": float64(88),
|
"warning_gt": float64(88),
|
||||||
|
|
@ -412,11 +411,11 @@ with three lines
|
||||||
}, metrics[1].Fields())
|
}, metrics[1].Fields())
|
||||||
|
|
||||||
// /home=69357MB;253404;253409;0;253414
|
// /home=69357MB;253404;253409;0;253414
|
||||||
assert.Equal(t, map[string]string{
|
require.Equal(t, map[string]string{
|
||||||
"unit": "MB",
|
"unit": "MB",
|
||||||
"perfdata": "/home",
|
"perfdata": "/home",
|
||||||
}, metrics[2].Tags())
|
}, metrics[2].Tags())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(69357),
|
"value": float64(69357),
|
||||||
"warning_lt": float64(0),
|
"warning_lt": float64(0),
|
||||||
"warning_gt": float64(253404),
|
"warning_gt": float64(253404),
|
||||||
|
|
@ -427,11 +426,11 @@ with three lines
|
||||||
}, metrics[2].Fields())
|
}, metrics[2].Fields())
|
||||||
|
|
||||||
// /var/log=818MB;970;975;0;980
|
// /var/log=818MB;970;975;0;980
|
||||||
assert.Equal(t, map[string]string{
|
require.Equal(t, map[string]string{
|
||||||
"unit": "MB",
|
"unit": "MB",
|
||||||
"perfdata": "/var/log",
|
"perfdata": "/var/log",
|
||||||
}, metrics[3].Tags())
|
}, metrics[3].Tags())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(818),
|
"value": float64(818),
|
||||||
"warning_lt": float64(0),
|
"warning_lt": float64(0),
|
||||||
"warning_gt": float64(970),
|
"warning_gt": float64(970),
|
||||||
|
|
|
||||||
|
|
@ -11,13 +11,12 @@ import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/matttproud/golang_protobuf_extensions/pbutil"
|
"github.com/matttproud/golang_protobuf_extensions/pbutil"
|
||||||
|
dto "github.com/prometheus/client_model/go"
|
||||||
|
"github.com/prometheus/common/expfmt"
|
||||||
|
|
||||||
"github.com/influxdata/telegraf"
|
"github.com/influxdata/telegraf"
|
||||||
"github.com/influxdata/telegraf/metric"
|
"github.com/influxdata/telegraf/metric"
|
||||||
"github.com/influxdata/telegraf/plugins/parsers/prometheus/common"
|
"github.com/influxdata/telegraf/plugins/parsers/prometheus/common"
|
||||||
|
|
||||||
dto "github.com/prometheus/client_model/go"
|
|
||||||
"github.com/prometheus/common/expfmt"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type Parser struct {
|
type Parser struct {
|
||||||
|
|
@ -119,7 +118,7 @@ func makeQuantiles(m *dto.Metric, tags map[string]string, metricName string, met
|
||||||
fields := make(map[string]interface{})
|
fields := make(map[string]interface{})
|
||||||
|
|
||||||
fields[metricName+"_count"] = float64(m.GetSummary().GetSampleCount())
|
fields[metricName+"_count"] = float64(m.GetSummary().GetSampleCount())
|
||||||
fields[metricName+"_sum"] = float64(m.GetSummary().GetSampleSum())
|
fields[metricName+"_sum"] = m.GetSummary().GetSampleSum()
|
||||||
met := metric.New("prometheus", tags, fields, t, common.ValueType(metricType))
|
met := metric.New("prometheus", tags, fields, t, common.ValueType(metricType))
|
||||||
metrics = append(metrics, met)
|
metrics = append(metrics, met)
|
||||||
|
|
||||||
|
|
@ -128,7 +127,7 @@ func makeQuantiles(m *dto.Metric, tags map[string]string, metricName string, met
|
||||||
fields = make(map[string]interface{})
|
fields = make(map[string]interface{})
|
||||||
|
|
||||||
newTags["quantile"] = fmt.Sprint(q.GetQuantile())
|
newTags["quantile"] = fmt.Sprint(q.GetQuantile())
|
||||||
fields[metricName] = float64(q.GetValue())
|
fields[metricName] = q.GetValue()
|
||||||
|
|
||||||
quantileMetric := metric.New("prometheus", newTags, fields, t, common.ValueType(metricType))
|
quantileMetric := metric.New("prometheus", newTags, fields, t, common.ValueType(metricType))
|
||||||
metrics = append(metrics, quantileMetric)
|
metrics = append(metrics, quantileMetric)
|
||||||
|
|
@ -142,7 +141,7 @@ func makeBuckets(m *dto.Metric, tags map[string]string, metricName string, metri
|
||||||
fields := make(map[string]interface{})
|
fields := make(map[string]interface{})
|
||||||
|
|
||||||
fields[metricName+"_count"] = float64(m.GetHistogram().GetSampleCount())
|
fields[metricName+"_count"] = float64(m.GetHistogram().GetSampleCount())
|
||||||
fields[metricName+"_sum"] = float64(m.GetHistogram().GetSampleSum())
|
fields[metricName+"_sum"] = m.GetHistogram().GetSampleSum()
|
||||||
|
|
||||||
met := metric.New("prometheus", tags, fields, t, common.ValueType(metricType))
|
met := metric.New("prometheus", tags, fields, t, common.ValueType(metricType))
|
||||||
metrics = append(metrics, met)
|
metrics = append(metrics, met)
|
||||||
|
|
@ -164,15 +163,15 @@ func getNameAndValue(m *dto.Metric, metricName string) map[string]interface{} {
|
||||||
fields := make(map[string]interface{})
|
fields := make(map[string]interface{})
|
||||||
if m.Gauge != nil {
|
if m.Gauge != nil {
|
||||||
if !math.IsNaN(m.GetGauge().GetValue()) {
|
if !math.IsNaN(m.GetGauge().GetValue()) {
|
||||||
fields[metricName] = float64(m.GetGauge().GetValue())
|
fields[metricName] = m.GetGauge().GetValue()
|
||||||
}
|
}
|
||||||
} else if m.Counter != nil {
|
} else if m.Counter != nil {
|
||||||
if !math.IsNaN(m.GetCounter().GetValue()) {
|
if !math.IsNaN(m.GetCounter().GetValue()) {
|
||||||
fields[metricName] = float64(m.GetCounter().GetValue())
|
fields[metricName] = m.GetCounter().GetValue()
|
||||||
}
|
}
|
||||||
} else if m.Untyped != nil {
|
} else if m.Untyped != nil {
|
||||||
if !math.IsNaN(m.GetUntyped().GetValue()) {
|
if !math.IsNaN(m.GetUntyped().GetValue()) {
|
||||||
fields[metricName] = float64(m.GetUntyped().GetValue())
|
fields[metricName] = m.GetUntyped().GetValue()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return fields
|
return fields
|
||||||
|
|
|
||||||
|
|
@ -8,10 +8,10 @@ import (
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
|
||||||
"github.com/influxdata/telegraf"
|
"github.com/influxdata/telegraf"
|
||||||
"github.com/influxdata/telegraf/testutil"
|
"github.com/influxdata/telegraf/testutil"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
const (
|
||||||
|
|
@ -69,8 +69,8 @@ func TestParsingValidGauge(t *testing.T) {
|
||||||
|
|
||||||
metrics, err := parse([]byte(validUniqueGauge))
|
metrics, err := parse([]byte(validUniqueGauge))
|
||||||
|
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -89,8 +89,8 @@ func TestParsingValidCounter(t *testing.T) {
|
||||||
|
|
||||||
metrics, err := parse([]byte(validUniqueCounter))
|
metrics, err := parse([]byte(validUniqueCounter))
|
||||||
|
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -148,8 +148,8 @@ func TestParsingValidSummary(t *testing.T) {
|
||||||
|
|
||||||
metrics, err := parse([]byte(validUniqueSummary))
|
metrics, err := parse([]byte(validUniqueSummary))
|
||||||
|
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 4)
|
require.Len(t, metrics, 4)
|
||||||
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -276,8 +276,8 @@ func TestParsingValidHistogram(t *testing.T) {
|
||||||
|
|
||||||
metrics, err := parse([]byte(validUniqueHistogram))
|
metrics, err := parse([]byte(validUniqueHistogram))
|
||||||
|
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 9)
|
require.Len(t, metrics, 9)
|
||||||
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -309,8 +309,8 @@ func TestDefautTags(t *testing.T) {
|
||||||
}
|
}
|
||||||
metrics, err := parser.Parse([]byte(validUniqueGauge))
|
metrics, err := parser.Parse([]byte(validUniqueGauge))
|
||||||
|
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -363,7 +363,7 @@ test_counter{label="test"} 1 %d
|
||||||
metric, _ := parser.ParseLine(metricsWithTimestamps)
|
metric, _ := parser.ParseLine(metricsWithTimestamps)
|
||||||
|
|
||||||
testutil.RequireMetricEqual(t, expected, metric, testutil.IgnoreTime(), testutil.SortMetrics())
|
testutil.RequireMetricEqual(t, expected, metric, testutil.IgnoreTime(), testutil.SortMetrics())
|
||||||
assert.WithinDuration(t, time.Now(), metric.Time(), 5*time.Second)
|
require.WithinDuration(t, time.Now(), metric.Time(), 5*time.Second)
|
||||||
}
|
}
|
||||||
|
|
||||||
func parse(buf []byte) ([]telegraf.Metric, error) {
|
func parse(buf []byte) ([]telegraf.Metric, error) {
|
||||||
|
|
@ -448,7 +448,8 @@ func TestParserProtobufHeader(t *testing.T) {
|
||||||
sampleProtoBufData := []uint8{67, 10, 9, 115, 119, 97, 112, 95, 102, 114, 101, 101, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 1, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 18, 9, 9, 0, 0, 0, 0, 224, 36, 205, 65, 65, 10, 7, 115, 119, 97, 112, 95, 105, 110, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 0, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 26, 9, 9, 0, 0, 0, 0, 0, 0, 63, 65, 66, 10, 8, 115, 119, 97, 112, 95, 111, 117, 116, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 0, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 26, 9, 9, 0, 0, 0, 0, 0, 30, 110, 65, 68, 10, 10, 115, 119, 97, 112, 95, 116, 111, 116, 97, 108, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 1, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 18, 9, 9, 0, 0, 0, 0, 104, 153, 205, 65, 67, 10, 9, 115, 119, 97, 112, 95, 117, 115, 101, 100, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 1, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 18, 9, 9, 0, 0, 0, 0, 0, 34, 109, 65, 75, 10, 17, 115, 119, 97, 112, 95, 117, 115, 101, 100, 95, 112, 101, 114, 99, 101, 110, 116, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 1, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 18, 9, 9, 109, 234, 180, 197, 37, 155, 248, 63}
|
sampleProtoBufData := []uint8{67, 10, 9, 115, 119, 97, 112, 95, 102, 114, 101, 101, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 1, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 18, 9, 9, 0, 0, 0, 0, 224, 36, 205, 65, 65, 10, 7, 115, 119, 97, 112, 95, 105, 110, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 0, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 26, 9, 9, 0, 0, 0, 0, 0, 0, 63, 65, 66, 10, 8, 115, 119, 97, 112, 95, 111, 117, 116, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 0, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 26, 9, 9, 0, 0, 0, 0, 0, 30, 110, 65, 68, 10, 10, 115, 119, 97, 112, 95, 116, 111, 116, 97, 108, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 1, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 18, 9, 9, 0, 0, 0, 0, 104, 153, 205, 65, 67, 10, 9, 115, 119, 97, 112, 95, 117, 115, 101, 100, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 1, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 18, 9, 9, 0, 0, 0, 0, 0, 34, 109, 65, 75, 10, 17, 115, 119, 97, 112, 95, 117, 115, 101, 100, 95, 112, 101, 114, 99, 101, 110, 116, 18, 25, 84, 101, 108, 101, 103, 114, 97, 102, 32, 99, 111, 108, 108, 101, 99, 116, 101, 100, 32, 109, 101, 116, 114, 105, 99, 24, 1, 34, 25, 10, 12, 10, 4, 104, 111, 115, 116, 18, 4, 111, 109, 115, 107, 18, 9, 9, 109, 234, 180, 197, 37, 155, 248, 63}
|
||||||
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||||
w.Header().Set("Content-Type", "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited")
|
w.Header().Set("Content-Type", "application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited")
|
||||||
w.Write(sampleProtoBufData)
|
_, err := w.Write(sampleProtoBufData)
|
||||||
|
require.NoError(t, err)
|
||||||
}))
|
}))
|
||||||
defer ts.Close()
|
defer ts.Close()
|
||||||
req, err := http.NewRequest("GET", ts.URL, nil)
|
req, err := http.NewRequest("GET", ts.URL, nil)
|
||||||
|
|
|
||||||
|
|
@ -4,10 +4,11 @@ import (
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/prometheus/prometheus/prompb"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
|
||||||
"github.com/influxdata/telegraf"
|
"github.com/influxdata/telegraf"
|
||||||
"github.com/influxdata/telegraf/testutil"
|
"github.com/influxdata/telegraf/testutil"
|
||||||
"github.com/prometheus/prometheus/prompb"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestParse(t *testing.T) {
|
func TestParse(t *testing.T) {
|
||||||
|
|
@ -35,7 +36,7 @@ func TestParse(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
inoutBytes, err := prompbInput.Marshal()
|
inoutBytes, err := prompbInput.Marshal()
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
expected := []telegraf.Metric{
|
expected := []telegraf.Metric{
|
||||||
testutil.MustMetric(
|
testutil.MustMetric(
|
||||||
|
|
@ -65,8 +66,8 @@ func TestParse(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
metrics, err := parser.Parse(inoutBytes)
|
metrics, err := parser.Parse(inoutBytes)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 2)
|
require.Len(t, metrics, 2)
|
||||||
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -86,7 +87,7 @@ func TestDefaultTags(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
inoutBytes, err := prompbInput.Marshal()
|
inoutBytes, err := prompbInput.Marshal()
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
expected := []telegraf.Metric{
|
expected := []telegraf.Metric{
|
||||||
testutil.MustMetric(
|
testutil.MustMetric(
|
||||||
|
|
@ -109,8 +110,8 @@ func TestDefaultTags(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
metrics, err := parser.Parse(inoutBytes)
|
metrics, err := parser.Parse(inoutBytes)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
testutil.RequireMetricsEqual(t, expected, metrics, testutil.IgnoreTime(), testutil.SortMetrics())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -132,7 +133,7 @@ func TestMetricsWithTimestamp(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
inoutBytes, err := prompbInput.Marshal()
|
inoutBytes, err := prompbInput.Marshal()
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
expected := []telegraf.Metric{
|
expected := []telegraf.Metric{
|
||||||
testutil.MustMetric(
|
testutil.MustMetric(
|
||||||
|
|
@ -151,7 +152,7 @@ func TestMetricsWithTimestamp(t *testing.T) {
|
||||||
}
|
}
|
||||||
|
|
||||||
metrics, err := parser.Parse(inoutBytes)
|
metrics, err := parser.Parse(inoutBytes)
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
testutil.RequireMetricsEqual(t, expected, metrics, testutil.SortMetrics())
|
testutil.RequireMetricsEqual(t, expected, metrics, testutil.SortMetrics())
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -3,49 +3,49 @@ package value
|
||||||
import (
|
import (
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/require"
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestParseValidValues(t *testing.T) {
|
func TestParseValidValues(t *testing.T) {
|
||||||
parser := NewValueParser("value_test", "integer", "", nil)
|
parser := NewValueParser("value_test", "integer", "", nil)
|
||||||
metrics, err := parser.Parse([]byte("55"))
|
metrics, err := parser.Parse([]byte("55"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
assert.Equal(t, "value_test", metrics[0].Name())
|
require.Equal(t, "value_test", metrics[0].Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": int64(55),
|
"value": int64(55),
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
assert.Equal(t, map[string]string{}, metrics[0].Tags())
|
require.Equal(t, map[string]string{}, metrics[0].Tags())
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "float", "", nil)
|
parser = NewValueParser("value_test", "float", "", nil)
|
||||||
metrics, err = parser.Parse([]byte("64"))
|
metrics, err = parser.Parse([]byte("64"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
assert.Equal(t, "value_test", metrics[0].Name())
|
require.Equal(t, "value_test", metrics[0].Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(64),
|
"value": float64(64),
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
assert.Equal(t, map[string]string{}, metrics[0].Tags())
|
require.Equal(t, map[string]string{}, metrics[0].Tags())
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "string", "", nil)
|
parser = NewValueParser("value_test", "string", "", nil)
|
||||||
metrics, err = parser.Parse([]byte("foobar"))
|
metrics, err = parser.Parse([]byte("foobar"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
assert.Equal(t, "value_test", metrics[0].Name())
|
require.Equal(t, "value_test", metrics[0].Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": "foobar",
|
"value": "foobar",
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
assert.Equal(t, map[string]string{}, metrics[0].Tags())
|
require.Equal(t, map[string]string{}, metrics[0].Tags())
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "boolean", "", nil)
|
parser = NewValueParser("value_test", "boolean", "", nil)
|
||||||
metrics, err = parser.Parse([]byte("true"))
|
metrics, err = parser.Parse([]byte("true"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
assert.Equal(t, "value_test", metrics[0].Name())
|
require.Equal(t, "value_test", metrics[0].Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": true,
|
"value": true,
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
assert.Equal(t, map[string]string{}, metrics[0].Tags())
|
require.Equal(t, map[string]string{}, metrics[0].Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseMultipleValues(t *testing.T) {
|
func TestParseMultipleValues(t *testing.T) {
|
||||||
|
|
@ -56,13 +56,13 @@ func TestParseMultipleValues(t *testing.T) {
|
||||||
12
|
12
|
||||||
999
|
999
|
||||||
`))
|
`))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
assert.Equal(t, "value_test", metrics[0].Name())
|
require.Equal(t, "value_test", metrics[0].Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": int64(999),
|
"value": int64(999),
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
assert.Equal(t, map[string]string{}, metrics[0].Tags())
|
require.Equal(t, map[string]string{}, metrics[0].Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseCustomFieldName(t *testing.T) {
|
func TestParseCustomFieldName(t *testing.T) {
|
||||||
|
|
@ -70,8 +70,8 @@ func TestParseCustomFieldName(t *testing.T) {
|
||||||
parser.FieldName = "penguin"
|
parser.FieldName = "penguin"
|
||||||
metrics, err := parser.Parse([]byte(`55`))
|
metrics, err := parser.Parse([]byte(`55`))
|
||||||
|
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"penguin": int64(55),
|
"penguin": int64(55),
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
}
|
}
|
||||||
|
|
@ -79,126 +79,126 @@ func TestParseCustomFieldName(t *testing.T) {
|
||||||
func TestParseLineValidValues(t *testing.T) {
|
func TestParseLineValidValues(t *testing.T) {
|
||||||
parser := NewValueParser("value_test", "integer", "", nil)
|
parser := NewValueParser("value_test", "integer", "", nil)
|
||||||
metric, err := parser.ParseLine("55")
|
metric, err := parser.ParseLine("55")
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, "value_test", metric.Name())
|
require.Equal(t, "value_test", metric.Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": int64(55),
|
"value": int64(55),
|
||||||
}, metric.Fields())
|
}, metric.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metric.Tags())
|
require.Equal(t, map[string]string{}, metric.Tags())
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "float", "", nil)
|
parser = NewValueParser("value_test", "float", "", nil)
|
||||||
metric, err = parser.ParseLine("64")
|
metric, err = parser.ParseLine("64")
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, "value_test", metric.Name())
|
require.Equal(t, "value_test", metric.Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(64),
|
"value": float64(64),
|
||||||
}, metric.Fields())
|
}, metric.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metric.Tags())
|
require.Equal(t, map[string]string{}, metric.Tags())
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "string", "", nil)
|
parser = NewValueParser("value_test", "string", "", nil)
|
||||||
metric, err = parser.ParseLine("foobar")
|
metric, err = parser.ParseLine("foobar")
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, "value_test", metric.Name())
|
require.Equal(t, "value_test", metric.Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": "foobar",
|
"value": "foobar",
|
||||||
}, metric.Fields())
|
}, metric.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metric.Tags())
|
require.Equal(t, map[string]string{}, metric.Tags())
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "boolean", "", nil)
|
parser = NewValueParser("value_test", "boolean", "", nil)
|
||||||
metric, err = parser.ParseLine("true")
|
metric, err = parser.ParseLine("true")
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, "value_test", metric.Name())
|
require.Equal(t, "value_test", metric.Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": true,
|
"value": true,
|
||||||
}, metric.Fields())
|
}, metric.Fields())
|
||||||
assert.Equal(t, map[string]string{}, metric.Tags())
|
require.Equal(t, map[string]string{}, metric.Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseInvalidValues(t *testing.T) {
|
func TestParseInvalidValues(t *testing.T) {
|
||||||
parser := NewValueParser("value_test", "integer", "", nil)
|
parser := NewValueParser("value_test", "integer", "", nil)
|
||||||
metrics, err := parser.Parse([]byte("55.0"))
|
metrics, err := parser.Parse([]byte("55.0"))
|
||||||
assert.Error(t, err)
|
require.Error(t, err)
|
||||||
assert.Len(t, metrics, 0)
|
require.Len(t, metrics, 0)
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "float", "", nil)
|
parser = NewValueParser("value_test", "float", "", nil)
|
||||||
metrics, err = parser.Parse([]byte("foobar"))
|
metrics, err = parser.Parse([]byte("foobar"))
|
||||||
assert.Error(t, err)
|
require.Error(t, err)
|
||||||
assert.Len(t, metrics, 0)
|
require.Len(t, metrics, 0)
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "boolean", "", nil)
|
parser = NewValueParser("value_test", "boolean", "", nil)
|
||||||
metrics, err = parser.Parse([]byte("213"))
|
metrics, err = parser.Parse([]byte("213"))
|
||||||
assert.Error(t, err)
|
require.Error(t, err)
|
||||||
assert.Len(t, metrics, 0)
|
require.Len(t, metrics, 0)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseLineInvalidValues(t *testing.T) {
|
func TestParseLineInvalidValues(t *testing.T) {
|
||||||
parser := NewValueParser("value_test", "integer", "", nil)
|
parser := NewValueParser("value_test", "integer", "", nil)
|
||||||
_, err := parser.ParseLine("55.0")
|
_, err := parser.ParseLine("55.0")
|
||||||
assert.Error(t, err)
|
require.Error(t, err)
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "float", "", nil)
|
parser = NewValueParser("value_test", "float", "", nil)
|
||||||
_, err = parser.ParseLine("foobar")
|
_, err = parser.ParseLine("foobar")
|
||||||
assert.Error(t, err)
|
require.Error(t, err)
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "boolean", "", nil)
|
parser = NewValueParser("value_test", "boolean", "", nil)
|
||||||
_, err = parser.ParseLine("213")
|
_, err = parser.ParseLine("213")
|
||||||
assert.Error(t, err)
|
require.Error(t, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseValidValuesDefaultTags(t *testing.T) {
|
func TestParseValidValuesDefaultTags(t *testing.T) {
|
||||||
parser := NewValueParser("value_test", "integer", "", nil)
|
parser := NewValueParser("value_test", "integer", "", nil)
|
||||||
parser.SetDefaultTags(map[string]string{"test": "tag"})
|
parser.SetDefaultTags(map[string]string{"test": "tag"})
|
||||||
metrics, err := parser.Parse([]byte("55"))
|
metrics, err := parser.Parse([]byte("55"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
assert.Equal(t, "value_test", metrics[0].Name())
|
require.Equal(t, "value_test", metrics[0].Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": int64(55),
|
"value": int64(55),
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
assert.Equal(t, map[string]string{"test": "tag"}, metrics[0].Tags())
|
require.Equal(t, map[string]string{"test": "tag"}, metrics[0].Tags())
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "float", "", nil)
|
parser = NewValueParser("value_test", "float", "", nil)
|
||||||
parser.SetDefaultTags(map[string]string{"test": "tag"})
|
parser.SetDefaultTags(map[string]string{"test": "tag"})
|
||||||
metrics, err = parser.Parse([]byte("64"))
|
metrics, err = parser.Parse([]byte("64"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
assert.Equal(t, "value_test", metrics[0].Name())
|
require.Equal(t, "value_test", metrics[0].Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": float64(64),
|
"value": float64(64),
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
assert.Equal(t, map[string]string{"test": "tag"}, metrics[0].Tags())
|
require.Equal(t, map[string]string{"test": "tag"}, metrics[0].Tags())
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "string", "", nil)
|
parser = NewValueParser("value_test", "string", "", nil)
|
||||||
parser.SetDefaultTags(map[string]string{"test": "tag"})
|
parser.SetDefaultTags(map[string]string{"test": "tag"})
|
||||||
metrics, err = parser.Parse([]byte("foobar"))
|
metrics, err = parser.Parse([]byte("foobar"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
assert.Equal(t, "value_test", metrics[0].Name())
|
require.Equal(t, "value_test", metrics[0].Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": "foobar",
|
"value": "foobar",
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
assert.Equal(t, map[string]string{"test": "tag"}, metrics[0].Tags())
|
require.Equal(t, map[string]string{"test": "tag"}, metrics[0].Tags())
|
||||||
|
|
||||||
parser = NewValueParser("value_test", "boolean", "", nil)
|
parser = NewValueParser("value_test", "boolean", "", nil)
|
||||||
parser.SetDefaultTags(map[string]string{"test": "tag"})
|
parser.SetDefaultTags(map[string]string{"test": "tag"})
|
||||||
metrics, err = parser.Parse([]byte("true"))
|
metrics, err = parser.Parse([]byte("true"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
assert.Equal(t, "value_test", metrics[0].Name())
|
require.Equal(t, "value_test", metrics[0].Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": true,
|
"value": true,
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
assert.Equal(t, map[string]string{"test": "tag"}, metrics[0].Tags())
|
require.Equal(t, map[string]string{"test": "tag"}, metrics[0].Tags())
|
||||||
}
|
}
|
||||||
|
|
||||||
func TestParseValuesWithNullCharacter(t *testing.T) {
|
func TestParseValuesWithNullCharacter(t *testing.T) {
|
||||||
parser := NewValueParser("value_test", "integer", "", nil)
|
parser := NewValueParser("value_test", "integer", "", nil)
|
||||||
metrics, err := parser.Parse([]byte("55\x00"))
|
metrics, err := parser.Parse([]byte("55\x00"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Len(t, metrics, 1)
|
require.Len(t, metrics, 1)
|
||||||
assert.Equal(t, "value_test", metrics[0].Name())
|
require.Equal(t, "value_test", metrics[0].Name())
|
||||||
assert.Equal(t, map[string]interface{}{
|
require.Equal(t, map[string]interface{}{
|
||||||
"value": int64(55),
|
"value": int64(55),
|
||||||
}, metrics[0].Fields())
|
}, metrics[0].Fields())
|
||||||
assert.Equal(t, map[string]string{}, metrics[0].Tags())
|
require.Equal(t, map[string]string{}, metrics[0].Tags())
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -9,10 +9,9 @@ import (
|
||||||
|
|
||||||
var (
|
var (
|
||||||
ErrEOF = errors.New("EOF")
|
ErrEOF = errors.New("EOF")
|
||||||
ErrInvalidTimestamp = errors.New("Invalid timestamp")
|
ErrInvalidTimestamp = errors.New("invalid timestamp")
|
||||||
)
|
)
|
||||||
|
|
||||||
// Interface for parsing line elements.
|
|
||||||
type ElementParser interface {
|
type ElementParser interface {
|
||||||
parse(p *PointParser, pt *Point) error
|
parse(p *PointParser, pt *Point) error
|
||||||
}
|
}
|
||||||
|
|
@ -116,11 +115,10 @@ func setTimestamp(pt *Point, ts int64, numDigits int) error {
|
||||||
ts = ts / 1e3
|
ts = ts / 1e3
|
||||||
} else if numDigits != 10 {
|
} else if numDigits != 10 {
|
||||||
// must be in seconds, return error if not 0
|
// must be in seconds, return error if not 0
|
||||||
if ts == 0 {
|
if ts != 0 {
|
||||||
ts = getCurrentTime()
|
|
||||||
} else {
|
|
||||||
return ErrInvalidTimestamp
|
return ErrInvalidTimestamp
|
||||||
}
|
}
|
||||||
|
ts = getCurrentTime()
|
||||||
}
|
}
|
||||||
pt.Timestamp = ts
|
pt.Timestamp = ts
|
||||||
return nil
|
return nil
|
||||||
|
|
|
||||||
|
|
@ -4,7 +4,6 @@ import (
|
||||||
"bufio"
|
"bufio"
|
||||||
"bytes"
|
"bytes"
|
||||||
"io"
|
"io"
|
||||||
"log"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"sync"
|
"sync"
|
||||||
"time"
|
"time"
|
||||||
|
|
@ -26,6 +25,7 @@ type Point struct {
|
||||||
type WavefrontParser struct {
|
type WavefrontParser struct {
|
||||||
parsers *sync.Pool
|
parsers *sync.Pool
|
||||||
defaultTags map[string]string
|
defaultTags map[string]string
|
||||||
|
Log telegraf.Logger `toml:"-"`
|
||||||
}
|
}
|
||||||
|
|
||||||
// PointParser is a thread-unsafe parser and must be kept in a pool.
|
// PointParser is a thread-unsafe parser and must be kept in a pool.
|
||||||
|
|
@ -42,7 +42,7 @@ type PointParser struct {
|
||||||
parent *WavefrontParser
|
parent *WavefrontParser
|
||||||
}
|
}
|
||||||
|
|
||||||
// Returns a slice of ElementParser's for the Graphite format
|
// NewWavefrontElements returns a slice of ElementParser's for the Graphite format
|
||||||
func NewWavefrontElements() []ElementParser {
|
func NewWavefrontElements() []ElementParser {
|
||||||
var elements []ElementParser
|
var elements []ElementParser
|
||||||
wsParser := WhiteSpaceParser{}
|
wsParser := WhiteSpaceParser{}
|
||||||
|
|
@ -200,7 +200,7 @@ func (p *PointParser) unscan() {
|
||||||
func (p *PointParser) unscanTokens(n int) {
|
func (p *PointParser) unscanTokens(n int) {
|
||||||
if n > MaxBufferSize {
|
if n > MaxBufferSize {
|
||||||
// just log for now
|
// just log for now
|
||||||
log.Printf("cannot unscan more than %d tokens", MaxBufferSize)
|
p.parent.Log.Infof("Cannot unscan more than %d tokens", MaxBufferSize)
|
||||||
}
|
}
|
||||||
p.buf.n += n
|
p.buf.n += n
|
||||||
}
|
}
|
||||||
|
|
@ -208,7 +208,7 @@ func (p *PointParser) unscanTokens(n int) {
|
||||||
func (p *PointParser) reset(buf []byte) {
|
func (p *PointParser) reset(buf []byte) {
|
||||||
// reset the scan buffer and write new byte
|
// reset the scan buffer and write new byte
|
||||||
p.scanBuf.Reset()
|
p.scanBuf.Reset()
|
||||||
p.scanBuf.Write(buf)
|
p.scanBuf.Write(buf) //nolint:revive // from buffer.go: "err is always nil"
|
||||||
|
|
||||||
if p.s == nil {
|
if p.s == nil {
|
||||||
p.s = NewScanner(&p.scanBuf)
|
p.s = NewScanner(&p.scanBuf)
|
||||||
|
|
|
||||||
|
|
@ -4,208 +4,209 @@ import (
|
||||||
"testing"
|
"testing"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
|
||||||
"github.com/influxdata/telegraf"
|
"github.com/influxdata/telegraf"
|
||||||
"github.com/influxdata/telegraf/metric"
|
"github.com/influxdata/telegraf/metric"
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func TestParse(t *testing.T) {
|
func TestParse(t *testing.T) {
|
||||||
parser := NewWavefrontParser(nil)
|
parser := NewWavefrontParser(nil)
|
||||||
|
|
||||||
parsedMetrics, err := parser.Parse([]byte("test.metric 1"))
|
parsedMetrics, err := parser.Parse([]byte("test.metric 1"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
testMetric := metric.New("test.metric", map[string]string{}, map[string]interface{}{"value": 1.}, time.Unix(0, 0))
|
testMetric := metric.New("test.metric", map[string]string{}, map[string]interface{}{"value": 1.}, time.Unix(0, 0))
|
||||||
assert.Equal(t, parsedMetrics[0].Name(), testMetric.Name())
|
require.Equal(t, parsedMetrics[0].Name(), testMetric.Name())
|
||||||
assert.Equal(t, parsedMetrics[0].Fields(), testMetric.Fields())
|
require.Equal(t, parsedMetrics[0].Fields(), testMetric.Fields())
|
||||||
|
|
||||||
parsedMetrics, err = parser.Parse([]byte("\u2206test.delta 1 1530939936"))
|
parsedMetrics, err = parser.Parse([]byte("\u2206test.delta 1 1530939936"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
testMetric = metric.New("\u2206test.delta", map[string]string{},
|
testMetric = metric.New("\u2206test.delta", map[string]string{},
|
||||||
map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
|
map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
|
||||||
assert.EqualValues(t, parsedMetrics[0], testMetric)
|
require.EqualValues(t, parsedMetrics[0], testMetric)
|
||||||
|
|
||||||
parsedMetrics, err = parser.Parse([]byte("\u0394test.delta 1 1530939936"))
|
parsedMetrics, err = parser.Parse([]byte("\u0394test.delta 1 1530939936"))
|
||||||
assert.NoError(t, err)
|
require.NoError(t, err)
|
||||||
 	testMetric = metric.New("\u0394test.delta", map[string]string{},
 		map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)

 	parsedMetrics, err = parser.Parse([]byte("\u0394test.delta 1.234 1530939936 source=\"mysource\" tag2=value2"))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("\u0394test.delta", map[string]string{"source": "mysource", "tag2": "value2"}, map[string]interface{}{"value": 1.234}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)

 	parsedMetrics, err = parser.Parse([]byte("test.metric 1 1530939936"))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{}, map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)

 	parsedMetrics, err = parser.Parse([]byte("test.metric 1 1530939936 source=mysource"))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"source": "mysource"}, map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)

 	parsedMetrics, err = parser.Parse([]byte("\"test.metric\" 1.1234 1530939936 source=\"mysource\""))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"source": "mysource"}, map[string]interface{}{"value": 1.1234}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)

 	parsedMetrics, err = parser.Parse([]byte("\"test.metric\" 1.1234 1530939936 \"source\"=\"mysource\" tag2=value2"))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"source": "mysource", "tag2": "value2"}, map[string]interface{}{"value": 1.1234}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)

 	parsedMetrics, err = parser.Parse([]byte("\"test.metric\" -1.1234 1530939936 \"source\"=\"mysource\" tag2=value2"))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"source": "mysource", "tag2": "value2"}, map[string]interface{}{"value": -1.1234}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)

 	parsedMetrics, err = parser.Parse([]byte("\"test.metric\" 1.1234e04 1530939936 \"source\"=\"mysource\" tag2=value2"))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"source": "mysource", "tag2": "value2"}, map[string]interface{}{"value": 1.1234e04}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)

 	parsedMetrics, err = parser.Parse([]byte("\"test.metric\" 1.1234e-04 1530939936 \"source\"=\"mysource\" tag2=value2"))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"source": "mysource", "tag2": "value2"}, map[string]interface{}{"value": 1.1234e-04}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)

 	parsedMetrics, err = parser.Parse([]byte("test.metric 1.1234 1530939936 source=\"mysource\" tag2=value2 "))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"source": "mysource", "tag2": "value2"}, map[string]interface{}{"value": 1.1234}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)
 }

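Editor's note: every change in this test file is the same mechanical swap from testify's assert package to its require package, which is what the linter enforces. The sketch below is not part of the commit; it is a minimal, self-contained illustration (the doParse helper and parsed type are made up) of the behavioral difference the swap relies on: require stops the test at the first failed check via t.FailNow, while assert only records the failure and lets the test keep running into possibly invalid state.

package example

import (
	"errors"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

type parsed struct{ Field string }

// doParse is a hypothetical helper that fails on bad input.
func doParse(in string) (*parsed, error) {
	if in == "bad input" {
		return nil, errors.New("cannot parse")
	}
	return &parsed{Field: "value"}, nil
}

// assert records the failure and keeps going, so the later
// dereference panics on the nil result.
func TestWithAssert(t *testing.T) {
	result, err := doParse("bad input")
	assert.NoError(t, err)
	assert.Equal(t, "value", result.Field) // panics: result is nil
}

// require calls t.FailNow on the first failed check, so the test
// stops before it can touch the invalid result.
func TestWithRequire(t *testing.T) {
	result, err := doParse("bad input")
	require.NoError(t, err) // test ends here
	require.Equal(t, "value", result.Field)
}

In sequential tests like the ones in this file, failing fast avoids cascades of follow-up failures and nil dereferences.
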
 func TestParseLine(t *testing.T) {
 	parser := NewWavefrontParser(nil)

 	parsedMetric, err := parser.ParseLine("test.metric 1")
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric := metric.New("test.metric", map[string]string{}, map[string]interface{}{"value": 1.}, time.Unix(0, 0))
-	assert.Equal(t, parsedMetric.Name(), testMetric.Name())
-	assert.Equal(t, parsedMetric.Fields(), testMetric.Fields())
+	require.Equal(t, parsedMetric.Name(), testMetric.Name())
+	require.Equal(t, parsedMetric.Fields(), testMetric.Fields())

 	parsedMetric, err = parser.ParseLine("test.metric 1 1530939936")
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{}, map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetric, testMetric)
+	require.EqualValues(t, parsedMetric, testMetric)

 	parsedMetric, err = parser.ParseLine("test.metric 1 1530939936 source=mysource")
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"source": "mysource"}, map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetric, testMetric)
+	require.EqualValues(t, parsedMetric, testMetric)

 	parsedMetric, err = parser.ParseLine("\"test.metric\" 1.1234 1530939936 source=\"mysource\"")
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"source": "mysource"}, map[string]interface{}{"value": 1.1234}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetric, testMetric)
+	require.EqualValues(t, parsedMetric, testMetric)

 	parsedMetric, err = parser.ParseLine("\"test.metric\" 1.1234 1530939936 \"source\"=\"mysource\" tag2=value2")
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"source": "mysource", "tag2": "value2"}, map[string]interface{}{"value": 1.1234}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetric, testMetric)
+	require.EqualValues(t, parsedMetric, testMetric)

 	parsedMetric, err = parser.ParseLine("test.metric 1.1234 1530939936 source=\"mysource\" tag2=value2 ")
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"source": "mysource", "tag2": "value2"}, map[string]interface{}{"value": 1.1234}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetric, testMetric)
+	require.EqualValues(t, parsedMetric, testMetric)
 }

 func TestParseMultiple(t *testing.T) {
 	parser := NewWavefrontParser(nil)

 	parsedMetrics, err := parser.Parse([]byte("test.metric 1\ntest.metric2 2 1530939936"))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric1 := metric.New("test.metric", map[string]string{}, map[string]interface{}{"value": 1.}, time.Unix(0, 0))
 	testMetric2 := metric.New("test.metric2", map[string]string{}, map[string]interface{}{"value": 2.}, time.Unix(1530939936, 0))
 	testMetrics := []telegraf.Metric{testMetric1, testMetric2}
-	assert.Equal(t, parsedMetrics[0].Name(), testMetrics[0].Name())
-	assert.Equal(t, parsedMetrics[0].Fields(), testMetrics[0].Fields())
-	assert.EqualValues(t, parsedMetrics[1], testMetrics[1])
+	require.Equal(t, parsedMetrics[0].Name(), testMetrics[0].Name())
+	require.Equal(t, parsedMetrics[0].Fields(), testMetrics[0].Fields())
+	require.EqualValues(t, parsedMetrics[1], testMetrics[1])

 	parsedMetrics, err = parser.Parse([]byte("test.metric 1 1530939936 source=mysource\n\"test.metric\" 1.1234 1530939936 source=\"mysource\""))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric1 = metric.New("test.metric", map[string]string{"source": "mysource"}, map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
 	testMetric2 = metric.New("test.metric", map[string]string{"source": "mysource"}, map[string]interface{}{"value": 1.1234}, time.Unix(1530939936, 0))
 	testMetrics = []telegraf.Metric{testMetric1, testMetric2}
-	assert.EqualValues(t, parsedMetrics, testMetrics)
+	require.EqualValues(t, parsedMetrics, testMetrics)

 	parsedMetrics, err = parser.Parse([]byte("\"test.metric\" 1.1234 1530939936 \"source\"=\"mysource\" tag2=value2\ntest.metric 1.1234 1530939936 source=\"mysource\" tag2=value2 "))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric1 = metric.New("test.metric", map[string]string{"source": "mysource", "tag2": "value2"}, map[string]interface{}{"value": 1.1234}, time.Unix(1530939936, 0))
 	testMetric2 = metric.New("test.metric", map[string]string{"source": "mysource", "tag2": "value2"}, map[string]interface{}{"value": 1.1234}, time.Unix(1530939936, 0))
 	testMetrics = []telegraf.Metric{testMetric1, testMetric2}
-	assert.EqualValues(t, parsedMetrics, testMetrics)
+	require.EqualValues(t, parsedMetrics, testMetrics)

 	parsedMetrics, err = parser.Parse([]byte("test.metric 1 1530939936 source=mysource\n\"test.metric\" 1.1234 1530939936 source=\"mysource\"\ntest.metric3 333 1530939936 tagit=valueit"))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric1 = metric.New("test.metric", map[string]string{"source": "mysource"}, map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
 	testMetric2 = metric.New("test.metric", map[string]string{"source": "mysource"}, map[string]interface{}{"value": 1.1234}, time.Unix(1530939936, 0))
 	testMetric3 := metric.New("test.metric3", map[string]string{"tagit": "valueit"}, map[string]interface{}{"value": 333.}, time.Unix(1530939936, 0))
 	testMetrics = []telegraf.Metric{testMetric1, testMetric2, testMetric3}
-	assert.EqualValues(t, parsedMetrics, testMetrics)
+	require.EqualValues(t, parsedMetrics, testMetrics)
 }

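Editor's note: the tests above construct the parser directly via NewWavefrontParser; input plugins normally go through the parser registry instead. A minimal usage sketch, assuming the wavefront data format is reachable through parsers.NewParser at this version of Telegraf (the sample line and printed output are illustrative only):

package main

import (
	"fmt"
	"log"

	"github.com/influxdata/telegraf/plugins/parsers"
)

func main() {
	// Build a Wavefront parser the same way an input plugin would.
	p, err := parsers.NewParser(&parsers.Config{
		MetricName: "wavefront",
		DataFormat: "wavefront",
	})
	if err != nil {
		log.Fatal(err)
	}

	metrics, err := p.Parse([]byte("test.metric 1.1234 1530939936 source=\"mysource\""))
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range metrics {
		fmt.Println(m.Name(), m.Tags(), m.Fields())
	}
}
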
 func TestParseSpecial(t *testing.T) {
 	parser := NewWavefrontParser(nil)

 	parsedMetric, err := parser.ParseLine("\"test.metric\" 1 1530939936")
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric := metric.New("test.metric", map[string]string{}, map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetric, testMetric)
+	require.EqualValues(t, parsedMetric, testMetric)

 	parsedMetric, err = parser.ParseLine("test.metric 1 1530939936 tag1=\"val\\\"ue1\"")
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"tag1": "val\\\"ue1"}, map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetric, testMetric)
+	require.EqualValues(t, parsedMetric, testMetric)
 }

 func TestParseInvalid(t *testing.T) {
 	parser := NewWavefrontParser(nil)

 	_, err := parser.Parse([]byte("test.metric"))
-	assert.Error(t, err)
+	require.Error(t, err)

 	_, err = parser.Parse([]byte("test.metric string"))
-	assert.Error(t, err)
+	require.Error(t, err)

 	_, err = parser.Parse([]byte("test.metric 1 string"))
-	assert.Error(t, err)
+	require.Error(t, err)

 	_, err = parser.Parse([]byte("test.\u2206delta 1"))
-	assert.Error(t, err)
+	require.Error(t, err)

 	_, err = parser.Parse([]byte("test.metric 1 1530939936 tag_no_pair"))
-	assert.Error(t, err)
+	require.Error(t, err)

 	_, err = parser.Parse([]byte("test.metric 1 1530939936 tag_broken_value=\""))
-	assert.Error(t, err)
+	require.Error(t, err)

 	_, err = parser.Parse([]byte("\"test.metric 1 1530939936"))
-	assert.Error(t, err)
+	require.Error(t, err)

 	_, err = parser.Parse([]byte("test.metric 1 1530939936 tag1=val\\\"ue1"))
-	assert.Error(t, err)
+	require.Error(t, err)

 	_, err = parser.Parse([]byte("\"test.metric\" -1.12-34 1530939936 \"source\"=\"mysource\" tag2=value2"))
-	assert.Error(t, err)
+	require.Error(t, err)
 }

 func TestParseDefaultTags(t *testing.T) {
 	parser := NewWavefrontParser(map[string]string{"myDefault": "value1", "another": "test2"})

 	parsedMetrics, err := parser.Parse([]byte("test.metric 1 1530939936"))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric := metric.New("test.metric", map[string]string{"myDefault": "value1", "another": "test2"}, map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)

 	parsedMetrics, err = parser.Parse([]byte("test.metric 1 1530939936 source=mysource"))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"myDefault": "value1", "another": "test2", "source": "mysource"}, map[string]interface{}{"value": 1.}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)

 	parsedMetrics, err = parser.Parse([]byte("\"test.metric\" 1.1234 1530939936 another=\"test3\""))
-	assert.NoError(t, err)
+	require.NoError(t, err)
 	testMetric = metric.New("test.metric", map[string]string{"myDefault": "value1", "another": "test2"}, map[string]interface{}{"value": 1.1234}, time.Unix(1530939936, 0))
-	assert.EqualValues(t, parsedMetrics[0], testMetric)
+	require.EqualValues(t, parsedMetrics[0], testMetric)
 }

@@ -315,26 +315,26 @@ func (p *Parser) parseQuery(starttime time.Time, doc, selected dataNode, config
 			if err != nil {
 				return nil, fmt.Errorf("failed to query field value for '%s': %v", name, err)
 			}

-			path := name
 			if config.FieldNameExpand {
 				p := p.document.GetNodePath(selectedfield, selected, "_")
 				if len(p) > 0 {
-					path = p + "_" + name
+					name = p + "_" + name
 				}
 			}

 			// Check if field name already exists and if so, append an index number.
-			if _, ok := fields[path]; ok {
+			if _, ok := fields[name]; ok {
 				for i := 1; ; i++ {
-					p := path + "_" + strconv.Itoa(i)
+					p := name + "_" + strconv.Itoa(i)
 					if _, ok := fields[p]; !ok {
-						path = p
+						name = p
 						break
 					}
 				}
 			}

-			fields[path] = v
+			fields[name] = v
 		}
 	} else {
 		p.debugEmptyQuery("field selection", selected, config.FieldSelection)
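Editor's note: the hunk above only drops the intermediate path variable and reuses name directly; the collision handling itself is unchanged. As a standalone sketch of that logic, outside the parser (uniqueFieldName is a made-up helper, not part of the commit):

package main

import (
	"fmt"
	"strconv"
)

// uniqueFieldName returns name unchanged if it is not yet present in
// fields, otherwise name with the first free numeric suffix appended.
// This mirrors the de-duplication loop in the hunk above.
func uniqueFieldName(fields map[string]interface{}, name string) string {
	if _, ok := fields[name]; !ok {
		return name
	}
	for i := 1; ; i++ {
		candidate := name + "_" + strconv.Itoa(i)
		if _, ok := fields[candidate]; !ok {
			return candidate
		}
	}
}

func main() {
	fields := map[string]interface{}{}
	for _, value := range []float64{1.0, 2.0, 3.0} {
		key := uniqueFieldName(fields, "temperature")
		fields[key] = value
	}
	fmt.Println(fields) // map[temperature:1 temperature_1:2 temperature_2:3]
}

Each duplicate field name gets the first free numeric suffix, so repeated selections never overwrite an earlier value.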