diff --git a/config/testdata/telegraf-agent.toml b/config/testdata/telegraf-agent.toml
index 6967d6e86..a0add155b 100644
--- a/config/testdata/telegraf-agent.toml
+++ b/config/testdata/telegraf-agent.toml
@@ -154,20 +154,6 @@
   ## Offset (must be either "oldest" or "newest")
   offset = "oldest"
 
-# read metrics from a Kafka legacy topic
-[[inputs.kafka_consumer_legacy]]
-  ## topic(s) to consume
-  topics = ["telegraf"]
-  # an array of Zookeeper connection strings
-  zookeeper_peers = ["localhost:2181"]
-  ## the name of the consumer group
-  consumer_group = "telegraf_metrics_consumers"
-  # Maximum number of points to buffer between collection intervals
-  point_buffer = 100000
-  ## Offset (must be either "oldest" or "newest")
-  offset = "oldest"
-
-
 # Read metrics from a LeoFS Server via SNMP
 [[inputs.leofs]]
   # An array of URI to gather stats about LeoFS.
diff --git a/docs/LICENSE_OF_DEPENDENCIES.md b/docs/LICENSE_OF_DEPENDENCIES.md
index 32fac580e..787f12f76 100644
--- a/docs/LICENSE_OF_DEPENDENCIES.md
+++ b/docs/LICENSE_OF_DEPENDENCIES.md
@@ -37,7 +37,6 @@ following works:
 - github.com/Microsoft/go-winio [MIT License](https://github.com/Microsoft/go-winio/blob/master/LICENSE)
 - github.com/Microsoft/hcsshim [MIT License](https://github.com/microsoft/hcsshim/blob/main/LICENSE)
 - github.com/PaesslerAG/gval [BSD 3-Clause "New" or "Revised" License](https://github.com/PaesslerAG/gval/blob/master/LICENSE)
-- github.com/Shopify/sarama [MIT License](https://github.com/IBM/sarama/blob/master/LICENSE.md)
 - github.com/aerospike/aerospike-client-go [Apache License 2.0](https://github.com/aerospike/aerospike-client-go/blob/master/LICENSE)
 - github.com/alecthomas/participle [MIT License](https://github.com/alecthomas/participle/blob/master/COPYING)
 - github.com/alecthomas/units [MIT License](https://github.com/alecthomas/units/blob/master/COPYING)
@@ -317,7 +316,6 @@ following works:
 - github.com/russross/blackfriday [BSD 2-Clause "Simplified" License](https://github.com/russross/blackfriday/blob/master/LICENSE.txt)
 - github.com/safchain/ethtool [Apache License 2.0](https://github.com/safchain/ethtool/blob/master/LICENSE)
 - github.com/samber/lo [MIT License](https://github.com/samber/lo/blob/master/LICENSE)
-- github.com/samuel/go-zookeeper [BSD 3-Clause Clear License](https://github.com/samuel/go-zookeeper/blob/master/LICENSE)
 - github.com/shirou/gopsutil [BSD 3-Clause Clear License](https://github.com/shirou/gopsutil/blob/master/LICENSE)
 - github.com/shoenig/go-m1cpu [Mozilla Public License 2.0](https://github.com/shoenig/go-m1cpu/blob/main/LICENSE)
 - github.com/shopspring/decimal [MIT License](https://github.com/shopspring/decimal/blob/master/LICENSE)
@@ -355,8 +353,6 @@ following works:
 - github.com/vjeantet/grok [Apache License 2.0](https://github.com/vjeantet/grok/blob/master/LICENSE)
 - github.com/vmware/govmomi [Apache License 2.0](https://github.com/vmware/govmomi/blob/master/LICENSE.txt)
 - github.com/wavefronthq/wavefront-sdk-go [Apache License 2.0](https://github.com/wavefrontHQ/wavefront-sdk-go/blob/master/LICENSE)
-- github.com/wvanbergen/kafka [MIT License](https://github.com/wvanbergen/kafka/blob/master/LICENSE)
-- github.com/wvanbergen/kazoo-go [MIT License](https://github.com/wvanbergen/kazoo-go/blob/master/MIT-LICENSE)
 - github.com/x448/float16 [MIT License](https://github.com/x448/float16/blob/master/LICENSE)
 - github.com/xdg-go/pbkdf2 [Apache License 2.0](https://github.com/xdg-go/pbkdf2/blob/main/LICENSE)
 - github.com/xdg-go/scram [Apache License 2.0](https://github.com/xdg-go/scram/blob/master/LICENSE)
diff --git a/go.mod b/go.mod
index 6b1314fdb..624e27e50 100644
--- a/go.mod
+++ b/go.mod
@@ -26,7 +26,6 @@ require (
 	github.com/Masterminds/sprig/v3 v3.2.3
 	github.com/Mellanox/rdmamap v1.1.0
 	github.com/PaesslerAG/gval v1.2.2
-	github.com/Shopify/sarama v1.38.1
 	github.com/aerospike/aerospike-client-go/v5 v5.11.0
 	github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137
 	github.com/aliyun/alibaba-cloud-sdk-go v1.62.563
@@ -188,7 +187,6 @@
 	github.com/vjeantet/grok v1.0.1
 	github.com/vmware/govmomi v0.33.1
 	github.com/wavefronthq/wavefront-sdk-go v0.15.0
-	github.com/wvanbergen/kafka v0.0.0-20171203153745-e2edea948ddf
 	github.com/x448/float16 v0.8.4
 	github.com/xdg/scram v1.0.5
 	github.com/yuin/goldmark v1.6.0
@@ -434,7 +432,6 @@
 	github.com/robfig/cron/v3 v3.0.1 // indirect
 	github.com/russross/blackfriday/v2 v2.1.0 // indirect
 	github.com/samber/lo v1.38.1 // indirect
-	github.com/samuel/go-zookeeper v0.0.0-20200724154423-2164a8ac840e // indirect
 	github.com/shoenig/go-m1cpu v0.1.6 // indirect
 	github.com/shopspring/decimal v1.3.1 // indirect
 	github.com/signalfx/com_signalfx_metrics_protobuf v0.0.3 // indirect
@@ -454,7 +451,6 @@
 	github.com/uber/jaeger-lib v2.4.1+incompatible // indirect
 	github.com/vishvananda/netlink v1.2.1-beta.2 // indirect
 	github.com/vishvananda/netns v0.0.4
-	github.com/wvanbergen/kazoo-go v0.0.0-20180202103751-f72d8611297a // indirect
 	github.com/xdg-go/pbkdf2 v1.0.0 // indirect
 	github.com/xdg-go/scram v1.1.2 // indirect
 	github.com/xdg-go/stringprep v1.0.4 // indirect
diff --git a/go.sum b/go.sum
index 9e50cb6f5..d187e3018 100644
--- a/go.sum
+++ b/go.sum
@@ -745,10 +745,6 @@ github.com/PaesslerAG/gval v1.2.2 h1:Y7iBzhgE09IGTt5QgGQ2IdaYYYOU134YGHBThD+wm9E
 github.com/PaesslerAG/gval v1.2.2/go.mod h1:XRFLwvmkTEdYziLdaCeCa5ImcGVrfQbeNUbVR+C6xac=
 github.com/PaesslerAG/jsonpath v0.1.0 h1:gADYeifvlqK3R3i2cR5B4DGgxLXIPb3TRTH1mGi0jPI=
 github.com/PaesslerAG/jsonpath v0.1.0/go.mod h1:4BzmtoM/PI8fPO4aQGIusjGxGir2BzcV0grWtFzq1Y8=
-github.com/Shopify/sarama v1.38.1 h1:lqqPUPQZ7zPqYlWpTh+LQ9bhYNu2xJL6k1SJN4WVe2A=
-github.com/Shopify/sarama v1.38.1/go.mod h1:iwv9a67Ha8VNa+TifujYoWGxWnu2kNVAQdSdZ4X2o5g=
-github.com/Shopify/toxiproxy/v2 v2.5.0 h1:i4LPT+qrSlKNtQf5QliVjdP08GyAH8+BUIc9gT0eahc=
-github.com/Shopify/toxiproxy/v2 v2.5.0/go.mod h1:yhM2epWtAmel9CB8r2+L+PCmhH6yH2pITaPAo7jxJl0=
 github.com/aerospike/aerospike-client-go/v5 v5.11.0 h1:z3ZmDSm3I10VMXXIIrsFCFq3IenwFqTCnLNyvnFVzrk=
 github.com/aerospike/aerospike-client-go/v5 v5.11.0/go.mod h1:e/zYeIoBg9We63fLKa+h+198+fT1GdoLfKa+Pu4QSpg=
 github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY=
@@ -2052,8 +2048,6 @@ github.com/safchain/ethtool v0.3.0 h1:gimQJpsI6sc1yIqP/y8GYgiXn/NjgvpM0RNoWLVVmP
 github.com/safchain/ethtool v0.3.0/go.mod h1:SA9BwrgyAqNo7M+uaL6IYbxpm5wk3L7Mm6ocLW+CJUs=
 github.com/samber/lo v1.38.1 h1:j2XEAqXKb09Am4ebOg31SpvzUTTs6EN3VfgeLUhPdXM=
 github.com/samber/lo v1.38.1/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA=
-github.com/samuel/go-zookeeper v0.0.0-20200724154423-2164a8ac840e h1:CGjiMQ0wMH4wtNWrlj6kiTbkPt2F3rbYnhGX6TWLfco=
-github.com/samuel/go-zookeeper v0.0.0-20200724154423-2164a8ac840e/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E=
 github.com/santhosh-tekuri/jsonschema/v5 v5.3.1 h1:lZUw3E0/J3roVtGQ+SCrUrg3ON6NgVqpn3+iol9aGu4=
 github.com/santhosh-tekuri/jsonschema/v5 v5.3.1/go.mod h1:uToXkOrWAZ6/Oc07xWQrPOhJotwFIyu2bBVN41fcDUY=
 github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
@@ -2212,10 +2206,6 @@ github.com/vmware/govmomi v0.33.1 h1:qS2VpEBd/WLbzLO5McI6h5o5zaKsrezUxRY5r9jkW8A
 github.com/vmware/govmomi v0.33.1/go.mod h1:QuzWGiEMA/FYlu5JXKjytiORQoxv2hTHdS2lWnIqKMM=
 github.com/wavefronthq/wavefront-sdk-go v0.15.0 h1:po9E3vh/0y7kOx8D9EtFp7kbSLLLKbmu/w/s1xGJAQU=
 github.com/wavefronthq/wavefront-sdk-go v0.15.0/go.mod h1:V72c8e+bXuLK8HpA6ioW0ll5mK9IPD+4IHNNDY75ksA=
-github.com/wvanbergen/kafka v0.0.0-20171203153745-e2edea948ddf h1:TOV5PC6fIWwFOFra9xJfRXZcL2pLhMI8oNuDugNxg9Q=
-github.com/wvanbergen/kafka v0.0.0-20171203153745-e2edea948ddf/go.mod h1:nxx7XRXbR9ykhnC8lXqQyJS0rfvJGxKyKw/sT1YOttg=
-github.com/wvanbergen/kazoo-go v0.0.0-20180202103751-f72d8611297a h1:ILoU84rj4AQ3q6cjQvtb9jBjx4xzR/Riq/zYhmDQiOk=
-github.com/wvanbergen/kazoo-go v0.0.0-20180202103751-f72d8611297a/go.mod h1:vQQATAGxVK20DC1rRubTJbZDDhhpA4QfU02pMdPxGO4=
 github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
 github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
 github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
diff --git a/plugins/inputs/all/kafka_consumer_legacy.go b/plugins/inputs/all/kafka_consumer_legacy.go
deleted file mode 100644
index 818552a75..000000000
--- a/plugins/inputs/all/kafka_consumer_legacy.go
+++ /dev/null
@@ -1,5 +0,0 @@
-//go:build !custom || inputs || inputs.kafka_consumer_legacy
-
-package all
-
-import _ "github.com/influxdata/telegraf/plugins/inputs/kafka_consumer_legacy" // register plugin
diff --git a/plugins/inputs/kafka_consumer/README.md b/plugins/inputs/kafka_consumer/README.md
index 0e5a3cc52..0daadebb6 100644
--- a/plugins/inputs/kafka_consumer/README.md
+++ b/plugins/inputs/kafka_consumer/README.md
@@ -3,9 +3,6 @@
 The [Kafka][kafka] consumer plugin reads from Kafka and creates metrics using
 one of the supported [input data formats][].
 
-For old kafka version (< 0.8), please use the [kafka_consumer_legacy][] input
-plugin and use the old zookeeper connection method.
-
 ## Service Input
 
 This plugin is a service input. Normal plugins gather metrics determined by the
@@ -199,7 +196,6 @@ to use them.
 ```
 
 [kafka]: https://kafka.apache.org
-[kafka_consumer_legacy]: /plugins/inputs/kafka_consumer_legacy/README.md
 [input data formats]: /docs/DATA_FORMATS_INPUT.md
 
 ## Metrics
diff --git a/plugins/inputs/kafka_consumer_legacy/README.md b/plugins/inputs/kafka_consumer_legacy/README.md
deleted file mode 100644
index e26ba7a9d..000000000
--- a/plugins/inputs/kafka_consumer_legacy/README.md
+++ /dev/null
@@ -1,72 +0,0 @@
-# Kafka Consumer Legacy Input Plugin
-
-**Deprecated in version 1.4. Please use [Kafka Consumer input plugin][]**
-
-The [Kafka](http://kafka.apache.org/) consumer plugin polls a specified Kafka
-topic and adds messages to InfluxDB. The plugin assumes messages follow the line
-protocol. [Consumer Group][1] is used to talk to the Kafka cluster so multiple
-instances of telegraf can read from the same topic in parallel.
-
-[1]: http://godoc.org/github.com/wvanbergen/kafka/consumergroup
-
-## Service Input
-
-This plugin is a service input. Normal plugins gather metrics determined by the
-interval setting. Service plugins start a service to listens and waits for
-metrics or events to occur. Service plugins have two key differences from
-normal plugins:
-
-1. The global or plugin specific `interval` setting may not apply
-2. The CLI options of `--test`, `--test-wait`, and `--once` may not produce
-   output for this plugin
-
-## Global configuration options
-
-In addition to the plugin-specific configuration settings, plugins support
-additional global and plugin configuration settings. These settings are used to
-modify metrics, tags, and field or create aliases and configure ordering, etc.
-See the [CONFIGURATION.md][CONFIGURATION.md] for more details.
-
-[CONFIGURATION.md]: ../../../docs/CONFIGURATION.md#plugins
-
-## Configuration
-
-```toml @sample.conf
-# Read metrics from Kafka topic(s)
-[[inputs.kafka_consumer_legacy]]
-  ## topic(s) to consume
-  topics = ["telegraf"]
-
-  ## an array of Zookeeper connection strings
-  zookeeper_peers = ["localhost:2181"]
-
-  ## Zookeeper Chroot
-  zookeeper_chroot = ""
-
-  ## the name of the consumer group
-  consumer_group = "telegraf_metrics_consumers"
-
-  ## Offset (must be either "oldest" or "newest")
-  offset = "oldest"
-
-  ## Data format to consume.
-  ## Each data format has its own unique set of configuration options, read
-  ## more about them here:
-  ## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md
-  data_format = "influx"
-
-  ## Maximum length of a message to consume, in bytes (default 0/unlimited);
-  ## larger messages are dropped
-  max_message_len = 65536
-```
-
-## Testing
-
-Running integration tests requires running Zookeeper & Kafka. See Makefile
-for kafka container command.
-
-[Kafka Consumer input plugin]: ../kafka_consumer/README.md
-
-## Metrics
-
-## Example Output
diff --git a/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy.go b/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy.go
deleted file mode 100644
index 0d76099e2..000000000
--- a/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy.go
+++ /dev/null
@@ -1,163 +0,0 @@
-//go:generate ../../../tools/readme_config_includer/generator
-package kafka_consumer_legacy
-
-import (
-	_ "embed"
-	"fmt"
-	"strings"
-	"sync"
-
-	"github.com/Shopify/sarama"
-	"github.com/wvanbergen/kafka/consumergroup"
-
-	"github.com/influxdata/telegraf"
-	"github.com/influxdata/telegraf/plugins/inputs"
-)
-
-//go:embed sample.conf
-var sampleConfig string
-
-type Kafka struct {
-	ConsumerGroup   string
-	Topics          []string
-	MaxMessageLen   int
-	ZookeeperPeers  []string
-	ZookeeperChroot string
-	Consumer        *consumergroup.ConsumerGroup
-
-	// Legacy metric buffer support
-	MetricBuffer int
-	// TODO remove PointBuffer, legacy support
-	PointBuffer int
-
-	Offset string
-	parser telegraf.Parser
-
-	Log telegraf.Logger
-
-	sync.Mutex
-
-	// channel for all incoming kafka messages
-	in <-chan *sarama.ConsumerMessage
-	// channel for all kafka consumer errors
-	errs <-chan error
-	done chan struct{}
-
-	// keep the accumulator internally:
-	acc telegraf.Accumulator
-
-	// doNotCommitMsgs tells the parser not to call CommitUpTo on the consumer
-	// this is mostly for test purposes, but there may be a use-case for it later.
-	doNotCommitMsgs bool
-}
-
-func (*Kafka) SampleConfig() string {
-	return sampleConfig
-}
-
-func (k *Kafka) SetParser(parser telegraf.Parser) {
-	k.parser = parser
-}
-
-func (k *Kafka) Start(acc telegraf.Accumulator) error {
-	k.Lock()
-	defer k.Unlock()
-	var consumerErr error
-
-	k.acc = acc
-
-	config := consumergroup.NewConfig()
-	config.Zookeeper.Chroot = k.ZookeeperChroot
-	switch strings.ToLower(k.Offset) {
-	case "oldest", "":
-		config.Offsets.Initial = sarama.OffsetOldest
-	case "newest":
-		config.Offsets.Initial = sarama.OffsetNewest
-	default:
-		k.Log.Infof("WARNING: Kafka consumer invalid offset %q, using 'oldest'\n",
-			k.Offset)
-		config.Offsets.Initial = sarama.OffsetOldest
-	}
-
-	if k.Consumer == nil || k.Consumer.Closed() {
-		k.Consumer, consumerErr = consumergroup.JoinConsumerGroup(
-			k.ConsumerGroup,
-			k.Topics,
-			k.ZookeeperPeers,
-			config,
-		)
-		if consumerErr != nil {
-			return consumerErr
-		}
-
-		// Setup message and error channels
-		k.in = k.Consumer.Messages()
-		k.errs = k.Consumer.Errors()
-	}
-
-	k.done = make(chan struct{})
-
-	// Start the kafka message reader
-	go k.receiver()
-	k.Log.Infof("Started the kafka consumer service, peers: %v, topics: %v\n",
-		k.ZookeeperPeers, k.Topics)
-	return nil
-}
-
-// receiver() reads all incoming messages from the consumer, and parses them into
-// influxdb metric points.
-func (k *Kafka) receiver() {
-	for {
-		select {
-		case <-k.done:
-			return
-		case err := <-k.errs:
-			if err != nil {
-				k.acc.AddError(fmt.Errorf("consumer error: %w", err))
-			}
-		case msg := <-k.in:
-			if k.MaxMessageLen != 0 && len(msg.Value) > k.MaxMessageLen {
-				k.acc.AddError(fmt.Errorf("message longer than max_message_len (%d > %d)",
-					len(msg.Value), k.MaxMessageLen))
-			} else {
-				metrics, err := k.parser.Parse(msg.Value)
-				if err != nil {
-					k.acc.AddError(fmt.Errorf("error during parsing message %q: %w", string(msg.Value), err))
-				}
-				for _, metric := range metrics {
-					k.acc.AddFields(metric.Name(), metric.Fields(), metric.Tags(), metric.Time())
-				}
-			}
-
-			if !k.doNotCommitMsgs {
-				// TODO(cam) this locking can be removed if this PR gets merged:
-				// https://github.com/wvanbergen/kafka/pull/84
-				k.Lock()
-				err := k.Consumer.CommitUpto(msg)
-				k.Unlock()
-				if err != nil {
-					k.acc.AddError(fmt.Errorf("committing to consumer failed: %w", err))
-				}
-			}
-		}
-	}
-}
-
-func (k *Kafka) Stop() {
-	k.Lock()
-	defer k.Unlock()
-	close(k.done)
-	if err := k.Consumer.Close(); err != nil {
-		k.acc.AddError(fmt.Errorf("error closing consumer: %w", err))
-	}
-}
-
-func (k *Kafka) Gather(_ telegraf.Accumulator) error {
-	return nil
-}
-
-func init() {
-	inputs.Add("kafka_consumer_legacy", func() telegraf.Input {
-		return &Kafka{}
-	})
-}
diff --git a/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy_integration_test.go b/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy_integration_test.go
deleted file mode 100644
index 372f25d0c..000000000
--- a/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy_integration_test.go
+++ /dev/null
@@ -1,98 +0,0 @@
-package kafka_consumer_legacy
-
-import (
-	"fmt"
-	"testing"
-	"time"
-
-	"github.com/IBM/sarama"
-	"github.com/stretchr/testify/require"
-
-	"github.com/influxdata/telegraf/plugins/parsers/influx"
-	"github.com/influxdata/telegraf/testutil"
-)
-
-func TestReadsMetricsFromKafkaIntegration(t *testing.T) {
-	if testing.Short() {
-		t.Skip("Skipping integration test in short mode")
-	}
-	t.Skip("Skipping test due to circleci issue; ref #2487")
-
-	brokerPeers := []string{testutil.GetLocalHost() + ":9092"}
-	zkPeers := []string{testutil.GetLocalHost() + ":2181"}
-	testTopic := fmt.Sprintf("telegraf_test_topic_legacy_%d", time.Now().Unix())
-
-	// Send a Kafka message to the kafka host
-	msg := "cpu_load_short,direction=in,host=server01,region=us-west value=23422.0 1422568543702900257\n"
-	producer, err := sarama.NewSyncProducer(brokerPeers, nil)
-	require.NoError(t, err)
-	_, _, err = producer.SendMessage(
-		&sarama.ProducerMessage{
-			Topic: testTopic,
-			Value: sarama.StringEncoder(msg),
-		})
-	require.NoError(t, err)
-	defer producer.Close()
-
-	// Start the Kafka Consumer
-	k := &Kafka{
-		Log:            testutil.Logger{},
-		ConsumerGroup:  "telegraf_test_consumers",
-		Topics:         []string{testTopic},
-		ZookeeperPeers: zkPeers,
-		PointBuffer:    100000,
-		Offset:         "oldest",
-	}
-	parser := &influx.Parser{}
-	require.NoError(t, parser.Init())
-	k.SetParser(parser)
-
-	// Verify that we can now gather the sent message
-	var acc testutil.Accumulator
-
-	// Sanity check
-	require.Empty(t, acc.Metrics, "There should not be any points")
-	if err := k.Start(&acc); err != nil {
-		t.Fatal(err.Error())
-	} else {
-		defer k.Stop()
-	}
-
-	waitForPoint(&acc, t)
-
-	// Gather points
-	err = acc.GatherError(k.Gather)
-	require.NoError(t, err)
-	if len(acc.Metrics) == 1 {
-		point := acc.Metrics[0]
-		require.Equal(t, "cpu_load_short", point.Measurement)
-		require.Equal(t, map[string]interface{}{"value": 23422.0}, point.Fields)
-		require.Equal(t, map[string]string{
-			"host":      "server01",
-			"direction": "in",
-			"region":    "us-west",
-		}, point.Tags)
-		require.Equal(t, time.Unix(0, 1422568543702900257).Unix(), point.Time.Unix())
-	} else {
-		t.Errorf("No points found in accumulator, expected 1")
-	}
-}
-
-// Waits for the metric that was sent to the kafka broker to arrive at the kafka consumer
-func waitForPoint(acc *testutil.Accumulator, t *testing.T) {
-	// Give the kafka container up to 2 seconds to get the point to the consumer
-	ticker := time.NewTicker(5 * time.Millisecond)
-	counter := 0
-	//nolint:gosimple // for-select used on purpose
-	for {
-		select {
-		case <-ticker.C:
-			counter++
-			if counter > 1000 {
-				t.Fatal("Waited for 5s, point never arrived to consumer")
-			} else if acc.NFields() == 1 {
-				return
-			}
-		}
-	}
-}
diff --git a/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy_test.go b/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy_test.go
deleted file mode 100644
index 1aeeefc4d..000000000
--- a/plugins/inputs/kafka_consumer_legacy/kafka_consumer_legacy_test.go
+++ /dev/null
@@ -1,168 +0,0 @@
-package kafka_consumer_legacy
-
-import (
-	"strings"
-	"testing"
-
-	"github.com/Shopify/sarama"
-	"github.com/stretchr/testify/require"
-
-	"github.com/influxdata/telegraf/plugins/parsers/graphite"
-	"github.com/influxdata/telegraf/plugins/parsers/influx"
-	"github.com/influxdata/telegraf/plugins/parsers/json"
-	"github.com/influxdata/telegraf/testutil"
-)
-
-const (
-	testMsg         = "cpu_load_short,host=server01 value=23422.0 1422568543702900257\n"
-	testMsgGraphite = "cpu.load.short.graphite 23422 1454780029"
-	testMsgJSON     = "{\"a\": 5, \"b\": {\"c\": 6}}\n"
-	invalidMsg      = "cpu_load_short,host=server01 1422568543702900257\n"
-)
-
-func newTestKafka() (*Kafka, chan *sarama.ConsumerMessage) {
-	in := make(chan *sarama.ConsumerMessage, 1000)
-	k := Kafka{
-		Log:             testutil.Logger{},
-		ConsumerGroup:   "test",
-		Topics:          []string{"telegraf"},
-		ZookeeperPeers:  []string{"localhost:2181"},
-		Offset:          "oldest",
-		in:              in,
-		doNotCommitMsgs: true,
-		errs:            make(chan error, 1000),
-		done:            make(chan struct{}),
-	}
-	return &k, in
-}
-
-// Test that the parser parses kafka messages into points
-func TestRunParser(t *testing.T) {
-	k, in := newTestKafka()
-	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
-
-	parser := &influx.Parser{}
-	require.NoError(t, parser.Init())
-	k.parser = parser
-
-	go k.receiver()
-	in <- saramaMsg(testMsg)
-	acc.Wait(1)
-
-	require.Equal(t, 1, acc.NFields())
-}
-
-// Test that the parser ignores invalid messages
-func TestRunParserInvalidMsg(t *testing.T) {
-	k, in := newTestKafka()
-	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
-
-	parser := &influx.Parser{}
-	require.NoError(t, parser.Init())
-	k.parser = parser
-
-	go k.receiver()
-	in <- saramaMsg(invalidMsg)
-	acc.WaitError(1)
-
-	require.Equal(t, 0, acc.NFields())
-}
-
-// Test that overlong messages are dropped
-func TestDropOverlongMsg(t *testing.T) {
-	const maxMessageLen = 64 * 1024
-	k, in := newTestKafka()
-	k.MaxMessageLen = maxMessageLen
-	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
-	overlongMsg := strings.Repeat("v", maxMessageLen+1)
-
-	go k.receiver()
-	in <- saramaMsg(overlongMsg)
-	acc.WaitError(1)
-
-	require.Equal(t, 0, acc.NFields())
-}
-
-// Test that the parser parses kafka messages into points
-func TestRunParserAndGather(t *testing.T) {
-	k, in := newTestKafka()
-	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
-
-	parser := &influx.Parser{}
-	require.NoError(t, parser.Init())
-	k.parser = parser
-
-	go k.receiver()
-	in <- saramaMsg(testMsg)
-	acc.Wait(1)
-
-	require.NoError(t, acc.GatherError(k.Gather))
-
-	require.Equal(t, 1, acc.NFields())
-	acc.AssertContainsFields(t, "cpu_load_short",
-		map[string]interface{}{"value": float64(23422)})
-}
-
-// Test that the parser parses kafka messages into points
-func TestRunParserAndGatherGraphite(t *testing.T) {
-	k, in := newTestKafka()
-	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
-
-	p := graphite.Parser{Separator: "_", Templates: []string{}}
-	require.NoError(t, p.Init())
-	k.parser = &p
-	go k.receiver()
-	in <- saramaMsg(testMsgGraphite)
-	acc.Wait(1)
-
-	require.NoError(t, acc.GatherError(k.Gather))
-
-	require.Equal(t, 1, acc.NFields())
-	acc.AssertContainsFields(t, "cpu_load_short_graphite",
-		map[string]interface{}{"value": float64(23422)})
-}
-
-// Test that the parser parses kafka messages into points
-func TestRunParserAndGatherJSON(t *testing.T) {
-	k, in := newTestKafka()
-	acc := testutil.Accumulator{}
-	k.acc = &acc
-	defer close(k.done)
-
-	parser := &json.Parser{
-		MetricName: "kafka_json_test",
-	}
-	require.NoError(t, parser.Init())
-	k.parser = parser
-	go k.receiver()
-	in <- saramaMsg(testMsgJSON)
-	acc.Wait(1)
-
-	require.NoError(t, acc.GatherError(k.Gather))
-
-	require.Equal(t, 2, acc.NFields())
-	acc.AssertContainsFields(t, "kafka_json_test",
-		map[string]interface{}{
-			"a":   float64(5),
-			"b_c": float64(6),
-		})
-}
-
-func saramaMsg(val string) *sarama.ConsumerMessage {
-	return &sarama.ConsumerMessage{
-		Key:       nil,
-		Value:     []byte(val),
-		Offset:    0,
-		Partition: 0,
-	}
-}
diff --git a/plugins/inputs/kafka_consumer_legacy/sample.conf b/plugins/inputs/kafka_consumer_legacy/sample.conf
deleted file mode 100644
index 3646b66aa..000000000
--- a/plugins/inputs/kafka_consumer_legacy/sample.conf
+++ /dev/null
@@ -1,26 +0,0 @@
-# Read metrics from Kafka topic(s)
-[[inputs.kafka_consumer_legacy]]
-  ## topic(s) to consume
-  topics = ["telegraf"]
-
-  ## an array of Zookeeper connection strings
-  zookeeper_peers = ["localhost:2181"]
-
-  ## Zookeeper Chroot
-  zookeeper_chroot = ""
-
-  ## the name of the consumer group
-  consumer_group = "telegraf_metrics_consumers"
-
-  ## Offset (must be either "oldest" or "newest")
-  offset = "oldest"
-
-  ## Data format to consume.
-  ## Each data format has its own unique set of configuration options, read
-  ## more about them here:
-  ## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md
-  data_format = "influx"
-
-  ## Maximum length of a message to consume, in bytes (default 0/unlimited);
-  ## larger messages are dropped
-  max_message_len = 65536
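Migration note: the removed `[[inputs.kafka_consumer_legacy]]` section consumed through Zookeeper (`zookeeper_peers`), while the surviving `kafka_consumer` input talks to the brokers directly. Below is a minimal sketch of an equivalent configuration; the broker address `localhost:9092` is an assumption for illustration, and the remaining values carry over from the removed sample.conf. There is no `zookeeper_chroot` equivalent, since consumer-group offsets are stored in Kafka itself, and the legacy `point_buffer`/`metric_buffer` options were kept only for backward compatibility (see the TODO in the removed source) and have no direct replacement.

```toml
# Read metrics from Kafka topic(s), replacing the removed kafka_consumer_legacy input
[[inputs.kafka_consumer]]
  ## Kafka brokers to connect to directly instead of zookeeper_peers;
  ## the address below is an assumption for illustration
  brokers = ["localhost:9092"]

  ## topic(s) to consume
  topics = ["telegraf"]

  ## the name of the consumer group
  consumer_group = "telegraf_metrics_consumers"

  ## Offset (must be either "oldest" or "newest")
  offset = "oldest"

  ## Maximum length of a message to consume, in bytes (default 0/unlimited);
  ## larger messages are dropped
  max_message_len = 65536

  ## Data format to consume.
  data_format = "influx"
```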