chore: Enable G110 rule for gosec (#13044)

Co-authored-by: Pawel Zak <Pawel Zak>
This commit is contained in:
Paweł Żak 2023-04-14 17:14:55 +02:00 committed by GitHub
parent 596ecc4a67
commit ba16eeb495
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 120 additions and 36 deletions

View File

@ -95,6 +95,7 @@ linters-settings:
- G107
- G108
- G109
- G110
- G111
- G112
- G114

View File

@ -6,9 +6,12 @@ import (
"compress/gzip"
"compress/zlib"
"errors"
"fmt"
"io"
)
const DefaultMaxDecompressionSize = 500 * 1024 * 1024 //500MB
// NewStreamContentDecoder returns a reader that will decode the stream
// according to the encoding type.
func NewStreamContentDecoder(encoding string, r io.Reader) (io.Reader, error) {
@ -92,11 +95,11 @@ func (a *AutoDecoder) SetEncoding(encoding string) {
a.encoding = encoding
}
func (a *AutoDecoder) Decode(data []byte) ([]byte, error) {
func (a *AutoDecoder) Decode(data []byte, maxDecompressionSize int64) ([]byte, error) {
if a.encoding == "gzip" {
return a.gzip.Decode(data)
return a.gzip.Decode(data, maxDecompressionSize)
}
return a.identity.Decode(data)
return a.identity.Decode(data, maxDecompressionSize)
}
func NewAutoContentDecoder() *AutoDecoder {
@ -199,7 +202,7 @@ func (*IdentityEncoder) Encode(data []byte) ([]byte, error) {
// ContentDecoder removes a wrapper encoding from byte buffers.
type ContentDecoder interface {
SetEncoding(string)
Decode([]byte) ([]byte, error)
Decode([]byte, int64) ([]byte, error)
}
// GzipDecoder decompresses buffers with gzip compression.
@ -217,17 +220,20 @@ func NewGzipDecoder() *GzipDecoder {
func (*GzipDecoder) SetEncoding(string) {}
func (d *GzipDecoder) Decode(data []byte) ([]byte, error) {
func (d *GzipDecoder) Decode(data []byte, maxDecompressionSize int64) ([]byte, error) {
err := d.reader.Reset(bytes.NewBuffer(data))
if err != nil {
return nil, err
}
d.buf.Reset()
_, err = d.buf.ReadFrom(d.reader)
n, err := io.CopyN(d.buf, d.reader, maxDecompressionSize)
if err != nil && !errors.Is(err, io.EOF) {
return nil, err
} else if n == maxDecompressionSize {
return nil, fmt.Errorf("size of decoded data exceeds allowed size %d", maxDecompressionSize)
}
err = d.reader.Close()
if err != nil {
return nil, err
@ -247,7 +253,7 @@ func NewZlibDecoder() *ZlibDecoder {
func (*ZlibDecoder) SetEncoding(string) {}
func (d *ZlibDecoder) Decode(data []byte) ([]byte, error) {
func (d *ZlibDecoder) Decode(data []byte, maxDecompressionSize int64) ([]byte, error) {
d.buf.Reset()
b := bytes.NewBuffer(data)
@ -255,10 +261,14 @@ func (d *ZlibDecoder) Decode(data []byte) ([]byte, error) {
if err != nil {
return nil, err
}
_, err = io.Copy(d.buf, r)
n, err := io.CopyN(d.buf, r, maxDecompressionSize)
if err != nil && !errors.Is(err, io.EOF) {
return nil, err
} else if n == maxDecompressionSize {
return nil, fmt.Errorf("size of decoded data exceeds allowed size %d", maxDecompressionSize)
}
err = r.Close()
if err != nil {
return nil, err
@ -275,6 +285,10 @@ func NewIdentityDecoder() *IdentityDecoder {
func (*IdentityDecoder) SetEncoding(string) {}
func (*IdentityDecoder) Decode(data []byte) ([]byte, error) {
func (*IdentityDecoder) Decode(data []byte, maxDecompressionSize int64) ([]byte, error) {
size := int64(len(data))
if size > maxDecompressionSize {
return nil, fmt.Errorf("size of decoded data: %d exceeds allowed size %d", size, maxDecompressionSize)
}
return data, nil
}

View File

@ -8,6 +8,8 @@ import (
"github.com/stretchr/testify/require"
)
const maxDecompressionSize = 1024
func TestGzipEncodeDecode(t *testing.T) {
enc := NewGzipEncoder()
dec := NewGzipDecoder()
@ -15,7 +17,7 @@ func TestGzipEncodeDecode(t *testing.T) {
payload, err := enc.Encode([]byte("howdy"))
require.NoError(t, err)
actual, err := dec.Decode(payload)
actual, err := dec.Decode(payload, maxDecompressionSize)
require.NoError(t, err)
require.Equal(t, "howdy", string(actual))
@ -28,7 +30,7 @@ func TestGzipReuse(t *testing.T) {
payload, err := enc.Encode([]byte("howdy"))
require.NoError(t, err)
actual, err := dec.Decode(payload)
actual, err := dec.Decode(payload, maxDecompressionSize)
require.NoError(t, err)
require.Equal(t, "howdy", string(actual))
@ -36,7 +38,7 @@ func TestGzipReuse(t *testing.T) {
payload, err = enc.Encode([]byte("doody"))
require.NoError(t, err)
actual, err = dec.Decode(payload)
actual, err = dec.Decode(payload, maxDecompressionSize)
require.NoError(t, err)
require.Equal(t, "doody", string(actual))
@ -49,12 +51,23 @@ func TestZlibEncodeDecode(t *testing.T) {
payload, err := enc.Encode([]byte("howdy"))
require.NoError(t, err)
actual, err := dec.Decode(payload)
actual, err := dec.Decode(payload, maxDecompressionSize)
require.NoError(t, err)
require.Equal(t, "howdy", string(actual))
}
func TestZlibEncodeDecodeWithTooLargeMessage(t *testing.T) {
enc := NewZlibEncoder()
dec := NewZlibDecoder()
payload, err := enc.Encode([]byte("howdy"))
require.NoError(t, err)
_, err = dec.Decode(payload, 3)
require.ErrorContains(t, err, "size of decoded data exceeds allowed size 3")
}
func TestIdentityEncodeDecode(t *testing.T) {
enc := NewIdentityEncoder()
dec := NewIdentityDecoder()
@ -62,7 +75,7 @@ func TestIdentityEncodeDecode(t *testing.T) {
payload, err := enc.Encode([]byte("howdy"))
require.NoError(t, err)
actual, err := dec.Decode(payload)
actual, err := dec.Decode(payload, maxDecompressionSize)
require.NoError(t, err)
require.Equal(t, "howdy", string(actual))

View File

@ -111,6 +111,11 @@ See the [CONFIGURATION.md][CONFIGURATION.md] for more details.
## - Use "auto" to determine the encoding using the ContentEncoding header
# content_encoding = "identity"
## Maximum size of decoded message.
## Acceptable units are B, KiB, KB, MiB, MB...
## Without quotes and units, interpreted as size in bytes.
# max_decompression_size = "500MB"
## Data format to consume.
## Each data format has its own unique set of configuration options, read
## more about them here:

View File

@ -14,6 +14,7 @@ import (
amqp "github.com/rabbitmq/amqp091-go"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/config"
"github.com/influxdata/telegraf/internal"
"github.com/influxdata/telegraf/plugins/common/tls"
"github.com/influxdata/telegraf/plugins/inputs"
@ -55,8 +56,9 @@ type AMQPConsumer struct {
AuthMethod string
tls.ClientConfig
ContentEncoding string `toml:"content_encoding"`
Log telegraf.Logger
ContentEncoding string `toml:"content_encoding"`
MaxDecompressionSize config.Size `toml:"max_decompression_size"`
Log telegraf.Logger
deliveries map[telegraf.TrackingID]amqp.Delivery
@ -113,6 +115,10 @@ func (a *AMQPConsumer) Init() error {
a.MaxUndeliveredMessages = 1000
}
if a.MaxDecompressionSize <= 0 {
a.MaxDecompressionSize = internal.DefaultMaxDecompressionSize
}
return nil
}
@ -144,11 +150,11 @@ func (a *AMQPConsumer) createConfig() (*amqp.Config, error) {
}
}
config := amqp.Config{
amqpConfig := amqp.Config{
TLSClientConfig: tlsCfg,
SASL: auth, // if nil, it will be PLAIN
}
return &config, nil
return &amqpConfig, nil
}
// Start satisfies the telegraf.ServiceInput interface
@ -412,7 +418,7 @@ func (a *AMQPConsumer) onMessage(acc telegraf.TrackingAccumulator, d amqp.Delive
}
a.decoder.SetEncoding(d.ContentEncoding)
body, err := a.decoder.Decode(d.Body)
body, err := a.decoder.Decode(d.Body, int64(a.MaxDecompressionSize))
if err != nil {
onError()
return err

View File

@ -23,6 +23,7 @@ func TestAutoEncoding(t *testing.T) {
a.deliveries = make(map[telegraf.TrackingID]amqp091.Delivery)
a.parser = parser
a.decoder, err = internal.NewContentDecoder("auto")
a.MaxDecompressionSize = internal.DefaultMaxDecompressionSize
require.NoError(t, err)
acc := &testutil.Accumulator{}

View File

@ -72,6 +72,11 @@
## - Use "auto" to determine the encoding using the ContentEncoding header
# content_encoding = "identity"
## Maximum size of decoded message.
## Acceptable units are B, KiB, KB, MiB, MB...
## Without quotes and units, interpreted as size in bytes.
# max_decompression_size = "500MB"
## Data format to consume.
## Each data format has its own unique set of configuration options, read
## more about them here:

View File

@ -89,6 +89,11 @@ See the [CONFIGURATION.md][CONFIGURATION.md] for more details.
## "identity" to apply no encoding.
# content_encoding = "identity"
## Maximum size of decoded packet.
## Acceptable units are B, KiB, KB, MiB, MB...
## Without quotes and units, interpreted as size in bytes.
# max_decompression_size = "500MB"
## Message splitting strategy and corresponding settings for stream sockets
## (tcp, tcp4, tcp6, unix or unixpacket). The setting is ignored for packet
## listeners such as udp.

View File

@ -14,11 +14,12 @@ import (
)
type packetListener struct {
Encoding string
SocketMode string
ReadBufferSize int
Parser telegraf.Parser
Log telegraf.Logger
Encoding string
MaxDecompressionSize int64
SocketMode string
ReadBufferSize int
Parser telegraf.Parser
Log telegraf.Logger
conn net.PacketConn
decoder internal.ContentDecoder
@ -36,7 +37,7 @@ func (l *packetListener) listen(acc telegraf.Accumulator) {
break
}
body, err := l.decoder.Decode(buf[:n])
body, err := l.decoder.Decode(buf[:n], l.MaxDecompressionSize)
if err != nil {
acc.AddError(fmt.Errorf("unable to decode incoming packet: %w", err))
}

View File

@ -58,6 +58,11 @@
## "identity" to apply no encoding.
# content_encoding = "identity"
## Maximum size of decoded packet.
## Acceptable units are B, KiB, KB, MiB, MB...
## Without quotes and units, interpreted as size in bytes.
# max_decompression_size = "500MB"
## Message splitting strategy and corresponding settings for stream sockets
## (tcp, tcp4, tcp6, unix or unixpacket). The setting is ignored for packet
## listeners such as udp.

View File

@ -15,6 +15,7 @@ import (
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/config"
"github.com/influxdata/telegraf/internal"
tlsint "github.com/influxdata/telegraf/plugins/common/tls"
"github.com/influxdata/telegraf/plugins/inputs"
"github.com/influxdata/telegraf/plugins/parsers"
@ -45,6 +46,7 @@ type SocketListener struct {
KeepAlivePeriod *config.Duration `toml:"keep_alive_period"`
SocketMode string `toml:"socket_mode"`
ContentEncoding string `toml:"content_encoding"`
MaxDecompressionSize config.Size `toml:"max_decompression_size"`
SplittingStrategy string `toml:"splitting_strategy"`
SplittingDelimiter string `toml:"splitting_delimiter"`
SplittingLength int `toml:"splitting_length"`
@ -159,6 +161,10 @@ func (sl *SocketListener) Start(acc telegraf.Accumulator) error {
return fmt.Errorf("parsing address failed: %w", err)
}
if sl.MaxDecompressionSize <= 0 {
sl.MaxDecompressionSize = internal.DefaultMaxDecompressionSize
}
switch u.Scheme {
case "tcp", "tcp4", "tcp6":
ssl := &streamListener{
@ -195,8 +201,9 @@ func (sl *SocketListener) Start(acc telegraf.Accumulator) error {
case "udp", "udp4", "udp6":
psl := &packetListener{
Encoding: sl.ContentEncoding,
Parser: sl.parser,
Encoding: sl.ContentEncoding,
MaxDecompressionSize: int64(sl.MaxDecompressionSize),
Parser: sl.parser,
}
if err := psl.setupUDP(u, ifname, int(sl.ReadBufferSize)); err != nil {
return err
@ -204,8 +211,9 @@ func (sl *SocketListener) Start(acc telegraf.Accumulator) error {
sl.listener = psl
case "ip", "ip4", "ip6":
psl := &packetListener{
Encoding: sl.ContentEncoding,
Parser: sl.parser,
Encoding: sl.ContentEncoding,
MaxDecompressionSize: int64(sl.MaxDecompressionSize),
Parser: sl.parser,
}
if err := psl.setupIP(u); err != nil {
return err
@ -213,8 +221,9 @@ func (sl *SocketListener) Start(acc telegraf.Accumulator) error {
sl.listener = psl
case "unixgram":
psl := &packetListener{
Encoding: sl.ContentEncoding,
Parser: sl.parser,
Encoding: sl.ContentEncoding,
MaxDecompressionSize: int64(sl.MaxDecompressionSize),
Parser: sl.parser,
}
if err := psl.setupUnixgram(u, sl.SocketMode); err != nil {
return err

View File

@ -7,6 +7,7 @@ import (
"compress/zlib"
"crypto/tls"
"encoding/json"
"errors"
"fmt"
"io"
"net"
@ -150,11 +151,15 @@ func UDPServer(t *testing.T, wg *sync.WaitGroup, namefieldnoprefix bool) string
return err
}
var maxDecompressionSize int64 = 500 * 1024 * 1024
bufW := bytes.NewBuffer(nil)
_, err = io.Copy(bufW, r)
if err != nil {
written, err := io.CopyN(bufW, r, maxDecompressionSize)
if err != nil && !errors.Is(err, io.EOF) {
return err
} else if written == maxDecompressionSize {
return fmt.Errorf("size of decoded data exceeds allowed size %d", maxDecompressionSize)
}
err = r.Close()
if err != nil {
return err

View File

@ -4,6 +4,7 @@ import (
"bytes"
"compress/gzip"
"encoding/json"
"errors"
"io"
"net/http"
"net/http/httptest"
@ -72,8 +73,13 @@ func TestWrite(t *testing.T) {
gz, err := gzip.NewReader(r.Body)
require.NoError(t, err)
_, err = io.Copy(&body, gz)
var maxDecompressionSize int64 = 500 * 1024 * 1024
n, err := io.CopyN(&body, gz, maxDecompressionSize)
if errors.Is(err, io.EOF) {
err = nil
}
require.NoError(t, err)
require.NotEqualf(t, n, maxDecompressionSize, "size of decoded data exceeds allowed size %d", maxDecompressionSize)
var lm Metric
err = json.Unmarshal(body.Bytes(), &lm)

View File

@ -4,8 +4,8 @@ import (
"bufio"
"bytes"
"compress/gzip"
"errors"
"fmt"
"github.com/influxdata/telegraf/testutil"
"io"
"net/http"
"net/http/httptest"
@ -23,6 +23,7 @@ import (
"github.com/influxdata/telegraf/plugins/serializers/carbon2"
"github.com/influxdata/telegraf/plugins/serializers/graphite"
"github.com/influxdata/telegraf/plugins/serializers/prometheus"
"github.com/influxdata/telegraf/testutil"
)
func getMetric() telegraf.Metric {
@ -247,8 +248,15 @@ func TestContentType(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
gz, err := gzip.NewReader(r.Body)
require.NoError(t, err)
_, err = io.Copy(&body, gz)
var maxDecompressionSize int64 = 500 * 1024 * 1024
n, err := io.CopyN(&body, gz, maxDecompressionSize)
if errors.Is(err, io.EOF) {
err = nil
}
require.NoError(t, err)
require.NotEqualf(t, n, maxDecompressionSize, "size of decoded data exceeds allowed size %d", maxDecompressionSize)
w.WriteHeader(http.StatusOK)
}))
defer ts.Close()