chore(linters): Remove blank identifiers and errors which are not needed to handle (#14399)

This commit is contained in:
Paweł Żak 2023-12-07 16:09:01 +01:00 committed by GitHub
parent c37de06aef
commit aa681be594
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
34 changed files with 124 additions and 313 deletions

View File

@ -109,7 +109,7 @@ To also reveal the actual secret, i.e. the value, you can pass the
}
sort.Strings(keys)
_, _ = fmt.Printf("Known secrets for store %q:\n", storeID)
fmt.Printf("Known secrets for store %q:\n", storeID)
for _, k := range keys {
var v []byte
if reveal {
@ -117,7 +117,7 @@ To also reveal the actual secret, i.e. the value, you can pass the
return fmt.Errorf("unable to get value of secret %q from store %q: %w", k, storeID, err)
}
}
_, _ = fmt.Printf(" %-30s %s\n", k, string(v))
fmt.Printf(" %-30s %s\n", k, string(v))
memguard.WipeBytes(v)
}
}
@ -178,7 +178,7 @@ with the ID 'mystore'.
if err != nil {
return fmt.Errorf("unable to get secret: %w", err)
}
_, _ = fmt.Printf("%s:%s = %s\n", storeID, key, value)
fmt.Printf("%s:%s = %s\n", storeID, key, value)
return nil
},

View File

@ -58,9 +58,7 @@ func (t *trimmer) process() error {
case '#':
err = t.comment()
default:
if err := t.output.WriteByte(c); err != nil {
return err
}
t.output.WriteByte(c)
continue
}
if err != nil {
@ -165,9 +163,7 @@ func (t *trimmer) doubleQuote() error {
// Found terminator
return t.output.WriteByte(c)
}
if err := t.output.WriteByte(c); err != nil {
return err
}
t.output.WriteByte(c)
}
}
@ -192,7 +188,7 @@ func (t *trimmer) tripleDoubleQuote() error {
}
continue
case '"':
_ = t.output.WriteByte(c)
t.output.WriteByte(c)
if t.hasNQuotes('"', 2) {
// Consume the two additional ending quotes
_, _ = t.readWriteByte()
@ -201,9 +197,7 @@ func (t *trimmer) tripleDoubleQuote() error {
}
continue
}
if err := t.output.WriteByte(c); err != nil {
return err
}
t.output.WriteByte(c)
}
}

View File

@ -92,8 +92,8 @@ func assignTextToSections(data []byte, sections []section) ([]section, error) {
line := strings.TrimSpace(scanner.Text())
if strings.HasPrefix(line, "#") {
_, _ = buf.Write(scanner.Bytes())
_, _ = buf.WriteString("\n")
buf.Write(scanner.Bytes())
buf.WriteString("\n")
continue
} else if buf.Len() > 0 {
if _, err := io.Copy(sections[idx].raw, &buf); err != nil {
@ -102,8 +102,8 @@ func assignTextToSections(data []byte, sections []section) ([]section, error) {
buf.Reset()
}
_, _ = sections[idx].raw.Write(scanner.Bytes())
_, _ = sections[idx].raw.WriteString("\n")
sections[idx].raw.Write(scanner.Bytes())
sections[idx].raw.WriteString("\n")
}
if err := scanner.Err(); err != nil {
return nil, fmt.Errorf("splitting by line failed: %w", err)
@ -120,8 +120,8 @@ func assignTextToSections(data []byte, sections []section) ([]section, error) {
}
// Write the remaining to the last section
for scanner.Scan() {
_, _ = sections[len(sections)-1].raw.Write(scanner.Bytes())
_, _ = sections[len(sections)-1].raw.WriteString("\n")
sections[len(sections)-1].raw.Write(scanner.Bytes())
sections[len(sections)-1].raw.WriteString("\n")
}
if err := scanner.Err(); err != nil {
return nil, fmt.Errorf("splitting by line failed: %w", err)

View File

@ -69,16 +69,10 @@ func generatePluginID(prefix string, table *ast.Table) (string, error) {
// Hash the config options to get the ID. We also prefix the ID with
// the plugin name to prevent overlap with other plugin types.
hash := sha256.New()
if _, err := hash.Write(append([]byte(prefix), 0)); err != nil {
return "", fmt.Errorf("hashing name failed: %w", err)
}
hash.Write(append([]byte(prefix), 0))
for _, kv := range cfg {
if _, err := hash.Write([]byte(kv.Key + ":" + kv.Value)); err != nil {
return "", fmt.Errorf("hashing entry %q failed: %w", kv.Key, err)
}
if _, err := hash.Write([]byte{0}); err != nil {
return "", fmt.Errorf("adding option end marker failed: %w", err)
}
hash.Write([]byte(kv.Key + ":" + kv.Value))
hash.Write([]byte{0})
}
return hex.EncodeToString(hash.Sum(nil)), nil

View File

@ -225,7 +225,7 @@ func CompressWithGzip(data io.Reader) io.ReadCloser {
// instance reading from the reader returned by the CompressWithGzip
// function. If "err" is nil, the below function will correctly report
// io.EOF.
_ = pipeWriter.CloseWithError(err)
pipeWriter.CloseWithError(err)
}()
// Return a reader which then can be read by the caller to collect the

View File

@ -173,8 +173,7 @@ func assertFoundSocket(t *testing.T, dir, sockType string, i int, sockets []*soc
expected := filepath.Join(dir, sockFile(prefix, i))
found := false
for _, s := range sockets {
_, err := fmt.Printf("Checking %s\n", s.socket)
require.NoError(t, err)
fmt.Printf("Checking %s\n", s.socket)
if s.socket == expected {
found = true
require.Equal(t, s.sockType, sockType, "Unexpected socket type for %q", s)

View File

@ -337,9 +337,7 @@ func (c *CiscoTelemetryMDT) MdtDialout(stream dialout.GRPCMdtDialout_MdtDialoutS
if packet.TotalSize == 0 {
c.handleTelemetry(packet.Data)
} else if int(packet.TotalSize) <= c.MaxMsgSize {
if _, err := chunkBuffer.Write(packet.Data); err != nil {
c.acc.AddError(fmt.Errorf("writing packet %q failed: %w", packet.Data, err))
}
chunkBuffer.Write(packet.Data)
if chunkBuffer.Len() >= int(packet.TotalSize) {
c.handleTelemetry(chunkBuffer.Bytes())
chunkBuffer.Reset()

View File

@ -85,7 +85,7 @@ func removeWindowsCarriageReturns(b bytes.Buffer) bytes.Buffer {
byt, err := b.ReadBytes(0x0D)
byt = bytes.TrimRight(byt, "\x0d")
if len(byt) > 0 {
_, _ = buf.Write(byt)
buf.Write(byt)
}
if errors.Is(err, io.EOF) {
return buf

View File

@ -232,14 +232,12 @@ func TestTruncate(t *testing.T) {
name: "should not truncate",
bufF: func() *bytes.Buffer {
var b bytes.Buffer
_, err := b.WriteString("hello world")
require.NoError(t, err)
b.WriteString("hello world")
return &b
},
expF: func() *bytes.Buffer {
var b bytes.Buffer
_, err := b.WriteString("hello world")
require.NoError(t, err)
b.WriteString("hello world")
return &b
},
},
@ -247,14 +245,12 @@ func TestTruncate(t *testing.T) {
name: "should truncate up to the new line",
bufF: func() *bytes.Buffer {
var b bytes.Buffer
_, err := b.WriteString("hello world\nand all the people")
require.NoError(t, err)
b.WriteString("hello world\nand all the people")
return &b
},
expF: func() *bytes.Buffer {
var b bytes.Buffer
_, err := b.WriteString("hello world...")
require.NoError(t, err)
b.WriteString("hello world...")
return &b
},
},
@ -263,17 +259,16 @@ func TestTruncate(t *testing.T) {
bufF: func() *bytes.Buffer {
var b bytes.Buffer
for i := 0; i < 2*MaxStderrBytes; i++ {
require.NoError(t, b.WriteByte('b'))
b.WriteByte('b')
}
return &b
},
expF: func() *bytes.Buffer {
var b bytes.Buffer
for i := 0; i < MaxStderrBytes; i++ {
require.NoError(t, b.WriteByte('b'))
b.WriteByte('b')
}
_, err := b.WriteString("...")
require.NoError(t, err)
b.WriteString("...")
return &b
},
},

View File

@ -74,12 +74,8 @@ func (p Packet) Compile() (payload []byte, err error) {
return nil, err
}
if _, err = buffer.WriteString(p.Body); err != nil {
return nil, err
}
if _, err = buffer.Write(padding[:]); err != nil {
return nil, err
}
buffer.WriteString(p.Body)
buffer.Write(padding[:])
return buffer.Bytes(), nil
}

View File

@ -142,10 +142,7 @@ func (c *ConfigurationPerMetric) Check() error {
def.Fields[fidx] = f
// Check for duplicate field definitions
id, err := c.fieldID(seed, def, f)
if err != nil {
return fmt.Errorf("cannot determine field id for %q: %w", f.Name, err)
}
id := c.fieldID(seed, def, f)
if seenFields[id] {
return fmt.Errorf("field %q duplicated in measurement %q (slave %d)", f.Name, def.Measurement, def.SlaveID)
}
@ -313,55 +310,29 @@ func (c *ConfigurationPerMetric) newField(def metricFieldDefinition, mdef metric
return f, nil
}
func (c *ConfigurationPerMetric) fieldID(seed maphash.Seed, def metricDefinition, field metricFieldDefinition) (uint64, error) {
func (c *ConfigurationPerMetric) fieldID(seed maphash.Seed, def metricDefinition, field metricFieldDefinition) uint64 {
var mh maphash.Hash
mh.SetSeed(seed)
if err := mh.WriteByte(def.SlaveID); err != nil {
return 0, err
}
if err := mh.WriteByte(0); err != nil {
return 0, err
}
if _, err := mh.WriteString(field.RegisterType); err != nil {
return 0, err
}
if err := mh.WriteByte(0); err != nil {
return 0, err
}
if _, err := mh.WriteString(def.Measurement); err != nil {
return 0, err
}
if err := mh.WriteByte(0); err != nil {
return 0, err
}
if _, err := mh.WriteString(field.Name); err != nil {
return 0, err
}
if err := mh.WriteByte(0); err != nil {
return 0, err
}
mh.WriteByte(def.SlaveID)
mh.WriteByte(0)
mh.WriteString(field.RegisterType)
mh.WriteByte(0)
mh.WriteString(def.Measurement)
mh.WriteByte(0)
mh.WriteString(field.Name)
mh.WriteByte(0)
// Tags
for k, v := range def.Tags {
if _, err := mh.WriteString(k); err != nil {
return 0, err
}
if err := mh.WriteByte('='); err != nil {
return 0, err
}
if _, err := mh.WriteString(v); err != nil {
return 0, err
}
if err := mh.WriteByte(':'); err != nil {
return 0, err
}
}
if err := mh.WriteByte(0); err != nil {
return 0, err
mh.WriteString(k)
mh.WriteByte('=')
mh.WriteString(v)
mh.WriteByte(':')
}
mh.WriteByte(0)
return mh.Sum64(), nil
return mh.Sum64()
}
func (c *ConfigurationPerMetric) determineOutputDatatype(input string) (string, error) {

View File

@ -179,10 +179,7 @@ func (c *ConfigurationPerRequest) Check() error {
def.Fields[fidx] = f
// Check for duplicate field definitions
id, err := c.fieldID(seed, def, f)
if err != nil {
return fmt.Errorf("cannot determine field id for %q: %w", f.Name, err)
}
id := c.fieldID(seed, def, f)
if seenFields[id] {
return fmt.Errorf("field %q duplicated in measurement %q (slave %d/%q)", f.Name, f.Measurement, def.SlaveID, def.RegisterType)
}
@ -360,55 +357,29 @@ func (c *ConfigurationPerRequest) newFieldFromDefinition(def requestFieldDefinit
return f, nil
}
func (c *ConfigurationPerRequest) fieldID(seed maphash.Seed, def requestDefinition, field requestFieldDefinition) (uint64, error) {
func (c *ConfigurationPerRequest) fieldID(seed maphash.Seed, def requestDefinition, field requestFieldDefinition) uint64 {
var mh maphash.Hash
mh.SetSeed(seed)
if err := mh.WriteByte(def.SlaveID); err != nil {
return 0, err
}
if err := mh.WriteByte(0); err != nil {
return 0, err
}
if _, err := mh.WriteString(def.RegisterType); err != nil {
return 0, err
}
if err := mh.WriteByte(0); err != nil {
return 0, err
}
if _, err := mh.WriteString(field.Measurement); err != nil {
return 0, err
}
if err := mh.WriteByte(0); err != nil {
return 0, err
}
if _, err := mh.WriteString(field.Name); err != nil {
return 0, err
}
if err := mh.WriteByte(0); err != nil {
return 0, err
}
mh.WriteByte(def.SlaveID)
mh.WriteByte(0)
mh.WriteString(def.RegisterType)
mh.WriteByte(0)
mh.WriteString(field.Measurement)
mh.WriteByte(0)
mh.WriteString(field.Name)
mh.WriteByte(0)
// Tags
for k, v := range def.Tags {
if _, err := mh.WriteString(k); err != nil {
return 0, err
}
if err := mh.WriteByte('='); err != nil {
return 0, err
}
if _, err := mh.WriteString(v); err != nil {
return 0, err
}
if err := mh.WriteByte(':'); err != nil {
return 0, err
}
}
if err := mh.WriteByte(0); err != nil {
return 0, err
mh.WriteString(k)
mh.WriteByte('=')
mh.WriteString(v)
mh.WriteByte(':')
}
mh.WriteByte(0)
return mh.Sum64(), nil
return mh.Sum64()
}
func (c *ConfigurationPerRequest) determineOutputDatatype(input string) (string, error) {

View File

@ -15,8 +15,8 @@ func determineConverterString(byteOrder string) (fieldConverterFunc, error) {
var buf bytes.Buffer
for i := 0; i < len(b); i += 2 {
v := tohost(b[i : i+2])
_ = buf.WriteByte(byte(v >> 8))
_ = buf.WriteByte(byte(v & 0xFF))
buf.WriteByte(byte(v >> 8))
buf.WriteByte(byte(v & 0xFF))
}
// Remove everything after null-termination
s, _ := bytes.CutSuffix(buf.Bytes(), []byte{0x00})

View File

@ -204,11 +204,7 @@ func (n *mockNSQD) handle(conn net.Conn) {
}
rdyCount--
}
buf, err := framedResponse(inst.frameType, inst.body)
if err != nil {
log.Print(err.Error())
goto exit
}
buf := framedResponse(inst.frameType, inst.body)
_, err = conn.Write(buf)
if err != nil {
log.Print(err.Error())
@ -224,26 +220,20 @@ exit:
conn.Close()
}
func framedResponse(frameType int32, data []byte) ([]byte, error) {
func framedResponse(frameType int32, data []byte) []byte {
var w bytes.Buffer
beBuf := make([]byte, 4)
size := uint32(len(data)) + 4
binary.BigEndian.PutUint32(beBuf, size)
_, err := w.Write(beBuf)
if err != nil {
return nil, err
}
w.Write(beBuf)
binary.BigEndian.PutUint32(beBuf, uint32(frameType))
_, err = w.Write(beBuf)
if err != nil {
return nil, err
}
w.Write(beBuf)
_, err = w.Write(data)
return w.Bytes(), err
w.Write(data)
return w.Bytes()
}
func frameMessage(m *nsq.Message) ([]byte, error) {

View File

@ -98,15 +98,9 @@ func (p *PgBouncer) accRow(row scanner, columns []string) (map[string]string, ma
if !ok {
return nil, nil, fmt.Errorf("database not a string, but %T", *columnMap["database"])
}
_, err := dbname.WriteString(name)
if err != nil {
return nil, nil, fmt.Errorf("writing database name failed: %w", err)
}
dbname.WriteString(name)
} else {
_, err := dbname.WriteString("pgbouncer")
if err != nil {
return nil, nil, fmt.Errorf("writing 'pgbouncer' failed: %w", err)
}
dbname.WriteString("pgbouncer")
}
var tagAddress string

View File

@ -247,9 +247,7 @@ func (c *child) handleRecord(rec *record) error {
return err
}
if req.pw != nil {
if err := req.pw.CloseWithError(ErrRequestAborted); err != nil {
return err
}
req.pw.CloseWithError(ErrRequestAborted)
}
if !req.keepConn {
// connection will close upon return

View File

@ -146,12 +146,8 @@ func (c *conn) writeRecord(recType recType, reqID uint16, b []byte) error {
if err := binary.Write(&c.buf, binary.BigEndian, c.h); err != nil {
return err
}
if _, err := c.buf.Write(b); err != nil {
return err
}
if _, err := c.buf.Write(pad[:c.h.PaddingLength]); err != nil {
return err
}
c.buf.Write(b)
c.buf.Write(pad[:c.h.PaddingLength])
_, err := c.rwc.Write(c.buf.Bytes())
return err
}

View File

@ -134,23 +134,17 @@ func (p *Postgresql) accRow(row scanner, acc telegraf.Accumulator, columns []str
if columnMap["datname"] != nil {
// extract the database name from the column map
if dbNameStr, ok := (*columnMap["datname"]).(string); ok {
if _, err := dbname.WriteString(dbNameStr); err != nil {
return err
}
dbname.WriteString(dbNameStr)
} else {
// PG 12 adds tracking of global objects to pg_stat_database
if _, err := dbname.WriteString("postgres_global"); err != nil {
return err
}
dbname.WriteString("postgres_global")
}
} else {
database, err := p.GetConnectDatabase(tagAddress)
if err != nil {
return err
}
if _, err := dbname.WriteString(database); err != nil {
return err
}
dbname.WriteString(database)
}
tags := map[string]string{"server": tagAddress, "db": dbname.String()}

View File

@ -206,26 +206,20 @@ func (p *Postgresql) accRow(measName string, row scanner, acc telegraf.Accumulat
// extract the database name from the column map
switch datname := (*c).(type) {
case string:
if _, err := dbname.WriteString(datname); err != nil {
return err
}
dbname.WriteString(datname)
default:
database, err := p.GetConnectDatabase(tagAddress)
if err != nil {
return err
}
if _, err := dbname.WriteString(database); err != nil {
return err
}
dbname.WriteString(database)
}
} else {
database, err := p.GetConnectDatabase(tagAddress)
if err != nil {
return err
}
if _, err := dbname.WriteString(database); err != nil {
return err
}
dbname.WriteString(database)
}
// Process the additional tags

View File

@ -236,10 +236,7 @@ func (s *S7comm) createRequests() error {
}
// Check for duplicate field definitions
id, err := fieldID(seed, cfg, f)
if err != nil {
return fmt.Errorf("cannot determine field id for %q: %w", f.Name, err)
}
id := fieldID(seed, cfg, f)
if seenFields[id] {
return fmt.Errorf("duplicate field definition field %q in metric %q", f.Name, cfg.Name)
}
@ -382,43 +379,25 @@ func handleFieldAddress(address string) (*gos7.S7DataItem, converterFunc, error)
return item, f, nil
}
func fieldID(seed maphash.Seed, def metricDefinition, field metricFieldDefinition) (uint64, error) {
func fieldID(seed maphash.Seed, def metricDefinition, field metricFieldDefinition) uint64 {
var mh maphash.Hash
mh.SetSeed(seed)
if _, err := mh.WriteString(def.Name); err != nil {
return 0, err
}
if err := mh.WriteByte(0); err != nil {
return 0, err
}
if _, err := mh.WriteString(field.Name); err != nil {
return 0, err
}
if err := mh.WriteByte(0); err != nil {
return 0, err
}
mh.WriteString(def.Name)
mh.WriteByte(0)
mh.WriteString(field.Name)
mh.WriteByte(0)
// Tags
for k, v := range def.Tags {
if _, err := mh.WriteString(k); err != nil {
return 0, err
}
if err := mh.WriteByte('='); err != nil {
return 0, err
}
if _, err := mh.WriteString(v); err != nil {
return 0, err
}
if err := mh.WriteByte(':'); err != nil {
return 0, err
}
}
if err := mh.WriteByte(0); err != nil {
return 0, err
mh.WriteString(k)
mh.WriteByte('=')
mh.WriteString(v)
mh.WriteByte(':')
}
mh.WriteByte(0)
return mh.Sum64(), nil
return mh.Sum64()
}
// Add this plugin to telegraf

View File

@ -494,9 +494,7 @@ func (s *Statsd) udpListen(conn *net.UDPConn) error {
return fmt.Errorf("bufPool is not a bytes buffer")
}
b.Reset()
if _, err := b.Write(buf[:n]); err != nil {
return err
}
b.Write(buf[:n])
select {
case s.in <- input{
Buffer: b,

View File

@ -84,29 +84,24 @@ func (m *Multiline) ProcessLine(text string, buffer *bytes.Buffer) string {
if m.matchQuotation(text) || m.matchString(text) {
// Restore the newline removed by tail's scanner
if buffer.Len() > 0 && m.config.PreserveNewline {
_, _ = buffer.WriteString("\n")
buffer.WriteString("\n")
}
// Ignore the returned error as we cannot do anything about it anyway
_, _ = buffer.WriteString(text)
buffer.WriteString(text)
return ""
}
if m.config.MatchWhichLine == Previous {
previousText := buffer.String()
buffer.Reset()
if _, err := buffer.WriteString(text); err != nil {
return ""
}
buffer.WriteString(text)
text = previousText
} else {
// Next
if buffer.Len() > 0 {
if m.config.PreserveNewline {
_, _ = buffer.WriteString("\n")
}
if _, err := buffer.WriteString(text); err != nil {
return ""
buffer.WriteString("\n")
}
buffer.WriteString(text)
text = buffer.String()
buffer.Reset()
}

View File

@ -108,11 +108,9 @@ func TestMultilineFlush(t *testing.T) {
m, err := c.NewMultiline()
require.NoError(t, err, "Configuration was OK.")
var buffer bytes.Buffer
_, err = buffer.WriteString("foo")
require.NoError(t, err)
buffer.WriteString("foo")
text := m.Flush(&buffer)
require.Equal(t, "foo", text)
require.Zero(t, buffer.Len())
}

View File

@ -92,10 +92,7 @@ func flatten(metrics []*testutil.Metric) map[string]interface{} {
for _, m := range metrics {
buf := &bytes.Buffer{}
for k, v := range m.Tags {
_, err := buf.WriteString(fmt.Sprintf("%s=%s", k, v))
if err != nil {
return nil
}
buf.WriteString(fmt.Sprintf("%s=%s", k, v))
}
for k, v := range m.Fields {
flat[fmt.Sprintf("%s %s", buf.String(), k)] = v

View File

@ -122,9 +122,7 @@ func jsonToZipkinThrift(jsonRaw []byte) ([]byte, error) {
func thriftToJSONSpans(thriftData []byte) ([]byte, error) {
buffer := thrift.NewTMemoryBuffer()
if _, err := buffer.Write(thriftData); err != nil {
return nil, fmt.Errorf("error in buffer write: %w", err)
}
buffer.Write(thriftData)
transport := thrift.NewTBinaryProtocolConf(buffer, nil)
_, size, err := transport.ReadListBegin(context.Background())

View File

@ -9,6 +9,7 @@ import (
"time"
"github.com/apache/thrift/lib/go/thrift"
"github.com/influxdata/telegraf/plugins/inputs/zipkin/codec"
"github.com/influxdata/telegraf/plugins/inputs/zipkin/codec/thrift/gen-go/zipkincore"
)
@ -16,9 +17,7 @@ import (
// UnmarshalThrift converts raw bytes in thrift format to a slice of spans
func UnmarshalThrift(body []byte) ([]*zipkincore.Span, error) {
buffer := thrift.NewTMemoryBuffer()
if _, err := buffer.Write(body); err != nil {
return nil, err
}
buffer.Write(body)
transport := thrift.NewTBinaryProtocolConf(buffer, nil)
_, size, err := transport.ReadListBegin(context.Background())

View File

@ -221,10 +221,7 @@ func (q *AMQP) serialize(metrics []telegraf.Metric) ([]byte, error) {
q.Log.Debugf("Could not serialize metric: %v", err)
continue
}
_, err = buf.Write(octets)
if err != nil {
return nil, err
}
buf.Write(octets)
}
body := buf.Bytes()
return body, nil

View File

@ -180,7 +180,7 @@ func removeWindowsCarriageReturns(b bytes.Buffer) bytes.Buffer {
byt, err := b.ReadBytes(0x0D)
byt = bytes.TrimRight(byt, "\x0d")
if len(byt) > 0 {
_, _ = buf.Write(byt)
buf.Write(byt)
}
if errors.Is(err, io.EOF) {
return buf

View File

@ -238,10 +238,7 @@ func TCPServer(t *testing.T, wg *sync.WaitGroup, tlsConfig *tls.Config, errs cha
if bufR[0] == 0 { // message delimiter found
break
}
_, err = bufW.Write(bufR)
if err != nil {
return err
}
bufW.Write(bufR)
}
}

View File

@ -11,6 +11,8 @@ import (
"time"
"github.com/google/go-cmp/cmp"
"github.com/stretchr/testify/require"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/config"
"github.com/influxdata/telegraf/internal"
@ -19,8 +21,6 @@ import (
"github.com/influxdata/telegraf/plugins/inputs/file"
"github.com/influxdata/telegraf/plugins/parsers/influx"
"github.com/influxdata/telegraf/testutil"
"github.com/stretchr/testify/require"
)
var dummyEntry = Entry{
@ -37,9 +37,9 @@ func generateBinary(data []interface{}, order binary.ByteOrder) ([]byte, error)
var err error
switch v := x.(type) {
case []byte:
_, err = buf.Write(v)
buf.Write(v)
case string:
_, err = buf.WriteString(v)
buf.WriteString(v)
default:
err = binary.Write(&buf, order, x)
}

View File

@ -52,16 +52,12 @@ func (ep *ValueParser) parse(p *PointParser, pt *Point) error {
p.writeBuf.Reset()
if tok == MinusSign {
if _, err := p.writeBuf.WriteString(lit); err != nil {
return fmt.Errorf("unable to write: %w", err)
}
p.writeBuf.WriteString(lit)
tok, lit = p.scan()
}
for tok != EOF && (tok == Letter || tok == Number || tok == Dot || tok == MinusSign) {
if _, err := p.writeBuf.WriteString(lit); err != nil {
return fmt.Errorf("unable to write: %w", err)
}
p.writeBuf.WriteString(lit)
tok, lit = p.scan()
}
p.unscan()
@ -93,9 +89,7 @@ func (ep *TimestampParser) parse(p *PointParser, pt *Point) error {
p.writeBuf.Reset()
for tok == Number {
if _, err := p.writeBuf.WriteString(lit); err != nil {
return fmt.Errorf("unable to write: %w", err)
}
p.writeBuf.WriteString(lit)
tok, lit = p.scan()
}
p.unscan()
@ -192,9 +186,7 @@ func parseQuotedLiteral(p *PointParser) (string, error) {
for tok != EOF && (tok != Quotes || (tok == Quotes && escaped)) {
// let everything through
escaped = tok == Backslash
if _, err := p.writeBuf.WriteString(lit); err != nil {
return "", fmt.Errorf("unable to write: %w", err)
}
p.writeBuf.WriteString(lit)
tok, lit = p.scan()
}
if tok == EOF {
@ -215,9 +207,7 @@ func parseLiteral(p *PointParser) (string, error) {
p.writeBuf.Reset()
for tok != EOF && tok > literalBeg && tok < literalEnd {
if _, err := p.writeBuf.WriteString(lit); err != nil {
return "", fmt.Errorf("unable to write: %w", err)
}
p.writeBuf.WriteString(lit)
tok, lit = p.scan()
if tok == Delta {
return "", errors.New("found delta inside metric name")

View File

@ -144,10 +144,7 @@ func (s *GraphiteSerializer) SerializeBatch(metrics []telegraf.Metric) ([]byte,
if err != nil {
return nil, err
}
_, err = batch.Write(buf)
if err != nil {
return nil, err
}
batch.Write(buf)
}
return batch.Bytes(), nil
}

View File

@ -706,14 +706,9 @@ func prompbToText(data []byte) ([]byte, error) {
}
samples := protoToSamples(&req)
for _, sample := range samples {
_, err = buf.Write([]byte(fmt.Sprintf("%s %s\n", sample.Metric.String(), sample.Value.String())))
if err != nil {
return nil, err
}
}
if err != nil {
return nil, err
buf.Write([]byte(fmt.Sprintf("%s %s\n", sample.Metric.String(), sample.Value.String())))
}
return buf.Bytes(), nil
}

View File

@ -12,7 +12,6 @@ package main
import (
"bytes"
"errors"
"fmt"
"io"
"log"
@ -92,18 +91,14 @@ func insertInclude(buf *bytes.Buffer, include string) error {
func insertIncludes(buf *bytes.Buffer, b *includeBlock) error {
// Insert newlines before and after
if b.Newlines {
if _, err := buf.Write([]byte("\n")); err != nil {
return errors.New("adding newline failed")
}
buf.Write([]byte("\n"))
}
// Insert all includes in the order they occurred
for i, include := range b.Includes {
if i > 0 {
// Add a separating newline between included blocks
if _, err := buf.Write([]byte("\n")); err != nil {
return errors.New("adding newline failed")
}
buf.Write([]byte("\n"))
}
if err := insertInclude(buf, include); err != nil {
return err
@ -111,9 +106,7 @@ func insertIncludes(buf *bytes.Buffer, b *includeBlock) error {
}
// Make sure we add a trailing newline
if !bytes.HasSuffix(buf.Bytes(), []byte("\n")) || b.Newlines {
if _, err := buf.Write([]byte("\n")); err != nil {
return errors.New("adding newline failed")
}
buf.Write([]byte("\n"))
}
return nil
@ -228,13 +221,9 @@ func main() {
offset := 0
for _, b := range blocksToReplace {
// Copy everything up to the beginning of the block we want to replace and make sure we get a newline
if _, err := output.Write(readme[offset:b.Start]); err != nil {
log.Fatalf("Writing non-replaced content failed: %v", err)
}
output.Write(readme[offset:b.Start])
if !bytes.HasSuffix(output.Bytes(), []byte("\n")) {
if _, err := output.Write([]byte("\n")); err != nil {
log.Fatalf("Writing failed: %v", err)
}
output.Write([]byte("\n"))
}
offset = b.Stop
@ -244,9 +233,7 @@ func main() {
}
}
// Copy the remaining of the original file...
if _, err := output.Write(readme[offset:]); err != nil {
log.Fatalf("Writing remaining content failed: %v", err)
}
output.Write(readme[offset:])
// Write output with same permission as input
file, err := os.OpenFile(inputFilename, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, perm)