chore: Fix linter findings for `revive:comment-spacings` (part 1) (#15896)

Authored by Paweł Żak on 2024-09-19 10:55:45 +02:00; committed by GitHub
parent ee9c47cc7a
commit 43590ca730
31 changed files with 118 additions and 118 deletions
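
For context, revive's comment-spacings rule requires a space between the // marker and the comment text, while directive comments keep their compact, unspaced form. Below is a minimal sketch of the pattern this commit applies, using hypothetical example code that is not part of the commit (revive's rule arguments can allowlist directive prefixes such as nolint):

package example

//flagged: revive:comment-spacings wants a space after the slashes
const before = 1

// fine: the marker and the text are separated by a space
const after = 2

// Directive comments such as //nolint:gosec stay unspaced so that
// golangci-lint still recognizes them as machine-readable directives.
const quoted = 3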


@@ -265,7 +265,7 @@ func (t *Telegraf) watchRemoteConfigs(ctx context.Context, signals chan os.Signa
return
case <-ticker.C:
for _, configURL := range remoteConfigs {
- resp, err := http.Head(configURL) //nolint: gosec // user provided URL
+ resp, err := http.Head(configURL) //nolint:gosec // user provided URL
if err != nil {
log.Printf("W! Error fetching config URL, %s: %s\n", configURL, err)
continue


@@ -1292,7 +1292,7 @@ func TestPersisterProcessorRegistration(t *testing.T) {
}
}
- /*** Mockup INPUT plugin for (new) parser testing to avoid cyclic dependencies ***/
+ // Mockup INPUT plugin for (new) parser testing to avoid cyclic dependencies
type MockupInputPluginParserNew struct {
Parser telegraf.Parser
ParserFunc telegraf.ParserFunc
@@ -1311,7 +1311,7 @@ func (m *MockupInputPluginParserNew) SetParserFunc(f telegraf.ParserFunc) {
m.ParserFunc = f
}
- /*** Mockup INPUT plugin for testing to avoid cyclic dependencies ***/
+ // Mockup INPUT plugin for testing to avoid cyclic dependencies
type MockupInputPlugin struct {
Servers []string `toml:"servers"`
Methods []string `toml:"methods"`
@@ -1341,7 +1341,7 @@ func (m *MockupInputPlugin) SetParser(parser telegraf.Parser) {
m.parser = parser
}
- /*** Mockup INPUT plugin with ParserFunc interface ***/
+ // Mockup INPUT plugin with ParserFunc interface
type MockupInputPluginParserFunc struct {
parserFunc telegraf.ParserFunc
}
@@ -1356,7 +1356,7 @@ func (m *MockupInputPluginParserFunc) SetParserFunc(pf telegraf.ParserFunc) {
m.parserFunc = pf
}
- /*** Mockup INPUT plugin without ParserFunc interface ***/
+ // Mockup INPUT plugin without ParserFunc interface
type MockupInputPluginParserOnly struct {
parser telegraf.Parser
}
@@ -1371,7 +1371,7 @@ func (m *MockupInputPluginParserOnly) SetParser(p telegraf.Parser) {
m.parser = p
}
- /*** Mockup PROCESSOR plugin for testing to avoid cyclic dependencies ***/
+ // Mockup PROCESSOR plugin for testing to avoid cyclic dependencies
type MockupProcessorPluginParser struct {
Parser telegraf.Parser
ParserFunc telegraf.ParserFunc
@@ -1398,7 +1398,7 @@ func (m *MockupProcessorPluginParser) SetParserFunc(f telegraf.ParserFunc) {
m.ParserFunc = f
}
- /*** Mockup PROCESSOR plugin without parser ***/
+ // Mockup PROCESSOR plugin without parser
type MockupProcessorPlugin struct {
Option string `toml:"option"`
state []uint64
@@ -1433,7 +1433,7 @@ func (m *MockupProcessorPlugin) SetState(state interface{}) error {
return nil
}
- /*** Mockup PROCESSOR plugin with parser ***/
+ // Mockup PROCESSOR plugin with parser
type MockupProcessorPluginParserOnly struct {
Parser telegraf.Parser
}
@@ -1456,7 +1456,7 @@ func (m *MockupProcessorPluginParserOnly) SetParser(parser telegraf.Parser) {
m.Parser = parser
}
- /*** Mockup PROCESSOR plugin with parser-function ***/
+ // Mockup PROCESSOR plugin with parser-function
type MockupProcessorPluginParserFunc struct {
Parser telegraf.ParserFunc
}
@@ -1479,7 +1479,7 @@ func (m *MockupProcessorPluginParserFunc) SetParserFunc(pf telegraf.ParserFunc)
m.Parser = pf
}
- /*** Mockup OUTPUT plugin for testing to avoid cyclic dependencies ***/
+ // Mockup OUTPUT plugin for testing to avoid cyclic dependencies
type MockupOutputPlugin struct {
URL string `toml:"url"`
Headers map[string]string `toml:"headers"`
@@ -1502,7 +1502,7 @@ func (m *MockupOutputPlugin) Write(_ []telegraf.Metric) error {
return nil
}
- /*** Mockup OUTPUT plugin for serializer testing to avoid cyclic dependencies ***/
+ // Mockup OUTPUT plugin for serializer testing to avoid cyclic dependencies
type MockupOutputPluginSerializerOld struct {
Serializer serializers.Serializer
}
@@ -1543,7 +1543,7 @@ func (*MockupOutputPluginSerializerNew) Write(_ []telegraf.Metric) error {
return nil
}
- /*** Mockup INPUT plugin with state for testing to avoid cyclic dependencies ***/
+ // Mockup INPUT plugin with state for testing to avoid cyclic dependencies
type MockupState struct {
Name string
Version uint64


@@ -787,7 +787,7 @@ func TestSecretImplTestSuiteProtected(t *testing.T) {
suite.Run(t, &SecretImplTestSuite{protected: true})
}
- /*** Mockup (input) plugin for testing to avoid cyclic dependencies ***/
+ // Mockup (input) plugin for testing to avoid cyclic dependencies
type MockupSecretPlugin struct {
Secret Secret `toml:"secret"`
Expected string `toml:"expected"`


@@ -261,7 +261,7 @@ func TestTOMLParsingIntegerSizes(t *testing.T) {
}
}
- /*** Mockup (input) plugin for testing to avoid cyclic dependencies ***/
+ // Mockup (input) plugin for testing to avoid cyclic dependencies
type MockupTypesPlugin struct {
Durations []config.Duration `toml:"durations"`
Sizes []config.Size `toml:"sizes"`


@@ -13,7 +13,7 @@ import (
"github.com/klauspost/pgzip"
)
- const defaultMaxDecompressionSize int64 = 500 * 1024 * 1024 //500MB
+ const defaultMaxDecompressionSize int64 = 500 * 1024 * 1024 // 500MB
// DecodingOption provide methods to change the decoding from the standard
// configuration.


@@ -25,7 +25,7 @@ func TestCompileAndMatch(t *testing.T) {
}
tests := []test{
- //test super asterisk
+ // test super asterisk
{path: filepath.Join(testdataDir, "**"), matches: 7},
// test single asterisk
{path: filepath.Join(testdataDir, "*.log"), matches: 3},


@@ -135,7 +135,7 @@ func (w *FileWriter) rotateIfNeeded() error {
if (w.interval > 0 && time.Now().After(w.expireTime)) ||
(w.maxSizeInBytes > 0 && w.bytesWritten >= w.maxSizeInBytes) {
if err := w.rotate(); err != nil {
- //Ignore rotation errors and keep the log open
+ // Ignore rotation errors and keep the log open
fmt.Printf("unable to rotate the file %q, %s", w.filename, err.Error())
}
return w.openCurrent()
@@ -160,7 +160,7 @@ func (w *FileWriter) rotate() (err error) {
func (w *FileWriter) purgeArchivesIfNeeded() (err error) {
if w.maxArchives == -1 {
- //Skip archiving
+ // Skip archiving
return nil
}
@@ -169,9 +169,9 @@ func (w *FileWriter) purgeArchivesIfNeeded() (err error) {
return err
}
- //if there are more archives than the configured maximum, then purge older files
+ // if there are more archives than the configured maximum, then purge older files
if len(matches) > w.maxArchives {
- //sort files alphanumerically to delete older files first
+ // sort files alphanumerically to delete older files first
sort.Strings(matches)
for _, filename := range matches[:len(matches)-w.maxArchives] {
if err := os.Remove(filename); err != nil {


@@ -78,7 +78,7 @@ func (f *Field) Init(tr Translator) error {
if f.Conversion == "" {
f.Conversion = conversion
}
- //TODO use textual convention conversion from the MIB
+ // TODO use textual convention conversion from the MIB
}
if f.SecondaryIndexTable && f.SecondaryIndexUse {


@@ -70,7 +70,7 @@ func LoadMibsFromPath(paths []string, log telegraf.Logger, loader MibLoader) err
log.Warnf("Couldn't evaluate symbolic links for %v: %v", symlink, err)
continue
}
- //replace symlink's info with the target's info
+ // replace symlink's info with the target's info
info, err = os.Lstat(target)
if err != nil {
log.Warnf("Couldn't stat target %v: %v", target, err)


@@ -53,8 +53,8 @@ type RTableRow struct {
// Init() builds & initializes the nested fields.
func (t *Table) Init(tr Translator) error {
- //makes sure oid or name is set in config file
- //otherwise snmp will produce metrics with an empty name
+ // makes sure oid or name is set in config file
+ // otherwise snmp will produce metrics with an empty name
if t.Oid == "" && t.Name == "" {
return errors.New("SNMP table in config file is not named. One or both of the oid and name settings must be set")
}
@@ -120,7 +120,7 @@ func (t *Table) initBuild() error {
func (t Table) Build(gs Connection, walk bool) (*RTable, error) {
rows := map[string]RTableRow{}
- //translation table for secondary index (when performing join on two tables)
+ // translation table for secondary index (when performing join on two tables)
secIdxTab := make(map[string]string)
secGlobalOuterJoin := false
for i, f := range t.Fields {
@@ -262,7 +262,7 @@ func (t Table) Build(gs Connection, walk bool) (*RTable, error) {
rtr.Fields[f.Name] = v
}
if f.SecondaryIndexTable {
- //indexes are stored here with prepending "." so we need to add them if needed
+ // indexes are stored here with prepending "." so we need to add them if needed
var vss string
if ok {
vss = "." + vs
@@ -281,7 +281,7 @@ func (t Table) Build(gs Connection, walk bool) (*RTable, error) {
rt := RTable{
Name: t.Name,
- Time: time.Now(), //TODO record time at start
+ Time: time.Now(), // TODO record time at start
Rows: make([]RTableRow, 0, len(rows)),
}
for _, r := range rows {


@@ -210,7 +210,7 @@ func TestTableJoinNoIndexAsTag_walk(t *testing.T) {
Tags: map[string]string{
"myfield1": "instance",
"myfield4": "bar",
- //"index": "10",
+ // "index": "10",
},
Fields: map[string]interface{}{
"myfield2": 10,
@@ -221,7 +221,7 @@ func TestTableJoinNoIndexAsTag_walk(t *testing.T) {
rtr2 := RTableRow{
Tags: map[string]string{
"myfield1": "instance2",
- //"index": "11",
+ // "index": "11",
},
Fields: map[string]interface{}{
"myfield2": 20,
@@ -232,7 +232,7 @@ func TestTableJoinNoIndexAsTag_walk(t *testing.T) {
rtr3 := RTableRow{
Tags: map[string]string{
"myfield1": "instance3",
- //"index": "12",
+ // "index": "12",
},
Fields: map[string]interface{}{
"myfield2": 20,


@@ -560,7 +560,7 @@ func TestTableJoinNoIndexAsTag_walkGosmi(t *testing.T) {
Tags: map[string]string{
"myfield1": "instance",
"myfield4": "bar",
- //"index": "10",
+ // "index": "10",
},
Fields: map[string]interface{}{
"myfield2": 10,
@@ -571,7 +571,7 @@ func TestTableJoinNoIndexAsTag_walkGosmi(t *testing.T) {
rtr2 := RTableRow{
Tags: map[string]string{
"myfield1": "instance2",
- //"index": "11",
+ // "index": "11",
},
Fields: map[string]interface{}{
"myfield2": 20,
@@ -582,7 +582,7 @@ func TestTableJoinNoIndexAsTag_walkGosmi(t *testing.T) {
rtr3 := RTableRow{
Tags: map[string]string{
"myfield1": "instance3",
- //"index": "12",
+ // "index": "12",
},
Fields: map[string]interface{}{
"myfield2": 20,


@@ -15,7 +15,7 @@ import (
// We interact through an interface so we can mock it out in tests.
type Connection interface {
Host() string
- //BulkWalkAll(string) ([]gosnmp.SnmpPDU, error)
+ // BulkWalkAll(string) ([]gosnmp.SnmpPDU, error)
Walk(string, gosnmp.WalkFunc) error
Get(oids []string) (*gosnmp.SnmpPacket, error)
Reconnect() error


@@ -81,7 +81,7 @@ func TestRestrictedEventLogIntegration(t *testing.T) {
}
require.NoError(t, SetupLogging(config))
- //separate previous log messages by small delay
+ // separate previous log messages by small delay
time.Sleep(time.Second)
now := time.Now()
log.Println("I! Info message")


@@ -42,7 +42,7 @@ func BenchmarkRunningOutputAddWrite(b *testing.B) {
for n := 0; n < b.N; n++ {
ro.AddMetric(testutil.TestMetric(101, "metric1"))
- ro.Write() //nolint: errcheck // skip checking err for benchmark tests
+ ro.Write() //nolint:errcheck // skip checking err for benchmark tests
}
}
@@ -58,7 +58,7 @@ func BenchmarkRunningOutputAddWriteEvery100(b *testing.B) {
for n := 0; n < b.N; n++ {
ro.AddMetric(testutil.TestMetric(101, "metric1"))
if n%100 == 0 {
- ro.Write() //nolint: errcheck // skip checking err for benchmark tests
+ ro.Write() //nolint:errcheck // skip checking err for benchmark tests
}
}
}


@@ -60,9 +60,9 @@ type basicstats struct {
rate float64
interval time.Duration
last float64
- M2 float64 //intermediate value for variance/stdev
- PREVIOUS float64 //intermediate value for diff
- TIME time.Time //intermediate value for rate
+ M2 float64 // intermediate value for variance/stdev
+ PREVIOUS float64 // intermediate value for diff
+ TIME time.Time // intermediate value for rate
}
func (*BasicStats) SampleConfig() string {
@@ -119,40 +119,40 @@ func (b *BasicStats) Add(in telegraf.Metric) {
}
tmp := b.cache[id].fields[field.Key]
- //https://en.m.wikipedia.org/wiki/Algorithms_for_calculating_variance
- //variable initialization
+ // https://en.m.wikipedia.org/wiki/Algorithms_for_calculating_variance
+ // variable initialization
x := fv
mean := tmp.mean
m2 := tmp.M2
- //counter compute
+ // counter compute
n := tmp.count + 1
tmp.count = n
- //mean compute
+ // mean compute
delta := x - mean
mean = mean + delta/n
tmp.mean = mean
- //variance/stdev compute
+ // variance/stdev compute
m2 = m2 + delta*(x-mean)
tmp.M2 = m2
- //max/min compute
+ // max/min compute
if fv < tmp.min {
tmp.min = fv
} else if fv > tmp.max {
tmp.max = fv
}
- //sum compute
+ // sum compute
tmp.sum += fv
- //diff compute
+ // diff compute
tmp.diff = fv - tmp.PREVIOUS
- //interval compute
+ // interval compute
tmp.interval = in.Time().Sub(tmp.TIME)
- //rate compute
+ // rate compute
if !in.Time().Equal(tmp.TIME) {
tmp.rate = tmp.diff / tmp.interval.Seconds()
}
- //last compute
+ // last compute
tmp.last = fv
- //store final data
+ // store final data
b.cache[id].fields[field.Key] = tmp
}
}
@@ -182,7 +182,7 @@ func (b *BasicStats) Push(acc telegraf.Accumulator) {
fields[k+"_last"] = v.last
}
- //v.count always >=1
+ // v.count always >=1
if v.count > 1 {
variance := v.M2 / (v.count - 1)
@@ -211,7 +211,7 @@ func (b *BasicStats) Push(acc telegraf.Accumulator) {
fields[k+"_interval"] = v.interval.Nanoseconds()
}
}
- //if count == 1 StdDev = infinite => so I won't send data
+ // if count == 1 StdDev = infinite => so I won't send data
}
if len(fields) > 0 {


@@ -61,39 +61,39 @@ func TestBasicStatsWithPeriod(t *testing.T) {
minmax.Push(&acc)
expectedFields := map[string]interface{}{
- "a_count": float64(2), //a
+ "a_count": float64(2), // a
"a_max": float64(1),
"a_min": float64(1),
"a_mean": float64(1),
"a_stdev": float64(0),
"a_s2": float64(0),
- "b_count": float64(2), //b
+ "b_count": float64(2), // b
"b_max": float64(3),
"b_min": float64(1),
"b_mean": float64(2),
"b_s2": float64(2),
"b_stdev": math.Sqrt(2),
- "c_count": float64(2), //c
+ "c_count": float64(2), // c
"c_max": float64(4),
"c_min": float64(2),
"c_mean": float64(3),
"c_s2": float64(2),
"c_stdev": math.Sqrt(2),
- "d_count": float64(2), //d
+ "d_count": float64(2), // d
"d_max": float64(6),
"d_min": float64(2),
"d_mean": float64(4),
"d_s2": float64(8),
"d_stdev": math.Sqrt(8),
- "e_count": float64(1), //e
+ "e_count": float64(1), // e
"e_max": float64(200),
"e_min": float64(200),
"e_mean": float64(200),
- "f_count": float64(1), //f
+ "f_count": float64(1), // f
"f_max": float64(200),
"f_min": float64(200),
"f_mean": float64(200),
- "g_count": float64(2), //g
+ "g_count": float64(2), // g
"g_max": float64(3),
"g_min": float64(1),
"g_mean": float64(2),
@@ -118,27 +118,27 @@ func TestBasicStatsDifferentPeriods(t *testing.T) {
minmax.Add(m1)
minmax.Push(&acc)
expectedFields := map[string]interface{}{
- "a_count": float64(1), //a
+ "a_count": float64(1), // a
"a_max": float64(1),
"a_min": float64(1),
"a_mean": float64(1),
"a_last": float64(1),
- "b_count": float64(1), //b
+ "b_count": float64(1), // b
"b_max": float64(1),
"b_min": float64(1),
"b_mean": float64(1),
"b_last": float64(1),
- "c_count": float64(1), //c
+ "c_count": float64(1), // c
"c_max": float64(2),
"c_min": float64(2),
"c_mean": float64(2),
"c_last": float64(2),
- "d_count": float64(1), //d
+ "d_count": float64(1), // d
"d_max": float64(2),
"d_min": float64(2),
"d_mean": float64(2),
"d_last": float64(2),
- "g_count": float64(1), //g
+ "g_count": float64(1), // g
"g_max": float64(3),
"g_min": float64(3),
"g_mean": float64(3),
@@ -154,37 +154,37 @@ func TestBasicStatsDifferentPeriods(t *testing.T) {
minmax.Add(m2)
minmax.Push(&acc)
expectedFields = map[string]interface{}{
- "a_count": float64(1), //a
+ "a_count": float64(1), // a
"a_max": float64(1),
"a_min": float64(1),
"a_mean": float64(1),
"a_last": float64(1),
- "b_count": float64(1), //b
+ "b_count": float64(1), // b
"b_max": float64(3),
"b_min": float64(3),
"b_mean": float64(3),
"b_last": float64(3),
- "c_count": float64(1), //c
+ "c_count": float64(1), // c
"c_max": float64(4),
"c_min": float64(4),
"c_mean": float64(4),
"c_last": float64(4),
- "d_count": float64(1), //d
+ "d_count": float64(1), // d
"d_max": float64(6),
"d_min": float64(6),
"d_mean": float64(6),
"d_last": float64(6),
- "e_count": float64(1), //e
+ "e_count": float64(1), // e
"e_max": float64(200),
"e_min": float64(200),
"e_mean": float64(200),
"e_last": float64(200),
- "f_count": float64(1), //f
+ "f_count": float64(1), // f
"f_max": float64(200),
"f_min": float64(200),
"f_mean": float64(200),
"f_last": float64(200),
- "g_count": float64(1), //g
+ "g_count": float64(1), // g
"g_max": float64(1),
"g_min": float64(1),
"g_mean": float64(1),
@@ -455,19 +455,19 @@ func TestBasicStatsWithMinAndMax(t *testing.T) {
aggregator.Push(&acc)
expectedFields := map[string]interface{}{
- "a_max": float64(1), //a
+ "a_max": float64(1), // a
"a_min": float64(1),
- "b_max": float64(3), //b
+ "b_max": float64(3), // b
"b_min": float64(1),
- "c_max": float64(4), //c
+ "c_max": float64(4), // c
"c_min": float64(2),
- "d_max": float64(6), //d
+ "d_max": float64(6), // d
"d_min": float64(2),
- "e_max": float64(200), //e
+ "e_max": float64(200), // e
"e_min": float64(200),
- "f_max": float64(200), //f
+ "f_max": float64(200), // f
"f_min": float64(200),
- "g_max": float64(3), //g
+ "g_max": float64(3), // g
"g_min": float64(1),
}
expectedTags := map[string]string{
@@ -637,7 +637,7 @@ func TestBasicStatsWithAllStats(t *testing.T) {
minmax.Push(&acc)
expectedFields := map[string]interface{}{
- "a_count": float64(2), //a
+ "a_count": float64(2), // a
"a_max": float64(1),
"a_min": float64(1),
"a_mean": float64(1),
@@ -645,7 +645,7 @@ func TestBasicStatsWithAllStats(t *testing.T) {
"a_s2": float64(0),
"a_sum": float64(2),
"a_last": float64(1),
- "b_count": float64(2), //b
+ "b_count": float64(2), // b
"b_max": float64(3),
"b_min": float64(1),
"b_mean": float64(2),
@@ -653,7 +653,7 @@ func TestBasicStatsWithAllStats(t *testing.T) {
"b_sum": float64(4),
"b_last": float64(3),
"b_stdev": math.Sqrt(2),
- "c_count": float64(2), //c
+ "c_count": float64(2), // c
"c_max": float64(4),
"c_min": float64(2),
"c_mean": float64(3),
@@ -661,7 +661,7 @@ func TestBasicStatsWithAllStats(t *testing.T) {
"c_stdev": math.Sqrt(2),
"c_sum": float64(6),
"c_last": float64(4),
- "d_count": float64(2), //d
+ "d_count": float64(2), // d
"d_max": float64(6),
"d_min": float64(2),
"d_mean": float64(4),
@@ -669,19 +669,19 @@ func TestBasicStatsWithAllStats(t *testing.T) {
"d_stdev": math.Sqrt(8),
"d_sum": float64(8),
"d_last": float64(6),
- "e_count": float64(1), //e
+ "e_count": float64(1), // e
"e_max": float64(200),
"e_min": float64(200),
"e_mean": float64(200),
"e_sum": float64(200),
"e_last": float64(200),
- "f_count": float64(1), //f
+ "f_count": float64(1), // f
"f_max": float64(200),
"f_min": float64(200),
"f_mean": float64(200),
"f_sum": float64(200),
"f_last": float64(200),
- "g_count": float64(2), //g
+ "g_count": float64(2), // g
"g_max": float64(3),
"g_min": float64(1),
"g_mean": float64(2),


@@ -133,7 +133,7 @@ func (c *Client) read(requests []ReadRequest) ([]ReadResponse, error) {
req, err := http.NewRequest("POST", requestURL, bytes.NewBuffer(requestBody))
if err != nil {
- //err is not contained in returned error - it may contain sensitive data (password) which should not be logged
+ // err is not contained in returned error - it may contain sensitive data (password) which should not be logged
return nil, fmt.Errorf("unable to create new request for: %q", c.URL)
}


@@ -20,8 +20,8 @@ const (
type Parser struct {
DefaultTags map[string]string `toml:"-"`
- //whether or not to split multi value metric into multiple metrics
- //default value is split
+ // whether or not to split multi value metric into multiple metrics
+ // default value is split
ParseMultiValue string `toml:"collectd_parse_multivalue"`
popts network.ParseOpts
@@ -114,7 +114,7 @@ func (p *Parser) unmarshalValueList(vl *api.ValueList) []telegraf.Metric {
var metrics []telegraf.Metric
var multiValue = p.ParseMultiValue
- //set multiValue to default "split" if nothing is specified
+ // set multiValue to default "split" if nothing is specified
if multiValue == "" {
multiValue = "split"
}


@@ -304,7 +304,7 @@ func parseCSV(p *Parser, r io.Reader) ([]telegraf.Metric, error) {
// Ignore header lines if columns are named
continue
}
- //concatenate header names
+ // concatenate header names
for i, name := range header {
if p.TrimSpace {
name = strings.Trim(name, " ")


@@ -266,7 +266,7 @@ func TestValueConversion(t *testing.T) {
expectedMetric := metric.New("test_value", expectedTags, expectedFields, time.Unix(0, 0))
returnedMetric := metric.New(metrics[0].Name(), metrics[0].Tags(), metrics[0].Fields(), time.Unix(0, 0))
- //deep equal fields
+ // deep equal fields
require.Equal(t, expectedMetric.Fields(), returnedMetric.Fields())
// Test explicit type conversion.
@@ -277,7 +277,7 @@ func TestValueConversion(t *testing.T) {
returnedMetric = metric.New(metrics[0].Name(), metrics[0].Tags(), metrics[0].Fields(), time.Unix(0, 0))
- //deep equal fields
+ // deep equal fields
require.Equal(t, expectedMetric.Fields(), returnedMetric.Fields())
}


@@ -221,7 +221,7 @@ func (p *Parser) ParseLine(line string) (telegraf.Metric, error) {
fields := make(map[string]interface{})
tags := make(map[string]string)
- //add default tags
+ // add default tags
for k, v := range p.DefaultTags {
tags[k] = v
}


@@ -127,7 +127,7 @@ func (h *MetricHandler) SetTimestamp(tm []byte) error {
return err
}
- //time precision is overloaded to mean time unit here
+ // time precision is overloaded to mean time unit here
ns := v * int64(h.timePrecision)
h.metric.SetTime(time.Unix(0, ns))
return nil


@@ -47,7 +47,7 @@ func (p *Parser) Parse(b []byte) ([]telegraf.Metric, error) {
continue
}
- //type conversions
+ // type conversions
value := string(decoder.Value())
if p.tagFilter != nil && p.tagFilter.Match(string(decoder.Key())) {
tags[string(decoder.Key())] = value


@@ -31,7 +31,7 @@ type loopedParser struct {
}
func (ep *nameParser) parse(p *PointParser, pt *Point) error {
- //Valid characters are: a-z, A-Z, 0-9, hyphen ("-"), underscore ("_"), dot (".").
+ // Valid characters are: a-z, A-Z, 0-9, hyphen ("-"), underscore ("_"), dot (".").
// Forward slash ("/") and comma (",") are allowed if metricName is enclosed in double quotes.
// Delta (U+2206) is allowed as the first character of the
// metricName


@@ -211,7 +211,7 @@ func (d *IfName) getMap(agent string) (entry nameMap, age time.Duration, err err
d.lock.Lock()
if err != nil {
- //snmp failure. signal without saving to cache
+ // snmp failure. signal without saving to cache
close(sig)
delete(d.sigs, agent)
@@ -243,8 +243,8 @@ func (d *IfName) getMapRemoteNoMock(agent string) (nameMap, error) {
return nil, fmt.Errorf("connecting when fetching interface names: %w", err)
}
- //try ifXtable and ifName first. if that fails, fall back to
- //ifTable and ifDescr
+ // try ifXtable and ifName first. if that fails, fall back to
+ // ifTable and ifDescr
var m nameMap
if m, err = d.buildMap(gs, d.ifXTable); err == nil {
return m, nil
@@ -283,7 +283,7 @@ func (d *IfName) makeTable(oid string) (*snmp.Table, error) {
err = tab.Init(nil)
if err != nil {
- //Init already wraps
+ // Init already wraps
return nil, err
}
@@ -295,7 +295,7 @@ func (d *IfName) buildMap(gs snmp.GosnmpWrapper, tab *snmp.Table) (nameMap, erro
rtab, err := tab.Build(gs, true)
if err != nil {
- //Build already wraps
+ // Build already wraps
return nil, err
}
@@ -307,8 +307,8 @@ func (d *IfName) buildMap(gs snmp.GosnmpWrapper, tab *snmp.Table) (nameMap, erro
for _, v := range rtab.Rows {
iStr, ok := v.Tags["index"]
if !ok {
- //should always have an index tag because the table should
- //always have IndexAsTag true
+ // should always have an index tag because the table should
+ // always have IndexAsTag true
return nil, errors.New("no index tag")
}
i, err := strconv.ParseUint(iStr, 10, 64)


@@ -150,7 +150,7 @@ func TestTopkAggregatorsSmokeTests(t *testing.T) {
aggregators := []string{"mean", "sum", "max", "min"}
- //The answer is equal to the original set for these particular scenarios
+ // The answer is equal to the original set for these particular scenarios
input := MetricsSet1
answer := MetricsSet1


@@ -17,8 +17,8 @@ import (
const DefaultTemplate = "host.tags.measurement.field"
var (
- compatibleAllowedCharsName = regexp.MustCompile(`[^ "-:\<>-\]_a-~\p{L}]`) //nolint: gocritic // valid range for use-case
- compatibleAllowedCharsValue = regexp.MustCompile(`[^ -:<-~\p{L}]`) //nolint: gocritic // valid range for use-case
+ compatibleAllowedCharsName = regexp.MustCompile(`[^ "-:\<>-\]_a-~\p{L}]`) //nolint:gocritic // valid range for use-case
+ compatibleAllowedCharsValue = regexp.MustCompile(`[^ -:<-~\p{L}]`) //nolint:gocritic // valid range for use-case
compatibleLeadingTildeDrop = regexp.MustCompile(`^[~]*(.*)`)
hyphenChars = strings.NewReplacer(
"/", "-",
@@ -100,7 +100,7 @@ func (s *GraphiteSerializer) Serialize(metric telegraf.Metric) ([]byte, error) {
metricString := fmt.Sprintf("%s %s %d\n",
// insert "field" section of template
bucket,
- //bucket,
+ // bucket,
fieldValue,
timestamp)
point := []byte(metricString)


@@ -37,8 +37,8 @@ func (t *T) assertNodef(n ast.Node, format string, args ...interface{}) {
}
func (t *T) assertLinef(line int, format string, args ...interface{}) {
- //this func only exists to make the call stack to t.printRule the same depth
- //as when called through assertf
+ // this func only exists to make the call stack to t.printRule the same depth
+ // as when called through assertf
t.assertLine2f(line, format, args...)
}


@@ -97,7 +97,7 @@ func noLongLinesInParagraphs(threshold int) func(*T, ast.Node) error {
var p *ast.Paragraph
var ok bool
if p, ok = n.(*ast.Paragraph); !ok {
- continue //only looking for paragraphs
+ continue // only looking for paragraphs
}
segs := p.Lines()


@@ -59,19 +59,19 @@ func findHashes(body io.Reader, version string) (map[string]string, error) {
for {
tokenType := htmlTokens.Next()
- //if it's an error token, we either reached
- //the end of the file, or the HTML was malformed
+ // if it's an error token, we either reached
+ // the end of the file, or the HTML was malformed
if tokenType == html.ErrorToken {
err := htmlTokens.Err()
if errors.Is(err, io.EOF) {
- //end of the file, break out of the loop
+ // end of the file, break out of the loop
break
}
return nil, htmlTokens.Err()
}
if tokenType == html.StartTagToken {
- //get the token
+ // get the token
token := htmlTokens.Token()
if "table" == token.Data && len(token.Attr) == 1 && token.Attr[0].Val == "downloadtable" {
insideDownloadTable = true
@@ -88,9 +88,9 @@ func findHashes(body io.Reader, version string) (map[string]string, error) {
}
if currentRow != "" && token.Data == "tt" {
- //the next token should be the page title
+ // the next token should be the page title
tokenType = htmlTokens.Next()
- //just make sure it's actually a text token
+ // just make sure it's actually a text token
if tokenType == html.TextToken {
hashes[currentRow] = htmlTokens.Token().Data
currentRow = ""