chore(tools): Add metadata check to README linter (#16587)

Sven Rebhan authored on 2025-03-28 20:55:27 +01:00, committed by GitHub
parent b5fe07de9a
commit 04de888cdd
3 changed files with 261 additions and 7 deletions
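
In short: the new metadata rule requires every plugin README to carry a metadata paragraph in its first section (before the second heading), kept as its own paragraph surrounded by blank lines. Each entry sits on its own line and starts with an icon: ⭐ introduction version, 🚩 deprecation version, 🔥 removal version, 🏷️ tags, 💻 supported operating systems, expected in that order. An illustrative block that would pass the new checks (the version and tag values here are made up for the example, not taken from the commit):

    ⭐ Telegraf v1.34.0
    🏷️ system, hardware
    💻 linux, windows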

File 1 of 3

@@ -15,6 +15,7 @@ type T struct {
     markdown       []byte
     newlineOffsets []int
     sourceFlag     bool
+    pluginType     plugin
     fails          int
 }
@@ -36,6 +37,13 @@ func (t *T) assertNodef(n ast.Node, format string, args ...interface{}) {
     t.printFailedAssertf(n, format, args...)
 }

+func (t *T) assertNodeLineOffsetf(n ast.Node, offset int, format string, args ...interface{}) {
+    t.printFileOffset(n, offset)
+    fmt.Printf(format+"\n", args...)
+    t.printRule(3)
+    t.fails++
+}
+
 func (t *T) assertLinef(line int, format string, args ...interface{}) {
     // this func only exists to make the call stack to t.printRule the same depth
     // as when called through assertf
@@ -79,14 +87,17 @@ func (t *T) line(offset int) int {
 }

 func (t *T) printFile(n ast.Node) {
+    t.printFileOffset(n, 0)
+}
+
+func (t *T) printFileOffset(n ast.Node, offset int) {
     lines := n.Lines()
     if lines == nil || lines.Len() == 0 {
         t.printFileLine(0)
         return
     }
-    offset := lines.At(0).Start
-    line := t.line(offset)
-    t.printFileLine(line)
+    line := t.line(lines.At(0).Start)
+    t.printFileLine(line + offset)
 }

 func (t *T) printFileLine(line int) {
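
The new printFileOffset/assertNodeLineOffsetf pair lets a rule point at a specific line inside a multi-line block: the node's first line is resolved as before and the caller-supplied offset is added to it. t.line itself is not shown in this diff; a minimal sketch of how such a byte-offset-to-line lookup over the recorded newlineOffsets could work (lineOf is a made-up name, and the indexing convention of the real helper is an assumption):

    package main

    import (
        "fmt"
        "sort"
    )

    // lineOf mirrors what t.line presumably does: count the newlines that come
    // before the given byte offset; that count is the zero-based line index
    // (the real helper may additionally convert to one-based numbering).
    func lineOf(newlineOffsets []int, byteOffset int) int {
        return sort.SearchInts(newlineOffsets, byteOffset)
    }

    func main() {
        // Newlines at byte offsets 10 and 25: offset 12 falls on the second line.
        fmt.Println(lineOf([]int{10, 25}, 12)) // 1
    }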

File 2 of 3

@@ -16,6 +16,7 @@ import (
 func main() {
     sourceFlag := flag.Bool("source", false, "include location of linter code that failed assertion")
+    quiet := flag.Bool("quiet", false, "only print failed assertion but no pass information")

     flag.Parse()
@@ -23,7 +24,7 @@ func main() {
     pass := true
     for _, filename := range flag.Args() {
         var filePass bool
-        filePass, err = checkFile(filename, guessPluginType(filename), *sourceFlag)
+        filePass, err = checkFile(filename, guessPluginType(filename), *sourceFlag, *quiet)
         if err != nil {
             panic(err)
         }
@@ -46,9 +47,10 @@ func init() {
     // Rules for all plugin types
     all := []ruleFunc{
         firstSection,
-        noLongLinesInParagraphs(80),
+        metadata,
         configSection,
         relativeTelegrafLinks,
+        noLongLinesInParagraphs(80),
     }
     for i := pluginInput; i <= pluginParser; i++ {
         rules[i] = all
@@ -85,7 +87,7 @@ func init() {
     }...)
 }

-func checkFile(filename string, pluginType plugin, sourceFlag bool) (bool, error) {
+func checkFile(filename string, pluginType plugin, sourceFlag, quiet bool) (bool, error) {
     md, err := os.ReadFile(filename)
     if err != nil {
         return false, err
@@ -131,6 +133,7 @@ func checkFile(filename string, pluginType plugin, sourceFlag bool) (bool, error) {
         markdown:       md,
         newlineOffsets: newlineOffsets,
         sourceFlag:     sourceFlag,
+        pluginType:     pluginType,
     }
     for _, rule := range rules {
         err = rule(&tester, root)
@@ -138,7 +141,9 @@ func checkFile(filename string, pluginType plugin, sourceFlag bool) (bool, error) {
             return false, err
         }
     }
-    tester.printPassFail()
+    if !quiet {
+        tester.printPassFail()
+    }

     return tester.pass(), nil
 }
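
With the new flag wired through to checkFile, the per-file pass/fail summary is skipped and only the individual failed assertions are printed. Assuming the linter lives under tools/readme_linter as in the Telegraf repository layout, an invocation could look like this (the paths are examples, not taken from this commit):

    go run ./tools/readme_linter -quiet plugins/inputs/*/README.md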

File 3 of 3

@@ -1,12 +1,86 @@
 package main

 import (
+    "bufio"
     "bytes"
+    "regexp"
+    "slices"
     "strings"

     "github.com/yuin/goldmark/ast"
 )

+var (
+    // Setup regular expression for checking versions and valid choices
+    // Matches HTML comments (e.g., <!-- some comment -->) surrounded by optional whitespace
+    metaComment = regexp.MustCompile(`(?:\s*<!-- .* -->\s*)`)
+    // Matches Telegraf versioning format (e.g., "Telegraf v1.2.3")
+    metaVersion = regexp.MustCompile(`^Telegraf v\d+\.\d+\.\d+(?:\s+<!-- .* -->\s*)?$`)
+
+    metaTags = map[plugin][]string{
+        pluginInput: {
+            "applications",
+            "cloud",
+            "containers",
+            "datastore",
+            "hardware",
+            "iot",
+            "logging",
+            "messaging",
+            "network",
+            "server",
+            "system",
+            "testing",
+            "web",
+        },
+        pluginOutput: {
+            "applications",
+            "cloud",
+            "containers",
+            "datastore",
+            "hardware",
+            "iot",
+            "logging",
+            "messaging",
+            "network",
+            "server",
+            "system",
+            "testing",
+            "web",
+        },
+        pluginAggregator: {
+            "math",
+            "sampling",
+            "statistics",
+            "transformation",
+        },
+        pluginProcessor: {
+            "math",
+            "sampling",
+            "statistics",
+            "transformation",
+        },
+    }
+
+    metaOSes = []string{
+        "all",
+        "freebsd",
+        "linux",
+        "macos",
+        "solaris",
+        "windows",
+    }
+
+    metaOrder = []string{
+        "introduction version",
+        "deprecation version",
+        "removal version",
+        "tags",
+        "operating systems",
+    }
+)
+
 // The first section is a heading with plugin name and paragraph short
 // description
 func firstSection(t *T, root ast.Node) error {
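
The two regular expressions above drive the version and tag checks in the metadata rule added below. A small standalone illustration (not part of the commit) of what they accept:

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        metaComment := regexp.MustCompile(`(?:\s*<!-- .* -->\s*)`)
        metaVersion := regexp.MustCompile(`^Telegraf v\d+\.\d+\.\d+(?:\s+<!-- .* -->\s*)?$`)

        fmt.Println(metaVersion.MatchString("Telegraf v1.30.0"))                  // true
        fmt.Println(metaVersion.MatchString("Telegraf 1.30"))                     // false: "v" and patch level are required
        fmt.Println(metaVersion.MatchString("Telegraf v1.30.0 <!-- planned -->")) // true: a trailing HTML comment is allowed
        fmt.Println(metaComment.ReplaceAllString("system <!-- see docs -->", "")) // "system"
    }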
@@ -212,6 +286,170 @@ func relativeTelegrafLinks(t *T, root ast.Node) error {
     return nil
 }

+// Each plugin should have metadata for documentation generation
+func metadata(t *T, root ast.Node) error {
+    const icons string = "⭐🚩🔥🏷️💻"
+
+    n := root.FirstChild()
+    if n == nil {
+        t.assertf("no metadata section found")
+        return nil
+    }
+
+    // Advance to the first heading which should be the plugin header
+    for n != nil {
+        if _, ok := n.(*ast.Heading); ok {
+            t.assertHeadingLevel(1, n)
+            break
+        }
+        n = n.NextSibling()
+    }
+
+    // Get the description text and check for metadata
+    positions := make([]string, 0, 5)
+    for n != nil {
+        n = n.NextSibling()
+
+        // The next heading will end the initial section
+        if _, ok := n.(*ast.Heading); ok {
+            break
+        }
+
+        // Ignore everything that is not text
+        para, ok := n.(*ast.Paragraph)
+        if !ok {
+            continue
+        }
+
+        // Metadata should be separate paragraph with the items ordered.
+        var inMetadata bool
+        var counter int
+        scanner := bufio.NewScanner(bytes.NewBuffer(para.Lines().Value(t.markdown)))
+        for scanner.Scan() {
+            txt := scanner.Text()
+            if counter == 0 {
+                inMetadata = strings.ContainsAny(txt, icons)
+            }
+            counter++
+
+            // If we are not in a metadata section, we need to make sure we don't
+            // see any metadata in this text.
+            if !inMetadata {
+                if strings.ContainsAny(txt, icons) {
+                    t.assertNodeLineOffsetf(n, counter-1, "metadata found in section not surrounded by empty lines")
+                    return nil
+                }
+                continue
+            }
+
+            icon, remainder, found := strings.Cut(txt, " ")
+            if !found || !strings.Contains(icons, icon) {
+                t.assertNodeLineOffsetf(n, counter-1, "metadata line must start with a valid icon and a space")
+                continue
+            }
+            if strings.ContainsAny(remainder, icons) {
+                t.assertNodeLineOffsetf(n, counter-1, "each metadata entry must be on a separate line")
+                continue
+            }
+
+            // We are in a metadata section, so test for the correct structure
+            switch icon {
+            case "⭐":
+                if !metaVersion.MatchString(remainder) {
+                    t.assertNodeLineOffsetf(n, counter-1, "invalid introduction version format; has to be 'Telegraf vX.Y.Z'")
+                }
+                positions = append(positions, "introduction version")
+            case "🚩":
+                if !metaVersion.MatchString(remainder) {
+                    t.assertNodeLineOffsetf(n, counter-1, "invalid deprecation version format; has to be 'Telegraf vX.Y.Z'")
+                }
+                positions = append(positions, "deprecation version")
+            case "🔥":
+                if !metaVersion.MatchString(remainder) {
+                    t.assertNodeLineOffsetf(n, counter-1, "invalid removal version format; has to be 'Telegraf vX.Y.Z'")
+                }
+                positions = append(positions, "removal version")
+            case "🏷️":
+                validTags, found := metaTags[t.pluginType]
+                if !found {
+                    t.assertNodeLineOffsetf(n, counter-1, "no tags expected for plugin type")
+                    continue
+                }
+                for _, tag := range strings.Split(remainder, ",") {
+                    tag = metaComment.ReplaceAllString(tag, "")
+                    if !slices.Contains(validTags, strings.TrimSpace(tag)) {
+                        t.assertNodeLineOffsetf(n, counter-1, "unknown tag %q", tag)
+                    }
+                }
+                positions = append(positions, "tags")
+            case "💻":
+                for _, os := range strings.Split(remainder, ",") {
+                    os = metaComment.ReplaceAllString(os, "")
+                    if !slices.Contains(metaOSes, strings.TrimSpace(os)) {
+                        t.assertNodeLineOffsetf(n, counter-1, "unknown operating system %q", os)
+                    }
+                }
+                positions = append(positions, "operating systems")
+            default:
+                t.assertNodeLineOffsetf(n, counter-1, "invalid metadata icon")
+                continue
+            }
+        }
+    }
+
+    if len(positions) == 0 {
+        t.assertf("metadata is missing")
+        return nil
+    }
+
+    // Check for duplicate entries
+    seen := make(map[string]bool)
+    for _, p := range positions {
+        if seen[p] {
+            t.assertNodef(n, "duplicate metadata entry for %q", p)
+            return nil
+        }
+        seen[p] = true
+    }
+
+    // Remove the optional entries from the checklist
+    validOrder := append(make([]string, 0, len(metaOrder)), metaOrder...)
+    if !slices.Contains(positions, "deprecation version") && !slices.Contains(positions, "removal version") {
+        idx := slices.Index(validOrder, "deprecation version")
+        validOrder = slices.Delete(validOrder, idx, idx+1)
+        idx = slices.Index(validOrder, "removal version")
+        validOrder = slices.Delete(validOrder, idx, idx+1)
+    }
+    if _, found := metaTags[t.pluginType]; !found {
+        idx := slices.Index(validOrder, "tags")
+        validOrder = slices.Delete(validOrder, idx, idx+1)
+    }
+
+    // Check the order of the metadata entries and required entries
+    if len(validOrder) != len(positions) {
+        for _, v := range validOrder {
+            if !slices.Contains(positions, v) {
+                t.assertNodef(n, "metadata entry for %q is missing", v)
+            }
+        }
+        return nil
+    }
+    for i, v := range validOrder {
+        if v != positions[i] {
+            if i == 0 {
+                t.assertNodef(n, "%q has to be the first entry", v)
+            } else {
+                t.assertNodef(n, "%q has to follow %q", v, validOrder[i-1])
+            }
+            return nil
+        }
+    }
+
+    return nil
+}
+
 // To do: Check markdown files that aren't plugin readme files for paragraphs
 // with long lines
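
Stripped of the AST handling, the ordering logic at the end of the rule boils down to the following sketch (checkOrder and its signature are invented for illustration, not part of the commit): deprecation and removal versions are optional but only as a pair, tags are only expected for plugin types that define a tag list, and everything that remains must appear in the fixed order.

    package main

    import (
        "fmt"
        "slices"
    )

    func checkOrder(positions []string, hasTags bool) string {
        valid := []string{
            "introduction version",
            "deprecation version",
            "removal version",
            "tags",
            "operating systems",
        }
        // Deprecation and removal version are only required if either one is present.
        if !slices.Contains(positions, "deprecation version") && !slices.Contains(positions, "removal version") {
            valid = slices.Delete(valid, 1, 3)
        }
        // Tags are only expected for plugin types with a defined tag list.
        if !hasTags {
            idx := slices.Index(valid, "tags")
            valid = slices.Delete(valid, idx, idx+1)
        }
        // Report missing required entries first, then the first out-of-order one.
        if len(valid) != len(positions) {
            for _, v := range valid {
                if !slices.Contains(positions, v) {
                    return fmt.Sprintf("metadata entry for %q is missing", v)
                }
            }
            return "ok"
        }
        for i, v := range valid {
            if v != positions[i] {
                return fmt.Sprintf("%q is out of place", v)
            }
        }
        return "ok"
    }

    func main() {
        fmt.Println(checkOrder([]string{"introduction version", "tags", "operating systems"}, true)) // ok
        fmt.Println(checkOrder([]string{"tags", "introduction version", "operating systems"}, true)) // "introduction version" is out of place
    }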