fix(inputs.x509_cert): Fix Windows path handling (#12629)
parent f206c27d89
commit ff89b7778b
plugins/inputs/x509_cert/x509_cert.go

@@ -16,6 +16,7 @@ import (
 	"net/url"
 	"os"
 	"path/filepath"
+	"regexp"
 	"strings"
 	"time"
@@ -32,6 +33,9 @@ import (
 //go:embed sample.conf
 var sampleConfig string
 
+// Regexp for handling file URIs containing a drive letter and leading slash
+var reDriveLetter = regexp.MustCompile(`^/([a-zA-Z]:/)`)
+
 // X509Cert holds the configuration of the plugin.
 type X509Cert struct {
 	Sources []string `toml:"sources"`
@@ -86,12 +90,9 @@ func (c *X509Cert) Init() error {
 // Gather adds metrics into the accumulator.
 func (c *X509Cert) Gather(acc telegraf.Accumulator) error {
 	now := time.Now()
-	collectedUrls, err := c.collectCertURLs()
-	if err != nil {
-		acc.AddError(fmt.Errorf("getting some certificates failed: %w", err))
-	}
-
-	for _, location := range append(c.locations, collectedUrls...) {
+	collectedUrls := append(c.locations, c.collectCertURLs()...)
+
+	for _, location := range collectedUrls {
 		certs, err := c.getCert(location, time.Duration(c.Timeout))
 		if err != nil {
 			acc.AddError(fmt.Errorf("cannot get SSL cert '%s': %s", location, err.Error()))
@@ -153,9 +154,11 @@ func (c *X509Cert) Gather(acc telegraf.Accumulator) error {
 
 func (c *X509Cert) sourcesToURLs() error {
 	for _, source := range c.Sources {
-		if strings.HasPrefix(source, "file://") ||
-			strings.HasPrefix(source, "/") {
+		if strings.HasPrefix(source, "file://") || strings.HasPrefix(source, "/") {
 			source = filepath.ToSlash(strings.TrimPrefix(source, "file://"))
+			// Removing leading slash in Windows path containing a drive-letter
+			// like "file:///C:/Windows/..."
+			source = reDriveLetter.ReplaceAllString(source, "$1")
 			g, err := globpath.Compile(source)
 			if err != nil {
 				return fmt.Errorf("could not compile glob %v: %v", source, err)
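For reference, a minimal sketch of what this normalization sequence does to typical sources before they reach globpath.Compile. The input values below are illustrative only and are not taken from the change:

package main

import (
	"fmt"
	"path/filepath"
	"regexp"
	"strings"
)

// Same pattern the plugin now defines for drive-letter file URIs.
var reDriveLetter = regexp.MustCompile(`^/([a-zA-Z]:/)`)

func main() {
	sources := []string{
		`file://C:\windows\temp\test.pem`,
		"file:///C:/windows/temp/test.pem",
		"file:///etc/ssl/cert.pem",
	}
	for _, source := range sources {
		// Mirror the plugin's steps: strip the scheme, normalize separators
		// (filepath.ToSlash only rewrites backslashes when running on Windows),
		// then drop the spurious leading slash in front of a drive letter.
		s := filepath.ToSlash(strings.TrimPrefix(source, "file://"))
		s = reDriveLetter.ReplaceAllString(s, "$1")
		fmt.Println(s)
	}
	// On Windows this prints:
	//   C:/windows/temp/test.pem
	//   C:/windows/temp/test.pem
	//   /etc/ssl/cert.pem
}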
@@ -381,26 +384,22 @@ func getTags(cert *x509.Certificate, location string) map[string]string {
 	return tags
 }
 
-func (c *X509Cert) collectCertURLs() ([]*url.URL, error) {
+func (c *X509Cert) collectCertURLs() []*url.URL {
 	var urls []*url.URL
 
 	for _, path := range c.globpaths {
 		files := path.Match()
 		if len(files) <= 0 {
-			c.Log.Errorf("could not find file: %v", path)
+			c.Log.Errorf("could not find file: %v", path.GetRoots())
 			continue
 		}
 		for _, file := range files {
-			file = "file://" + file
-			u, err := url.Parse(file)
-			if err != nil {
-				return urls, fmt.Errorf("failed to parse cert location - %s", err.Error())
-			}
-			urls = append(urls, u)
+			fn := filepath.ToSlash(file)
+			urls = append(urls, &url.URL{Scheme: "file", Path: fn})
 		}
 	}
 
-	return urls, nil
+	return urls
 }
 
 func init() {
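A small sketch of why collectCertURLs now builds the *url.URL directly instead of prepending "file://" and parsing: a glob match on Windows contains backslashes and a drive-letter colon, which net/url treats as an invalid authority. The path value below is invented for illustration:

package main

import (
	"fmt"
	"net/url"
	"path/filepath"
)

func main() {
	// A path like what a glob match returns on Windows (illustrative value).
	file := `C:\windows\temp\test.pem`

	// Old approach: prepend "file://" and parse. The drive-letter colon and
	// backslashes end up in the authority component, so parsing fails.
	if _, err := url.Parse("file://" + file); err != nil {
		fmt.Println("url.Parse rejects it:", err)
	}

	// New approach: build the URL directly, no parsing involved; on Windows
	// filepath.ToSlash has already turned the backslashes into forward slashes.
	u := &url.URL{Scheme: "file", Path: filepath.ToSlash(file)}
	fmt.Println(u.String())
}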
plugins/inputs/x509_cert/x509_cert_test.go

@@ -388,14 +388,42 @@ func TestSourcesToURLs(t *testing.T) {
 			"tcp://influxdata.com:443",
 			"smtp://influxdata.com:25",
 			"file:///dummy_test_path_file.pem",
+			"file:///windows/temp/test.pem",
+			`file://C:\windows\temp\test.pem`,
+			`file:///C:/windows/temp/test.pem`,
 			"/tmp/dummy_test_path_glob*.pem",
 		},
 		Log: testutil.Logger{},
 	}
 	require.NoError(t, m.Init())
 
-	require.Equal(t, len(m.globpaths), 2)
+	expected := []string{
+		"https://www.influxdata.com:443",
+		"tcp://influxdata.com:443",
+		"smtp://influxdata.com:25",
+	}
+
+	expectedPaths := []string{
+		"/dummy_test_path_file.pem",
+		"/windows/temp/test.pem",
+		"C:\\windows\\temp\\test.pem",
+		"C:/windows/temp/test.pem",
+	}
+
+	for _, p := range expectedPaths {
+		expected = append(expected, filepath.FromSlash(p))
+	}
+
+	actual := make([]string, 0, len(m.globpaths)+len(m.locations))
+	for _, p := range m.globpaths {
+		actual = append(actual, p.GetRoots()...)
+	}
+	for _, p := range m.locations {
+		actual = append(actual, p.String())
+	}
+
+	require.Equal(t, len(m.globpaths), 5)
 	require.Equal(t, len(m.locations), 3)
+	require.ElementsMatch(t, expected, actual)
 }
 
 func TestServerName(t *testing.T) {
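The test wraps its expected paths in filepath.FromSlash so the assertion holds regardless of the host platform's separator. A tiny sketch of that conversion, using an illustrative value:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// filepath.FromSlash swaps '/' for the host OS's separator, so the same
	// literal yields the platform-appropriate expectation.
	fmt.Println(filepath.FromSlash("/windows/temp/test.pem"))
	// Linux/macOS: /windows/temp/test.pem
	// Windows:     \windows\temp\test.pem
}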