feat(inputs.cloudwatch): Add support for cross account observability (#12448)

parent 7ced2606b2
commit 2010926e25
plugins/inputs/cloudwatch/README.md

@@ -50,7 +50,12 @@ See the [CONFIGURATION.md][CONFIGURATION.md] for more details.
   # role_session_name = ""
   # profile = ""
   # shared_credential_file = ""
 
+  ## If you are using CloudWatch cross-account observability, you can
+  ## set IncludeLinkedAccounts to true in a monitoring account
+  ## and collect metrics from the linked source accounts
+  # include_linked_accounts = false
+
   ## Endpoint to make request against, the correct endpoint is automatically
   ## determined and this option should only be set if you wish to override the
   ## default.
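For a quick illustration of what `include_linked_accounts` changes at the API level, here is a minimal sketch, not part of this commit, that issues the same kind of ListMetrics request the plugin now builds. It assumes aws-sdk-go-v2 and default-chain credentials for a CloudWatch monitoring account:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/aws/aws-sdk-go-v2/aws"
	awsconfig "github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/service/cloudwatch"
)

func main() {
	ctx := context.Background()

	// Assumes monitoring-account credentials are available via the default chain.
	cfg, err := awsconfig.LoadDefaultConfig(ctx)
	if err != nil {
		log.Fatal(err)
	}
	client := cloudwatch.NewFromConfig(cfg)

	// Mirrors the plugin's request: asking for linked-account metrics makes
	// ListMetrics also report the account that owns each metric.
	resp, err := client.ListMetrics(ctx, &cloudwatch.ListMetricsInput{
		Namespace:             aws.String("AWS/ELB"),
		IncludeLinkedAccounts: true,
	})
	if err != nil {
		log.Fatal(err)
	}

	// Metrics and OwningAccounts are index-aligned, which is what the plugin
	// relies on in fetchNamespaceMetrics (pagination is ignored in this sketch).
	for i, m := range resp.Metrics {
		fmt.Printf("%s/%s owned by %s\n", *m.Namespace, *m.MetricName, resp.OwningAccounts[i])
	}
}
```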
@@ -226,6 +231,8 @@ case](https://en.wikipedia.org/wiki/Snake_case)
 - All measurements have the following tags:
   - region (CloudWatch Region)
   - {dimension-name} (Cloudwatch Dimension value - one per metric dimension)
+- If `include_linked_accounts` is set to true, the following tag is also provided:
+  - account (The ID of the account where the metrics are located.)
 
 ## Troubleshooting
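As documented above, gathered points carry a new `account` tag when the option is enabled. The sketch below is purely illustrative (it builds a Telegraf metric by hand with the `metric` package; the tag and field values are borrowed from the test data added later in this commit):

```go
package main

import (
	"fmt"
	"time"

	"github.com/influxdata/telegraf/metric"
)

func main() {
	// Shape of a point emitted with include_linked_accounts = true.
	m := metric.New(
		"cloudwatch_aws_elb",
		map[string]string{
			"region":             "us-east-1",
			"load_balancer_name": "p-example1",
			"account":            "123456789012", // ID of the source account that owns the metric
		},
		map[string]interface{}{
			"latency_average": 0.2,
		},
		time.Now(),
	)
	fmt.Println(m.Tags()["account"])
}
```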
plugins/inputs/cloudwatch/cloudwatch.go

@@ -30,14 +30,6 @@ import (
 //go:embed sample.conf
 var sampleConfig string
 
-const (
-	StatisticAverage     = "Average"
-	StatisticMaximum     = "Maximum"
-	StatisticMinimum     = "Minimum"
-	StatisticSum         = "Sum"
-	StatisticSampleCount = "SampleCount"
-)
-
 // CloudWatch contains the configuration and cache for the cloudwatch plugin.
 type CloudWatch struct {
 	StatisticExclude []string `toml:"statistic_exclude"`
@@ -46,15 +38,16 @@ type CloudWatch struct {
 
 	internalProxy.HTTPProxy
 
 	Period                config.Duration `toml:"period"`
 	Delay                 config.Duration `toml:"delay"`
 	Namespace             string          `toml:"namespace" deprecated:"1.25.0;use 'namespaces' instead"`
 	Namespaces            []string        `toml:"namespaces"`
 	Metrics               []*Metric       `toml:"metrics"`
 	CacheTTL              config.Duration `toml:"cache_ttl"`
 	RateLimit             int             `toml:"ratelimit"`
 	RecentlyActive        string          `toml:"recently_active"`
 	BatchSize             int             `toml:"batch_size"`
+	IncludeLinkedAccounts bool            `toml:"include_linked_accounts"`
 
 	Log telegraf.Logger `toml:"-"`
 
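The new option rides on the plugin's existing toml tags. The following minimal sketch is illustrative only and uses the BurntSushi/toml decoder rather than Telegraf's actual configuration loader; it just shows how `include_linked_accounts = true` lands in a bool field via its tag:

```go
package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

// Illustrative subset of the plugin's options; only the toml tags matter here.
type cloudwatchConfig struct {
	Namespaces            []string `toml:"namespaces"`
	IncludeLinkedAccounts bool     `toml:"include_linked_accounts"`
}

func main() {
	raw := `
namespaces = ["AWS/ELB"]
include_linked_accounts = true
`
	var cfg cloudwatchConfig
	if _, err := toml.Decode(raw, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Println(cfg.IncludeLinkedAccounts) // true
}
```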
@@ -126,7 +119,6 @@ func (c *CloudWatch) Gather(acc telegraf.Accumulator) error {
 	if err != nil {
 		return err
 	}
-
 	c.updateWindow(time.Now())
 
 	// Get all of the possible queries so we can send groups of 100.
@@ -172,7 +164,6 @@ func (c *CloudWatch) Gather(acc telegraf.Accumulator) error {
 	}
 
 	wg.Wait()
-
 	return c.aggregateMetrics(acc, results)
 }
 
@@ -233,6 +224,7 @@ func (c *CloudWatch) initializeCloudWatch() error {
 
 type filteredMetric struct {
 	metrics    []types.Metric
+	accounts   []string
 	statFilter filter.Filter
 }
 
@@ -248,6 +240,7 @@ func getFilteredMetrics(c *CloudWatch) ([]filteredMetric, error) {
 	if c.Metrics != nil {
 		for _, m := range c.Metrics {
 			metrics := []types.Metric{}
+			var accounts []string
 			if !hasWildcard(m.Dimensions) {
 				dimensions := make([]types.Dimension, 0, len(m.Dimensions))
 				for _, d := range m.Dimensions {
@ -266,9 +259,10 @@ func getFilteredMetrics(c *CloudWatch) ([]filteredMetric, error) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
allMetrics := c.fetchNamespaceMetrics()
|
allMetrics, allAccounts := c.fetchNamespaceMetrics()
|
||||||
|
|
||||||
for _, name := range m.MetricNames {
|
for _, name := range m.MetricNames {
|
||||||
for _, metric := range allMetrics {
|
for i, metric := range allMetrics {
|
||||||
if isSelected(name, metric, m.Dimensions) {
|
if isSelected(name, metric, m.Dimensions) {
|
||||||
for _, namespace := range c.Namespaces {
|
for _, namespace := range c.Namespaces {
|
||||||
metrics = append(metrics, types.Metric{
|
metrics = append(metrics, types.Metric{
|
||||||
|
|
@@ -277,6 +271,9 @@ func getFilteredMetrics(c *CloudWatch) ([]filteredMetric, error) {
 									Dimensions: metric.Dimensions,
 								})
 							}
+							if c.IncludeLinkedAccounts {
+								accounts = append(accounts, allAccounts[i])
+							}
 						}
 					}
 				}
@@ -292,39 +289,39 @@ func getFilteredMetrics(c *CloudWatch) ([]filteredMetric, error) {
 			if err != nil {
 				return nil, err
 			}
 
 			fMetrics = append(fMetrics, filteredMetric{
 				metrics:    metrics,
 				statFilter: statFilter,
+				accounts:   accounts,
 			})
 		}
 	} else {
-		metrics := c.fetchNamespaceMetrics()
+		metrics, accounts := c.fetchNamespaceMetrics()
 		fMetrics = []filteredMetric{
 			{
 				metrics:    metrics,
 				statFilter: c.statFilter,
+				accounts:   accounts,
 			},
 		}
 	}
 
 	c.metricCache = &metricCache{
 		metrics: fMetrics,
 		built:   time.Now(),
 		ttl:     time.Duration(c.CacheTTL),
 	}
 
 	return fMetrics, nil
 }
 
 // fetchNamespaceMetrics retrieves available metrics for a given CloudWatch namespace.
-func (c *CloudWatch) fetchNamespaceMetrics() []types.Metric {
+func (c *CloudWatch) fetchNamespaceMetrics() ([]types.Metric, []string) {
 	metrics := []types.Metric{}
+	var accounts []string
 	for _, namespace := range c.Namespaces {
 		params := &cwClient.ListMetricsInput{
 			Dimensions: []types.DimensionFilter{},
 			Namespace:  aws.String(namespace),
+			IncludeLinkedAccounts: c.IncludeLinkedAccounts,
 		}
 		if c.RecentlyActive == "PT3H" {
 			params.RecentlyActive = types.RecentlyActivePt3h
@@ -338,6 +335,7 @@ func (c *CloudWatch) fetchNamespaceMetrics() []types.Metric {
 				break
 			}
 			metrics = append(metrics, resp.Metrics...)
+			accounts = append(accounts, resp.OwningAccounts...)
 
 			if resp.NextToken == nil {
 				break
@@ -345,7 +343,7 @@ func (c *CloudWatch) fetchNamespaceMetrics() []types.Metric {
 			params.NextToken = resp.NextToken
 		}
 	}
-	return metrics
+	return metrics, accounts
 }
 
 func (c *CloudWatch) updateWindow(relativeTo time.Time) {
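`fetchNamespaceMetrics` now returns two index-aligned slices: `accounts[i]` is the owning account of `metrics[i]`, taken from the `OwningAccounts` field of the ListMetrics response. As a design note, the same pairing could be kept explicit with a small struct; the sketch below is only an equivalent illustration, not what the plugin does:

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/service/cloudwatch/types"
)

// ownedMetric pairs a metric with the account that owns it.
type ownedMetric struct {
	metric  types.Metric
	account string // empty when include_linked_accounts is disabled
}

// zipOwned combines the two parallel slices into one slice of pairs.
func zipOwned(metrics []types.Metric, accounts []string) []ownedMetric {
	out := make([]ownedMetric, 0, len(metrics))
	for i, m := range metrics {
		om := ownedMetric{metric: m}
		if i < len(accounts) {
			om.account = accounts[i]
		}
		out = append(out, om)
	}
	return out
}

func main() {
	metrics := []types.Metric{{MetricName: aws.String("Latency")}}
	accounts := []string{"123456789012"}
	for _, om := range zipOwned(metrics, accounts) {
		fmt.Println(*om.metric.MetricName, om.account)
	}
}
```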
@@ -375,63 +373,34 @@ func (c *CloudWatch) getDataQueries(filteredMetrics []filteredMetric) map[string][]types.MetricDataQuery {
 		for j, metric := range filtered.metrics {
 			id := strconv.Itoa(j) + "_" + strconv.Itoa(i)
 			dimension := ctod(metric.Dimensions)
-			if filtered.statFilter.Match("average") {
-				c.queryDimensions["average_"+id] = dimension
-				dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], types.MetricDataQuery{
-					Id:    aws.String("average_" + id),
-					Label: aws.String(snakeCase(*metric.MetricName + "_average")),
-					MetricStat: &types.MetricStat{
-						Metric: &filtered.metrics[j],
-						Period: aws.Int32(int32(time.Duration(c.Period).Seconds())),
-						Stat:   aws.String(StatisticAverage),
-					},
-				})
+			var accountID *string
+			if c.IncludeLinkedAccounts {
+				accountID = aws.String(filtered.accounts[j])
+				(*dimension)["account"] = filtered.accounts[j]
 			}
-			if filtered.statFilter.Match("maximum") {
-				c.queryDimensions["maximum_"+id] = dimension
-				dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], types.MetricDataQuery{
-					Id:    aws.String("maximum_" + id),
-					Label: aws.String(snakeCase(*metric.MetricName + "_maximum")),
-					MetricStat: &types.MetricStat{
-						Metric: &filtered.metrics[j],
-						Period: aws.Int32(int32(time.Duration(c.Period).Seconds())),
-						Stat:   aws.String(StatisticMaximum),
-					},
-				})
+
+			statisticTypes := map[string]string{
+				"average":      "Average",
+				"maximum":      "Maximum",
+				"minimum":      "Minimum",
+				"sum":          "Sum",
+				"sample_count": "SampleCount",
 			}
-			if filtered.statFilter.Match("minimum") {
-				c.queryDimensions["minimum_"+id] = dimension
+
+			for statisticType, statistic := range statisticTypes {
+				if !filtered.statFilter.Match(statisticType) {
+					continue
+				}
+				queryID := statisticType + "_" + id
+				c.queryDimensions[queryID] = dimension
 				dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], types.MetricDataQuery{
-					Id:    aws.String("minimum_" + id),
-					Label: aws.String(snakeCase(*metric.MetricName + "_minimum")),
+					Id:        aws.String(queryID),
+					AccountId: accountID,
+					Label:     aws.String(snakeCase(*metric.MetricName + "_" + statisticType)),
 					MetricStat: &types.MetricStat{
 						Metric: &filtered.metrics[j],
 						Period: aws.Int32(int32(time.Duration(c.Period).Seconds())),
-						Stat:   aws.String(StatisticMinimum),
-					},
-				})
-			}
-			if filtered.statFilter.Match("sum") {
-				c.queryDimensions["sum_"+id] = dimension
-				dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], types.MetricDataQuery{
-					Id:    aws.String("sum_" + id),
-					Label: aws.String(snakeCase(*metric.MetricName + "_sum")),
-					MetricStat: &types.MetricStat{
-						Metric: &filtered.metrics[j],
-						Period: aws.Int32(int32(time.Duration(c.Period).Seconds())),
-						Stat:   aws.String(StatisticSum),
-					},
-				})
-			}
-			if filtered.statFilter.Match("sample_count") {
-				c.queryDimensions["sample_count_"+id] = dimension
-				dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], types.MetricDataQuery{
-					Id:    aws.String("sample_count_" + id),
-					Label: aws.String(snakeCase(*metric.MetricName + "_sample_count")),
-					MetricStat: &types.MetricStat{
-						Metric: &filtered.metrics[j],
-						Period: aws.Int32(int32(time.Duration(c.Period).Seconds())),
-						Stat:   aws.String(StatisticSampleCount),
+						Stat:   aws.String(statistic),
 					},
 				})
 			}
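The `AccountId` set on each query above is what makes the subsequent GetMetricData call read datapoints from the linked source account instead of the monitoring account. Below is a condensed sketch of a single cross-account query, with illustrative identifiers, assuming aws-sdk-go-v2 and monitoring-account credentials:

```go
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/aws/aws-sdk-go-v2/aws"
	awsconfig "github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/service/cloudwatch"
	"github.com/aws/aws-sdk-go-v2/service/cloudwatch/types"
)

func main() {
	ctx := context.Background()
	cfg, err := awsconfig.LoadDefaultConfig(ctx)
	if err != nil {
		log.Fatal(err)
	}
	client := cloudwatch.NewFromConfig(cfg)

	// One query targeting a metric owned by a linked source account.
	query := types.MetricDataQuery{
		Id:        aws.String("average_0_0"),
		AccountId: aws.String("123456789012"), // linked source account (illustrative)
		Label:     aws.String("latency_average"),
		MetricStat: &types.MetricStat{
			Metric: &types.Metric{
				Namespace:  aws.String("AWS/ELB"),
				MetricName: aws.String("Latency"),
				Dimensions: []types.Dimension{
					{Name: aws.String("LoadBalancerName"), Value: aws.String("p-example1")},
				},
			},
			Period: aws.Int32(300),
			Stat:   aws.String("Average"),
		},
	}

	end := time.Now()
	start := end.Add(-5 * time.Minute)
	resp, err := client.GetMetricData(ctx, &cloudwatch.GetMetricDataInput{
		StartTime:         aws.Time(start),
		EndTime:           aws.Time(end),
		MetricDataQueries: []types.MetricDataQuery{query},
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, r := range resp.MetricDataResults {
		fmt.Println(*r.Label, r.Values)
	}
}
```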
plugins/inputs/cloudwatch/cloudwatch_test.go

@@ -26,7 +26,7 @@ func (m *mockGatherCloudWatchClient) ListMetrics(
 	params *cwClient.ListMetricsInput,
 	_ ...func(*cwClient.Options),
 ) (*cwClient.ListMetricsOutput, error) {
-	return &cwClient.ListMetricsOutput{
+	response := &cwClient.ListMetricsOutput{
 		Metrics: []types.Metric{
 			{
 				Namespace: params.Namespace,
@@ -34,12 +34,26 @@ func (m *mockGatherCloudWatchClient) ListMetrics(
 				Dimensions: []types.Dimension{
 					{
 						Name:  aws.String("LoadBalancerName"),
-						Value: aws.String("p-example"),
+						Value: aws.String("p-example1"),
+					},
+				},
+			},
+			{
+				Namespace:  params.Namespace,
+				MetricName: aws.String("Latency"),
+				Dimensions: []types.Dimension{
+					{
+						Name:  aws.String("LoadBalancerName"),
+						Value: aws.String("p-example2"),
 					},
 				},
 			},
 		},
-	}, nil
+	}
+	if params.IncludeLinkedAccounts {
+		(*response).OwningAccounts = []string{"123456789012", "923456789017"}
+	}
+	return response, nil
 }
 
 func (m *mockGatherCloudWatchClient) GetMetricData(
@@ -94,6 +108,51 @@ func (m *mockGatherCloudWatchClient) GetMetricData(
 				},
 				Values: []float64{100},
 			},
+			{
+				Id:         aws.String("minimum_1_0"),
+				Label:      aws.String("latency_minimum"),
+				StatusCode: types.StatusCodeComplete,
+				Timestamps: []time.Time{
+					*params.EndTime,
+				},
+				Values: []float64{0.1},
+			},
+			{
+				Id:         aws.String("maximum_1_0"),
+				Label:      aws.String("latency_maximum"),
+				StatusCode: types.StatusCodeComplete,
+				Timestamps: []time.Time{
+					*params.EndTime,
+				},
+				Values: []float64{0.3},
+			},
+			{
+				Id:         aws.String("average_1_0"),
+				Label:      aws.String("latency_average"),
+				StatusCode: types.StatusCodeComplete,
+				Timestamps: []time.Time{
+					*params.EndTime,
+				},
+				Values: []float64{0.2},
+			},
+			{
+				Id:         aws.String("sum_1_0"),
+				Label:      aws.String("latency_sum"),
+				StatusCode: types.StatusCodeComplete,
+				Timestamps: []time.Time{
+					*params.EndTime,
+				},
+				Values: []float64{124},
+			},
+			{
+				Id:         aws.String("sample_count_1_0"),
+				Label:      aws.String("latency_sample_count"),
+				StatusCode: types.StatusCodeComplete,
+				Timestamps: []time.Time{
+					*params.EndTime,
+				},
+				Values: []float64{100},
+			},
 		},
 	}, nil
 }
@@ -133,12 +192,55 @@ func TestGather(t *testing.T) {
 
 	tags := map[string]string{}
 	tags["region"] = "us-east-1"
-	tags["load_balancer_name"] = "p-example"
+	tags["load_balancer_name"] = "p-example1"
 
 	require.True(t, acc.HasMeasurement("cloudwatch_aws_elb"))
 	acc.AssertContainsTaggedFields(t, "cloudwatch_aws_elb", fields, tags)
 }
 
+func TestMultiAccountGather(t *testing.T) {
+	duration, _ := time.ParseDuration("1m")
+	internalDuration := config.Duration(duration)
+	c := &CloudWatch{
+		CredentialConfig: internalaws.CredentialConfig{
+			Region: "us-east-1",
+		},
+		Namespace:             "AWS/ELB",
+		Delay:                 internalDuration,
+		Period:                internalDuration,
+		RateLimit:             200,
+		BatchSize:             500,
+		Log:                   testutil.Logger{},
+		IncludeLinkedAccounts: true,
+	}
+
+	var acc testutil.Accumulator
+
+	require.NoError(t, c.Init())
+	c.client = &mockGatherCloudWatchClient{}
+	require.NoError(t, acc.GatherError(c.Gather))
+
+	fields := map[string]interface{}{}
+	fields["latency_minimum"] = 0.1
+	fields["latency_maximum"] = 0.3
+	fields["latency_average"] = 0.2
+	fields["latency_sum"] = 123.0
+	fields["latency_sample_count"] = 100.0
+
+	tags := map[string]string{}
+	tags["region"] = "us-east-1"
+	tags["load_balancer_name"] = "p-example1"
+	tags["account"] = "123456789012"
+
+	require.True(t, acc.HasMeasurement("cloudwatch_aws_elb"))
+	acc.AssertContainsTaggedFields(t, "cloudwatch_aws_elb", fields, tags)
+
+	tags["load_balancer_name"] = "p-example2"
+	tags["account"] = "923456789017"
+	fields["latency_sum"] = 124.0
+	acc.AssertContainsTaggedFields(t, "cloudwatch_aws_elb", fields, tags)
+}
+
 func TestGather_MultipleNamespaces(t *testing.T) {
 	duration, _ := time.ParseDuration("1m")
 	internalDuration := config.Duration(duration)
plugins/inputs/cloudwatch/sample.conf

@@ -21,7 +21,12 @@
   # role_session_name = ""
   # profile = ""
   # shared_credential_file = ""
 
+  ## If you are using CloudWatch cross-account observability, you can
+  ## set IncludeLinkedAccounts to true in a monitoring account
+  ## and collect metrics from the linked source accounts
+  # include_linked_accounts = false
+
   ## Endpoint to make request against, the correct endpoint is automatically
   ## determined and this option should only be set if you wish to override the
   ## default.