chore: Update to AWS SDK v2 (#9647)

Alexander Krantz authored on 2021-10-21 14:32:10 -07:00, committed by GitHub
parent a4d8a4b84f
commit 06f2a37b4a
17 changed files with 666 additions and 523 deletions


@@ -1,11 +1,12 @@
 package aws
 
 import (
-	"github.com/aws/aws-sdk-go/aws"
-	"github.com/aws/aws-sdk-go/aws/client"
-	"github.com/aws/aws-sdk-go/aws/credentials"
-	"github.com/aws/aws-sdk-go/aws/credentials/stscreds"
-	"github.com/aws/aws-sdk-go/aws/session"
+	"context"
+	awsV2 "github.com/aws/aws-sdk-go-v2/aws"
+	configV2 "github.com/aws/aws-sdk-go-v2/config"
+	credentialsV2 "github.com/aws/aws-sdk-go-v2/credentials"
+	stscredsV2 "github.com/aws/aws-sdk-go-v2/credentials/stscreds"
+	"github.com/aws/aws-sdk-go-v2/service/sts"
 )
 
 type CredentialConfig struct {
@@ -21,45 +22,66 @@ type CredentialConfig struct {
 	WebIdentityTokenFile string `toml:"web_identity_token_file"`
 }
 
-func (c *CredentialConfig) Credentials() (client.ConfigProvider, error) {
+func (c *CredentialConfig) Credentials() (awsV2.Config, error) {
 	if c.RoleARN != "" {
 		return c.assumeCredentials()
 	}
 	return c.rootCredentials()
 }
 
-func (c *CredentialConfig) rootCredentials() (client.ConfigProvider, error) {
-	config := &aws.Config{
-		Region: aws.String(c.Region),
-	}
-	if c.EndpointURL != "" {
-		config.Endpoint = &c.EndpointURL
-	}
-	if c.AccessKey != "" || c.SecretKey != "" {
-		config.Credentials = credentials.NewStaticCredentials(c.AccessKey, c.SecretKey, c.Token)
-	} else if c.Profile != "" || c.Filename != "" {
-		config.Credentials = credentials.NewSharedCredentials(c.Filename, c.Profile)
+func (c *CredentialConfig) rootCredentials() (awsV2.Config, error) {
+	options := []func(*configV2.LoadOptions) error{
+		configV2.WithRegion(c.Region),
 	}
-	return session.NewSession(config)
+	if c.EndpointURL != "" {
+		resolver := awsV2.EndpointResolverFunc(func(service, region string) (awsV2.Endpoint, error) {
+			return awsV2.Endpoint{
+				URL:               c.EndpointURL,
+				HostnameImmutable: true,
+				Source:            awsV2.EndpointSourceCustom,
+			}, nil
+		})
+		options = append(options, configV2.WithEndpointResolver(resolver))
+	}
+	if c.Profile != "" {
+		options = append(options, configV2.WithSharedConfigProfile(c.Profile))
+	}
+	if c.Filename != "" {
+		options = append(options, configV2.WithSharedCredentialsFiles([]string{c.Filename}))
+	}
+	if c.AccessKey != "" || c.SecretKey != "" {
+		provider := credentialsV2.NewStaticCredentialsProvider(c.AccessKey, c.SecretKey, c.Token)
+		options = append(options, configV2.WithCredentialsProvider(provider))
+	}
+	return configV2.LoadDefaultConfig(context.Background(), options...)
 }
 
-func (c *CredentialConfig) assumeCredentials() (client.ConfigProvider, error) {
+func (c *CredentialConfig) assumeCredentials() (awsV2.Config, error) {
 	rootCredentials, err := c.rootCredentials()
 	if err != nil {
-		return nil, err
+		return awsV2.Config{}, err
 	}
-	config := &aws.Config{
-		Region:   aws.String(c.Region),
-		Endpoint: &c.EndpointURL,
-	}
+
+	var provider awsV2.CredentialsProvider
+	stsService := sts.NewFromConfig(rootCredentials)
 	if c.WebIdentityTokenFile != "" {
-		config.Credentials = stscreds.NewWebIdentityCredentials(rootCredentials, c.RoleARN, c.RoleSessionName, c.WebIdentityTokenFile)
+		provider = stscredsV2.NewWebIdentityRoleProvider(stsService, c.RoleARN, stscredsV2.IdentityTokenFile(c.WebIdentityTokenFile), func(opts *stscredsV2.WebIdentityRoleOptions) {
+			if c.RoleSessionName != "" {
+				opts.RoleSessionName = c.RoleSessionName
+			}
+		})
 	} else {
-		config.Credentials = stscreds.NewCredentials(rootCredentials, c.RoleARN)
+		provider = stscredsV2.NewAssumeRoleProvider(stsService, c.RoleARN, func(opts *stscredsV2.AssumeRoleOptions) {
+			if c.RoleSessionName != "" {
+				opts.RoleSessionName = c.RoleSessionName
+			}
+		})
 	}
-	return session.NewSession(config)
+
+	rootCredentials.Credentials = awsV2.NewCredentialsCache(provider)
+	return rootCredentials, nil
 }
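Since Credentials() now returns an aws-sdk-go-v2 aws.Config value rather than a *session.Session, callers build service clients directly from that config and pass a context.Context on every call. The minimal sketch below shows that call pattern; it is illustrative only and not part of this commit — the CloudWatch client, the helper name, and the field values are assumptions, using the cloudwatch service module that go.mod adds further down.

// Illustrative sketch (not part of this commit): consuming the v2 config
// returned by Credentials() from within the same package.
package aws

import (
	"context"
	"fmt"

	"github.com/aws/aws-sdk-go-v2/service/cloudwatch"
)

// exampleListMetrics is a hypothetical helper showing the v2 call pattern.
func exampleListMetrics() error {
	// Illustrative values; any field combination handled above
	// (static keys, shared profile, assumed role, ...) works the same way.
	c := &CredentialConfig{
		Region:  "us-east-1",
		Profile: "default",
	}

	// Credentials() now yields an awsV2.Config instead of a *session.Session.
	cfg, err := c.Credentials()
	if err != nil {
		return err
	}

	// v2 service clients are constructed directly from the config,
	// and every API call takes a context.
	client := cloudwatch.NewFromConfig(cfg)
	out, err := client.ListMetrics(context.Background(), &cloudwatch.ListMetricsInput{})
	if err != nil {
		return err
	}

	fmt.Printf("found %d metrics\n", len(out.Metrics))
	return nil
}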


@@ -36,17 +36,27 @@ following works:
 - github.com/aws/aws-sdk-go-v2 [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/LICENSE.txt)
 - github.com/aws/aws-sdk-go-v2/config [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/config/LICENSE.txt)
 - github.com/aws/aws-sdk-go-v2/credentials [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/credentials/LICENSE.txt)
+- github.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/feature/dynamodb/attributevalue/LICENSE.txt)
 - github.com/aws/aws-sdk-go-v2/feature/ec2/imds [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/feature/ec2/imds/LICENSE.txt)
-- github.com/aws/aws-sdk-go-v2/feature/s3/manager [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/LICENSE.txt)
-- github.com/aws/aws-sdk-go-v2/internal/ini [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/ec2/LICENSE.txt)
+- github.com/aws/aws-sdk-go-v2/feature/s3/manager [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/feature/s3/manager/LICENSE.txt)
+- github.com/aws/aws-sdk-go-v2/internal/configsources [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/internal/configsources/LICENSE.txt)
+- github.com/aws/aws-sdk-go-v2/internal/ini [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/internal/ini/LICENSE.txt)
+- github.com/aws/aws-sdk-go-v2/service/cloudwatch [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/cloudwatch/LICENSE.txt)
+- github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/cloudwatchlogs/LICENSE.txt)
+- github.com/aws/aws-sdk-go-v2/service/dynamodb [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/dynamodb/LICENSE.txt)
+- github.com/aws/aws-sdk-go-v2/service/dynamodbstreams [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/dynamodbstreams/LICENSE.txt)
 - github.com/aws/aws-sdk-go-v2/service/ec2 [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/ec2/LICENSE.txt)
 - github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/internal/accept-encoding/LICENSE.txt)
+- github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/internal/endpoint-discovery/LICENSE.txt)
 - github.com/aws/aws-sdk-go-v2/service/internal/presigned-url [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/internal/presigned-url/LICENSE.txt)
 - github.com/aws/aws-sdk-go-v2/service/internal/s3shared [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/internal/s3shared/LICENSE.txt)
+- github.com/aws/aws-sdk-go-v2/service/kinesis [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/kinesis/LICENSE.txt)
 - github.com/aws/aws-sdk-go-v2/service/s3 [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/s3/LICENSE.txt)
 - github.com/aws/aws-sdk-go-v2/service/sso [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/ec2/LICENSE.txt)
 - github.com/aws/aws-sdk-go-v2/service/sts [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/sts/LICENSE.txt)
+- github.com/aws/aws-sdk-go-v2/service/timestreamwrite [Apache License 2.0](https://github.com/aws/aws-sdk-go-v2/blob/main/service/timestreamwrite/LICENSE.txt)
 - github.com/aws/smithy-go [Apache License 2.0](https://github.com/aws/smithy-go/blob/main/LICENSE)
+- github.com/awslabs/kinesis-aggregation/go [Apache License 2.0](https://github.com/awslabs/kinesis-aggregation/blob/master/LICENSE.txt)
 - github.com/benbjohnson/clock [MIT License](https://github.com/benbjohnson/clock/blob/master/LICENSE)
 - github.com/beorn7/perks [MIT License](https://github.com/beorn7/perks/blob/master/LICENSE)
 - github.com/bmatcuk/doublestar [MIT License](https://github.com/bmatcuk/doublestar/blob/master/LICENSE)

go.mod

@@ -47,19 +47,26 @@ require (
 	github.com/aristanetworks/glog v0.0.0-20191112221043-67e8567f59f3 // indirect
 	github.com/aristanetworks/goarista v0.0.0-20190325233358-a123909ec740
 	github.com/armon/go-metrics v0.3.3 // indirect
-	github.com/aws/aws-sdk-go v1.38.69
 	github.com/aws/aws-sdk-go-v2 v1.9.1
 	github.com/aws/aws-sdk-go-v2/config v1.8.2
-	github.com/aws/aws-sdk-go-v2/credentials v1.4.2 // indirect
+	github.com/aws/aws-sdk-go-v2/credentials v1.4.2
 	github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.5.1
 	github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.5.3 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/configsources v1.0.4 // indirect
+	github.com/aws/aws-sdk-go-v2/internal/ini v1.2.3 // indirect
+	github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.7.0
+	github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.5.2
+	github.com/aws/aws-sdk-go-v2/service/dynamodb v1.5.0
 	github.com/aws/aws-sdk-go-v2/service/ec2 v1.1.0
 	github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.3.0 // indirect
+	github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.1.0 // indirect
 	github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.1 // indirect
 	github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.7.1 // indirect
+	github.com/aws/aws-sdk-go-v2/service/kinesis v1.6.0
 	github.com/aws/aws-sdk-go-v2/service/s3 v1.16.0 // indirect
 	github.com/aws/aws-sdk-go-v2/service/sso v1.4.1 // indirect
-	github.com/aws/aws-sdk-go-v2/service/sts v1.7.1 // indirect
+	github.com/aws/aws-sdk-go-v2/service/sts v1.7.1
+	github.com/aws/aws-sdk-go-v2/service/timestreamwrite v1.3.2
 	github.com/aws/smithy-go v1.8.0
 	github.com/benbjohnson/clock v1.1.0
 	github.com/beorn7/perks v1.0.1 // indirect
@@ -129,8 +136,8 @@ require (
 	github.com/grid-x/serial v0.0.0-20191104121038-e24bc9bf6f08 // indirect
 	github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect
 	github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed // indirect
-	github.com/harlow/kinesis-consumer v0.3.1-0.20181230152818-2f58b136fee0
-	github.com/hashicorp/consul/api v1.11.0
+	github.com/harlow/kinesis-consumer v0.3.6-0.20210911031324-5a873d6e9fec
+	github.com/hashicorp/consul/api v1.9.1
 	github.com/hashicorp/go-cleanhttp v0.5.1 // indirect
 	github.com/hashicorp/go-hclog v0.16.2 // indirect
 	github.com/hashicorp/go-immutable-radix v1.2.0 // indirect
@@ -264,7 +271,7 @@
 	github.com/xdg/scram v1.0.3
 	github.com/xdg/stringprep v1.0.3 // indirect
 	github.com/youmark/pkcs8 v0.0.0-20201027041543-1326539a0a0a // indirect
-	github.com/yuin/gopher-lua v0.0.0-20180630135845-46796da1b0b4 // indirect
+	github.com/yuin/gopher-lua v0.0.0-20200603152657-dc2b0ca8b37e // indirect
 	go.etcd.io/etcd/api/v3 v3.5.0 // indirect
 	go.mongodb.org/mongo-driver v1.5.3
 	go.opencensus.io v0.23.0 // indirect
@@ -327,7 +334,10 @@
 )
 
 require (
-	github.com/aws/aws-sdk-go-v2/internal/ini v1.2.3 // indirect
+	github.com/aws/aws-sdk-go v1.38.3 // indirect
+	github.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.2.0 // indirect
+	github.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.4.0 // indirect
+	github.com/awslabs/kinesis-aggregation/go v0.0.0-20210630091500-54e17340d32f // indirect
 	github.com/cenkalti/backoff/v4 v4.1.1 // indirect
 	github.com/jcmturner/aescts/v2 v2.0.0 // indirect
 	github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect

go.sum

@@ -153,7 +153,9 @@ github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
 github.com/DATA-DOG/go-sqlmock v1.3.3/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
+github.com/DATA-DOG/go-sqlmock v1.4.1/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
 github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
+github.com/DataDog/sketches-go v0.0.0-20190923095040-43f19ad77ff7/go.mod h1:Q5DbzQ+3AkgGwymQO7aZFNP7ns2lZKGtvRBzRXfdi60=
 github.com/DataDog/zstd v1.3.6-0.20190409195224-796139022798/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo=
 github.com/DataDog/zstd v1.4.4/go.mod h1:1jcaCB/ufaK+sKp1NBhlGmpz41jOoPQ35bpF36t7BBo=
 github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs=
@@ -238,6 +240,8 @@ github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15 h1:AUNCr9CiJuwrRY
 github.com/alecthomas/units v0.0.0-20210208195552-ff826a37aa15/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
 github.com/alexflint/go-filemutex v0.0.0-20171022225611-72bdc8eae2ae/go.mod h1:CgnQgUtFrFz9mxFNtED3jI5tLDjKlOM+oUF/sTk6ps0=
 github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE=
+github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc=
+github.com/alicebob/miniredis v2.5.0+incompatible/go.mod h1:8HZjEj4yU0dwhYHky+DxYx+6BMjkBbe5ONFIF1MXffk=
 github.com/aliyun/alibaba-cloud-sdk-go v1.61.1004 h1:YtaYjXmemIMyySUbs0VGFPqsLpsNHf4TW/L6yqpJQ9s=
 github.com/aliyun/alibaba-cloud-sdk-go v1.61.1004/go.mod h1:pUKYbK5JQ+1Dfxk80P0qxGqe5dkxDoabbZS7zOcouyA=
 github.com/amir/raidman v0.0.0-20170415203553-1ccc43bfb9c9 h1:FXrPTd8Rdlc94dKccl7KPmdmIbVh/OjelJ8/vgMRzcQ=
@@ -265,6 +269,10 @@ github.com/apache/thrift v0.14.1/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb
 github.com/apache/thrift v0.14.2/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
 github.com/apache/thrift v0.15.0 h1:aGvdaR0v1t9XLgjtBYwxcBvBOTMqClzwE26CHOgjW1Y=
 github.com/apache/thrift v0.15.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU=
+github.com/apex/log v1.6.0/go.mod h1:x7s+P9VtvFBXge9Vbn+8TrqKmuzmD35TTkeBHul8UtY=
+github.com/apex/logs v1.0.0/go.mod h1:XzxuLZ5myVHDy9SAmYpamKKRNApGj54PfYLcFrXqDwo=
+github.com/aphistic/golf v0.0.0-20180712155816-02c07f170c5a/go.mod h1:3NqKYiepwy8kCu4PNA+aP7WUV72eXWJeP9/r3/K9aLE=
+github.com/aphistic/sweet v0.2.0/go.mod h1:fWDlIh/isSE9n6EPsRmC0det+whmX6dJid3stzu0Xys=
 github.com/aristanetworks/glog v0.0.0-20191112221043-67e8567f59f3 h1:Bmjk+DjIi3tTAU0wxGaFbfjGUqlxxSXARq9A96Kgoos=
 github.com/aristanetworks/glog v0.0.0-20191112221043-67e8567f59f3/go.mod h1:KASm+qXFKs/xjSoWn30NrWBBvdTTQq+UjkhjEJHfSFA=
 github.com/aristanetworks/goarista v0.0.0-20190325233358-a123909ec740 h1:FD4/ikKOFxwP8muWDypbmBWc634+YcAs3eBrYAmRdZY=
@@ -287,56 +295,111 @@ github.com/ashanbrown/forbidigo v1.1.0/go.mod h1:vVW7PEdqEFqapJe95xHkTfB1+XvZXBF
 github.com/ashanbrown/makezero v0.0.0-20201205152432-7b7cdbb3025a/go.mod h1:oG9Dnez7/ESBqc4EdrdNlryeo7d0KcW1ftXHm7nU/UU=
 github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU=
 github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
+github.com/aws/aws-sdk-go v1.19.48/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
+github.com/aws/aws-sdk-go v1.20.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
 github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
 github.com/aws/aws-sdk-go v1.34.28/go.mod h1:H7NKnBqNVzoTJpGfLrQkkD+ytBA93eiDYi/+8rV9s48=
+github.com/aws/aws-sdk-go v1.38.3 h1:QCL/le04oAz2jELMRSuJVjGT7H+4hhoQc66eMPCfU/k=
 github.com/aws/aws-sdk-go v1.38.3/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
-github.com/aws/aws-sdk-go v1.38.69 h1:V489lmrdkIQSfF6OAGZZ1Cavcm7eczCm2JcGvX+yHRg=
-github.com/aws/aws-sdk-go v1.38.69/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
 github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g=
 github.com/aws/aws-sdk-go-v2 v1.1.0/go.mod h1:smfAbmpW+tcRVuNUjo3MOArSZmW72t62rkCzc2i0TWM=
 github.com/aws/aws-sdk-go-v2 v1.8.0/go.mod h1:xEFuWz+3TYdlPRuo+CqATbeDWIWyaT5uAPwPaWtgse0=
+github.com/aws/aws-sdk-go-v2 v1.8.1/go.mod h1:xEFuWz+3TYdlPRuo+CqATbeDWIWyaT5uAPwPaWtgse0=
+github.com/aws/aws-sdk-go-v2 v1.9.0/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4=
 github.com/aws/aws-sdk-go-v2 v1.9.1 h1:ZbovGV/qo40nrOJ4q8G33AGICzaPI45FHQWJ9650pF4=
 github.com/aws/aws-sdk-go-v2 v1.9.1/go.mod h1:cK/D0BBs0b/oWPIcX/Z/obahJK1TT7IPVjy53i/mX/4=
 github.com/aws/aws-sdk-go-v2/config v1.6.0/go.mod h1:TNtBVmka80lRPk5+S9ZqVfFszOQAGJJ9KbT3EM3CHNU=
+github.com/aws/aws-sdk-go-v2/config v1.6.1/go.mod h1:t/y3UPu0XEDy0cEw6mvygaBQaPzWiYAxfP2SzgtvclA=
 github.com/aws/aws-sdk-go-v2/config v1.8.2 h1:Dqy4ySXFmulRmZhfynm/5CD4Y6aXiTVhDtXLIuUe/r0=
 github.com/aws/aws-sdk-go-v2/config v1.8.2/go.mod h1:r0bkX9NyuCuf28qVcsEMtpAQibT7gA1Q0gzkjvgJdLU=
 github.com/aws/aws-sdk-go-v2/credentials v1.3.2/go.mod h1:PACKuTJdt6AlXvEq8rFI4eDmoqDFC5DpVKQbWysaDgM=
+github.com/aws/aws-sdk-go-v2/credentials v1.3.3/go.mod h1:oVieKMT3m9BSfqhOfuQ+E0j/yN84ZAJ7Qv8Sfume/ak=
 github.com/aws/aws-sdk-go-v2/credentials v1.4.2 h1:8kVE4Og6wlhVrMGiORQ3p9gRj2exjzhFRB+QzWBUa5Q=
 github.com/aws/aws-sdk-go-v2/credentials v1.4.2/go.mod h1:9Sp6u121/f0NnvHyhG7dgoYeUTEFC2vsvJqJ6wXpkaI=
+github.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.2.0 h1:8kvinmbIDObqsWegKP0JjeanYPiA4GUVpAtciNWE+jw=
+github.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.2.0/go.mod h1:UVFtSYSWCHj2+brBLDHUdlJXmz8LxUpZhA+Ewypc+xQ=
 github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.4.0/go.mod h1:Mj/U8OpDbcVcoctrYwA2bak8k/HFPdcLzI/vaiXMwuM=
+github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.4.1/go.mod h1:+GTydg3uHmVlQdkRoetz6VHKbOMEYof70m19IpMLifc=
 github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.5.1 h1:Nm+BxqBtT0r+AnD6byGMCGT4Km0QwHBy8mAYptNPXY4=
 github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.5.1/go.mod h1:W1ldHfsgeGlKpJ4xZMKZUI6Wmp6EAstU7PxnhbXWWrI=
 github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.4.0/go.mod h1:eHwXu2+uE/T6gpnYWwBwqoeqRf9IXyCcolyOWDRAErQ=
 github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.5.3 h1:0O72494cCsazjpsGfo+LXezru6PMSp0HUB1m5UfpaRU=
 github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.5.3/go.mod h1:claNkz2j/N/AZceFcAbR0NyuWnrn+jCYpI+6Ozjsc0k=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.0.2/go.mod h1:1QsSZvLUuaQ6VJsCXolYCEzV0mVBkNBp64pIJy9yRks=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.0.4 h1:IM9b6hlCcVFJFydPoyphs/t7YrHfqKy7T4/7AG5Eprs=
+github.com/aws/aws-sdk-go-v2/internal/configsources v1.0.4/go.mod h1:W5gGbtNXFpF9/ssYZTaItzG/B+j0bjTnwStiCP2AtWU=
 github.com/aws/aws-sdk-go-v2/internal/ini v1.2.0/go.mod h1:Q5jATQc+f1MfZp3PDMhn6ry18hGvE0i8yvbXoKbnZaE=
+github.com/aws/aws-sdk-go-v2/internal/ini v1.2.1/go.mod h1:Pv3WenDjI0v2Jl7UaMFIIbPOBbhn33RmmAmGgkXDoqY=
 github.com/aws/aws-sdk-go-v2/internal/ini v1.2.3 h1:NnXJXUz7oihrSlPKEM0yZ19b+7GQ47MX/LluLlEyE/Y=
 github.com/aws/aws-sdk-go-v2/internal/ini v1.2.3/go.mod h1:EES9ToeC3h063zCFDdqWGnARExNdULPaBvARm1FLwxA=
+github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.7.0 h1:vXZPcDQg7e5z2IKz0huei6zhfAxDoZdXej2o3jUbjCI=
+github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.7.0/go.mod h1:BlrFkwOhSgESkbdS+zJBy4+1mQ3f3Fq9Gp8nT+gaSwk=
+github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.5.2 h1:B120/boLr82yRaQFEPn9u01OwWMnc+xGvz5SOHfBrHY=
+github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.5.2/go.mod h1:td1djV1rAzEPcit9L8urGneIi2pYvtI7b/kfMWdpe84=
+github.com/aws/aws-sdk-go-v2/service/dynamodb v1.5.0 h1:SGwKUQaJudQQZE72dDQlL2FGuHNAEK1CyqKLTjh6mqE=
+github.com/aws/aws-sdk-go-v2/service/dynamodb v1.5.0/go.mod h1:XY5YhCS9SLul3JSQ08XG/nfxXxrkh6RR21XPq/J//NY=
+github.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.4.0 h1:QbFWJr2SAyVYvyoOHvJU6sCGLnqNT94ZbWElJMEI1JY=
+github.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.4.0/go.mod h1:bYsEP8w5YnbYyrx/Zi5hy4hTwRRQISSJS3RWrsGRijg=
 github.com/aws/aws-sdk-go-v2/service/ec2 v1.1.0 h1:+VnEgB1yp+7KlOsk6FXX/v/fU9uL5oSujIMkKQBBmp8=
 github.com/aws/aws-sdk-go-v2/service/ec2 v1.1.0/go.mod h1:/6514fU/SRcY3+ousB1zjUqiXjruSuti2qcfE70osOc=
 github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.2.2/go.mod h1:EASdTcM1lGhUe1/p4gkojHwlGJkeoRjjr1sRCzup3Is=
 github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.3.0 h1:gceOysEWNNwLd6cki65IMBZ4WAM0MwgBQq2n7kejoT8=
 github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.3.0/go.mod h1:v8ygadNyATSm6elwJ/4gzJwcFhri9RqS8skgHKiwXPU=
+github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.0.2/go.mod h1:Gej5xRE+MK0r35OnxJJ07iqQ5JC1avTW/4MwGfsC2io=
+github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.1.0 h1:QCPbsMPMcM4iGbui5SH6O4uxvZffPoBJ4CIGX7dU0l4=
+github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.1.0/go.mod h1:enkU5tq2HoXY+ZMiQprgF3Q83T3PbO77E83yXXzRZWE=
 github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.0.1/go.mod h1:PISaKWylTYAyruocNk4Lr9miOOJjOcVBd7twCPbydDk=
 github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.2.2/go.mod h1:NXmNI41bdEsJMrD0v9rUvbGCB5GwdBEpKvUvIY3vTFg=
+github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.2.3/go.mod h1:7gcsONBmFoCcKrAqrm95trrMd2+C/ReYKP7Vfu8yHHA=
 github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.1 h1:APEjhKZLFlNVLATnA/TJyA+w1r/xd5r5ACWBDZ9aIvc=
 github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.3.1/go.mod h1:Ve+eJOx9UWaT/lMVebnFhDhO49fSLVedHoA82+Rqme0=
 github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.5.2/go.mod h1:QuL2Ym8BkrLmN4lUofXYq6000/i5jPjosCNK//t6gak=
 github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.7.1 h1:YEz2KMyqK2zyG3uOa0l2xBc/H6NUVJir8FhwHQHF3rc=
 github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.7.1/go.mod h1:yg4EN/BKoc7+DLhNOxxdvoO3+iyW2FuynvaKqLcLDUM=
+github.com/aws/aws-sdk-go-v2/service/kinesis v1.6.0 h1:hb+NupVMUzINGUCfDs2+YqMkWKu47dBIQHpulM0XWh4=
+github.com/aws/aws-sdk-go-v2/service/kinesis v1.6.0/go.mod h1:9O7UG2pELnP0hq35+Gd7XDjOLBkg7tmgRQ0y14ZjoJI=
 github.com/aws/aws-sdk-go-v2/service/s3 v1.12.0/go.mod h1:6J++A5xpo7QDsIeSqPK4UHqMSyPOCopa+zKtqAMhqVQ=
 github.com/aws/aws-sdk-go-v2/service/s3 v1.16.0 h1:dt1JQFj/135ozwGIWeCM3aQ8N/kB3Xu3Uu4r9zuOIyc=
 github.com/aws/aws-sdk-go-v2/service/s3 v1.16.0/go.mod h1:Tk23mCmfL3wb3tNIeMk/0diUZ0W4R6uZtjYKguMLW2s=
 github.com/aws/aws-sdk-go-v2/service/sso v1.3.2/go.mod h1:J21I6kF+d/6XHVk7kp/cx9YVD2TMD2TbLwtRGVcinXo=
+github.com/aws/aws-sdk-go-v2/service/sso v1.3.3/go.mod h1:Jgw5O+SK7MZ2Yi9Yvzb4PggAPYaFSliiQuWR0hNjexk=
 github.com/aws/aws-sdk-go-v2/service/sso v1.4.1 h1:RfgQyv3bFT2Js6XokcrNtTjQ6wAVBRpoCgTFsypihHA=
 github.com/aws/aws-sdk-go-v2/service/sso v1.4.1/go.mod h1:ycPdbJZlM0BLhuBnd80WX9PucWPG88qps/2jl9HugXs=
 github.com/aws/aws-sdk-go-v2/service/sts v1.6.1/go.mod h1:hLZ/AnkIKHLuPGjEiyghNEdvJ2PP0MgOxcmv9EBJ4xs=
+github.com/aws/aws-sdk-go-v2/service/sts v1.6.2/go.mod h1:RBhoMJB8yFToaCnbe0jNq5Dcdy0jp6LhHqg55rjClkM=
 github.com/aws/aws-sdk-go-v2/service/sts v1.7.1 h1:7ce9ugapSgBapwLhg7AJTqKW5U92VRX3vX65k2tsB+g=
 github.com/aws/aws-sdk-go-v2/service/sts v1.7.1/go.mod h1:r1i8QwKPzwByXqZb3POQfBs7jozrdnHz8PVbsvyx73w=
+github.com/aws/aws-sdk-go-v2/service/timestreamwrite v1.3.2 h1:1s/RRA5Owuz4/G/eWCdCKgC+9zaz2vxFsRSwe7R3cPY=
+github.com/aws/aws-sdk-go-v2/service/timestreamwrite v1.3.2/go.mod h1:XoDkdZ5pBf2za2GWbFHQ8Ps0K8fRbmbwrHh7PF5xnzQ=
 github.com/aws/smithy-go v1.0.0/go.mod h1:EzMw8dbp/YJL4A5/sbhGddag+NPT7q084agLbB9LgIw=
 github.com/aws/smithy-go v1.7.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E=
 github.com/aws/smithy-go v1.8.0 h1:AEwwwXQZtUwP5Mz506FeXXrKBe0jA8gVM+1gEcSRooc=
 github.com/aws/smithy-go v1.8.0/go.mod h1:SObp3lf9smib00L/v3U2eAKG8FyQ7iLrJnQiAmR5n+E=
+github.com/awslabs/kinesis-aggregation/go v0.0.0-20210630091500-54e17340d32f h1:Pf0BjJDga7C98f0vhw+Ip5EaiE07S3lTKpIYPNS0nMo=
+github.com/awslabs/kinesis-aggregation/go v0.0.0-20210630091500-54e17340d32f/go.mod h1:SghidfnxvX7ribW6nHI7T+IBbc9puZ9kk5Tx/88h8P4=
+github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I=
+github.com/benbjohnson/clock v1.0.3/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM=
 github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
 github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
 github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
@@ -580,6 +643,7 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm
 github.com/dgryski/go-bitstream v0.0.0-20180413035011-3522498ce2c8/go.mod h1:VMaSuZ+SZcx/wljOQKvp5srsbCiKDEb6K2wC4+PiBmQ=
 github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
 github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
+github.com/dgryski/go-rendezvous v0.0.0-20200624174652-8d2f3be8b2d9/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
 github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
 github.com/dgryski/go-sip13 v0.0.0-20200911182023-62edffca9245/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
 github.com/digitalocean/godo v1.58.0/go.mod h1:p7dOjjtSBqCTUksqtA5Fd3uaKs9kyTq2xcz76ulEJRU=
@@ -819,6 +883,7 @@ github.com/go-playground/locales v0.12.1/go.mod h1:IUMDtCfWo/w/mtMfIE/IG2K+Ey3yg
 github.com/go-playground/universal-translator v0.16.0/go.mod h1:1AnU7NaIRDWWzGEKwgtJRd2xk99HeFyHw3yid4rvQIY=
 github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
 github.com/go-redis/redis v6.15.9+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA=
+github.com/go-redis/redis/v8 v8.0.0-beta.6/go.mod h1:g79Vpae8JMzg5qjk8BiwU9tK+HmU3iDVyS4UAJLFycI=
 github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
 github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
 github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
@@ -954,6 +1019,7 @@ github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZ
 github.com/golangci/misspell v0.3.5/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA=
 github.com/golangci/revgrep v0.0.0-20210208091834-cd28932614b5/go.mod h1:LK+zW4MpyytAWQRz0M4xnzEk50lSvqDQKfx304apFkY=
 github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ=
+github.com/gomodule/redigo v2.0.0+incompatible/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4=
 github.com/google/addlicense v0.0.0-20200906110928-a0294312aa76/go.mod h1:EMjYTRimagHs1FwlIqKyX3wAM0u3rA+McvlIIWmSamA=
 github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
 github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
@@ -1070,13 +1136,13 @@ github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFb
 github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645/go.mod h1:6iZfnjpejD4L/4DwD7NryNaJyCQdzwWwH2MWhCA90Kw=
 github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8=
 github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4=
-github.com/harlow/kinesis-consumer v0.3.1-0.20181230152818-2f58b136fee0 h1:U0KvGD9CJIl1nbgu9yLsfWxMT6WqL8fG0IBB7RvOZZQ=
-github.com/harlow/kinesis-consumer v0.3.1-0.20181230152818-2f58b136fee0/go.mod h1:dk23l2BruuUzRP8wbybQbPn3J7sZga2QHICCeaEy5rQ=
+github.com/harlow/kinesis-consumer v0.3.6-0.20210911031324-5a873d6e9fec h1:ya+kv1eNnd5QhcHuaj5g5eMq5Ra3VCNaPY2ZI7Aq91o=
+github.com/harlow/kinesis-consumer v0.3.6-0.20210911031324-5a873d6e9fec/go.mod h1:FIT1uhdVv2iXO0l6aACPZSVHxdth7RdmoT34jk9MEm0=
 github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
 github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE=
 github.com/hashicorp/consul/api v1.8.1/go.mod h1:sDjTOq0yUyv5G4h+BqSea7Fn6BU+XbolEz1952UB+mk=
-github.com/hashicorp/consul/api v1.11.0 h1:Hw/G8TtRvOElqxVIhBzXciiSTbapq8hZ2XKZsXk5ZCE=
-github.com/hashicorp/consul/api v1.11.0/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M=
+github.com/hashicorp/consul/api v1.9.1 h1:SngrdG2L62qqLsUz85qcPhFZ78rPf8tcD5qjMgs6MME=
+github.com/hashicorp/consul/api v1.9.1/go.mod h1:XjsvQN+RJGWI2TWy1/kqaE16HrR2J/FWgkYjdZQsX9M=
 github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
 github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
 github.com/hashicorp/consul/sdk v0.7.0/go.mod h1:fY08Y9z5SvJqevyZNy6WWPXiG3KwBPAvlcdx16zZ0fM=
@@ -1344,6 +1410,7 @@ github.com/leoluk/perflib_exporter v0.1.0/go.mod h1:rpV0lYj7lemdTm31t7zpCqYqPnw7
 github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
 github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
+github.com/lib/pq v1.7.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
 github.com/lib/pq v1.9.0 h1:L8nSXQQzAYByakOFMTwpjRoHsMJklur4Gi59b6VivR8=
 github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
 github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM=
@@ -1415,6 +1482,7 @@ github.com/mdlayher/netlink v1.1.0 h1:mpdLgm+brq10nI9zM1BpX1kpDbh3NLl3RSnVq6ZSkf
 github.com/mdlayher/netlink v1.1.0/go.mod h1:H4WCitaheIsdF9yOYu8CFmCgQthAPIWZmcKp9uZHgmY=
 github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81/go.mod h1:KQ7+USdGKfpPjXk4Ga+5XxQM4Lm4e3gAogrreFAYpOg=
 github.com/mgechev/revive v1.0.3/go.mod h1:POGGZagSo/0frdr7VeAifzS5Uka0d0GPiM35MsTO8nE=
+github.com/mgutz/ansi v0.0.0-20170206155736-9520e82c474b/go.mod h1:01TrycV0kFyexm33Z7vhZRXopbI8J3TDReVlkTgMUxE=
 github.com/microsoft/ApplicationInsights-Go v0.4.4 h1:G4+H9WNs6ygSCe6sUyxRc2U81TI5Es90b2t/MwX5KqY=
 github.com/microsoft/ApplicationInsights-Go v0.4.4/go.mod h1:fKRUseBqkw6bDiXTs3ESTiU/4YTIHsQS4W3fP2ieF4U=
 github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
@@ -1609,6 +1677,7 @@ github.com/opentracing/basictracer-go v1.0.0/go.mod h1:QfBfYuafItcjQuMwinw9GhYKw
 github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
 github.com/opentracing/opentracing-go v1.0.3-0.20180606204148-bd9c31933947/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
 github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
+github.com/opentracing/opentracing-go v1.1.1-0.20190913142402-a7454ce5950e/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
 github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs=
 github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
 github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5 h1:ZCnq+JUrvXcDVhX/xRolRBZifmabN1HcS1wrPSvxhrU=
@@ -1767,6 +1836,7 @@ github.com/robertkrimen/otto v0.0.0-20191219234010-c382bd3c16ff/go.mod h1:xvqspo
 github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
 github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
 github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
+github.com/rogpeppe/fastuuid v1.1.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
 github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
 github.com/rogpeppe/go-internal v1.1.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
 github.com/rogpeppe/go-internal v1.2.2/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
@@ -1846,11 +1916,14 @@ github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE
 github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
 github.com/sleepinggenius2/gosmi v0.4.3 h1:99Zwzy1Cvgsh396sw07oR2G4ab88ILGZFMxSlGWnR6o=
 github.com/sleepinggenius2/gosmi v0.4.3/go.mod h1:l8OniPmd3bJzw0MXP2/qh7AhP/e+bTY2CNivIhsnDT0=
-github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
 github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
+github.com/smartystreets/assertions v1.0.0 h1:UVQPSSmc3qtTi+zPPkCXvZX9VvW/xT/NsRvKfwY81a8=
+github.com/smartystreets/assertions v1.0.0/go.mod h1:kHHU4qYBaI3q23Pp3VPrmWhuIUrLW/7eUrw0BU5VaoM=
+github.com/smartystreets/go-aws-auth v0.0.0-20180515143844-0c1422d1fdb9/go.mod h1:SnhjPscd9TpLiy1LpzGSKh3bXCfxxXuqd9xmQJy3slM=
 github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
 github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
 github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
+github.com/smartystreets/gunit v1.0.0/go.mod h1:qwPWnhz6pn0NnRBP++URONOVyNkPyr4SauJk4cUOwJs=
 github.com/snowflakedb/gosnowflake v1.6.2 h1:drZkX7Ve3qr3lLD/f0vxwesgJZfNerivknAvPRAMy88=
 github.com/snowflakedb/gosnowflake v1.6.2/go.mod h1:k1Wq+O8dRD/jmFBLyStEv2OrgHoMFQpqHCRSy70P0dI=
 github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
@@ -1934,6 +2007,12 @@ github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94/go.mod h1:Qimiff
 github.com/tinylib/msgp v1.0.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
 github.com/tinylib/msgp v1.1.6 h1:i+SbKraHhnrf9M5MYmvQhFnbLhAXSDWF8WWsuyRdocw=
 github.com/tinylib/msgp v1.1.6/go.mod h1:75BAfg2hauQhs3qedfdDZmWAPcFMAvJE5b9rGOMufyw=
+github.com/tj/assert v0.0.0-20171129193455-018094318fb0/go.mod h1:mZ9/Rh9oLWpLLDRpvE+3b7gP/C2YyLFYxNmcLnPTMe0=
+github.com/tj/assert v0.0.3/go.mod h1:Ne6X72Q+TB1AteidzQncjw9PabbMp4PBMZ1k+vd1Pvk=
+github.com/tj/go-buffer v1.0.1/go.mod h1:iyiJpfFcR2B9sXu7KvjbT9fpM4mOelRSDTbntVj52Uc=
+github.com/tj/go-elastic v0.0.0-20171221160941-36157cbbebc2/go.mod h1:WjeM0Oo1eNAjXGDx2yma7uG2XoyRZTq1uv3M/o7imD0=
+github.com/tj/go-kinesis v0.0.0-20171128231115-08b17f58cb1b/go.mod h1:/yhzCV0xPfx6jb1bBgRFjl5lytqVqZXEaeqWP8lTEao=
+github.com/tj/go-spin v1.1.0/go.mod h1:Mg1mzmePZm4dva8Qz60H2lHwmJ2loum4VIrLgVnKwh4=
 github.com/tklauser/go-sysconf v0.3.5/go.mod h1:MkWzOF4RMCshBAMXuhXJs64Rte09mITnppBXY/rYEFI=
 github.com/tklauser/go-sysconf v0.3.9 h1:JeUVdAOWhhxVcU6Eqr/ATFHgXk/mmiItdKeJPev3vTo=
 github.com/tklauser/go-sysconf v0.3.9/go.mod h1:11DU/5sG7UexIrp/O6g35hrWzu0JxlwQ3LSFUzyeuhs=
@@ -2020,8 +2099,8 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
 github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
 github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
 github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
-github.com/yuin/gopher-lua v0.0.0-20180630135845-46796da1b0b4 h1:f6CCNiTjQZ0uWK4jPwhwYB8QIGGfn0ssD9kVzRUUUpk=
-github.com/yuin/gopher-lua v0.0.0-20180630135845-46796da1b0b4/go.mod h1:aEV29XrmTYFr3CiRxZeGHpkvbwq+prZduBqMaascyCU=
+github.com/yuin/gopher-lua v0.0.0-20200603152657-dc2b0ca8b37e h1:oIpIX9VKxSCFrfjsKpluGbNPBGq9iNnT9crH781j9wY=
+github.com/yuin/gopher-lua v0.0.0-20200603152657-dc2b0ca8b37e/go.mod h1:gqRgreBUhTSL0GeU64rtZ3Uq3wtjOa/TB2YfrtkCbVQ=
 github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs=
 github.com/yvasiyarov/gorelic v0.0.0-20141212073537-a9bba5b9ab50/go.mod h1:NUSPSUX/bi6SeDMUh6brw0nXpxHnc96TguQh0+r/ssA=
 github.com/yvasiyarov/newrelic_platform_go v0.0.0-20140908184405-b21fdbd4370f/go.mod h1:GlGEuHIJweS1mbCqG+7vt2nvWLzLLnRHbXz5JKd/Qbg=
@@ -2064,6 +2143,7 @@ go.opentelemetry.io/collector v0.28.0 h1:XmRwoSj3HZtC7O/12fBoQ9DInvwBwFHgHLZrwNx
 go.opentelemetry.io/collector v0.28.0/go.mod h1:AP/BTXwo1eedoJO7V+HQ68CSvJU1lcdqOzJCgt1VsNs=
 go.opentelemetry.io/collector/model v0.35.0 h1:NpKjghiqlei4ecwjOYOMhD6tj4gY8yiWHPJmbFs/ArI=
 go.opentelemetry.io/collector/model v0.35.0/go.mod h1:+7YCSjJG+MqiIFjauzt7oM2qkqBsaJWh5hcsO4fwsAc=
+go.opentelemetry.io/otel v0.7.0/go.mod h1:aZMyHG5TqDOXEgH2tyLiXSUKly1jT3yqE9PmrzIeCdo=
 go.opentelemetry.io/otel v1.0.0-RC3 h1:kvwiyEkiUT/JaadXzVLI/R1wDO934A7r3Bs2wEe6wqA=
 go.opentelemetry.io/otel v1.0.0-RC3/go.mod h1:Ka5j3ua8tZs4Rkq4Ex3hwgBgOchyPVq5S6P2lz//nKQ=
 go.opentelemetry.io/otel/exporters/otlp/otlpmetric v0.23.0 h1:vKIEsT6IJU0NYd+iZccjgCmk80zsa7dTiC2Bu7U1jz0=
@@ -2119,6 +2199,7 @@ golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACk
 golang.org/x/crypto v0.0.0-20190404164418-38d8ce5564a5/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
 golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
 golang.org/x/crypto v0.0.0-20190422162423-af44ce270edf/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
+golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20190530122614-20be4c3c3ed5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
@@ -2164,8 +2245,9 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
 golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
 golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
-golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5 h1:FR+oGxGfbQu1d+jglI3rCkjAjUnhRSZcUxr+DqlDLNo=
 golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw=
+golang.org/x/exp v0.0.0-20200513190911-00229845015e h1:rMqLP+9XLy+LdbCXHjJHAmTfXCr93W7oruWA6Hq1Alc=
+golang.org/x/exp v0.0.0-20200513190911-00229845015e/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw=
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs= golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
@ -2362,6 +2444,7 @@ golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20191003212358-c178f38b412c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191003212358-c178f38b412c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -2670,6 +2753,7 @@ google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
google.golang.org/genproto v0.0.0-20190927181202-20e1ac93f88c/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20190927181202-20e1ac93f88c/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
google.golang.org/genproto v0.0.0-20191009194640-548a555dbc03/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
@ -2839,6 +2923,7 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=

View File

@ -1,6 +1,7 @@
package cloudwatch package cloudwatch
import ( import (
"context"
"fmt" "fmt"
"net" "net"
"net/http" "net/http"
@ -9,8 +10,9 @@ import (
"sync" "sync"
"time" "time"
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go-v2/aws"
cwClient "github.com/aws/aws-sdk-go/service/cloudwatch" cwClient "github.com/aws/aws-sdk-go-v2/service/cloudwatch"
"github.com/aws/aws-sdk-go-v2/service/cloudwatch/types"
"github.com/influxdata/telegraf" "github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/config" "github.com/influxdata/telegraf/config"
@ -23,6 +25,14 @@ import (
"github.com/influxdata/telegraf/plugins/inputs" "github.com/influxdata/telegraf/plugins/inputs"
) )
const (
StatisticAverage = "Average"
StatisticMaximum = "Maximum"
StatisticMinimum = "Minimum"
StatisticSum = "Sum"
StatisticSampleCount = "SampleCount"
)
// CloudWatch contains the configuration and cache for the cloudwatch plugin. // CloudWatch contains the configuration and cache for the cloudwatch plugin.
type CloudWatch struct { type CloudWatch struct {
StatisticExclude []string `toml:"statistic_exclude"` StatisticExclude []string `toml:"statistic_exclude"`
@ -72,12 +82,12 @@ type metricCache struct {
ttl time.Duration ttl time.Duration
built time.Time built time.Time
metrics []filteredMetric metrics []filteredMetric
queries map[string][]*cwClient.MetricDataQuery queries map[string][]types.MetricDataQuery
} }
type cloudwatchClient interface { type cloudwatchClient interface {
ListMetrics(*cwClient.ListMetricsInput) (*cwClient.ListMetricsOutput, error) ListMetrics(context.Context, *cwClient.ListMetricsInput, ...func(*cwClient.Options)) (*cwClient.ListMetricsOutput, error)
GetMetricData(*cwClient.GetMetricDataInput) (*cwClient.GetMetricDataOutput, error) GetMetricData(context.Context, *cwClient.GetMetricDataInput, ...func(*cwClient.Options)) (*cwClient.GetMetricDataOutput, error)
} }
// SampleConfig returns the default configuration of the Cloudwatch input plugin. // SampleConfig returns the default configuration of the Cloudwatch input plugin.
@ -227,12 +237,12 @@ func (c *CloudWatch) Gather(acc telegraf.Accumulator) error {
wg := sync.WaitGroup{} wg := sync.WaitGroup{}
rLock := sync.Mutex{} rLock := sync.Mutex{}
results := map[string][]*cwClient.MetricDataResult{} results := map[string][]types.MetricDataResult{}
for namespace, namespacedQueries := range queries { for namespace, namespacedQueries := range queries {
// 500 is the maximum number of metric data queries a `GetMetricData` request can contain. // 500 is the maximum number of metric data queries a `GetMetricData` request can contain.
batchSize := 500 batchSize := 500
var batches [][]*cwClient.MetricDataQuery var batches [][]types.MetricDataQuery
for batchSize < len(namespacedQueries) { for batchSize < len(namespacedQueries) {
namespacedQueries, batches = namespacedQueries[batchSize:], append(batches, namespacedQueries[0:batchSize:batchSize]) namespacedQueries, batches = namespacedQueries[batchSize:], append(batches, namespacedQueries[0:batchSize:batchSize])
@ -242,7 +252,7 @@ func (c *CloudWatch) Gather(acc telegraf.Accumulator) error {
for i := range batches { for i := range batches {
wg.Add(1) wg.Add(1)
<-lmtr.C <-lmtr.C
go func(n string, inm []*cwClient.MetricDataQuery) { go func(n string, inm []types.MetricDataQuery) {
defer wg.Done() defer wg.Done()
result, err := c.gatherMetrics(c.getDataInputs(inm)) result, err := c.gatherMetrics(c.getDataInputs(inm))
if err != nil { if err != nil {
@ -268,8 +278,15 @@ func (c *CloudWatch) initializeCloudWatch() error {
return err return err
} }
cfg := &aws.Config{ cfg, err := c.CredentialConfig.Credentials()
HTTPClient: &http.Client{ if err != nil {
return err
}
c.client = cwClient.NewFromConfig(cfg, func(options *cwClient.Options) {
// Disable logging
options.ClientLogMode = 0
options.HTTPClient = &http.Client{
// use values from DefaultTransport // use values from DefaultTransport
Transport: &http.Transport{ Transport: &http.Transport{
Proxy: proxy, Proxy: proxy,
@ -284,15 +301,8 @@ func (c *CloudWatch) initializeCloudWatch() error {
ExpectContinueTimeout: 1 * time.Second, ExpectContinueTimeout: 1 * time.Second,
}, },
Timeout: time.Duration(c.Timeout), Timeout: time.Duration(c.Timeout),
}, }
} })
loglevel := aws.LogOff
p, err := c.CredentialConfig.Credentials()
if err != nil {
return err
}
c.client = cwClient.New(p, cfg.WithLogLevel(loglevel))
// Initialize regex matchers for each Dimension value. // Initialize regex matchers for each Dimension value.
for _, m := range c.Metrics { for _, m := range c.Metrics {
@ -310,7 +320,7 @@ func (c *CloudWatch) initializeCloudWatch() error {
} }
type filteredMetric struct { type filteredMetric struct {
metrics []*cwClient.Metric metrics []types.Metric
statFilter filter.Filter statFilter filter.Filter
} }
@ -325,18 +335,18 @@ func getFilteredMetrics(c *CloudWatch) ([]filteredMetric, error) {
// check for provided metric filter // check for provided metric filter
if c.Metrics != nil { if c.Metrics != nil {
for _, m := range c.Metrics { for _, m := range c.Metrics {
metrics := []*cwClient.Metric{} metrics := []types.Metric{}
if !hasWildcard(m.Dimensions) { if !hasWildcard(m.Dimensions) {
dimensions := make([]*cwClient.Dimension, len(m.Dimensions)) dimensions := make([]types.Dimension, len(m.Dimensions))
for k, d := range m.Dimensions { for k, d := range m.Dimensions {
dimensions[k] = &cwClient.Dimension{ dimensions[k] = types.Dimension{
Name: aws.String(d.Name), Name: aws.String(d.Name),
Value: aws.String(d.Value), Value: aws.String(d.Value),
} }
} }
for _, name := range m.MetricNames { for _, name := range m.MetricNames {
for _, namespace := range c.Namespaces { for _, namespace := range c.Namespaces {
metrics = append(metrics, &cwClient.Metric{ metrics = append(metrics, types.Metric{
Namespace: aws.String(namespace), Namespace: aws.String(namespace),
MetricName: aws.String(name), MetricName: aws.String(name),
Dimensions: dimensions, Dimensions: dimensions,
@ -352,7 +362,7 @@ func getFilteredMetrics(c *CloudWatch) ([]filteredMetric, error) {
for _, metric := range allMetrics { for _, metric := range allMetrics {
if isSelected(name, metric, m.Dimensions) { if isSelected(name, metric, m.Dimensions) {
for _, namespace := range c.Namespaces { for _, namespace := range c.Namespaces {
metrics = append(metrics, &cwClient.Metric{ metrics = append(metrics, types.Metric{
Namespace: aws.String(namespace), Namespace: aws.String(namespace),
MetricName: aws.String(name), MetricName: aws.String(name),
Dimensions: metric.Dimensions, Dimensions: metric.Dimensions,
@ -401,32 +411,24 @@ func getFilteredMetrics(c *CloudWatch) ([]filteredMetric, error) {
} }
// fetchNamespaceMetrics retrieves available metrics for a given CloudWatch namespace. // fetchNamespaceMetrics retrieves available metrics for a given CloudWatch namespace.
func (c *CloudWatch) fetchNamespaceMetrics() ([]*cwClient.Metric, error) { func (c *CloudWatch) fetchNamespaceMetrics() ([]types.Metric, error) {
metrics := []*cwClient.Metric{} metrics := []types.Metric{}
var token *string var token *string
var params *cwClient.ListMetricsInput
var recentlyActive *string
switch c.RecentlyActive { params := &cwClient.ListMetricsInput{
case "PT3H": Dimensions: []types.DimensionFilter{},
recentlyActive = &c.RecentlyActive NextToken: token,
default: MetricName: nil,
recentlyActive = nil }
if c.RecentlyActive == "PT3H" {
params.RecentlyActive = types.RecentlyActivePt3h
} }
for _, namespace := range c.Namespaces { for _, namespace := range c.Namespaces {
params.Namespace = aws.String(namespace)
params = &cwClient.ListMetricsInput{
Dimensions: []*cwClient.DimensionFilter{},
NextToken: token,
MetricName: nil,
RecentlyActive: recentlyActive,
Namespace: aws.String(namespace),
}
for { for {
resp, err := c.client.ListMetrics(params) resp, err := c.client.ListMetrics(context.Background(), params)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to list metrics with params per namespace: %v", err) return nil, fmt.Errorf("failed to list metrics with params per namespace: %v", err)
} }
@ -457,75 +459,75 @@ func (c *CloudWatch) updateWindow(relativeTo time.Time) {
} }
// getDataQueries gets all of the possible queries so we can maximize the request payload. // getDataQueries gets all of the possible queries so we can maximize the request payload.
func (c *CloudWatch) getDataQueries(filteredMetrics []filteredMetric) map[string][]*cwClient.MetricDataQuery { func (c *CloudWatch) getDataQueries(filteredMetrics []filteredMetric) map[string][]types.MetricDataQuery {
if c.metricCache != nil && c.metricCache.queries != nil && c.metricCache.isValid() { if c.metricCache != nil && c.metricCache.queries != nil && c.metricCache.isValid() {
return c.metricCache.queries return c.metricCache.queries
} }
c.queryDimensions = map[string]*map[string]string{} c.queryDimensions = map[string]*map[string]string{}
dataQueries := map[string][]*cwClient.MetricDataQuery{} dataQueries := map[string][]types.MetricDataQuery{}
for i, filtered := range filteredMetrics { for i, filtered := range filteredMetrics {
for j, metric := range filtered.metrics { for j, metric := range filtered.metrics {
id := strconv.Itoa(j) + "_" + strconv.Itoa(i) id := strconv.Itoa(j) + "_" + strconv.Itoa(i)
dimension := ctod(metric.Dimensions) dimension := ctod(metric.Dimensions)
if filtered.statFilter.Match("average") { if filtered.statFilter.Match("average") {
c.queryDimensions["average_"+id] = dimension c.queryDimensions["average_"+id] = dimension
dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], &cwClient.MetricDataQuery{ dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], types.MetricDataQuery{
Id: aws.String("average_" + id), Id: aws.String("average_" + id),
Label: aws.String(snakeCase(*metric.MetricName + "_average")), Label: aws.String(snakeCase(*metric.MetricName + "_average")),
MetricStat: &cwClient.MetricStat{ MetricStat: &types.MetricStat{
Metric: metric, Metric: &metric,
Period: aws.Int64(int64(time.Duration(c.Period).Seconds())), Period: aws.Int32(int32(time.Duration(c.Period).Seconds())),
Stat: aws.String(cwClient.StatisticAverage), Stat: aws.String(StatisticAverage),
}, },
}) })
} }
if filtered.statFilter.Match("maximum") { if filtered.statFilter.Match("maximum") {
c.queryDimensions["maximum_"+id] = dimension c.queryDimensions["maximum_"+id] = dimension
dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], &cwClient.MetricDataQuery{ dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], types.MetricDataQuery{
Id: aws.String("maximum_" + id), Id: aws.String("maximum_" + id),
Label: aws.String(snakeCase(*metric.MetricName + "_maximum")), Label: aws.String(snakeCase(*metric.MetricName + "_maximum")),
MetricStat: &cwClient.MetricStat{ MetricStat: &types.MetricStat{
Metric: metric, Metric: &metric,
Period: aws.Int64(int64(time.Duration(c.Period).Seconds())), Period: aws.Int32(int32(time.Duration(c.Period).Seconds())),
Stat: aws.String(cwClient.StatisticMaximum), Stat: aws.String(StatisticMaximum),
}, },
}) })
} }
if filtered.statFilter.Match("minimum") { if filtered.statFilter.Match("minimum") {
c.queryDimensions["minimum_"+id] = dimension c.queryDimensions["minimum_"+id] = dimension
dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], &cwClient.MetricDataQuery{ dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], types.MetricDataQuery{
Id: aws.String("minimum_" + id), Id: aws.String("minimum_" + id),
Label: aws.String(snakeCase(*metric.MetricName + "_minimum")), Label: aws.String(snakeCase(*metric.MetricName + "_minimum")),
MetricStat: &cwClient.MetricStat{ MetricStat: &types.MetricStat{
Metric: metric, Metric: &metric,
Period: aws.Int64(int64(time.Duration(c.Period).Seconds())), Period: aws.Int32(int32(time.Duration(c.Period).Seconds())),
Stat: aws.String(cwClient.StatisticMinimum), Stat: aws.String(StatisticMinimum),
}, },
}) })
} }
if filtered.statFilter.Match("sum") { if filtered.statFilter.Match("sum") {
c.queryDimensions["sum_"+id] = dimension c.queryDimensions["sum_"+id] = dimension
dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], &cwClient.MetricDataQuery{ dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], types.MetricDataQuery{
Id: aws.String("sum_" + id), Id: aws.String("sum_" + id),
Label: aws.String(snakeCase(*metric.MetricName + "_sum")), Label: aws.String(snakeCase(*metric.MetricName + "_sum")),
MetricStat: &cwClient.MetricStat{ MetricStat: &types.MetricStat{
Metric: metric, Metric: &metric,
Period: aws.Int64(int64(time.Duration(c.Period).Seconds())), Period: aws.Int32(int32(time.Duration(c.Period).Seconds())),
Stat: aws.String(cwClient.StatisticSum), Stat: aws.String(StatisticSum),
}, },
}) })
} }
if filtered.statFilter.Match("sample_count") { if filtered.statFilter.Match("sample_count") {
c.queryDimensions["sample_count_"+id] = dimension c.queryDimensions["sample_count_"+id] = dimension
dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], &cwClient.MetricDataQuery{ dataQueries[*metric.Namespace] = append(dataQueries[*metric.Namespace], types.MetricDataQuery{
Id: aws.String("sample_count_" + id), Id: aws.String("sample_count_" + id),
Label: aws.String(snakeCase(*metric.MetricName + "_sample_count")), Label: aws.String(snakeCase(*metric.MetricName + "_sample_count")),
MetricStat: &cwClient.MetricStat{ MetricStat: &types.MetricStat{
Metric: metric, Metric: &metric,
Period: aws.Int64(int64(time.Duration(c.Period).Seconds())), Period: aws.Int32(int32(time.Duration(c.Period).Seconds())),
Stat: aws.String(cwClient.StatisticSampleCount), Stat: aws.String(StatisticSampleCount),
}, },
}) })
} }
@ -553,11 +555,11 @@ func (c *CloudWatch) getDataQueries(filteredMetrics []filteredMetric) map[string
// gatherMetrics gets metric data from Cloudwatch. // gatherMetrics gets metric data from Cloudwatch.
func (c *CloudWatch) gatherMetrics( func (c *CloudWatch) gatherMetrics(
params *cwClient.GetMetricDataInput, params *cwClient.GetMetricDataInput,
) ([]*cwClient.MetricDataResult, error) { ) ([]types.MetricDataResult, error) {
results := []*cwClient.MetricDataResult{} results := []types.MetricDataResult{}
for { for {
resp, err := c.client.GetMetricData(params) resp, err := c.client.GetMetricData(context.Background(), params)
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to get metric data: %v", err) return nil, fmt.Errorf("failed to get metric data: %v", err)
} }
@ -574,7 +576,7 @@ func (c *CloudWatch) gatherMetrics(
func (c *CloudWatch) aggregateMetrics( func (c *CloudWatch) aggregateMetrics(
acc telegraf.Accumulator, acc telegraf.Accumulator,
metricDataResults map[string][]*cwClient.MetricDataResult, metricDataResults map[string][]types.MetricDataResult,
) error { ) error {
var ( var (
grouper = internalMetric.NewSeriesGrouper() grouper = internalMetric.NewSeriesGrouper()
@ -592,7 +594,7 @@ func (c *CloudWatch) aggregateMetrics(
tags["region"] = c.Region tags["region"] = c.Region
for i := range result.Values { for i := range result.Values {
if err := grouper.Add(namespace, tags, *result.Timestamps[i], *result.Label, *result.Values[i]); err != nil { if err := grouper.Add(namespace, tags, result.Timestamps[i], *result.Label, result.Values[i]); err != nil {
acc.AddError(err) acc.AddError(err)
} }
} }
@ -635,7 +637,7 @@ func snakeCase(s string) string {
} }
// ctod converts cloudwatch dimensions to regular dimensions. // ctod converts cloudwatch dimensions to regular dimensions.
func ctod(cDimensions []*cwClient.Dimension) *map[string]string { func ctod(cDimensions []types.Dimension) *map[string]string {
dimensions := map[string]string{} dimensions := map[string]string{}
for i := range cDimensions { for i := range cDimensions {
dimensions[snakeCase(*cDimensions[i].Name)] = *cDimensions[i].Value dimensions[snakeCase(*cDimensions[i].Name)] = *cDimensions[i].Value
@ -643,7 +645,7 @@ func ctod(cDimensions []*cwClient.Dimension) *map[string]string {
return &dimensions return &dimensions
} }
func (c *CloudWatch) getDataInputs(dataQueries []*cwClient.MetricDataQuery) *cwClient.GetMetricDataInput { func (c *CloudWatch) getDataInputs(dataQueries []types.MetricDataQuery) *cwClient.GetMetricDataInput {
return &cwClient.GetMetricDataInput{ return &cwClient.GetMetricDataInput{
StartTime: aws.Time(c.windowStart), StartTime: aws.Time(c.windowStart),
EndTime: aws.Time(c.windowEnd), EndTime: aws.Time(c.windowEnd),
@ -665,7 +667,7 @@ func hasWildcard(dimensions []*Dimension) bool {
return false return false
} }
func isSelected(name string, metric *cwClient.Metric, dimensions []*Dimension) bool { func isSelected(name string, metric types.Metric, dimensions []*Dimension) bool {
if name != *metric.MetricName { if name != *metric.MetricName {
return false return false
} }
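
For reference, a minimal, self-contained sketch (not part of the commit) of the v2 call pattern the rewritten input now relies on: the client is built with NewFromConfig, queries are value-typed types.MetricDataQuery, and every API call takes an explicit context. The namespace, metric name, period, and time window below are illustrative placeholders.

package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/service/cloudwatch"
	"github.com/aws/aws-sdk-go-v2/service/cloudwatch/types"
)

func main() {
	ctx := context.Background()

	// v2 replaces session.NewSession with config.LoadDefaultConfig.
	cfg, err := config.LoadDefaultConfig(ctx)
	if err != nil {
		log.Fatal(err)
	}
	client := cloudwatch.NewFromConfig(cfg)

	// Value-typed query structs replace the v1 pointer slices.
	query := types.MetricDataQuery{
		Id:    aws.String("average_0_0"),
		Label: aws.String("latency_average"),
		MetricStat: &types.MetricStat{
			Metric: &types.Metric{
				Namespace:  aws.String("AWS/ELB"),
				MetricName: aws.String("Latency"),
			},
			Period: aws.Int32(60), // int32 in v2 instead of int64
			Stat:   aws.String("Average"),
		},
	}

	end := time.Now()
	resp, err := client.GetMetricData(ctx, &cloudwatch.GetMetricDataInput{
		StartTime:         aws.Time(end.Add(-5 * time.Minute)),
		EndTime:           aws.Time(end),
		MetricDataQueries: []types.MetricDataQuery{query},
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, r := range resp.MetricDataResults {
		// Timestamps and Values are plain slices in v2, not slices of pointers.
		fmt.Println(aws.ToString(r.Label), r.Timestamps, r.Values)
	}
}
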

View File

@ -1,12 +1,14 @@
package cloudwatch package cloudwatch
import ( import (
"context"
"net/http" "net/http"
"testing" "testing"
"time" "time"
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go-v2/aws"
cwClient "github.com/aws/aws-sdk-go/service/cloudwatch" cwClient "github.com/aws/aws-sdk-go-v2/service/cloudwatch"
"github.com/aws/aws-sdk-go-v2/service/cloudwatch/types"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/influxdata/telegraf/config" "github.com/influxdata/telegraf/config"
@ -18,13 +20,13 @@ import (
type mockGatherCloudWatchClient struct{} type mockGatherCloudWatchClient struct{}
func (m *mockGatherCloudWatchClient) ListMetrics(params *cwClient.ListMetricsInput) (*cwClient.ListMetricsOutput, error) { func (m *mockGatherCloudWatchClient) ListMetrics(_ context.Context, params *cwClient.ListMetricsInput, _ ...func(*cwClient.Options)) (*cwClient.ListMetricsOutput, error) {
return &cwClient.ListMetricsOutput{ return &cwClient.ListMetricsOutput{
Metrics: []*cwClient.Metric{ Metrics: []types.Metric{
{ {
Namespace: params.Namespace, Namespace: params.Namespace,
MetricName: aws.String("Latency"), MetricName: aws.String("Latency"),
Dimensions: []*cwClient.Dimension{ Dimensions: []types.Dimension{
{ {
Name: aws.String("LoadBalancerName"), Name: aws.String("LoadBalancerName"),
Value: aws.String("p-example"), Value: aws.String("p-example"),
@ -35,63 +37,53 @@ func (m *mockGatherCloudWatchClient) ListMetrics(params *cwClient.ListMetricsInp
}, nil }, nil
} }
func (m *mockGatherCloudWatchClient) GetMetricData(params *cwClient.GetMetricDataInput) (*cwClient.GetMetricDataOutput, error) { func (m *mockGatherCloudWatchClient) GetMetricData(_ context.Context, params *cwClient.GetMetricDataInput, _ ...func(*cwClient.Options)) (*cwClient.GetMetricDataOutput, error) {
return &cwClient.GetMetricDataOutput{ return &cwClient.GetMetricDataOutput{
MetricDataResults: []*cwClient.MetricDataResult{ MetricDataResults: []types.MetricDataResult{
{ {
Id: aws.String("minimum_0_0"), Id: aws.String("minimum_0_0"),
Label: aws.String("latency_minimum"), Label: aws.String("latency_minimum"),
StatusCode: aws.String("completed"), StatusCode: types.StatusCodeComplete,
Timestamps: []*time.Time{ Timestamps: []time.Time{
params.EndTime, *params.EndTime,
},
Values: []*float64{
aws.Float64(0.1),
}, },
Values: []float64{0.1},
}, },
{ {
Id: aws.String("maximum_0_0"), Id: aws.String("maximum_0_0"),
Label: aws.String("latency_maximum"), Label: aws.String("latency_maximum"),
StatusCode: aws.String("completed"), StatusCode: types.StatusCodeComplete,
Timestamps: []*time.Time{ Timestamps: []time.Time{
params.EndTime, *params.EndTime,
},
Values: []*float64{
aws.Float64(0.3),
}, },
Values: []float64{0.3},
}, },
{ {
Id: aws.String("average_0_0"), Id: aws.String("average_0_0"),
Label: aws.String("latency_average"), Label: aws.String("latency_average"),
StatusCode: aws.String("completed"), StatusCode: types.StatusCodeComplete,
Timestamps: []*time.Time{ Timestamps: []time.Time{
params.EndTime, *params.EndTime,
},
Values: []*float64{
aws.Float64(0.2),
}, },
Values: []float64{0.2},
}, },
{ {
Id: aws.String("sum_0_0"), Id: aws.String("sum_0_0"),
Label: aws.String("latency_sum"), Label: aws.String("latency_sum"),
StatusCode: aws.String("completed"), StatusCode: types.StatusCodeComplete,
Timestamps: []*time.Time{ Timestamps: []time.Time{
params.EndTime, *params.EndTime,
},
Values: []*float64{
aws.Float64(123),
}, },
Values: []float64{123},
}, },
{ {
Id: aws.String("sample_count_0_0"), Id: aws.String("sample_count_0_0"),
Label: aws.String("latency_sample_count"), Label: aws.String("latency_sample_count"),
StatusCode: aws.String("completed"), StatusCode: types.StatusCodeComplete,
Timestamps: []*time.Time{ Timestamps: []time.Time{
params.EndTime, *params.EndTime,
},
Values: []*float64{
aws.Float64(100),
}, },
Values: []float64{100},
}, },
}, },
}, nil }, nil
@ -158,8 +150,8 @@ func TestGather_MultipleNamespaces(t *testing.T) {
type mockSelectMetricsCloudWatchClient struct{} type mockSelectMetricsCloudWatchClient struct{}
func (m *mockSelectMetricsCloudWatchClient) ListMetrics(_ *cwClient.ListMetricsInput) (*cwClient.ListMetricsOutput, error) { func (m *mockSelectMetricsCloudWatchClient) ListMetrics(_ context.Context, params *cwClient.ListMetricsInput, _ ...func(*cwClient.Options)) (*cwClient.ListMetricsOutput, error) {
metrics := []*cwClient.Metric{} metrics := []types.Metric{}
// 4 metrics are available // 4 metrics are available
metricNames := []string{"Latency", "RequestCount", "HealthyHostCount", "UnHealthyHostCount"} metricNames := []string{"Latency", "RequestCount", "HealthyHostCount", "UnHealthyHostCount"}
// for 3 ELBs // for 3 ELBs
@ -169,10 +161,10 @@ func (m *mockSelectMetricsCloudWatchClient) ListMetrics(_ *cwClient.ListMetricsI
for _, m := range metricNames { for _, m := range metricNames {
for _, lb := range loadBalancers { for _, lb := range loadBalancers {
// For each metric/ELB pair, we get an aggregate value across all AZs. // For each metric/ELB pair, we get an aggregate value across all AZs.
metrics = append(metrics, &cwClient.Metric{ metrics = append(metrics, types.Metric{
Namespace: aws.String("AWS/ELB"), Namespace: aws.String("AWS/ELB"),
MetricName: aws.String(m), MetricName: aws.String(m),
Dimensions: []*cwClient.Dimension{ Dimensions: []types.Dimension{
{ {
Name: aws.String("LoadBalancerName"), Name: aws.String("LoadBalancerName"),
Value: aws.String(lb), Value: aws.String(lb),
@ -181,10 +173,10 @@ func (m *mockSelectMetricsCloudWatchClient) ListMetrics(_ *cwClient.ListMetricsI
}) })
for _, az := range availabilityZones { for _, az := range availabilityZones {
// We get a metric for each metric/ELB/AZ triplet. // We get a metric for each metric/ELB/AZ triplet.
metrics = append(metrics, &cwClient.Metric{ metrics = append(metrics, types.Metric{
Namespace: aws.String("AWS/ELB"), Namespace: aws.String("AWS/ELB"),
MetricName: aws.String(m), MetricName: aws.String(m),
Dimensions: []*cwClient.Dimension{ Dimensions: []types.Dimension{
{ {
Name: aws.String("LoadBalancerName"), Name: aws.String("LoadBalancerName"),
Value: aws.String(lb), Value: aws.String(lb),
@ -205,7 +197,7 @@ func (m *mockSelectMetricsCloudWatchClient) ListMetrics(_ *cwClient.ListMetricsI
return result, nil return result, nil
} }
func (m *mockSelectMetricsCloudWatchClient) GetMetricData(_ *cwClient.GetMetricDataInput) (*cwClient.GetMetricDataOutput, error) { func (m *mockSelectMetricsCloudWatchClient) GetMetricData(_ context.Context, params *cwClient.GetMetricDataInput, _ ...func(*cwClient.Options)) (*cwClient.GetMetricDataOutput, error) {
return nil, nil return nil, nil
} }
@ -246,16 +238,16 @@ func TestSelectMetrics(t *testing.T) {
} }
func TestGenerateStatisticsInputParams(t *testing.T) { func TestGenerateStatisticsInputParams(t *testing.T) {
d := &cwClient.Dimension{ d := types.Dimension{
Name: aws.String("LoadBalancerName"), Name: aws.String("LoadBalancerName"),
Value: aws.String("p-example"), Value: aws.String("p-example"),
} }
namespace := "AWS/ELB" namespace := "AWS/ELB"
m := &cwClient.Metric{ m := types.Metric{
MetricName: aws.String("Latency"), MetricName: aws.String("Latency"),
Dimensions: []*cwClient.Dimension{d}, Dimensions: []types.Dimension{d},
Namespace: &namespace, Namespace: aws.String(namespace),
} }
duration, _ := time.ParseDuration("1m") duration, _ := time.ParseDuration("1m")
@ -274,7 +266,7 @@ func TestGenerateStatisticsInputParams(t *testing.T) {
c.updateWindow(now) c.updateWindow(now)
statFilter, _ := filter.NewIncludeExcludeFilter(nil, nil) statFilter, _ := filter.NewIncludeExcludeFilter(nil, nil)
queries := c.getDataQueries([]filteredMetric{{metrics: []*cwClient.Metric{m}, statFilter: statFilter}}) queries := c.getDataQueries([]filteredMetric{{metrics: []types.Metric{m}, statFilter: statFilter}})
params := c.getDataInputs(queries[namespace]) params := c.getDataInputs(queries[namespace])
require.EqualValues(t, *params.EndTime, now.Add(-time.Duration(c.Delay))) require.EqualValues(t, *params.EndTime, now.Add(-time.Duration(c.Delay)))
@ -285,16 +277,16 @@ func TestGenerateStatisticsInputParams(t *testing.T) {
} }
func TestGenerateStatisticsInputParamsFiltered(t *testing.T) { func TestGenerateStatisticsInputParamsFiltered(t *testing.T) {
d := &cwClient.Dimension{ d := types.Dimension{
Name: aws.String("LoadBalancerName"), Name: aws.String("LoadBalancerName"),
Value: aws.String("p-example"), Value: aws.String("p-example"),
} }
namespace := "AWS/ELB" namespace := "AWS/ELB"
m := &cwClient.Metric{ m := types.Metric{
MetricName: aws.String("Latency"), MetricName: aws.String("Latency"),
Dimensions: []*cwClient.Dimension{d}, Dimensions: []types.Dimension{d},
Namespace: &namespace, Namespace: aws.String(namespace),
} }
duration, _ := time.ParseDuration("1m") duration, _ := time.ParseDuration("1m")
@ -313,7 +305,7 @@ func TestGenerateStatisticsInputParamsFiltered(t *testing.T) {
c.updateWindow(now) c.updateWindow(now)
statFilter, _ := filter.NewIncludeExcludeFilter([]string{"average", "sample_count"}, nil) statFilter, _ := filter.NewIncludeExcludeFilter([]string{"average", "sample_count"}, nil)
queries := c.getDataQueries([]filteredMetric{{metrics: []*cwClient.Metric{m}, statFilter: statFilter}}) queries := c.getDataQueries([]filteredMetric{{metrics: []types.Metric{m}, statFilter: statFilter}})
params := c.getDataInputs(queries[namespace]) params := c.getDataInputs(queries[namespace])
require.EqualValues(t, *params.EndTime, now.Add(-time.Duration(c.Delay))) require.EqualValues(t, *params.EndTime, now.Add(-time.Duration(c.Delay)))
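
A small follow-up sketch (not part of the commit; assumes the same cloudwatch test package): since the mocks now carry the context-aware, variadic-options signatures, compile-time assertions can document that they still satisfy the plugin's cloudwatchClient interface.

// Compile-time checks that the test doubles match the interface used by the plugin.
var (
	_ cloudwatchClient = (*mockGatherCloudWatchClient)(nil)
	_ cloudwatchClient = (*mockSelectMetricsCloudWatchClient)(nil)
)
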

View File

@ -12,10 +12,10 @@ import (
"sync" "sync"
"time" "time"
"github.com/aws/aws-sdk-go/service/dynamodb" "github.com/aws/aws-sdk-go-v2/service/dynamodb"
"github.com/aws/aws-sdk-go/service/kinesis" "github.com/aws/aws-sdk-go-v2/service/kinesis"
consumer "github.com/harlow/kinesis-consumer" consumer "github.com/harlow/kinesis-consumer"
"github.com/harlow/kinesis-consumer/checkpoint/ddb" "github.com/harlow/kinesis-consumer/store/ddb"
"github.com/influxdata/telegraf" "github.com/influxdata/telegraf"
internalaws "github.com/influxdata/telegraf/config/aws" internalaws "github.com/influxdata/telegraf/config/aws"
@ -44,7 +44,7 @@ type (
acc telegraf.TrackingAccumulator acc telegraf.TrackingAccumulator
sem chan struct{} sem chan struct{}
checkpoint consumer.Checkpoint checkpoint consumer.Store
checkpoints map[string]checkpoint checkpoints map[string]checkpoint
records map[telegraf.TrackingID]string records map[telegraf.TrackingID]string
checkpointTex sync.Mutex checkpointTex sync.Mutex
@ -153,31 +153,19 @@ func (k *KinesisConsumer) SetParser(parser parsers.Parser) {
} }
func (k *KinesisConsumer) connect(ac telegraf.Accumulator) error { func (k *KinesisConsumer) connect(ac telegraf.Accumulator) error {
p, err := k.CredentialConfig.Credentials() cfg, err := k.CredentialConfig.Credentials()
if err != nil { if err != nil {
return err return err
} }
client := kinesis.New(p) client := kinesis.NewFromConfig(cfg)
k.checkpoint = &noopCheckpoint{} k.checkpoint = &noopStore{}
if k.DynamoDB != nil { if k.DynamoDB != nil {
p, err := (&internalaws.CredentialConfig{ var err error
Region: k.Region,
AccessKey: k.AccessKey,
SecretKey: k.SecretKey,
RoleARN: k.RoleARN,
Profile: k.Profile,
Filename: k.Filename,
Token: k.Token,
EndpointURL: k.EndpointURL,
}).Credentials()
if err != nil {
return err
}
k.checkpoint, err = ddb.New( k.checkpoint, err = ddb.New(
k.DynamoDB.AppName, k.DynamoDB.AppName,
k.DynamoDB.TableName, k.DynamoDB.TableName,
ddb.WithDynamoClient(dynamodb.New(p)), ddb.WithDynamoClient(dynamodb.NewFromConfig(cfg)),
ddb.WithMaxInterval(time.Second*10), ddb.WithMaxInterval(time.Second*10),
) )
if err != nil { if err != nil {
@ -189,7 +177,7 @@ func (k *KinesisConsumer) connect(ac telegraf.Accumulator) error {
k.StreamName, k.StreamName,
consumer.WithClient(client), consumer.WithClient(client),
consumer.WithShardIteratorType(k.ShardIteratorType), consumer.WithShardIteratorType(k.ShardIteratorType),
consumer.WithCheckpoint(k), consumer.WithStore(k),
) )
if err != nil { if err != nil {
return err return err
@ -214,10 +202,10 @@ func (k *KinesisConsumer) connect(ac telegraf.Accumulator) error {
k.wg.Add(1) k.wg.Add(1)
go func() { go func() {
defer k.wg.Done() defer k.wg.Done()
err := k.cons.Scan(ctx, func(r *consumer.Record) consumer.ScanStatus { err := k.cons.Scan(ctx, func(r *consumer.Record) error {
select { select {
case <-ctx.Done(): case <-ctx.Done():
return consumer.ScanStatus{Error: ctx.Err()} return ctx.Err()
case k.sem <- struct{}{}: case k.sem <- struct{}{}:
break break
} }
@ -227,7 +215,7 @@ func (k *KinesisConsumer) connect(ac telegraf.Accumulator) error {
k.Log.Errorf("Scan parser error: %s", err.Error()) k.Log.Errorf("Scan parser error: %s", err.Error())
} }
return consumer.ScanStatus{} return nil
}) })
if err != nil { if err != nil {
k.cancel() k.cancel()
@ -298,7 +286,7 @@ func (k *KinesisConsumer) onDelivery(ctx context.Context) {
} }
k.lastSeqNum = strToBint(sequenceNum) k.lastSeqNum = strToBint(sequenceNum)
if err := k.checkpoint.Set(chk.streamName, chk.shardID, sequenceNum); err != nil { if err := k.checkpoint.SetCheckpoint(chk.streamName, chk.shardID, sequenceNum); err != nil {
k.Log.Debug("Setting checkpoint failed: %v", err) k.Log.Debug("Setting checkpoint failed: %v", err)
} }
} else { } else {
@ -332,13 +320,13 @@ func (k *KinesisConsumer) Gather(acc telegraf.Accumulator) error {
return nil return nil
} }
// Get wraps the checkpoint's Get function (called by consumer library) // GetCheckpoint wraps the checkpoint's GetCheckpoint function (called by consumer library)
func (k *KinesisConsumer) Get(streamName, shardID string) (string, error) { func (k *KinesisConsumer) GetCheckpoint(streamName, shardID string) (string, error) {
return k.checkpoint.Get(streamName, shardID) return k.checkpoint.GetCheckpoint(streamName, shardID)
} }
// Set wraps the checkpoint's Set function (called by consumer library) // SetCheckpoint wraps the checkpoint's SetCheckpoint function (called by consumer library)
func (k *KinesisConsumer) Set(streamName, shardID, sequenceNumber string) error { func (k *KinesisConsumer) SetCheckpoint(streamName, shardID, sequenceNumber string) error {
if sequenceNumber == "" { if sequenceNumber == "" {
return fmt.Errorf("sequence number should not be empty") return fmt.Errorf("sequence number should not be empty")
} }
@ -390,10 +378,10 @@ func (k *KinesisConsumer) Init() error {
return k.configureProcessContentEncodingFunc() return k.configureProcessContentEncodingFunc()
} }
type noopCheckpoint struct{} type noopStore struct{}
func (n noopCheckpoint) Set(string, string, string) error { return nil } func (n noopStore) SetCheckpoint(string, string, string) error { return nil }
func (n noopCheckpoint) Get(string, string) (string, error) { return "", nil } func (n noopStore) GetCheckpoint(string, string) (string, error) { return "", nil }
func init() { func init() {
negOne, _ = new(big.Int).SetString("-1", 10) negOne, _ = new(big.Int).SetString("-1", 10)
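
A minimal sketch (not part of the commit) of the store contract implied by this change: the consumer library's checkpoint API is now a store with GetCheckpoint/SetCheckpoint, which is why the plugin renames its methods and its no-op type. The Store interface below is reconstructed from the calls in the diff and the in-memory implementation is purely illustrative; treat both as an approximation of the upstream github.com/harlow/kinesis-consumer API.

package main

import (
	"fmt"
	"sync"
)

// Store mirrors the checkpoint contract used by the plugin above
// (noopStore and KinesisConsumer both implement it).
type Store interface {
	SetCheckpoint(streamName, shardID, sequenceNumber string) error
	GetCheckpoint(streamName, shardID string) (string, error)
}

// memoryStore is an illustrative in-memory implementation.
type memoryStore struct {
	mu   sync.Mutex
	seqs map[string]string
}

func (m *memoryStore) SetCheckpoint(stream, shard, seq string) error {
	m.mu.Lock()
	defer m.mu.Unlock()
	if m.seqs == nil {
		m.seqs = map[string]string{}
	}
	m.seqs[stream+"/"+shard] = seq
	return nil
}

func (m *memoryStore) GetCheckpoint(stream, shard string) (string, error) {
	m.mu.Lock()
	defer m.mu.Unlock()
	return m.seqs[stream+"/"+shard], nil
}

func main() {
	var s Store = &memoryStore{}
	_ = s.SetCheckpoint("my-stream", "shard-000", "12345")
	seq, _ := s.GetCheckpoint("my-stream", "shard-000")
	fmt.Println("resume after sequence number:", seq)
}
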

View File

@ -2,7 +2,8 @@ package kinesis_consumer
import ( import (
"encoding/base64" "encoding/base64"
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/service/kinesis/types"
consumer "github.com/harlow/kinesis-consumer" consumer "github.com/harlow/kinesis-consumer"
"github.com/influxdata/telegraf" "github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/plugins/parsers" "github.com/influxdata/telegraf/plugins/parsers"
@ -53,7 +54,12 @@ func TestKinesisConsumer_onMessage(t *testing.T) {
records: make(map[telegraf.TrackingID]string), records: make(map[telegraf.TrackingID]string),
}, },
args: args{ args: args{
r: &consumer.Record{Data: notZippedBytes, SequenceNumber: aws.String("anything")}, r: &consumer.Record{
Record: types.Record{
Data: notZippedBytes,
SequenceNumber: aws.String("anything"),
},
},
}, },
wantErr: false, wantErr: false,
expected: expected{ expected: expected{
@ -69,7 +75,12 @@ func TestKinesisConsumer_onMessage(t *testing.T) {
records: make(map[telegraf.TrackingID]string), records: make(map[telegraf.TrackingID]string),
}, },
args: args{ args: args{
r: &consumer.Record{Data: notZippedBytes, SequenceNumber: aws.String("anything")}, r: &consumer.Record{
Record: types.Record{
Data: notZippedBytes,
SequenceNumber: aws.String("anything"),
},
},
}, },
wantErr: false, wantErr: false,
expected: expected{ expected: expected{
@ -85,7 +96,12 @@ func TestKinesisConsumer_onMessage(t *testing.T) {
records: make(map[telegraf.TrackingID]string), records: make(map[telegraf.TrackingID]string),
}, },
args: args{ args: args{
r: &consumer.Record{Data: notZippedBytes, SequenceNumber: aws.String("anything")}, r: &consumer.Record{
Record: types.Record{
Data: notZippedBytes,
SequenceNumber: aws.String("anything"),
},
},
}, },
wantErr: false, wantErr: false,
expected: expected{ expected: expected{
@ -100,7 +116,12 @@ func TestKinesisConsumer_onMessage(t *testing.T) {
records: make(map[telegraf.TrackingID]string), records: make(map[telegraf.TrackingID]string),
}, },
args: args{ args: args{
r: &consumer.Record{Data: notZippedBytes, SequenceNumber: aws.String("anything")}, r: &consumer.Record{
Record: types.Record{
Data: notZippedBytes,
SequenceNumber: aws.String("anything"),
},
},
}, },
wantErr: false, wantErr: false,
expected: expected{ expected: expected{
@ -116,7 +137,12 @@ func TestKinesisConsumer_onMessage(t *testing.T) {
records: make(map[telegraf.TrackingID]string), records: make(map[telegraf.TrackingID]string),
}, },
args: args{ args: args{
r: &consumer.Record{Data: gzippedBytes, SequenceNumber: aws.String("anything")}, r: &consumer.Record{
Record: types.Record{
Data: gzippedBytes,
SequenceNumber: aws.String("anything"),
},
},
}, },
wantErr: false, wantErr: false,
expected: expected{ expected: expected{
@ -132,7 +158,12 @@ func TestKinesisConsumer_onMessage(t *testing.T) {
records: make(map[telegraf.TrackingID]string), records: make(map[telegraf.TrackingID]string),
}, },
args: args{ args: args{
r: &consumer.Record{Data: zlibBytpes, SequenceNumber: aws.String("anything")}, r: &consumer.Record{
Record: types.Record{
Data: zlibBytpes,
SequenceNumber: aws.String("anything"),
},
},
}, },
wantErr: false, wantErr: false,
expected: expected{ expected: expected{

View File

@ -1,13 +1,15 @@
package cloudwatch package cloudwatch
import ( import (
"context"
"math" "math"
"sort" "sort"
"strings" "strings"
"time" "time"
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go/service/cloudwatch" "github.com/aws/aws-sdk-go-v2/service/cloudwatch"
"github.com/aws/aws-sdk-go-v2/service/cloudwatch/types"
"github.com/influxdata/telegraf" "github.com/influxdata/telegraf"
internalaws "github.com/influxdata/telegraf/config/aws" internalaws "github.com/influxdata/telegraf/config/aws"
@ -17,7 +19,7 @@ import (
type CloudWatch struct { type CloudWatch struct {
Namespace string `toml:"namespace"` // CloudWatch Metrics Namespace Namespace string `toml:"namespace"` // CloudWatch Metrics Namespace
HighResolutionMetrics bool `toml:"high_resolution_metrics"` HighResolutionMetrics bool `toml:"high_resolution_metrics"`
svc *cloudwatch.CloudWatch svc *cloudwatch.Client
WriteStatistics bool `toml:"write_statistics"` WriteStatistics bool `toml:"write_statistics"`
@ -38,7 +40,7 @@ const (
type cloudwatchField interface { type cloudwatchField interface {
addValue(sType statisticType, value float64) addValue(sType statisticType, value float64)
buildDatum() []*cloudwatch.MetricDatum buildDatum() []types.MetricDatum
} }
type statisticField struct { type statisticField struct {
@ -56,8 +58,8 @@ func (f *statisticField) addValue(sType statisticType, value float64) {
} }
} }
func (f *statisticField) buildDatum() []*cloudwatch.MetricDatum { func (f *statisticField) buildDatum() []types.MetricDatum {
var datums []*cloudwatch.MetricDatum var datums []types.MetricDatum
if f.hasAllFields() { if f.hasAllFields() {
// If we have all required fields, we build datum with StatisticValues // If we have all required fields, we build datum with StatisticValues
@ -66,24 +68,24 @@ func (f *statisticField) buildDatum() []*cloudwatch.MetricDatum {
sum := f.values[statisticTypeSum] sum := f.values[statisticTypeSum]
count := f.values[statisticTypeCount] count := f.values[statisticTypeCount]
datum := &cloudwatch.MetricDatum{ datum := types.MetricDatum{
MetricName: aws.String(strings.Join([]string{f.metricName, f.fieldName}, "_")), MetricName: aws.String(strings.Join([]string{f.metricName, f.fieldName}, "_")),
Dimensions: BuildDimensions(f.tags), Dimensions: BuildDimensions(f.tags),
Timestamp: aws.Time(f.timestamp), Timestamp: aws.Time(f.timestamp),
StatisticValues: &cloudwatch.StatisticSet{ StatisticValues: &types.StatisticSet{
Minimum: aws.Float64(min), Minimum: aws.Float64(min),
Maximum: aws.Float64(max), Maximum: aws.Float64(max),
Sum: aws.Float64(sum), Sum: aws.Float64(sum),
SampleCount: aws.Float64(count), SampleCount: aws.Float64(count),
}, },
StorageResolution: aws.Int64(f.storageResolution), StorageResolution: aws.Int32(int32(f.storageResolution)),
} }
datums = append(datums, datum) datums = append(datums, datum)
} else { } else {
// If we don't have all required fields, we build each field as independent datum // If we don't have all required fields, we build each field as independent datum
for sType, value := range f.values { for sType, value := range f.values {
datum := &cloudwatch.MetricDatum{ datum := types.MetricDatum{
Value: aws.Float64(value), Value: aws.Float64(value),
Dimensions: BuildDimensions(f.tags), Dimensions: BuildDimensions(f.tags),
Timestamp: aws.Time(f.timestamp), Timestamp: aws.Time(f.timestamp),
@ -134,14 +136,14 @@ func (f *valueField) addValue(sType statisticType, value float64) {
} }
} }
func (f *valueField) buildDatum() []*cloudwatch.MetricDatum { func (f *valueField) buildDatum() []types.MetricDatum {
return []*cloudwatch.MetricDatum{ return []types.MetricDatum{
{ {
MetricName: aws.String(strings.Join([]string{f.metricName, f.fieldName}, "_")), MetricName: aws.String(strings.Join([]string{f.metricName, f.fieldName}, "_")),
Value: aws.Float64(f.value), Value: aws.Float64(f.value),
Dimensions: BuildDimensions(f.tags), Dimensions: BuildDimensions(f.tags),
Timestamp: aws.Time(f.timestamp), Timestamp: aws.Time(f.timestamp),
StorageResolution: aws.Int64(f.storageResolution), StorageResolution: aws.Int32(int32(f.storageResolution)),
}, },
} }
} }
@ -198,11 +200,12 @@ func (c *CloudWatch) Description() string {
} }
func (c *CloudWatch) Connect() error { func (c *CloudWatch) Connect() error {
p, err := c.CredentialConfig.Credentials() cfg, err := c.CredentialConfig.Credentials()
if err != nil { if err != nil {
return err return err
} }
c.svc = cloudwatch.New(p)
c.svc = cloudwatch.NewFromConfig(cfg)
return nil return nil
} }
@ -211,7 +214,7 @@ func (c *CloudWatch) Close() error {
} }
func (c *CloudWatch) Write(metrics []telegraf.Metric) error { func (c *CloudWatch) Write(metrics []telegraf.Metric) error {
var datums []*cloudwatch.MetricDatum var datums []types.MetricDatum
for _, m := range metrics { for _, m := range metrics {
d := BuildMetricDatum(c.WriteStatistics, c.HighResolutionMetrics, m) d := BuildMetricDatum(c.WriteStatistics, c.HighResolutionMetrics, m)
datums = append(datums, d...) datums = append(datums, d...)
@ -229,13 +232,13 @@ func (c *CloudWatch) Write(metrics []telegraf.Metric) error {
return nil return nil
} }
func (c *CloudWatch) WriteToCloudWatch(datums []*cloudwatch.MetricDatum) error { func (c *CloudWatch) WriteToCloudWatch(datums []types.MetricDatum) error {
params := &cloudwatch.PutMetricDataInput{ params := &cloudwatch.PutMetricDataInput{
MetricData: datums, MetricData: datums,
Namespace: aws.String(c.Namespace), Namespace: aws.String(c.Namespace),
} }
_, err := c.svc.PutMetricData(params) _, err := c.svc.PutMetricData(context.Background(), params)
if err != nil { if err != nil {
c.Log.Errorf("Unable to write to CloudWatch : %+v", err.Error()) c.Log.Errorf("Unable to write to CloudWatch : %+v", err.Error())
@ -246,13 +249,13 @@ func (c *CloudWatch) WriteToCloudWatch(datums []*cloudwatch.MetricDatum) error {
// Partition the MetricDatums into smaller slices of a max size so that they are under the limit // Partition the MetricDatums into smaller slices of a max size so that they are under the limit
// for the AWS API calls. // for the AWS API calls.
func PartitionDatums(size int, datums []*cloudwatch.MetricDatum) [][]*cloudwatch.MetricDatum { func PartitionDatums(size int, datums []types.MetricDatum) [][]types.MetricDatum {
numberOfPartitions := len(datums) / size numberOfPartitions := len(datums) / size
if len(datums)%size != 0 { if len(datums)%size != 0 {
numberOfPartitions++ numberOfPartitions++
} }
partitions := make([][]*cloudwatch.MetricDatum, numberOfPartitions) partitions := make([][]types.MetricDatum, numberOfPartitions)
for i := 0; i < numberOfPartitions; i++ { for i := 0; i < numberOfPartitions; i++ {
start := size * i start := size * i
@ -270,7 +273,7 @@ func PartitionDatums(size int, datums []*cloudwatch.MetricDatum) [][]*cloudwatch
// Make a MetricDatum from telegraf.Metric. It would check if all required fields of // Make a MetricDatum from telegraf.Metric. It would check if all required fields of
// cloudwatch.StatisticSet are available. If so, it would build MetricDatum from statistic values. // cloudwatch.StatisticSet are available. If so, it would build MetricDatum from statistic values.
// Otherwise, fields would still be built independently. // Otherwise, fields would still be built independently.
func BuildMetricDatum(buildStatistic bool, highResolutionMetrics bool, point telegraf.Metric) []*cloudwatch.MetricDatum { func BuildMetricDatum(buildStatistic bool, highResolutionMetrics bool, point telegraf.Metric) []types.MetricDatum {
fields := make(map[string]cloudwatchField) fields := make(map[string]cloudwatchField)
tags := point.Tags() tags := point.Tags()
storageResolution := int64(60) storageResolution := int64(60)
@ -320,7 +323,7 @@ func BuildMetricDatum(buildStatistic bool, highResolutionMetrics bool, point tel
} }
} }
var datums []*cloudwatch.MetricDatum var datums []types.MetricDatum
for _, f := range fields { for _, f := range fields {
d := f.buildDatum() d := f.buildDatum()
datums = append(datums, d...) datums = append(datums, d...)
@ -332,13 +335,13 @@ func BuildMetricDatum(buildStatistic bool, highResolutionMetrics bool, point tel
// Make a list of Dimensions by using a Point's tags. CloudWatch supports up to // Make a list of Dimensions by using a Point's tags. CloudWatch supports up to
// 10 dimensions per metric so we only keep up to the first 10 alphabetically. // 10 dimensions per metric so we only keep up to the first 10 alphabetically.
// This always includes the "host" tag if it exists. // This always includes the "host" tag if it exists.
func BuildDimensions(mTags map[string]string) []*cloudwatch.Dimension { func BuildDimensions(mTags map[string]string) []types.Dimension {
const MaxDimensions = 10 const MaxDimensions = 10
dimensions := make([]*cloudwatch.Dimension, 0, MaxDimensions) dimensions := make([]types.Dimension, 0, MaxDimensions)
// This is pretty ugly but we always want to include the "host" tag if it exists. // This is pretty ugly but we always want to include the "host" tag if it exists.
if host, ok := mTags["host"]; ok { if host, ok := mTags["host"]; ok {
dimensions = append(dimensions, &cloudwatch.Dimension{ dimensions = append(dimensions, types.Dimension{
Name: aws.String("host"), Name: aws.String("host"),
Value: aws.String(host), Value: aws.String(host),
}) })
@ -362,7 +365,7 @@ func BuildDimensions(mTags map[string]string) []*cloudwatch.Dimension {
continue continue
} }
dimensions = append(dimensions, &cloudwatch.Dimension{ dimensions = append(dimensions, types.Dimension{
Name: aws.String(k), Name: aws.String(k),
Value: aws.String(mTags[k]), Value: aws.String(mTags[k]),
}) })
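
For reference, a minimal sketch (not part of the commit) of the write path after the migration: datums are value-typed types.MetricDatum, StorageResolution is an int32, and PutMetricData takes a context. The namespace, metric name, value, and dimension below are illustrative placeholders.

package main

import (
	"context"
	"log"
	"time"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/service/cloudwatch"
	"github.com/aws/aws-sdk-go-v2/service/cloudwatch/types"
)

func main() {
	ctx := context.Background()
	cfg, err := config.LoadDefaultConfig(ctx)
	if err != nil {
		log.Fatal(err)
	}
	svc := cloudwatch.NewFromConfig(cfg)

	datum := types.MetricDatum{
		MetricName: aws.String("cpu_usage_idle"),
		Value:      aws.Float64(87.5),
		Timestamp:  aws.Time(time.Now()),
		Dimensions: []types.Dimension{
			{Name: aws.String("host"), Value: aws.String("example-host")},
		},
		// 60 = standard resolution, 1 = high resolution; note the int32 in v2.
		StorageResolution: aws.Int32(60),
	}

	_, err = svc.PutMetricData(ctx, &cloudwatch.PutMetricDataInput{
		Namespace:  aws.String("Telegraf"),
		MetricData: []types.MetricDatum{datum},
	})
	if err != nil {
		log.Fatal(err)
	}
}
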

View File

@ -2,14 +2,13 @@ package cloudwatch
import ( import (
"fmt" "fmt"
"github.com/aws/aws-sdk-go-v2/service/cloudwatch/types"
"math" "math"
"sort" "sort"
"testing" "testing"
"time" "time"
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go/service/cloudwatch"
"github.com/influxdata/telegraf" "github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/metric" "github.com/influxdata/telegraf/metric"
"github.com/influxdata/telegraf/testutil" "github.com/influxdata/telegraf/testutil"
@ -117,8 +116,8 @@ func TestBuildMetricDatums(t *testing.T) {
} }
func TestMetricDatumResolution(t *testing.T) { func TestMetricDatumResolution(t *testing.T) {
const expectedStandardResolutionValue = int64(60) const expectedStandardResolutionValue = int32(60)
const expectedHighResolutionValue = int64(1) const expectedHighResolutionValue = int32(1)
assert := assert.New(t) assert := assert.New(t)
@ -153,19 +152,19 @@ func TestBuildMetricDatums_SkipEmptyTags(t *testing.T) {
func TestPartitionDatums(t *testing.T) { func TestPartitionDatums(t *testing.T) {
assert := assert.New(t) assert := assert.New(t)
testDatum := cloudwatch.MetricDatum{ testDatum := types.MetricDatum{
MetricName: aws.String("Foo"), MetricName: aws.String("Foo"),
Value: aws.Float64(1), Value: aws.Float64(1),
} }
zeroDatum := []*cloudwatch.MetricDatum{} zeroDatum := []types.MetricDatum{}
oneDatum := []*cloudwatch.MetricDatum{&testDatum} oneDatum := []types.MetricDatum{testDatum}
twoDatum := []*cloudwatch.MetricDatum{&testDatum, &testDatum} twoDatum := []types.MetricDatum{testDatum, testDatum}
threeDatum := []*cloudwatch.MetricDatum{&testDatum, &testDatum, &testDatum} threeDatum := []types.MetricDatum{testDatum, testDatum, testDatum}
assert.Equal([][]*cloudwatch.MetricDatum{}, PartitionDatums(2, zeroDatum)) assert.Equal([][]types.MetricDatum{}, PartitionDatums(2, zeroDatum))
assert.Equal([][]*cloudwatch.MetricDatum{oneDatum}, PartitionDatums(2, oneDatum)) assert.Equal([][]types.MetricDatum{oneDatum}, PartitionDatums(2, oneDatum))
assert.Equal([][]*cloudwatch.MetricDatum{oneDatum}, PartitionDatums(2, oneDatum)) assert.Equal([][]types.MetricDatum{oneDatum}, PartitionDatums(2, oneDatum))
assert.Equal([][]*cloudwatch.MetricDatum{twoDatum}, PartitionDatums(2, twoDatum)) assert.Equal([][]types.MetricDatum{twoDatum}, PartitionDatums(2, twoDatum))
assert.Equal([][]*cloudwatch.MetricDatum{twoDatum, oneDatum}, PartitionDatums(2, threeDatum)) assert.Equal([][]types.MetricDatum{twoDatum, oneDatum}, PartitionDatums(2, threeDatum))
} }

View File

@ -1,19 +1,21 @@
package cloudwatch_logs package cloudwatch_logs
import ( import (
"context"
"fmt" "fmt"
"sort" "sort"
"strings" "strings"
"time" "time"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs" "github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs"
"github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs/types"
"github.com/influxdata/telegraf" "github.com/influxdata/telegraf"
internalaws "github.com/influxdata/telegraf/config/aws" internalaws "github.com/influxdata/telegraf/config/aws"
"github.com/influxdata/telegraf/plugins/outputs" "github.com/influxdata/telegraf/plugins/outputs"
) )
type messageBatch struct { type messageBatch struct {
logEvents []*cloudwatchlogs.InputLogEvent logEvents []types.InputLogEvent
messageCount int messageCount int
} }
type logStreamContainer struct { type logStreamContainer struct {
@ -25,16 +27,16 @@ type logStreamContainer struct {
//Cloudwatch Logs service interface //Cloudwatch Logs service interface
type cloudWatchLogs interface { type cloudWatchLogs interface {
DescribeLogGroups(*cloudwatchlogs.DescribeLogGroupsInput) (*cloudwatchlogs.DescribeLogGroupsOutput, error) DescribeLogGroups(context.Context, *cloudwatchlogs.DescribeLogGroupsInput, ...func(options *cloudwatchlogs.Options)) (*cloudwatchlogs.DescribeLogGroupsOutput, error)
DescribeLogStreams(*cloudwatchlogs.DescribeLogStreamsInput) (*cloudwatchlogs.DescribeLogStreamsOutput, error) DescribeLogStreams(context.Context, *cloudwatchlogs.DescribeLogStreamsInput, ...func(options *cloudwatchlogs.Options)) (*cloudwatchlogs.DescribeLogStreamsOutput, error)
CreateLogStream(*cloudwatchlogs.CreateLogStreamInput) (*cloudwatchlogs.CreateLogStreamOutput, error) CreateLogStream(context.Context, *cloudwatchlogs.CreateLogStreamInput, ...func(options *cloudwatchlogs.Options)) (*cloudwatchlogs.CreateLogStreamOutput, error)
PutLogEvents(*cloudwatchlogs.PutLogEventsInput) (*cloudwatchlogs.PutLogEventsOutput, error) PutLogEvents(context.Context, *cloudwatchlogs.PutLogEventsInput, ...func(options *cloudwatchlogs.Options)) (*cloudwatchlogs.PutLogEventsOutput, error)
} }
// CloudWatchLogs plugin object definition // CloudWatchLogs plugin object definition
type CloudWatchLogs struct { type CloudWatchLogs struct {
LogGroup string `toml:"log_group"` LogGroup string `toml:"log_group"`
lg *cloudwatchlogs.LogGroup //log group data lg *types.LogGroup //log group data
LogStream string `toml:"log_stream"` LogStream string `toml:"log_stream"`
lsKey string //log stream source: tag or field lsKey string //log stream source: tag or field
@ -187,19 +189,17 @@ func (c *CloudWatchLogs) Connect() error {
var logGroupsOutput = &cloudwatchlogs.DescribeLogGroupsOutput{NextToken: &dummyToken} var logGroupsOutput = &cloudwatchlogs.DescribeLogGroupsOutput{NextToken: &dummyToken}
var err error var err error
p, err := c.CredentialConfig.Credentials() cfg, err := c.CredentialConfig.Credentials()
if err != nil { if err != nil {
return err return err
} }
c.svc = cloudwatchlogs.New(p) c.svc = cloudwatchlogs.NewFromConfig(cfg)
if c.svc == nil {
return fmt.Errorf("can't create cloudwatch logs service endpoint")
}
//Find log group with name 'c.LogGroup' //Find log group with name 'c.LogGroup'
if c.lg == nil { //In case connection is not retried, first time if c.lg == nil { //In case connection is not retried, first time
for logGroupsOutput.NextToken != nil { for logGroupsOutput.NextToken != nil {
logGroupsOutput, err = c.svc.DescribeLogGroups( logGroupsOutput, err = c.svc.DescribeLogGroups(
context.Background(),
&cloudwatchlogs.DescribeLogGroupsInput{ &cloudwatchlogs.DescribeLogGroupsInput{
LogGroupNamePrefix: &c.LogGroup, LogGroupNamePrefix: &c.LogGroup,
NextToken: queryToken}) NextToken: queryToken})
@ -212,7 +212,7 @@ func (c *CloudWatchLogs) Connect() error {
for _, logGroup := range logGroupsOutput.LogGroups { for _, logGroup := range logGroupsOutput.LogGroups {
if *(logGroup.LogGroupName) == c.LogGroup { if *(logGroup.LogGroupName) == c.LogGroup {
c.Log.Debugf("Found log group %q", c.LogGroup) c.Log.Debugf("Found log group %q", c.LogGroup)
c.lg = logGroup c.lg = &logGroup //nolint:revive
} }
} }
} }
@ -325,7 +325,7 @@ func (c *CloudWatchLogs) Write(metrics []telegraf.Metric) error {
lsContainer = val lsContainer = val
} else { } else {
lsContainer.messageBatches[0].messageCount = 0 lsContainer.messageBatches[0].messageCount = 0
lsContainer.messageBatches[0].logEvents = []*cloudwatchlogs.InputLogEvent{} lsContainer.messageBatches[0].logEvents = []types.InputLogEvent{}
c.ls[logStream] = lsContainer c.ls[logStream] = lsContainer
} }
@ -335,7 +335,7 @@ func (c *CloudWatchLogs) Write(metrics []telegraf.Metric) error {
lsContainer.currentBatchIndex++ lsContainer.currentBatchIndex++
lsContainer.messageBatches = append(lsContainer.messageBatches, lsContainer.messageBatches = append(lsContainer.messageBatches,
messageBatch{ messageBatch{
logEvents: []*cloudwatchlogs.InputLogEvent{}, logEvents: []types.InputLogEvent{},
messageCount: 0}) messageCount: 0})
lsContainer.currentBatchSizeBytes = messageSizeInBytesForAWS lsContainer.currentBatchSizeBytes = messageSizeInBytesForAWS
} else { } else {
@ -349,7 +349,7 @@ func (c *CloudWatchLogs) Write(metrics []telegraf.Metric) error {
//Adding metric to batch //Adding metric to batch
lsContainer.messageBatches[lsContainer.currentBatchIndex].logEvents = lsContainer.messageBatches[lsContainer.currentBatchIndex].logEvents =
append(lsContainer.messageBatches[lsContainer.currentBatchIndex].logEvents, append(lsContainer.messageBatches[lsContainer.currentBatchIndex].logEvents,
&cloudwatchlogs.InputLogEvent{ types.InputLogEvent{
Message: &logData, Message: &logData,
Timestamp: &metricTime}) Timestamp: &metricTime})
} }
@ -370,11 +370,11 @@ func (c *CloudWatchLogs) Write(metrics []telegraf.Metric) error {
if elem.sequenceToken == "" { if elem.sequenceToken == "" {
//This is the first attempt to write to log stream, //This is the first attempt to write to log stream,
//need to check log stream existence and create it if necessary //need to check log stream existence and create it if necessary
describeLogStreamOutput, err := c.svc.DescribeLogStreams(&cloudwatchlogs.DescribeLogStreamsInput{ describeLogStreamOutput, err := c.svc.DescribeLogStreams(context.Background(), &cloudwatchlogs.DescribeLogStreamsInput{
LogGroupName: &c.LogGroup, LogGroupName: &c.LogGroup,
LogStreamNamePrefix: &logStream}) LogStreamNamePrefix: &logStream})
if err == nil && len(describeLogStreamOutput.LogStreams) == 0 { if err == nil && len(describeLogStreamOutput.LogStreams) == 0 {
_, err := c.svc.CreateLogStream(&cloudwatchlogs.CreateLogStreamInput{ _, err := c.svc.CreateLogStream(context.Background(), &cloudwatchlogs.CreateLogStreamInput{
LogGroupName: &c.LogGroup, LogGroupName: &c.LogGroup,
LogStreamName: &logStream}) LogStreamName: &logStream})
if err != nil { if err != nil {
@ -404,14 +404,14 @@ func (c *CloudWatchLogs) Write(metrics []telegraf.Metric) error {
//There is a quota of 5 requests per second per log stream. Additional //There is a quota of 5 requests per second per log stream. Additional
//requests are throttled. This quota can't be changed. //requests are throttled. This quota can't be changed.
putLogEventsOutput, err := c.svc.PutLogEvents(&putLogEvents) putLogEventsOutput, err := c.svc.PutLogEvents(context.Background(), &putLogEvents)
if err != nil { if err != nil {
c.Log.Errorf("Can't push logs batch to AWS. Reason: %v", err) c.Log.Errorf("Can't push logs batch to AWS. Reason: %v", err)
continue continue
} }
//Cleanup batch //Cleanup batch
elem.messageBatches[index] = messageBatch{ elem.messageBatches[index] = messageBatch{
logEvents: []*cloudwatchlogs.InputLogEvent{}, logEvents: []types.InputLogEvent{},
messageCount: 0} messageCount: 0}
elem.sequenceToken = *putLogEventsOutput.NextSequenceToken elem.sequenceToken = *putLogEventsOutput.NextSequenceToken
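The Connect and Write changes above follow the v2 pattern used throughout this commit: load an aws.Config, build the client with NewFromConfig, and pass a context to every call. A standalone sketch of that flow, with the region and log group name as placeholders (the plugin obtains its config from CredentialConfig.Credentials() rather than the default chain used here):

```go
package main

import (
	"context"
	"log"

	"github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs"
)

func main() {
	ctx := context.Background()

	// Load the default credential/region chain for brevity.
	cfg, err := config.LoadDefaultConfig(ctx, config.WithRegion("us-east-1"))
	if err != nil {
		log.Fatal(err)
	}

	// NewFromConfig never returns nil, which is why the old nil check
	// on the v1 client was dropped from Connect above.
	svc := cloudwatchlogs.NewFromConfig(cfg)

	group := "telegraf"
	out, err := svc.DescribeLogGroups(ctx, &cloudwatchlogs.DescribeLogGroupsInput{
		LogGroupNamePrefix: &group,
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, lg := range out.LogGroups {
		log.Printf("found log group %s", *lg.LogGroupName)
	}
}
```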

View File

@ -1,6 +1,7 @@
package cloudwatch_logs package cloudwatch_logs
import ( import (
"context"
"fmt" "fmt"
"math/rand" "math/rand"
"net/http" "net/http"
@ -8,7 +9,8 @@ import (
"testing" "testing"
"time" "time"
"github.com/aws/aws-sdk-go/service/cloudwatchlogs" cloudwatchlogsV2 "github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs"
"github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs/types"
"github.com/influxdata/telegraf" "github.com/influxdata/telegraf"
internalaws "github.com/influxdata/telegraf/config/aws" internalaws "github.com/influxdata/telegraf/config/aws"
"github.com/influxdata/telegraf/testutil" "github.com/influxdata/telegraf/testutil"
@ -17,24 +19,24 @@ import (
type mockCloudWatchLogs struct { type mockCloudWatchLogs struct {
logStreamName string logStreamName string
pushedLogEvents []cloudwatchlogs.InputLogEvent pushedLogEvents []types.InputLogEvent
} }
func (c *mockCloudWatchLogs) Init(lsName string) { func (c *mockCloudWatchLogs) Init(lsName string) {
c.logStreamName = lsName c.logStreamName = lsName
c.pushedLogEvents = make([]cloudwatchlogs.InputLogEvent, 0) c.pushedLogEvents = make([]types.InputLogEvent, 0)
} }
func (c *mockCloudWatchLogs) DescribeLogGroups(*cloudwatchlogs.DescribeLogGroupsInput) (*cloudwatchlogs.DescribeLogGroupsOutput, error) { func (c *mockCloudWatchLogs) DescribeLogGroups(context.Context, *cloudwatchlogsV2.DescribeLogGroupsInput, ...func(options *cloudwatchlogsV2.Options)) (*cloudwatchlogsV2.DescribeLogGroupsOutput, error) {
return nil, nil return nil, nil
} }
func (c *mockCloudWatchLogs) DescribeLogStreams(*cloudwatchlogs.DescribeLogStreamsInput) (*cloudwatchlogs.DescribeLogStreamsOutput, error) { func (c *mockCloudWatchLogs) DescribeLogStreams(context.Context, *cloudwatchlogsV2.DescribeLogStreamsInput, ...func(options *cloudwatchlogsV2.Options)) (*cloudwatchlogsV2.DescribeLogStreamsOutput, error) {
arn := "arn" arn := "arn"
creationTime := time.Now().Unix() creationTime := time.Now().Unix()
sequenceToken := "arbitraryToken" sequenceToken := "arbitraryToken"
output := &cloudwatchlogs.DescribeLogStreamsOutput{ output := &cloudwatchlogsV2.DescribeLogStreamsOutput{
LogStreams: []*cloudwatchlogs.LogStream{ LogStreams: []types.LogStream{
{ {
Arn: &arn, Arn: &arn,
CreationTime: &creationTime, CreationTime: &creationTime,
@ -48,15 +50,15 @@ func (c *mockCloudWatchLogs) DescribeLogStreams(*cloudwatchlogs.DescribeLogStrea
} }
return output, nil return output, nil
} }
func (c *mockCloudWatchLogs) CreateLogStream(*cloudwatchlogs.CreateLogStreamInput) (*cloudwatchlogs.CreateLogStreamOutput, error) { func (c *mockCloudWatchLogs) CreateLogStream(context.Context, *cloudwatchlogsV2.CreateLogStreamInput, ...func(options *cloudwatchlogsV2.Options)) (*cloudwatchlogsV2.CreateLogStreamOutput, error) {
return nil, nil return nil, nil
} }
func (c *mockCloudWatchLogs) PutLogEvents(input *cloudwatchlogs.PutLogEventsInput) (*cloudwatchlogs.PutLogEventsOutput, error) { func (c *mockCloudWatchLogs) PutLogEvents(_ context.Context, input *cloudwatchlogsV2.PutLogEventsInput, _ ...func(options *cloudwatchlogsV2.Options)) (*cloudwatchlogsV2.PutLogEventsOutput, error) {
sequenceToken := "arbitraryToken" sequenceToken := "arbitraryToken"
output := &cloudwatchlogs.PutLogEventsOutput{NextSequenceToken: &sequenceToken} output := &cloudwatchlogsV2.PutLogEventsOutput{NextSequenceToken: &sequenceToken}
//Saving messages //Saving messages
for _, event := range input.LogEvents { for _, event := range input.LogEvents {
c.pushedLogEvents = append(c.pushedLogEvents, *event) c.pushedLogEvents = append(c.pushedLogEvents, event)
} }
return output, nil return output, nil
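This mock is what the narrowed cloudWatchLogs interface buys: both *cloudwatchlogs.Client and a hand-written fake satisfy it, so tests can swap the transport out. A reduced sketch of that pattern, trimmed to a single operation for brevity:

```go
package main

import (
	"context"
	"fmt"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs"
)

// putLogEventsAPI is a narrowed stand-in for the plugin's cloudWatchLogs
// interface, reduced to one call for illustration.
type putLogEventsAPI interface {
	PutLogEvents(context.Context, *cloudwatchlogs.PutLogEventsInput, ...func(*cloudwatchlogs.Options)) (*cloudwatchlogs.PutLogEventsOutput, error)
}

// fakeLogs plays the role of mockCloudWatchLogs above.
type fakeLogs struct{ calls int }

func (f *fakeLogs) PutLogEvents(context.Context, *cloudwatchlogs.PutLogEventsInput, ...func(*cloudwatchlogs.Options)) (*cloudwatchlogs.PutLogEventsOutput, error) {
	f.calls++
	token := "arbitraryToken"
	return &cloudwatchlogs.PutLogEventsOutput{NextSequenceToken: &token}, nil
}

func main() {
	// The real v2 client satisfies the same interface...
	var _ putLogEventsAPI = cloudwatchlogs.NewFromConfig(aws.Config{})
	// ...and so does the fake, which is what the unit tests rely on.
	var svc putLogEventsAPI = &fakeLogs{}
	_, _ = svc.PutLogEvents(context.Background(), &cloudwatchlogs.PutLogEventsInput{})
	fmt.Println("ok")
}
```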

View File

@ -1,11 +1,12 @@
package kinesis package kinesis
import ( import (
"context"
"time" "time"
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go/service/kinesis" "github.com/aws/aws-sdk-go-v2/service/kinesis"
"github.com/aws/aws-sdk-go/service/kinesis/kinesisiface" "github.com/aws/aws-sdk-go-v2/service/kinesis/types"
"github.com/gofrs/uuid" "github.com/gofrs/uuid"
"github.com/influxdata/telegraf" "github.com/influxdata/telegraf"
internalaws "github.com/influxdata/telegraf/config/aws" internalaws "github.com/influxdata/telegraf/config/aws"
@ -26,7 +27,7 @@ type (
Log telegraf.Logger `toml:"-"` Log telegraf.Logger `toml:"-"`
serializer serializers.Serializer serializer serializers.Serializer
svc kinesisiface.KinesisAPI svc kinesisClient
internalaws.CredentialConfig internalaws.CredentialConfig
} }
@ -38,6 +39,10 @@ type (
} }
) )
type kinesisClient interface {
PutRecords(context.Context, *kinesis.PutRecordsInput, ...func(*kinesis.Options)) (*kinesis.PutRecordsOutput, error)
}
var sampleConfig = ` var sampleConfig = `
## Amazon REGION of kinesis endpoint. ## Amazon REGION of kinesis endpoint.
region = "ap-southeast-2" region = "ap-southeast-2"
@ -126,13 +131,14 @@ func (k *KinesisOutput) Connect() error {
k.Log.Infof("Establishing a connection to Kinesis in %s", k.Region) k.Log.Infof("Establishing a connection to Kinesis in %s", k.Region)
} }
p, err := k.CredentialConfig.Credentials() cfg, err := k.CredentialConfig.Credentials()
if err != nil { if err != nil {
return err return err
} }
svc := kinesis.New(p)
_, err = svc.DescribeStreamSummary(&kinesis.DescribeStreamSummaryInput{ svc := kinesis.NewFromConfig(cfg)
_, err = svc.DescribeStreamSummary(context.Background(), &kinesis.DescribeStreamSummaryInput{
StreamName: aws.String(k.StreamName), StreamName: aws.String(k.StreamName),
}) })
k.svc = svc k.svc = svc
@ -147,14 +153,14 @@ func (k *KinesisOutput) SetSerializer(serializer serializers.Serializer) {
k.serializer = serializer k.serializer = serializer
} }
func (k *KinesisOutput) writeKinesis(r []*kinesis.PutRecordsRequestEntry) time.Duration { func (k *KinesisOutput) writeKinesis(r []types.PutRecordsRequestEntry) time.Duration {
start := time.Now() start := time.Now()
payload := &kinesis.PutRecordsInput{ payload := &kinesis.PutRecordsInput{
Records: r, Records: r,
StreamName: aws.String(k.StreamName), StreamName: aws.String(k.StreamName),
} }
resp, err := k.svc.PutRecords(payload) resp, err := k.svc.PutRecords(context.Background(), payload)
if err != nil { if err != nil {
k.Log.Errorf("Unable to write to Kinesis : %s", err.Error()) k.Log.Errorf("Unable to write to Kinesis : %s", err.Error())
return time.Since(start) return time.Since(start)
@ -214,7 +220,7 @@ func (k *KinesisOutput) Write(metrics []telegraf.Metric) error {
return nil return nil
} }
r := []*kinesis.PutRecordsRequestEntry{} r := []types.PutRecordsRequestEntry{}
for _, metric := range metrics { for _, metric := range metrics {
sz++ sz++
@ -227,12 +233,12 @@ func (k *KinesisOutput) Write(metrics []telegraf.Metric) error {
partitionKey := k.getPartitionKey(metric) partitionKey := k.getPartitionKey(metric)
d := kinesis.PutRecordsRequestEntry{ d := types.PutRecordsRequestEntry{
Data: values, Data: values,
PartitionKey: aws.String(partitionKey), PartitionKey: aws.String(partitionKey),
} }
r = append(r, &d) r = append(r, d)
if sz == maxRecordsPerRequest { if sz == maxRecordsPerRequest {
elapsed := k.writeKinesis(r) elapsed := k.writeKinesis(r)
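writeKinesis above now builds the request from entry values and calls PutRecords with a context. A compact sketch of the same request shape outside the plugin, with stream name and payload as placeholders:

```go
package main

import (
	"context"
	"log"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/service/kinesis"
	"github.com/aws/aws-sdk-go-v2/service/kinesis/types"
)

func main() {
	ctx := context.Background()
	cfg, err := config.LoadDefaultConfig(ctx, config.WithRegion("ap-southeast-2"))
	if err != nil {
		log.Fatal(err)
	}
	svc := kinesis.NewFromConfig(cfg)

	// Entries are plain values in v2; only the leaf fields keep pointers.
	records := []types.PutRecordsRequestEntry{
		{Data: []byte(`{"cpu":"cpu0"}`), PartitionKey: aws.String("telegraf")},
	}

	out, err := svc.PutRecords(ctx, &kinesis.PutRecordsInput{
		Records:    records,
		StreamName: aws.String("my-stream"),
	})
	if err != nil {
		log.Fatal(err)
	}
	// FailedRecordCount is *int32 in v2 (it was *int64 in v1).
	log.Printf("failed records: %d", aws.ToInt32(out.FailedRecordCount))
}
```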

View File

@ -1,13 +1,13 @@
package kinesis package kinesis
import ( import (
"context"
"fmt" "fmt"
"testing" "testing"
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go-v2/service/kinesis"
"github.com/aws/aws-sdk-go/service/kinesis" "github.com/aws/aws-sdk-go-v2/service/kinesis/types"
"github.com/aws/aws-sdk-go/service/kinesis/kinesisiface"
"github.com/gofrs/uuid" "github.com/gofrs/uuid"
"github.com/influxdata/telegraf" "github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/plugins/serializers" "github.com/influxdata/telegraf/plugins/serializers"
@ -110,7 +110,7 @@ func TestPartitionKey(t *testing.T) {
func TestWriteKinesis_WhenSuccess(t *testing.T) { func TestWriteKinesis_WhenSuccess(t *testing.T) {
assert := assert.New(t) assert := assert.New(t)
records := []*kinesis.PutRecordsRequestEntry{ records := []types.PutRecordsRequestEntry{
{ {
PartitionKey: aws.String(testPartitionKey), PartitionKey: aws.String(testPartitionKey),
Data: []byte{0x65}, Data: []byte{0x65},
@ -120,7 +120,7 @@ func TestWriteKinesis_WhenSuccess(t *testing.T) {
svc := &mockKinesisPutRecords{} svc := &mockKinesisPutRecords{}
svc.SetupResponse( svc.SetupResponse(
0, 0,
[]*kinesis.PutRecordsResultEntry{ []types.PutRecordsResultEntry{
{ {
SequenceNumber: aws.String(testSequenceNumber), SequenceNumber: aws.String(testSequenceNumber),
ShardId: aws.String(testShardID), ShardId: aws.String(testShardID),
@ -148,7 +148,7 @@ func TestWriteKinesis_WhenSuccess(t *testing.T) {
func TestWriteKinesis_WhenRecordErrors(t *testing.T) { func TestWriteKinesis_WhenRecordErrors(t *testing.T) {
assert := assert.New(t) assert := assert.New(t)
records := []*kinesis.PutRecordsRequestEntry{ records := []types.PutRecordsRequestEntry{
{ {
PartitionKey: aws.String(testPartitionKey), PartitionKey: aws.String(testPartitionKey),
Data: []byte{0x66}, Data: []byte{0x66},
@ -158,7 +158,7 @@ func TestWriteKinesis_WhenRecordErrors(t *testing.T) {
svc := &mockKinesisPutRecords{} svc := &mockKinesisPutRecords{}
svc.SetupResponse( svc.SetupResponse(
1, 1,
[]*kinesis.PutRecordsResultEntry{ []types.PutRecordsResultEntry{
{ {
ErrorCode: aws.String("InternalFailure"), ErrorCode: aws.String("InternalFailure"),
ErrorMessage: aws.String("Internal Service Failure"), ErrorMessage: aws.String("Internal Service Failure"),
@ -186,7 +186,7 @@ func TestWriteKinesis_WhenRecordErrors(t *testing.T) {
func TestWriteKinesis_WhenServiceError(t *testing.T) { func TestWriteKinesis_WhenServiceError(t *testing.T) {
assert := assert.New(t) assert := assert.New(t)
records := []*kinesis.PutRecordsRequestEntry{ records := []types.PutRecordsRequestEntry{
{ {
PartitionKey: aws.String(testPartitionKey), PartitionKey: aws.String(testPartitionKey),
Data: []byte{}, Data: []byte{},
@ -195,7 +195,7 @@ func TestWriteKinesis_WhenServiceError(t *testing.T) {
svc := &mockKinesisPutRecords{} svc := &mockKinesisPutRecords{}
svc.SetupErrorResponse( svc.SetupErrorResponse(
awserr.New("InvalidArgumentException", "Invalid record", nil), &types.InvalidArgumentException{Message: aws.String("Invalid record")},
) )
k := KinesisOutput{ k := KinesisOutput{
@ -262,7 +262,7 @@ func TestWrite_SingleMetric(t *testing.T) {
svc.AssertRequests(t, []*kinesis.PutRecordsInput{ svc.AssertRequests(t, []*kinesis.PutRecordsInput{
{ {
StreamName: aws.String(testStreamName), StreamName: aws.String(testStreamName),
Records: []*kinesis.PutRecordsRequestEntry{ Records: []types.PutRecordsRequestEntry{
{ {
PartitionKey: aws.String(testPartitionKey), PartitionKey: aws.String(testPartitionKey),
Data: metricData, Data: metricData,
@ -449,7 +449,7 @@ func TestWrite_SerializerError(t *testing.T) {
svc.AssertRequests(t, []*kinesis.PutRecordsInput{ svc.AssertRequests(t, []*kinesis.PutRecordsInput{
{ {
StreamName: aws.String(testStreamName), StreamName: aws.String(testStreamName),
Records: []*kinesis.PutRecordsRequestEntry{ Records: []types.PutRecordsRequestEntry{
{ {
PartitionKey: aws.String(testPartitionKey), PartitionKey: aws.String(testPartitionKey),
Data: metric1Data, Data: metric1Data,
@ -469,20 +469,18 @@ type mockKinesisPutRecordsResponse struct {
} }
type mockKinesisPutRecords struct { type mockKinesisPutRecords struct {
kinesisiface.KinesisAPI
requests []*kinesis.PutRecordsInput requests []*kinesis.PutRecordsInput
responses []*mockKinesisPutRecordsResponse responses []*mockKinesisPutRecordsResponse
} }
func (m *mockKinesisPutRecords) SetupResponse( func (m *mockKinesisPutRecords) SetupResponse(
failedRecordCount int64, failedRecordCount int32,
records []*kinesis.PutRecordsResultEntry, records []types.PutRecordsResultEntry,
) { ) {
m.responses = append(m.responses, &mockKinesisPutRecordsResponse{ m.responses = append(m.responses, &mockKinesisPutRecordsResponse{
Err: nil, Err: nil,
Output: &kinesis.PutRecordsOutput{ Output: &kinesis.PutRecordsOutput{
FailedRecordCount: aws.Int64(failedRecordCount), FailedRecordCount: aws.Int32(failedRecordCount),
Records: records, Records: records,
}, },
}) })
@ -490,25 +488,25 @@ func (m *mockKinesisPutRecords) SetupResponse(
func (m *mockKinesisPutRecords) SetupGenericResponse( func (m *mockKinesisPutRecords) SetupGenericResponse(
successfulRecordCount uint32, successfulRecordCount uint32,
failedRecordCount uint32, failedRecordCount int32,
) { ) {
records := []*kinesis.PutRecordsResultEntry{} records := []types.PutRecordsResultEntry{}
for i := uint32(0); i < successfulRecordCount; i++ { for i := uint32(0); i < successfulRecordCount; i++ {
records = append(records, &kinesis.PutRecordsResultEntry{ records = append(records, types.PutRecordsResultEntry{
SequenceNumber: aws.String(testSequenceNumber), SequenceNumber: aws.String(testSequenceNumber),
ShardId: aws.String(testShardID), ShardId: aws.String(testShardID),
}) })
} }
for i := uint32(0); i < failedRecordCount; i++ { for i := int32(0); i < failedRecordCount; i++ {
records = append(records, &kinesis.PutRecordsResultEntry{ records = append(records, types.PutRecordsResultEntry{
ErrorCode: aws.String("InternalFailure"), ErrorCode: aws.String("InternalFailure"),
ErrorMessage: aws.String("Internal Service Failure"), ErrorMessage: aws.String("Internal Service Failure"),
}) })
} }
m.SetupResponse(int64(failedRecordCount), records) m.SetupResponse(failedRecordCount, records)
} }
func (m *mockKinesisPutRecords) SetupErrorResponse(err error) { func (m *mockKinesisPutRecords) SetupErrorResponse(err error) {
@ -518,7 +516,7 @@ func (m *mockKinesisPutRecords) SetupErrorResponse(err error) {
}) })
} }
func (m *mockKinesisPutRecords) PutRecords(input *kinesis.PutRecordsInput) (*kinesis.PutRecordsOutput, error) { func (m *mockKinesisPutRecords) PutRecords(_ context.Context, input *kinesis.PutRecordsInput, _ ...func(*kinesis.Options)) (*kinesis.PutRecordsOutput, error) {
reqNum := len(m.requests) reqNum := len(m.requests)
if reqNum > len(m.responses) { if reqNum > len(m.responses) {
return nil, fmt.Errorf("Response for request %+v not setup", reqNum) return nil, fmt.Errorf("Response for request %+v not setup", reqNum)
@ -612,12 +610,12 @@ func createTestMetrics(
func createPutRecordsRequestEntries( func createPutRecordsRequestEntries(
metricsData [][]byte, metricsData [][]byte,
) []*kinesis.PutRecordsRequestEntry { ) []types.PutRecordsRequestEntry {
count := len(metricsData) count := len(metricsData)
records := make([]*kinesis.PutRecordsRequestEntry, count) records := make([]types.PutRecordsRequestEntry, count)
for i := 0; i < count; i++ { for i := 0; i < count; i++ {
records[i] = &kinesis.PutRecordsRequestEntry{ records[i] = types.PutRecordsRequestEntry{
PartitionKey: aws.String(testPartitionKey), PartitionKey: aws.String(testPartitionKey),
Data: metricsData[i], Data: metricsData[i],
} }
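SetupErrorResponse now hands the mock a modeled exception value instead of an awserr.Error, which is how v2 callers are expected to classify failures: unwrap into the generated type with errors.As. A small sketch of that check, with the wrapped error standing in for whatever a PutRecords call returns:

```go
package main

import (
	"errors"
	"fmt"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/service/kinesis/types"
)

// classify shows the v2 replacement for awserr.Error code switches:
// unwrap into the concrete exception type with errors.As.
func classify(err error) string {
	var invalid *types.InvalidArgumentException
	if errors.As(err, &invalid) {
		return "invalid argument: " + aws.ToString(invalid.Message)
	}
	return "other: " + err.Error()
}

func main() {
	err := fmt.Errorf("wrapping: %w",
		&types.InvalidArgumentException{Message: aws.String("Invalid record")})
	fmt.Println(classify(err))
}
```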

View File

@ -1,7 +1,9 @@
package timestream package timestream
import ( import (
"context"
"encoding/binary" "encoding/binary"
"errors"
"fmt" "fmt"
"hash/fnv" "hash/fnv"
"reflect" "reflect"
@ -11,9 +13,10 @@ import (
"github.com/influxdata/telegraf" "github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/plugins/outputs" "github.com/influxdata/telegraf/plugins/outputs"
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go-v2/service/timestreamwrite"
"github.com/aws/aws-sdk-go/service/timestreamwrite" "github.com/aws/aws-sdk-go-v2/service/timestreamwrite/types"
"github.com/aws/smithy-go"
internalaws "github.com/influxdata/telegraf/config/aws" internalaws "github.com/influxdata/telegraf/config/aws"
) )
@ -38,9 +41,9 @@ type (
} }
WriteClient interface { WriteClient interface {
CreateTable(*timestreamwrite.CreateTableInput) (*timestreamwrite.CreateTableOutput, error) CreateTable(context.Context, *timestreamwrite.CreateTableInput, ...func(*timestreamwrite.Options)) (*timestreamwrite.CreateTableOutput, error)
WriteRecords(*timestreamwrite.WriteRecordsInput) (*timestreamwrite.WriteRecordsOutput, error) WriteRecords(context.Context, *timestreamwrite.WriteRecordsInput, ...func(*timestreamwrite.Options)) (*timestreamwrite.WriteRecordsOutput, error)
DescribeDatabase(*timestreamwrite.DescribeDatabaseInput) (*timestreamwrite.DescribeDatabaseOutput, error) DescribeDatabase(context.Context, *timestreamwrite.DescribeDatabaseInput, ...func(*timestreamwrite.Options)) (*timestreamwrite.DescribeDatabaseOutput, error)
} }
) )
@ -170,11 +173,11 @@ var sampleConfig = `
// WriteFactory function provides a way to mock the client instantiation for testing purposes. // WriteFactory function provides a way to mock the client instantiation for testing purposes.
var WriteFactory = func(credentialConfig *internalaws.CredentialConfig) (WriteClient, error) { var WriteFactory = func(credentialConfig *internalaws.CredentialConfig) (WriteClient, error) {
configProvider, err := credentialConfig.Credentials() cfg, err := credentialConfig.Credentials()
if err != nil { if err != nil {
return nil, err return &timestreamwrite.Client{}, err
} }
return timestreamwrite.New(configProvider), nil return timestreamwrite.NewFromConfig(cfg), nil
} }
func (t *Timestream) Connect() error { func (t *Timestream) Connect() error {
@ -235,7 +238,7 @@ func (t *Timestream) Connect() error {
describeDatabaseInput := &timestreamwrite.DescribeDatabaseInput{ describeDatabaseInput := &timestreamwrite.DescribeDatabaseInput{
DatabaseName: aws.String(t.DatabaseName), DatabaseName: aws.String(t.DatabaseName),
} }
describeDatabaseOutput, err := svc.DescribeDatabase(describeDatabaseInput) describeDatabaseOutput, err := svc.DescribeDatabase(context.Background(), describeDatabaseInput)
if err != nil { if err != nil {
t.Log.Errorf("Couldn't describe database '%s'. Check error, fix permissions, connectivity, create database.", t.DatabaseName) t.Log.Errorf("Couldn't describe database '%s'. Check error, fix permissions, connectivity, create database.", t.DatabaseName)
return err return err
@ -278,33 +281,45 @@ func (t *Timestream) Write(metrics []telegraf.Metric) error {
func (t *Timestream) writeToTimestream(writeRecordsInput *timestreamwrite.WriteRecordsInput, resourceNotFoundRetry bool) error { func (t *Timestream) writeToTimestream(writeRecordsInput *timestreamwrite.WriteRecordsInput, resourceNotFoundRetry bool) error {
t.Log.Debugf("Writing to Timestream: '%v' with ResourceNotFoundRetry: '%t'", writeRecordsInput, resourceNotFoundRetry) t.Log.Debugf("Writing to Timestream: '%v' with ResourceNotFoundRetry: '%t'", writeRecordsInput, resourceNotFoundRetry)
_, err := t.svc.WriteRecords(writeRecordsInput) _, err := t.svc.WriteRecords(context.Background(), writeRecordsInput)
if err != nil { if err != nil {
// Telegraf will retry ingesting the metrics if an error is returned from the plugin. // Telegraf will retry ingesting the metrics if an error is returned from the plugin.
// Therefore, return error only for retryable exceptions: ThrottlingException and 5xx exceptions. // Therefore, return error only for retryable exceptions: ThrottlingException and 5xx exceptions.
if e, ok := err.(awserr.Error); ok { var notFound *types.ResourceNotFoundException
switch e.Code() { if errors.As(err, &notFound) {
case timestreamwrite.ErrCodeResourceNotFoundException: if resourceNotFoundRetry {
if resourceNotFoundRetry { t.Log.Warnf("Failed to write to Timestream database '%s' table '%s'. Error: '%s'",
t.Log.Warnf("Failed to write to Timestream database '%s' table '%s'. Error: '%s'", t.DatabaseName, *writeRecordsInput.TableName, notFound)
t.DatabaseName, *writeRecordsInput.TableName, e) return t.createTableAndRetry(writeRecordsInput)
return t.createTableAndRetry(writeRecordsInput)
}
t.logWriteToTimestreamError(err, writeRecordsInput.TableName)
case timestreamwrite.ErrCodeThrottlingException:
return fmt.Errorf("unable to write to Timestream database '%s' table '%s'. Error: %s",
t.DatabaseName, *writeRecordsInput.TableName, err)
case timestreamwrite.ErrCodeInternalServerException:
return fmt.Errorf("unable to write to Timestream database '%s' table '%s'. Error: %s",
t.DatabaseName, *writeRecordsInput.TableName, err)
default:
t.logWriteToTimestreamError(err, writeRecordsInput.TableName)
} }
} else { t.logWriteToTimestreamError(notFound, writeRecordsInput.TableName)
}
var rejected *types.RejectedRecordsException
if errors.As(err, &rejected) {
t.logWriteToTimestreamError(err, writeRecordsInput.TableName)
return nil
}
var throttling *types.ThrottlingException
if errors.As(err, &throttling) {
return fmt.Errorf("unable to write to Timestream database '%s' table '%s'. Error: %s",
t.DatabaseName, *writeRecordsInput.TableName, throttling)
}
var internal *types.InternalServerException
if errors.As(err, &internal) {
return fmt.Errorf("unable to write to Timestream database '%s' table '%s'. Error: %s",
t.DatabaseName, *writeRecordsInput.TableName, internal)
}
var operation *smithy.OperationError
if !errors.As(err, &operation) {
// Retry other, non-aws errors. // Retry other, non-aws errors.
return fmt.Errorf("unable to write to Timestream database '%s' table '%s'. Error: %s", return fmt.Errorf("unable to write to Timestream database '%s' table '%s'. Error: %s",
t.DatabaseName, *writeRecordsInput.TableName, err) t.DatabaseName, *writeRecordsInput.TableName, err)
} }
t.logWriteToTimestreamError(err, writeRecordsInput.TableName)
} }
return nil return nil
} }
@ -334,27 +349,25 @@ func (t *Timestream) createTable(tableName *string) error {
createTableInput := &timestreamwrite.CreateTableInput{ createTableInput := &timestreamwrite.CreateTableInput{
DatabaseName: aws.String(t.DatabaseName), DatabaseName: aws.String(t.DatabaseName),
TableName: aws.String(*tableName), TableName: aws.String(*tableName),
RetentionProperties: &timestreamwrite.RetentionProperties{ RetentionProperties: &types.RetentionProperties{
MagneticStoreRetentionPeriodInDays: aws.Int64(t.CreateTableMagneticStoreRetentionPeriodInDays), MagneticStoreRetentionPeriodInDays: t.CreateTableMagneticStoreRetentionPeriodInDays,
MemoryStoreRetentionPeriodInHours: aws.Int64(t.CreateTableMemoryStoreRetentionPeriodInHours), MemoryStoreRetentionPeriodInHours: t.CreateTableMemoryStoreRetentionPeriodInHours,
}, },
} }
var tags []*timestreamwrite.Tag var tags []types.Tag
for key, val := range t.CreateTableTags { for key, val := range t.CreateTableTags {
tags = append(tags, &timestreamwrite.Tag{ tags = append(tags, types.Tag{
Key: aws.String(key), Key: aws.String(key),
Value: aws.String(val), Value: aws.String(val),
}) })
} }
createTableInput.SetTags(tags) createTableInput.Tags = tags
_, err := t.svc.CreateTable(createTableInput) _, err := t.svc.CreateTable(context.Background(), createTableInput)
if err != nil { if err != nil {
if e, ok := err.(awserr.Error); ok { if _, ok := err.(*types.ConflictException); ok {
// if the table was created in the meantime, it's ok. // if the table was created in the meantime, it's ok.
if e.Code() == timestreamwrite.ErrCodeConflictException { return nil
return nil
}
} }
return err return err
} }
@ -380,17 +393,17 @@ func (t *Timestream) TransformMetrics(metrics []telegraf.Metric) []*timestreamwr
newWriteRecord := &timestreamwrite.WriteRecordsInput{ newWriteRecord := &timestreamwrite.WriteRecordsInput{
DatabaseName: aws.String(t.DatabaseName), DatabaseName: aws.String(t.DatabaseName),
Records: records, Records: records,
CommonAttributes: &timestreamwrite.Record{ CommonAttributes: &types.Record{
Dimensions: dimensions, Dimensions: dimensions,
Time: aws.String(timeValue), Time: aws.String(timeValue),
TimeUnit: aws.String(timeUnit), TimeUnit: timeUnit,
}, },
} }
if t.MappingMode == MappingModeSingleTable { if t.MappingMode == MappingModeSingleTable {
newWriteRecord.SetTableName(t.SingleTableName) newWriteRecord.TableName = &t.SingleTableName
} }
if t.MappingMode == MappingModeMultiTable { if t.MappingMode == MappingModeMultiTable {
newWriteRecord.SetTableName(m.Name()) newWriteRecord.TableName = aws.String(m.Name())
} }
writeRequests[id] = newWriteRecord writeRequests[id] = newWriteRecord
@ -440,17 +453,17 @@ func hashFromMetricTimeNameTagKeys(m telegraf.Metric) uint64 {
return h.Sum64() return h.Sum64()
} }
func (t *Timestream) buildDimensions(point telegraf.Metric) []*timestreamwrite.Dimension { func (t *Timestream) buildDimensions(point telegraf.Metric) []types.Dimension {
var dimensions []*timestreamwrite.Dimension var dimensions []types.Dimension
for tagName, tagValue := range point.Tags() { for tagName, tagValue := range point.Tags() {
dimension := &timestreamwrite.Dimension{ dimension := types.Dimension{
Name: aws.String(tagName), Name: aws.String(tagName),
Value: aws.String(tagValue), Value: aws.String(tagValue),
} }
dimensions = append(dimensions, dimension) dimensions = append(dimensions, dimension)
} }
if t.MappingMode == MappingModeSingleTable { if t.MappingMode == MappingModeSingleTable {
dimension := &timestreamwrite.Dimension{ dimension := types.Dimension{
Name: aws.String(t.SingleTableDimensionNameForTelegrafMeasurementName), Name: aws.String(t.SingleTableDimensionNameForTelegrafMeasurementName),
Value: aws.String(point.Name()), Value: aws.String(point.Name()),
} }
@ -463,8 +476,8 @@ func (t *Timestream) buildDimensions(point telegraf.Metric) []*timestreamwrite.D
// Tags and time are not included - common attributes are built separately. // Tags and time are not included - common attributes are built separately.
// Records with unsupported Metric Field type are skipped. // Records with unsupported Metric Field type are skipped.
// It returns an array of Timestream write records. // It returns an array of Timestream write records.
func (t *Timestream) buildWriteRecords(point telegraf.Metric) []*timestreamwrite.Record { func (t *Timestream) buildWriteRecords(point telegraf.Metric) []types.Record {
var records []*timestreamwrite.Record var records []types.Record
for fieldName, fieldValue := range point.Fields() { for fieldName, fieldValue := range point.Fields() {
stringFieldValue, stringFieldValueType, ok := convertValue(fieldValue) stringFieldValue, stringFieldValueType, ok := convertValue(fieldValue)
if !ok { if !ok {
@ -473,9 +486,9 @@ func (t *Timestream) buildWriteRecords(point telegraf.Metric) []*timestreamwrite
fieldName, reflect.TypeOf(fieldValue)) fieldName, reflect.TypeOf(fieldValue))
continue continue
} }
record := &timestreamwrite.Record{ record := types.Record{
MeasureName: aws.String(fieldName), MeasureName: aws.String(fieldName),
MeasureValueType: aws.String(stringFieldValueType), MeasureValueType: stringFieldValueType,
MeasureValue: aws.String(stringFieldValue), MeasureValue: aws.String(stringFieldValue),
} }
records = append(records, record) records = append(records, record)
@ -486,13 +499,13 @@ func (t *Timestream) buildWriteRecords(point telegraf.Metric) []*timestreamwrite
// partitionRecords splits the Timestream records into smaller slices of a max size // partitionRecords splits the Timestream records into smaller slices of a max size
// so that are under the limit for the Timestream API call. // so that are under the limit for the Timestream API call.
// It returns the array of array of records. // It returns the array of array of records.
func partitionRecords(size int, records []*timestreamwrite.Record) [][]*timestreamwrite.Record { func partitionRecords(size int, records []types.Record) [][]types.Record {
numberOfPartitions := len(records) / size numberOfPartitions := len(records) / size
if len(records)%size != 0 { if len(records)%size != 0 {
numberOfPartitions++ numberOfPartitions++
} }
partitions := make([][]*timestreamwrite.Record, numberOfPartitions) partitions := make([][]types.Record, numberOfPartitions)
for i := 0; i < numberOfPartitions; i++ { for i := 0; i < numberOfPartitions; i++ {
start := size * i start := size * i
@ -509,25 +522,19 @@ func partitionRecords(size int, records []*timestreamwrite.Record) [][]*timestre
// getTimestreamTime produces Timestream TimeUnit and TimeValue with minimum possible granularity // getTimestreamTime produces Timestream TimeUnit and TimeValue with minimum possible granularity
// while maintaining the same information. // while maintaining the same information.
func getTimestreamTime(time time.Time) (timeUnit string, timeValue string) { func getTimestreamTime(t time.Time) (timeUnit types.TimeUnit, timeValue string) {
const ( nanosTime := t.UnixNano()
TimeUnitS = "SECONDS"
TimeUnitMS = "MILLISECONDS"
TimeUnitUS = "MICROSECONDS"
TimeUnitNS = "NANOSECONDS"
)
nanosTime := time.UnixNano()
if nanosTime%1e9 == 0 { if nanosTime%1e9 == 0 {
timeUnit = TimeUnitS timeUnit = types.TimeUnitSeconds
timeValue = strconv.FormatInt(nanosTime/1e9, 10) timeValue = strconv.FormatInt(nanosTime/1e9, 10)
} else if nanosTime%1e6 == 0 { } else if nanosTime%1e6 == 0 {
timeUnit = TimeUnitMS timeUnit = types.TimeUnitMilliseconds
timeValue = strconv.FormatInt(nanosTime/1e6, 10) timeValue = strconv.FormatInt(nanosTime/1e6, 10)
} else if nanosTime%1e3 == 0 { } else if nanosTime%1e3 == 0 {
timeUnit = TimeUnitUS timeUnit = types.TimeUnitMicroseconds
timeValue = strconv.FormatInt(nanosTime/1e3, 10) timeValue = strconv.FormatInt(nanosTime/1e3, 10)
} else { } else {
timeUnit = TimeUnitNS timeUnit = types.TimeUnitNanoseconds
timeValue = strconv.FormatInt(nanosTime, 10) timeValue = strconv.FormatInt(nanosTime, 10)
} }
return return
@ -535,61 +542,55 @@ func getTimestreamTime(time time.Time) (timeUnit string, timeValue string) {
// convertValue converts single Field value from Telegraf Metric and produces // convertValue converts single Field value from Telegraf Metric and produces
// value, valueType Timestream representation. // value, valueType Timestream representation.
func convertValue(v interface{}) (value string, valueType string, ok bool) { func convertValue(v interface{}) (value string, valueType types.MeasureValueType, ok bool) {
const (
TypeBigInt = "BIGINT"
TypeDouble = "DOUBLE"
TypeBoolean = "BOOLEAN"
TypeVarchar = "VARCHAR"
)
ok = true ok = true
switch t := v.(type) { switch t := v.(type) {
case int: case int:
valueType = TypeBigInt valueType = types.MeasureValueTypeBigint
value = strconv.FormatInt(int64(t), 10) value = strconv.FormatInt(int64(t), 10)
case int8: case int8:
valueType = TypeBigInt valueType = types.MeasureValueTypeBigint
value = strconv.FormatInt(int64(t), 10) value = strconv.FormatInt(int64(t), 10)
case int16: case int16:
valueType = TypeBigInt valueType = types.MeasureValueTypeBigint
value = strconv.FormatInt(int64(t), 10) value = strconv.FormatInt(int64(t), 10)
case int32: case int32:
valueType = TypeBigInt valueType = types.MeasureValueTypeBigint
value = strconv.FormatInt(int64(t), 10) value = strconv.FormatInt(int64(t), 10)
case int64: case int64:
valueType = TypeBigInt valueType = types.MeasureValueTypeBigint
value = strconv.FormatInt(t, 10) value = strconv.FormatInt(t, 10)
case uint: case uint:
valueType = TypeBigInt valueType = types.MeasureValueTypeBigint
value = strconv.FormatUint(uint64(t), 10) value = strconv.FormatUint(uint64(t), 10)
case uint8: case uint8:
valueType = TypeBigInt valueType = types.MeasureValueTypeBigint
value = strconv.FormatUint(uint64(t), 10) value = strconv.FormatUint(uint64(t), 10)
case uint16: case uint16:
valueType = TypeBigInt valueType = types.MeasureValueTypeBigint
value = strconv.FormatUint(uint64(t), 10) value = strconv.FormatUint(uint64(t), 10)
case uint32: case uint32:
valueType = TypeBigInt valueType = types.MeasureValueTypeBigint
value = strconv.FormatUint(uint64(t), 10) value = strconv.FormatUint(uint64(t), 10)
case uint64: case uint64:
valueType = TypeBigInt valueType = types.MeasureValueTypeBigint
value = strconv.FormatUint(t, 10) value = strconv.FormatUint(t, 10)
case float32: case float32:
valueType = TypeDouble valueType = types.MeasureValueTypeDouble
value = strconv.FormatFloat(float64(t), 'f', -1, 32) value = strconv.FormatFloat(float64(t), 'f', -1, 32)
case float64: case float64:
valueType = TypeDouble valueType = types.MeasureValueTypeDouble
value = strconv.FormatFloat(t, 'f', -1, 64) value = strconv.FormatFloat(t, 'f', -1, 64)
case bool: case bool:
valueType = TypeBoolean valueType = types.MeasureValueTypeBoolean
if t { if t {
value = "true" value = "true"
} else { } else {
value = "false" value = "false"
} }
case string: case string:
valueType = TypeVarchar valueType = types.MeasureValueTypeVarchar
value = t value = t
default: default:
// Skip unsupported type. // Skip unsupported type.
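writeToTimestream above replaces the v1 error-code switch with errors.As against the generated exception types, falling back to smithy.OperationError to decide whether the failure is AWS-side at all. A condensed sketch of that decision structure (it folds away the resource-not-found/table-creation branch, so it is an illustration rather than the plugin's exact control flow):

```go
package main

import (
	"errors"
	"fmt"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/service/timestreamwrite/types"
	"github.com/aws/smithy-go"
)

// retryable mirrors the shape of the error handling above: throttling and
// internal-server exceptions are returned to Telegraf for retry, rejected
// records are dropped, and errors that are not AWS operation errors are
// also returned for retry.
func retryable(err error) bool {
	var throttling *types.ThrottlingException
	var internal *types.InternalServerException
	if errors.As(err, &throttling) || errors.As(err, &internal) {
		return true
	}
	var rejected *types.RejectedRecordsException
	if errors.As(err, &rejected) {
		return false
	}
	var op *smithy.OperationError
	return !errors.As(err, &op) // non-AWS errors bubble up for retry
}

func main() {
	err := &types.ThrottlingException{Message: aws.String("Throttling Test")}
	fmt.Println(retryable(err)) // true
}
```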

View File

@ -4,8 +4,8 @@ import (
"testing" "testing"
"time" "time"
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go/service/timestreamwrite" "github.com/aws/aws-sdk-go-v2/service/timestreamwrite/types"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -19,57 +19,57 @@ func TestGetTimestreamTime(t *testing.T) {
tOnlySeconds := time.Date(2020, time.November, 10, 23, 44, 20, 0, time.UTC) tOnlySeconds := time.Date(2020, time.November, 10, 23, 44, 20, 0, time.UTC)
tUnitNanos, tValueNanos := getTimestreamTime(tWithNanos) tUnitNanos, tValueNanos := getTimestreamTime(tWithNanos)
assertions.Equal("NANOSECONDS", tUnitNanos) assertions.Equal(types.TimeUnitNanoseconds, tUnitNanos)
assertions.Equal("1605051860000000123", tValueNanos) assertions.Equal("1605051860000000123", tValueNanos)
tUnitMicros, tValueMicros := getTimestreamTime(tWithMicros) tUnitMicros, tValueMicros := getTimestreamTime(tWithMicros)
assertions.Equal("MICROSECONDS", tUnitMicros) assertions.Equal(types.TimeUnitMicroseconds, tUnitMicros)
assertions.Equal("1605051860000123", tValueMicros) assertions.Equal("1605051860000123", tValueMicros)
tUnitMillis, tValueMillis := getTimestreamTime(tWithMillis) tUnitMillis, tValueMillis := getTimestreamTime(tWithMillis)
assertions.Equal("MILLISECONDS", tUnitMillis) assertions.Equal(types.TimeUnitMilliseconds, tUnitMillis)
assertions.Equal("1605051860123", tValueMillis) assertions.Equal("1605051860123", tValueMillis)
tUnitSeconds, tValueSeconds := getTimestreamTime(tOnlySeconds) tUnitSeconds, tValueSeconds := getTimestreamTime(tOnlySeconds)
assertions.Equal("SECONDS", tUnitSeconds) assertions.Equal(types.TimeUnitSeconds, tUnitSeconds)
assertions.Equal("1605051860", tValueSeconds) assertions.Equal("1605051860", tValueSeconds)
} }
func TestPartitionRecords(t *testing.T) { func TestPartitionRecords(t *testing.T) {
assertions := assert.New(t) assertions := assert.New(t)
testDatum := timestreamwrite.Record{ testDatum := types.Record{
MeasureName: aws.String("Foo"), MeasureName: aws.String("Foo"),
MeasureValueType: aws.String("DOUBLE"), MeasureValueType: types.MeasureValueTypeDouble,
MeasureValue: aws.String("123"), MeasureValue: aws.String("123"),
} }
var zeroDatum []*timestreamwrite.Record var zeroDatum []types.Record
oneDatum := []*timestreamwrite.Record{&testDatum} oneDatum := []types.Record{testDatum}
twoDatum := []*timestreamwrite.Record{&testDatum, &testDatum} twoDatum := []types.Record{testDatum, testDatum}
threeDatum := []*timestreamwrite.Record{&testDatum, &testDatum, &testDatum} threeDatum := []types.Record{testDatum, testDatum, testDatum}
assertions.Equal([][]*timestreamwrite.Record{}, partitionRecords(2, zeroDatum)) assertions.Equal([][]types.Record{}, partitionRecords(2, zeroDatum))
assertions.Equal([][]*timestreamwrite.Record{oneDatum}, partitionRecords(2, oneDatum)) assertions.Equal([][]types.Record{oneDatum}, partitionRecords(2, oneDatum))
assertions.Equal([][]*timestreamwrite.Record{oneDatum}, partitionRecords(2, oneDatum)) assertions.Equal([][]types.Record{oneDatum}, partitionRecords(2, oneDatum))
assertions.Equal([][]*timestreamwrite.Record{twoDatum}, partitionRecords(2, twoDatum)) assertions.Equal([][]types.Record{twoDatum}, partitionRecords(2, twoDatum))
assertions.Equal([][]*timestreamwrite.Record{twoDatum, oneDatum}, partitionRecords(2, threeDatum)) assertions.Equal([][]types.Record{twoDatum, oneDatum}, partitionRecords(2, threeDatum))
} }
func TestConvertValueSupported(t *testing.T) { func TestConvertValueSupported(t *testing.T) {
intInputValues := []interface{}{-1, int8(-2), int16(-3), int32(-4), int64(-5)} intInputValues := []interface{}{-1, int8(-2), int16(-3), int32(-4), int64(-5)}
intOutputValues := []string{"-1", "-2", "-3", "-4", "-5"} intOutputValues := []string{"-1", "-2", "-3", "-4", "-5"}
intOutputValueTypes := []string{"BIGINT", "BIGINT", "BIGINT", "BIGINT", "BIGINT"} intOutputValueTypes := []types.MeasureValueType{types.MeasureValueTypeBigint, types.MeasureValueTypeBigint, types.MeasureValueTypeBigint, types.MeasureValueTypeBigint, types.MeasureValueTypeBigint}
testConvertValueSupportedCases(t, intInputValues, intOutputValues, intOutputValueTypes) testConvertValueSupportedCases(t, intInputValues, intOutputValues, intOutputValueTypes)
uintInputValues := []interface{}{uint(1), uint8(2), uint16(3), uint32(4), uint64(5)} uintInputValues := []interface{}{uint(1), uint8(2), uint16(3), uint32(4), uint64(5)}
uintOutputValues := []string{"1", "2", "3", "4", "5"} uintOutputValues := []string{"1", "2", "3", "4", "5"}
uintOutputValueTypes := []string{"BIGINT", "BIGINT", "BIGINT", "BIGINT", "BIGINT"} uintOutputValueTypes := []types.MeasureValueType{types.MeasureValueTypeBigint, types.MeasureValueTypeBigint, types.MeasureValueTypeBigint, types.MeasureValueTypeBigint, types.MeasureValueTypeBigint}
testConvertValueSupportedCases(t, uintInputValues, uintOutputValues, uintOutputValueTypes) testConvertValueSupportedCases(t, uintInputValues, uintOutputValues, uintOutputValueTypes)
otherInputValues := []interface{}{"foo", float32(22.123), 22.1234, true} otherInputValues := []interface{}{"foo", float32(22.123), 22.1234, true}
otherOutputValues := []string{"foo", "22.123", "22.1234", "true"} otherOutputValues := []string{"foo", "22.123", "22.1234", "true"}
otherOutputValueTypes := []string{"VARCHAR", "DOUBLE", "DOUBLE", "BOOLEAN"} otherOutputValueTypes := []types.MeasureValueType{types.MeasureValueTypeVarchar, types.MeasureValueTypeDouble, types.MeasureValueTypeDouble, types.MeasureValueTypeBoolean}
testConvertValueSupportedCases(t, otherInputValues, otherOutputValues, otherOutputValueTypes) testConvertValueSupportedCases(t, otherInputValues, otherOutputValues, otherOutputValueTypes)
} }
@ -80,7 +80,7 @@ func TestConvertValueUnsupported(t *testing.T) {
} }
func testConvertValueSupportedCases(t *testing.T, func testConvertValueSupportedCases(t *testing.T,
inputValues []interface{}, outputValues []string, outputValueTypes []string) { inputValues []interface{}, outputValues []string, outputValueTypes []types.MeasureValueType) {
assertions := assert.New(t) assertions := assert.New(t)
for i, inputValue := range inputValues { for i, inputValue := range inputValues {
v, vt, ok := convertValue(inputValue) v, vt, ok := convertValue(inputValue)
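Swapping the string literals in these assertions for types constants is safe because the v2 enums are typed strings carrying the same values; a quick illustrative check:

```go
package main

import (
	"fmt"

	"github.com/aws/aws-sdk-go-v2/service/timestreamwrite/types"
)

func main() {
	// The v2 enums are typed string constants, so the old literals in
	// these assertions map onto them one-to-one.
	fmt.Println(types.TimeUnitSeconds == "SECONDS")        // true
	fmt.Println(types.MeasureValueTypeBigint == "BIGINT")  // true
	fmt.Println(string(types.MeasureValueTypeDouble))      // DOUBLE
	fmt.Println(string(types.MeasureValueTypeVarchar))     // VARCHAR
}
```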

View File

@ -1,6 +1,7 @@
package timestream_test package timestream
import ( import (
"context"
"fmt" "fmt"
"reflect" "reflect"
"sort" "sort"
@ -9,13 +10,11 @@ import (
"testing" "testing"
"time" "time"
"github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go-v2/aws"
"github.com/aws/aws-sdk-go-v2/service/timestreamwrite"
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go-v2/service/timestreamwrite/types"
"github.com/aws/aws-sdk-go/service/timestreamwrite"
"github.com/influxdata/telegraf" "github.com/influxdata/telegraf"
internalaws "github.com/influxdata/telegraf/config/aws" internalaws "github.com/influxdata/telegraf/config/aws"
ts "github.com/influxdata/telegraf/plugins/outputs/timestream"
"github.com/influxdata/telegraf/testutil" "github.com/influxdata/telegraf/testutil"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
@ -34,40 +33,37 @@ var time2 = time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC)
const time2Epoch = "1257894000" const time2Epoch = "1257894000"
const timeUnit = "SECONDS"
const metricName1 = "metricName1" const metricName1 = "metricName1"
const metricName2 = "metricName2" const metricName2 = "metricName2"
type mockTimestreamClient struct { type mockTimestreamClient struct{}
}
func (m *mockTimestreamClient) CreateTable(*timestreamwrite.CreateTableInput) (*timestreamwrite.CreateTableOutput, error) { func (m *mockTimestreamClient) CreateTable(context.Context, *timestreamwrite.CreateTableInput, ...func(*timestreamwrite.Options)) (*timestreamwrite.CreateTableOutput, error) {
return nil, nil return nil, nil
} }
func (m *mockTimestreamClient) WriteRecords(*timestreamwrite.WriteRecordsInput) (*timestreamwrite.WriteRecordsOutput, error) { func (m *mockTimestreamClient) WriteRecords(context.Context, *timestreamwrite.WriteRecordsInput, ...func(*timestreamwrite.Options)) (*timestreamwrite.WriteRecordsOutput, error) {
return nil, nil return nil, nil
} }
func (m *mockTimestreamClient) DescribeDatabase(*timestreamwrite.DescribeDatabaseInput) (*timestreamwrite.DescribeDatabaseOutput, error) { func (m *mockTimestreamClient) DescribeDatabase(context.Context, *timestreamwrite.DescribeDatabaseInput, ...func(*timestreamwrite.Options)) (*timestreamwrite.DescribeDatabaseOutput, error) {
return nil, fmt.Errorf("hello from DescribeDatabase") return nil, fmt.Errorf("hello from DescribeDatabase")
} }
func TestConnectValidatesConfigParameters(t *testing.T) { func TestConnectValidatesConfigParameters(t *testing.T) {
assertions := assert.New(t) assertions := assert.New(t)
ts.WriteFactory = func(credentialConfig *internalaws.CredentialConfig) (ts.WriteClient, error) { WriteFactory = func(credentialConfig *internalaws.CredentialConfig) (WriteClient, error) {
return &mockTimestreamClient{}, nil return &mockTimestreamClient{}, nil
} }
// checking base arguments // checking base arguments
noDatabaseName := ts.Timestream{Log: testutil.Logger{}} noDatabaseName := Timestream{Log: testutil.Logger{}}
assertions.Contains(noDatabaseName.Connect().Error(), "DatabaseName") assertions.Contains(noDatabaseName.Connect().Error(), "DatabaseName")
noMappingMode := ts.Timestream{ noMappingMode := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
Log: testutil.Logger{}, Log: testutil.Logger{},
} }
assertions.Contains(noMappingMode.Connect().Error(), "MappingMode") assertions.Contains(noMappingMode.Connect().Error(), "MappingMode")
incorrectMappingMode := ts.Timestream{ incorrectMappingMode := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
MappingMode: "foo", MappingMode: "foo",
Log: testutil.Logger{}, Log: testutil.Logger{},
@ -75,24 +71,24 @@ func TestConnectValidatesConfigParameters(t *testing.T) {
assertions.Contains(incorrectMappingMode.Connect().Error(), "single-table") assertions.Contains(incorrectMappingMode.Connect().Error(), "single-table")
// multi-table arguments // multi-table arguments
validMappingModeMultiTable := ts.Timestream{ validMappingModeMultiTable := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
MappingMode: ts.MappingModeMultiTable, MappingMode: MappingModeMultiTable,
Log: testutil.Logger{}, Log: testutil.Logger{},
} }
assertions.Nil(validMappingModeMultiTable.Connect()) assertions.Nil(validMappingModeMultiTable.Connect())
singleTableNameWithMultiTable := ts.Timestream{ singleTableNameWithMultiTable := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
MappingMode: ts.MappingModeMultiTable, MappingMode: MappingModeMultiTable,
SingleTableName: testSingleTableName, SingleTableName: testSingleTableName,
Log: testutil.Logger{}, Log: testutil.Logger{},
} }
assertions.Contains(singleTableNameWithMultiTable.Connect().Error(), "SingleTableName") assertions.Contains(singleTableNameWithMultiTable.Connect().Error(), "SingleTableName")
singleTableDimensionWithMultiTable := ts.Timestream{ singleTableDimensionWithMultiTable := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
MappingMode: ts.MappingModeMultiTable, MappingMode: MappingModeMultiTable,
SingleTableDimensionNameForTelegrafMeasurementName: testSingleTableDim, SingleTableDimensionNameForTelegrafMeasurementName: testSingleTableDim,
Log: testutil.Logger{}, Log: testutil.Logger{},
} }
@ -100,25 +96,25 @@ func TestConnectValidatesConfigParameters(t *testing.T) {
"SingleTableDimensionNameForTelegrafMeasurementName") "SingleTableDimensionNameForTelegrafMeasurementName")
// single-table arguments // single-table arguments
noTableNameMappingModeSingleTable := ts.Timestream{ noTableNameMappingModeSingleTable := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
MappingMode: ts.MappingModeSingleTable, MappingMode: MappingModeSingleTable,
Log: testutil.Logger{}, Log: testutil.Logger{},
} }
assertions.Contains(noTableNameMappingModeSingleTable.Connect().Error(), "SingleTableName") assertions.Contains(noTableNameMappingModeSingleTable.Connect().Error(), "SingleTableName")
noDimensionNameMappingModeSingleTable := ts.Timestream{ noDimensionNameMappingModeSingleTable := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
MappingMode: ts.MappingModeSingleTable, MappingMode: MappingModeSingleTable,
SingleTableName: testSingleTableName, SingleTableName: testSingleTableName,
Log: testutil.Logger{}, Log: testutil.Logger{},
} }
assertions.Contains(noDimensionNameMappingModeSingleTable.Connect().Error(), assertions.Contains(noDimensionNameMappingModeSingleTable.Connect().Error(),
"SingleTableDimensionNameForTelegrafMeasurementName") "SingleTableDimensionNameForTelegrafMeasurementName")
validConfigurationMappingModeSingleTable := ts.Timestream{ validConfigurationMappingModeSingleTable := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
MappingMode: ts.MappingModeSingleTable, MappingMode: MappingModeSingleTable,
SingleTableName: testSingleTableName, SingleTableName: testSingleTableName,
SingleTableDimensionNameForTelegrafMeasurementName: testSingleTableDim, SingleTableDimensionNameForTelegrafMeasurementName: testSingleTableDim,
Log: testutil.Logger{}, Log: testutil.Logger{},
@ -126,18 +122,18 @@ func TestConnectValidatesConfigParameters(t *testing.T) {
assertions.Nil(validConfigurationMappingModeSingleTable.Connect()) assertions.Nil(validConfigurationMappingModeSingleTable.Connect())
// create table arguments // create table arguments
createTableNoMagneticRetention := ts.Timestream{ createTableNoMagneticRetention := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
MappingMode: ts.MappingModeMultiTable, MappingMode: MappingModeMultiTable,
CreateTableIfNotExists: true, CreateTableIfNotExists: true,
Log: testutil.Logger{}, Log: testutil.Logger{},
} }
assertions.Contains(createTableNoMagneticRetention.Connect().Error(), assertions.Contains(createTableNoMagneticRetention.Connect().Error(),
"CreateTableMagneticStoreRetentionPeriodInDays") "CreateTableMagneticStoreRetentionPeriodInDays")
createTableNoMemoryRetention := ts.Timestream{ createTableNoMemoryRetention := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
MappingMode: ts.MappingModeMultiTable, MappingMode: MappingModeMultiTable,
CreateTableIfNotExists: true, CreateTableIfNotExists: true,
CreateTableMagneticStoreRetentionPeriodInDays: 3, CreateTableMagneticStoreRetentionPeriodInDays: 3,
Log: testutil.Logger{}, Log: testutil.Logger{},
@ -145,9 +141,9 @@ func TestConnectValidatesConfigParameters(t *testing.T) {
assertions.Contains(createTableNoMemoryRetention.Connect().Error(), assertions.Contains(createTableNoMemoryRetention.Connect().Error(),
"CreateTableMemoryStoreRetentionPeriodInHours") "CreateTableMemoryStoreRetentionPeriodInHours")
createTableValid := ts.Timestream{ createTableValid := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
MappingMode: ts.MappingModeMultiTable, MappingMode: MappingModeMultiTable,
CreateTableIfNotExists: true, CreateTableIfNotExists: true,
CreateTableMagneticStoreRetentionPeriodInDays: 3, CreateTableMagneticStoreRetentionPeriodInDays: 3,
CreateTableMemoryStoreRetentionPeriodInHours: 3, CreateTableMemoryStoreRetentionPeriodInHours: 3,
@ -156,9 +152,9 @@ func TestConnectValidatesConfigParameters(t *testing.T) {
assertions.Nil(createTableValid.Connect()) assertions.Nil(createTableValid.Connect())
// describe table on start arguments // describe table on start arguments
describeTableInvoked := ts.Timestream{ describeTableInvoked := Timestream{
DatabaseName: tsDbName, DatabaseName: tsDbName,
MappingMode: ts.MappingModeMultiTable, MappingMode: MappingModeMultiTable,
DescribeDatabaseOnStart: true, DescribeDatabaseOnStart: true,
Log: testutil.Logger{}, Log: testutil.Logger{},
} }
@ -169,31 +165,30 @@ type mockTimestreamErrorClient struct {
ErrorToReturnOnWriteRecords error ErrorToReturnOnWriteRecords error
} }
func (m *mockTimestreamErrorClient) CreateTable(*timestreamwrite.CreateTableInput) (*timestreamwrite.CreateTableOutput, error) { func (m *mockTimestreamErrorClient) CreateTable(context.Context, *timestreamwrite.CreateTableInput, ...func(*timestreamwrite.Options)) (*timestreamwrite.CreateTableOutput, error) {
return nil, nil return nil, nil
} }
func (m *mockTimestreamErrorClient) WriteRecords(*timestreamwrite.WriteRecordsInput) (*timestreamwrite.WriteRecordsOutput, error) { func (m *mockTimestreamErrorClient) WriteRecords(context.Context, *timestreamwrite.WriteRecordsInput, ...func(*timestreamwrite.Options)) (*timestreamwrite.WriteRecordsOutput, error) {
return nil, m.ErrorToReturnOnWriteRecords return nil, m.ErrorToReturnOnWriteRecords
} }
func (m *mockTimestreamErrorClient) DescribeDatabase(*timestreamwrite.DescribeDatabaseInput) (*timestreamwrite.DescribeDatabaseOutput, error) { func (m *mockTimestreamErrorClient) DescribeDatabase(context.Context, *timestreamwrite.DescribeDatabaseInput, ...func(*timestreamwrite.Options)) (*timestreamwrite.DescribeDatabaseOutput, error) {
return nil, nil return nil, nil
} }
func TestThrottlingErrorIsReturnedToTelegraf(t *testing.T) { func TestThrottlingErrorIsReturnedToTelegraf(t *testing.T) {
assertions := assert.New(t) assertions := assert.New(t)
ts.WriteFactory = func(credentialConfig *internalaws.CredentialConfig) (ts.WriteClient, error) { WriteFactory = func(credentialConfig *internalaws.CredentialConfig) (WriteClient, error) {
return &mockTimestreamErrorClient{ return &mockTimestreamErrorClient{
awserr.New(timestreamwrite.ErrCodeThrottlingException, ErrorToReturnOnWriteRecords: &types.ThrottlingException{Message: aws.String("Throttling Test")},
"Throttling Test", nil),
}, nil }, nil
} }
plugin := ts.Timestream{ plugin := Timestream{
MappingMode: ts.MappingModeMultiTable, MappingMode: MappingModeMultiTable,
DatabaseName: tsDbName, DatabaseName: tsDbName,
Log: testutil.Logger{}, Log: testutil.Logger{},
} }
plugin.Connect() assertions.NoError(plugin.Connect())
input := testutil.MustMetric( input := testutil.MustMetric(
metricName1, metricName1,
map[string]string{"tag1": "value1"}, map[string]string{"tag1": "value1"},
@ -209,19 +204,18 @@ func TestThrottlingErrorIsReturnedToTelegraf(t *testing.T) {
func TestRejectedRecordsErrorResultsInMetricsBeingSkipped(t *testing.T) { func TestRejectedRecordsErrorResultsInMetricsBeingSkipped(t *testing.T) {
assertions := assert.New(t) assertions := assert.New(t)
ts.WriteFactory = func(credentialConfig *internalaws.CredentialConfig) (ts.WriteClient, error) { WriteFactory = func(credentialConfig *internalaws.CredentialConfig) (WriteClient, error) {
return &mockTimestreamErrorClient{ return &mockTimestreamErrorClient{
awserr.New(timestreamwrite.ErrCodeRejectedRecordsException, ErrorToReturnOnWriteRecords: &types.RejectedRecordsException{Message: aws.String("RejectedRecords Test")},
"RejectedRecords Test", nil),
}, nil }, nil
} }
plugin := ts.Timestream{ plugin := Timestream{
MappingMode: ts.MappingModeMultiTable, MappingMode: MappingModeMultiTable,
DatabaseName: tsDbName, DatabaseName: tsDbName,
Log: testutil.Logger{}, Log: testutil.Logger{},
} }
plugin.Connect() assertions.NoError(plugin.Connect())
input := testutil.MustMetric( input := testutil.MustMetric(
metricName1, metricName1,
map[string]string{"tag1": "value1"}, map[string]string{"tag1": "value1"},
@ -271,7 +265,7 @@ func TestTransformMetricsSkipEmptyMetric(t *testing.T) {
dimensions: map[string]string{testSingleTableDim: metricName1}, dimensions: map[string]string{testSingleTableDim: metricName1},
measureValues: map[string]string{"value": "20"}, measureValues: map[string]string{"value": "20"},
}) })
comparisonTest(t, ts.MappingModeSingleTable, comparisonTest(t, MappingModeSingleTable,
[]telegraf.Metric{input1, input2, input3}, []telegraf.Metric{input1, input2, input3},
[]*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable})
@ -287,7 +281,7 @@ func TestTransformMetricsSkipEmptyMetric(t *testing.T) {
dimensions: map[string]string{}, dimensions: map[string]string{},
measureValues: map[string]string{"value": "20"}, measureValues: map[string]string{"value": "20"},
}) })
comparisonTest(t, ts.MappingModeMultiTable, comparisonTest(t, MappingModeMultiTable,
[]telegraf.Metric{input1, input2, input3}, []telegraf.Metric{input1, input2, input3},
[]*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable})
} }
@ -326,7 +320,7 @@ func TestTransformMetricsRequestsAboveLimitAreSplit(t *testing.T) {
dimensions: map[string]string{"tag1": "value1", testSingleTableDim: metricName1}, dimensions: map[string]string{"tag1": "value1", testSingleTableDim: metricName1},
measureValues: map[string]string{"value_supported" + strconv.Itoa(maxRecordsInWriteRecordsCall+1): "10"}, measureValues: map[string]string{"value_supported" + strconv.Itoa(maxRecordsInWriteRecordsCall+1): "10"},
}) })
comparisonTest(t, ts.MappingModeSingleTable, comparisonTest(t, MappingModeSingleTable,
inputs, inputs,
[]*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable})
@ -342,7 +336,7 @@ func TestTransformMetricsRequestsAboveLimitAreSplit(t *testing.T) {
dimensions: map[string]string{"tag1": "value1"}, dimensions: map[string]string{"tag1": "value1"},
measureValues: map[string]string{"value_supported" + strconv.Itoa(maxRecordsInWriteRecordsCall+1): "10"}, measureValues: map[string]string{"value_supported" + strconv.Itoa(maxRecordsInWriteRecordsCall+1): "10"},
}) })
comparisonTest(t, ts.MappingModeMultiTable, comparisonTest(t, MappingModeMultiTable,
inputs, inputs,
[]*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable})
} }
@ -378,7 +372,7 @@ func TestTransformMetricsDifferentDimensionsSameTimestampsAreWrittenSeparate(t *
measureValues: map[string]string{"value_supported3": "30"}, measureValues: map[string]string{"value_supported3": "30"},
}) })
comparisonTest(t, ts.MappingModeSingleTable, comparisonTest(t, MappingModeSingleTable,
[]telegraf.Metric{input1, input2}, []telegraf.Metric{input1, input2},
[]*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable})
@ -395,7 +389,7 @@ func TestTransformMetricsDifferentDimensionsSameTimestampsAreWrittenSeparate(t *
measureValues: map[string]string{"value_supported3": "30"}, measureValues: map[string]string{"value_supported3": "30"},
}) })
comparisonTest(t, ts.MappingModeMultiTable, comparisonTest(t, MappingModeMultiTable,
[]telegraf.Metric{input1, input2}, []telegraf.Metric{input1, input2},
[]*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable})
} }
@ -431,7 +425,7 @@ func TestTransformMetricsSameDimensionsDifferentDimensionValuesAreWrittenSeparat
measureValues: map[string]string{"value_supported1": "20"}, measureValues: map[string]string{"value_supported1": "20"},
}) })
comparisonTest(t, ts.MappingModeSingleTable, comparisonTest(t, MappingModeSingleTable,
[]telegraf.Metric{input1, input2}, []telegraf.Metric{input1, input2},
[]*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable})
@ -448,7 +442,7 @@ func TestTransformMetricsSameDimensionsDifferentDimensionValuesAreWrittenSeparat
measureValues: map[string]string{"value_supported1": "20"}, measureValues: map[string]string{"value_supported1": "20"},
}) })
comparisonTest(t, ts.MappingModeMultiTable, comparisonTest(t, MappingModeMultiTable,
[]telegraf.Metric{input1, input2}, []telegraf.Metric{input1, input2},
[]*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable})
} }
@ -484,7 +478,7 @@ func TestTransformMetricsSameDimensionsDifferentTimestampsAreWrittenSeparate(t *
measureValues: map[string]string{"value_supported3": "30"}, measureValues: map[string]string{"value_supported3": "30"},
}) })
comparisonTest(t, ts.MappingModeSingleTable, comparisonTest(t, MappingModeSingleTable,
[]telegraf.Metric{input1, input2}, []telegraf.Metric{input1, input2},
[]*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable})
@ -501,7 +495,7 @@ func TestTransformMetricsSameDimensionsDifferentTimestampsAreWrittenSeparate(t *
measureValues: map[string]string{"value_supported3": "30"}, measureValues: map[string]string{"value_supported3": "30"},
}) })
comparisonTest(t, ts.MappingModeMultiTable, comparisonTest(t, MappingModeMultiTable,
[]telegraf.Metric{input1, input2}, []telegraf.Metric{input1, input2},
[]*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable})
} }
@ -531,7 +525,7 @@ func TestTransformMetricsSameDimensionsSameTimestampsAreWrittenTogether(t *testi
measureValues: map[string]string{"value_supported1": "10", "value_supported2": "20", "value_supported3": "30"}, measureValues: map[string]string{"value_supported1": "10", "value_supported2": "20", "value_supported3": "30"},
}) })
comparisonTest(t, ts.MappingModeSingleTable, comparisonTest(t, MappingModeSingleTable,
[]telegraf.Metric{input1, input2}, []telegraf.Metric{input1, input2},
[]*timestreamwrite.WriteRecordsInput{expectedResultSingleTable}) []*timestreamwrite.WriteRecordsInput{expectedResultSingleTable})
@ -542,7 +536,7 @@ func TestTransformMetricsSameDimensionsSameTimestampsAreWrittenTogether(t *testi
measureValues: map[string]string{"value_supported1": "10", "value_supported2": "20", "value_supported3": "30"}, measureValues: map[string]string{"value_supported1": "10", "value_supported2": "20", "value_supported3": "30"},
}) })
comparisonTest(t, ts.MappingModeMultiTable, comparisonTest(t, MappingModeMultiTable,
[]telegraf.Metric{input1, input2}, []telegraf.Metric{input1, input2},
[]*timestreamwrite.WriteRecordsInput{expectedResultMultiTable}) []*timestreamwrite.WriteRecordsInput{expectedResultMultiTable})
} }
@ -578,7 +572,7 @@ func TestTransformMetricsDifferentMetricsAreWrittenToDifferentTablesInMultiTable
measureValues: map[string]string{"value_supported3": "30"}, measureValues: map[string]string{"value_supported3": "30"},
}) })
comparisonTest(t, ts.MappingModeSingleTable, comparisonTest(t, MappingModeSingleTable,
[]telegraf.Metric{input1, input2}, []telegraf.Metric{input1, input2},
[]*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1SingleTable, expectedResult2SingleTable})
@ -595,7 +589,7 @@ func TestTransformMetricsDifferentMetricsAreWrittenToDifferentTablesInMultiTable
measureValues: map[string]string{"value_supported3": "30"}, measureValues: map[string]string{"value_supported3": "30"},
}) })
comparisonTest(t, ts.MappingModeMultiTable, comparisonTest(t, MappingModeMultiTable,
[]telegraf.Metric{input1, input2}, []telegraf.Metric{input1, input2},
[]*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable}) []*timestreamwrite.WriteRecordsInput{expectedResult1MultiTable, expectedResult2MultiTable})
} }
@ -616,7 +610,7 @@ func TestTransformMetricsUnsupportedFieldsAreSkipped(t *testing.T) {
measureValues: map[string]string{"value_supported1": "10"}, measureValues: map[string]string{"value_supported1": "10"},
}) })
comparisonTest(t, ts.MappingModeSingleTable, comparisonTest(t, MappingModeSingleTable,
[]telegraf.Metric{metricWithUnsupportedField}, []telegraf.Metric{metricWithUnsupportedField},
[]*timestreamwrite.WriteRecordsInput{expectedResultSingleTable}) []*timestreamwrite.WriteRecordsInput{expectedResultSingleTable})
@ -627,7 +621,7 @@ func TestTransformMetricsUnsupportedFieldsAreSkipped(t *testing.T) {
measureValues: map[string]string{"value_supported1": "10"}, measureValues: map[string]string{"value_supported1": "10"},
}) })
comparisonTest(t, ts.MappingModeMultiTable, comparisonTest(t, MappingModeMultiTable,
[]telegraf.Metric{metricWithUnsupportedField}, []telegraf.Metric{metricWithUnsupportedField},
[]*timestreamwrite.WriteRecordsInput{expectedResultMultiTable}) []*timestreamwrite.WriteRecordsInput{expectedResultMultiTable})
} }
@ -637,10 +631,10 @@ func comparisonTest(t *testing.T,
telegrafMetrics []telegraf.Metric, telegrafMetrics []telegraf.Metric,
timestreamRecords []*timestreamwrite.WriteRecordsInput, timestreamRecords []*timestreamwrite.WriteRecordsInput,
) { ) {
var plugin ts.Timestream var plugin Timestream
switch mappingMode { switch mappingMode {
case ts.MappingModeSingleTable: case MappingModeSingleTable:
plugin = ts.Timestream{ plugin = Timestream{
MappingMode: mappingMode, MappingMode: mappingMode,
DatabaseName: tsDbName, DatabaseName: tsDbName,
@ -648,8 +642,8 @@ func comparisonTest(t *testing.T,
SingleTableDimensionNameForTelegrafMeasurementName: testSingleTableDim, SingleTableDimensionNameForTelegrafMeasurementName: testSingleTableDim,
Log: testutil.Logger{}, Log: testutil.Logger{},
} }
case ts.MappingModeMultiTable: case MappingModeMultiTable:
plugin = ts.Timestream{ plugin = Timestream{
MappingMode: mappingMode, MappingMode: mappingMode,
DatabaseName: tsDbName, DatabaseName: tsDbName,
Log: testutil.Logger{}, Log: testutil.Logger{},
@ -710,20 +704,20 @@ type SimpleInput struct {
} }
func buildExpectedRecords(i SimpleInput) *timestreamwrite.WriteRecordsInput { func buildExpectedRecords(i SimpleInput) *timestreamwrite.WriteRecordsInput {
var tsDimensions []*timestreamwrite.Dimension var tsDimensions []types.Dimension
for k, v := range i.dimensions { for k, v := range i.dimensions {
tsDimensions = append(tsDimensions, &timestreamwrite.Dimension{ tsDimensions = append(tsDimensions, types.Dimension{
Name: aws.String(k), Name: aws.String(k),
Value: aws.String(v), Value: aws.String(v),
}) })
} }
var tsRecords []*timestreamwrite.Record var tsRecords []types.Record
for k, v := range i.measureValues { for k, v := range i.measureValues {
tsRecords = append(tsRecords, &timestreamwrite.Record{ tsRecords = append(tsRecords, types.Record{
MeasureName: aws.String(k), MeasureName: aws.String(k),
MeasureValue: aws.String(v), MeasureValue: aws.String(v),
MeasureValueType: aws.String("DOUBLE"), MeasureValueType: types.MeasureValueTypeDouble,
}) })
} }
@ -731,10 +725,10 @@ func buildExpectedRecords(i SimpleInput) *timestreamwrite.WriteRecordsInput {
DatabaseName: aws.String(tsDbName), DatabaseName: aws.String(tsDbName),
TableName: aws.String(i.tableName), TableName: aws.String(i.tableName),
Records: tsRecords, Records: tsRecords,
CommonAttributes: &timestreamwrite.Record{ CommonAttributes: &types.Record{
Dimensions: tsDimensions, Dimensions: tsDimensions,
Time: aws.String(i.t), Time: aws.String(i.t),
TimeUnit: aws.String(timeUnit), TimeUnit: types.TimeUnitSeconds,
}, },
} }