//go:generate ../../../tools/readme_config_includer/generator
package postgresql_extensible

import (
	"bytes"
	_ "embed"
	"fmt"
	"io"
	"os"
	"strings"
	"time"

	// Required for SQL framework driver
	_ "github.com/jackc/pgx/v4/stdlib"

	"github.com/influxdata/telegraf"
	"github.com/influxdata/telegraf/config"
	"github.com/influxdata/telegraf/plugins/inputs"
	"github.com/influxdata/telegraf/plugins/inputs/postgresql"
)

//go:embed sample.conf
var sampleConfig string

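// Postgresql is the postgresql_extensible input plugin. It holds the plugin
// configuration together with the embedded postgresql.Service connection that
// is used to run the configured queries.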
type Postgresql struct {
	postgresql.Service
	Databases          []string `deprecated:"1.22.4;use the sqlquery option to specify database to use"`
	AdditionalTags     []string
	Timestamp          string
	Query              query
	Debug              bool
	PreparedStatements bool `toml:"prepared_statements"`

	Log telegraf.Logger
}

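// query holds the user-defined queries to run. Each entry corresponds to a
// [[inputs.postgresql_extensible.query]] table in the plugin configuration,
// with TOML keys matching the field names below. An illustrative entry
// (see sample.conf and the plugin README for the authoritative reference):
//
//	[[inputs.postgresql_extensible.query]]
//	  sqlquery = "SELECT * FROM pg_stat_database"
//	  version = 901
//	  withdbname = false
//	  tagvalue = ""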
type query []struct {
	Sqlquery    string
	Script      string
	Version     int
	Withdbname  bool `deprecated:"1.22.4;use the sqlquery option to specify database to use"`
	Tagvalue    string
	Measurement string
	Timestamp   string
}

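// ignoredColumns lists result columns that are never emitted as fields.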
var ignoredColumns = map[string]bool{"stats_reset": true}

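// SampleConfig returns the embedded sample configuration of the plugin.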
func (*Postgresql) SampleConfig() string {
	return sampleConfig
}

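// Init loads any queries referenced through the script option and enables
// PgBouncer compatibility mode when prepared statements are disabled.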
func (p *Postgresql) Init() error {
	var err error
	for i := range p.Query {
		if p.Query[i].Sqlquery == "" {
			p.Query[i].Sqlquery, err = ReadQueryFromFile(p.Query[i].Script)
			if err != nil {
				return err
			}
		}
	}
	p.Service.IsPgBouncer = !p.PreparedStatements
	return nil
}

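// IgnoredColumns returns the set of result columns that are skipped when
// building metrics.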
func (p *Postgresql) IgnoredColumns() map[string]bool {
	return ignoredColumns
}

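// ReadQueryFromFile returns the contents of the SQL script at filePath.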
func ReadQueryFromFile(filePath string) (string, error) {
	file, err := os.Open(filePath)
	if err != nil {
		return "", err
	}
	defer file.Close()

	query, err := io.ReadAll(file)
	if err != nil {
		return "", err
	}
	return string(query), err
}

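// Gather determines the server version and then runs every configured query
// whose minimum version is satisfied, emitting the resulting metrics.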
func (p *Postgresql) Gather(acc telegraf.Accumulator) error {
	var (
		err        error
		sqlQuery   string
		queryAddon string
		dbVersion  int
		query      string
		measName   string
	)

	// Retrieve the database version
	query = `SELECT setting::integer / 100 AS version FROM pg_settings WHERE name = 'server_version_num'`
	if err = p.DB.QueryRow(query).Scan(&dbVersion); err != nil {
		dbVersion = 0
	}

	// Process each configured query. A query is skipped when the server
	// version is lower than the query's minimum version.
	for i := range p.Query {
		sqlQuery = p.Query[i].Sqlquery

		if p.Query[i].Measurement != "" {
			measName = p.Query[i].Measurement
		} else {
			measName = "postgresql"
		}

		if p.Query[i].Withdbname {
			if len(p.Databases) != 0 {
				queryAddon = fmt.Sprintf(` IN ('%s')`, strings.Join(p.Databases, "','"))
			} else {
				queryAddon = " is not null"
			}
		} else {
			queryAddon = ""
		}
		sqlQuery += queryAddon

		if p.Query[i].Version <= dbVersion {
			p.gatherMetricsFromQuery(acc, sqlQuery, p.Query[i].Tagvalue, p.Query[i].Timestamp, measName)
		}
	}
	return nil
}

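// gatherMetricsFromQuery executes a single query and accumulates one metric
// per returned row, honoring the configured tag columns and timestamp column.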
func (p *Postgresql) gatherMetricsFromQuery(acc telegraf.Accumulator, sqlQuery string, tagValue string, timestamp string, measName string) {
	var columns []string

	rows, err := p.DB.Query(sqlQuery)
	if err != nil {
		acc.AddError(err)
		return
	}

	defer rows.Close()

	// grab the column information from the result
	if columns, err = rows.Columns(); err != nil {
		acc.AddError(err)
		return
	}

	p.AdditionalTags = nil
	if tagValue != "" {
		tagList := strings.Split(tagValue, ",")
		for t := range tagList {
			p.AdditionalTags = append(p.AdditionalTags, tagList[t])
		}
	}

	p.Timestamp = timestamp

	for rows.Next() {
		err = p.accRow(measName, rows, acc, columns)
		if err != nil {
			acc.AddError(err)
			break
		}
	}
}

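// scanner abstracts the Scan method of *sql.Rows so accRow can consume any
// row source.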
type scanner interface {
	Scan(dest ...interface{}) error
}

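// accRow scans a single result row, splits its columns into tags and fields,
// and adds the resulting metric to the accumulator.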
func (p *Postgresql) accRow(measName string, row scanner, acc telegraf.Accumulator, columns []string) error {
	var (
		err        error
		dbname     bytes.Buffer
		tagAddress string
		timestamp  time.Time
	)

	// this is where we'll store the column name with its *interface{}
	columnMap := make(map[string]*interface{})

	for _, column := range columns {
		columnMap[column] = new(interface{})
	}

	columnVars := make([]interface{}, 0, len(columnMap))
	// populate the array of interface{} with the pointers in the right order
	for i := 0; i < len(columnMap); i++ {
		columnVars = append(columnVars, columnMap[columns[i]])
	}

	// deconstruct array of variables and send to Scan
	if err = row.Scan(columnVars...); err != nil {
		return err
	}

	if c, ok := columnMap["datname"]; ok && *c != nil {
		// extract the database name from the column map
		switch datname := (*c).(type) {
		case string:
			if _, err := dbname.WriteString(datname); err != nil {
				return err
			}
		default:
			if _, err := dbname.WriteString("postgres"); err != nil {
				return err
			}
		}
	} else {
		if _, err := dbname.WriteString("postgres"); err != nil {
			return err
		}
	}

	if tagAddress, err = p.SanitizedAddress(); err != nil {
		return err
	}

	// Process the additional tags
	tags := map[string]string{
		"server": tagAddress,
		"db":     dbname.String(),
	}

	// set default timestamp to Now
	timestamp = time.Now()

	fields := make(map[string]interface{})
COLUMN:
	for col, val := range columnMap {
		p.Log.Debugf("Column: %s = %T: %v\n", col, *val, *val)
		_, ignore := ignoredColumns[col]
		if ignore || *val == nil {
			continue
		}

		if col == p.Timestamp {
			if v, ok := (*val).(time.Time); ok {
				timestamp = v
			}
			continue
		}

		for _, tag := range p.AdditionalTags {
			if col != tag {
				continue
			}
			switch v := (*val).(type) {
			case string:
				tags[col] = v
			case []byte:
				tags[col] = string(v)
			case int64, int32, int:
				tags[col] = fmt.Sprintf("%d", v)
			default:
				p.Log.Debugf("Failed to add %q as additional tag", col)
			}
			continue COLUMN
		}

		if v, ok := (*val).([]byte); ok {
			fields[col] = string(v)
		} else {
			fields[col] = *val
		}
	}
	acc.AddFields(measName, fields, tags, timestamp)
	return nil
}

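// init registers the plugin with conservative connection defaults and with
// prepared statements enabled.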
func init() {
	inputs.Add("postgresql_extensible", func() telegraf.Input {
		return &Postgresql{
			Service: postgresql.Service{
				MaxIdle:     1,
				MaxOpen:     1,
				MaxLifetime: config.Duration(0),
				IsPgBouncer: false,
			},
			PreparedStatements: true,
		}
	})
}