Add a logger to the stat_database collector to get a better handle on errors

(also clean up some metric validity checks)

Signed-off-by: Felix Yuan <felix.yuan@reddit.com>
This commit is contained in:
Felix Yuan 2023-07-19 14:24:08 -07:00
parent 4aa8cd4996
commit 12c12cf368
2 changed files with 225 additions and 186 deletions

View File

@ -17,6 +17,8 @@ import (
"context" "context"
"database/sql" "database/sql"
"github.com/go-kit/log"
"github.com/go-kit/log/level"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
) )
@ -26,10 +28,12 @@ func init() {
registerCollector(statDatabaseSubsystem, defaultEnabled, NewPGStatDatabaseCollector) registerCollector(statDatabaseSubsystem, defaultEnabled, NewPGStatDatabaseCollector)
} }
type PGStatDatabaseCollector struct{} type PGStatDatabaseCollector struct {
log log.Logger
}
func NewPGStatDatabaseCollector(config collectorConfig) (Collector, error) { func NewPGStatDatabaseCollector(config collectorConfig) (Collector, error) {
return &PGStatDatabaseCollector{}, nil return &PGStatDatabaseCollector{log: config.logger}, nil
} }
var ( var (
@ -228,7 +232,7 @@ var (
` `
) )
func (PGStatDatabaseCollector) Update(ctx context.Context, instance *instance, ch chan<- prometheus.Metric) error { func (c *PGStatDatabaseCollector) Update(ctx context.Context, instance *instance, ch chan<- prometheus.Metric) error {
db := instance.getDB() db := instance.getDB()
rows, err := db.QueryContext(ctx, rows, err := db.QueryContext(ctx,
statDatabaseQuery, statDatabaseQuery,
@ -267,217 +271,203 @@ func (PGStatDatabaseCollector) Update(ctx context.Context, instance *instance, c
if err != nil { if err != nil {
return err return err
} }
datidLabel := "unknown"
if datid.Valid { if !datid.Valid {
datidLabel = datid.String level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no datid")
continue
} }
datnameLabel := "unknown" if !datname.Valid {
if datname.Valid { level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no datname")
datnameLabel = datname.String continue
}
if !numBackends.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no numbackends")
continue
}
if !xactCommit.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no xact_commit")
continue
}
if !xactRollback.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no xact_rollback")
continue
}
if !blksRead.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no blks_read")
continue
}
if !blksHit.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no blks_hit")
continue
}
if !tupReturned.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no tup_returned")
continue
}
if !tupFetched.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no tup_fetched")
continue
}
if !tupInserted.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no tup_inserted")
continue
}
if !tupUpdated.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no tup_updated")
continue
}
if !tupDeleted.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no tup_deleted")
continue
}
if !conflicts.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no conflicts")
continue
}
if !tempFiles.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no temp_files")
continue
}
if !tempBytes.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no temp_bytes")
continue
}
if !deadlocks.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no deadlocks")
continue
}
if !blkReadTime.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no blk_read_time")
continue
}
if !blkWriteTime.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no blk_write_time")
continue
}
if !statsReset.Valid {
level.Debug(c.log).Log("msg", "Skipping collecting metric because it has no stats_reset")
continue
} }
numBackendsMetric := 0.0 labels := []string{datid.String, datname.String}
if numBackends.Valid {
numBackendsMetric = numBackends.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseNumbackends, statDatabaseNumbackends,
prometheus.GaugeValue, prometheus.GaugeValue,
numBackendsMetric, numBackends.Float64,
datidLabel, labels...,
datnameLabel,
) )
xactCommitMetric := 0.0
if xactCommit.Valid {
xactCommitMetric = xactCommit.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseXactCommit, statDatabaseXactCommit,
prometheus.CounterValue, prometheus.CounterValue,
xactCommitMetric, xactCommit.Float64,
datidLabel, labels...,
datnameLabel,
) )
xactRollbackMetric := 0.0
if xactRollback.Valid {
xactRollbackMetric = xactRollback.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseXactRollback, statDatabaseXactRollback,
prometheus.CounterValue, prometheus.CounterValue,
xactRollbackMetric, xactRollback.Float64,
datidLabel, labels...,
datnameLabel,
) )
blksReadMetric := 0.0
if blksRead.Valid {
blksReadMetric = blksRead.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseBlksRead, statDatabaseBlksRead,
prometheus.CounterValue, prometheus.CounterValue,
blksReadMetric, blksRead.Float64,
datidLabel, labels...,
datnameLabel,
) )
blksHitMetric := 0.0
if blksHit.Valid {
blksHitMetric = blksHit.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseBlksHit, statDatabaseBlksHit,
prometheus.CounterValue, prometheus.CounterValue,
blksHitMetric, blksHit.Float64,
datidLabel, labels...,
datnameLabel,
) )
tupReturnedMetric := 0.0
if tupReturned.Valid {
tupReturnedMetric = tupReturned.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseTupReturned, statDatabaseTupReturned,
prometheus.CounterValue, prometheus.CounterValue,
tupReturnedMetric, tupReturned.Float64,
datidLabel, labels...,
datnameLabel,
) )
tupFetchedMetric := 0.0
if tupFetched.Valid {
tupFetchedMetric = tupFetched.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseTupFetched, statDatabaseTupFetched,
prometheus.CounterValue, prometheus.CounterValue,
tupFetchedMetric, tupFetched.Float64,
datidLabel, labels...,
datnameLabel,
) )
tupInsertedMetric := 0.0
if tupInserted.Valid {
tupInsertedMetric = tupInserted.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseTupInserted, statDatabaseTupInserted,
prometheus.CounterValue, prometheus.CounterValue,
tupInsertedMetric, tupInserted.Float64,
datidLabel, labels...,
datnameLabel,
) )
tupUpdatedMetric := 0.0
if tupUpdated.Valid {
tupUpdatedMetric = tupUpdated.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseTupUpdated, statDatabaseTupUpdated,
prometheus.CounterValue, prometheus.CounterValue,
tupUpdatedMetric, tupUpdated.Float64,
datidLabel, labels...,
datnameLabel,
) )
tupDeletedMetric := 0.0
if tupDeleted.Valid {
tupDeletedMetric = tupDeleted.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseTupDeleted, statDatabaseTupDeleted,
prometheus.CounterValue, prometheus.CounterValue,
tupDeletedMetric, tupDeleted.Float64,
datidLabel, labels...,
datnameLabel,
) )
conflictsMetric := 0.0
if conflicts.Valid {
conflictsMetric = conflicts.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseConflicts, statDatabaseConflicts,
prometheus.CounterValue, prometheus.CounterValue,
conflictsMetric, conflicts.Float64,
datidLabel, labels...,
datnameLabel,
) )
tempFilesMetric := 0.0
if tempFiles.Valid {
tempFilesMetric = tempFiles.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseTempFiles, statDatabaseTempFiles,
prometheus.CounterValue, prometheus.CounterValue,
tempFilesMetric, tempFiles.Float64,
datidLabel, labels...,
datnameLabel,
) )
tempBytesMetric := 0.0
if tempBytes.Valid {
tempBytesMetric = tempBytes.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseTempBytes, statDatabaseTempBytes,
prometheus.CounterValue, prometheus.CounterValue,
tempBytesMetric, tempBytes.Float64,
datidLabel, labels...,
datnameLabel,
) )
deadlocksMetric := 0.0
if deadlocks.Valid {
deadlocksMetric = deadlocks.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseDeadlocks, statDatabaseDeadlocks,
prometheus.CounterValue, prometheus.CounterValue,
deadlocksMetric, deadlocks.Float64,
datidLabel, labels...,
datnameLabel,
) )
blkReadTimeMetric := 0.0
if blkReadTime.Valid {
blkReadTimeMetric = blkReadTime.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseBlkReadTime, statDatabaseBlkReadTime,
prometheus.CounterValue, prometheus.CounterValue,
blkReadTimeMetric, blkReadTime.Float64,
datidLabel, labels...,
datnameLabel,
) )
blkWriteTimeMetric := 0.0
if blkWriteTime.Valid {
blkWriteTimeMetric = blkWriteTime.Float64
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseBlkWriteTime, statDatabaseBlkWriteTime,
prometheus.CounterValue, prometheus.CounterValue,
blkWriteTimeMetric, blkWriteTime.Float64,
datidLabel, labels...,
datnameLabel,
) )
statsResetMetric := 0.0
if statsReset.Valid {
statsResetMetric = float64(statsReset.Time.Unix())
}
ch <- prometheus.MustNewConstMetric( ch <- prometheus.MustNewConstMetric(
statDatabaseStatsReset, statDatabaseStatsReset,
prometheus.CounterValue, prometheus.CounterValue,
statsResetMetric, float64(statsReset.Time.Unix()),
datidLabel, labels...,
datnameLabel,
) )
} }
return nil return nil

View File

@ -18,6 +18,7 @@ import (
"time" "time"
"github.com/DATA-DOG/go-sqlmock" "github.com/DATA-DOG/go-sqlmock"
"github.com/go-kit/log"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
dto "github.com/prometheus/client_model/go" dto "github.com/prometheus/client_model/go"
"github.com/smartystreets/goconvey/convey" "github.com/smartystreets/goconvey/convey"
@ -86,7 +87,9 @@ func TestPGStatDatabaseCollector(t *testing.T) {
ch := make(chan prometheus.Metric) ch := make(chan prometheus.Metric)
go func() { go func() {
defer close(ch) defer close(ch)
c := PGStatDatabaseCollector{} c := PGStatDatabaseCollector{
log: log.With(log.NewNopLogger(), "collector", "pg_stat_database"),
}
if err := c.Update(context.Background(), inst, ch); err != nil { if err := c.Update(context.Background(), inst, ch); err != nil {
t.Errorf("Error calling PGStatDatabaseCollector.Update: %s", err) t.Errorf("Error calling PGStatDatabaseCollector.Update: %s", err)
@ -131,6 +134,10 @@ func TestPGStatDatabaseCollectorNullValues(t *testing.T) {
} }
defer db.Close() defer db.Close()
srT, err := time.Parse("2006-01-02 15:04:05.00000-07", "2023-05-25 17:10:42.81132-07")
if err != nil {
t.Fatalf("Error parsing time: %s", err)
}
inst := &instance{db: db} inst := &instance{db: db}
columns := []string{ columns := []string{
@ -158,31 +165,52 @@ func TestPGStatDatabaseCollectorNullValues(t *testing.T) {
rows := sqlmock.NewRows(columns). rows := sqlmock.NewRows(columns).
AddRow( AddRow(
nil, nil,
nil, "postgres",
nil, 354,
nil, 4945,
nil, 289097744,
nil, 1242257,
nil, int64(3275602074),
nil, 89320867,
nil, 450139,
nil, 2034563757,
nil, 0,
nil, int64(2725688749),
nil, 23,
nil, 52,
nil, 74,
nil, 925,
nil, 16,
nil, 823,
nil, srT).
) AddRow(
"pid",
"postgres",
354,
4945,
289097744,
1242257,
int64(3275602074),
89320867,
450139,
2034563757,
0,
int64(2725688749),
23,
52,
74,
925,
16,
823,
srT)
mock.ExpectQuery(sanitizeQuery(statDatabaseQuery)).WillReturnRows(rows) mock.ExpectQuery(sanitizeQuery(statDatabaseQuery)).WillReturnRows(rows)
ch := make(chan prometheus.Metric) ch := make(chan prometheus.Metric)
go func() { go func() {
defer close(ch) defer close(ch)
c := PGStatDatabaseCollector{} c := PGStatDatabaseCollector{
log: log.With(log.NewNopLogger(), "collector", "pg_stat_database"),
}
if err := c.Update(context.Background(), inst, ch); err != nil { if err := c.Update(context.Background(), inst, ch); err != nil {
t.Errorf("Error calling PGStatDatabaseCollector.Update: %s", err) t.Errorf("Error calling PGStatDatabaseCollector.Update: %s", err)
@ -190,23 +218,23 @@ func TestPGStatDatabaseCollectorNullValues(t *testing.T) {
}() }()
expected := []MetricResult{ expected := []MetricResult{
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_GAUGE, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_GAUGE, value: 354},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 4945},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 289097744},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 1242257},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 3275602074},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 89320867},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 450139},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 2034563757},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 0},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 2725688749},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 23},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 52},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 74},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 925},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 16},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 823},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 1685059842},
} }
convey.Convey("Metrics comparison", t, func() { convey.Convey("Metrics comparison", t, func() {
@ -296,14 +324,35 @@ func TestPGStatDatabaseCollectorRowLeakTest(t *testing.T) {
nil, nil,
nil, nil,
nil, nil,
) ).
AddRow(
"pid",
"postgres",
355,
4946,
289097745,
1242258,
int64(3275602075),
89320868,
450140,
2034563758,
1,
int64(2725688750),
24,
53,
75,
926,
17,
824,
srT)
mock.ExpectQuery(sanitizeQuery(statDatabaseQuery)).WillReturnRows(rows) mock.ExpectQuery(sanitizeQuery(statDatabaseQuery)).WillReturnRows(rows)
ch := make(chan prometheus.Metric) ch := make(chan prometheus.Metric)
go func() { go func() {
defer close(ch) defer close(ch)
c := PGStatDatabaseCollector{} c := PGStatDatabaseCollector{
log: log.With(log.NewNopLogger(), "collector", "pg_stat_database"),
}
if err := c.Update(context.Background(), inst, ch); err != nil { if err := c.Update(context.Background(), inst, ch); err != nil {
t.Errorf("Error calling PGStatDatabaseCollector.Update: %s", err) t.Errorf("Error calling PGStatDatabaseCollector.Update: %s", err)
@ -328,23 +377,23 @@ func TestPGStatDatabaseCollectorRowLeakTest(t *testing.T) {
{labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 16}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 16},
{labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 823}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 823},
{labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 1685059842}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 1685059842},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_GAUGE, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_GAUGE, value: 355},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 4946},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 289097745},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 1242258},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 3275602075},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 89320868},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 450140},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 2034563758},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 1},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 2725688750},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 24},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 53},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 75},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 926},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 17},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 824},
{labels: labelMap{"datid": "unknown", "datname": "unknown"}, metricType: dto.MetricType_COUNTER, value: 0}, {labels: labelMap{"datid": "pid", "datname": "postgres"}, metricType: dto.MetricType_COUNTER, value: 1685059842},
} }
convey.Convey("Metrics comparison", t, func() { convey.Convey("Metrics comparison", t, func() {