Mirror of https://github.com/prometheus-community/postgres_exporter
(synced 2025-05-03 08:28:02 +00:00)

Gofmt the code.

commit 5211415ebe, parent ff8e0c5382
@@ -148,7 +148,7 @@ type MetricMap struct {
 
 // Metric descriptors for dynamically created metrics.
 var variableMaps = map[string]map[string]ColumnMapping{
-    "pg_runtime_variable": map[string]ColumnMapping{
+    "pg_runtime_variable": {
         "max_connections": {GAUGE, "Sets the maximum number of concurrent connections.", nil, nil},
         "max_files_per_process": {GAUGE, "Sets the maximum number of simultaneously open files for each server process.", nil, nil},
         "max_function_args": {GAUGE, "Shows the maximum number of function arguments.", nil, nil},
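
The rewrite in this hunk and those below drops the redundant inner map[string]ColumnMapping element type; this is the composite-literal simplification that gofmt performs when run with its -s flag. A minimal, standalone sketch of the same pattern, using a made-up Entry type rather than the exporter's real ColumnMapping:

    package main

    import "fmt"

    // Entry is a stand-in for the exporter's ColumnMapping; its fields are
    // illustrative only.
    type Entry struct {
        Usage       string
        Description string
    }

    // Written out in full, each inner literal repeats its element type:
    //
    //     var verbose = map[string]map[string]Entry{
    //         "pg_stat_bgwriter": map[string]Entry{
    //             "checkpoints_timed": Entry{"COUNTER", "scheduled checkpoints"},
    //         },
    //     }
    //
    // gofmt -s rewrites that into the equivalent, shorter form below.
    var simplified = map[string]map[string]Entry{
        "pg_stat_bgwriter": {
            "checkpoints_timed": {"COUNTER", "scheduled checkpoints"},
        },
    }

    func main() {
        fmt.Println(simplified["pg_stat_bgwriter"]["checkpoints_timed"].Description)
    }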
@@ -184,7 +184,7 @@ func dumpMaps() {
 }
 
 var metricMaps = map[string]map[string]ColumnMapping{
-    "pg_stat_bgwriter": map[string]ColumnMapping{
+    "pg_stat_bgwriter": {
         "checkpoints_timed": {COUNTER, "Number of scheduled checkpoints that have been performed", nil, nil},
         "checkpoints_req": {COUNTER, "Number of requested checkpoints that have been performed", nil, nil},
         "checkpoint_write_time": {COUNTER, "Total amount of time that has been spent in the portion of checkpoint processing where files are written to disk, in milliseconds", nil, nil},
@@ -197,7 +197,7 @@ var metricMaps = map[string]map[string]ColumnMapping{
         "buffers_alloc": {COUNTER, "Number of buffers allocated", nil, nil},
         "stats_reset": {COUNTER, "Time at which these statistics were last reset", nil, nil},
     },
-    "pg_stat_database": map[string]ColumnMapping{
+    "pg_stat_database": {
         "datid": {LABEL, "OID of a database", nil, nil},
         "datname": {LABEL, "Name of this database", nil, nil},
         "numbackends": {GAUGE, "Number of backends currently connected to this database. This is the only column in this view that returns a value reflecting current state; all other columns return the accumulated values since the last reset.", nil, nil},
@@ -218,7 +218,7 @@ var metricMaps = map[string]map[string]ColumnMapping{
         "blk_write_time": {COUNTER, "Time spent writing data file blocks by backends in this database, in milliseconds", nil, nil},
         "stats_reset": {COUNTER, "Time at which these statistics were last reset", nil, nil},
     },
-    "pg_stat_database_conflicts": map[string]ColumnMapping{
+    "pg_stat_database_conflicts": {
         "datid": {LABEL, "OID of a database", nil, nil},
         "datname": {LABEL, "Name of this database", nil, nil},
         "confl_tablespace": {COUNTER, "Number of queries in this database that have been canceled due to dropped tablespaces", nil, nil},
@@ -227,12 +227,12 @@ var metricMaps = map[string]map[string]ColumnMapping{
         "confl_bufferpin": {COUNTER, "Number of queries in this database that have been canceled due to pinned buffers", nil, nil},
         "confl_deadlock": {COUNTER, "Number of queries in this database that have been canceled due to deadlocks", nil, nil},
     },
-    "pg_locks": map[string]ColumnMapping{
+    "pg_locks": {
         "datname": {LABEL, "Name of this database", nil, nil},
         "mode": {LABEL, "Type of Lock", nil, nil},
         "count": {GAUGE, "Number of locks", nil, nil},
     },
-    "pg_stat_replication": map[string]ColumnMapping{
+    "pg_stat_replication": {
         "procpid": {DISCARD, "Process ID of a WAL sender process", nil, semver.MustParseRange("<9.2.0")},
         "pid": {DISCARD, "Process ID of a WAL sender process", nil, semver.MustParseRange(">=9.2.0")},
         "usesysid": {DISCARD, "OID of the user logged into this WAL sender process", nil, nil},
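
The fourth element in these mappings is a version range from github.com/blang/semver that gates a column on the PostgreSQL server version, which is why procpid is used only below 9.2.0 and pid only from 9.2.0 on. A small standalone sketch of how such a range evaluates; the server version here is made up for illustration:

    package main

    import (
        "fmt"

        "github.com/blang/semver"
    )

    func main() {
        // These range expressions appear verbatim in the metric maps above.
        oldOnly := semver.MustParseRange("<9.2.0")
        newOnly := semver.MustParseRange(">=9.2.0")

        // A hypothetical server version, used only for this demonstration.
        v := semver.MustParse("9.4.5")

        fmt.Println("procpid applies:", oldOnly(v)) // false
        fmt.Println("pid applies:", newOnly(v))     // true
    }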
@@ -263,7 +263,7 @@ var metricMaps = map[string]map[string]ColumnMapping{
         "pg_current_xlog_location": {DISCARD, "pg_current_xlog_location", nil, nil},
         "pg_xlog_location_diff": {GAUGE, "Lag in bytes between master and slave", nil, semver.MustParseRange(">=9.2.0")},
     },
-    "pg_stat_activity": map[string]ColumnMapping{
+    "pg_stat_activity": {
         "datname": {LABEL, "Name of this database", nil, nil},
         "state": {LABEL, "connection state", nil, semver.MustParseRange(">=9.2.0")},
         "count": {GAUGE, "number of connections in this state", nil, nil},
@@ -283,7 +283,7 @@ type OverrideQuery struct {
 // Overriding queries for namespaces above.
 // TODO: validate this is a closed set in tests, and there are no overlaps
 var queryOverrides = map[string][]OverrideQuery{
-    "pg_locks": []OverrideQuery{
+    "pg_locks": {
         {
             semver.MustParseRange(">0.0.0"),
             `SELECT pg_database.datname,tmp.mode,COALESCE(count,0) as count
@@ -307,7 +307,7 @@ var queryOverrides = map[string][]OverrideQuery{
         },
     },
 
-    "pg_stat_replication": []OverrideQuery{
+    "pg_stat_replication": {
         {
             semver.MustParseRange(">=9.2.0"),
             `
@@ -327,7 +327,7 @@ var queryOverrides = map[string][]OverrideQuery{
         },
     },
 
-    "pg_stat_activity": []OverrideQuery{
+    "pg_stat_activity": {
         // This query only works
         {
             semver.MustParseRange(">=9.2.0"),
@@ -11,16 +11,16 @@ import (
 
     . "gopkg.in/check.v1"
 
-    "github.com/prometheus/client_golang/prometheus"
     "database/sql"
-    _ "github.com/lib/pq"
     "fmt"
+    _ "github.com/lib/pq"
+    "github.com/prometheus/client_golang/prometheus"
 )
 
 // Hook up gocheck into the "go test" runner.
 func Test(t *testing.T) { TestingT(t) }
 
-type IntegrationSuite struct{
+type IntegrationSuite struct {
     e *Exporter
 }
 
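
Two routine gofmt normalizations show up in this hunk: import specs inside a contiguous group are kept sorted by import path (which is how database/sql, fmt, lib/pq and the Prometheus client end up in this order), and struct{ in a type declaration gets a space before the brace. A small illustrative file, already in the shape gofmt produces; the names here are invented:

    package main

    import (
        "database/sql" // gofmt keeps the specs in this group sorted by path
        "fmt"
        "os"
    )

    // gofmt writes the declaration as "struct {", with a space before the brace.
    type ExampleSuite struct {
        db *sql.DB
    }

    func main() {
        s := ExampleSuite{}
        fmt.Println(os.Args[0], s.db == nil)
    }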
@@ -43,7 +43,8 @@ func (s *IntegrationSuite) TestAllNamespacesReturnResults(c *C) {
     // Setup a dummy channel to consume metrics
     ch := make(chan prometheus.Metric, 100)
     go func() {
-        for _ = range ch {}
+        for range ch {
+        }
     }()
 
     // Open a database connection
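
The test drains the metrics channel in a goroutine; because the received values are never used, gofmt's -s simplification rewrites for _ = range ch into the bare for range ch form, with the empty body split across two lines. A standalone sketch of that drain pattern; the channel and its values are illustrative:

    package main

    import "fmt"

    func main() {
        ch := make(chan int, 3)
        done := make(chan struct{})

        // Drain the channel without binding the received values; this is the
        // "for range" form that gofmt -s rewrites "for _ = range" into.
        go func() {
            for range ch {
            }
            close(done)
        }()

        for i := 0; i < 3; i++ {
            ch <- i
        }
        close(ch)
        <-done
        fmt.Println("channel drained")
    }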
@@ -65,7 +66,6 @@ func (s *IntegrationSuite) TestAllNamespacesReturnResults(c *C) {
         }
     }
 
-
     // This should never happen in our test cases.
     errMap := queryNamespaceMappings(ch, db, s.e.metricMap, s.e.queryOverrides)
     if !c.Check(len(errMap), Equals, 0) {
@@ -74,4 +74,4 @@ func (s *IntegrationSuite) TestAllNamespacesReturnResults(c *C) {
             fmt.Println(namespace, ":", err)
         }
     }
-}
\ No newline at end of file
+}
@@ -3,8 +3,8 @@
 package main
 
 import (
-    "testing"
     . "gopkg.in/check.v1"
+    "testing"
 
     "github.com/blang/semver"
 )
@@ -12,7 +12,7 @@ import (
 // Hook up gocheck into the "go test" runner.
 func Test(t *testing.T) { TestingT(t) }
 
-type FunctionalSuite struct{
+type FunctionalSuite struct {
     e *Exporter
 }
 
@@ -24,9 +24,9 @@ func (s *FunctionalSuite) SetUpSuite(c *C) {
 
 func (s *FunctionalSuite) TestSemanticVersionColumnDiscard(c *C) {
     testMetricMap := map[string]map[string]ColumnMapping{
-        "test_namespace" : map[string]ColumnMapping{
-            "metric_which_stays" : {COUNTER, "This metric should not be eliminated", nil, nil},
-            "metric_which_discards" : {COUNTER, "This metric should be forced to DISCARD", nil, nil},
+        "test_namespace": {
+            "metric_which_stays": {COUNTER, "This metric should not be eliminated", nil, nil},
+            "metric_which_discards": {COUNTER, "This metric should be forced to DISCARD", nil, nil},
         },
     }
 
@@ -84,4 +84,4 @@ func (s *FunctionalSuite) TestSemanticVersionColumnDiscard(c *C) {
             false,
         )
     }
-}
\ No newline at end of file
+}
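
A cleanup like this can be kept from regressing with a formatting check. The test below is a hypothetical guard, not part of this repository; it relies on gofmt -l -s printing the names of any files whose formatting differs from the simplified gofmt output:

    package main

    import (
        "os/exec"
        "strings"
        "testing"
    )

    // TestGofmt fails if any Go file under the current directory would be
    // rewritten by gofmt -s.
    func TestGofmt(t *testing.T) {
        out, err := exec.Command("gofmt", "-l", "-s", ".").CombinedOutput()
        if err != nil {
            t.Fatalf("running gofmt: %v\n%s", err, out)
        }
        if files := strings.TrimSpace(string(out)); files != "" {
            t.Errorf("files need gofmt -s:\n%s", files)
        }
    }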