Mirror of https://github.com/prometheus-community/postgres_exporter, synced 2025-04-23 15:35:28 +00:00

PMM-10512, PMM-10820 Capture usename and application_name for pg_stat_activity, update exporter (#77)

commit 80bb494040 (parent a6e78dc07a)
@@ -299,6 +299,8 @@ var builtinMetricMaps = map[string]intermediateMetricMap{
 		map[string]ColumnMapping{
 			"datname":          {LABEL, "Name of this database", nil, nil},
 			"state":            {LABEL, "connection state", nil, semver.MustParseRange(">=9.2.0")},
+			"usename":          {LABEL, "Name of the user logged into this backend", nil, nil},
+			"application_name": {LABEL, "Name of the application that is connected to this backend", nil, nil},
 			"count":            {GAUGE, "number of connections in this state", nil, nil},
 			"max_tx_duration":  {GAUGE, "max duration in seconds any active transaction has been running", nil, nil},
 		},
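With usename and application_name registered as LABEL columns, every series produced from this mapping now carries the user and the application that opened the backend. As an illustration only (the metric names assume the exporter's usual namespace_column naming for this map, and the label values are invented), a scrape would return lines roughly like:

    pg_stat_activity_count{application_name="psql",datname="postgres",state="active",usename="pmm"} 1
    pg_stat_activity_max_tx_duration{application_name="psql",datname="postgres",state="active",usename="pmm"} 0.42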
@@ -137,6 +137,8 @@ var queryOverrides = map[string][]OverrideQuery{
 			SELECT
 				pg_database.datname,
 				tmp.state,
+				tmp2.usename,
+				tmp2.application_name,
 				COALESCE(count,0) as count,
 				COALESCE(max_tx_duration,0) as max_tx_duration
 			FROM
@@ -153,9 +155,11 @@ var queryOverrides = map[string][]OverrideQuery{
 				SELECT
 					datname,
 					state,
+					usename,
+					application_name,
 					count(*) AS count,
 					MAX(EXTRACT(EPOCH FROM now() - xact_start))::float AS max_tx_duration
-				FROM pg_stat_activity GROUP BY datname,state) AS tmp2
+				FROM pg_stat_activity GROUP BY datname,state,usename,application_name) AS tmp2
 				ON tmp.state = tmp2.state AND pg_database.datname = tmp2.datname
 			`,
 		},
@@ -165,9 +169,11 @@ var queryOverrides = map[string][]OverrideQuery{
 				SELECT
 					datname,
 					'unknown' AS state,
+					usename,
+					application_name,
 					COALESCE(count(*),0) AS count,
 					COALESCE(MAX(EXTRACT(EPOCH FROM now() - xact_start))::float,0) AS max_tx_duration
-				FROM pg_stat_activity GROUP BY datname
+				FROM pg_stat_activity GROUP BY datname,usename,application_name
 			`,
 		},
 	},
@@ -8,6 +8,7 @@ test-performance:
 extraMetrics = false
 multipleLabels = false
 dumpMetrics = false
+endpoint = ''

 test-metrics:
 	go test -v -run '^TestMissingMetrics$$' -args -doRun=true
@@ -22,7 +23,7 @@ test-resolutions:
 	go test -v -run '^TestResolutions$$' -args -doRun=true

 dump-metrics:
-	go test -v -run '^TestDumpMetrics$$' -args -doRun=true -extraMetrics=$(extraMetrics) -multipleLabels=$(multipleLabels) -dumpMetrics=$(dumpMetrics)
+	go test -v -run '^TestDumpMetrics$$' -args -doRun=true -extraMetrics=$(extraMetrics) -multipleLabels=$(multipleLabels) -endpoint=$(endpoint) -dumpMetrics=true

 test-consistency: test-metrics test-resolutions test-resolutions-duplicates

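The new endpoint variable defaults to empty and is forwarded to the test binary as -endpoint, so the resolution whose metrics get dumped can be chosen per invocation; note that dump-metrics now always runs with -dumpMetrics=true. For example (assuming ordinary GNU make variable overrides; hr is one of the values the test recognizes, as the switch in TestDumpMetrics below shows):

    make dump-metrics endpoint=hr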
@@ -14,10 +14,12 @@ import (
 var dumpMetricsFlag = flag.Bool("dumpMetrics", false, "")
 var printExtraMetrics = flag.Bool("extraMetrics", false, "")
 var printMultipleLabels = flag.Bool("multipleLabels", false, "")
+var endpointFlag = flag.String("endpoint", "", "")

 type Metric struct {
 	name             string
-	labels           string
+	labelsRawStr     string
+	labelsWithValues []string
 }

 type MetricsCollection struct {
@@ -86,13 +88,25 @@ func TestDumpMetrics(t *testing.T) {
 		return
 	}

-	newMetrics, err := getMetrics(updatedExporterFileName)
+	var ep string
+	switch *endpointFlag {
+	case "hr":
+		ep = highResolutionEndpoint
+	case "mr":
+		ep = medResolutionEndpoint
+	case "lr":
+		ep = lowResolutionEndpoint
+	default:
+		ep = "metrics"
+	}
+
+	newMetrics, err := getMetricsFrom(updatedExporterFileName, ep)
 	if err != nil {
 		t.Error(err)
 		return
 	}

-	oldMetrics, err := getMetrics(oldExporterFileName)
+	oldMetrics, err := getMetricsFrom(oldExporterFileName, ep)
 	if err != nil {
 		t.Error(err)
 		return
@@ -202,20 +216,50 @@ func testResolution(t *testing.T, resolutionEp, resolutionName string) {
 	missingCount := 0
 	missingMetrics := ""
-	for _, metric := range oldMetricsCollection.MetricNamesWithLabels {
-		if metric == "" || strings.HasPrefix(metric, "# ") {
+	missingLabelsCount := 0
+	missingLabels := ""
+	for _, oldMetric := range oldMetricsCollection.MetricsData {
+		if oldMetric.name == "" || strings.HasPrefix(oldMetric.name, "# ") {
 			continue
 		}

-		if !contains(newMetricsCollection.MetricNamesWithLabels, metric) {
+		metricFound := false
+		labelsMatch := false
+		for _, newMetric := range newMetricsCollection.MetricsData {
+			if newMetric.name != oldMetric.name {
+				continue
+			}
+
+			metricFound = true
+
+			if newMetric.labelsRawStr == oldMetric.labelsRawStr {
+				labelsMatch = true
+				break
+			}
+
+			if arrIsSubsetOf(oldMetric.labelsWithValues, newMetric.labelsWithValues) {
+				labelsMatch = true
+				break
+			}
+		}
+
+		if !metricFound {
 			missingCount++
-			missingMetrics += fmt.Sprintf("%s\n", metric)
+			missingMetrics += fmt.Sprintf("%s\n", oldMetric.name)
+		} else if !labelsMatch {
+			missingLabelsCount++
+			missingLabels += fmt.Sprintf("%s\n", oldMetric.name)
 		}
 	}

 	if missingCount > 0 {
 		t.Errorf("%d metrics are missing in new exporter for %s resolution:\n%s", missingCount, resolutionName, missingMetrics)
 	}

+	if missingLabelsCount > 0 {
+		t.Errorf("%d metrics' labels are missing in new exporter for %s resolution:\n%s", missingLabelsCount, resolutionName, missingLabels)
+	}
+
 	extraCount := 0
 	extraMetrics := ""
 	for _, metric := range newMetricsCollection.MetricNamesWithLabels {
@@ -362,13 +406,13 @@ func parseMetricsCollection(metricRaw string) MetricsCollection {
 	}
 }

-func arrIsSubsetOf(a, b []string) bool {
-	if len(a) == 0 {
-		return len(b) == 0
+func arrIsSubsetOf(smaller, larger []string) bool {
+	if len(smaller) == 0 {
+		return len(larger) == 0
 	}

-	for _, x := range a {
-		if !contains(b, x) {
+	for _, x := range smaller {
+		if !contains(larger, x) {
 			return false
 		}
 	}
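The rename only clarifies which argument is expected to be the smaller set; the behaviour testResolution relies on is unchanged: every element of the first slice must appear in the second, and an empty first slice matches only an empty second one. A minimal standalone sketch of that behaviour follows; the contains helper and the main function here are stand-ins added to make the snippet runnable, not copies of the test file's own code, and the label values are invented.

package main

import "fmt"

// contains reports whether s is one of the elements of arr.
func contains(arr []string, s string) bool {
	for _, v := range arr {
		if v == s {
			return true
		}
	}
	return false
}

// arrIsSubsetOf mirrors the helper from the diff: every element of smaller
// must be present in larger; an empty smaller matches only an empty larger.
func arrIsSubsetOf(smaller, larger []string) bool {
	if len(smaller) == 0 {
		return len(larger) == 0
	}
	for _, x := range smaller {
		if !contains(larger, x) {
			return false
		}
	}
	return true
}

func main() {
	oldLabels := []string{`datname="postgres"`, `state="active"`}
	newLabels := []string{`datname="postgres"`, `state="active"`, `usename="pmm"`, `application_name="psql"`}

	// Old labels are a subset of the new, extended label set: prints true.
	fmt.Println(arrIsSubsetOf(oldLabels, newLabels))
	// The reverse is not a subset, since the old set lacks the new labels: prints false.
	fmt.Println(arrIsSubsetOf(newLabels, oldLabels))
}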
@@ -394,10 +438,10 @@ func groupByMetrics(metrics []Metric) map[string][]string {
 		metric := metrics[i]
 		if _, ok := mtr[metric.name]; ok {
 			labels := mtr[metric.name]
-			labels = append(labels, metric.labels)
+			labels = append(labels, metric.labelsRawStr)
 			mtr[metric.name] = labels
 		} else {
-			mtr[metric.name] = []string{metric.labels}
+			mtr[metric.name] = []string{metric.labelsRawStr}
 		}
 	}

@@ -414,16 +458,21 @@ func parseMetrics(metrics []string) []Metric {
 		}

 		var mName, mLabels string
+		var labelsArr []string
 		if strings.Contains(metricRawStr, "{") {
 			mName = metricRawStr[:strings.Index(metricRawStr, "{")]
 			mLabels = metricRawStr[strings.Index(metricRawStr, "{")+1 : len(metricRawStr)-1]
+			if mLabels != "" {
+				labelsArr = strings.Split(mLabels, ",")
+			}
 		} else {
 			mName = metricRawStr
 		}

 		metric := Metric{
 			name:             mName,
-			labels:           mLabels,
+			labelsRawStr:     mLabels,
+			labelsWithValues: labelsArr,
 		}

 		metricsData = append(metricsData, metric)
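To make the new fields concrete, here is a small standalone sketch of the same splitting logic. The Metric type is copied from the diff, but parseMetricLine, the main function, and the sample input are illustration-only additions; the input is assumed to carry just the metric name and label block, which is how the logic above treats metricRawStr.

package main

import (
	"fmt"
	"strings"
)

// Metric mirrors the struct from the diff: the raw label string plus the
// individual label="value" pairs.
type Metric struct {
	name             string
	labelsRawStr     string
	labelsWithValues []string
}

// parseMetricLine splits one line of the form name{labels} into the pieces
// the updated test compares; a line without "{" has no labels.
func parseMetricLine(metricRawStr string) Metric {
	var mName, mLabels string
	var labelsArr []string
	if strings.Contains(metricRawStr, "{") {
		mName = metricRawStr[:strings.Index(metricRawStr, "{")]
		mLabels = metricRawStr[strings.Index(metricRawStr, "{")+1 : len(metricRawStr)-1]
		if mLabels != "" {
			labelsArr = strings.Split(mLabels, ",")
		}
	} else {
		mName = metricRawStr
	}

	return Metric{
		name:             mName,
		labelsRawStr:     mLabels,
		labelsWithValues: labelsArr,
	}
}

func main() {
	// Invented sample input in Prometheus exposition form (name and labels only).
	m := parseMetricLine(`pg_stat_activity_count{datname="postgres",state="active",usename="pmm"}`)
	fmt.Println(m.name)             // pg_stat_activity_count
	fmt.Println(m.labelsRawStr)     // datname="postgres",state="active",usename="pmm"
	fmt.Println(m.labelsWithValues) // [datname="postgres" state="active" usename="pmm"]
}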