postgres_exporter/percona_tests/metrics_test.go

package percona_tests

import (
	"flag"
	"fmt"
	"os"
	"sort"
	"strings"
	"testing"

	"github.com/pkg/errors"
)
var dumpMetricsFlag = flag.Bool("dumpMetrics", false, "dump old and new exporter metrics to files in assets/")
var printExtraMetrics = flag.Bool("extraMetrics", false, "print metrics that exist only in the new exporter")
var printMultipleLabels = flag.Bool("multipleLabels", false, "print metrics collected multiple times with overlapping label sets")
var endpointFlag = flag.String("endpoint", "", "endpoint to dump metrics from: hr, mr or lr (default: metrics)")
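
// The collect[] query parameters below are URL-encoded (%5B%5D is "[]"), so
// e.g. medResolutionEndpoint decodes to "metrics?collect[]=custom_query.mr".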
const (
	highResolutionEndpoint = "metrics?collect%5B%5D=custom_query.hr&collect%5B%5D=exporter&collect%5B%5D=standard.go&collect%5B%5D=standard.process"
	medResolutionEndpoint  = "metrics?collect%5B%5D=custom_query.mr"
	lowResolutionEndpoint  = "metrics?collect%5B%5D=custom_query.lr"
)
// This metric is disabled by default in newer exporters, so it would trigger
// a test failure. We don't use it in any of our dashboards, so for now it is
// safe to skip.
const skipMetricName = "go_memstats_gc_cpu_fraction"
// Metric is a single parsed exposition line: the metric name, its raw label
// string, and the labels split into name="value" pairs.
type Metric struct {
	name             string
	labelsRawStr     string
	labelsWithValues []string
}

// MetricsCollection holds one scrape in several shapes: the raw payload, the
// payload split into lines, the lines with sample values stripped, the parsed
// metrics, and the label sets grouped by metric name.
type MetricsCollection struct {
	RawMetricStr          string
	RawMetricStrArr       []string
	MetricNamesWithLabels []string
	MetricsData           []Metric
	LabelsByMetric        map[string][]string
}
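
// TestMissingMetrics fails if any metric name exposed by the old exporter is
// absent from the new one.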
func TestMissingMetrics(t *testing.T) {
	if !getBool(doRun) {
		t.Skip("For manual runs only through make")
		return
	}

	newMetrics, err := getMetrics(updatedExporterFileName)
	if err != nil {
		t.Error(err)
		return
	}

	oldMetrics, err := getMetrics(oldExporterFileName)
	if err != nil {
		t.Error(err)
		return
	}

	oldMetricsCollection := parseMetricsCollection(oldMetrics)
	newMetricsCollection := parseMetricsCollection(newMetrics)

	if ok, msg := testForMissingMetrics(oldMetricsCollection, newMetricsCollection); !ok {
		t.Error(msg)
	}
}
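
// TestMissingLabels fails if any metric's label set in the old exporter is
// not a subset of the label set reported by the new one.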
func TestMissingLabels(t *testing.T) {
	if !getBool(doRun) {
		t.Skip("For manual runs only through make")
		return
	}

	newMetrics, err := getMetrics(updatedExporterFileName)
	if err != nil {
		t.Error(err)
		return
	}

	oldMetrics, err := getMetrics(oldExporterFileName)
	if err != nil {
		t.Error(err)
		return
	}

	oldMetricsCollection := parseMetricsCollection(oldMetrics)
	newMetricsCollection := parseMetricsCollection(newMetrics)

	if ok, msg := testForMissingMetricsLabels(oldMetricsCollection, newMetricsCollection); !ok {
		t.Error(msg)
	}
}
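
// TestDumpMetrics scrapes both exporters at the endpoint selected via
// -endpoint (hr, mr, lr or the default "metrics") and dumps the results
// according to the -dumpMetrics, -extraMetrics and -multipleLabels flags.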
func TestDumpMetrics(t *testing.T) {
	if !getBool(doRun) {
		t.Skip("For manual runs only through make")
		return
	}

	var ep string
	switch *endpointFlag {
	case "hr":
		ep = highResolutionEndpoint
	case "mr":
		ep = medResolutionEndpoint
	case "lr":
		ep = lowResolutionEndpoint
	default:
		ep = "metrics"
	}

	newMetrics, err := getMetricsFrom(updatedExporterFileName, ep)
	if err != nil {
		t.Error(err)
		return
	}

	oldMetrics, err := getMetricsFrom(oldExporterFileName, ep)
	if err != nil {
		t.Error(err)
		return
	}

	oldMetricsCollection := parseMetricsCollection(oldMetrics)
	newMetricsCollection := parseMetricsCollection(newMetrics)

	dumpMetricsInfo(oldMetricsCollection, newMetricsCollection)
}
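
// TestResolutionsMetricDuplicates fails if the same metric line (name plus
// labels) is exposed by more than one resolution endpoint.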
func TestResolutionsMetricDuplicates(t *testing.T) {
	if !getBool(doRun) {
		t.Skip("For manual runs only through make")
		return
	}

	hrMetrics, err := getMetricsFrom(updatedExporterFileName, highResolutionEndpoint)
	if err != nil {
		t.Error(err)
		return
	}

	mrMetrics, err := getMetricsFrom(updatedExporterFileName, medResolutionEndpoint)
	if err != nil {
		t.Error(err)
		return
	}

	lrMetrics, err := getMetricsFrom(updatedExporterFileName, lowResolutionEndpoint)
	if err != nil {
		t.Error(err)
		return
	}

	hrMetricsColl := parseMetricsCollection(hrMetrics)
	mrMetricsColl := parseMetricsCollection(mrMetrics)
	lrMetricsColl := parseMetricsCollection(lrMetrics)

	ms := make(map[string][]string)
	addMetrics(ms, hrMetricsColl.MetricNamesWithLabels, "HR")
	addMetrics(ms, mrMetricsColl.MetricNamesWithLabels, "MR")
	addMetrics(ms, lrMetricsColl.MetricNamesWithLabels, "LR")

	count := 0
	msg := ""
	for metric, resolutions := range ms {
		if len(resolutions) > 1 {
			count++
			msg += fmt.Sprintf("'%s' is duplicated in %s\n", metric, resolutions)
		}
	}

	if count > 0 {
		t.Errorf("Found %d duplicated metrics:\n%s", count, msg)
	}
}
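
// addMetrics records, for each non-empty, non-comment metric line, which
// resolution it was seen at.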
func addMetrics(ms map[string][]string, metrics []string, resolution string) {
	for _, m := range metrics {
		if m == "" || strings.HasPrefix(m, "# ") {
			continue
		}
		ms[m] = append(ms[m], resolution)
	}
}
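
// TestResolutions compares old and new exporters at each resolution endpoint
// separately.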
func TestResolutions(t *testing.T) {
	if !getBool(doRun) {
		t.Skip("For manual runs only through make")
		return
	}

	t.Run("TestLowResolution", func(t *testing.T) {
		testResolution(t, lowResolutionEndpoint, "Low")
	})
	t.Run("TestMediumResolution", func(t *testing.T) {
		testResolution(t, medResolutionEndpoint, "Medium")
	})
	t.Run("TestHighResolution", func(t *testing.T) {
		testResolution(t, highResolutionEndpoint, "High")
	})
}
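
// testResolution scrapes one resolution endpoint on both exporters and fails
// if the new exporter is missing metrics or labels; extra metrics in the new
// exporter only produce a warning.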
func testResolution(t *testing.T, resolutionEp, resolutionName string) {
	newMetrics, err := getMetricsFrom(updatedExporterFileName, resolutionEp)
	if err != nil {
		t.Error(err)
		return
	}

	oldMetrics, err := getMetricsFrom(oldExporterFileName, resolutionEp)
	if err != nil {
		t.Error(err)
		return
	}

	oldMetricsCollection := parseMetricsCollection(oldMetrics)
	newMetricsCollection := parseMetricsCollection(newMetrics)

	missingCount := 0
	missingMetrics := ""
	missingLabelsCount := 0
	missingLabels := ""
	for _, oldMetric := range oldMetricsCollection.MetricsData {
		// skip empty lines, comments and redundant metrics
		if oldMetric.name == "" || strings.HasPrefix(oldMetric.name, "# ") || oldMetric.name == skipMetricName {
			continue
		}

		metricFound := false
		labelsMatch := false
		for _, newMetric := range newMetricsCollection.MetricsData {
			if newMetric.name != oldMetric.name {
				continue
			}

			metricFound = true

			if newMetric.labelsRawStr == oldMetric.labelsRawStr {
				labelsMatch = true
				break
			}

			if arrIsSubsetOf(oldMetric.labelsWithValues, newMetric.labelsWithValues) {
				labelsMatch = true
				break
			}
		}

		if !metricFound {
			missingCount++
			missingMetrics += fmt.Sprintf("%s\n", oldMetric.name)
		} else if !labelsMatch {
			missingLabelsCount++
			missingLabels += fmt.Sprintf("%s\n", oldMetric.name)
		}
	}

	if missingCount > 0 {
		t.Errorf("%d metric(s) are missing in new exporter for %s resolution:\n%s", missingCount, resolutionName, missingMetrics)
	}
	if missingLabelsCount > 0 {
		t.Errorf("%d metric(s) have missing labels in new exporter for %s resolution:\n%s", missingLabelsCount, resolutionName, missingLabels)
	}

	extraCount := 0
	extraMetrics := ""
	for _, metric := range newMetricsCollection.MetricNamesWithLabels {
		if metric == "" || strings.HasPrefix(metric, "# ") {
			continue
		}

		if !contains(oldMetricsCollection.MetricNamesWithLabels, metric) {
			extraCount++
			extraMetrics += fmt.Sprintf("%s\n", metric)
		}
	}
	if extraCount > 0 {
		fmt.Printf("[WARN] %d metrics are redundant in new exporter for %s resolution\n%s", extraCount, resolutionName, extraMetrics)
	}
}
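
// dumpMetricsInfo runs whichever dumps were requested via command-line flags.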
func dumpMetricsInfo(oldMetricsCollection, newMetricsCollection MetricsCollection) {
	if getBool(dumpMetricsFlag) {
		dumpMetrics(oldMetricsCollection, newMetricsCollection)
	}

	if getBool(printExtraMetrics) {
		dumpExtraMetrics(newMetricsCollection, oldMetricsCollection)
	}

	if getBool(printMultipleLabels) {
		dumpMetricsWithMultipleLabelSets(newMetricsCollection)
	}
}
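
// testForMissingMetricsLabels reports metrics whose old label sets are not a
// subset of the new ones. Build and version info metrics are exempt because
// their labels legitimately change between releases.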
func testForMissingMetricsLabels(oldMetricsCollection, newMetricsCollection MetricsCollection) (bool, string) {
	missingMetricLabels := make(map[string]string)
	missingMetricLabelsNames := make([]string, 0)
	for metric, labels := range oldMetricsCollection.LabelsByMetric {
		// skip version info label mismatch
		if metric == "postgres_exporter_build_info" || metric == "go_info" {
			continue
		}

		if newLabels, ok := newMetricsCollection.LabelsByMetric[metric]; ok {
			if !arrIsSubsetOf(labels, newLabels) {
				missingMetricLabels[metric] = fmt.Sprintf(" expected: %s\n actual: %s", labels, newLabels)
				missingMetricLabelsNames = append(missingMetricLabelsNames, metric)
			}
		}
	}
	sort.Strings(missingMetricLabelsNames)

	if len(missingMetricLabelsNames) > 0 {
		ll := make([]string, 0)
		for _, metric := range missingMetricLabelsNames {
			labels := missingMetricLabels[metric]
			ll = append(ll, metric+"\n"+labels)
		}

		return false, fmt.Sprintf("Missing metric labels (%d metrics):\n%s", len(missingMetricLabelsNames), strings.Join(ll, "\n"))
	}

	return true, ""
}
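
// testForMissingMetrics reports metric names that exist in the old exporter
// but not in the new one.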
func testForMissingMetrics(oldMetricsCollection, newMetricsCollection MetricsCollection) (bool, string) {
	missingMetrics := make([]string, 0)
	for metricName := range oldMetricsCollection.LabelsByMetric {
		if metricName == skipMetricName {
			continue
		}

		if _, ok := newMetricsCollection.LabelsByMetric[metricName]; !ok {
			missingMetrics = append(missingMetrics, metricName)
		}
	}
	sort.Strings(missingMetrics)

	if len(missingMetrics) > 0 {
		return false, fmt.Sprintf("Missing metrics (%d items):\n%s", len(missingMetrics), strings.Join(missingMetrics, "\n"))
	}

	return true, ""
}
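
// dumpMetricsWithMultipleLabelSets prints metrics that were collected more
// than once with overlapping label sets (one raw label string containing the
// other), which usually means the metric is exposed by several collectors.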
func dumpMetricsWithMultipleLabelSets(newMetricsCollection MetricsCollection) {
	metricsWithMultipleLabels := make(map[string][]string)
	for metricName, newMetricLabels := range newMetricsCollection.LabelsByMetric {
		if len(newMetricLabels) > 1 {
			found := false
			for i := 0; !found && i < len(newMetricLabels); i++ {
				lbl := newMetricLabels[i]
				for j := 0; j < len(newMetricLabels); j++ {
					if i == j {
						continue
					}

					lbl1 := newMetricLabels[j]
					if lbl == "" || lbl1 == "" {
						continue
					}

					if strings.Contains(lbl, lbl1) || strings.Contains(lbl1, lbl) {
						found = true
						break
					}
				}
			}

			if found {
				metricsWithMultipleLabels[metricName] = newMetricLabels
			}
		}
	}

	if len(metricsWithMultipleLabels) > 0 {
		ss := make([]string, 0, len(metricsWithMultipleLabels))
		for k, v := range metricsWithMultipleLabels {
			ss = append(ss, fmt.Sprintf("%s\n %s", k, strings.Join(v, "\n ")))
		}
		fmt.Printf("Some metrics were collected multiple times with extra labels (%d items):\n %s\n\n", len(metricsWithMultipleLabels), strings.Join(ss, "\n "))
	}
}
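
// dumpExtraMetrics prints metric names that exist only in the new exporter.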
func dumpExtraMetrics(newMetricsCollection, oldMetricsCollection MetricsCollection) {
	extraMetrics := make([]string, 0)
	for metricName := range newMetricsCollection.LabelsByMetric {
		if _, ok := oldMetricsCollection.LabelsByMetric[metricName]; !ok {
			extraMetrics = append(extraMetrics, metricName)
		}
	}
	sort.Strings(extraMetrics)

	if len(extraMetrics) > 0 {
		fmt.Printf("Extra metrics (%d items):\n %s\n\n", len(extraMetrics), strings.Join(extraMetrics, "\n "))
	}
}
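
// parseMetricsCollection turns a raw exposition payload into a
// MetricsCollection: it splits the payload into lines, strips sample values,
// parses each line into a Metric, and groups label sets by metric name.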
func parseMetricsCollection(metricRaw string) MetricsCollection {
	rawMetricsArr := strings.Split(metricRaw, "\n")
	metricNamesArr := getMetricNames(rawMetricsArr)
	metrics := parseMetrics(metricNamesArr)
	labelsByMetrics := groupByMetrics(metrics)

	return MetricsCollection{
		MetricNamesWithLabels: metricNamesArr,
		MetricsData:           metrics,
		RawMetricStr:          metricRaw,
		RawMetricStrArr:       rawMetricsArr,
		LabelsByMetric:        labelsByMetrics,
	}
}
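
// arrIsSubsetOf reports whether every element of smaller is present in
// larger; an empty smaller set only matches an empty larger set.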
func arrIsSubsetOf(smaller, larger []string) bool {
	if len(smaller) == 0 {
		return len(larger) == 0
	}

	for _, x := range smaller {
		if !contains(larger, x) {
			return false
		}
	}

	return true
}
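
// contains reports whether the slice s contains the string e.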
func contains(s []string, e string) bool {
	for _, a := range s {
		if a == e {
			return true
		}
	}
	return false
}
// groupByMetrics returns labels grouped by metric
func groupByMetrics(metrics []Metric) map[string][]string {
	mtr := make(map[string][]string)
	for _, metric := range metrics {
		mtr[metric.name] = append(mtr[metric.name], metric.labelsRawStr)
	}

	return mtr
}
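
// parseMetrics parses value-stripped exposition lines into Metric structs,
// e.g. `pg_up{collector="exporter",server="db:5432"}` yields name "pg_up"
// with two labelsWithValues entries.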
func parseMetrics(metrics []string) []Metric {
	metricsData := make([]Metric, 0, len(metrics))
	for _, metricRawStr := range metrics {
		if metricRawStr == "" || strings.HasPrefix(metricRawStr, "# ") {
			continue
		}

		var mName, mLabels string
		var labelsArr []string
		if idx := strings.Index(metricRawStr, "{"); idx >= 0 {
			mName = metricRawStr[:idx]
			mLabels = metricRawStr[idx+1 : len(metricRawStr)-1]
			if mLabels != "" {
				labelsArr = strings.Split(mLabels, ",")
			}
		} else {
			mName = metricRawStr
		}

		metricsData = append(metricsData, Metric{
			name:             mName,
			labelsRawStr:     mLabels,
			labelsWithValues: labelsArr,
		})
	}

	return metricsData
}
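
// dumpMetrics writes the raw and value-stripped metrics of both exporters to
// files under assets/ for manual diffing.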
func dumpMetrics(oldMetrics, newMetrics MetricsCollection) {
	writeLines("assets/metrics.old.txt", oldMetrics.RawMetricStrArr)
	writeLines("assets/metrics.new.txt", newMetrics.RawMetricStrArr)
	writeLines("assets/metrics.names.old.txt", oldMetrics.MetricNamesWithLabels)
	writeLines("assets/metrics.names.new.txt", newMetrics.MetricNamesWithLabels)
}

// writeLines writes each string on its own line. Creation failures are
// reported and skipped rather than dereferencing a nil file, since these
// dumps are only a debugging aid.
func writeLines(fileName string, lines []string) {
	f, err := os.Create(fileName)
	if err != nil {
		fmt.Printf("[WARN] failed to create %s: %v\n", fileName, err)
		return
	}
	defer f.Close()

	for _, s := range lines {
		f.WriteString(s)
		f.WriteString("\n")
	}
}
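
// getMetricNames strips the sample value (the text after the last space) from
// each exposition line, e.g. `pg_up 1` becomes `pg_up`; empty lines and
// comments pass through unchanged.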
func getMetricNames(metrics []string) []string {
	length := len(metrics)
	ret := make([]string, length)
	for i := 0; i < length; i++ {
		str := metrics[i]
		if str == "" || strings.HasPrefix(str, "# ") {
			ret[i] = str
			continue
		}

		idx := strings.LastIndex(str, " ")
		if idx >= 0 {
			ret[i] = str[:idx]
		} else {
			ret[i] = str
		}
	}

	return ret
}
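
// getMetrics scrapes the default "metrics" endpoint of the given exporter
// binary.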
func getMetrics(fileName string) (string, error) {
	return getMetricsFrom(fileName, "metrics")
}

// getMetricsFrom launches the exporter binary, scrapes the given endpoint
// once, and stops the exporter again.
func getMetricsFrom(fileName, endpoint string) (string, error) {
	cmd, port, collectOutput, err := launchExporter(fileName)
	if err != nil {
		return "", errors.Wrap(err, "Failed to launch exporter")
	}

	metrics, err := tryGetMetricsFrom(port, endpoint)
	if err != nil {
		return "", errors.Wrap(err, "Failed to get metrics")
	}

	err = stopExporter(cmd, collectOutput)
	if err != nil {
		return "", errors.Wrap(err, "Failed to stop exporter")
	}

	return metrics, nil
}