Mirror of https://github.com/prometheus-community/postgres_exporter (synced 2025-04-22 23:15:26 +00:00)

Commit db4c6e8ee7 (parent 0bacea243a)
PMM-7806: add compatibility with percona pg exporter (#68)

* add compatibility with old exporter
Deleted CircleCI configuration (90 lines):
@@ -1,90 +0,0 @@
---
version: 2.1

orbs:
  prometheus: prometheus/prometheus@0.15.0

executors:
  # This must match .promu.yml.
  golang:
    docker:
      - image: circleci/golang:1.17

jobs:
  test:
    executor: golang

    steps:
      - prometheus/setup_environment
      - run: make
      - prometheus/store_artifact:
          file: postgres_exporter

  integration:
    docker:
      - image: circleci/golang:1.17
      - image: << parameters.postgres_image >>
        environment:
          POSTGRES_DB: circle_test
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: test

    parameters:
      postgres_image:
        type: string

    environment:
      DATA_SOURCE_NAME: 'postgresql://postgres:test@localhost:5432/circle_test?sslmode=disable'
      GOOPTS: '-v -tags integration'

    steps:
      - checkout
      - setup_remote_docker
      - run: docker version
      - run: make build
      - run: make test

workflows:
  version: 2
  postgres_exporter:
    jobs:
      - test:
          filters:
            tags:
              only: /.*/
      - integration:
          matrix:
            parameters:
              postgres_image:
                - circleci/postgres:10
                - circleci/postgres:11
                - circleci/postgres:12
                - circleci/postgres:13
                - cimg/postgres:14.1
      - prometheus/build:
          name: build
          filters:
            tags:
              only: /.*/
      - prometheus/publish_master:
          context: org-context
          docker_hub_organization: prometheuscommunity
          quay_io_organization: prometheuscommunity
          requires:
            - test
            - build
          filters:
            branches:
              only: master
      - prometheus/publish_release:
          context: org-context
          docker_hub_organization: prometheuscommunity
          quay_io_organization: prometheuscommunity
          requires:
            - test
            - build
          filters:
            tags:
              only: /^v.*/
            branches:
              ignore: /.*/
.github/ISSUE_TEMPLATE/bug_report.md (deleted, 47 lines)
@@ -1,47 +0,0 @@
---
name: Bug report
about: Create a report to help us improve.
title: ''
assignees: ''
---

<!--

	Please do *NOT* ask usage questions in Github issues.

	If your issue is not a feature request or bug report use our community support.

	https://prometheus.io/community/

-->

**What did you do?**

**What did you expect to see?**

**What did you see instead? Under which circumstances?**

**Environment**

* System information:

	insert output of `uname -srm` here

* postgres_exporter version:

	insert output of `postgres_exporter --version` here

* postgres_exporter flags:

```
insert list of flags used here
```

* PostgreSQL version:

	insert PostgreSQL version here

* Logs:
```
insert logs relevant to the issue here
```
.github/ISSUE_TEMPLATE/config.yml (deleted, 5 lines)
@@ -1,5 +0,0 @@
blank_issues_enabled: false
contact_links:
  - name: Prometheus community support
    url: https://prometheus.io/community/
    about: List of communication channels for the Prometheus community.
.github/ISSUE_TEMPLATE/feature_request.md (deleted, 21 lines)
@@ -1,21 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project.
title: ''
labels: ''
assignees: ''
---

<!--

	Please do *NOT* ask usage questions in Github issues.

	If your issue is not a feature request or bug report use our community support.

	https://prometheus.io/community/

-->

## Proposal
**Use case. Why is this important?**

*"Nice to have" is not a good use case. :)*
.github/workflows/go.yml (new file, 60 lines)
@@ -0,0 +1,60 @@
name: Go

on:
  push:
    branches:
      - main
    tags:
      - v[0-9]+.[0-9]+.[0-9]+*
  pull_request:

jobs:
  test:
    name: Test
    strategy:
      matrix:
        go-version:
          - 1.17
        postgresql-image:
          - postgres:10
          - postgres:11
          - postgres:12
          - postgres:13
          - postgres:14
    runs-on: ubuntu-latest
    # The environment this job references
    environment:
      name: CI
    steps:
      - name: Set up Go release
        uses: percona-platform/setup-go@v2
        with:
          go-version: ${{ matrix.go-version }}
      - name: Checkout code
        uses: percona-platform/checkout@v2
      - name: Run checks
        run: |
          go build -modfile=tools/go.mod -o bin/golangci-lint github.com/golangci/golangci-lint/cmd/golangci-lint
          go build -modfile=tools/go.mod -o bin/reviewdog github.com/reviewdog/reviewdog/cmd/reviewdog
          bin/golangci-lint run -c=.golangci-required.yml --out-format=line-number | env REVIEWDOG_GITHUB_API_TOKEN=${{ secrets.GITHUB_TOKEN }} bin/reviewdog -f=golangci-lint -level=error -reporter=github-pr-check
          bin/golangci-lint run -c=.golangci.yml --out-format=line-number | env REVIEWDOG_GITHUB_API_TOKEN=${{ secrets.GITHUB_TOKEN }} bin/reviewdog -f=golangci-lint -level=error -reporter=github-pr-review
      - name: Run Tests
        run: |
          sudo chown 999:999 testdata/ssl/server/*
          sudo chmod 600 testdata/ssl/server/*
          docker-compose up -d
          make
          make test
        env:
          POSTGRESQL_IMAGE: ${{ matrix.postgresql-image }}
      - name: Run debug commands on failure
        if: ${{ failure() }}
        run: |
          env
          go version
          go env
          pwd
          git status
          docker --version
          docker-compose --version
          docker-compose logs
@@ -124,7 +124,7 @@ endif
 %: common-% ;

 .PHONY: common-all
-common-all: precheck style check_license lint yamllint unused build test
+common-all: precheck style lint unused build test

 .PHONY: common-style
 common-style:
@@ -136,17 +136,6 @@ common-style:
 		exit 1; \
 	fi

-.PHONY: common-check_license
-common-check_license:
-	@echo ">> checking license header"
-	@licRes=$$(for file in $$(find . -type f -iname '*.go' ! -path './vendor/*') ; do \
-		awk 'NR<=3' $$file | grep -Eq "(Copyright|generated|GENERATED)" || echo $$file; \
-	done); \
-	if [ -n "$${licRes}" ]; then \
-		echo "license header checking failed:"; echo "$${licRes}"; \
-		exit 1; \
-	fi
-
 .PHONY: common-deps
 common-deps:
 	@echo ">> getting dependencies"
@@ -204,15 +193,6 @@ else
 endif
 endif

-.PHONY: common-yamllint
-common-yamllint:
-	@echo ">> running yamllint on all YAML files in the repository"
-ifeq (, $(shell which yamllint))
-	@echo "yamllint not installed so skipping"
-else
-	yamllint .
-endif
-
 # For backward-compatibility.
 .PHONY: common-staticcheck
 common-staticcheck: lint
@@ -36,7 +36,7 @@ func (e *Exporter) discoverDatabaseDSNs() []string {
 		var dsnURI *url.URL
 		var dsnConnstring string

-		if strings.HasPrefix(dsn, "postgresql://") {
+		if strings.HasPrefix(dsn, "postgresql://") || strings.HasPrefix(dsn, "postgres://") {
 			var err error
 			dsnURI, err = url.Parse(dsn)
 			if err != nil {
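With this change, auto-discovery accepts both URI spellings of a PostgreSQL DSN. A minimal, self-contained sketch of the relaxed prefix check (the DSN values below are illustrative only, not taken from this PR):

```
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Both URI schemes now satisfy the check used by discoverDatabaseDSNs.
	for _, dsn := range []string{
		"postgresql://postgres:test@localhost:5432/postgres?sslmode=disable",
		"postgres://postgres:test@localhost:5432/postgres?sslmode=disable",
	} {
		ok := strings.HasPrefix(dsn, "postgresql://") || strings.HasPrefix(dsn, "postgres://")
		fmt.Println(dsn, ok) // both print true
	}
}
```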
@@ -31,13 +31,16 @@ import (
 )

 var (
 	listenAddress = kingpin.Flag("web.listen-address", "Address to listen on for web interface and telemetry.").Default(":9187").Envar("PG_EXPORTER_WEB_LISTEN_ADDRESS").String()
 	webConfig = webflag.AddFlags(kingpin.CommandLine)
+	webConfigFile = kingpin.Flag(
+		"web.config",
+		"[EXPERIMENTAL] Path to config yaml file that can enable TLS or authentication.",
+	).Default("").String()
 	metricPath = kingpin.Flag("web.telemetry-path", "Path under which to expose metrics.").Default("/metrics").Envar("PG_EXPORTER_WEB_TELEMETRY_PATH").String()
 	disableDefaultMetrics = kingpin.Flag("disable-default-metrics", "Do not include default metrics.").Default("false").Envar("PG_EXPORTER_DISABLE_DEFAULT_METRICS").Bool()
 	disableSettingsMetrics = kingpin.Flag("disable-settings-metrics", "Do not include pg_settings metrics.").Default("false").Envar("PG_EXPORTER_DISABLE_SETTINGS_METRICS").Bool()
 	autoDiscoverDatabases = kingpin.Flag("auto-discover-databases", "Whether to discover the databases on a server dynamically.").Default("false").Envar("PG_EXPORTER_AUTO_DISCOVER_DATABASES").Bool()
-	queriesPath = kingpin.Flag("extend.query-path", "Path to custom queries to run.").Default("").Envar("PG_EXPORTER_EXTEND_QUERY_PATH").String()
 	onlyDumpMaps = kingpin.Flag("dumpmaps", "Do not run, simply dump the maps.").Bool()
 	constantLabelsList = kingpin.Flag("constantLabels", "A list of label=value separated by comma(,).").Default("").Envar("PG_EXPORTER_CONSTANT_LABELS").String()
 	excludeDatabases = kingpin.Flag("exclude-databases", "A list of databases to remove when autoDiscoverDatabases is enabled").Default("").Envar("PG_EXPORTER_EXCLUDE_DATABASES").String()
@@ -100,7 +103,6 @@ func main() {
 		DisableDefaultMetrics(*disableDefaultMetrics),
 		DisableSettingsMetrics(*disableSettingsMetrics),
 		AutoDiscoverDatabases(*autoDiscoverDatabases),
-		WithUserQueriesPath(*queriesPath),
 		WithConstantLabels(*constantLabelsList),
 		ExcludeDatabases(*excludeDatabases),
 		IncludeDatabases(*includeDatabases),
@@ -115,6 +117,9 @@ func main() {

 	prometheus.MustRegister(exporter)

+	cleanup := initializePerconaExporters(dsn, opts)
+	defer cleanup()
+
 	pe, err := collector.NewPostgresCollector(
 		logger,
 		dsn,
@@ -132,9 +137,17 @@ func main() {
 		w.Write(landingPage) // nolint: errcheck
 	})

+	var webCfg string
+	if *webConfigFile != "" {
+		webCfg = *webConfigFile
+	}
+	if *webConfig != "" {
+		webCfg = *webConfig
+	}
+
 	level.Info(logger).Log("msg", "Listening on address", "address", *listenAddress)
 	srv := &http.Server{Addr: *listenAddress}
-	if err := web.ListenAndServe(srv, *webConfig, logger); err != nil {
+	if err := web.ListenAndServe(srv, webCfg, logger); err != nil {
 		level.Error(logger).Log("msg", "Error running HTTP server", "err", err)
 		os.Exit(1)
 	}
cmd/postgres_exporter/percona-reference-metrics.txt (new file, 1638 lines) — file diff suppressed because it is too large.
cmd/postgres_exporter/percona_compatibility_test.go (new file, 142 lines)
@@ -0,0 +1,142 @@
//go:build manual
// +build manual

package main

import (
	"bufio"
	_ "embed"
	"fmt"
	"github.com/stretchr/testify/assert"
	"io/ioutil"
	"net/http"
	"regexp"
	"sort"
	"strings"
	"testing"
	"time"
)

//go:embed percona-reference-metrics.txt
var referenceMetrics string

// TestReferenceCompatibility checks that exposed metrics are not missed.
//
// Used to make sure that metrics are present after updating from upstream.
// You need to run the exporter locally on port 42002.
func TestReferenceCompatibility(t *testing.T) {
	client := &http.Client{
		Timeout: time.Second * 10,
	}
	req, err := http.NewRequest("GET", "http://localhost:42000/metrics", nil)
	assert.Nil(t, err)
	req.SetBasicAuth("pmm", "/agent_id/825dcdbf-af1c-4eb4-9e96-21699aa6ff7b")
	resp, err := client.Do(req)
	assert.Nil(t, err)
	defer resp.Body.Close()
	currentMetricsBytes, err := ioutil.ReadAll(resp.Body)
	assert.Nil(t, err)

	currentMetrics := toMap(t, string(currentMetricsBytes))
	referenceMetrics := toMap(t, referenceMetrics)

	// remove matches
	for m := range currentMetrics {
		_, found := referenceMetrics[m]
		if found {
			delete(referenceMetrics, m)
			delete(currentMetrics, m)
		}
	}

	fmt.Printf("Extra metrics [%d]:\n", len(currentMetrics))
	for _, metric := range sortedKeys(currentMetrics) {
		fmt.Printf("\t%s\n", metric)
	}
	if len(referenceMetrics) != 0 {
		fmt.Printf("Not Supported metrics [%d]:\n", len(referenceMetrics))
		for _, metric := range sortedKeys(referenceMetrics) {
			fmt.Printf("\t%s\n", metric)
		}
		assert.FailNowf(t, "Found not supported metrics", "Count: %d", len(referenceMetrics))
	}
}

func sortedKeys(m map[string]string) []string {
	keys := make([]string, 0, len(m))
	for k := range m {
		keys = append(keys, k)
	}
	sort.Strings(keys)
	return keys
}

func toMap(t *testing.T, rawMetrics string) map[string]string {
	result := make(map[string]string)

	scanner := bufio.NewScanner(strings.NewReader(rawMetrics))
	scanner.Split(bufio.ScanLines)

	for scanner.Scan() {
		next := scanner.Text()
		isComment := strings.HasPrefix(next, "#")
		if isComment {
			continue
		}
		next = cleanKeyOrValue(next)
		if next != "" {
			items := strings.Split(next, " ")
			if len(items) > 1 {
				result[items[0]] = items[1]
			} else {
				fmt.Println("WARN: ")
			}
		}
	}

	return result
}

func cleanKeyOrValue(s string) (res string) {
	res = s

	itemsToIgnore := []string{
		"example-queries",
	}

	for _, each := range itemsToIgnore {
		if strings.Contains(s, each) {
			return ""
		}
	}

	regexpsToRemove := []*regexp.Regexp{
		regexp.MustCompile(`[+-]?(\d*[.])?\d+(e[+-]?\d*)?`),
		regexp.MustCompile(`\d*\.\d*\.\d*\.\d*:\d*`),
		regexp.MustCompile(`go1.\d*.\d*`),
		regexp.MustCompile(`filename=".*",`),
		regexp.MustCompile(`hashsum=".*"`),
	}
	for _, each := range regexpsToRemove {
		res = each.ReplaceAllString(res, "")
	}

	stringsToRemove := []string{
		"PostgreSQL 11.15 (Debian 11.15-1.pgdg90+1) on x86_64-pc-linux-gnu, compiled by gcc (Debian 6.3.0-18+deb9u1) 6.3.0 20170516, 64-bit",
		"PostgreSQL 11.16 (Debian 11.16-1.pgdg90+1) on x86_64-pc-linux-gnu, compiled by gcc (Debian 6.3.0-18+deb9u1) 6.3.0 20170516, 64-bit",
		"collector=\"exporter\",",
		"fastpath function call",
		"idle in transaction (aborted)",
		"idle in transaction",
		"+Inf",
		"0.0.1",
		"collector=\"custom_query.mr\",",
		"datname=\"pmm-managed\"",
		"datname=\"pmm-agent\"",
	}
	for _, each := range stringsToRemove {
		res = strings.ReplaceAll(res, each, "")
	}

	return
}
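The test above (built only with the `manual` tag) compares two /metrics payloads after stripping everything that legitimately differs between runs — sample values, addresses, Go versions, file names, hash sums — and fails if a reference metric has no counterpart. A small self-contained sketch of that normalize-then-compare idea (the metric lines here are made up; this is not the test's exact code):

```
package main

import (
	"fmt"
	"regexp"
	"strings"
)

func main() {
	// Strip numbers so "pg_up 1" and "pg_up 1.0" reduce to the same key,
	// mirroring the regexp-based cleanup in cleanKeyOrValue.
	numbers := regexp.MustCompile(`[+-]?(\d*[.])?\d+(e[+-]?\d*)?`)
	normalize := func(line string) string {
		return strings.TrimSpace(numbers.ReplaceAllString(line, ""))
	}
	reference := normalize("pg_up 1")
	current := normalize("pg_up 1.0")
	fmt.Println(reference == current) // true: same series, different sample value
}
```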
cmd/postgres_exporter/percona_exporter.go (new file, 143 lines)
@@ -0,0 +1,143 @@
package main

import (
	"crypto/sha256"
	"fmt"
	"github.com/blang/semver"
	"github.com/go-kit/log/level"
	"github.com/prometheus/client_golang/prometheus"
	"gopkg.in/alecthomas/kingpin.v2"
	"io/ioutil"
	"path/filepath"
)

type MetricResolution string

const (
	LR MetricResolution = "lr"
	MR MetricResolution = "mr"
	HR MetricResolution = "hr"
)

var (
	collectCustomQueryLr = kingpin.Flag("collect.custom_query.lr", "Enable custom queries with low resolution directory.").Default("false").Envar("PG_EXPORTER_EXTEND_QUERY_LR").Bool()
	collectCustomQueryMr = kingpin.Flag("collect.custom_query.mr", "Enable custom queries with medium resolution directory.").Default("false").Envar("PG_EXPORTER_EXTEND_QUERY_MR").Bool()
	collectCustomQueryHr = kingpin.Flag("collect.custom_query.hr", "Enable custom queries with high resolution directory.").Default("false").Envar("PG_EXPORTER_EXTEND_QUERY_HR").Bool()
	collectCustomQueryLrDirectory = kingpin.Flag("collect.custom_query.lr.directory", "Path to custom queries with low resolution directory.").Envar("PG_EXPORTER_EXTEND_QUERY_LR_PATH").String()
	collectCustomQueryMrDirectory = kingpin.Flag("collect.custom_query.mr.directory", "Path to custom queries with medium resolution directory.").Envar("PG_EXPORTER_EXTEND_QUERY_MR_PATH").String()
	collectCustomQueryHrDirectory = kingpin.Flag("collect.custom_query.hr.directory", "Path to custom queries with high resolution directory.").Envar("PG_EXPORTER_EXTEND_QUERY_HR_PATH").String()
)

func initializePerconaExporters(dsn []string, opts []ExporterOpt) func() {
	queriesPath := map[MetricResolution]string{
		HR: *collectCustomQueryHrDirectory,
		MR: *collectCustomQueryMrDirectory,
		LR: *collectCustomQueryLrDirectory,
	}

	defaultOpts := []ExporterOpt{CollectorName("exporter")}
	defaultOpts = append(defaultOpts, opts...)
	defaultExporter := NewExporter(
		dsn,
		defaultOpts...,
	)
	prometheus.MustRegister(defaultExporter)

	hrExporter := NewExporter(dsn,
		CollectorName("custom_query.hr"),
		DisableDefaultMetrics(true),
		DisableSettingsMetrics(true),
		AutoDiscoverDatabases(*autoDiscoverDatabases),
		WithUserQueriesEnabled(map[MetricResolution]bool{
			HR: *collectCustomQueryHr,
			MR: false,
			LR: false,
		}),
		WithUserQueriesPath(queriesPath),
		WithConstantLabels(*constantLabelsList),
		ExcludeDatabases(*excludeDatabases),
	)
	prometheus.MustRegister(hrExporter)

	mrExporter := NewExporter(dsn,
		CollectorName("custom_query.mr"),
		DisableDefaultMetrics(true),
		DisableSettingsMetrics(true),
		AutoDiscoverDatabases(*autoDiscoverDatabases),
		WithUserQueriesEnabled(map[MetricResolution]bool{
			HR: false,
			MR: *collectCustomQueryMr,
			LR: false,
		}),
		WithUserQueriesPath(queriesPath),
		WithConstantLabels(*constantLabelsList),
		ExcludeDatabases(*excludeDatabases),
	)
	prometheus.MustRegister(mrExporter)

	lrExporter := NewExporter(dsn,
		CollectorName("custom_query.lr"),
		DisableDefaultMetrics(true),
		DisableSettingsMetrics(true),
		AutoDiscoverDatabases(*autoDiscoverDatabases),
		WithUserQueriesEnabled(map[MetricResolution]bool{
			HR: false,
			MR: false,
			LR: *collectCustomQueryLr,
		}),
		WithUserQueriesPath(queriesPath),
		WithConstantLabels(*constantLabelsList),
		ExcludeDatabases(*excludeDatabases),
	)
	prometheus.MustRegister(lrExporter)

	return func() {
		defaultExporter.servers.Close()
		hrExporter.servers.Close()
		mrExporter.servers.Close()
		lrExporter.servers.Close()
	}
}

func (e *Exporter) loadCustomQueries(res MetricResolution, version semver.Version, server *Server) {
	if e.userQueriesPath[res] != "" {
		fi, err := ioutil.ReadDir(e.userQueriesPath[res])
		if err != nil {
			level.Error(logger).Log("msg", fmt.Sprintf("failed read dir %q for custom query", e.userQueriesPath[res]),
				"err", err)
			return
		}

		for _, v := range fi {
			if v.IsDir() {
				continue
			}

			if filepath.Ext(v.Name()) == ".yml" || filepath.Ext(v.Name()) == ".yaml" {
				path := filepath.Join(e.userQueriesPath[res], v.Name())
				e.addCustomQueriesFromFile(path, version, server)
			}
		}
	}
}

func (e *Exporter) addCustomQueriesFromFile(path string, version semver.Version, server *Server) {
	// Calculate the hashsum of the useQueries
	userQueriesData, err := ioutil.ReadFile(path)
	if err != nil {
		level.Error(logger).Log("msg", "Failed to reload user queries:"+path, "err", err)
		e.userQueriesError.WithLabelValues(path, "").Set(1)
		return
	}

	hashsumStr := fmt.Sprintf("%x", sha256.Sum256(userQueriesData))

	if err := addQueries(userQueriesData, version, server); err != nil {
		level.Error(logger).Log("msg", "Failed to reload user queries:"+path, "err", err)
		e.userQueriesError.WithLabelValues(path, hashsumStr).Set(1)
		return
	}

	// Mark user queries as successfully loaded
	e.userQueriesError.WithLabelValues(path, hashsumStr).Set(0)
}
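initializePerconaExporters registers four Exporter instances (default plus hr/mr/lr) with the same Prometheus registry, each carrying a distinct collector name that WithConstantLabels turns into a constant "collector" label. One reason that label matters is that the registry would otherwise reject collectors whose internal metrics share identical names and constant label sets. A hedged, self-contained sketch of that registration behaviour (the gauge name below is made up, not one of the exporter's real metrics):

```
package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// Gauges with the same name are only co-registrable when their constant
	// label sets differ, which is what the "collector" label provides.
	newGauge := func(collector string) prometheus.Gauge {
		return prometheus.NewGauge(prometheus.GaugeOpts{
			Name:        "pg_example_metric",
			Help:        "Illustrative metric.",
			ConstLabels: prometheus.Labels{"collector": collector},
		})
	}
	reg := prometheus.NewRegistry()
	fmt.Println(reg.Register(newGauge("exporter")))        // <nil>
	fmt.Println(reg.Register(newGauge("custom_query.hr"))) // <nil>: distinct label set
	fmt.Println(reg.Register(newGauge("exporter")))        // duplicate metrics collector registration attempted
}
```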
@@ -14,11 +14,9 @@
 package main

 import (
-	"crypto/sha256"
 	"database/sql"
 	"errors"
 	"fmt"
-	"io/ioutil"
 	"math"
 	"regexp"
 	"strings"
@@ -435,6 +433,10 @@ type cachedMetrics struct {

 // Exporter collects Postgres metrics. It implements prometheus.Collector.
 type Exporter struct {
+	collectorName      string
+	userQueriesPath    map[MetricResolution]string
+	userQueriesEnabled map[MetricResolution]bool
+
 	// Holds a reference to the build in column mappings. Currently this is for testing purposes
 	// only, since it just points to the global.
 	builtinMetricMaps map[string]intermediateMetricMap
@@ -444,7 +446,6 @@ type Exporter struct {
 	excludeDatabases []string
 	includeDatabases []string
 	dsn              []string
-	userQueriesPath  string
 	constantLabels   prometheus.Labels
 	duration         prometheus.Gauge
 	error            prometheus.Gauge
@@ -467,6 +468,20 @@ func DisableDefaultMetrics(b bool) ExporterOpt {
 	}
 }

+// CollectorName configures collector name.
+func CollectorName(name string) ExporterOpt {
+	return func(e *Exporter) {
+		e.collectorName = name
+	}
+}
+
+// WithUserQueriesEnabled enables user's queries.
+func WithUserQueriesEnabled(p map[MetricResolution]bool) ExporterOpt {
+	return func(e *Exporter) {
+		e.userQueriesEnabled = p
+	}
+}
+
 // DisableSettingsMetrics configures pg_settings export.
 func DisableSettingsMetrics(b bool) ExporterOpt {
 	return func(e *Exporter) {
@@ -498,7 +513,7 @@ func IncludeDatabases(s string) ExporterOpt {
 }

 // WithUserQueriesPath configures user's queries path.
-func WithUserQueriesPath(p string) ExporterOpt {
+func WithUserQueriesPath(p map[MetricResolution]string) ExporterOpt {
 	return func(e *Exporter) {
 		e.userQueriesPath = p
 	}
@@ -508,6 +523,9 @@ func WithUserQueriesPath(p string) ExporterOpt {
 func WithConstantLabels(s string) ExporterOpt {
 	return func(e *Exporter) {
 		e.constantLabels = parseConstLabels(s)
+		if e.collectorName != "" {
+			e.constantLabels["collector"] = e.collectorName
+		}
 	}
 }

@@ -656,25 +674,14 @@ func (e *Exporter) checkMapVersions(ch chan<- prometheus.Metric, server *Server)

 		server.lastMapVersion = semanticVersion

-		if e.userQueriesPath != "" {
-			// Clear the metric while a reload is happening
-			e.userQueriesError.Reset()
-
-			// Calculate the hashsum of the useQueries
-			userQueriesData, err := ioutil.ReadFile(e.userQueriesPath)
-			if err != nil {
-				level.Error(logger).Log("msg", "Failed to reload user queries", "path", e.userQueriesPath, "err", err)
-				e.userQueriesError.WithLabelValues(e.userQueriesPath, "").Set(1)
-			} else {
-				hashsumStr := fmt.Sprintf("%x", sha256.Sum256(userQueriesData))
-
-				if err := addQueries(userQueriesData, semanticVersion, server); err != nil {
-					level.Error(logger).Log("msg", "Failed to reload user queries", "path", e.userQueriesPath, "err", err)
-					e.userQueriesError.WithLabelValues(e.userQueriesPath, hashsumStr).Set(1)
-				} else {
-					// Mark user queries as successfully loaded
-					e.userQueriesError.WithLabelValues(e.userQueriesPath, hashsumStr).Set(0)
-				}
-			}
+		if e.userQueriesPath[HR] != "" || e.userQueriesPath[MR] != "" || e.userQueriesPath[LR] != "" {
+			// Clear the metric while reload
+			e.userQueriesError.Reset()
+		}
+
+		for res := range e.userQueriesPath {
+			if e.userQueriesEnabled[res] {
+				e.loadCustomQueries(res, semanticVersion, server)
+			}
 		}
 	}
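One detail worth noting in the WithConstantLabels change above: the "collector" label is only injected when collectorName is already set, so CollectorName must appear before WithConstantLabels in the option list (as it does in initializePerconaExporters). A simplified, self-contained sketch of that ordering dependency (the types here are stand-ins, not the exporter's real ones):

```
package main

import "fmt"

type exporter struct {
	collectorName  string
	constantLabels map[string]string
}

type opt func(*exporter)

func collectorName(n string) opt { return func(e *exporter) { e.collectorName = n } }

func withConstantLabels(labels map[string]string) opt {
	return func(e *exporter) {
		e.constantLabels = labels
		// Mirrors the PR's logic: the label is only added if a collector
		// name has already been applied.
		if e.collectorName != "" {
			e.constantLabels["collector"] = e.collectorName
		}
	}
}

func main() {
	e := &exporter{}
	for _, o := range []opt{collectorName("custom_query.hr"), withConstantLabels(map[string]string{})} {
		o(e)
	}
	fmt.Println(e.constantLabels) // map[collector:custom_query.hr]
}
```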
@@ -101,6 +101,72 @@ var statBGWriter = map[string]*prometheus.Desc{
 		[]string{"server"},
 		prometheus.Labels{},
 	),
+	"percona_checkpoints_timed": prometheus.NewDesc(
+		prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoints_timed"),
+		"Number of scheduled checkpoints that have been performed",
+		[]string{"server"},
+		prometheus.Labels{},
+	),
+	"percona_checkpoints_req": prometheus.NewDesc(
+		prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoints_req"),
+		"Number of requested checkpoints that have been performed",
+		[]string{"server"},
+		prometheus.Labels{},
+	),
+	"percona_checkpoint_write_time": prometheus.NewDesc(
+		prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoint_write_time"),
+		"Total amount of time that has been spent in the portion of checkpoint processing where files are written to disk, in milliseconds",
+		[]string{"server"},
+		prometheus.Labels{},
+	),
+	"percona_checkpoint_sync_time": prometheus.NewDesc(
+		prometheus.BuildFQName(namespace, bgWriterSubsystem, "checkpoint_sync_time"),
+		"Total amount of time that has been spent in the portion of checkpoint processing where files are synchronized to disk, in milliseconds",
+		[]string{"server"},
+		prometheus.Labels{},
+	),
+	"percona_buffers_checkpoint": prometheus.NewDesc(
+		prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_checkpoint"),
+		"Number of buffers written during checkpoints",
+		[]string{"server"},
+		prometheus.Labels{},
+	),
+	"percona_buffers_clean": prometheus.NewDesc(
+		prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_clean"),
+		"Number of buffers written by the background writer",
+		[]string{"server"},
+		prometheus.Labels{},
+	),
+	"percona_maxwritten_clean": prometheus.NewDesc(
+		prometheus.BuildFQName(namespace, bgWriterSubsystem, "maxwritten_clean"),
+		"Number of times the background writer stopped a cleaning scan because it had written too many buffers",
+		[]string{"server"},
+		prometheus.Labels{},
+	),
+	"percona_buffers_backend": prometheus.NewDesc(
+		prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_backend"),
+		"Number of buffers written directly by a backend",
+		[]string{"server"},
+		prometheus.Labels{},
+	),
+	"percona_buffers_backend_fsync": prometheus.NewDesc(
+		prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_backend_fsync"),
+		"Number of times a backend had to execute its own fsync call (normally the background writer handles those even when the backend does its own write)",
+		[]string{"server"},
+		prometheus.Labels{},
+	),
+	"percona_buffers_alloc": prometheus.NewDesc(
+		prometheus.BuildFQName(namespace, bgWriterSubsystem, "buffers_alloc"),
+		"Number of buffers allocated",
+		[]string{"server"},
+		prometheus.Labels{},
+	),
+	"percona_stats_reset": prometheus.NewDesc(
+		prometheus.BuildFQName(namespace, bgWriterSubsystem, "stats_reset"),
+		"Time at which these statistics were last reset",
+		[]string{"server"},
+		prometheus.Labels{},
+	),
 }

 func (PGStatBGWriterCollector) Update(ctx context.Context, server *server, ch chan<- prometheus.Metric) error {
@@ -208,5 +274,72 @@ func (PGStatBGWriterCollector) Update(ctx context.Context, server *server, ch chan<- prometheus.Metric) error {
 		server.GetName(),
 	)
+
+	ch <- prometheus.MustNewConstMetric(
+		statBGWriter["percona_checkpoints_timed"],
+		prometheus.CounterValue,
+		float64(cpt),
+		server.GetName(),
+	)
+	ch <- prometheus.MustNewConstMetric(
+		statBGWriter["percona_checkpoints_req"],
+		prometheus.CounterValue,
+		float64(cpr),
+		server.GetName(),
+	)
+	ch <- prometheus.MustNewConstMetric(
+		statBGWriter["percona_checkpoint_write_time"],
+		prometheus.CounterValue,
+		float64(cpwt),
+		server.GetName(),
+	)
+	ch <- prometheus.MustNewConstMetric(
+		statBGWriter["percona_checkpoint_sync_time"],
+		prometheus.CounterValue,
+		float64(cpst),
+		server.GetName(),
+	)
+	ch <- prometheus.MustNewConstMetric(
+		statBGWriter["percona_buffers_checkpoint"],
+		prometheus.CounterValue,
+		float64(bcp),
+		server.GetName(),
+	)
+	ch <- prometheus.MustNewConstMetric(
+		statBGWriter["percona_buffers_clean"],
+		prometheus.CounterValue,
+		float64(bc),
+		server.GetName(),
+	)
+	ch <- prometheus.MustNewConstMetric(
+		statBGWriter["percona_maxwritten_clean"],
+		prometheus.CounterValue,
+		float64(mwc),
+		server.GetName(),
+	)
+	ch <- prometheus.MustNewConstMetric(
+		statBGWriter["percona_buffers_backend"],
+		prometheus.CounterValue,
+		float64(bb),
+		server.GetName(),
+	)
+	ch <- prometheus.MustNewConstMetric(
+		statBGWriter["percona_buffers_backend_fsync"],
+		prometheus.CounterValue,
+		float64(bbf),
+		server.GetName(),
+	)
+	ch <- prometheus.MustNewConstMetric(
+		statBGWriter["percona_buffers_alloc"],
+		prometheus.CounterValue,
+		float64(ba),
+		server.GetName(),
+	)
+	ch <- prometheus.MustNewConstMetric(
+		statBGWriter["percona_stats_reset"],
+		prometheus.CounterValue,
+		float64(sr.Unix()),
+		server.GetName(),
+	)

 	return nil
 }
docker-compose.md (new file, 24 lines)
@@ -0,0 +1,24 @@
In order to start PostgreSQL with SSL support, we need to change the file permissions
for the ssl cert and key.

Please run these commands if you want to run docker-compose locally.

    sudo chown 999:999 testdata/ssl/server/*
    sudo chmod 0600 testdata/ssl/server/*

Start the container:
```
docker-compose up
```

To be able to connect with psql you need to own the certs:

    sudo chown ${USER}:${USER} testdata/ssl/client*
    sudo chmod 0600 testdata/ssl/client/*

Connect using psql:

```
psql "host=127.0.0.1 port=5433 user=root password=root dbname=postgres sslmode=verify-ca sslcert=${PWD}/testdata/ssl/client/server.crt sslkey=${PWD}/testdata/ssl/client/server.key sslrootcert=${PWD}/testdata/ssl/client/CA.crt"
```
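For reference, the same verify-ca parameters shown above for psql also work as a key/value DSN for the lib/pq driver used by the exporter, e.g. as a DATA_SOURCE_NAME pointing at the SSL-enabled container. A hedged sketch (paths and credentials assume the docker-compose setup described in this file):

```
package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/lib/pq" // PostgreSQL driver
)

func main() {
	// Connection parameters mirror the psql example above.
	dsn := "host=127.0.0.1 port=5433 user=root password=root dbname=postgres " +
		"sslmode=verify-ca sslcert=testdata/ssl/client/server.crt " +
		"sslkey=testdata/ssl/client/server.key sslrootcert=testdata/ssl/client/CA.crt"
	db, err := sql.Open("postgres", dsn)
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
	if err := db.Ping(); err != nil {
		log.Fatal(err)
	}
	fmt.Println("connected over TLS")
}
```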
docker-compose.yml (new file, 28 lines)
@@ -0,0 +1,28 @@
---
version: '3'
services:
  postgresql:
    image: ${POSTGRESQL_IMAGE:-postgres}
    container_name: postgres_exporter_postgresql
    ports:
      - 127.0.0.1:5432:5432
    environment:
      - POSTGRES_USER=root
      - POSTGRES_PASSWORD=root

  postgresql-ssl:
    image: ${POSTGRESQL_IMAGE:-postgres}
    container_name: postgres_exporter_postgresql-ssl
    command: >
      -c ssl=on
      -c ssl_cert_file=/ssl/server.crt
      -c ssl_key_file=/ssl/server.key
      -c ssl_ca_file=/ssl/CA.crt
      -c log_min_messages=DEBUG4
    ports:
      - 5433:5432
    environment:
      - POSTGRES_USER=root
      - POSTGRES_PASSWORD=root
    volumes:
      - ./testdata/ssl/server:/ssl
example-queries-postgres.yml (new file, 11 lines)
@@ -0,0 +1,11 @@
## ######################################################
## WARNING: This is an example. Do not edit this file.
## To create your own Custom Queries - create a new file
## ######################################################
## Custom query example.
#pg_replication:
#  query: "SELECT EXTRACT(EPOCH FROM (now() - pg_last_xact_replay_timestamp())) as lag"
#  metrics:
#    - lag:
#        usage: "GAUGE"
#        description: "Replication lag behind master in seconds"
example.alerts.yml (new file, 57 lines)
@@ -0,0 +1,57 @@
---
groups:
- name: PostgreSQL
  rules:
  - alert: PostgreSQLMaxConnectionsReached
    expr: sum(pg_stat_activity_count) by (instance) > sum(pg_settings_max_connections) by (instance)
    for: 1m
    labels:
      severity: email
    annotations:
      summary: "{{ $labels.instance }} has maxed out Postgres connections."
      description: "{{ $labels.instance }} is exceeding the currently configured maximum Postgres connection limit (current value: {{ $value }}s). Services may be degraded - please take immediate action (you probably need to increase max_connections in the Docker image and re-deploy)."

  - alert: PostgreSQLHighConnections
    expr: sum(pg_stat_activity_count) by (instance) > sum(pg_settings_max_connections * 0.8) by (instance)
    for: 10m
    labels:
      severity: email
    annotations:
      summary: "{{ $labels.instance }} is over 80% of max Postgres connections."
      description: "{{ $labels.instance }} is exceeding 80% of the currently configured maximum Postgres connection limit (current value: {{ $value }}s). Please check utilization graphs and confirm if this is normal service growth, abuse or an otherwise temporary condition or if new resources need to be provisioned (or the limits increased, which is most likely)."

  - alert: PostgreSQLDown
    expr: pg_up != 1
    for: 1m
    labels:
      severity: email
    annotations:
      summary: "PostgreSQL is not processing queries: {{ $labels.instance }}"
      description: "{{ $labels.instance }} is rejecting query requests from the exporter, and thus probably not allowing DNS requests to work either. User services should not be affected provided at least 1 node is still alive."

  - alert: PostgreSQLSlowQueries
    expr: avg(rate(pg_stat_activity_max_tx_duration{datname!~"template.*"}[2m])) by (datname) > 2 * 60
    for: 2m
    labels:
      severity: email
    annotations:
      summary: "PostgreSQL high number of slow queries on {{ $labels.cluster }} for database {{ $labels.datname }}"
      description: "PostgreSQL high number of slow queries {{ $labels.cluster }} for database {{ $labels.datname }} with a value of {{ $value }}"

  - alert: PostgreSQLQPS
    expr: avg(irate(pg_stat_database_xact_commit{datname!~"template.*"}[5m]) + irate(pg_stat_database_xact_rollback{datname!~"template.*"}[5m])) by (datname) > 10000
    for: 5m
    labels:
      severity: email
    annotations:
      summary: "PostgreSQL high number of queries per second {{ $labels.cluster }} for database {{ $labels.datname }}"
      description: "PostgreSQL high number of queries per second on {{ $labels.cluster }} for database {{ $labels.datname }} with a value of {{ $value }}"

  - alert: PostgreSQLCacheHitRatio
    expr: avg(rate(pg_stat_database_blks_hit{datname!~"template.*"}[5m]) / (rate(pg_stat_database_blks_hit{datname!~"template.*"}[5m]) + rate(pg_stat_database_blks_read{datname!~"template.*"}[5m]))) by (datname) < 0.98
    for: 5m
    labels:
      severity: email
    annotations:
      summary: "PostgreSQL low cache hit rate on {{ $labels.cluster }} for database {{ $labels.datname }}"
      description: "PostgreSQL low on cache hit rate on {{ $labels.cluster }} for database {{ $labels.datname }} with a value of {{ $value }}"
go.mod
@@ -1,6 +1,6 @@
 module github.com/prometheus-community/postgres_exporter

-go 1.14
+go 1.17

 require (
 	github.com/blang/semver v3.5.1+incompatible
@@ -10,7 +10,33 @@ require (
 	github.com/prometheus/client_model v0.2.0
 	github.com/prometheus/common v0.32.1
 	github.com/prometheus/exporter-toolkit v0.7.1
+	github.com/stretchr/testify v1.4.0
 	gopkg.in/alecthomas/kingpin.v2 v2.2.6
 	gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c
 	gopkg.in/yaml.v2 v2.4.0
 )
+
+require (
+	github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect
+	github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d // indirect
+	github.com/beorn7/perks v1.0.1 // indirect
+	github.com/cespare/xxhash/v2 v2.1.2 // indirect
+	github.com/davecgh/go-spew v1.1.1 // indirect
+	github.com/go-logfmt/logfmt v0.5.1 // indirect
+	github.com/golang/protobuf v1.5.2 // indirect
+	github.com/jpillora/backoff v1.0.0 // indirect
+	github.com/kr/pretty v0.2.1 // indirect
+	github.com/kr/text v0.1.0 // indirect
+	github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
+	github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
+	github.com/pkg/errors v0.9.1 // indirect
+	github.com/pmezard/go-difflib v1.0.0 // indirect
+	github.com/prometheus/procfs v0.7.3 // indirect
+	golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e // indirect
+	golang.org/x/net v0.0.0-20210525063256-abc453219eb5 // indirect
+	golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c // indirect
+	golang.org/x/sys v0.0.0-20220114195835-da31bd327af9 // indirect
+	golang.org/x/text v0.3.6 // indirect
+	google.golang.org/appengine v1.6.6 // indirect
+	google.golang.org/protobuf v1.26.0 // indirect
+)
queries-postgres-uptime.yml (new file, 7 lines)
@@ -0,0 +1,7 @@
pg_postmaster_uptime:
  query: "select extract(epoch from current_timestamp - pg_postmaster_start_time()) as seconds"
  master: true
  metrics:
    - seconds:
        usage: "GAUGE"
        description: "Service uptime"
238
queries.yaml
238
queries.yaml
@ -1,3 +1,4 @@
|
|||||||
|
#### Queries are commented due to PMM-8859
|
||||||
pg_replication:
|
pg_replication:
|
||||||
query: "SELECT CASE WHEN NOT pg_is_in_recovery() THEN 0 ELSE GREATEST (0, EXTRACT(EPOCH FROM (now() - pg_last_xact_replay_timestamp()))) END AS lag"
|
query: "SELECT CASE WHEN NOT pg_is_in_recovery() THEN 0 ELSE GREATEST (0, EXTRACT(EPOCH FROM (now() - pg_last_xact_replay_timestamp()))) END AS lag"
|
||||||
master: true
|
master: true
|
||||||
@ -16,31 +17,31 @@ pg_postmaster:
|
|||||||
|
|
||||||
pg_stat_user_tables:
|
pg_stat_user_tables:
|
||||||
query: |
|
query: |
|
||||||
SELECT
|
SELECT
|
||||||
current_database() datname,
|
current_database() datname,
|
||||||
schemaname,
|
schemaname,
|
||||||
relname,
|
relname,
|
||||||
seq_scan,
|
seq_scan,
|
||||||
seq_tup_read,
|
seq_tup_read,
|
||||||
idx_scan,
|
idx_scan,
|
||||||
idx_tup_fetch,
|
idx_tup_fetch,
|
||||||
n_tup_ins,
|
n_tup_ins,
|
||||||
n_tup_upd,
|
n_tup_upd,
|
||||||
n_tup_del,
|
n_tup_del,
|
||||||
n_tup_hot_upd,
|
n_tup_hot_upd,
|
||||||
n_live_tup,
|
n_live_tup,
|
||||||
n_dead_tup,
|
n_dead_tup,
|
||||||
n_mod_since_analyze,
|
n_mod_since_analyze,
|
||||||
COALESCE(last_vacuum, '1970-01-01Z') as last_vacuum,
|
COALESCE(last_vacuum, '1970-01-01Z') as last_vacuum,
|
||||||
COALESCE(last_autovacuum, '1970-01-01Z') as last_autovacuum,
|
COALESCE(last_autovacuum, '1970-01-01Z') as last_autovacuum,
|
||||||
COALESCE(last_analyze, '1970-01-01Z') as last_analyze,
|
COALESCE(last_analyze, '1970-01-01Z') as last_analyze,
|
||||||
COALESCE(last_autoanalyze, '1970-01-01Z') as last_autoanalyze,
|
COALESCE(last_autoanalyze, '1970-01-01Z') as last_autoanalyze,
|
||||||
vacuum_count,
|
vacuum_count,
|
||||||
autovacuum_count,
|
autovacuum_count,
|
||||||
analyze_count,
|
analyze_count,
|
||||||
autoanalyze_count
|
autoanalyze_count
|
||||||
FROM
|
FROM
|
||||||
pg_stat_user_tables
|
pg_stat_user_tables
|
||||||
metrics:
|
metrics:
|
||||||
- datname:
|
- datname:
|
||||||
usage: "LABEL"
|
usage: "LABEL"
|
||||||
@ -146,118 +147,85 @@ pg_statio_user_tables:
|
|||||||
usage: "COUNTER"
|
usage: "COUNTER"
|
||||||
description: "Number of buffer hits in this table's TOAST table indexes (if any)"
|
description: "Number of buffer hits in this table's TOAST table indexes (if any)"
|
||||||
|
|
||||||
# WARNING: This set of metrics can be very expensive on a busy server as every unique query executed will create an additional time series
|
pg_database:
|
||||||
pg_stat_statements:
|
query: "SELECT pg_database.datname, pg_database_size(pg_database.datname) as size_bytes FROM pg_database"
|
||||||
query: "SELECT t2.rolname, t3.datname, queryid, calls, total_time / 1000 as total_time_seconds, min_time / 1000 as min_time_seconds, max_time / 1000 as max_time_seconds, mean_time / 1000 as mean_time_seconds, stddev_time / 1000 as stddev_time_seconds, rows, shared_blks_hit, shared_blks_read, shared_blks_dirtied, shared_blks_written, local_blks_hit, local_blks_read, local_blks_dirtied, local_blks_written, temp_blks_read, temp_blks_written, blk_read_time / 1000 as blk_read_time_seconds, blk_write_time / 1000 as blk_write_time_seconds FROM pg_stat_statements t1 JOIN pg_roles t2 ON (t1.userid=t2.oid) JOIN pg_database t3 ON (t1.dbid=t3.oid) WHERE t2.rolname != 'rdsadmin'"
|
|
||||||
master: true
|
master: true
|
||||||
|
cache_seconds: 30
|
||||||
metrics:
|
metrics:
|
||||||
- rolname:
|
|
||||||
usage: "LABEL"
|
|
||||||
description: "Name of user"
|
|
||||||
- datname:
|
- datname:
|
||||||
usage: "LABEL"
|
usage: "LABEL"
|
||||||
description: "Name of database"
|
description: "Name of the database"
|
||||||
- queryid:
|
- size_bytes:
|
||||||
usage: "LABEL"
|
|
||||||
description: "Query ID"
|
|
||||||
- calls:
|
|
||||||
usage: "COUNTER"
|
|
||||||
description: "Number of times executed"
|
|
||||||
- total_time_seconds:
|
|
||||||
usage: "COUNTER"
|
|
||||||
description: "Total time spent in the statement, in milliseconds"
|
|
||||||
- min_time_seconds:
|
|
||||||
usage: "GAUGE"
|
usage: "GAUGE"
|
||||||
description: "Minimum time spent in the statement, in milliseconds"
|
description: "Disk space used by the database"
|
||||||
- max_time_seconds:
|
####
|
||||||
usage: "GAUGE"
|
#pg_stat_statements:
|
||||||
description: "Maximum time spent in the statement, in milliseconds"
|
# query: "SELECT t2.rolname, t3.datname, queryid, calls, total_time / 1000 as total_time_seconds, min_time / 1000 as min_time_seconds, max_time / 1000 as max_time_seconds, mean_time / 1000 as mean_time_seconds, stddev_time / 1000 as stddev_time_seconds, rows, shared_blks_hit, shared_blks_read, shared_blks_dirtied, shared_blks_written, local_blks_hit, local_blks_read, local_blks_dirtied, local_blks_written, temp_blks_read, temp_blks_written, blk_read_time / 1000 as blk_read_time_seconds, blk_write_time / 1000 as blk_write_time_seconds FROM pg_stat_statements t1 JOIN pg_roles t2 ON (t1.userid=t2.oid) JOIN pg_database t3 ON (t1.dbid=t3.oid) WHERE t2.rolname != 'rdsadmin'"
|
||||||
- mean_time_seconds:
|
# master: true
|
||||||
usage: "GAUGE"
|
# metrics:
|
||||||
description: "Mean time spent in the statement, in milliseconds"
|
# - rolname:
|
||||||
- stddev_time_seconds:
|
# usage: "LABEL"
|
||||||
usage: "GAUGE"
|
# description: "Name of user"
|
||||||
description: "Population standard deviation of time spent in the statement, in milliseconds"
|
# - datname:
|
||||||
- rows:
|
# usage: "LABEL"
|
||||||
usage: "COUNTER"
|
# description: "Name of database"
|
||||||
description: "Total number of rows retrieved or affected by the statement"
|
# - queryid:
|
||||||
- shared_blks_hit:
|
# usage: "LABEL"
|
||||||
usage: "COUNTER"
|
# description: "Query ID"
|
||||||
description: "Total number of shared block cache hits by the statement"
|
# - calls:
|
||||||
- shared_blks_read:
|
# usage: "COUNTER"
|
||||||
usage: "COUNTER"
|
# description: "Number of times executed"
|
||||||
description: "Total number of shared blocks read by the statement"
|
# - total_time_seconds:
|
||||||
- shared_blks_dirtied:
|
# usage: "COUNTER"
|
||||||
usage: "COUNTER"
|
# description: "Total time spent in the statement, in milliseconds"
|
||||||
description: "Total number of shared blocks dirtied by the statement"
|
# - min_time_seconds:
|
||||||
- shared_blks_written:
|
# usage: "GAUGE"
|
||||||
usage: "COUNTER"
|
# description: "Minimum time spent in the statement, in milliseconds"
|
||||||
description: "Total number of shared blocks written by the statement"
|
# - max_time_seconds:
|
||||||
- local_blks_hit:
|
# usage: "GAUGE"
|
||||||
usage: "COUNTER"
|
# description: "Maximum time spent in the statement, in milliseconds"
|
||||||
description: "Total number of local block cache hits by the statement"
|
# - mean_time_seconds:
|
||||||
- local_blks_read:
|
# usage: "GAUGE"
|
||||||
usage: "COUNTER"
|
# description: "Mean time spent in the statement, in milliseconds"
|
||||||
description: "Total number of local blocks read by the statement"
|
# - stddev_time_seconds:
|
||||||
- local_blks_dirtied:
|
# usage: "GAUGE"
|
||||||
usage: "COUNTER"
|
# description: "Population standard deviation of time spent in the statement, in milliseconds"
|
||||||
description: "Total number of local blocks dirtied by the statement"
|
# - rows:
|
||||||
- local_blks_written:
|
# usage: "COUNTER"
|
||||||
usage: "COUNTER"
|
# description: "Total number of rows retrieved or affected by the statement"
|
||||||
description: "Total number of local blocks written by the statement"
|
# - shared_blks_hit:
|
||||||
- temp_blks_read:
|
# usage: "COUNTER"
|
||||||
usage: "COUNTER"
|
# description: "Total number of shared block cache hits by the statement"
|
||||||
description: "Total number of temp blocks read by the statement"
|
# - shared_blks_read:
|
||||||
- temp_blks_written:
|
# usage: "COUNTER"
|
||||||
usage: "COUNTER"
|
# description: "Total number of shared blocks read by the statement"
|
||||||
description: "Total number of temp blocks written by the statement"
|
# - shared_blks_dirtied:
|
||||||
- blk_read_time_seconds:
|
# usage: "COUNTER"
|
||||||
usage: "COUNTER"
|
# description: "Total number of shared blocks dirtied by the statement"
|
||||||
description: "Total time the statement spent reading blocks, in milliseconds (if track_io_timing is enabled, otherwise zero)"
|
# - shared_blks_written:
|
||||||
- blk_write_time_seconds:
|
# usage: "COUNTER"
|
||||||
usage: "COUNTER"
|
# description: "Total number of shared blocks written by the statement"
|
||||||
description: "Total time the statement spent writing blocks, in milliseconds (if track_io_timing is enabled, otherwise zero)"
|
# - local_blks_hit:
|
||||||
|
# usage: "COUNTER"
|
||||||
pg_process_idle:
|
# description: "Total number of local block cache hits by the statement"
|
||||||
query: |
|
# - local_blks_read:
|
||||||
WITH
|
# usage: "COUNTER"
|
||||||
metrics AS (
|
# description: "Total number of local blocks read by the statement"
|
||||||
SELECT
|
# - local_blks_dirtied:
|
||||||
application_name,
|
# usage: "COUNTER"
|
||||||
SUM(EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - state_change))::bigint)::float AS process_idle_seconds_sum,
|
# description: "Total number of local blocks dirtied by the statement"
|
||||||
COUNT(*) AS process_idle_seconds_count
|
# - local_blks_written:
|
||||||
FROM pg_stat_activity
|
# usage: "COUNTER"
|
||||||
WHERE state = 'idle'
|
# description: "Total number of local blocks written by the statement"
|
||||||
GROUP BY application_name
|
# - temp_blks_read:
|
||||||
),
|
# usage: "COUNTER"
|
||||||
buckets AS (
|
# description: "Total number of temp blocks read by the statement"
|
||||||
SELECT
|
# - temp_blks_written:
|
||||||
application_name,
|
# usage: "COUNTER"
|
||||||
le,
|
# description: "Total number of temp blocks written by the statement"
|
||||||
SUM(
|
# - blk_read_time_seconds:
|
||||||
CASE WHEN EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - state_change)) <= le
|
# usage: "COUNTER"
|
||||||
THEN 1
|
# description: "Total time the statement spent reading blocks, in milliseconds (if track_io_timing is enabled, otherwise zero)"
|
||||||
ELSE 0
|
# - blk_write_time_seconds:
|
||||||
END
|
# usage: "COUNTER"
|
||||||
)::bigint AS bucket
|
# description: "Total time the statement spent writing blocks, in milliseconds (if track_io_timing is enabled, otherwise zero)"
|
||||||
FROM
|
|
||||||
pg_stat_activity,
|
|
||||||
UNNEST(ARRAY[1, 2, 5, 15, 30, 60, 90, 120, 300]) AS le
|
|
||||||
GROUP BY application_name, le
|
|
||||||
ORDER BY application_name, le
|
|
||||||
)
|
|
||||||
SELECT
|
|
||||||
application_name,
|
|
||||||
process_idle_seconds_sum as seconds_sum,
|
|
||||||
process_idle_seconds_count as seconds_count,
|
|
||||||
ARRAY_AGG(le) AS seconds,
|
|
||||||
ARRAY_AGG(bucket) AS seconds_bucket
|
|
||||||
FROM metrics JOIN buckets USING (application_name)
|
|
||||||
GROUP BY 1, 2, 3
|
|
||||||
metrics:
|
|
||||||
- application_name:
|
|
||||||
usage: "LABEL"
|
|
||||||
description: "Application Name"
|
|
||||||
- seconds:
|
|
||||||
usage: "HISTOGRAM"
|
|
||||||
description: "Idle time of server processes"
|
|
||||||
|
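The pg_process_idle entry above is a custom-query mapping: each result row (one per application_name) carries a ready-made histogram, with seconds_sum and seconds_count becoming the histogram sum and count, while the parallel seconds and seconds_bucket arrays supply the bucket bounds and their cumulative counts. Below is a minimal Go sketch of turning such a row into a Prometheus histogram sample with the client library; the sample values, the metric name pg_process_idle_seconds (derived here as query key plus column name), and the use of MustNewConstHistogram are illustrative assumptions, not code from this commit.

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
	dto "github.com/prometheus/client_model/go"
)

func main() {
	// Hypothetical values for one application_name row returned by the
	// pg_process_idle query above: sum/count plus the parallel le/bucket arrays.
	appName := "psql"
	sum := 42.0
	count := uint64(3)
	les := []float64{1, 2, 5, 15, 30, 60, 90, 120, 300}
	cumulative := []uint64{0, 0, 1, 1, 2, 3, 3, 3, 3}

	// Zip the two arrays into the bucket map expected by the client library.
	buckets := make(map[float64]uint64, len(les))
	for i, le := range les {
		buckets[le] = cumulative[i]
	}

	desc := prometheus.NewDesc(
		"pg_process_idle_seconds", // assumed exported name: <query key>_<column>
		"Idle time of server processes",
		[]string{"application_name"}, nil,
	)
	h := prometheus.MustNewConstHistogram(desc, count, sum, buckets, appName)

	// Render the sample to show the resulting sum, count and buckets.
	var out dto.Metric
	if err := h.Write(&out); err != nil {
		panic(err)
	}
	fmt.Println(out.String())
}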
28
testdata/ssl/client/CA.crt
vendored
Normal file
@ -0,0 +1,28 @@
-----BEGIN CERTIFICATE-----
MIIE2jCCAsKgAwIBAgIBATANBgkqhkiG9w0BAQsFADANMQswCQYDVQQDEwJDQTAe
Fw0yMTA5MjkxMzUzMjZaFw0yMzAzMjkxNDAzMjRaMA0xCzAJBgNVBAMTAkNBMIIC
IjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAy3AfoZtrIsDx+qzJAaNcbSv7
s6+gVhw/RYbAT2nJ8w8uRxhjhhkY+0KI16m8TeuJvSS082RQbVTnw7g3viqmSz+P
rc5okrfpjDTT0ArM63SrYsKd53t3JUT0hX/mOGoetQD3pSQDsg/f/mNA0Ezosb6q
0iO3yIlLDzQ3igMMCBLsPYMYSfIv8+iKOiaWXmjH0sOszNNIvMKXi/u9E1LumjDl
R1zpi05YrWscj1yplBgBVYH5aBxy9V8jU3NR6jPWuAVmOUOjeCS7zSS3Oc31qNpW
t9/PosydAaOfkGVYYXytwHk0Xc7h25SSN6kS/KTxJb6oP9ASGoMXxUk9T0q6xtUZ
gvY8GDGQmUH8+xUazd64jQxEjq3RzIThASgAtG6I1VuH1lWxSRq73cWx4XtDmR4i
Acfv4y4s4LN564zcKjeLGTulJebXdDqnGyZWA6+LqkNMKKR5T9aHNFghSiol34pz
icCe6Z4pt/nkoRTlPv30+cwcqgZF27QP31ozaif/lzxq686T40mdxEneRA0Wpr2P
Zxpj1OdSZ7oqIX6/MFcHR4LLwv2VnLgZ4uBOPVeXBnQ/4LoOsBah09r0fyjvuhSJ
dTqP4+VDBXQG+6l7buU0a1Wl5mehRes6CHMFlq2f0fOvvkW/NoB4RkIOnFdP+g7E
RwefiIR+Y8PEYQPIjc8CAwEAAaNFMEMwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB
/wQIMAYBAf8CAQAwHQYDVR0OBBYEFAVtr/8ICsWeABZqEp9Mzp1Mprk6MA0GCSqG
SIb3DQEBCwUAA4ICAQCdSOYDXmpNdZcX7Vmn05p0szxBM8/PrtL9xwSlsnCWomLj
KE8L1lP9G8qda80RFNp4lEXvF21Iflmc2Xf2wDkQj2//qk678labV2sZapje/JqJ
UCxGbFAVV3V3HGs5lvSgC0mhGZkavGijI5LYgJkQ6C5bxwGA0AUeCmi2DTnWVeYb
LHQyo+Nku/Ko6pVgJ3N7CbxJG6kd7A7i/rZzkXhpz5e8SSWLIDUIRtbytZ/tyU/Z
oUgzDf13pUrt6I40VTFeUQKtaUkmiBNpC3ZhkuO+alGSJeDfa/KSn/CnvkJ398nF
VUmi4ihLSwvC0vSO9VRmYBMwo+JEjLFT9+n5B1uUfzs27snJQ5q40zMKrfTfKYaw
K/sXsVdrIfEjyClM9C4PWhPbSycc4HtVpLPJKCP05l4G+aO7HwfqV+SYbZd3ii+E
yAcoH7UGQw92JCNK2JXhKE1vzZtuzcZNedd1cqYDo/vKCeBrPhR9qVVVOta9gFps
OEWzdLL2YSunMnoFvy34OumeSzzAL5MMughbHfO+fmUwI9vtdKINtRyE9A7j8X3p
H+Mm+avMEErcBbZ7u6LgI2aPfZfQbwoy8fv0VG5JN6bAKAh0n2QBGG99JC8OzI5q
q6Ash0DqwDNzWkM7IQkECeXQ5PASkah2alBg7mewlS8d6R6NzQ3ILzCB6qCRgA==
-----END CERTIFICATE-----
24
testdata/ssl/client/server.crt
vendored
Normal file
@ -0,0 +1,24 @@
-----BEGIN CERTIFICATE-----
MIIEGTCCAgGgAwIBAgIQGIGv8QdtH0oxH27TNeil7TANBgkqhkiG9w0BAQsFADAN
MQswCQYDVQQDEwJDQTAeFw0yMTA5MjkxMzU2NTFaFw0yMzAzMjkxNDAzMjNaMBEx
DzANBgNVBAMTBnNlcnZlcjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
ALrWoFyRmJvQnVoczotp39PJnwzMwfOM4o7611sSp/X/0jwa7SoW5nCFn5CCskgR
cCIbmlV16Mf/5WJXNhWHdzHpsS7t24fQIUFjHmsFpd1VQ/S/7vVUIOXLNwl0ZJpl
la3DGKqx9AAP26cyuI34wa09JnA3PF9BkaFDImE5VjujlIt/S4QJXbnBZOnwuO5D
qDJKeldX801GFf9Wvvn9SxGFoIlIPoTKZLhaT0RVHlc8Hoxwglxk/tIVVLChfqed
jSnqOnUmgLN5B7my+4CjXuup7+4MU5qIgvYcmZNwuI++lB5E6uUmX4BDJmquDM4Y
y4ldm0QspKDtHcSC20Pgow8CAwEAAaNxMG8wDgYDVR0PAQH/BAQDAgO4MB0GA1Ud
JQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAdBgNVHQ4EFgQU65mWMudTWDMs1JjR
aYtoeuu3RWUwHwYDVR0jBBgwFoAUBW2v/wgKxZ4AFmoSn0zOnUymuTowDQYJKoZI
hvcNAQELBQADggIBALlss5WJKauFOv0gomk8yR5o2xweMfNeftQj9NNhjhyy0fkM
I1C6a+pYry2f3gSqc+4haGmhCRVFSoRh8o2A9bsgyKeJ6DfRKeiopiZqBE8XRy+s
LolxWX8v7bOr81rhJgcsvfdgjSxq3ErBSWxB3MFVOq/4vyc96PeZ/74feVfvvgrD
RpE2IRDWygJUMpbtwF64+sE0vRJNwdCwhtA4DX+KQRLfhEhu67Pjc5LvH33kSdl8
J/uj666dWSaqSpAd6qY3EOq9FfAPYRNauzV3M9NHH0BZZPSqZZp0ovJ2PaLHWk/z
fErEPMgc7qlCK9tJ2uDh3RdyshOULx1DFK7xNZ7tdrBSbZvGptS5CUAzAmBN2E06
EnyaWftqsKmSOi9ydz6tngQTuovGi8RPZGdsT03rtrBJ/hDXiM79nlDDd0ofTjb4
o6eRoS+qQZst87SOMWROi0J8ZilrPNz0aBoY4OWjNKZbyqgADlnatkwH+rPM+13f
sDDaNvFG4bFBAFXaaBFMMWet9GVeh9eNrBMF+80p5GmfIhqeXELAijHabuhqBcKD
tlZdSicjsb2h71xVgv8yCoH9zoOkW7SHWGIOXpvPUUiWJz869yBnUOf8cd69IoUT
BWGHKi74uExIdT82A69xYvWsqnRATHXcJoS0j+NNVOWxTI0XdhS7Sbl3xRe2
-----END CERTIFICATE-----
27
testdata/ssl/client/server.key
vendored
Normal file
@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAutagXJGYm9CdWhzOi2nf08mfDMzB84zijvrXWxKn9f/SPBrt
KhbmcIWfkIKySBFwIhuaVXXox//lYlc2FYd3MemxLu3bh9AhQWMeawWl3VVD9L/u
9VQg5cs3CXRkmmWVrcMYqrH0AA/bpzK4jfjBrT0mcDc8X0GRoUMiYTlWO6OUi39L
hAlducFk6fC47kOoMkp6V1fzTUYV/1a++f1LEYWgiUg+hMpkuFpPRFUeVzwejHCC
XGT+0hVUsKF+p52NKeo6dSaAs3kHubL7gKNe66nv7gxTmoiC9hyZk3C4j76UHkTq
5SZfgEMmaq4MzhjLiV2bRCykoO0dxILbQ+CjDwIDAQABAoIBAQCg479V1162Fo/Z
kGLgNLy00LNwv0XpJ5HVE/atC0Stlq0AayN9khjAHqEIPTOu0a1hAaoGG7Hnv9lU
tMrzASNITPfx9IuNyVFJ0EPaXxO0o/8P3NO2WMq3dyYesycKuZ2J8y3jl7gI8Z7x
vMCmKOcG+liGW1pWoMz6NCh/1nMGiN0OAwrY5MuO+K0lGxz2umI9fKTX3BSvd1SK
pvQQK7tRj6Dqntz5j/OTZknFyoV2QNdw5Ng1ImODdgJmazZASLX4B2vETkZoyFrW
gwfGL0Er88WgRvUbFmZMAaLnzDrW9XJssUF34cqITDzh/O9tLVjXj+PvanF6+gp9
P1I0f6GBAoGBAMXfwcLFN+wwaOWlCQQBy/Ngtlp1uinT6kCxJ0e/IzcUQw9TNxY9
heiiZBvkwKShA3emrkYNY2CxGuKGDjqJndMgthVmBNpYWw44cpYqQkIsKd+4wqyb
a1oemNbcEBTweqYepMasSRVYnqtM8+9bPeOn3JVC35rLSm3erf5pXDDPAoGBAPG4
+yeI+s1tXYp1/8etUn3WCJ8cMnQlU2CyuSwWWZ2Ncpp0b6Xm2eEIWWbhp2mzN4q+
F80ivnYBwyxPwXIuEiIoBn0pinyvqxHv+wgZfEFYWPVuSEWG9PsR4K0DYXrgkAJK
9s4Bste3JDXPp8qeBcSnOWY2N+savMuZV9RgUnnBAoGAfitapRKTwepsOGGvCfsL
TLhSDgQbHCifqXhMD5d0oN4ulEr/SRMZm2hQZOjLXS29xEFnxgsrXrAO8HmCARlW
pRSqhaJzXbYQ+VRM3Cs97Gu0l457swu2u9PeqMHRD0j3K41Gi9e3EgFbyuZadDi9
kberExF8+nq9jqj6UMplmkkCgYA5DCoiE371eokoA19BVdNxJVFVk8cIiEKcQLHZ
CNFPkLFqaG5tHgVvyZOn5zumg4hpMyHU5Q1ENnhEffIuq1bWPtIBOguYD7F1A0kg
iTs9BMfB7Kwb1JT+qCJ5UqHv6Q2zrNOAnQADTxK5rG9yL0c3OSwfxk3+K4pBFsiW
s8DjQQKBgHXDhkTgAujyurvYURLRzlAFQJ4hc8Shqpl5xXKj9vh0KmxySZIYBlBf
VoFtZML/aPSwjPVNLDeIp9DCxGDxB5pEY2cBJucovlqTYSdI0qwnhoHEqHGTm2Vk
Fo6kyaAnbb8/N7VRES0wHKc5yzaTD0m6BBl2pLm5saQvpjslP6aw
-----END RSA PRIVATE KEY-----
28
testdata/ssl/server/CA.crt
vendored
Normal file
@ -0,0 +1,28 @@
-----BEGIN CERTIFICATE-----
MIIE2jCCAsKgAwIBAgIBATANBgkqhkiG9w0BAQsFADANMQswCQYDVQQDEwJDQTAe
Fw0yMTA5MjkxMzUzMjZaFw0yMzAzMjkxNDAzMjRaMA0xCzAJBgNVBAMTAkNBMIIC
IjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAy3AfoZtrIsDx+qzJAaNcbSv7
s6+gVhw/RYbAT2nJ8w8uRxhjhhkY+0KI16m8TeuJvSS082RQbVTnw7g3viqmSz+P
rc5okrfpjDTT0ArM63SrYsKd53t3JUT0hX/mOGoetQD3pSQDsg/f/mNA0Ezosb6q
0iO3yIlLDzQ3igMMCBLsPYMYSfIv8+iKOiaWXmjH0sOszNNIvMKXi/u9E1LumjDl
R1zpi05YrWscj1yplBgBVYH5aBxy9V8jU3NR6jPWuAVmOUOjeCS7zSS3Oc31qNpW
t9/PosydAaOfkGVYYXytwHk0Xc7h25SSN6kS/KTxJb6oP9ASGoMXxUk9T0q6xtUZ
gvY8GDGQmUH8+xUazd64jQxEjq3RzIThASgAtG6I1VuH1lWxSRq73cWx4XtDmR4i
Acfv4y4s4LN564zcKjeLGTulJebXdDqnGyZWA6+LqkNMKKR5T9aHNFghSiol34pz
icCe6Z4pt/nkoRTlPv30+cwcqgZF27QP31ozaif/lzxq686T40mdxEneRA0Wpr2P
Zxpj1OdSZ7oqIX6/MFcHR4LLwv2VnLgZ4uBOPVeXBnQ/4LoOsBah09r0fyjvuhSJ
dTqP4+VDBXQG+6l7buU0a1Wl5mehRes6CHMFlq2f0fOvvkW/NoB4RkIOnFdP+g7E
RwefiIR+Y8PEYQPIjc8CAwEAAaNFMEMwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB
/wQIMAYBAf8CAQAwHQYDVR0OBBYEFAVtr/8ICsWeABZqEp9Mzp1Mprk6MA0GCSqG
SIb3DQEBCwUAA4ICAQCdSOYDXmpNdZcX7Vmn05p0szxBM8/PrtL9xwSlsnCWomLj
KE8L1lP9G8qda80RFNp4lEXvF21Iflmc2Xf2wDkQj2//qk678labV2sZapje/JqJ
UCxGbFAVV3V3HGs5lvSgC0mhGZkavGijI5LYgJkQ6C5bxwGA0AUeCmi2DTnWVeYb
LHQyo+Nku/Ko6pVgJ3N7CbxJG6kd7A7i/rZzkXhpz5e8SSWLIDUIRtbytZ/tyU/Z
oUgzDf13pUrt6I40VTFeUQKtaUkmiBNpC3ZhkuO+alGSJeDfa/KSn/CnvkJ398nF
VUmi4ihLSwvC0vSO9VRmYBMwo+JEjLFT9+n5B1uUfzs27snJQ5q40zMKrfTfKYaw
K/sXsVdrIfEjyClM9C4PWhPbSycc4HtVpLPJKCP05l4G+aO7HwfqV+SYbZd3ii+E
yAcoH7UGQw92JCNK2JXhKE1vzZtuzcZNedd1cqYDo/vKCeBrPhR9qVVVOta9gFps
OEWzdLL2YSunMnoFvy34OumeSzzAL5MMughbHfO+fmUwI9vtdKINtRyE9A7j8X3p
H+Mm+avMEErcBbZ7u6LgI2aPfZfQbwoy8fv0VG5JN6bAKAh0n2QBGG99JC8OzI5q
q6Ash0DqwDNzWkM7IQkECeXQ5PASkah2alBg7mewlS8d6R6NzQ3ILzCB6qCRgA==
-----END CERTIFICATE-----
24
testdata/ssl/server/server.crt
vendored
Normal file
@ -0,0 +1,24 @@
-----BEGIN CERTIFICATE-----
MIIEGTCCAgGgAwIBAgIQGIGv8QdtH0oxH27TNeil7TANBgkqhkiG9w0BAQsFADAN
MQswCQYDVQQDEwJDQTAeFw0yMTA5MjkxMzU2NTFaFw0yMzAzMjkxNDAzMjNaMBEx
DzANBgNVBAMTBnNlcnZlcjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB
ALrWoFyRmJvQnVoczotp39PJnwzMwfOM4o7611sSp/X/0jwa7SoW5nCFn5CCskgR
cCIbmlV16Mf/5WJXNhWHdzHpsS7t24fQIUFjHmsFpd1VQ/S/7vVUIOXLNwl0ZJpl
la3DGKqx9AAP26cyuI34wa09JnA3PF9BkaFDImE5VjujlIt/S4QJXbnBZOnwuO5D
qDJKeldX801GFf9Wvvn9SxGFoIlIPoTKZLhaT0RVHlc8Hoxwglxk/tIVVLChfqed
jSnqOnUmgLN5B7my+4CjXuup7+4MU5qIgvYcmZNwuI++lB5E6uUmX4BDJmquDM4Y
y4ldm0QspKDtHcSC20Pgow8CAwEAAaNxMG8wDgYDVR0PAQH/BAQDAgO4MB0GA1Ud
JQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAdBgNVHQ4EFgQU65mWMudTWDMs1JjR
aYtoeuu3RWUwHwYDVR0jBBgwFoAUBW2v/wgKxZ4AFmoSn0zOnUymuTowDQYJKoZI
hvcNAQELBQADggIBALlss5WJKauFOv0gomk8yR5o2xweMfNeftQj9NNhjhyy0fkM
I1C6a+pYry2f3gSqc+4haGmhCRVFSoRh8o2A9bsgyKeJ6DfRKeiopiZqBE8XRy+s
LolxWX8v7bOr81rhJgcsvfdgjSxq3ErBSWxB3MFVOq/4vyc96PeZ/74feVfvvgrD
RpE2IRDWygJUMpbtwF64+sE0vRJNwdCwhtA4DX+KQRLfhEhu67Pjc5LvH33kSdl8
J/uj666dWSaqSpAd6qY3EOq9FfAPYRNauzV3M9NHH0BZZPSqZZp0ovJ2PaLHWk/z
fErEPMgc7qlCK9tJ2uDh3RdyshOULx1DFK7xNZ7tdrBSbZvGptS5CUAzAmBN2E06
EnyaWftqsKmSOi9ydz6tngQTuovGi8RPZGdsT03rtrBJ/hDXiM79nlDDd0ofTjb4
o6eRoS+qQZst87SOMWROi0J8ZilrPNz0aBoY4OWjNKZbyqgADlnatkwH+rPM+13f
sDDaNvFG4bFBAFXaaBFMMWet9GVeh9eNrBMF+80p5GmfIhqeXELAijHabuhqBcKD
tlZdSicjsb2h71xVgv8yCoH9zoOkW7SHWGIOXpvPUUiWJz869yBnUOf8cd69IoUT
BWGHKi74uExIdT82A69xYvWsqnRATHXcJoS0j+NNVOWxTI0XdhS7Sbl3xRe2
-----END CERTIFICATE-----
27
testdata/ssl/server/server.key
vendored
Normal file
@ -0,0 +1,27 @@
-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAutagXJGYm9CdWhzOi2nf08mfDMzB84zijvrXWxKn9f/SPBrt
KhbmcIWfkIKySBFwIhuaVXXox//lYlc2FYd3MemxLu3bh9AhQWMeawWl3VVD9L/u
9VQg5cs3CXRkmmWVrcMYqrH0AA/bpzK4jfjBrT0mcDc8X0GRoUMiYTlWO6OUi39L
hAlducFk6fC47kOoMkp6V1fzTUYV/1a++f1LEYWgiUg+hMpkuFpPRFUeVzwejHCC
XGT+0hVUsKF+p52NKeo6dSaAs3kHubL7gKNe66nv7gxTmoiC9hyZk3C4j76UHkTq
5SZfgEMmaq4MzhjLiV2bRCykoO0dxILbQ+CjDwIDAQABAoIBAQCg479V1162Fo/Z
kGLgNLy00LNwv0XpJ5HVE/atC0Stlq0AayN9khjAHqEIPTOu0a1hAaoGG7Hnv9lU
tMrzASNITPfx9IuNyVFJ0EPaXxO0o/8P3NO2WMq3dyYesycKuZ2J8y3jl7gI8Z7x
vMCmKOcG+liGW1pWoMz6NCh/1nMGiN0OAwrY5MuO+K0lGxz2umI9fKTX3BSvd1SK
pvQQK7tRj6Dqntz5j/OTZknFyoV2QNdw5Ng1ImODdgJmazZASLX4B2vETkZoyFrW
gwfGL0Er88WgRvUbFmZMAaLnzDrW9XJssUF34cqITDzh/O9tLVjXj+PvanF6+gp9
P1I0f6GBAoGBAMXfwcLFN+wwaOWlCQQBy/Ngtlp1uinT6kCxJ0e/IzcUQw9TNxY9
heiiZBvkwKShA3emrkYNY2CxGuKGDjqJndMgthVmBNpYWw44cpYqQkIsKd+4wqyb
a1oemNbcEBTweqYepMasSRVYnqtM8+9bPeOn3JVC35rLSm3erf5pXDDPAoGBAPG4
+yeI+s1tXYp1/8etUn3WCJ8cMnQlU2CyuSwWWZ2Ncpp0b6Xm2eEIWWbhp2mzN4q+
F80ivnYBwyxPwXIuEiIoBn0pinyvqxHv+wgZfEFYWPVuSEWG9PsR4K0DYXrgkAJK
9s4Bste3JDXPp8qeBcSnOWY2N+savMuZV9RgUnnBAoGAfitapRKTwepsOGGvCfsL
TLhSDgQbHCifqXhMD5d0oN4ulEr/SRMZm2hQZOjLXS29xEFnxgsrXrAO8HmCARlW
pRSqhaJzXbYQ+VRM3Cs97Gu0l457swu2u9PeqMHRD0j3K41Gi9e3EgFbyuZadDi9
kberExF8+nq9jqj6UMplmkkCgYA5DCoiE371eokoA19BVdNxJVFVk8cIiEKcQLHZ
CNFPkLFqaG5tHgVvyZOn5zumg4hpMyHU5Q1ENnhEffIuq1bWPtIBOguYD7F1A0kg
iTs9BMfB7Kwb1JT+qCJ5UqHv6Q2zrNOAnQADTxK5rG9yL0c3OSwfxk3+K4pBFsiW
s8DjQQKBgHXDhkTgAujyurvYURLRzlAFQJ4hc8Shqpl5xXKj9vh0KmxySZIYBlBf
VoFtZML/aPSwjPVNLDeIp9DCxGDxB5pEY2cBJucovlqTYSdI0qwnhoHEqHGTm2Vk
Fo6kyaAnbb8/N7VRES0wHKc5yzaTD0m6BBl2pLm5saQvpjslP6aw
-----END RSA PRIVATE KEY-----
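The six files above are the TLS test fixtures: a self-signed CA plus a matching server certificate and key, mirrored under testdata/ssl/client and testdata/ssl/server. A minimal sketch of wiring them into Go TLS configs for a local test follows; the relative paths, the assumption that the server certificate's common name is "server", and the standalone main function are illustrative, not exporter code from this commit.

package main

import (
	"crypto/tls"
	"crypto/x509"
	"log"
	"os"
)

func main() {
	// Server side: present the test server certificate and key.
	cert, err := tls.LoadX509KeyPair("testdata/ssl/server/server.crt", "testdata/ssl/server/server.key")
	if err != nil {
		log.Fatal(err)
	}
	serverCfg := &tls.Config{Certificates: []tls.Certificate{cert}}

	// Client side: trust only the test CA shipped alongside the client fixtures.
	caPEM, err := os.ReadFile("testdata/ssl/client/CA.crt")
	if err != nil {
		log.Fatal(err)
	}
	pool := x509.NewCertPool()
	if !pool.AppendCertsFromPEM(caPEM) {
		log.Fatal("could not parse testdata/ssl/client/CA.crt")
	}
	clientCfg := &tls.Config{RootCAs: pool, ServerName: "server"} // CN of the test server cert (assumed)

	log.Printf("server presents %d certificate(s); client verifies against %q", len(serverCfg.Certificates), clientCfg.ServerName)
}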
187
tools/go.mod
Normal file
@ -0,0 +1,187 @@
module github.com/percona/postgres_exporter/tools

go 1.17

require (
	github.com/golangci/golangci-lint v1.43.0
	github.com/prometheus/promu v0.12.0
	github.com/reviewdog/reviewdog v0.11.0
	honnef.co/go/tools v0.2.1
)

require (
	4d63.com/gochecknoglobals v0.1.0 // indirect
	cloud.google.com/go v0.93.3 // indirect
	cloud.google.com/go/datastore v1.1.0 // indirect
	github.com/Antonboom/errname v0.1.5 // indirect
	github.com/Antonboom/nilnil v0.1.0 // indirect
	github.com/BurntSushi/toml v0.4.1 // indirect
	github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 // indirect
	github.com/Masterminds/semver v1.5.0 // indirect
	github.com/OpenPeeDeeP/depguard v1.0.1 // indirect
	github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect
	github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d // indirect
	github.com/alexkohler/prealloc v1.0.0 // indirect
	github.com/ashanbrown/forbidigo v1.2.0 // indirect
	github.com/ashanbrown/makezero v0.0.0-20210520155254-b6261585ddde // indirect
	github.com/beorn7/perks v1.0.1 // indirect
	github.com/bkielbasa/cyclop v1.2.0 // indirect
	github.com/blizzy78/varnamelen v0.3.0 // indirect
	github.com/bombsimon/wsl/v3 v3.3.0 // indirect
	github.com/bradleyfalzon/ghinstallation v1.1.1 // indirect
	github.com/breml/bidichk v0.1.1 // indirect
	github.com/butuzov/ireturn v0.1.1 // indirect
	github.com/cespare/xxhash/v2 v2.1.1 // indirect
	github.com/charithe/durationcheck v0.0.9 // indirect
	github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af // indirect
	github.com/daixiang0/gci v0.2.9 // indirect
	github.com/davecgh/go-spew v1.1.1 // indirect
	github.com/denis-tingajkin/go-header v0.4.2 // indirect
	github.com/dgrijalva/jwt-go v3.2.0+incompatible // indirect
	github.com/esimonov/ifshort v1.0.3 // indirect
	github.com/ettle/strcase v0.1.1 // indirect
	github.com/fatih/color v1.13.0 // indirect
	github.com/fatih/structtag v1.2.0 // indirect
	github.com/fsnotify/fsnotify v1.5.1 // indirect
	github.com/fzipp/gocyclo v0.3.1 // indirect
	github.com/go-critic/go-critic v0.6.1 // indirect
	github.com/go-toolsmith/astcast v1.0.0 // indirect
	github.com/go-toolsmith/astcopy v1.0.0 // indirect
	github.com/go-toolsmith/astequal v1.0.1 // indirect
	github.com/go-toolsmith/astfmt v1.0.0 // indirect
	github.com/go-toolsmith/astp v1.0.0 // indirect
	github.com/go-toolsmith/strparse v1.0.0 // indirect
	github.com/go-toolsmith/typep v1.0.2 // indirect
	github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b // indirect
	github.com/gobwas/glob v0.2.3 // indirect
	github.com/gofrs/flock v0.8.1 // indirect
	github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
	github.com/golang/protobuf v1.5.2 // indirect
	github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 // indirect
	github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a // indirect
	github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 // indirect
	github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a // indirect
	github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 // indirect
	github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca // indirect
	github.com/golangci/misspell v0.3.5 // indirect
	github.com/golangci/revgrep v0.0.0-20210930125155-c22e5001d4f2 // indirect
	github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 // indirect
	github.com/google/go-cmp v0.5.6 // indirect
	github.com/google/go-github/v25 v25.1.3 // indirect
	github.com/google/go-github/v29 v29.0.2 // indirect
	github.com/google/go-github/v32 v32.1.0 // indirect
	github.com/google/go-querystring v1.0.0 // indirect
	github.com/googleapis/gax-go/v2 v2.1.0 // indirect
	github.com/gordonklaus/ineffassign v0.0.0-20210225214923-2e10b2664254 // indirect
	github.com/gostaticanalysis/analysisutil v0.7.1 // indirect
	github.com/gostaticanalysis/comment v1.4.2 // indirect
	github.com/gostaticanalysis/forcetypeassert v0.0.0-20200621232751-01d4955beaa5 // indirect
	github.com/gostaticanalysis/nilerr v0.1.1 // indirect
	github.com/hashicorp/errwrap v1.0.0 // indirect
	github.com/hashicorp/go-cleanhttp v0.5.1 // indirect
	github.com/hashicorp/go-multierror v1.1.1 // indirect
	github.com/hashicorp/go-retryablehttp v0.6.4 // indirect
	github.com/hashicorp/hcl v1.0.0 // indirect
	github.com/haya14busa/go-actions-toolkit v0.0.0-20200105081403-ca0307860f01 // indirect
	github.com/inconshreveable/mousetrap v1.0.0 // indirect
	github.com/jgautheron/goconst v1.5.1 // indirect
	github.com/jingyugao/rowserrcheck v1.1.1 // indirect
	github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af // indirect
	github.com/julz/importas v0.0.0-20210419104244-841f0c0fe66d // indirect
	github.com/kisielk/errcheck v1.6.0 // indirect
	github.com/kisielk/gotool v1.0.0 // indirect
	github.com/kulti/thelper v0.4.0 // indirect
	github.com/kunwardeep/paralleltest v1.0.3 // indirect
	github.com/kyoh86/exportloopref v0.1.8 // indirect
	github.com/ldez/gomoddirectives v0.2.2 // indirect
	github.com/ldez/tagliatelle v0.2.0 // indirect
	github.com/magiconair/properties v1.8.5 // indirect
	github.com/maratori/testpackage v1.0.1 // indirect
	github.com/matoous/godox v0.0.0-20210227103229-6504466cf951 // indirect
	github.com/mattn/go-colorable v0.1.11 // indirect
	github.com/mattn/go-isatty v0.0.14 // indirect
	github.com/mattn/go-runewidth v0.0.9 // indirect
	github.com/mattn/go-shellwords v1.0.10 // indirect
	github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
	github.com/mbilski/exhaustivestruct v1.2.0 // indirect
	github.com/mgechev/dots v0.0.0-20210922191527-e955255bf517 // indirect
	github.com/mgechev/revive v1.1.2 // indirect
	github.com/mitchellh/go-homedir v1.1.0 // indirect
	github.com/mitchellh/mapstructure v1.4.2 // indirect
	github.com/moricho/tparallel v0.2.1 // indirect
	github.com/nakabonne/nestif v0.3.1 // indirect
	github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 // indirect
	github.com/nishanths/exhaustive v0.2.3 // indirect
	github.com/nishanths/predeclared v0.2.1 // indirect
	github.com/olekukonko/tablewriter v0.0.5 // indirect
	github.com/pelletier/go-toml v1.9.4 // indirect
	github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d // indirect
	github.com/pkg/errors v0.9.1 // indirect
	github.com/pmezard/go-difflib v1.0.0 // indirect
	github.com/polyfloyd/go-errorlint v0.0.0-20210722154253-910bb7978349 // indirect
	github.com/prometheus/client_golang v1.7.1 // indirect
	github.com/prometheus/client_model v0.2.0 // indirect
	github.com/prometheus/common v0.19.0 // indirect
	github.com/prometheus/procfs v0.6.0 // indirect
	github.com/quasilyte/go-ruleguard v0.3.13 // indirect
	github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 // indirect
	github.com/reviewdog/errorformat v0.0.0-20201020160743-a656ed371170 // indirect
	github.com/reviewdog/go-bitbucket v0.0.0-20201024094602-708c3f6a7de0 // indirect
	github.com/ryancurrah/gomodguard v1.2.3 // indirect
	github.com/ryanrolds/sqlclosecheck v0.3.0 // indirect
	github.com/sanposhiho/wastedassign/v2 v2.0.6 // indirect
	github.com/securego/gosec/v2 v2.9.1 // indirect
	github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c // indirect
	github.com/sirupsen/logrus v1.8.1 // indirect
	github.com/sivchari/tenv v1.4.7 // indirect
	github.com/sonatard/noctx v0.0.1 // indirect
	github.com/sourcegraph/go-diff v0.6.1 // indirect
	github.com/spf13/afero v1.6.0 // indirect
	github.com/spf13/cast v1.4.1 // indirect
	github.com/spf13/cobra v1.2.1 // indirect
	github.com/spf13/jwalterweatherman v1.1.0 // indirect
	github.com/spf13/pflag v1.0.5 // indirect
	github.com/spf13/viper v1.9.0 // indirect
	github.com/ssgreg/nlreturn/v2 v2.2.1 // indirect
	github.com/stretchr/objx v0.1.1 // indirect
	github.com/stretchr/testify v1.7.0 // indirect
	github.com/subosito/gotenv v1.2.0 // indirect
	github.com/sylvia7788/contextcheck v1.0.4 // indirect
	github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b // indirect
	github.com/tetafro/godot v1.4.11 // indirect
	github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94 // indirect
	github.com/tomarrell/wrapcheck/v2 v2.4.0 // indirect
	github.com/tommy-muehle/go-mnd/v2 v2.4.0 // indirect
	github.com/ultraware/funlen v0.0.3 // indirect
	github.com/ultraware/whitespace v0.0.4 // indirect
	github.com/uudashr/gocognit v1.0.5 // indirect
	github.com/vvakame/sdlog v0.0.0-20200409072131-7c0d359efddc // indirect
	github.com/xanzy/go-gitlab v0.38.2 // indirect
	github.com/yeya24/promlinter v0.1.0 // indirect
	go.opencensus.io v0.23.0 // indirect
	go.uber.org/atomic v1.7.0 // indirect
	golang.org/x/build v0.0.0-20200616162219-07bebbe343e9 // indirect
	golang.org/x/crypto v0.0.0-20210921155107-089bfa567519 // indirect
	golang.org/x/mod v0.5.0 // indirect
	golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d // indirect
	golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f // indirect
	golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
	golang.org/x/sys v0.0.0-20211013075003-97ac67df715c // indirect
	golang.org/x/text v0.3.7 // indirect
	golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1 // indirect
	golang.org/x/tools v0.1.7 // indirect
	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
	google.golang.org/api v0.56.0 // indirect
	google.golang.org/appengine v1.6.7 // indirect
	google.golang.org/genproto v0.0.0-20210828152312-66f60bf46e71 // indirect
	google.golang.org/grpc v1.40.0 // indirect
	google.golang.org/protobuf v1.27.1 // indirect
	gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect
	gopkg.in/ini.v1 v1.63.2 // indirect
	gopkg.in/yaml.v2 v2.4.0 // indirect
	gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
	mvdan.cc/gofumpt v0.1.1 // indirect
	mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed // indirect
	mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b // indirect
	mvdan.cc/unparam v0.0.0-20210104141923-aac4ce9116a7 // indirect
)
1499
tools/go.sum
Normal file
File diff suppressed because it is too large
13
tools/tools.go
Normal file
@ -0,0 +1,13 @@
// postgres_exporter

//go:build tools
// +build tools

package tools

import (
	_ "github.com/golangci/golangci-lint/cmd/golangci-lint"
	_ "github.com/prometheus/promu"
	_ "github.com/reviewdog/reviewdog/cmd/reviewdog"
	_ "honnef.co/go/tools/cmd/staticcheck"
)
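The blank imports in tools.go follow the standard Go "tools package" pattern: the file is excluded from normal builds by the tools build tag, but it forces the tools module's go.mod and go.sum to pin the versions of the build and lint tooling listed above. A hedged sketch of how such pinned tools are commonly installed from inside the module follows; the file name gen.go and the go:generate directives are illustrative, not part of this commit.

// gen.go - hypothetical companion file inside the tools module (not in this commit).
// Running `go generate ./...` from the tools directory installs the pinned
// binaries at the versions recorded in tools/go.mod.
//
//go:generate go install github.com/golangci/golangci-lint/cmd/golangci-lint
//go:generate go install github.com/prometheus/promu
//go:generate go install github.com/reviewdog/reviewdog/cmd/reviewdog
//go:generate go install honnef.co/go/tools/cmd/staticcheck
package tools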