// Copyright 2015 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"math"
	"net/http"
	"net/url"
	"os"
	"path/filepath"
	"reflect"
	"strconv"
	"strings"
	"time"

	"github.com/google/pprof/profile"
	"github.com/pkg/errors"
	"github.com/prometheus/client_golang/api"
	v1 "github.com/prometheus/client_golang/api/prometheus/v1"
	"github.com/prometheus/client_golang/prometheus/promhttp"
	"github.com/prometheus/client_golang/prometheus/testutil/promlint"
	config_util "github.com/prometheus/common/config"
	"github.com/prometheus/common/model"
	"github.com/prometheus/common/version"
	"gopkg.in/alecthomas/kingpin.v2"

	"github.com/prometheus/prometheus/config"
	"github.com/prometheus/prometheus/discovery/file"
	_ "github.com/prometheus/prometheus/discovery/install" // Register service discovery implementations.
	"github.com/prometheus/prometheus/discovery/kubernetes"
	"github.com/prometheus/prometheus/pkg/rulefmt"
)

func main() {
	app := kingpin.New(filepath.Base(os.Args[0]), "Tooling for the Prometheus monitoring system.")
	app.Version(version.Print("promtool"))
	app.HelpFlag.Short('h')

	checkCmd := app.Command("check", "Check the resources for validity.")

	checkConfigCmd := checkCmd.Command("config", "Check if the config files are valid or not.")
	configFiles := checkConfigCmd.Arg(
		"config-files",
		"The config files to check.",
	).Required().ExistingFiles()

	checkRulesCmd := checkCmd.Command("rules", "Check if the rule files are valid or not.")
	ruleFiles := checkRulesCmd.Arg(
		"rule-files",
		"The rule files to check.",
	).Required().ExistingFiles()

	checkMetricsCmd := checkCmd.Command("metrics", checkMetricsUsage)

	queryCmd := app.Command("query", "Run query against a Prometheus server.")
	queryCmdFmt := queryCmd.Flag("format", "Output format of the query.").Short('o').Default("promql").Enum("promql", "json")

	queryInstantCmd := queryCmd.Command("instant", "Run instant query.")
	queryInstantServer := queryInstantCmd.Arg("server", "Prometheus server to query.").Required().String()
	queryInstantExpr := queryInstantCmd.Arg("expr", "PromQL query expression.").Required().String()
	queryInstantTime := queryInstantCmd.Flag("time", "Query evaluation time (RFC3339 or Unix timestamp).").String()

	queryRangeCmd := queryCmd.Command("range", "Run range query.")
	queryRangeServer := queryRangeCmd.Arg("server", "Prometheus server to query.").Required().String()
	queryRangeExpr := queryRangeCmd.Arg("expr", "PromQL query expression.").Required().String()
	queryRangeHeaders := queryRangeCmd.Flag("header", "Extra headers to send to server.").StringMap()
	queryRangeBegin := queryRangeCmd.Flag("start", "Query range start time (RFC3339 or Unix timestamp).").String()
	queryRangeEnd := queryRangeCmd.Flag("end", "Query range end time (RFC3339 or Unix timestamp).").String()
	queryRangeStep := queryRangeCmd.Flag("step", "Query step size (duration).").Duration()

	querySeriesCmd := queryCmd.Command("series", "Run series query.")
	querySeriesServer := querySeriesCmd.Arg("server", "Prometheus server to query.").Required().URL()
	querySeriesMatch := querySeriesCmd.Flag("match", "Series selector. Can be specified multiple times.").Required().Strings()
	querySeriesBegin := querySeriesCmd.Flag("start", "Start time (RFC3339 or Unix timestamp).").String()
	querySeriesEnd := querySeriesCmd.Flag("end", "End time (RFC3339 or Unix timestamp).").String()

	debugCmd := app.Command("debug", "Fetch debug information.")
	debugPprofCmd := debugCmd.Command("pprof", "Fetch profiling debug information.")
	debugPprofServer := debugPprofCmd.Arg("server", "Prometheus server to get pprof files from.").Required().String()
	debugMetricsCmd := debugCmd.Command("metrics", "Fetch metrics debug information.")
	debugMetricsServer := debugMetricsCmd.Arg("server", "Prometheus server to get metrics from.").Required().String()
	debugAllCmd := debugCmd.Command("all", "Fetch all debug information.")
	debugAllServer := debugAllCmd.Arg("server", "Prometheus server to get all debug information from.").Required().String()

	queryLabelsCmd := queryCmd.Command("labels", "Run labels query.")
	queryLabelsServer := queryLabelsCmd.Arg("server", "Prometheus server to query.").Required().URL()
	queryLabelsName := queryLabelsCmd.Arg("name", "Label name to provide label values for.").Required().String()
	queryLabelsBegin := queryLabelsCmd.Flag("start", "Start time (RFC3339 or Unix timestamp).").String()
	queryLabelsEnd := queryLabelsCmd.Flag("end", "End time (RFC3339 or Unix timestamp).").String()

	testCmd := app.Command("test", "Unit testing.")
	testRulesCmd := testCmd.Command("rules", "Unit tests for rules.")
	testRulesFiles := testRulesCmd.Arg(
		"test-rule-file",
		"The unit test file.",
	).Required().ExistingFiles()

	defaultDBPath := "data/"
	tsdbCmd := app.Command("tsdb", "Run tsdb commands.")

	tsdbBenchCmd := tsdbCmd.Command("bench", "Run benchmarks.")
	tsdbBenchWriteCmd := tsdbBenchCmd.Command("write", "Run a write performance benchmark.")
	benchWriteOutPath := tsdbBenchWriteCmd.Flag("out", "Set the output path.").Default("benchout").String()
	benchWriteNumMetrics := tsdbBenchWriteCmd.Flag("metrics", "Number of metrics to read.").Default("10000").Int()
	benchSamplesFile := tsdbBenchWriteCmd.Arg("file", "Input file with samples data, default is ("+filepath.Join("..", "..", "tsdb", "testdata", "20kseries.json")+").").Default(filepath.Join("..", "..", "tsdb", "testdata", "20kseries.json")).String()

	tsdbAnalyzeCmd := tsdbCmd.Command("analyze", "Analyze churn, label pair cardinality.")
	analyzePath := tsdbAnalyzeCmd.Arg("db path", "Database path (default is "+defaultDBPath+").").Default(defaultDBPath).String()
	analyzeBlockID := tsdbAnalyzeCmd.Arg("block id", "Block to analyze (default is the last block).").String()
	analyzeLimit := tsdbAnalyzeCmd.Flag("limit", "How many items to show in each list.").Default("20").Int()

	tsdbListCmd := tsdbCmd.Command("list", "List tsdb blocks.")
	listHumanReadable := tsdbListCmd.Flag("human-readable", "Print human readable values.").Short('r').Bool()
	listPath := tsdbListCmd.Arg("db path", "Database path (default is "+defaultDBPath+").").Default(defaultDBPath).String()

	tsdbDumpCmd := tsdbCmd.Command("dump", "Dump samples from a TSDB.")
	dumpPath := tsdbDumpCmd.Arg("db path", "Database path (default is "+defaultDBPath+").").Default(defaultDBPath).String()
	dumpMinTime := tsdbDumpCmd.Flag("min-time", "Minimum timestamp to dump.").Default(strconv.FormatInt(math.MinInt64, 10)).Int64()
	dumpMaxTime := tsdbDumpCmd.Flag("max-time", "Maximum timestamp to dump.").Default(strconv.FormatInt(math.MaxInt64, 10)).Int64()

	parsedCmd := kingpin.MustParse(app.Parse(os.Args[1:]))

	var p printer
	switch *queryCmdFmt {
	case "json":
		p = &jsonPrinter{}
	case "promql":
		p = &promqlPrinter{}
	}

	switch parsedCmd {
	case checkConfigCmd.FullCommand():
		os.Exit(CheckConfig(*configFiles...))

	case checkRulesCmd.FullCommand():
		os.Exit(CheckRules(*ruleFiles...))

	case checkMetricsCmd.FullCommand():
		os.Exit(CheckMetrics())

	case queryInstantCmd.FullCommand():
		os.Exit(QueryInstant(*queryInstantServer, *queryInstantExpr, *queryInstantTime, p))

	case queryRangeCmd.FullCommand():
		os.Exit(QueryRange(*queryRangeServer, *queryRangeHeaders, *queryRangeExpr, *queryRangeBegin, *queryRangeEnd, *queryRangeStep, p))

	case querySeriesCmd.FullCommand():
		os.Exit(QuerySeries(*querySeriesServer, *querySeriesMatch, *querySeriesBegin, *querySeriesEnd, p))

	case debugPprofCmd.FullCommand():
		os.Exit(debugPprof(*debugPprofServer))

	case debugMetricsCmd.FullCommand():
		os.Exit(debugMetrics(*debugMetricsServer))

	case debugAllCmd.FullCommand():
		os.Exit(debugAll(*debugAllServer))

	case queryLabelsCmd.FullCommand():
		os.Exit(QueryLabels(*queryLabelsServer, *queryLabelsName, *queryLabelsBegin, *queryLabelsEnd, p))

	case testRulesCmd.FullCommand():
		os.Exit(RulesUnitTest(*testRulesFiles...))

	case tsdbBenchWriteCmd.FullCommand():
		os.Exit(checkErr(benchmarkWrite(*benchWriteOutPath, *benchSamplesFile, *benchWriteNumMetrics)))

	case tsdbAnalyzeCmd.FullCommand():
		os.Exit(checkErr(analyzeBlock(*analyzePath, *analyzeBlockID, *analyzeLimit)))

	case tsdbListCmd.FullCommand():
		os.Exit(checkErr(listBlocks(*listPath, *listHumanReadable)))

	case tsdbDumpCmd.FullCommand():
		os.Exit(checkErr(dumpSamples(*dumpPath, *dumpMinTime, *dumpMaxTime)))
	}
}

// CheckConfig validates configuration files.
func CheckConfig(files ...string) int {
	failed := false

	for _, f := range files {
		ruleFiles, err := checkConfig(f)
		if err != nil {
			fmt.Fprintln(os.Stderr, " FAILED:", err)
			failed = true
		} else {
			fmt.Printf(" SUCCESS: %d rule files found\n", len(ruleFiles))
		}
		fmt.Println()

		for _, rf := range ruleFiles {
			if n, errs := checkRules(rf); len(errs) > 0 {
				fmt.Fprintln(os.Stderr, " FAILED:")
				for _, err := range errs {
					fmt.Fprintln(os.Stderr, " ", err)
				}
				failed = true
			} else {
				fmt.Printf(" SUCCESS: %d rules found\n", n)
			}
			fmt.Println()
		}
	}
	if failed {
		return 1
	}
	return 0
}

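// checkFileExists returns nil if fn is empty, otherwise the error from os.Stat.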
func checkFileExists(fn string) error {
	// Nothing set, nothing to error on.
	if fn == "" {
		return nil
	}
	_, err := os.Stat(fn)
	return err
}

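// checkConfig loads and validates a single configuration file and returns the rule files it references.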
func checkConfig(filename string) ([]string, error) {
	fmt.Println("Checking", filename)

	cfg, err := config.LoadFile(filename)
	if err != nil {
		return nil, err
	}

	var ruleFiles []string
	for _, rf := range cfg.RuleFiles {
		rfs, err := filepath.Glob(rf)
		if err != nil {
			return nil, err
		}
		// If an explicit file was given, error if it is not accessible.
		if !strings.Contains(rf, "*") {
			if len(rfs) == 0 {
				return nil, errors.Errorf("%q does not point to an existing file", rf)
			}
			if err := checkFileExists(rfs[0]); err != nil {
				return nil, errors.Wrapf(err, "error checking rule file %q", rfs[0])
			}
		}
		ruleFiles = append(ruleFiles, rfs...)
	}

	for _, scfg := range cfg.ScrapeConfigs {
		if err := checkFileExists(scfg.HTTPClientConfig.BearerTokenFile); err != nil {
			return nil, errors.Wrapf(err, "error checking bearer token file %q", scfg.HTTPClientConfig.BearerTokenFile)
		}

		if err := checkTLSConfig(scfg.HTTPClientConfig.TLSConfig); err != nil {
			return nil, err
		}

		for _, c := range scfg.ServiceDiscoveryConfigs {
			switch c := c.(type) {
			case *kubernetes.SDConfig:
				if err := checkTLSConfig(c.HTTPClientConfig.TLSConfig); err != nil {
					return nil, err
				}
			case *file.SDConfig:
				for _, file := range c.Files {
					files, err := filepath.Glob(file)
					if err != nil {
						return nil, err
					}
					if len(files) != 0 {
						// There was at least one match for the glob and we can assume checkFileExists
						// for all matches would pass, we can continue the loop.
						continue
					}
					fmt.Printf(" WARNING: file %q for file_sd in scrape job %q does not exist\n", file, scfg.JobName)
				}
			}
		}
	}

	return ruleFiles, nil
}

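// checkTLSConfig verifies that the client certificate and key files referenced by a TLS configuration exist and are specified together.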
func checkTLSConfig(tlsConfig config_util.TLSConfig) error {
	if err := checkFileExists(tlsConfig.CertFile); err != nil {
		return errors.Wrapf(err, "error checking client cert file %q", tlsConfig.CertFile)
	}
	if err := checkFileExists(tlsConfig.KeyFile); err != nil {
		return errors.Wrapf(err, "error checking client key file %q", tlsConfig.KeyFile)
	}

	if len(tlsConfig.CertFile) > 0 && len(tlsConfig.KeyFile) == 0 {
		return errors.Errorf("client cert file %q specified without client key file", tlsConfig.CertFile)
	}
	if len(tlsConfig.KeyFile) > 0 && len(tlsConfig.CertFile) == 0 {
		return errors.Errorf("client key file %q specified without client cert file", tlsConfig.KeyFile)
	}

	return nil
}

// CheckRules validates rule files.
func CheckRules(files ...string) int {
	failed := false

	for _, f := range files {
		if n, errs := checkRules(f); errs != nil {
			fmt.Fprintln(os.Stderr, " FAILED:")
			for _, e := range errs {
				fmt.Fprintln(os.Stderr, e.Error())
			}
			failed = true
		} else {
			fmt.Printf(" SUCCESS: %d rules found\n", n)
		}
		fmt.Println()
	}
	if failed {
		return 1
	}
	return 0
}

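// checkRules parses a single rule file, reports duplicate rules, and returns the number of rules found along with any parse errors.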
func checkRules(filename string) (int, []error) {
	fmt.Println("Checking", filename)

	rgs, errs := rulefmt.ParseFile(filename)
	if errs != nil {
		return 0, errs
	}

	numRules := 0
	for _, rg := range rgs.Groups {
		numRules += len(rg.Rules)
	}

	dRules := checkDuplicates(rgs.Groups)
	if len(dRules) != 0 {
		fmt.Printf("%d duplicate rule(s) found.\n", len(dRules))
		for _, n := range dRules {
			fmt.Printf("Metric: %s\nLabel(s):\n", n.metric)
			for i, l := range n.label {
				fmt.Printf("\t%s: %s\n", i, l)
			}
		}
		fmt.Println("Might cause inconsistency while recording expressions.")
	}

	return numRules, nil
}

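// compareRuleType identifies a rule by its metric (or alert) name and labels for duplicate detection.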
type compareRuleType struct {
	metric string
	label  map[string]string
}

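// checkDuplicates returns rules that share both their metric (or alert) name and their label set with an earlier rule in the same group.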
func checkDuplicates(groups []rulefmt.RuleGroup) []compareRuleType {
	var duplicates []compareRuleType

	for _, group := range groups {
		for index, rule := range group.Rules {
			inst := compareRuleType{
				metric: ruleMetric(rule),
				label:  rule.Labels,
			}
			for i := 0; i < index; i++ {
				t := compareRuleType{
					metric: ruleMetric(group.Rules[i]),
					label:  group.Rules[i].Labels,
				}
				if reflect.DeepEqual(t, inst) {
					duplicates = append(duplicates, t)
				}
			}
		}
	}
	return duplicates
}

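// ruleMetric returns the alert name for alerting rules and the record name for recording rules.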
func ruleMetric(rule rulefmt.RuleNode) string {
	if rule.Alert.Value != "" {
		return rule.Alert.Value
	}
	return rule.Record.Value
}

var checkMetricsUsage = strings.TrimSpace(`
Pass Prometheus metrics over stdin to lint them for consistency and correctness.

examples:

$ cat metrics.prom | promtool check metrics

$ curl -s http://localhost:9090/metrics | promtool check metrics
`)

// CheckMetrics performs a linting pass on input metrics.
func CheckMetrics() int {
	l := promlint.New(os.Stdin)
	problems, err := l.Lint()
	if err != nil {
		fmt.Fprintln(os.Stderr, "error while linting:", err)
		return 1
	}

	for _, p := range problems {
		fmt.Fprintln(os.Stderr, p.Metric, p.Text)
	}

	if len(problems) > 0 {
		return 3
	}

	return 0
}

// QueryInstant performs an instant query against a Prometheus server.
func QueryInstant(url, query, evalTime string, p printer) int {
	config := api.Config{
		Address: url,
	}

	// Create new client.
	c, err := api.NewClient(config)
	if err != nil {
		fmt.Fprintln(os.Stderr, "error creating API client:", err)
		return 1
	}

	eTime := time.Now()
	if evalTime != "" {
		eTime, err = parseTime(evalTime)
		if err != nil {
			fmt.Fprintln(os.Stderr, "error parsing evaluation time:", err)
			return 1
		}
	}

	// Run query against client.
	api := v1.NewAPI(c)

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	val, _, err := api.Query(ctx, query, eTime) // Ignoring warnings for now.
	cancel()
	if err != nil {
		fmt.Fprintln(os.Stderr, "query error:", err)
		return 1
	}

	p.printValue(val)

	return 0
}

// QueryRange performs a range query against a Prometheus server.
func QueryRange(url string, headers map[string]string, query, start, end string, step time.Duration, p printer) int {
	config := api.Config{
		Address: url,
	}

	if len(headers) > 0 {
		config.RoundTripper = promhttp.RoundTripperFunc(func(req *http.Request) (*http.Response, error) {
			for key, value := range headers {
				req.Header.Add(key, value)
			}
			return http.DefaultTransport.RoundTrip(req)
		})
	}

	// Create new client.
	c, err := api.NewClient(config)
	if err != nil {
		fmt.Fprintln(os.Stderr, "error creating API client:", err)
		return 1
	}

	var stime, etime time.Time

	if end == "" {
		etime = time.Now()
	} else {
		etime, err = parseTime(end)
		if err != nil {
			fmt.Fprintln(os.Stderr, "error parsing end time:", err)
			return 1
		}
	}

	if start == "" {
		stime = etime.Add(-5 * time.Minute)
	} else {
		stime, err = parseTime(start)
		if err != nil {
			fmt.Fprintln(os.Stderr, "error parsing start time:", err)
			return 1
		}
	}

	if !stime.Before(etime) {
		fmt.Fprintln(os.Stderr, "start time is not before end time")
		return 1
	}

	if step == 0 {
		resolution := math.Max(math.Floor(etime.Sub(stime).Seconds()/250), 1)
		// Convert seconds to nanoseconds such that time.Duration parses correctly.
		step = time.Duration(resolution) * time.Second
	}

	// Run query against client.
	api := v1.NewAPI(c)
	r := v1.Range{Start: stime, End: etime, Step: step}
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	val, _, err := api.QueryRange(ctx, query, r) // Ignoring warnings for now.
	cancel()

	if err != nil {
		fmt.Fprintln(os.Stderr, "query error:", err)
		return 1
	}

	p.printValue(val)
	return 0
}

// QuerySeries queries for a series against a Prometheus server.
func QuerySeries(url *url.URL, matchers []string, start, end string, p printer) int {
	config := api.Config{
		Address: url.String(),
	}

	// Create new client.
	c, err := api.NewClient(config)
	if err != nil {
		fmt.Fprintln(os.Stderr, "error creating API client:", err)
		return 1
	}

	stime, etime, err := parseStartTimeAndEndTime(start, end)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return 1
	}

	// Run query against client.
	api := v1.NewAPI(c)

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	val, _, err := api.Series(ctx, matchers, stime, etime) // Ignoring warnings for now.
	cancel()

	if err != nil {
		fmt.Fprintln(os.Stderr, "query error:", err)
		return 1
	}

	p.printSeries(val)
	return 0
}

// QueryLabels queries for label values against a Prometheus server.
func QueryLabels(url *url.URL, name string, start, end string, p printer) int {
	config := api.Config{
		Address: url.String(),
	}

	// Create new client.
	c, err := api.NewClient(config)
	if err != nil {
		fmt.Fprintln(os.Stderr, "error creating API client:", err)
		return 1
	}

	stime, etime, err := parseStartTimeAndEndTime(start, end)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return 1
	}

	// Run query against client.
	api := v1.NewAPI(c)

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	val, warn, err := api.LabelValues(ctx, name, stime, etime)
	cancel()

	for _, v := range warn {
		fmt.Fprintln(os.Stderr, "query warning:", v)
	}
	if err != nil {
		fmt.Fprintln(os.Stderr, "query error:", err)
		return 1
	}

	p.printLabelValues(val)
	return 0
}

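// parseStartTimeAndEndTime parses the optional start and end strings, defaulting to a window of 9999 hours around now for any value left empty.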
func parseStartTimeAndEndTime(start, end string) (time.Time, time.Time, error) {
	var (
		minTime = time.Now().Add(-9999 * time.Hour)
		maxTime = time.Now().Add(9999 * time.Hour)
		err     error
	)

	stime := minTime
	etime := maxTime

	if start != "" {
		stime, err = parseTime(start)
		if err != nil {
			return stime, etime, errors.Wrap(err, "error parsing start time")
		}
	}

	if end != "" {
		etime, err = parseTime(end)
		if err != nil {
			return stime, etime, errors.Wrap(err, "error parsing end time")
		}
	}
	return stime, etime, nil
}

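// parseTime parses s either as a Unix timestamp (with optional fractional seconds) or as an RFC3339 time.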
func parseTime(s string) (time.Time, error) {
	if t, err := strconv.ParseFloat(s, 64); err == nil {
		s, ns := math.Modf(t)
		return time.Unix(int64(s), int64(ns*float64(time.Second))).UTC(), nil
	}
	if t, err := time.Parse(time.RFC3339Nano, s); err == nil {
		return t, nil
	}
	return time.Time{}, errors.Errorf("cannot parse %q to a valid timestamp", s)
}

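// endpointsGroup maps debug endpoints to output file names, with an optional post-processing step for the response body.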
type endpointsGroup struct {
	urlToFilename map[string]string
	postProcess   func(b []byte) ([]byte, error)
}

var (
	pprofEndpoints = []endpointsGroup{
		{
			urlToFilename: map[string]string{
				"/debug/pprof/profile?seconds=30": "cpu.pb",
				"/debug/pprof/block":              "block.pb",
				"/debug/pprof/goroutine":          "goroutine.pb",
				"/debug/pprof/heap":               "heap.pb",
				"/debug/pprof/mutex":              "mutex.pb",
				"/debug/pprof/threadcreate":       "threadcreate.pb",
			},
			postProcess: func(b []byte) ([]byte, error) {
				p, err := profile.Parse(bytes.NewReader(b))
				if err != nil {
					return nil, err
				}
				var buf bytes.Buffer
				if err := p.WriteUncompressed(&buf); err != nil {
					return nil, errors.Wrap(err, "writing the profile to the buffer")
				}
				return buf.Bytes(), nil
			},
		},
		{
			urlToFilename: map[string]string{
				"/debug/pprof/trace?seconds=30": "trace.pb",
			},
		},
	}
	metricsEndpoints = []endpointsGroup{
		{
			urlToFilename: map[string]string{
				"/metrics": "metrics.txt",
			},
		},
	}
	allEndpoints = append(pprofEndpoints, metricsEndpoints...)
)

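// debugPprof writes the pprof profiling endpoints of the given server to debug.tar.gz.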
func debugPprof(url string) int {
	if err := debugWrite(debugWriterConfig{
		serverURL:      url,
		tarballName:    "debug.tar.gz",
		endPointGroups: pprofEndpoints,
	}); err != nil {
		fmt.Fprintln(os.Stderr, "error completing debug command:", err)
		return 1
	}
	return 0
}

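// debugMetrics writes the /metrics output of the given server to debug.tar.gz.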
func debugMetrics(url string) int {
	if err := debugWrite(debugWriterConfig{
		serverURL:      url,
		tarballName:    "debug.tar.gz",
		endPointGroups: metricsEndpoints,
	}); err != nil {
		fmt.Fprintln(os.Stderr, "error completing debug command:", err)
		return 1
	}
	return 0
}

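// debugAll writes both the pprof profiles and the metrics of the given server to debug.tar.gz.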
func debugAll(url string) int {
	if err := debugWrite(debugWriterConfig{
		serverURL:      url,
		tarballName:    "debug.tar.gz",
		endPointGroups: allEndpoints,
	}); err != nil {
		fmt.Fprintln(os.Stderr, "error completing debug command:", err)
		return 1
	}
	return 0
}

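// printer formats query results for output.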
type printer interface {
	printValue(v model.Value)
	printSeries(v []model.LabelSet)
	printLabelValues(v model.LabelValues)
}

type promqlPrinter struct{}

func (p *promqlPrinter) printValue(v model.Value) {
	fmt.Println(v)
}

func (p *promqlPrinter) printSeries(val []model.LabelSet) {
	for _, v := range val {
		fmt.Println(v)
	}
}

func (p *promqlPrinter) printLabelValues(val model.LabelValues) {
	for _, v := range val {
		fmt.Println(v)
	}
}

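// jsonPrinter encodes results as JSON on stdout.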
type jsonPrinter struct{}

func (j *jsonPrinter) printValue(v model.Value) {
	//nolint:errcheck
	json.NewEncoder(os.Stdout).Encode(v)
}

func (j *jsonPrinter) printSeries(v []model.LabelSet) {
	//nolint:errcheck
	json.NewEncoder(os.Stdout).Encode(v)
}

func (j *jsonPrinter) printLabelValues(v model.LabelValues) {
	//nolint:errcheck
	json.NewEncoder(os.Stdout).Encode(v)
}