Merge branch 'beorn7/storage5' into beorn7/storage6

commit 79628ae883
Author: beorn7
Date:   2016-03-16 17:02:18 +01:00

2 changed files with 42 additions and 0 deletions

retrieval/scrape.go

@@ -43,6 +43,7 @@ const (
     // Constants for instrumentation.
     namespace = "prometheus"
     interval  = "interval"
+    scrapeJob = "scrape_job"
 )

 var (
@@ -65,11 +66,40 @@ var (
         },
         []string{interval},
     )
+    targetReloadIntervalLength = prometheus.NewSummaryVec(
+        prometheus.SummaryOpts{
+            Namespace:  namespace,
+            Name:       "target_reload_length_seconds",
+            Help:       "Actual interval to reload the scrape pool with a given configuration.",
+            Objectives: map[float64]float64{0.01: 0.001, 0.05: 0.005, 0.5: 0.05, 0.90: 0.01, 0.99: 0.001},
+        },
+        []string{interval},
+    )
+    targetSyncIntervalLength = prometheus.NewSummaryVec(
+        prometheus.SummaryOpts{
+            Namespace:  namespace,
+            Name:       "target_sync_length_seconds",
+            Help:       "Actual interval to sync the scrape pool.",
+            Objectives: map[float64]float64{0.01: 0.001, 0.05: 0.005, 0.5: 0.05, 0.90: 0.01, 0.99: 0.001},
+        },
+        []string{scrapeJob},
+    )
+    targetScrapePoolSyncsCounter = prometheus.NewCounterVec(
+        prometheus.CounterOpts{
+            Namespace: namespace,
+            Name:      "target_scrape_pool_sync_total",
+            Help:      "Total number of syncs that were executed on a scrape pool.",
+        },
+        []string{scrapeJob},
+    )
 )

 func init() {
     prometheus.MustRegister(targetIntervalLength)
     prometheus.MustRegister(targetSkippedScrapes)
+    prometheus.MustRegister(targetReloadIntervalLength)
+    prometheus.MustRegister(targetSyncIntervalLength)
+    prometheus.MustRegister(targetScrapePoolSyncsCounter)
 }

 // scrapePool manages scrapes for sets of targets.
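Note on the metrics added above: a SummaryVec with explicit Objectives tracks the listed quantiles with the stated absolute errors, and WithLabelValues lazily creates one summary child per label value. A minimal sketch of the same pattern with client_golang; all names here are illustrative, not the metrics this commit registers:

package main

import (
    "time"

    "github.com/prometheus/client_golang/prometheus"
)

// demoSyncLength mirrors the pattern above: a SummaryVec with explicit
// quantile objectives, partitioned by a job label. The names are
// illustrative placeholders.
var demoSyncLength = prometheus.NewSummaryVec(
    prometheus.SummaryOpts{
        Namespace:  "demo",
        Name:       "sync_length_seconds",
        Help:       "Duration of one sync, tracked as quantiles.",
        Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
    },
    []string{"scrape_job"},
)

func main() {
    prometheus.MustRegister(demoSyncLength)

    start := time.Now()
    time.Sleep(10 * time.Millisecond) // stand-in for real work
    // WithLabelValues creates (or fetches) the child for this label value.
    demoSyncLength.WithLabelValues("node").Observe(time.Since(start).Seconds())
}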
@@ -132,6 +162,7 @@ func (sp *scrapePool) stop() {
 // but all scrape loops are restarted with the new scrape configuration.
 // This method returns after all scrape loops that were stopped have fully terminated.
 func (sp *scrapePool) reload(cfg *config.ScrapeConfig) {
+    start := time.Now()
     sp.mtx.Lock()
     defer sp.mtx.Unlock()
@@ -168,12 +199,16 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) {
     }

     wg.Wait()
+    targetReloadIntervalLength.WithLabelValues(interval.String()).Observe(
+        float64(time.Since(start)) / float64(time.Second),
+    )
 }

 // sync takes a list of potentially duplicated targets, deduplicates them, starts
 // scrape loops for new targets, and stops scrape loops for disappeared targets.
 // It returns after all stopped scrape loops terminated.
 func (sp *scrapePool) sync(targets []*Target) {
+    start := time.Now()
     sp.mtx.Lock()
     defer sp.mtx.Unlock()
@@ -219,6 +254,10 @@ func (sp *scrapePool) sync(targets []*Target) {
     // may be active and tries to insert. The old scraper that didn't terminate yet could still
     // be inserting a previous sample set.
     wg.Wait()
+    targetSyncIntervalLength.WithLabelValues(sp.config.JobName).Observe(
+        float64(time.Since(start)) / float64(time.Second),
+    )
+    targetScrapePoolSyncsCounter.WithLabelValues(sp.config.JobName).Inc()
 }

 // sampleAppender returns an appender for ingested samples from the target.
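Because the metrics set Namespace, the exposed series carry a prometheus_ prefix, e.g. prometheus_target_sync_length_seconds and prometheus_target_scrape_pool_sync_total, each further partitioned by the scrape_job label. client_golang's BuildFQName helper performs the same namespace/subsystem/name join, as this sketch shows:

package main

import (
    "fmt"

    "github.com/prometheus/client_golang/prometheus"
)

func main() {
    // BuildFQName joins namespace, subsystem, and name with underscores,
    // which is how the Namespace field ends up in the exposed series name.
    fmt.Println(prometheus.BuildFQName("prometheus", "", "target_sync_length_seconds"))
    // prometheus_target_sync_length_seconds
    fmt.Println(prometheus.BuildFQName("prometheus", "", "target_scrape_pool_sync_total"))
    // prometheus_target_scrape_pool_sync_total
}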

retrieval/scrape_test.go

@@ -19,6 +19,7 @@ import (
     "net/http/httptest"
     "net/url"
     "reflect"
+    "sort"
     "strings"
     "sync"
     "testing"
@@ -469,6 +470,8 @@ func TestTargetScraperScrapeOK(t *testing.T) {
             Value: 2,
         },
     }
+    sort.Sort(expectedSamples)
+    sort.Sort(samples)

     if !reflect.DeepEqual(samples, expectedSamples) {
         t.Errorf("Scraped samples did not match served metrics")