Mirror of https://github.com/prometheus/prometheus, synced 2024-12-27 17:13:22 +00:00
Merge pull request #1486 from prometheus/instrument-scrape-pool-sync

Instrument scrape pool `sync()`

Commit: 19c5eb6194
@@ -43,6 +43,7 @@ const (
 	// Constants for instrumentation.
 	namespace = "prometheus"
 	interval  = "interval"
+	scrapeJob = "scrape_job"
 )
 
 var (
@@ -74,12 +75,31 @@ var (
 		},
 		[]string{interval},
 	)
+	targetSyncIntervalLength = prometheus.NewSummaryVec(
+		prometheus.SummaryOpts{
+			Namespace:  namespace,
+			Name:       "target_sync_length_seconds",
+			Help:       "Actual interval to sync the scrape pool.",
+			Objectives: map[float64]float64{0.01: 0.001, 0.05: 0.005, 0.5: 0.05, 0.90: 0.01, 0.99: 0.001},
+		},
+		[]string{scrapeJob},
+	)
+	targetScrapePoolSyncsCounter = prometheus.NewCounterVec(
+		prometheus.CounterOpts{
+			Namespace: namespace,
+			Name:      "target_scrape_pool_sync_total",
+			Help:      "Total number of syncs that were executed on a scrape pool.",
+		},
+		[]string{scrapeJob},
+	)
 )
 
 func init() {
 	prometheus.MustRegister(targetIntervalLength)
 	prometheus.MustRegister(targetSkippedScrapes)
 	prometheus.MustRegister(targetReloadIntervalLength)
+	prometheus.MustRegister(targetSyncIntervalLength)
+	prometheus.MustRegister(targetScrapePoolSyncsCounter)
 }
 
 // scrapePool manages scrapes for sets of targets.
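The hunk above defines the two new collectors, a SummaryVec for the duration of a sync pass and a CounterVec for the number of passes, both keyed by the scrape_job label, and registers them in init(). For reference, here is a minimal, self-contained sketch of the same define/register/update pattern in client_golang; the package, metric names, and label value below are illustrative and not taken from this commit:

package main

import (
	"fmt"
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

var (
	// Illustrative metrics only; not the ones added in this commit.
	poolSyncLength = prometheus.NewSummaryVec(
		prometheus.SummaryOpts{
			Namespace:  "example",
			Name:       "pool_sync_length_seconds",
			Help:       "Time taken by one sync pass.",
			Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001},
		},
		[]string{"scrape_job"},
	)
	poolSyncTotal = prometheus.NewCounterVec(
		prometheus.CounterOpts{
			Namespace: "example",
			Name:      "pool_sync_total",
			Help:      "Total number of sync passes.",
		},
		[]string{"scrape_job"},
	)
)

func main() {
	// A private registry keeps the example self-contained; the commit uses
	// the default registry via prometheus.MustRegister in init().
	reg := prometheus.NewRegistry()
	reg.MustRegister(poolSyncLength, poolSyncTotal)

	start := time.Now()
	time.Sleep(10 * time.Millisecond) // stand-in for the real sync work
	poolSyncLength.WithLabelValues("node").Observe(time.Since(start).Seconds())
	poolSyncTotal.WithLabelValues("node").Inc()

	mfs, err := reg.Gather()
	if err != nil {
		panic(err)
	}
	for _, mf := range mfs {
		fmt.Printf("%s: %d series\n", mf.GetName(), len(mf.GetMetric()))
	}
}

Once registered, the summary exports per-quantile series plus a _sum and _count pair, and the counter exports one monotonically increasing series per label set.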
@@ -188,6 +208,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) {
 // scrape loops for new targets, and stops scrape loops for disappeared targets.
 // It returns after all stopped scrape loops terminated.
 func (sp *scrapePool) sync(targets []*Target) {
+	start := time.Now()
 	sp.mtx.Lock()
 	defer sp.mtx.Unlock()
 
@@ -233,6 +254,10 @@ func (sp *scrapePool) sync(targets []*Target) {
 	// may be active and tries to insert. The old scraper that didn't terminate yet could still
 	// be inserting a previous sample set.
 	wg.Wait()
+	targetSyncIntervalLength.WithLabelValues(sp.config.JobName).Observe(
+		float64(time.Since(start)) / float64(time.Second),
+	)
+	targetScrapePoolSyncsCounter.WithLabelValues(sp.config.JobName).Inc()
 }
 
 // sampleAppender returns an appender for ingested samples from the target.
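The final hunk brackets the body of sync() between the start timestamp and an Observe() of the elapsed seconds, then increments the per-job sync counter once the wait group has drained. The same timing pattern can be factored into a small helper; this is a hedged sketch only, with a hypothetical package and function name that are not part of the commit:

package instrument

import (
	"time"

	"github.com/prometheus/client_golang/prometheus"
)

// timed runs fn and reports its wall-clock duration, in seconds, to obs.
// time.Since(start).Seconds() expresses the same quantity as the
// float64(time.Since(start)) / float64(time.Second) form used in the diff.
func timed(obs prometheus.Observer, fn func()) {
	start := time.Now()
	fn()
	obs.Observe(time.Since(start).Seconds())
}

With the namespace applied, the new series are exported as prometheus_target_sync_length_seconds (a summary with scrape_job and quantile labels, plus _sum and _count) and prometheus_target_scrape_pool_sync_total (a counter per scrape_job).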