Mirror of https://github.com/prometheus/prometheus (synced 2024-12-27 17:13:22 +00:00)
Apply new scrape config on reload.
This commit updates a target set's scrape configuration on reload. This causes all running scrape loops to be stopped and restarted with the new parameters.
commit 84f74b9a84
parent 02f635dc24
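For orientation before the diff: a minimal, self-contained sketch of the pattern the commit message describes. All names here (pool, loop, params) are hypothetical stand-ins for scrapePool, scrapeLoop, and the scrape configuration; this is not the Prometheus code, which follows below. On reload, the new configuration is stored and each running loop is stopped and replaced by one started with the new parameters, with a sync.WaitGroup gating completion.

```go
// Illustrative only: hypothetical types standing in for scrapePool/scrapeLoop.
package main

import (
	"fmt"
	"sync"
	"time"
)

type params struct {
	interval time.Duration
}

// loop is a stoppable ticker loop, roughly analogous to a scrape loop.
type loop struct {
	quit chan struct{}
	done chan struct{}
}

func newLoop() *loop {
	return &loop{quit: make(chan struct{}), done: make(chan struct{})}
}

func (l *loop) run(cfg params) {
	defer close(l.done)
	ticker := time.NewTicker(cfg.interval)
	defer ticker.Stop()
	for {
		select {
		case <-ticker.C:
			fmt.Println("tick every", cfg.interval)
		case <-l.quit:
			return
		}
	}
}

// stop signals the loop and blocks until it has fully exited.
func (l *loop) stop() {
	close(l.quit)
	<-l.done
}

// pool owns a set of loops and can swap their configuration at runtime.
type pool struct {
	mtx   sync.Mutex
	cfg   params
	loops []*loop
}

// reload stores the new configuration, then stops and restarts every loop
// with it, doing each swap in its own goroutine and waiting for all of them.
func (p *pool) reload(cfg params) {
	p.mtx.Lock()
	defer p.mtx.Unlock()

	p.cfg = cfg

	var wg sync.WaitGroup
	for i, l := range p.loops {
		wg.Add(1)
		go func(i int, l *loop) {
			defer wg.Done()
			l.stop()
			nl := newLoop()
			p.loops[i] = nl
			go nl.run(cfg)
		}(i, l)
	}
	wg.Wait()
}

func main() {
	p := &pool{cfg: params{interval: 50 * time.Millisecond}}
	for i := 0; i < 3; i++ {
		l := newLoop()
		p.loops = append(p.loops, l)
		go l.run(p.cfg)
	}
	time.Sleep(120 * time.Millisecond)
	p.reload(params{interval: 20 * time.Millisecond}) // all loops restart with the new interval
	time.Sleep(60 * time.Millisecond)
	for _, l := range p.loops {
		l.stop()
	}
}
```

Doing each stop/start in its own goroutine means one slow loop shutdown does not serialize the whole reload; the WaitGroup still ensures reload only returns once every loop has been swapped.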
@@ -107,6 +107,34 @@ func (sp *scrapePool) stop() {
 	wg.Wait()
 }
 
+func (sp *scrapePool) reload(cfg *config.ScrapeConfig) {
+	log.Debugln("reload scrapepool")
+	defer log.Debugln("reload done")
+
+	sp.mtx.Lock()
+	defer sp.mtx.Unlock()
+
+	sp.config = cfg
+
+	var wg sync.WaitGroup
+
+	for _, tgroup := range sp.tgroups {
+		for _, t := range tgroup {
+			wg.Add(1)
+
+			go func(t *Target) {
+				t.scrapeLoop.stop()
+
+				t.scrapeLoop = newScrapeLoop(sp.ctx, t, sp.sampleAppender(t), sp.reportAppender(t))
+				go t.scrapeLoop.run(time.Duration(cfg.ScrapeInterval), time.Duration(cfg.ScrapeTimeout), nil)
+				wg.Done()
+			}(t)
+		}
+	}
+
+	wg.Wait()
+}
+
 // sampleAppender returns an appender for ingested samples from the target.
 func (sp *scrapePool) sampleAppender(target *Target) storage.SampleAppender {
 	app := sp.appender
@@ -143,6 +171,7 @@ func (sp *scrapePool) reportAppender(target *Target) storage.SampleAppender {
 
 func (sp *scrapePool) sync(tgroups map[string]map[model.Fingerprint]*Target) {
 	sp.mtx.Lock()
+	defer sp.mtx.Unlock()
 
 	var (
 		wg sync.WaitGroup
@@ -203,9 +232,6 @@ func (sp *scrapePool) sync(tgroups map[string]map[model.Fingerprint]*Target) {
 	// may be active and tries to insert. The old scraper that didn't terminate yet could still
 	// be inserting a previous sample set.
 	wg.Wait()
-
-	// TODO(fabxc): maybe this can be released earlier with subsequent refactoring.
-	sp.mtx.Unlock()
 }
 
 // A scraper retrieves samples and accepts a status report at the end.
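A note on the two sync() hunks above: adding `defer sp.mtx.Unlock()` right after the Lock and dropping the trailing explicit Unlock does not release the mutex earlier, because deferred calls run only when the function returns, i.e. after wg.Wait(). A tiny standalone sketch (hypothetical pool type, not the Prometheus one) of that ordering:

```go
package main

import "sync"

type pool struct {
	mtx sync.Mutex
}

// sync holds the mutex across wg.Wait(): the deferred Unlock only runs when
// the function returns, i.e. after all spawned goroutines have finished.
func (p *pool) sync(work []func()) {
	p.mtx.Lock()
	defer p.mtx.Unlock() // fires after wg.Wait(), on return

	var wg sync.WaitGroup
	for _, w := range work {
		wg.Add(1)
		go func(w func()) {
			defer wg.Done()
			w()
		}(w)
	}
	wg.Wait()
	// The mutex is still held here; it is released only when sync returns.
}

func main() {
	p := &pool{}
	p.sync([]func(){func() {}, func() {}})
}
```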
@@ -117,6 +117,8 @@ func (tm *TargetManager) reload() {
 				ts.runScraping(tm.ctx)
 				tm.wg.Done()
 			}(ts)
+		} else {
+			ts.reload(scfg)
 		}
 		ts.runProviders(tm.ctx, providersFromConfig(scfg))
 	}
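The TargetManager.reload hunk above dispatches per job: unknown job names get a new target set whose scraping starts in a goroutine, while existing ones are reloaded in place. Below is a small self-contained sketch of that create-or-reload dispatch keyed by job name, using hypothetical manager/targetSet/scrapeConfig types rather than the real Prometheus ones.

```go
package main

import "fmt"

// Hypothetical stand-ins; not the Prometheus types.
type scrapeConfig struct {
	JobName  string
	Interval string
}

type targetSet struct {
	cfg scrapeConfig
}

func (ts *targetSet) reload(cfg scrapeConfig) {
	ts.cfg = cfg
	fmt.Println("reloaded job", cfg.JobName, "with interval", cfg.Interval)
}

type manager struct {
	sets map[string]*targetSet
}

// applyConfig creates a target set for each new job and reloads existing ones.
func (m *manager) applyConfig(cfgs []scrapeConfig) {
	for _, cfg := range cfgs {
		if ts, ok := m.sets[cfg.JobName]; !ok {
			m.sets[cfg.JobName] = &targetSet{cfg: cfg}
			fmt.Println("started new job", cfg.JobName)
		} else {
			ts.reload(cfg)
		}
	}
}

func main() {
	m := &manager{sets: map[string]*targetSet{}}
	m.applyConfig([]scrapeConfig{{JobName: "node", Interval: "15s"}})
	m.applyConfig([]scrapeConfig{
		{JobName: "node", Interval: "30s"}, // existing job: reloaded in place
		{JobName: "api", Interval: "10s"},  // new job: created and started
	})
}
```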
@@ -203,6 +205,14 @@ func (ts *targetSet) cancel() {
 	}
 }
 
+func (ts *targetSet) reload(cfg *config.ScrapeConfig) {
+	ts.mtx.Lock()
+	ts.config = cfg
+	ts.mtx.Unlock()
+
+	ts.scrapePool.reload(cfg)
+}
+
 func (ts *targetSet) runScraping(ctx context.Context) {
 	ctx, ts.cancelScraping = context.WithCancel(ctx)
 