Merge pull request #3741 from krasi-georgiev/discovery-race

Fix a read/write race on the context field in the discovery package.
Commit d3ae1ac40e by Frederic Branczyk, 2018-01-30 18:17:09 +01:00, committed by GitHub.
3 changed files with 23 additions and 21 deletions
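
The fix moves the discovery Manager's context from a field that Run() used to assign into a constructor argument, so the field is written exactly once, before any other goroutine can read it. Below is a minimal, self-contained sketch of that pattern (hypothetical worker types, not the Prometheus code) contrasting the racy shape with the constructor-injection shape; the fixed version should report nothing under `go run -race`.

    package main

    import (
    	"context"
    	"fmt"
    	"time"
    )

    // racyWorker mirrors the old shape: Run writes the ctx field after
    // construction, so any concurrent read of w.ctx races with that write.
    type racyWorker struct {
    	ctx context.Context
    }

    func (w *racyWorker) Run(ctx context.Context) error {
    	w.ctx = ctx // unsynchronized write; races with readers of w.ctx
    	<-w.ctx.Done()
    	return w.ctx.Err()
    }

    // fixedWorker mirrors the new shape: ctx is set once in the constructor
    // and only ever read afterwards, so there is no write left to race with.
    type fixedWorker struct {
    	ctx context.Context
    }

    func newFixedWorker(ctx context.Context) *fixedWorker {
    	return &fixedWorker{ctx: ctx}
    }

    func (w *fixedWorker) Run() error {
    	<-w.ctx.Done()
    	return w.ctx.Err()
    }

    func main() {
    	ctx, cancel := context.WithCancel(context.Background())
    	w := newFixedWorker(ctx)

    	done := make(chan error, 1)
    	go func() { done <- w.Run() }()
    	go func() { _ = w.ctx.Err() }() // safe: w.ctx is never written again

    	time.Sleep(10 * time.Millisecond)
    	cancel()
    	fmt.Println("worker stopped:", <-done)
    }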

cmd/prometheus/main.go

@@ -238,12 +238,17 @@ func main() {
     ctxWeb, cancelWeb = context.WithCancel(context.Background())
     ctxRule           = context.Background()

-    notifier               = notifier.New(&cfg.notifier, log.With(logger, "component", "notifier"))
-    discoveryManagerScrape = discovery.NewManager(log.With(logger, "component", "discovery manager scrape"))
-    discoveryManagerNotify = discovery.NewManager(log.With(logger, "component", "discovery manager notify"))
-    scrapeManager          = retrieval.NewScrapeManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
-    queryEngine            = promql.NewEngine(fanoutStorage, &cfg.queryEngine)
-    ruleManager            = rules.NewManager(&rules.ManagerOptions{
+    notifier = notifier.New(&cfg.notifier, log.With(logger, "component", "notifier"))
+
+    ctxScrape, cancelScrape = context.WithCancel(context.Background())
+    discoveryManagerScrape  = discovery.NewManager(ctxScrape, log.With(logger, "component", "discovery manager scrape"))
+
+    ctxNotify, cancelNotify = context.WithCancel(context.Background())
+    discoveryManagerNotify  = discovery.NewManager(ctxNotify, log.With(logger, "component", "discovery manager notify"))
+
+    scrapeManager = retrieval.NewScrapeManager(log.With(logger, "component", "scrape manager"), fanoutStorage)
+    queryEngine   = promql.NewEngine(fanoutStorage, &cfg.queryEngine)
+    ruleManager   = rules.NewManager(&rules.ManagerOptions{
       Appendable: fanoutStorage,
       QueryFunc:  rules.EngineQueryFunc(queryEngine),
       NotifyFunc: sendAlerts(notifier, cfg.web.ExternalURL.String()),
@@ -375,30 +380,28 @@ func main() {
     )
   }
   {
-    ctx, cancel := context.WithCancel(context.Background())
     g.Add(
       func() error {
-        err := discoveryManagerScrape.Run(ctx)
+        err := discoveryManagerScrape.Run()
         level.Info(logger).Log("msg", "Scrape discovery manager stopped")
         return err
       },
       func(err error) {
         level.Info(logger).Log("msg", "Stopping scrape discovery manager...")
-        cancel()
+        cancelScrape()
       },
     )
   }
   {
-    ctx, cancel := context.WithCancel(context.Background())
     g.Add(
       func() error {
-        err := discoveryManagerNotify.Run(ctx)
+        err := discoveryManagerNotify.Run()
         level.Info(logger).Log("msg", "Notify discovery manager stopped")
         return err
       },
       func(err error) {
         level.Info(logger).Log("msg", "Stopping notify discovery manager...")
-        cancel()
+        cancelNotify()
       },
     )
   }
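
The g.Add calls above are the execute/interrupt actor pairs of the run group used in main (github.com/oklog/run); since Run() no longer takes a context, each interrupt function now calls the cancel function created next to its manager in the var block. A simplified sketch of that wiring, with a placeholder manager type standing in for discovery.Manager:

    package main

    import (
    	"context"
    	"fmt"
    	"time"

    	"github.com/oklog/run"
    )

    // manager stands in for discovery.Manager: it blocks until the context
    // it was constructed with is cancelled.
    type manager struct{ ctx context.Context }

    func (m *manager) Run() error {
    	<-m.ctx.Done()
    	return m.ctx.Err()
    }

    func main() {
    	ctxScrape, cancelScrape := context.WithCancel(context.Background())
    	dm := &manager{ctx: ctxScrape}

    	var g run.Group
    	g.Add(
    		func() error {
    			err := dm.Run()
    			fmt.Println("discovery manager stopped")
    			return err
    		},
    		func(error) {
    			fmt.Println("stopping discovery manager...")
    			cancelScrape() // replaces the per-actor cancel() used before this change
    		},
    	)
    	// A second actor that exits on its own; when any actor returns,
    	// run.Group interrupts all the others.
    	g.Add(
    		func() error { time.Sleep(50 * time.Millisecond); return nil },
    		func(error) {},
    	)

    	fmt.Println("run group finished:", g.Run())
    }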

discovery/manager.go

@@ -60,13 +60,13 @@ type poolKey struct {
 }

 // NewManager is the Discovery Manager constructor
-func NewManager(logger log.Logger) *Manager {
+func NewManager(ctx context.Context, logger log.Logger) *Manager {
   return &Manager{
     logger:         logger,
     syncCh:         make(chan map[string][]*targetgroup.Group),
     targets:        make(map[poolKey]map[string]*targetgroup.Group),
     discoverCancel: []context.CancelFunc{},
-    ctx:            context.Background(),
+    ctx:            ctx,
   }
 }
@@ -89,13 +89,12 @@ type Manager struct {
 }

 // Run starts the background processing
-func (m *Manager) Run(ctx context.Context) error {
-  m.ctx = ctx
+func (m *Manager) Run() error {
   for {
     select {
-    case <-ctx.Done():
+    case <-m.ctx.Done():
       m.cancelDiscoverers()
-      return ctx.Err()
+      return m.ctx.Err()
     }
   }
 }
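
With Run() no longer writing m.ctx, the only remaining write happens in NewManager, before the Manager is shared with other goroutines. The kind of race the old code had is exactly what the Go race detector flags; a stand-alone reproduction (hypothetical worker type, not the discovery package) that `go test -race` will typically report:

    package ctxrace

    import (
    	"context"
    	"testing"
    	"time"
    )

    // worker reproduces the pre-fix shape: Run writes the ctx field while
    // other methods may read it from different goroutines.
    type worker struct{ ctx context.Context }

    func (w *worker) Run(ctx context.Context) {
    	w.ctx = ctx // write
    	<-ctx.Done()
    }

    func (w *worker) stopped() bool {
    	return w.ctx != nil && w.ctx.Err() != nil // read
    }

    // TestCtxFieldRace is flagged under `go test -race`: the write in Run
    // and the read in stopped are not ordered by any synchronization.
    func TestCtxFieldRace(t *testing.T) {
    	w := &worker{}
    	ctx, cancel := context.WithCancel(context.Background())
    	defer cancel()

    	go w.Run(ctx)
    	go w.stopped()

    	time.Sleep(10 * time.Millisecond)
    }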

discovery/manager_test.go

@@ -655,7 +655,7 @@ func TestTargetUpdatesOrder(t *testing.T) {
   for testIndex, testCase := range testCases {
     ctx, cancel := context.WithCancel(context.Background())
     defer cancel()
-    discoveryManager := NewManager(nil)
+    discoveryManager := NewManager(ctx, nil)

     var totalUpdatesCount int
@@ -741,8 +741,8 @@ scrape_configs:
   }
   ctx, cancel := context.WithCancel(context.Background())
   defer cancel()
-  discoveryManager := NewManager(nil)
-  go discoveryManager.Run(ctx)
+  discoveryManager := NewManager(ctx, nil)
+  go discoveryManager.Run()

   c := make(map[string]sd_config.ServiceDiscoveryConfig)
   for _, v := range cfg.ScrapeConfigs {