retrieval: Fix "up" reporting for failed scrapes

This commit is contained in:
Julius Volz 2017-06-14 22:08:03 -04:00
parent 7640960469
commit 6f66125809
2 changed files with 34 additions and 7 deletions

View File

@ -579,7 +579,6 @@ mainLoop:
} }
var ( var (
total, added int
start = time.Now() start = time.Now()
scrapeCtx, cancel = context.WithTimeout(sl.ctx, timeout) scrapeCtx, cancel = context.WithTimeout(sl.ctx, timeout)
) )
@ -591,18 +590,20 @@ mainLoop:
) )
} }
err := sl.scraper.scrape(scrapeCtx, buf) scrapeErr := sl.scraper.scrape(scrapeCtx, buf)
cancel() cancel()
var b []byte var b []byte
if err == nil { if scrapeErr == nil {
b = buf.Bytes() b = buf.Bytes()
} else if errc != nil { } else if errc != nil {
errc <- err errc <- scrapeErr
} }
// A failed scrape is the same as an empty scrape, // A failed scrape is the same as an empty scrape,
// we still call sl.append to trigger stale markers. // we still call sl.append to trigger stale markers.
if total, added, err = sl.append(b, start); err != nil { total, added, appErr := sl.append(b, start)
sl.l.With("err", err).Warn("append failed") if appErr != nil {
sl.l.With("err", appErr).Warn("append failed")
// The append failed, probably due to a parse error or sample limit. // The append failed, probably due to a parse error or sample limit.
// Call sl.append again with an empty scrape to trigger stale markers. // Call sl.append again with an empty scrape to trigger stale markers.
if _, _, err := sl.append([]byte{}, start); err != nil { if _, _, err := sl.append([]byte{}, start); err != nil {
@ -610,7 +611,11 @@ mainLoop:
} }
} }
sl.report(start, time.Since(start), total, added, err) if scrapeErr == nil {
scrapeErr = appErr
}
sl.report(start, time.Since(start), total, added, scrapeErr)
last = start last = start
select { select {

View File

@ -821,6 +821,28 @@ func TestScrapeLoopRunAppliesScrapeLimit(t *testing.T) {
} }
} }
// TestScrapeLoopRunReportsTargetDownOnScrapeError verifies that a failed
// scrape is reported with an "up" value of 0 (target down), which is the
// behavior this commit fixes.
func TestScrapeLoopRunReportsTargetDownOnScrapeError(t *testing.T) {
var (
// scraper is configured below to fail every scrape attempt.
scraper = &testScraper{}
// reportAppender collects the report samples the loop emits
// (the first of which is asserted on as "up" below).
reportAppender = &collectResultAppender{}
reportApp = func() storage.Appender { return reportAppender }
)
ctx, cancel := context.WithCancel(context.Background())
sl := newScrapeLoop(ctx, scraper, func() storage.Appender { return nopAppender{} }, reportApp, nil)
// Fail the scrape, and cancel the loop's context from inside the scrape
// so run() terminates after this single (failed) iteration.
scraper.scrapeFunc = func(ctx context.Context, w io.Writer) error {
cancel()
return fmt.Errorf("scrape failed")
}
sl.run(10*time.Millisecond, time.Hour, nil)
// A failed scrape must report the target as down: 'up' == 0.
// NOTE(review): assumes the first collected result is the 'up' sample —
// consistent with the assertion message, but confirm against sl.report.
if reportAppender.result[0].v != 0 {
t.Fatalf("bad 'up' value; want 0, got %v", reportAppender.result[0].v)
}
}
type errorAppender struct { type errorAppender struct {
collectResultAppender collectResultAppender
} }