Scrape: defer report (#7700)

When I started working on target_limit, scrapeAndReport did not exist
yet. Then I simply rebased my work without thinking.

It appears that there is a lot that can be inlined if I defer() the
report.

Signed-off-by: Julien Pivotto <roidelapluie@inuits.eu>
Julien Pivotto authored 2020-07-31 19:11:08 +02:00, committed by GitHub
parent 30e079bbd5
commit 7b5507ce4b
1 changed file with 45 additions and 39 deletions
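The shape of the refactor, before the diff, as a minimal self-contained sketch (hypothetical names and a stub appender, not the real Prometheus types): moving the report and the commit/rollback into deferred functions lets every exit path, including the forced-error early return, share the same bookkeeping, which is what allows the big else branch to be inlined.

package main

import (
	"errors"
	"fmt"
)

type appender struct{}

func (a *appender) Commit() error { fmt.Println("commit"); return nil }
func (a *appender) Rollback()     { fmt.Println("rollback") }

// scrapeOnce mimics the control flow of scrapeAndReport after this
// commit: every exit path falls through the same two defers.
func scrapeOnce(forced error) (err error) {
	app := &appender{}
	var scrapeErr error

	// Registered first, runs last: rollback if reporting failed, else commit.
	defer func() {
		if err != nil {
			app.Rollback()
			return
		}
		err = app.Commit()
	}()

	// Registered second, runs first (defers are LIFO), so the scrape-health
	// report lands in the appender before the commit above.
	defer func() {
		fmt.Println("report scrape health, scrapeErr =", scrapeErr)
	}()

	if forced != nil {
		scrapeErr = forced
		return // early return: both defers still run
	}

	fmt.Println("scrape and append samples")
	return
}

func main() {
	_ = scrapeOnce(nil)
	_ = scrapeOnce(errors.New("forced error"))
}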

scrape/scrape.go

@@ -1033,11 +1033,13 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last time
 	}
 
 	b := sl.buffers.Get(sl.lastScrapeSize).([]byte)
+	defer sl.buffers.Put(b)
 	buf := bytes.NewBuffer(b)
 
-	app := sl.appender(sl.ctx)
 	var total, added, seriesAdded int
 	var err, appErr, scrapeErr error
+
+	app := sl.appender(sl.ctx)
 	defer func() {
 		if err != nil {
 			app.Rollback()
@@ -1048,8 +1050,15 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last time
 			level.Error(sl.l).Log("msg", "Scrape commit failed", "err", err)
 		}
 	}()
+
+	defer func() {
+		if err = sl.report(app, start, time.Since(start), total, added, seriesAdded, scrapeErr); err != nil {
+			level.Warn(sl.l).Log("msg", "Appending scrape report failed", "err", err)
+		}
+	}()
+
 	if forcedErr := sl.getForcedError(); forcedErr != nil {
-		appErr = forcedErr
+		scrapeErr = forcedErr
 		// Add stale markers.
 		if _, _, _, err := sl.append(app, []byte{}, "", start); err != nil {
 			app.Rollback()
@@ -1059,53 +1068,50 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last time
 		if errc != nil {
 			errc <- forcedErr
 		}
-	} else {
-		var contentType string
-		scrapeCtx, cancel := context.WithTimeout(sl.ctx, timeout)
-		contentType, scrapeErr = sl.scraper.scrape(scrapeCtx, buf)
-		cancel()
-
-		if scrapeErr == nil {
-			b = buf.Bytes()
-			// NOTE: There were issues with misbehaving clients in the past
-			// that occasionally returned empty results. We don't want those
-			// to falsely reset our buffer size.
-			if len(b) > 0 {
-				sl.lastScrapeSize = len(b)
-			}
-		} else {
-			level.Debug(sl.l).Log("msg", "Scrape failed", "err", scrapeErr.Error())
-			if errc != nil {
-				errc <- scrapeErr
-			}
-		}
-
-		// A failed scrape is the same as an empty scrape,
-		// we still call sl.append to trigger stale markers.
-		total, added, seriesAdded, appErr = sl.append(app, b, contentType, start)
-		if appErr != nil {
-			app.Rollback()
-			app = sl.appender(sl.ctx)
-			level.Debug(sl.l).Log("msg", "Append failed", "err", appErr)
-			// The append failed, probably due to a parse error or sample limit.
-			// Call sl.append again with an empty scrape to trigger stale markers.
-			if _, _, _, err := sl.append(app, []byte{}, "", start); err != nil {
-				app.Rollback()
-				app = sl.appender(sl.ctx)
-				level.Warn(sl.l).Log("msg", "Append failed", "err", err)
-			}
-		}
+		return start
 	}
-	sl.buffers.Put(b)
+
+	var contentType string
+	scrapeCtx, cancel := context.WithTimeout(sl.ctx, timeout)
+	contentType, scrapeErr = sl.scraper.scrape(scrapeCtx, buf)
+	cancel()
+
+	if scrapeErr == nil {
+		b = buf.Bytes()
+		// NOTE: There were issues with misbehaving clients in the past
+		// that occasionally returned empty results. We don't want those
+		// to falsely reset our buffer size.
+		if len(b) > 0 {
+			sl.lastScrapeSize = len(b)
+		}
+	} else {
+		level.Debug(sl.l).Log("msg", "Scrape failed", "err", scrapeErr.Error())
+		if errc != nil {
+			errc <- scrapeErr
+		}
+	}
+
+	// A failed scrape is the same as an empty scrape,
+	// we still call sl.append to trigger stale markers.
+	total, added, seriesAdded, appErr = sl.append(app, b, contentType, start)
+	if appErr != nil {
+		app.Rollback()
+		app = sl.appender(sl.ctx)
+		level.Debug(sl.l).Log("msg", "Append failed", "err", appErr)
+		// The append failed, probably due to a parse error or sample limit.
+		// Call sl.append again with an empty scrape to trigger stale markers.
+		if _, _, _, err := sl.append(app, []byte{}, "", start); err != nil {
+			app.Rollback()
+			app = sl.appender(sl.ctx)
+			level.Warn(sl.l).Log("msg", "Append failed", "err", err)
+		}
+	}
 
 	if scrapeErr == nil {
 		scrapeErr = appErr
 	}
 
-	if err = sl.report(app, start, time.Since(start), total, added, seriesAdded, scrapeErr); err != nil {
-		level.Warn(sl.l).Log("msg", "Appending scrape report failed", "err", err)
-	}
 	return start
 }
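Note the ordering the two defers rely on: deferred functions run last-in, first-out, so the report defer, registered after the commit/rollback defer, fires first. The scrape-health report is therefore appended through the same appender before Commit is called, preserving the old behaviour where sl.report ran just before the deferred commit.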