Scrape: defer report (#7700)

When I started working on target_limit, scrapeAndReport did not exist
yet. Then I simply rebased my work without thinking.

It appears that there is a lot that can be inlined if I defer() the
report.

Signed-off-by: Julien Pivotto <roidelapluie@inuits.eu>
This commit is contained in:
Julien Pivotto 2020-07-31 19:11:08 +02:00 committed by GitHub
parent 30e079bbd5
commit 7b5507ce4b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@ -1033,11 +1033,13 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last time
} }
b := sl.buffers.Get(sl.lastScrapeSize).([]byte) b := sl.buffers.Get(sl.lastScrapeSize).([]byte)
defer sl.buffers.Put(b)
buf := bytes.NewBuffer(b) buf := bytes.NewBuffer(b)
app := sl.appender(sl.ctx)
var total, added, seriesAdded int var total, added, seriesAdded int
var err, appErr, scrapeErr error var err, appErr, scrapeErr error
app := sl.appender(sl.ctx)
defer func() { defer func() {
if err != nil { if err != nil {
app.Rollback() app.Rollback()
@ -1048,8 +1050,15 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last time
level.Error(sl.l).Log("msg", "Scrape commit failed", "err", err) level.Error(sl.l).Log("msg", "Scrape commit failed", "err", err)
} }
}() }()
defer func() {
if err = sl.report(app, start, time.Since(start), total, added, seriesAdded, scrapeErr); err != nil {
level.Warn(sl.l).Log("msg", "Appending scrape report failed", "err", err)
}
}()
if forcedErr := sl.getForcedError(); forcedErr != nil { if forcedErr := sl.getForcedError(); forcedErr != nil {
appErr = forcedErr scrapeErr = forcedErr
// Add stale markers. // Add stale markers.
if _, _, _, err := sl.append(app, []byte{}, "", start); err != nil { if _, _, _, err := sl.append(app, []byte{}, "", start); err != nil {
app.Rollback() app.Rollback()
@ -1059,53 +1068,50 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last time
if errc != nil { if errc != nil {
errc <- forcedErr errc <- forcedErr
} }
} else {
var contentType string
scrapeCtx, cancel := context.WithTimeout(sl.ctx, timeout)
contentType, scrapeErr = sl.scraper.scrape(scrapeCtx, buf)
cancel()
if scrapeErr == nil { return start
b = buf.Bytes() }
// NOTE: There were issues with misbehaving clients in the past
// that occasionally returned empty results. We don't want those var contentType string
// to falsely reset our buffer size. scrapeCtx, cancel := context.WithTimeout(sl.ctx, timeout)
if len(b) > 0 { contentType, scrapeErr = sl.scraper.scrape(scrapeCtx, buf)
sl.lastScrapeSize = len(b) cancel()
}
} else { if scrapeErr == nil {
level.Debug(sl.l).Log("msg", "Scrape failed", "err", scrapeErr.Error()) b = buf.Bytes()
if errc != nil { // NOTE: There were issues with misbehaving clients in the past
errc <- scrapeErr // that occasionally returned empty results. We don't want those
} // to falsely reset our buffer size.
if len(b) > 0 {
sl.lastScrapeSize = len(b)
} }
} else {
// A failed scrape is the same as an empty scrape, level.Debug(sl.l).Log("msg", "Scrape failed", "err", scrapeErr.Error())
// we still call sl.append to trigger stale markers. if errc != nil {
total, added, seriesAdded, appErr = sl.append(app, b, contentType, start) errc <- scrapeErr
if appErr != nil {
app.Rollback()
app = sl.appender(sl.ctx)
level.Debug(sl.l).Log("msg", "Append failed", "err", appErr)
// The append failed, probably due to a parse error or sample limit.
// Call sl.append again with an empty scrape to trigger stale markers.
if _, _, _, err := sl.append(app, []byte{}, "", start); err != nil {
app.Rollback()
app = sl.appender(sl.ctx)
level.Warn(sl.l).Log("msg", "Append failed", "err", err)
}
} }
} }
sl.buffers.Put(b) // A failed scrape is the same as an empty scrape,
// we still call sl.append to trigger stale markers.
total, added, seriesAdded, appErr = sl.append(app, b, contentType, start)
if appErr != nil {
app.Rollback()
app = sl.appender(sl.ctx)
level.Debug(sl.l).Log("msg", "Append failed", "err", appErr)
// The append failed, probably due to a parse error or sample limit.
// Call sl.append again with an empty scrape to trigger stale markers.
if _, _, _, err := sl.append(app, []byte{}, "", start); err != nil {
app.Rollback()
app = sl.appender(sl.ctx)
level.Warn(sl.l).Log("msg", "Append failed", "err", err)
}
}
if scrapeErr == nil { if scrapeErr == nil {
scrapeErr = appErr scrapeErr = appErr
} }
if err = sl.report(app, start, time.Since(start), total, added, seriesAdded, scrapeErr); err != nil {
level.Warn(sl.l).Log("msg", "Appending scrape report failed", "err", err)
}
return start return start
} }