Scrape: defer report (#7700)

When I started working on target_limit, scrapeAndReport did not exist
yet. Then I simply rebased my work without thinking.

It appears that there is a lot that can be inlined if I defer() the
report.

Signed-off-by: Julien Pivotto <roidelapluie@inuits.eu>
This commit is contained in:
Julien Pivotto 2020-07-31 19:11:08 +02:00 committed by GitHub
parent 30e079bbd5
commit 7b5507ce4b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23

View file

@ -1033,11 +1033,13 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last time
} }
b := sl.buffers.Get(sl.lastScrapeSize).([]byte) b := sl.buffers.Get(sl.lastScrapeSize).([]byte)
defer sl.buffers.Put(b)
buf := bytes.NewBuffer(b) buf := bytes.NewBuffer(b)
app := sl.appender(sl.ctx)
var total, added, seriesAdded int var total, added, seriesAdded int
var err, appErr, scrapeErr error var err, appErr, scrapeErr error
app := sl.appender(sl.ctx)
defer func() { defer func() {
if err != nil { if err != nil {
app.Rollback() app.Rollback()
@ -1048,8 +1050,15 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last time
level.Error(sl.l).Log("msg", "Scrape commit failed", "err", err) level.Error(sl.l).Log("msg", "Scrape commit failed", "err", err)
} }
}() }()
defer func() {
if err = sl.report(app, start, time.Since(start), total, added, seriesAdded, scrapeErr); err != nil {
level.Warn(sl.l).Log("msg", "Appending scrape report failed", "err", err)
}
}()
if forcedErr := sl.getForcedError(); forcedErr != nil { if forcedErr := sl.getForcedError(); forcedErr != nil {
appErr = forcedErr scrapeErr = forcedErr
// Add stale markers. // Add stale markers.
if _, _, _, err := sl.append(app, []byte{}, "", start); err != nil { if _, _, _, err := sl.append(app, []byte{}, "", start); err != nil {
app.Rollback() app.Rollback()
@ -1059,7 +1068,10 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last time
if errc != nil { if errc != nil {
errc <- forcedErr errc <- forcedErr
} }
} else {
return start
}
var contentType string var contentType string
scrapeCtx, cancel := context.WithTimeout(sl.ctx, timeout) scrapeCtx, cancel := context.WithTimeout(sl.ctx, timeout)
contentType, scrapeErr = sl.scraper.scrape(scrapeCtx, buf) contentType, scrapeErr = sl.scraper.scrape(scrapeCtx, buf)
@ -1095,17 +1107,11 @@ func (sl *scrapeLoop) scrapeAndReport(interval, timeout time.Duration, last time
level.Warn(sl.l).Log("msg", "Append failed", "err", err) level.Warn(sl.l).Log("msg", "Append failed", "err", err)
} }
} }
}
sl.buffers.Put(b)
if scrapeErr == nil { if scrapeErr == nil {
scrapeErr = appErr scrapeErr = appErr
} }
if err = sl.report(app, start, time.Since(start), total, added, seriesAdded, scrapeErr); err != nil {
level.Warn(sl.l).Log("msg", "Appending scrape report failed", "err", err)
}
return start return start
} }