Consistently use the Seconds() method for conversion of durations

This also fixes one remaining case where a metric recorded only integral
numbers of seconds, i.e. this will probably fix #1796.
This commit is contained in:
beorn7 2016-07-07 15:24:35 +02:00
parent b16f49bb44
commit 064b57858e
6 changed files with 13 additions and 15 deletions

View file

@ -293,7 +293,7 @@ func (n *Notifier) sendAll(alerts ...*model.Alert) int {
n.errors.WithLabelValues(u).Inc()
atomic.AddUint64(&numErrors, 1)
}
n.latency.WithLabelValues(u).Observe(float64(time.Since(begin)) / float64(time.Second))
n.latency.WithLabelValues(u).Observe(time.Since(begin).Seconds())
n.sent.WithLabelValues(u).Add(float64(len(alerts)))
wg.Done()

View file

@ -201,7 +201,7 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) {
wg.Wait()
targetReloadIntervalLength.WithLabelValues(interval.String()).Observe(
float64(time.Since(start)) / float64(time.Second),
time.Since(start).Seconds(),
)
}
@ -256,7 +256,7 @@ func (sp *scrapePool) sync(targets []*Target) {
// be inserting a previous sample set.
wg.Wait()
targetSyncIntervalLength.WithLabelValues(sp.config.JobName).Observe(
float64(time.Since(start)) / float64(time.Second),
time.Since(start).Seconds(),
)
targetScrapePoolSyncsCounter.WithLabelValues(sp.config.JobName).Inc()
}
@ -413,7 +413,7 @@ func (sl *scrapeLoop) run(interval, timeout time.Duration, errc chan<- error) {
// Only record after the first scrape.
if !last.IsZero() {
targetIntervalLength.WithLabelValues(interval.String()).Observe(
float64(time.Since(last)) / float64(time.Second), // Sub-second precision.
time.Since(last).Seconds(),
)
}
@ -493,7 +493,7 @@ func (sl *scrapeLoop) report(start time.Time, duration time.Duration, err error)
model.MetricNameLabel: scrapeDurationMetricName,
},
Timestamp: ts,
Value: model.SampleValue(float64(duration) / float64(time.Second)),
Value: model.SampleValue(duration.Seconds()),
}
if err := sl.reportAppender.Append(healthSample); err != nil {

View file

@ -153,7 +153,7 @@ func (g *Group) run() {
start := time.Now()
g.eval()
iterationDuration.Observe(float64(time.Since(start)) / float64(time.Second))
iterationDuration.Observe(time.Since(start).Seconds())
}
iter()
@ -252,7 +252,7 @@ func (g *Group) eval() {
defer wg.Done()
defer func(t time.Time) {
evalDuration.WithLabelValues(rtyp).Observe(float64(time.Since(t)) / float64(time.Second))
evalDuration.WithLabelValues(rtyp).Observe(time.Since(t).Seconds())
}(time.Now())
evalTotal.WithLabelValues(rtyp).Inc()

View file

@ -559,7 +559,7 @@ func (p *persistence) checkpointSeriesMapAndHeads(fingerprintToSeries *seriesMap
}
err = os.Rename(p.headsTempFileName(), p.headsFileName())
duration := time.Since(begin)
p.checkpointDuration.Set(float64(duration) / float64(time.Second))
p.checkpointDuration.Set(duration.Seconds())
log.Infof("Done checkpointing in-memory metrics and chunks in %v.", duration)
}()
@ -1238,9 +1238,7 @@ func (p *persistence) processIndexingQueue() {
commitBatch := func() {
p.indexingBatchSizes.Observe(float64(batchSize))
defer func(begin time.Time) {
p.indexingBatchDuration.Observe(
float64(time.Since(begin)) / float64(time.Second),
)
p.indexingBatchDuration.Observe(time.Since(begin).Seconds())
}(time.Now())
if err := p.labelPairToFingerprints.IndexBatch(pairToFPs); err != nil {

View file

@ -1119,7 +1119,7 @@ func (s *MemorySeriesStorage) maintainMemorySeries(
) (becameDirty bool) {
defer func(begin time.Time) {
s.maintainSeriesDuration.WithLabelValues(maintainInMemory).Observe(
float64(time.Since(begin)) / float64(time.Second),
time.Since(begin).Seconds(),
)
}(time.Now())
@ -1272,7 +1272,7 @@ func (s *MemorySeriesStorage) writeMemorySeries(
func (s *MemorySeriesStorage) maintainArchivedSeries(fp model.Fingerprint, beforeTime model.Time) {
defer func(begin time.Time) {
s.maintainSeriesDuration.WithLabelValues(maintainArchived).Observe(
float64(time.Since(begin)) / float64(time.Second),
time.Since(begin).Seconds(),
)
}(time.Now())

View file

@ -190,7 +190,7 @@ func (t *StorageQueueManager) sendSamples(s model.Samples) {
// floor.
begin := time.Now()
err := t.tsdb.Store(s)
duration := time.Since(begin) / time.Second
duration := time.Since(begin).Seconds()
labelValue := success
if err != nil {
@ -200,7 +200,7 @@ func (t *StorageQueueManager) sendSamples(s model.Samples) {
t.failedSamples.Add(float64(len(s)))
}
t.samplesCount.WithLabelValues(labelValue).Add(float64(len(s)))
t.sendLatency.Observe(float64(duration))
t.sendLatency.Observe(duration)
}()
}