mirror of
https://github.com/prometheus/prometheus.git
synced 2024-11-10 15:44:05 -08:00
7a9777b4b5
``TargetPool`` is a pool of targets pending scraping. For now, it uses the ``heap.Interface`` from ``container/heap`` to provide a priority queue from which the system scrapes the next target. It is my supposition that we'll use a model whereby we create a ``TargetPool`` for each scrape interval, into which ``Target`` instances are registered.
39 lines
655 B
Go
39 lines
655 B
Go
package retrieval
|
|
|
|
import (
|
|
"encoding/json"
|
|
"github.com/matttproud/prometheus/model"
|
|
"io/ioutil"
|
|
"net/http"
|
|
"strconv"
|
|
"time"
|
|
)
|
|
|
|
type TargetPool []*Target
|
|
|
|
func (p TargetPool) Len() int {
|
|
return len(p)
|
|
}
|
|
|
|
func (p TargetPool) Less(i, j int) bool {
|
|
return p[i].scheduledFor.Before(p[j].scheduledFor)
|
|
}
|
|
|
|
func (p *TargetPool) Pop() interface{} {
|
|
oldPool := *p
|
|
futureLength := p.Len() - 1
|
|
element := oldPool[futureLength]
|
|
futurePool := oldPool[0:futureLength]
|
|
*p = futurePool
|
|
|
|
return element
|
|
}
|
|
|
|
func (p *TargetPool) Push(element interface{}) {
|
|
*p = append(*p, element.(*Target))
|
|
}
|
|
|
|
func (p TargetPool) Swap(i, j int) {
|
|
p[i], p[j] = p[j], p[i]
|
|
}
|