Add targetScraper tests

Repository: https://github.com/prometheus/prometheus.git
commit 50c2f20756
parent 1ede7b9d72
@@ -15,7 +15,11 @@ package retrieval

 import (
 	"fmt"
+	"net/http"
+	"net/http/httptest"
+	"net/url"
 	"reflect"
+	"strings"
 	"sync"
 	"testing"
 	"time"
@@ -423,6 +427,134 @@ func TestScrapeLoopRun(t *testing.T) {
 	}
 }

+func TestTargetScraperScrapeOK(t *testing.T) {
+	server := httptest.NewServer(
+		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			w.Header().Set("Content-Type", `text/plain; version=0.0.4`)
+			w.Write([]byte("metric_a 1\nmetric_b 2\n"))
+		}),
+	)
+	defer server.Close()
+
+	serverURL, err := url.Parse(server.URL)
+	if err != nil {
+		panic(err)
+	}
+
+	ts := &targetScraper{
+		Target: &Target{
+			labels: model.LabelSet{
+				model.SchemeLabel:  model.LabelValue(serverURL.Scheme),
+				model.AddressLabel: model.LabelValue(serverURL.Host),
+			},
+		},
+		client: http.DefaultClient,
+	}
+	now := time.Now()
+
+	samples, err := ts.scrape(context.Background(), now)
+	if err != nil {
+		t.Fatalf("Unexpected scrape error: %s", err)
+	}
+
+	expectedSamples := model.Samples{
+		{
+			Metric:    model.Metric{"__name__": "metric_a"},
+			Timestamp: model.TimeFromUnixNano(now.UnixNano()),
+			Value:     1,
+		},
+		{
+			Metric:    model.Metric{"__name__": "metric_b"},
+			Timestamp: model.TimeFromUnixNano(now.UnixNano()),
+			Value:     2,
+		},
+	}
+
+	if !reflect.DeepEqual(samples, expectedSamples) {
+		t.Errorf("Scraped samples did not match served metrics")
+		t.Errorf("Expected: %v", expectedSamples)
+		t.Fatalf("Got: %v", samples)
+	}
+}
+
+func TestTargetScrapeScrapeCancel(t *testing.T) {
+	block := make(chan struct{})
+
+	server := httptest.NewServer(
+		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			<-block
+		}),
+	)
+	defer server.Close()
+
+	serverURL, err := url.Parse(server.URL)
+	if err != nil {
+		panic(err)
+	}
+
+	ts := &targetScraper{
+		Target: &Target{
+			labels: model.LabelSet{
+				model.SchemeLabel:  model.LabelValue(serverURL.Scheme),
+				model.AddressLabel: model.LabelValue(serverURL.Host),
+			},
+		},
+		client: http.DefaultClient,
+	}
+	ctx, cancel := context.WithCancel(context.Background())
+
+	done := make(chan struct{})
+
+	go func() {
+		time.Sleep(1 * time.Second)
+		cancel()
+	}()
+
+	go func() {
+		if _, err := ts.scrape(ctx, time.Now()); err != context.Canceled {
+			t.Errorf("Expected context cancellation error but got: %v", err)
+		}
+		close(done)
+	}()
+
+	select {
+	case <-time.After(5 * time.Second):
+		t.Fatalf("Scrape function did not return after context was canceled")
+	case <-done:
+	}
+	// If this were closed in a defer above, the blocked handler would keep
+	// the test server from terminating and the test wouldn't complete.
+	close(block)
+}
+
+func TestTargetScrapeScrapeNotFound(t *testing.T) {
+	server := httptest.NewServer(
+		http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+			w.WriteHeader(http.StatusNotFound)
+		}),
+	)
+	defer server.Close()
+
+	serverURL, err := url.Parse(server.URL)
+	if err != nil {
+		panic(err)
+	}
+
+	ts := &targetScraper{
+		Target: &Target{
+			labels: model.LabelSet{
+				model.SchemeLabel:  model.LabelValue(serverURL.Scheme),
+				model.AddressLabel: model.LabelValue(serverURL.Host),
+			},
+		},
+		client: http.DefaultClient,
+	}
+
+	if _, err := ts.scrape(context.Background(), time.Now()); err == nil || !strings.Contains(err.Error(), "404") {
+		t.Fatalf("Expected \"404 Not Found\" error but got: %v", err)
+	}
+}
+
 // testScraper implements the scraper interface and allows setting values
 // returned by its methods. It also allows setting a custom scrape function.
 type testScraper struct {
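The payload served in TestTargetScraperScrapeOK ("metric_a 1\nmetric_b 2\n" under Content-Type text/plain; version=0.0.4) is the Prometheus text exposition format. Below is a minimal, self-contained sketch of parsing that payload with the expfmt package from prometheus/common; the diff does not show how targetScraper.scrape itself decodes the response, so treat this as illustrative rather than the code under test.

package main

import (
	"fmt"
	"strings"

	"github.com/prometheus/common/expfmt"
)

func main() {
	// Exactly what the test handler serves.
	payload := "metric_a 1\nmetric_b 2\n"

	var parser expfmt.TextParser
	families, err := parser.TextToMetricFamilies(strings.NewReader(payload))
	if err != nil {
		panic(err)
	}
	// Samples without a # TYPE line are parsed as untyped metrics.
	for name, mf := range families {
		fmt.Printf("%s = %v\n", name, mf.GetMetric()[0].GetUntyped().GetValue())
	}
}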
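TestTargetScrapeScrapeCancel asserts that cancelling the context aborts an in-flight scrape with context.Canceled. A self-contained sketch of that mechanism follows, using http.NewRequestWithContext (a later-Go convenience, not necessarily what targetScraper.scrape used internally) against a hanging handler like the test's.

package main

import (
	"context"
	"errors"
	"fmt"
	"net/http"
	"net/http/httptest"
	"time"
)

func main() {
	block := make(chan struct{})

	// A handler that hangs until released, as in TestTargetScrapeScrapeCancel.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		<-block
	}))
	// LIFO defers: close(block) releases the handler before server.Close()
	// runs, the same ordering concern the test's final comment describes.
	defer server.Close()
	defer close(block)

	ctx, cancel := context.WithCancel(context.Background())
	go func() {
		time.Sleep(100 * time.Millisecond)
		cancel()
	}()

	req, err := http.NewRequestWithContext(ctx, http.MethodGet, server.URL, nil)
	if err != nil {
		panic(err)
	}
	_, err = http.DefaultClient.Do(req)

	// The *url.Error returned by Do wraps context.Canceled.
	fmt.Println("canceled:", errors.Is(err, context.Canceled))
}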
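TestTargetScrapeScrapeNotFound expects a non-2xx response to surface as an error whose message contains the HTTP status. The sketch below shows one way a scraper can produce such an error; scrapeOnce and its exact message format are hypothetical stand-ins, not the actual targetScraper implementation.

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"strings"
)

// scrapeOnce fetches the URL and fails on any non-200 status
// (hypothetical helper for illustration).
func scrapeOnce(url string) ([]byte, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		// resp.Status is e.g. "404 Not Found", so the message contains "404".
		return nil, fmt.Errorf("server returned HTTP status %s", resp.Status)
	}
	return io.ReadAll(resp.Body)
}

func main() {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusNotFound)
	}))
	defer server.Close()

	_, err := scrapeOnce(server.URL)
	fmt.Println(err != nil && strings.Contains(err.Error(), "404")) // true
}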