cleanup for review

This commit is contained in:
Owen Williams 2024-02-06 14:01:48 -05:00
parent 680bcdcadf
commit 18003c14c3
8 changed files with 7 additions and 47 deletions

View file

@ -522,11 +522,6 @@ func main() {
level.Error(logger).Log("msg", fmt.Sprintf("Error loading scrape config files from config (--config.file=%q)", cfg.configFile), "file", absPath, "err", err)
os.Exit(2)
}
for _, c := range cfgs {
level.Info(logger).Log("msg", fmt.Sprintf("utf8????? %v", c.AllowUTF8Names))
}
if cfg.tsdb.EnableExemplarStorage {
if cfgFile.StorageConfig.ExemplarsConfig == nil {
cfgFile.StorageConfig.ExemplarsConfig = &config.DefaultExemplarsConfig

View file

@ -102,7 +102,6 @@ func PushMetrics(url *url.URL, roundTripper http.RoundTripper, headers map[strin
}
func parseAndPushMetrics(client *remote.Client, data []byte, labels map[string]string) bool {
// XXXXX ok we need communication between this call (it should escape)
metricsData, err := fmtutil.MetricTextToWriteRequest(bytes.NewReader(data), labels)
if err != nil {
fmt.Fprintln(os.Stderr, " FAILED:", err)
@ -117,7 +116,6 @@ func parseAndPushMetrics(client *remote.Client, data []byte, labels map[string]s
// Encode the request body into snappy encoding.
compressed := snappy.Encode(nil, raw)
// XXXXXXXXX and this call to store (which sets the content headers)
err = client.Store(context.Background(), compressed, 0)
if err != nil {
fmt.Fprintln(os.Stderr, " FAILED:", err)

View file

@ -446,28 +446,22 @@ func (s ScrapeProtocol) Validate() error {
var (
PrometheusProto ScrapeProtocol = "PrometheusProto"
PrometheusText0_0_4 ScrapeProtocol = "PrometheusText0.0.4"
PrometheusText1_0_0 ScrapeProtocol = "PrometheusText1.0.0"
OpenMetricsText0_0_1 ScrapeProtocol = "OpenMetricsText0.0.1"
OpenMetricsText1_0_0 ScrapeProtocol = "OpenMetricsText1.0.0"
OpenMetricsText2_0_0 ScrapeProtocol = "OpenMetricsText2.0.0"
UTF8NamesHeader string = "validchars=utf8"
UTF8NamesHeader string = model.EscapingKey+"="+model.AllowUTF8
ScrapeProtocolsHeaders = map[ScrapeProtocol]string{
PrometheusProto: "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited",
PrometheusText0_0_4: "text/plain;version=0.0.4",
PrometheusText1_0_0: "text/plain;version=1_0_0",
OpenMetricsText0_0_1: "application/openmetrics-text;version=0.0.1",
OpenMetricsText1_0_0: "application/openmetrics-text;version=1.0.0",
OpenMetricsText2_0_0: "application/openmetrics-text;version=2.0.0",
}
// DefaultScrapeProtocols is the set of scrape protocols that will be proposed
// to scrape target, ordered by priority.
DefaultScrapeProtocols = []ScrapeProtocol{
OpenMetricsText2_0_0,
OpenMetricsText1_0_0,
OpenMetricsText0_0_1,
PrometheusText1_0_0,
PrometheusText0_0_4,
}
@ -477,7 +471,6 @@ var (
// "native-histograms" and "created-timestamp-zero-ingestion".
DefaultProtoFirstScrapeProtocols = []ScrapeProtocol{
PrometheusProto,
OpenMetricsText2_0_0,
OpenMetricsText1_0_0,
OpenMetricsText0_0_1,
PrometheusText0_0_4,

View file

@ -85,10 +85,6 @@ func New(b []byte, contentType string, parseClassicHistograms bool) (Parser, err
return NewPromParser(b), nil
}
// XXXX looks like this could be a place to decide if UTF8 is ok?? or does this
// need a global option ---- yeah I think prometheus is either
// utf8 on with some preferred escaping, or it's utf8 off. Not per scrape target.
// In which case, nothing needs to change here.
mediaType, _, err := mime.ParseMediaType(contentType)
if err != nil {
return NewPromParser(b), err

View file

@ -160,26 +160,6 @@ func (m *Manager) reloader() {
func (m *Manager) reload() {
m.mtxScrape.Lock()
defer m.mtxScrape.Unlock()
// var err error
// if m.opts.UTF8Names {
// model.NameValidationScheme = model.UTF8Validation
// } else {
// model.NameValidationScheme = model.LegacyValidation
// }
// level.Info(m.logger).Log("msg", "validation scheme", "scheme", model.NameValidationScheme, "arg", m.opts.UTF8Names)
// XXXXX the problem with this is that agent does not really use scrape.Options. Also too, this is like per-scrape not per-instance, so it's not really the right place for this at all.
// if m.opts.NameEscapingScheme != "" {
// model.NameEscapingScheme, err = model.ToEscapingScheme(m.opts.NameEscapingScheme)
// if err != nil {
// level.Error(m.logger).Log("msg", "error setting escaping scheme", "err", err)
// return
// }
// } else {
// model.NameEscapingScheme = DefaultNameEscapingScheme
// }
level.Info(m.logger).Log("msg", "ESCAPING SCHEME", "scheme", model.NameEscapingScheme.String())
var wg sync.WaitGroup
for setName, groups := range m.targetSets {
if _, ok := m.scrapePools[setName]; !ok {
@ -206,6 +186,7 @@ func (m *Manager) reload() {
}(m.scrapePools[setName], groups)
}
m.mtxScrape.Unlock()
wg.Wait()
}

View file

@ -693,9 +693,7 @@ func acceptHeader(sps []config.ScrapeProtocol, allowUTF8Names bool) string {
}
// Default match anything.
vals = append(vals, fmt.Sprintf("*/*;q=0.%d", weight))
ret := strings.Join(vals, ",")
fmt.Println("~~~~~~~~~~~~~~~~~~~accept header", ret)
return ret
return strings.Join(vals, ",")
}
func acceptEncodingHeader(enableCompression bool) string {

View file

@ -2361,7 +2361,7 @@ func TestTargetScraperScrapeOK(t *testing.T) {
t.Errorf("Expected Accept header to prefer application/vnd.google.protobuf, got %q", accept)
}
}
if strings.Contains(accept, "validchars=utf8") {
if strings.Contains(accept, "escaping=allow-utf-8") {
t.Errorf("Expected Accept header not to allow utf8, got %q", accept)
}
@ -2417,7 +2417,7 @@ func TestUTF8TargetScraperScrapeOK(t *testing.T) {
server := httptest.NewServer(
http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
accept := r.Header.Get("Accept")
if !strings.Contains(accept, "validchars=utf8") {
if !strings.Contains(accept, "escaping=allow-utf-8") {
t.Errorf("Expected Accept header to allow utf8, got %q", accept)
}
@ -2426,7 +2426,7 @@ func TestUTF8TargetScraperScrapeOK(t *testing.T) {
t.Errorf("Expected scrape timeout header %q, got %q", expectedTimeout, timeout)
}
w.Header().Set("Content-Type", `text/plain; version=1.0.0; validchars=utf8`)
w.Header().Set("Content-Type", `text/plain; version=1.0.0; escaping=allow-utf-8`)
w.Write([]byte(`{"metric.a"} 1
{"metric.b"} 2
`))
@ -2457,7 +2457,7 @@ func TestUTF8TargetScraperScrapeOK(t *testing.T) {
require.NoError(t, err)
contentType, err := ts.readResponse(context.Background(), resp, &buf)
require.NoError(t, err)
require.Equal(t, "text/plain; version=1.0.0; validchars=utf8", contentType)
require.Equal(t, "text/plain; version=1.0.0; escaping=allow-utf-8", contentType)
require.Equal(t, `{"metric.a"} 1
{"metric.b"} 2
`, buf.String())

View file

@ -274,7 +274,6 @@ func (c *Client) Read(ctx context.Context, query *prompb.Query) (*prompb.QueryRe
c.readQueries.Inc()
defer c.readQueries.Dec()
// XXXXXX based on content negotiation we may need to escape the query. (If the other side is old)
req := &prompb.ReadRequest{
// TODO: Support batching multiple queries into one read request,
// as the protobuf interface allows for it.