mirror of
https://github.com/prometheus/prometheus.git
synced 2025-01-30 23:20:51 -08:00
Merge pull request #4700 from prometheus/openmetrics
Openmetrics Parser
This commit is contained in:
commit
105ed5c112
4
Makefile
4
Makefile
|
@ -17,8 +17,8 @@ STATICCHECK_IGNORE = \
|
|||
github.com/prometheus/prometheus/discovery/kubernetes/kubernetes.go:SA1019 \
|
||||
github.com/prometheus/prometheus/discovery/kubernetes/node.go:SA1019 \
|
||||
github.com/prometheus/prometheus/documentation/examples/remote_storage/remote_storage_adapter/main.go:SA1019 \
|
||||
github.com/prometheus/prometheus/pkg/textparse/lex.l.go:SA4006 \
|
||||
github.com/prometheus/prometheus/pkg/textparse/lex.l.go:SA4006 \
|
||||
github.com/prometheus/prometheus/pkg/textparse/promlex.l.go:SA4006 \
|
||||
github.com/prometheus/prometheus/pkg/textparse/openmetricslex.l.go:SA4006 \
|
||||
github.com/prometheus/prometheus/pkg/pool/pool.go:SA6002 \
|
||||
github.com/prometheus/prometheus/promql/engine.go:SA6002 \
|
||||
github.com/prometheus/prometheus/prompb/rpc.pb.gw.go:SA1019
|
||||
|
|
|
@ -499,7 +499,8 @@ curl -G http://localhost:9091/api/v1/targets/metadata \
|
|||
"job": "prometheus"
|
||||
},
|
||||
"type": "gauge",
|
||||
"help": "Number of goroutines that currently exist."
|
||||
"help": "Number of goroutines that currently exist.",
|
||||
"unit": ""
|
||||
},
|
||||
{
|
||||
"target": {
|
||||
|
@ -507,7 +508,8 @@ curl -G http://localhost:9091/api/v1/targets/metadata \
|
|||
"job": "prometheus"
|
||||
},
|
||||
"type": "gauge",
|
||||
"help": "Number of goroutines that currently exist."
|
||||
"help": "Number of goroutines that currently exist.",
|
||||
"unit": ""
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@ -530,7 +532,8 @@ curl -G http://localhost:9091/api/v1/targets/metadata \
|
|||
},
|
||||
"metric": "prometheus_treecache_zookeeper_failures_total",
|
||||
"type": "counter",
|
||||
"help": "The total number of ZooKeeper failures."
|
||||
"help": "The total number of ZooKeeper failures.",
|
||||
"unit": ""
|
||||
},
|
||||
{
|
||||
"target": {
|
||||
|
@ -539,7 +542,8 @@ curl -G http://localhost:9091/api/v1/targets/metadata \
|
|||
},
|
||||
"metric": "prometheus_tsdb_reloads_total",
|
||||
"type": "counter",
|
||||
"help": "Number of times the database reloaded block data from disk."
|
||||
"help": "Number of times the database reloaded block data from disk.",
|
||||
"unit": ""
|
||||
},
|
||||
// ...
|
||||
]
|
||||
|
|
91
pkg/textparse/interface.go
Normal file
91
pkg/textparse/interface.go
Normal file
|
@ -0,0 +1,91 @@
|
|||
// Copyright 2018 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package textparse
|
||||
|
||||
import (
|
||||
"mime"
|
||||
|
||||
"github.com/prometheus/prometheus/pkg/labels"
|
||||
)
|
||||
|
||||
// Parser parses samples from a byte slice of samples in the official
|
||||
// Prometheus and OpenMetrics text exposition formats.
|
||||
type Parser interface {
|
||||
// Series returns the bytes of the series, the timestamp if set, and the value
|
||||
// of the current sample.
|
||||
Series() ([]byte, *int64, float64)
|
||||
|
||||
// Help returns the metric name and help text in the current entry.
|
||||
// Must only be called after Next returned a help entry.
|
||||
// The returned byte slices become invalid after the next call to Next.
|
||||
Help() ([]byte, []byte)
|
||||
|
||||
// Type returns the metric name and type in the current entry.
|
||||
// Must only be called after Next returned a type entry.
|
||||
// The returned byte slices become invalid after the next call to Next.
|
||||
Type() ([]byte, MetricType)
|
||||
|
||||
// Unit returns the metric name and unit in the current entry.
|
||||
// Must only be called after Next returned a unit entry.
|
||||
// The returned byte slices become invalid after the next call to Next.
|
||||
Unit() ([]byte, []byte)
|
||||
|
||||
// Comment returns the text of the current comment.
|
||||
// Must only be called after Next returned a comment entry.
|
||||
// The returned byte slice becomes invalid after the next call to Next.
|
||||
Comment() []byte
|
||||
|
||||
// Metric writes the labels of the current sample into the passed labels.
|
||||
// It returns the string from which the metric was parsed.
|
||||
Metric(l *labels.Labels) string
|
||||
|
||||
// Next advances the parser to the next sample. It returns false if no
|
||||
// more samples were read or an error occurred.
|
||||
Next() (Entry, error)
|
||||
}
|
||||
|
||||
// New returns a new parser of the byte slice.
|
||||
func New(b []byte, contentType string) Parser {
|
||||
mediaType, _, err := mime.ParseMediaType(contentType)
|
||||
if err == nil && mediaType == "application/openmetrics-text" {
|
||||
return NewOpenMetricsParser(b)
|
||||
}
|
||||
return NewPromParser(b)
|
||||
}
|
||||
|
||||
// Entry represents the type of a parsed entry.
|
||||
type Entry int
|
||||
|
||||
const (
|
||||
EntryInvalid Entry = -1
|
||||
EntryType Entry = 0
|
||||
EntryHelp Entry = 1
|
||||
EntrySeries Entry = 2
|
||||
EntryComment Entry = 3
|
||||
EntryUnit Entry = 4
|
||||
)
|
||||
|
||||
// MetricType represents metric type values.
|
||||
type MetricType string
|
||||
|
||||
const (
|
||||
MetricTypeCounter = "counter"
|
||||
MetricTypeGauge = "gauge"
|
||||
MetricTypeHistogram = "histogram"
|
||||
MetricTypeGaugeHistogram = "gaugehistogram"
|
||||
MetricTypeSummary = "summary"
|
||||
MetricTypeInfo = "info"
|
||||
MetricTypeStateset = "stateset"
|
||||
MetricTypeUnknown = "unknown"
|
||||
)
|
71
pkg/textparse/openmetricslex.l
Normal file
71
pkg/textparse/openmetricslex.l
Normal file
|
@ -0,0 +1,71 @@
|
|||
%{
|
||||
// Copyright 2018 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package textparse
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// Lex is called by the parser generated by "go tool yacc" to obtain each
|
||||
// token. The method is opened before the matching rules block and closed at
|
||||
// the end of the file.
|
||||
func (l *openMetricsLexer) Lex() token {
|
||||
if l.i >= len(l.b) {
|
||||
return tEOF
|
||||
}
|
||||
c := l.b[l.i]
|
||||
l.start = l.i
|
||||
|
||||
%}
|
||||
|
||||
D [0-9]
|
||||
L [a-zA-Z_]
|
||||
M [a-zA-Z_:]
|
||||
C [^\n]
|
||||
S [ ]
|
||||
|
||||
%x sComment sMeta1 sMeta2 sLabels sLValue sValue sTimestamp
|
||||
|
||||
%yyc c
|
||||
%yyn c = l.next()
|
||||
%yyt l.state
|
||||
|
||||
|
||||
%%
|
||||
|
||||
#{S} l.state = sComment
|
||||
<sComment>HELP{S} l.state = sMeta1; return tHelp
|
||||
<sComment>TYPE{S} l.state = sMeta1; return tType
|
||||
<sComment>UNIT{S} l.state = sMeta1; return tUnit
|
||||
<sComment>"EOF"\n? l.state = sInit; return tEofWord
|
||||
<sMeta1>{M}({M}|{D})* l.state = sMeta2; return tMName
|
||||
<sMeta2>{S}{C}*\n l.state = sInit; return tText
|
||||
|
||||
{M}({M}|{D})* l.state = sValue; return tMName
|
||||
<sValue>\{ l.state = sLabels; return tBraceOpen
|
||||
<sLabels>{L}({L}|{D})* return tLName
|
||||
<sLabels>\} l.state = sValue; return tBraceClose
|
||||
<sLabels>= l.state = sLValue; return tEqual
|
||||
<sLabels>, return tComma
|
||||
<sLValue>\"(\\.|[^\\"\n])*\" l.state = sLabels; return tLValue
|
||||
<sValue>{S}[^ \n]+ l.state = sTimestamp; return tValue
|
||||
<sTimestamp>{S}[^ \n]+ return tTimestamp
|
||||
<sTimestamp>{S}#{S}{C}*\n l.state = sInit; return tLinebreak
|
||||
<sTimestamp>\n l.state = sInit; return tLinebreak
|
||||
|
||||
%%
|
||||
|
||||
return tInvalid
|
||||
}
|
586
pkg/textparse/openmetricslex.l.go
Normal file
586
pkg/textparse/openmetricslex.l.go
Normal file
|
@ -0,0 +1,586 @@
|
|||
// CAUTION: Generated file - DO NOT EDIT.
|
||||
|
||||
// Copyright 2018 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package textparse
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// Lex is called by the parser generated by "go tool yacc" to obtain each
|
||||
// token. The method is opened before the matching rules block and closed at
|
||||
// the end of the file.
|
||||
func (l *openMetricsLexer) Lex() token {
|
||||
if l.i >= len(l.b) {
|
||||
return tEOF
|
||||
}
|
||||
c := l.b[l.i]
|
||||
l.start = l.i
|
||||
|
||||
yystate0:
|
||||
|
||||
switch yyt := l.state; yyt {
|
||||
default:
|
||||
panic(fmt.Errorf(`invalid start condition %d`, yyt))
|
||||
case 0: // start condition: INITIAL
|
||||
goto yystart1
|
||||
case 1: // start condition: sComment
|
||||
goto yystart5
|
||||
case 2: // start condition: sMeta1
|
||||
goto yystart25
|
||||
case 3: // start condition: sMeta2
|
||||
goto yystart27
|
||||
case 4: // start condition: sLabels
|
||||
goto yystart30
|
||||
case 5: // start condition: sLValue
|
||||
goto yystart35
|
||||
case 6: // start condition: sValue
|
||||
goto yystart39
|
||||
case 7: // start condition: sTimestamp
|
||||
goto yystart43
|
||||
}
|
||||
|
||||
goto yystate0 // silence unused label error
|
||||
goto yystate1 // silence unused label error
|
||||
yystate1:
|
||||
c = l.next()
|
||||
yystart1:
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == '#':
|
||||
goto yystate2
|
||||
case c == ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z':
|
||||
goto yystate4
|
||||
}
|
||||
|
||||
yystate2:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == ' ':
|
||||
goto yystate3
|
||||
}
|
||||
|
||||
yystate3:
|
||||
c = l.next()
|
||||
goto yyrule1
|
||||
|
||||
yystate4:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyrule8
|
||||
case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z':
|
||||
goto yystate4
|
||||
}
|
||||
|
||||
goto yystate5 // silence unused label error
|
||||
yystate5:
|
||||
c = l.next()
|
||||
yystart5:
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'E':
|
||||
goto yystate6
|
||||
case c == 'H':
|
||||
goto yystate10
|
||||
case c == 'T':
|
||||
goto yystate15
|
||||
case c == 'U':
|
||||
goto yystate20
|
||||
}
|
||||
|
||||
yystate6:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'O':
|
||||
goto yystate7
|
||||
}
|
||||
|
||||
yystate7:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'F':
|
||||
goto yystate8
|
||||
}
|
||||
|
||||
yystate8:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyrule5
|
||||
case c == '\n':
|
||||
goto yystate9
|
||||
}
|
||||
|
||||
yystate9:
|
||||
c = l.next()
|
||||
goto yyrule5
|
||||
|
||||
yystate10:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'E':
|
||||
goto yystate11
|
||||
}
|
||||
|
||||
yystate11:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'L':
|
||||
goto yystate12
|
||||
}
|
||||
|
||||
yystate12:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'P':
|
||||
goto yystate13
|
||||
}
|
||||
|
||||
yystate13:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == ' ':
|
||||
goto yystate14
|
||||
}
|
||||
|
||||
yystate14:
|
||||
c = l.next()
|
||||
goto yyrule2
|
||||
|
||||
yystate15:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'Y':
|
||||
goto yystate16
|
||||
}
|
||||
|
||||
yystate16:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'P':
|
||||
goto yystate17
|
||||
}
|
||||
|
||||
yystate17:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'E':
|
||||
goto yystate18
|
||||
}
|
||||
|
||||
yystate18:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == ' ':
|
||||
goto yystate19
|
||||
}
|
||||
|
||||
yystate19:
|
||||
c = l.next()
|
||||
goto yyrule3
|
||||
|
||||
yystate20:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'N':
|
||||
goto yystate21
|
||||
}
|
||||
|
||||
yystate21:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'I':
|
||||
goto yystate22
|
||||
}
|
||||
|
||||
yystate22:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == 'T':
|
||||
goto yystate23
|
||||
}
|
||||
|
||||
yystate23:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == ' ':
|
||||
goto yystate24
|
||||
}
|
||||
|
||||
yystate24:
|
||||
c = l.next()
|
||||
goto yyrule4
|
||||
|
||||
goto yystate25 // silence unused label error
|
||||
yystate25:
|
||||
c = l.next()
|
||||
yystart25:
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z':
|
||||
goto yystate26
|
||||
}
|
||||
|
||||
yystate26:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyrule6
|
||||
case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z':
|
||||
goto yystate26
|
||||
}
|
||||
|
||||
goto yystate27 // silence unused label error
|
||||
yystate27:
|
||||
c = l.next()
|
||||
yystart27:
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == ' ':
|
||||
goto yystate28
|
||||
}
|
||||
|
||||
yystate28:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == '\n':
|
||||
goto yystate29
|
||||
case c >= '\x01' && c <= '\t' || c >= '\v' && c <= 'ÿ':
|
||||
goto yystate28
|
||||
}
|
||||
|
||||
yystate29:
|
||||
c = l.next()
|
||||
goto yyrule7
|
||||
|
||||
goto yystate30 // silence unused label error
|
||||
yystate30:
|
||||
c = l.next()
|
||||
yystart30:
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == ',':
|
||||
goto yystate31
|
||||
case c == '=':
|
||||
goto yystate32
|
||||
case c == '}':
|
||||
goto yystate34
|
||||
case c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z':
|
||||
goto yystate33
|
||||
}
|
||||
|
||||
yystate31:
|
||||
c = l.next()
|
||||
goto yyrule13
|
||||
|
||||
yystate32:
|
||||
c = l.next()
|
||||
goto yyrule12
|
||||
|
||||
yystate33:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyrule10
|
||||
case c >= '0' && c <= '9' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z':
|
||||
goto yystate33
|
||||
}
|
||||
|
||||
yystate34:
|
||||
c = l.next()
|
||||
goto yyrule11
|
||||
|
||||
goto yystate35 // silence unused label error
|
||||
yystate35:
|
||||
c = l.next()
|
||||
yystart35:
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == '"':
|
||||
goto yystate36
|
||||
}
|
||||
|
||||
yystate36:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == '"':
|
||||
goto yystate37
|
||||
case c == '\\':
|
||||
goto yystate38
|
||||
case c >= '\x01' && c <= '\t' || c >= '\v' && c <= '!' || c >= '#' && c <= '[' || c >= ']' && c <= 'ÿ':
|
||||
goto yystate36
|
||||
}
|
||||
|
||||
yystate37:
|
||||
c = l.next()
|
||||
goto yyrule14
|
||||
|
||||
yystate38:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c >= '\x01' && c <= '\t' || c >= '\v' && c <= 'ÿ':
|
||||
goto yystate36
|
||||
}
|
||||
|
||||
goto yystate39 // silence unused label error
|
||||
yystate39:
|
||||
c = l.next()
|
||||
yystart39:
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == ' ':
|
||||
goto yystate40
|
||||
case c == '{':
|
||||
goto yystate42
|
||||
}
|
||||
|
||||
yystate40:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c >= '\x01' && c <= '\t' || c >= '\v' && c <= '\x1f' || c >= '!' && c <= 'ÿ':
|
||||
goto yystate41
|
||||
}
|
||||
|
||||
yystate41:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyrule15
|
||||
case c >= '\x01' && c <= '\t' || c >= '\v' && c <= '\x1f' || c >= '!' && c <= 'ÿ':
|
||||
goto yystate41
|
||||
}
|
||||
|
||||
yystate42:
|
||||
c = l.next()
|
||||
goto yyrule9
|
||||
|
||||
goto yystate43 // silence unused label error
|
||||
yystate43:
|
||||
c = l.next()
|
||||
yystart43:
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == ' ':
|
||||
goto yystate45
|
||||
case c == '\n':
|
||||
goto yystate44
|
||||
}
|
||||
|
||||
yystate44:
|
||||
c = l.next()
|
||||
goto yyrule18
|
||||
|
||||
yystate45:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == '#':
|
||||
goto yystate47
|
||||
case c >= '\x01' && c <= '\t' || c >= '\v' && c <= '\x1f' || c == '!' || c == '"' || c >= '$' && c <= 'ÿ':
|
||||
goto yystate46
|
||||
}
|
||||
|
||||
yystate46:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyrule16
|
||||
case c >= '\x01' && c <= '\t' || c >= '\v' && c <= '\x1f' || c >= '!' && c <= 'ÿ':
|
||||
goto yystate46
|
||||
}
|
||||
|
||||
yystate47:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyrule16
|
||||
case c == ' ':
|
||||
goto yystate48
|
||||
case c >= '\x01' && c <= '\t' || c >= '\v' && c <= '\x1f' || c >= '!' && c <= 'ÿ':
|
||||
goto yystate46
|
||||
}
|
||||
|
||||
yystate48:
|
||||
c = l.next()
|
||||
switch {
|
||||
default:
|
||||
goto yyabort
|
||||
case c == '\n':
|
||||
goto yystate49
|
||||
case c >= '\x01' && c <= '\t' || c >= '\v' && c <= 'ÿ':
|
||||
goto yystate48
|
||||
}
|
||||
|
||||
yystate49:
|
||||
c = l.next()
|
||||
goto yyrule17
|
||||
|
||||
yyrule1: // #{S}
|
||||
{
|
||||
l.state = sComment
|
||||
goto yystate0
|
||||
}
|
||||
yyrule2: // HELP{S}
|
||||
{
|
||||
l.state = sMeta1
|
||||
return tHelp
|
||||
goto yystate0
|
||||
}
|
||||
yyrule3: // TYPE{S}
|
||||
{
|
||||
l.state = sMeta1
|
||||
return tType
|
||||
goto yystate0
|
||||
}
|
||||
yyrule4: // UNIT{S}
|
||||
{
|
||||
l.state = sMeta1
|
||||
return tUnit
|
||||
goto yystate0
|
||||
}
|
||||
yyrule5: // "EOF"\n?
|
||||
{
|
||||
l.state = sInit
|
||||
return tEofWord
|
||||
goto yystate0
|
||||
}
|
||||
yyrule6: // {M}({M}|{D})*
|
||||
{
|
||||
l.state = sMeta2
|
||||
return tMName
|
||||
goto yystate0
|
||||
}
|
||||
yyrule7: // {S}{C}*\n
|
||||
{
|
||||
l.state = sInit
|
||||
return tText
|
||||
goto yystate0
|
||||
}
|
||||
yyrule8: // {M}({M}|{D})*
|
||||
{
|
||||
l.state = sValue
|
||||
return tMName
|
||||
goto yystate0
|
||||
}
|
||||
yyrule9: // \{
|
||||
{
|
||||
l.state = sLabels
|
||||
return tBraceOpen
|
||||
goto yystate0
|
||||
}
|
||||
yyrule10: // {L}({L}|{D})*
|
||||
{
|
||||
return tLName
|
||||
}
|
||||
yyrule11: // \}
|
||||
{
|
||||
l.state = sValue
|
||||
return tBraceClose
|
||||
goto yystate0
|
||||
}
|
||||
yyrule12: // =
|
||||
{
|
||||
l.state = sLValue
|
||||
return tEqual
|
||||
goto yystate0
|
||||
}
|
||||
yyrule13: // ,
|
||||
{
|
||||
return tComma
|
||||
}
|
||||
yyrule14: // \"(\\.|[^\\"\n])*\"
|
||||
{
|
||||
l.state = sLabels
|
||||
return tLValue
|
||||
goto yystate0
|
||||
}
|
||||
yyrule15: // {S}[^ \n]+
|
||||
{
|
||||
l.state = sTimestamp
|
||||
return tValue
|
||||
goto yystate0
|
||||
}
|
||||
yyrule16: // {S}[^ \n]+
|
||||
{
|
||||
return tTimestamp
|
||||
}
|
||||
yyrule17: // {S}#{S}{C}*\n
|
||||
{
|
||||
l.state = sInit
|
||||
return tLinebreak
|
||||
goto yystate0
|
||||
}
|
||||
yyrule18: // \n
|
||||
{
|
||||
l.state = sInit
|
||||
return tLinebreak
|
||||
goto yystate0
|
||||
}
|
||||
panic("unreachable")
|
||||
|
||||
goto yyabort // silence unused label error
|
||||
|
||||
yyabort: // no lexem recognized
|
||||
|
||||
return tInvalid
|
||||
}
|
347
pkg/textparse/openmetricsparse.go
Normal file
347
pkg/textparse/openmetricsparse.go
Normal file
|
@ -0,0 +1,347 @@
|
|||
// Copyright 2018 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
//go:generate go get github.com/cznic/golex
|
||||
//go:generate golex -o=openmetricslex.l.go openmetricslex.l
|
||||
|
||||
package textparse
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/prometheus/prometheus/pkg/labels"
|
||||
"github.com/prometheus/prometheus/pkg/value"
|
||||
)
|
||||
|
||||
type openMetricsLexer struct {
|
||||
b []byte
|
||||
i int
|
||||
start int
|
||||
err error
|
||||
state int
|
||||
}
|
||||
|
||||
// buf returns the buffer of the current token.
|
||||
func (l *openMetricsLexer) buf() []byte {
|
||||
return l.b[l.start:l.i]
|
||||
}
|
||||
|
||||
func (l *openMetricsLexer) cur() byte {
|
||||
return l.b[l.i]
|
||||
}
|
||||
|
||||
// next advances the openMetricsLexer to the next character.
|
||||
func (l *openMetricsLexer) next() byte {
|
||||
l.i++
|
||||
if l.i >= len(l.b) {
|
||||
l.err = io.EOF
|
||||
return byte(tEOF)
|
||||
}
|
||||
// Lex struggles with null bytes. If we are in a label value or help string, where
|
||||
// they are allowed, consume them here immediately.
|
||||
for l.b[l.i] == 0 && (l.state == sLValue || l.state == sMeta2 || l.state == sComment) {
|
||||
l.i++
|
||||
if l.i >= len(l.b) {
|
||||
l.err = io.EOF
|
||||
return byte(tEOF)
|
||||
}
|
||||
}
|
||||
return l.b[l.i]
|
||||
}
|
||||
|
||||
func (l *openMetricsLexer) Error(es string) {
|
||||
l.err = errors.New(es)
|
||||
}
|
||||
|
||||
// OpenMetricsParser parses samples from a byte slice of samples in the official
|
||||
// OpenMetrics text exposition format.
|
||||
// This is based on the working draft https://docs.google.com/document/u/1/d/1KwV0mAXwwbvvifBvDKH_LU1YjyXE_wxCkHNoCGq1GX0/edit
|
||||
type OpenMetricsParser struct {
|
||||
l *openMetricsLexer
|
||||
series []byte
|
||||
text []byte
|
||||
mtype MetricType
|
||||
val float64
|
||||
ts int64
|
||||
hasTS bool
|
||||
start int
|
||||
offsets []int
|
||||
}
|
||||
|
||||
// New returns a new parser of the byte slice.
|
||||
func NewOpenMetricsParser(b []byte) Parser {
|
||||
return &OpenMetricsParser{l: &openMetricsLexer{b: b}}
|
||||
}
|
||||
|
||||
// Series returns the bytes of the series, the timestamp if set, and the value
|
||||
// of the current sample.
|
||||
func (p *OpenMetricsParser) Series() ([]byte, *int64, float64) {
|
||||
if p.hasTS {
|
||||
return p.series, &p.ts, p.val
|
||||
}
|
||||
return p.series, nil, p.val
|
||||
}
|
||||
|
||||
// Help returns the metric name and help text in the current entry.
|
||||
// Must only be called after Next returned a help entry.
|
||||
// The returned byte slices become invalid after the next call to Next.
|
||||
func (p *OpenMetricsParser) Help() ([]byte, []byte) {
|
||||
m := p.l.b[p.offsets[0]:p.offsets[1]]
|
||||
|
||||
// Replacer causes allocations. Replace only when necessary.
|
||||
if strings.IndexByte(yoloString(p.text), byte('\\')) >= 0 {
|
||||
// OpenMetrics always uses the Prometheus format label value escaping.
|
||||
return m, []byte(lvalReplacer.Replace(string(p.text)))
|
||||
}
|
||||
return m, p.text
|
||||
}
|
||||
|
||||
// Type returns the metric name and type in the current entry.
|
||||
// Must only be called after Next returned a type entry.
|
||||
// The returned byte slices become invalid after the next call to Next.
|
||||
func (p *OpenMetricsParser) Type() ([]byte, MetricType) {
|
||||
return p.l.b[p.offsets[0]:p.offsets[1]], p.mtype
|
||||
}
|
||||
|
||||
// Unit returns the metric name and unit in the current entry.
|
||||
// Must only be called after Next returned a unit entry.
|
||||
// The returned byte slices become invalid after the next call to Next.
|
||||
func (p *OpenMetricsParser) Unit() ([]byte, []byte) {
|
||||
// The Prometheus format does not have units.
|
||||
return p.l.b[p.offsets[0]:p.offsets[1]], p.text
|
||||
}
|
||||
|
||||
// Comment returns the text of the current comment.
|
||||
// Must only be called after Next returned a comment entry.
|
||||
// The returned byte slice becomes invalid after the next call to Next.
|
||||
func (p *OpenMetricsParser) Comment() []byte {
|
||||
return p.text
|
||||
}
|
||||
|
||||
// Metric writes the labels of the current sample into the passed labels.
|
||||
// It returns the string from which the metric was parsed.
|
||||
func (p *OpenMetricsParser) Metric(l *labels.Labels) string {
|
||||
// Allocate the full immutable string immediately, so we just
|
||||
// have to create references on it below.
|
||||
s := string(p.series)
|
||||
|
||||
*l = append(*l, labels.Label{
|
||||
Name: labels.MetricName,
|
||||
Value: s[:p.offsets[0]-p.start],
|
||||
})
|
||||
|
||||
for i := 1; i < len(p.offsets); i += 4 {
|
||||
a := p.offsets[i] - p.start
|
||||
b := p.offsets[i+1] - p.start
|
||||
c := p.offsets[i+2] - p.start
|
||||
d := p.offsets[i+3] - p.start
|
||||
|
||||
// Replacer causes allocations. Replace only when necessary.
|
||||
if strings.IndexByte(s[c:d], byte('\\')) >= 0 {
|
||||
*l = append(*l, labels.Label{Name: s[a:b], Value: lvalReplacer.Replace(s[c:d])})
|
||||
continue
|
||||
}
|
||||
*l = append(*l, labels.Label{Name: s[a:b], Value: s[c:d]})
|
||||
}
|
||||
|
||||
// Sort labels. We can skip the first entry since the metric name is
|
||||
// already at the right place.
|
||||
sort.Sort((*l)[1:])
|
||||
|
||||
return s
|
||||
}
|
||||
|
||||
// nextToken returns the next token from the openMetricsLexer.
|
||||
func (p *OpenMetricsParser) nextToken() token {
|
||||
tok := p.l.Lex()
|
||||
return tok
|
||||
}
|
||||
|
||||
// Next advances the parser to the next sample. It returns false if no
|
||||
// more samples were read or an error occurred.
|
||||
func (p *OpenMetricsParser) Next() (Entry, error) {
|
||||
var err error
|
||||
|
||||
p.start = p.l.i
|
||||
p.offsets = p.offsets[:0]
|
||||
|
||||
switch t := p.nextToken(); t {
|
||||
case tEofWord:
|
||||
if t := p.nextToken(); t != tEOF {
|
||||
return EntryInvalid, fmt.Errorf("unexpected data after # EOF")
|
||||
}
|
||||
return EntryInvalid, io.EOF
|
||||
case tEOF:
|
||||
return EntryInvalid, parseError("unexpected end of data", t)
|
||||
case tHelp, tType, tUnit:
|
||||
switch t := p.nextToken(); t {
|
||||
case tMName:
|
||||
p.offsets = append(p.offsets, p.l.start, p.l.i)
|
||||
default:
|
||||
return EntryInvalid, parseError("expected metric name after HELP", t)
|
||||
}
|
||||
switch t := p.nextToken(); t {
|
||||
case tText:
|
||||
if len(p.l.buf()) > 1 {
|
||||
p.text = p.l.buf()[1 : len(p.l.buf())-1]
|
||||
} else {
|
||||
p.text = []byte{}
|
||||
}
|
||||
default:
|
||||
return EntryInvalid, parseError("expected text in HELP", t)
|
||||
}
|
||||
switch t {
|
||||
case tType:
|
||||
switch s := yoloString(p.text); s {
|
||||
case "counter":
|
||||
p.mtype = MetricTypeCounter
|
||||
case "gauge":
|
||||
p.mtype = MetricTypeGauge
|
||||
case "histogram":
|
||||
p.mtype = MetricTypeHistogram
|
||||
case "gaugehistogram":
|
||||
p.mtype = MetricTypeGaugeHistogram
|
||||
case "summary":
|
||||
p.mtype = MetricTypeSummary
|
||||
case "info":
|
||||
p.mtype = MetricTypeInfo
|
||||
case "stateset":
|
||||
p.mtype = MetricTypeStateset
|
||||
case "unknown":
|
||||
p.mtype = MetricTypeUnknown
|
||||
default:
|
||||
return EntryInvalid, fmt.Errorf("invalid metric type %q", s)
|
||||
}
|
||||
case tHelp:
|
||||
if !utf8.Valid(p.text) {
|
||||
return EntryInvalid, fmt.Errorf("help text is not a valid utf8 string")
|
||||
}
|
||||
}
|
||||
switch t {
|
||||
case tHelp:
|
||||
return EntryHelp, nil
|
||||
case tType:
|
||||
return EntryType, nil
|
||||
case tUnit:
|
||||
m := yoloString(p.l.b[p.offsets[0]:p.offsets[1]])
|
||||
u := yoloString(p.text)
|
||||
if len(u) > 0 {
|
||||
if !strings.HasSuffix(m, u) || len(m) < len(u)+1 || p.l.b[p.offsets[1]-len(u)-1] != '_' {
|
||||
return EntryInvalid, fmt.Errorf("unit not a suffix of metric %q", m)
|
||||
}
|
||||
}
|
||||
return EntryUnit, nil
|
||||
}
|
||||
|
||||
case tMName:
|
||||
p.offsets = append(p.offsets, p.l.i)
|
||||
p.series = p.l.b[p.start:p.l.i]
|
||||
|
||||
t2 := p.nextToken()
|
||||
if t2 == tBraceOpen {
|
||||
if err := p.parseLVals(); err != nil {
|
||||
return EntryInvalid, err
|
||||
}
|
||||
p.series = p.l.b[p.start:p.l.i]
|
||||
t2 = p.nextToken()
|
||||
}
|
||||
if t2 != tValue {
|
||||
return EntryInvalid, parseError("expected value after metric", t)
|
||||
}
|
||||
if p.val, err = strconv.ParseFloat(yoloString(p.l.buf()[1:]), 64); err != nil {
|
||||
return EntryInvalid, err
|
||||
}
|
||||
// Ensure canonical NaN value.
|
||||
if math.IsNaN(p.val) {
|
||||
p.val = math.Float64frombits(value.NormalNaN)
|
||||
}
|
||||
p.hasTS = false
|
||||
switch p.nextToken() {
|
||||
case tLinebreak:
|
||||
break
|
||||
case tTimestamp:
|
||||
p.hasTS = true
|
||||
var ts float64
|
||||
// A float is enough to hold what we need for millisecond resolution.
|
||||
if ts, err = strconv.ParseFloat(yoloString(p.l.buf()[1:]), 64); err != nil {
|
||||
return EntryInvalid, err
|
||||
}
|
||||
p.ts = int64(ts * 1000)
|
||||
if t2 := p.nextToken(); t2 != tLinebreak {
|
||||
return EntryInvalid, parseError("expected next entry after timestamp", t)
|
||||
}
|
||||
default:
|
||||
return EntryInvalid, parseError("expected timestamp or new record", t)
|
||||
}
|
||||
return EntrySeries, nil
|
||||
|
||||
default:
|
||||
err = fmt.Errorf("%q %q is not a valid start token", t, string(p.l.cur()))
|
||||
}
|
||||
return EntryInvalid, err
|
||||
}
|
||||
|
||||
func (p *OpenMetricsParser) parseLVals() error {
|
||||
first := true
|
||||
for {
|
||||
t := p.nextToken()
|
||||
switch t {
|
||||
case tBraceClose:
|
||||
return nil
|
||||
case tComma:
|
||||
if first {
|
||||
return parseError("expected label name or left brace", t)
|
||||
}
|
||||
t = p.nextToken()
|
||||
if t != tLName {
|
||||
return parseError("expected label name", t)
|
||||
}
|
||||
case tLName:
|
||||
if !first {
|
||||
return parseError("expected comma", t)
|
||||
}
|
||||
default:
|
||||
if first {
|
||||
return parseError("expected label name or left brace", t)
|
||||
}
|
||||
return parseError("expected comma or left brace", t)
|
||||
|
||||
}
|
||||
first = false
|
||||
// t is now a label name.
|
||||
|
||||
p.offsets = append(p.offsets, p.l.start, p.l.i)
|
||||
|
||||
if t := p.nextToken(); t != tEqual {
|
||||
return parseError("expected equal", t)
|
||||
}
|
||||
if t := p.nextToken(); t != tLValue {
|
||||
return parseError("expected label value", t)
|
||||
}
|
||||
if !utf8.Valid(p.l.buf()) {
|
||||
return fmt.Errorf("invalid UTF-8 label value")
|
||||
}
|
||||
|
||||
// The openMetricsLexer ensures the value string is quoted. Strip first
|
||||
// and last character.
|
||||
p.offsets = append(p.offsets, p.l.start+1, p.l.i-1)
|
||||
|
||||
}
|
||||
}
|
440
pkg/textparse/openmetricsparse_test.go
Normal file
440
pkg/textparse/openmetricsparse_test.go
Normal file
|
@ -0,0 +1,440 @@
|
|||
// Copyright 2017 The Prometheus Authors
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package textparse
|
||||
|
||||
import (
|
||||
"io"
|
||||
"testing"
|
||||
|
||||
"github.com/prometheus/prometheus/pkg/labels"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestOpenMetricsParse feeds a representative OpenMetrics exposition through
// the parser and checks every emitted entry (series, type, help, unit,
// comment) in order against an expected table.
func TestOpenMetricsParse(t *testing.T) {
	input := `# HELP go_gc_duration_seconds A summary of the GC invocation durations.
# TYPE go_gc_duration_seconds summary
# UNIT go_gc_duration_seconds seconds
go_gc_duration_seconds{quantile="0"} 4.9351e-05
go_gc_duration_seconds{quantile="0.25"} 7.424100000000001e-05
go_gc_duration_seconds{quantile="0.5",a="b"} 8.3835e-05
# HELP nohelp1
# HELP help2 escape \ \n \\ \" \x chars
# UNIT nounit
go_gc_duration_seconds{quantile="1.0",a="b"} 8.3835e-05
go_gc_duration_seconds_count 99
some:aggregate:rate5m{a_b="c"} 1
# HELP go_goroutines Number of goroutines that currently exist.
# TYPE go_goroutines gauge
go_goroutines 33 123.123
# TYPE hh histogram
hh_bucket{le="+Inf"} 1 # {} 4
# TYPE gh gaugehistogram
gh_bucket{le="+Inf"} 1 # {} 4
# TYPE ii info
ii{foo="bar"} 1
# TYPE ss stateset
ss{ss="foo"} 1
ss{ss="bar"} 0
# TYPE un unknown
_metric_starting_with_underscore 1
testmetric{_label_starting_with_underscore="foo"} 1
testmetric{label="\"bar\""} 1`

	// Null bytes cannot be written in a raw string literal, so the inputs
	// exercising \x00 handling are appended as interpreted strings.
	input += "\n# HELP metric foo\x00bar"
	input += "\nnull_byte_metric{a=\"abc\x00\"} 1"
	input += "\n# EOF\n"

	// int64p returns a pointer to x, used for expected timestamps.
	int64p := func(x int64) *int64 { return &x }

	// exp lists the entries the parser must produce, in order. Unset fields
	// are compared against their zero values.
	exp := []struct {
		lset    labels.Labels
		m       string
		t       *int64
		v       float64
		typ     MetricType
		help    string
		unit    string
		comment string
	}{
		{
			m:    "go_gc_duration_seconds",
			help: "A summary of the GC invocation durations.",
		}, {
			m:   "go_gc_duration_seconds",
			typ: MetricTypeSummary,
		}, {
			m:    "go_gc_duration_seconds",
			unit: "seconds",
		}, {
			m:    `go_gc_duration_seconds{quantile="0"}`,
			v:    4.9351e-05,
			lset: labels.FromStrings("__name__", "go_gc_duration_seconds", "quantile", "0"),
		}, {
			m:    `go_gc_duration_seconds{quantile="0.25"}`,
			v:    7.424100000000001e-05,
			lset: labels.FromStrings("__name__", "go_gc_duration_seconds", "quantile", "0.25"),
		}, {
			m:    `go_gc_duration_seconds{quantile="0.5",a="b"}`,
			v:    8.3835e-05,
			lset: labels.FromStrings("__name__", "go_gc_duration_seconds", "quantile", "0.5", "a", "b"),
		}, {
			m:    "nohelp1",
			help: "",
		}, {
			m:    "help2",
			help: "escape \\ \n \\ \" \\x chars",
		}, {
			m:    "nounit",
			unit: "",
		}, {
			m:    `go_gc_duration_seconds{quantile="1.0",a="b"}`,
			v:    8.3835e-05,
			lset: labels.FromStrings("__name__", "go_gc_duration_seconds", "quantile", "1.0", "a", "b"),
		}, {
			m:    `go_gc_duration_seconds_count`,
			v:    99,
			lset: labels.FromStrings("__name__", "go_gc_duration_seconds_count"),
		}, {
			m:    `some:aggregate:rate5m{a_b="c"}`,
			v:    1,
			lset: labels.FromStrings("__name__", "some:aggregate:rate5m", "a_b", "c"),
		}, {
			m:    "go_goroutines",
			help: "Number of goroutines that currently exist.",
		}, {
			m:   "go_goroutines",
			typ: MetricTypeGauge,
		}, {
			m:    `go_goroutines`,
			v:    33,
			t:    int64p(123123),
			lset: labels.FromStrings("__name__", "go_goroutines"),
		}, {
			m:   "hh",
			typ: MetricTypeHistogram,
		}, {
			m:    `hh_bucket{le="+Inf"}`,
			v:    1,
			lset: labels.FromStrings("__name__", "hh_bucket", "le", "+Inf"),
		}, {
			m:   "gh",
			typ: MetricTypeGaugeHistogram,
		}, {
			m:    `gh_bucket{le="+Inf"}`,
			v:    1,
			lset: labels.FromStrings("__name__", "gh_bucket", "le", "+Inf"),
		}, {
			m:   "ii",
			typ: MetricTypeInfo,
		}, {
			m:    `ii{foo="bar"}`,
			v:    1,
			lset: labels.FromStrings("__name__", "ii", "foo", "bar"),
		}, {
			m:   "ss",
			typ: MetricTypeStateset,
		}, {
			m:    `ss{ss="foo"}`,
			v:    1,
			lset: labels.FromStrings("__name__", "ss", "ss", "foo"),
		}, {
			m:    `ss{ss="bar"}`,
			v:    0,
			lset: labels.FromStrings("__name__", "ss", "ss", "bar"),
		}, {
			m:   "un",
			typ: MetricTypeUnknown,
		}, {
			m:    "_metric_starting_with_underscore",
			v:    1,
			lset: labels.FromStrings("__name__", "_metric_starting_with_underscore"),
		}, {
			m:    "testmetric{_label_starting_with_underscore=\"foo\"}",
			v:    1,
			lset: labels.FromStrings("__name__", "testmetric", "_label_starting_with_underscore", "foo"),
		}, {
			m:    "testmetric{label=\"\\\"bar\\\"\"}",
			v:    1,
			lset: labels.FromStrings("__name__", "testmetric", "label", `"bar"`),
		}, {
			m:    "metric",
			help: "foo\x00bar",
		}, {
			m:    "null_byte_metric{a=\"abc\x00\"}",
			v:    1,
			lset: labels.FromStrings("__name__", "null_byte_metric", "a", "abc\x00"),
		},
	}

	p := NewOpenMetricsParser([]byte(input))
	i := 0 // index into exp, advanced once per parsed entry

	var res labels.Labels

	for {
		et, err := p.Next()
		if err == io.EOF {
			break
		}
		require.NoError(t, err)

		// Compare the entry against exp[i], dispatching on its kind.
		switch et {
		case EntrySeries:
			m, ts, v := p.Series()

			p.Metric(&res)

			require.Equal(t, exp[i].m, string(m))
			require.Equal(t, exp[i].t, ts)
			require.Equal(t, exp[i].v, v)
			require.Equal(t, exp[i].lset, res)
			// Reset res so its backing array is reused for the next series.
			res = res[:0]

		case EntryType:
			m, typ := p.Type()
			require.Equal(t, exp[i].m, string(m))
			require.Equal(t, exp[i].typ, typ)

		case EntryHelp:
			m, h := p.Help()
			require.Equal(t, exp[i].m, string(m))
			require.Equal(t, exp[i].help, string(h))

		case EntryUnit:
			m, u := p.Unit()
			require.Equal(t, exp[i].m, string(m))
			require.Equal(t, exp[i].unit, string(u))

		case EntryComment:
			require.Equal(t, exp[i].comment, string(p.Comment()))
		}

		i++
	}
	// Every expected entry must have been consumed exactly once.
	require.Equal(t, len(exp), i)
}
|
||||
|
||||
func TestOpenMetricsParseErrors(t *testing.T) {
|
||||
cases := []struct {
|
||||
input string
|
||||
err string
|
||||
}{
|
||||
{
|
||||
input: "",
|
||||
err: "unexpected end of data, got \"EOF\"",
|
||||
},
|
||||
{
|
||||
input: "a",
|
||||
err: "expected value after metric, got \"MNAME\"",
|
||||
},
|
||||
{
|
||||
input: "\n",
|
||||
err: "\"INVALID\" \"\\n\" is not a valid start token",
|
||||
},
|
||||
{
|
||||
input: " a 1\n",
|
||||
err: "\"INVALID\" \" \" is not a valid start token",
|
||||
},
|
||||
{
|
||||
input: "9\n",
|
||||
err: "\"INVALID\" \"9\" is not a valid start token",
|
||||
},
|
||||
{
|
||||
input: "# TYPE u untyped\n",
|
||||
err: "invalid metric type \"untyped\"",
|
||||
},
|
||||
{
|
||||
input: "# TYPE c counter \n",
|
||||
err: "invalid metric type \"counter \"",
|
||||
},
|
||||
{
|
||||
input: "# TYPE c counter\n",
|
||||
err: "\"INVALID\" \" \" is not a valid start token",
|
||||
},
|
||||
{
|
||||
input: "# UNIT metric suffix\n",
|
||||
err: "unit not a suffix of metric \"metric\"",
|
||||
},
|
||||
{
|
||||
input: "# UNIT metricsuffix suffix\n",
|
||||
err: "unit not a suffix of metric \"metricsuffix\"",
|
||||
},
|
||||
{
|
||||
input: "# UNIT m suffix\n",
|
||||
err: "unit not a suffix of metric \"m\"",
|
||||
},
|
||||
{
|
||||
input: "# HELP m\n",
|
||||
err: "expected text in HELP, got \"INVALID\"",
|
||||
},
|
||||
{
|
||||
input: "a\t1\n",
|
||||
err: "expected value after metric, got \"MNAME\"",
|
||||
},
|
||||
{
|
||||
input: "a 1\t2\n",
|
||||
err: "strconv.ParseFloat: parsing \"1\\t2\": invalid syntax",
|
||||
},
|
||||
{
|
||||
input: "a 1 2 \n",
|
||||
err: "expected next entry after timestamp, got \"MNAME\"",
|
||||
},
|
||||
{
|
||||
input: "a 1 2 #\n",
|
||||
err: "expected next entry after timestamp, got \"MNAME\"",
|
||||
},
|
||||
{
|
||||
input: "a 1 1z\n",
|
||||
err: "strconv.ParseFloat: parsing \"1z\": invalid syntax",
|
||||
},
|
||||
{
|
||||
input: " # EOF\n",
|
||||
err: "\"INVALID\" \" \" is not a valid start token",
|
||||
},
|
||||
{
|
||||
input: "# EOF\na 1",
|
||||
err: "unexpected data after # EOF",
|
||||
},
|
||||
{
|
||||
input: "# EOF\n\n",
|
||||
err: "unexpected data after # EOF",
|
||||
},
|
||||
{
|
||||
input: "# EOFa 1",
|
||||
err: "unexpected data after # EOF",
|
||||
},
|
||||
{
|
||||
input: "#\tTYPE c counter\n",
|
||||
err: "\"INVALID\" \"\\t\" is not a valid start token",
|
||||
},
|
||||
{
|
||||
input: "# TYPE c counter\n",
|
||||
err: "invalid metric type \" counter\"",
|
||||
},
|
||||
{
|
||||
input: "a 1 1 1\n",
|
||||
err: "expected next entry after timestamp, got \"MNAME\"",
|
||||
},
|
||||
{
|
||||
input: "a{b='c'} 1\n",
|
||||
err: "expected label value, got \"INVALID\"",
|
||||
},
|
||||
{
|
||||
input: "a{b=\"c\",} 1\n",
|
||||
err: "expected label name, got \"BCLOSE\"",
|
||||
},
|
||||
{
|
||||
input: "a{,b=\"c\"} 1\n",
|
||||
err: "expected label name or left brace, got \"COMMA\"",
|
||||
},
|
||||
{
|
||||
input: "a{b=\"c\"d=\"e\"} 1\n",
|
||||
err: "expected comma, got \"LNAME\"",
|
||||
},
|
||||
{
|
||||
input: "a{b=\"c\",,d=\"e\"} 1\n",
|
||||
err: "expected label name, got \"COMMA\"",
|
||||
},
|
||||
{
|
||||
input: "a{b=\n",
|
||||
err: "expected label value, got \"INVALID\"",
|
||||
},
|
||||
{
|
||||
input: "a{\xff=\"foo\"} 1\n",
|
||||
err: "expected label name or left brace, got \"INVALID\"",
|
||||
},
|
||||
{
|
||||
input: "a{b=\"\xff\"} 1\n",
|
||||
err: "invalid UTF-8 label value",
|
||||
},
|
||||
{
|
||||
input: "a true\n",
|
||||
err: "strconv.ParseFloat: parsing \"true\": invalid syntax",
|
||||
},
|
||||
{
|
||||
input: "something_weird{problem=\"",
|
||||
err: "expected label value, got \"INVALID\"",
|
||||
},
|
||||
{
|
||||
input: "empty_label_name{=\"\"} 0",
|
||||
err: "expected label name or left brace, got \"EQUAL\"",
|
||||
},
|
||||
}
|
||||
|
||||
for i, c := range cases {
|
||||
p := NewOpenMetricsParser([]byte(c.input))
|
||||
var err error
|
||||
for err == nil {
|
||||
_, err = p.Next()
|
||||
}
|
||||
require.NotNil(t, err)
|
||||
require.Equal(t, c.err, err.Error(), "test %d", i)
|
||||
}
|
||||
}
|
||||
|
||||
func TestOMNullByteHandling(t *testing.T) {
|
||||
cases := []struct {
|
||||
input string
|
||||
err string
|
||||
}{
|
||||
{
|
||||
input: "null_byte_metric{a=\"abc\x00\"} 1\n# EOF\n",
|
||||
err: "",
|
||||
},
|
||||
{
|
||||
input: "a{b=\"\x00ss\"} 1\n# EOF\n",
|
||||
err: "",
|
||||
},
|
||||
{
|
||||
input: "a{b=\"\x00\"} 1\n# EOF\n",
|
||||
err: "",
|
||||
},
|
||||
{
|
||||
input: "a{b=\"\x00\"} 1\n# EOF",
|
||||
err: "",
|
||||
},
|
||||
{
|
||||
input: "a{b=\x00\"ssss\"} 1\n# EOF\n",
|
||||
err: "expected label value, got \"INVALID\"",
|
||||
},
|
||||
{
|
||||
input: "a{b=\"\x00",
|
||||
err: "expected label value, got \"INVALID\"",
|
||||
},
|
||||
{
|
||||
input: "a{b\x00=\"hiih\"} 1",
|
||||
err: "expected equal, got \"INVALID\"",
|
||||
},
|
||||
{
|
||||
input: "a\x00{b=\"ddd\"} 1",
|
||||
err: "expected value after metric, got \"MNAME\"",
|
||||
},
|
||||
}
|
||||
|
||||
for i, c := range cases {
|
||||
p := NewOpenMetricsParser([]byte(c.input))
|
||||
var err error
|
||||
for err == nil {
|
||||
_, err = p.Next()
|
||||
}
|
||||
|
||||
if c.err == "" {
|
||||
require.Equal(t, io.EOF, err, "test %d", i)
|
||||
continue
|
||||
}
|
||||
|
||||
require.Error(t, err)
|
||||
require.Equal(t, c.err, err.Error(), "test %d", i)
|
||||
}
|
||||
}
|
|
@ -32,7 +32,7 @@ const (
|
|||
// Lex is called by the parser generated by "go tool yacc" to obtain each
|
||||
// token. The method is opened before the matching rules block and closed at
|
||||
// the end of the file.
|
||||
func (l *lexer) Lex() token {
|
||||
func (l *promlexer) Lex() token {
|
||||
if l.i >= len(l.b) {
|
||||
return tEOF
|
||||
}
|
||||
|
@ -87,7 +87,7 @@ C [^\n]
|
|||
return tInvalid
|
||||
}
|
||||
|
||||
func (l *lexer) consumeComment() token {
|
||||
func (l *promlexer) consumeComment() token {
|
||||
for c := l.cur(); ; c = l.next() {
|
||||
switch c {
|
||||
case 0:
|
|
@ -33,7 +33,7 @@ const (
|
|||
// Lex is called by the parser generated by "go tool yacc" to obtain each
|
||||
// token. The method is opened before the matching rules block and closed at
|
||||
// the end of the file.
|
||||
func (l *lexer) Lex() token {
|
||||
func (l *promlexer) Lex() token {
|
||||
if l.i >= len(l.b) {
|
||||
return tEOF
|
||||
}
|
||||
|
@ -537,7 +537,7 @@ yyabort: // no lexem recognized
|
|||
return tInvalid
|
||||
}
|
||||
|
||||
func (l *lexer) consumeComment() token {
|
||||
func (l *promlexer) consumeComment() token {
|
||||
for c := l.cur(); ; c = l.next() {
|
||||
switch c {
|
||||
case 0:
|
|
@ -12,9 +12,8 @@
|
|||
// limitations under the License.
|
||||
|
||||
//go:generate go get github.com/cznic/golex
|
||||
//go:generate golex -o=lex.l.go lex.l
|
||||
//go:generate golex -o=promlex.l.go promlex.l
|
||||
|
||||
// Package textparse contains an efficient parser for the Prometheus text format.
|
||||
package textparse
|
||||
|
||||
import (
|
||||
|
@ -32,7 +31,7 @@ import (
|
|||
"github.com/prometheus/prometheus/pkg/value"
|
||||
)
|
||||
|
||||
type lexer struct {
|
||||
type promlexer struct {
|
||||
b []byte
|
||||
i int
|
||||
start int
|
||||
|
@ -49,6 +48,8 @@ const (
|
|||
tWhitespace
|
||||
tHelp
|
||||
tType
|
||||
tUnit
|
||||
tEofWord
|
||||
tText
|
||||
tComment
|
||||
tBlank
|
||||
|
@ -77,6 +78,10 @@ func (t token) String() string {
|
|||
return "HELP"
|
||||
case tType:
|
||||
return "TYPE"
|
||||
case tUnit:
|
||||
return "UNIT"
|
||||
case tEofWord:
|
||||
return "EOFWORD"
|
||||
case tText:
|
||||
return "TEXT"
|
||||
case tComment:
|
||||
|
@ -106,16 +111,16 @@ func (t token) String() string {
|
|||
}
|
||||
|
||||
// buf returns the buffer of the current token.
|
||||
func (l *lexer) buf() []byte {
|
||||
func (l *promlexer) buf() []byte {
|
||||
return l.b[l.start:l.i]
|
||||
}
|
||||
|
||||
func (l *lexer) cur() byte {
|
||||
func (l *promlexer) cur() byte {
|
||||
return l.b[l.i]
|
||||
}
|
||||
|
||||
// next advances the lexer to the next character.
|
||||
func (l *lexer) next() byte {
|
||||
// next advances the promlexer to the next character.
|
||||
func (l *promlexer) next() byte {
|
||||
l.i++
|
||||
if l.i >= len(l.b) {
|
||||
l.err = io.EOF
|
||||
|
@ -129,14 +134,14 @@ func (l *lexer) next() byte {
|
|||
return l.b[l.i]
|
||||
}
|
||||
|
||||
func (l *lexer) Error(es string) {
|
||||
func (l *promlexer) Error(es string) {
|
||||
l.err = errors.New(es)
|
||||
}
|
||||
|
||||
// Parser parses samples from a byte slice of samples in the official
|
||||
// PromParser parses samples from a byte slice of samples in the official
|
||||
// Prometheus text exposition format.
|
||||
type Parser struct {
|
||||
l *lexer
|
||||
type PromParser struct {
|
||||
l *promlexer
|
||||
series []byte
|
||||
text []byte
|
||||
mtype MetricType
|
||||
|
@ -148,13 +153,13 @@ type Parser struct {
|
|||
}
|
||||
|
||||
// New returns a new parser of the byte slice.
|
||||
func New(b []byte) *Parser {
|
||||
return &Parser{l: &lexer{b: append(b, '\n')}}
|
||||
func NewPromParser(b []byte) Parser {
|
||||
return &PromParser{l: &promlexer{b: append(b, '\n')}}
|
||||
}
|
||||
|
||||
// Series returns the bytes of the series, the timestamp if set, and the value
|
||||
// of the current sample.
|
||||
func (p *Parser) Series() ([]byte, *int64, float64) {
|
||||
func (p *PromParser) Series() ([]byte, *int64, float64) {
|
||||
if p.hasTS {
|
||||
return p.series, &p.ts, p.val
|
||||
}
|
||||
|
@ -164,7 +169,7 @@ func (p *Parser) Series() ([]byte, *int64, float64) {
|
|||
// Help returns the metric name and help text in the current entry.
|
||||
// Must only be called after Next returned a help entry.
|
||||
// The returned byte slices become invalid after the next call to Next.
|
||||
func (p *Parser) Help() ([]byte, []byte) {
|
||||
func (p *PromParser) Help() ([]byte, []byte) {
|
||||
m := p.l.b[p.offsets[0]:p.offsets[1]]
|
||||
|
||||
// Replacer causes allocations. Replace only when necessary.
|
||||
|
@ -177,20 +182,28 @@ func (p *Parser) Help() ([]byte, []byte) {
|
|||
// Type returns the metric name and type in the current entry.
|
||||
// Must only be called after Next returned a type entry.
|
||||
// The returned byte slices become invalid after the next call to Next.
|
||||
func (p *Parser) Type() ([]byte, MetricType) {
|
||||
func (p *PromParser) Type() ([]byte, MetricType) {
|
||||
return p.l.b[p.offsets[0]:p.offsets[1]], p.mtype
|
||||
}
|
||||
|
||||
// Unit returns the metric name and unit in the current entry.
|
||||
// Must only be called after Next returned a unit entry.
|
||||
// The returned byte slices become invalid after the next call to Next.
|
||||
func (p *PromParser) Unit() ([]byte, []byte) {
|
||||
// The Prometheus format does not have units.
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// Comment returns the text of the current comment.
|
||||
// Must only be called after Next returned a comment entry.
|
||||
// The returned byte slice becomes invalid after the next call to Next.
|
||||
func (p *Parser) Comment() []byte {
|
||||
func (p *PromParser) Comment() []byte {
|
||||
return p.text
|
||||
}
|
||||
|
||||
// Metric writes the labels of the current sample into the passed labels.
|
||||
// It returns the string from which the metric was parsed.
|
||||
func (p *Parser) Metric(l *labels.Labels) string {
|
||||
func (p *PromParser) Metric(l *labels.Labels) string {
|
||||
// Allocate the full immutable string immediately, so we just
|
||||
// have to create references on it below.
|
||||
s := string(p.series)
|
||||
|
@ -221,9 +234,9 @@ func (p *Parser) Metric(l *labels.Labels) string {
|
|||
return s
|
||||
}
|
||||
|
||||
// nextToken returns the next token from the lexer. It skips over tabs
|
||||
// nextToken returns the next token from the promlexer. It skips over tabs
|
||||
// and spaces.
|
||||
func (p *Parser) nextToken() token {
|
||||
func (p *PromParser) nextToken() token {
|
||||
for {
|
||||
if tok := p.l.Lex(); tok != tWhitespace {
|
||||
return tok
|
||||
|
@ -231,35 +244,13 @@ func (p *Parser) nextToken() token {
|
|||
}
|
||||
}
|
||||
|
||||
// Entry represents the type of a parsed entry.
|
||||
type Entry int
|
||||
|
||||
const (
|
||||
EntryInvalid Entry = -1
|
||||
EntryType Entry = 0
|
||||
EntryHelp Entry = 1
|
||||
EntrySeries Entry = 2
|
||||
EntryComment Entry = 3
|
||||
)
|
||||
|
||||
// MetricType represents metric type values.
|
||||
type MetricType string
|
||||
|
||||
const (
|
||||
MetricTypeCounter = "counter"
|
||||
MetricTypeGauge = "gauge"
|
||||
MetricTypeHistogram = "histogram"
|
||||
MetricTypeSummary = "summary"
|
||||
MetricTypeUntyped = "untyped"
|
||||
)
|
||||
|
||||
func parseError(exp string, got token) error {
|
||||
return fmt.Errorf("%s, got %q", exp, got)
|
||||
}
|
||||
|
||||
// Next advances the parser to the next sample. It returns false if no
|
||||
// more samples were read or an error occurred.
|
||||
func (p *Parser) Next() (Entry, error) {
|
||||
func (p *PromParser) Next() (Entry, error) {
|
||||
var err error
|
||||
|
||||
p.start = p.l.i
|
||||
|
@ -301,7 +292,7 @@ func (p *Parser) Next() (Entry, error) {
|
|||
case "summary":
|
||||
p.mtype = MetricTypeSummary
|
||||
case "untyped":
|
||||
p.mtype = MetricTypeUntyped
|
||||
p.mtype = MetricTypeUnknown
|
||||
default:
|
||||
return EntryInvalid, fmt.Errorf("invalid metric type %q", s)
|
||||
}
|
||||
|
@ -371,7 +362,7 @@ func (p *Parser) Next() (Entry, error) {
|
|||
return EntryInvalid, err
|
||||
}
|
||||
|
||||
func (p *Parser) parseLVals() error {
|
||||
func (p *PromParser) parseLVals() error {
|
||||
t := p.nextToken()
|
||||
for {
|
||||
switch t {
|
||||
|
@ -393,7 +384,7 @@ func (p *Parser) parseLVals() error {
|
|||
return fmt.Errorf("invalid UTF-8 label value")
|
||||
}
|
||||
|
||||
// The lexer ensures the value string is quoted. Strip first
|
||||
// The promlexer ensures the value string is quoted. Strip first
|
||||
// and last character.
|
||||
p.offsets = append(p.offsets, p.l.start+1, p.l.i-1)
|
||||
|
|
@ -27,7 +27,7 @@ import (
|
|||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestParse(t *testing.T) {
|
||||
func TestPromParse(t *testing.T) {
|
||||
input := `# HELP go_gc_duration_seconds A summary of the GC invocation durations.
|
||||
# TYPE go_gc_duration_seconds summary
|
||||
go_gc_duration_seconds{quantile="0"} 4.9351e-05
|
||||
|
@ -164,7 +164,7 @@ testmetric{label="\"bar\""} 1`
|
|||
},
|
||||
}
|
||||
|
||||
p := New([]byte(input))
|
||||
p := NewPromParser([]byte(input))
|
||||
i := 0
|
||||
|
||||
var res labels.Labels
|
||||
|
@ -207,7 +207,7 @@ testmetric{label="\"bar\""} 1`
|
|||
require.Equal(t, len(exp), i)
|
||||
}
|
||||
|
||||
func TestParseErrors(t *testing.T) {
|
||||
func TestPromParseErrors(t *testing.T) {
|
||||
cases := []struct {
|
||||
input string
|
||||
err string
|
||||
|
@ -247,7 +247,7 @@ func TestParseErrors(t *testing.T) {
|
|||
}
|
||||
|
||||
for i, c := range cases {
|
||||
p := New([]byte(c.input))
|
||||
p := NewPromParser([]byte(c.input))
|
||||
var err error
|
||||
for err == nil {
|
||||
_, err = p.Next()
|
||||
|
@ -257,7 +257,7 @@ func TestParseErrors(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestNullByteHandling(t *testing.T) {
|
||||
func TestPromNullByteHandling(t *testing.T) {
|
||||
cases := []struct {
|
||||
input string
|
||||
err string
|
||||
|
@ -297,7 +297,7 @@ func TestNullByteHandling(t *testing.T) {
|
|||
}
|
||||
|
||||
for i, c := range cases {
|
||||
p := New([]byte(c.input))
|
||||
p := NewPromParser([]byte(c.input))
|
||||
var err error
|
||||
for err == nil {
|
||||
_, err = p.Next()
|
||||
|
@ -314,143 +314,149 @@ func TestNullByteHandling(t *testing.T) {
|
|||
}
|
||||
|
||||
const (
|
||||
testdataSampleCount = 410
|
||||
promtestdataSampleCount = 410
|
||||
)
|
||||
|
||||
func BenchmarkParse(b *testing.B) {
|
||||
for _, fn := range []string{"testdata.txt", "testdata.nometa.txt"} {
|
||||
f, err := os.Open(fn)
|
||||
require.NoError(b, err)
|
||||
defer f.Close()
|
||||
for parserName, parser := range map[string]func([]byte) Parser{
|
||||
"prometheus": NewPromParser,
|
||||
"openmetrics": NewOpenMetricsParser,
|
||||
} {
|
||||
|
||||
buf, err := ioutil.ReadAll(f)
|
||||
require.NoError(b, err)
|
||||
for _, fn := range []string{"promtestdata.txt", "promtestdata.nometa.txt"} {
|
||||
f, err := os.Open(fn)
|
||||
require.NoError(b, err)
|
||||
defer f.Close()
|
||||
|
||||
b.Run("no-decode-metric/"+fn, func(b *testing.B) {
|
||||
total := 0
|
||||
buf, err := ioutil.ReadAll(f)
|
||||
require.NoError(b, err)
|
||||
|
||||
b.SetBytes(int64(len(buf) * (b.N / testdataSampleCount)))
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
b.Run(parserName+"/no-decode-metric/"+fn, func(b *testing.B) {
|
||||
total := 0
|
||||
|
||||
for i := 0; i < b.N; i += testdataSampleCount {
|
||||
p := New(buf)
|
||||
b.SetBytes(int64(len(buf) * (b.N / promtestdataSampleCount)))
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
|
||||
Outer:
|
||||
for i < b.N {
|
||||
t, err := p.Next()
|
||||
switch t {
|
||||
case EntryInvalid:
|
||||
if err == io.EOF {
|
||||
break Outer
|
||||
for i := 0; i < b.N; i += promtestdataSampleCount {
|
||||
p := parser(buf)
|
||||
|
||||
Outer:
|
||||
for i < b.N {
|
||||
t, err := p.Next()
|
||||
switch t {
|
||||
case EntryInvalid:
|
||||
if err == io.EOF {
|
||||
break Outer
|
||||
}
|
||||
b.Fatal(err)
|
||||
case EntrySeries:
|
||||
m, _, _ := p.Series()
|
||||
total += len(m)
|
||||
i++
|
||||
}
|
||||
b.Fatal(err)
|
||||
case EntrySeries:
|
||||
m, _, _ := p.Series()
|
||||
total += len(m)
|
||||
i++
|
||||
}
|
||||
}
|
||||
}
|
||||
_ = total
|
||||
})
|
||||
b.Run("decode-metric/"+fn, func(b *testing.B) {
|
||||
total := 0
|
||||
_ = total
|
||||
})
|
||||
b.Run(parserName+"/decode-metric/"+fn, func(b *testing.B) {
|
||||
total := 0
|
||||
|
||||
b.SetBytes(int64(len(buf) * (b.N / testdataSampleCount)))
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
b.SetBytes(int64(len(buf) * (b.N / promtestdataSampleCount)))
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
|
||||
for i := 0; i < b.N; i += testdataSampleCount {
|
||||
p := New(buf)
|
||||
for i := 0; i < b.N; i += promtestdataSampleCount {
|
||||
p := parser(buf)
|
||||
|
||||
Outer:
|
||||
for i < b.N {
|
||||
t, err := p.Next()
|
||||
switch t {
|
||||
case EntryInvalid:
|
||||
if err == io.EOF {
|
||||
break Outer
|
||||
Outer:
|
||||
for i < b.N {
|
||||
t, err := p.Next()
|
||||
switch t {
|
||||
case EntryInvalid:
|
||||
if err == io.EOF {
|
||||
break Outer
|
||||
}
|
||||
b.Fatal(err)
|
||||
case EntrySeries:
|
||||
m, _, _ := p.Series()
|
||||
|
||||
res := make(labels.Labels, 0, 5)
|
||||
p.Metric(&res)
|
||||
|
||||
total += len(m)
|
||||
i++
|
||||
}
|
||||
b.Fatal(err)
|
||||
case EntrySeries:
|
||||
m, _, _ := p.Series()
|
||||
|
||||
res := make(labels.Labels, 0, 5)
|
||||
p.Metric(&res)
|
||||
|
||||
total += len(m)
|
||||
i++
|
||||
}
|
||||
}
|
||||
}
|
||||
_ = total
|
||||
})
|
||||
b.Run("decode-metric-reuse/"+fn, func(b *testing.B) {
|
||||
total := 0
|
||||
res := make(labels.Labels, 0, 5)
|
||||
_ = total
|
||||
})
|
||||
b.Run(parserName+"/decode-metric-reuse/"+fn, func(b *testing.B) {
|
||||
total := 0
|
||||
res := make(labels.Labels, 0, 5)
|
||||
|
||||
b.SetBytes(int64(len(buf) * (b.N / testdataSampleCount)))
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
b.SetBytes(int64(len(buf) * (b.N / promtestdataSampleCount)))
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
|
||||
for i := 0; i < b.N; i += testdataSampleCount {
|
||||
p := New(buf)
|
||||
for i := 0; i < b.N; i += promtestdataSampleCount {
|
||||
p := parser(buf)
|
||||
|
||||
Outer:
|
||||
for i < b.N {
|
||||
t, err := p.Next()
|
||||
switch t {
|
||||
case EntryInvalid:
|
||||
if err == io.EOF {
|
||||
break Outer
|
||||
Outer:
|
||||
for i < b.N {
|
||||
t, err := p.Next()
|
||||
switch t {
|
||||
case EntryInvalid:
|
||||
if err == io.EOF {
|
||||
break Outer
|
||||
}
|
||||
b.Fatal(err)
|
||||
case EntrySeries:
|
||||
m, _, _ := p.Series()
|
||||
|
||||
p.Metric(&res)
|
||||
|
||||
total += len(m)
|
||||
i++
|
||||
res = res[:0]
|
||||
}
|
||||
b.Fatal(err)
|
||||
case EntrySeries:
|
||||
m, _, _ := p.Series()
|
||||
|
||||
p.Metric(&res)
|
||||
|
||||
total += len(m)
|
||||
i++
|
||||
res = res[:0]
|
||||
}
|
||||
}
|
||||
}
|
||||
_ = total
|
||||
})
|
||||
b.Run("expfmt-text/"+fn, func(b *testing.B) {
|
||||
b.SetBytes(int64(len(buf) * (b.N / testdataSampleCount)))
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
_ = total
|
||||
})
|
||||
b.Run("expfmt-text/"+fn, func(b *testing.B) {
|
||||
b.SetBytes(int64(len(buf) * (b.N / promtestdataSampleCount)))
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
|
||||
total := 0
|
||||
total := 0
|
||||
|
||||
for i := 0; i < b.N; i += testdataSampleCount {
|
||||
var (
|
||||
decSamples = make(model.Vector, 0, 50)
|
||||
)
|
||||
sdec := expfmt.SampleDecoder{
|
||||
Dec: expfmt.NewDecoder(bytes.NewReader(buf), expfmt.FmtText),
|
||||
Opts: &expfmt.DecodeOptions{
|
||||
Timestamp: model.TimeFromUnixNano(0),
|
||||
},
|
||||
}
|
||||
|
||||
for {
|
||||
if err = sdec.Decode(&decSamples); err != nil {
|
||||
break
|
||||
for i := 0; i < b.N; i += promtestdataSampleCount {
|
||||
var (
|
||||
decSamples = make(model.Vector, 0, 50)
|
||||
)
|
||||
sdec := expfmt.SampleDecoder{
|
||||
Dec: expfmt.NewDecoder(bytes.NewReader(buf), expfmt.FmtText),
|
||||
Opts: &expfmt.DecodeOptions{
|
||||
Timestamp: model.TimeFromUnixNano(0),
|
||||
},
|
||||
}
|
||||
|
||||
for {
|
||||
if err = sdec.Decode(&decSamples); err != nil {
|
||||
break
|
||||
}
|
||||
total += len(decSamples)
|
||||
decSamples = decSamples[:0]
|
||||
}
|
||||
total += len(decSamples)
|
||||
decSamples = decSamples[:0]
|
||||
}
|
||||
}
|
||||
_ = total
|
||||
})
|
||||
_ = total
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
func BenchmarkGzip(b *testing.B) {
|
||||
for _, fn := range []string{"testdata.txt", "testdata.nometa.txt"} {
|
||||
for _, fn := range []string{"promtestdata.txt", "promtestdata.nometa.txt"} {
|
||||
b.Run(fn, func(b *testing.B) {
|
||||
f, err := os.Open(fn)
|
||||
require.NoError(b, err)
|
||||
|
@ -466,7 +472,7 @@ func BenchmarkGzip(b *testing.B) {
|
|||
gbuf, err := ioutil.ReadAll(&buf)
|
||||
require.NoError(b, err)
|
||||
|
||||
k := b.N / testdataSampleCount
|
||||
k := b.N / promtestdataSampleCount
|
||||
|
||||
b.ReportAllocs()
|
||||
b.SetBytes(int64(k) * int64(n))
|
|
@ -408,3 +408,4 @@ prometheus_target_sync_length_seconds_sum{scrape_job="prometheus"} 0.00020043300
|
|||
prometheus_target_sync_length_seconds_count{scrape_job="prometheus"} 1
|
||||
prometheus_treecache_watcher_goroutines 0
|
||||
prometheus_treecache_zookeeper_failures_total 0
|
||||
# EOF
|
|
@ -525,4 +525,5 @@ prometheus_target_sync_length_seconds_count{scrape_job="prometheus"} 1
|
|||
prometheus_treecache_watcher_goroutines 0
|
||||
# HELP prometheus_treecache_zookeeper_failures_total The total number of ZooKeeper failures.
|
||||
# TYPE prometheus_treecache_zookeeper_failures_total counter
|
||||
prometheus_treecache_zookeeper_failures_total 0
|
||||
prometheus_treecache_zookeeper_failures_total 0
|
||||
# EOF
|
|
@ -434,7 +434,7 @@ func appender(app storage.Appender, limit int) storage.Appender {
|
|||
|
||||
// A scraper retrieves samples and accepts a status report at the end.
|
||||
type scraper interface {
|
||||
scrape(ctx context.Context, w io.Writer) error
|
||||
scrape(ctx context.Context, w io.Writer) (string, error)
|
||||
report(start time.Time, dur time.Duration, err error)
|
||||
offset(interval time.Duration) time.Duration
|
||||
}
|
||||
|
@ -451,15 +451,15 @@ type targetScraper struct {
|
|||
buf *bufio.Reader
|
||||
}
|
||||
|
||||
const acceptHeader = `text/plain;version=0.0.4;q=1,*/*;q=0.1`
|
||||
const acceptHeader = `application/openmetrics-text; version=0.0.1,text/plain;version=0.0.4;q=0.5,*/*;q=0.1`
|
||||
|
||||
var userAgentHeader = fmt.Sprintf("Prometheus/%s", version.Version)
|
||||
|
||||
func (s *targetScraper) scrape(ctx context.Context, w io.Writer) error {
|
||||
func (s *targetScraper) scrape(ctx context.Context, w io.Writer) (string, error) {
|
||||
if s.req == nil {
|
||||
req, err := http.NewRequest("GET", s.URL().String(), nil)
|
||||
if err != nil {
|
||||
return err
|
||||
return "", err
|
||||
}
|
||||
req.Header.Add("Accept", acceptHeader)
|
||||
req.Header.Add("Accept-Encoding", "gzip")
|
||||
|
@ -471,35 +471,38 @@ func (s *targetScraper) scrape(ctx context.Context, w io.Writer) error {
|
|||
|
||||
resp, err := ctxhttp.Do(ctx, s.client, s.req)
|
||||
if err != nil {
|
||||
return err
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("server returned HTTP status %s", resp.Status)
|
||||
return "", fmt.Errorf("server returned HTTP status %s", resp.Status)
|
||||
}
|
||||
|
||||
if resp.Header.Get("Content-Encoding") != "gzip" {
|
||||
_, err = io.Copy(w, resp.Body)
|
||||
return err
|
||||
return "", err
|
||||
}
|
||||
|
||||
if s.gzipr == nil {
|
||||
s.buf = bufio.NewReader(resp.Body)
|
||||
s.gzipr, err = gzip.NewReader(s.buf)
|
||||
if err != nil {
|
||||
return err
|
||||
return "", err
|
||||
}
|
||||
} else {
|
||||
s.buf.Reset(resp.Body)
|
||||
if err = s.gzipr.Reset(s.buf); err != nil {
|
||||
return err
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
|
||||
_, err = io.Copy(w, s.gzipr)
|
||||
s.gzipr.Close()
|
||||
return err
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return resp.Header.Get("Content-Type"), nil
|
||||
}
|
||||
|
||||
// A loop can run and be stopped again. It must not be reused after it was stopped.
|
||||
|
@ -562,6 +565,7 @@ type metaEntry struct {
|
|||
lastIter uint64 // Last scrape iteration the entry was observed at.
|
||||
typ textparse.MetricType
|
||||
help string
|
||||
unit string
|
||||
}
|
||||
|
||||
func newScrapeCache() *scrapeCache {
|
||||
|
@ -656,7 +660,7 @@ func (c *scrapeCache) setType(metric []byte, t textparse.MetricType) {
|
|||
|
||||
e, ok := c.metadata[yoloString(metric)]
|
||||
if !ok {
|
||||
e = &metaEntry{typ: textparse.MetricTypeUntyped}
|
||||
e = &metaEntry{typ: textparse.MetricTypeUnknown}
|
||||
c.metadata[string(metric)] = e
|
||||
}
|
||||
e.typ = t
|
||||
|
@ -670,7 +674,7 @@ func (c *scrapeCache) setHelp(metric, help []byte) {
|
|||
|
||||
e, ok := c.metadata[yoloString(metric)]
|
||||
if !ok {
|
||||
e = &metaEntry{typ: textparse.MetricTypeUntyped}
|
||||
e = &metaEntry{typ: textparse.MetricTypeUnknown}
|
||||
c.metadata[string(metric)] = e
|
||||
}
|
||||
if e.help != yoloString(help) {
|
||||
|
@ -681,6 +685,22 @@ func (c *scrapeCache) setHelp(metric, help []byte) {
|
|||
c.metaMtx.Unlock()
|
||||
}
|
||||
|
||||
func (c *scrapeCache) setUnit(metric, unit []byte) {
|
||||
c.metaMtx.Lock()
|
||||
|
||||
e, ok := c.metadata[yoloString(metric)]
|
||||
if !ok {
|
||||
e = &metaEntry{typ: textparse.MetricTypeUnknown}
|
||||
c.metadata[string(metric)] = e
|
||||
}
|
||||
if e.unit != yoloString(unit) {
|
||||
e.unit = string(unit)
|
||||
}
|
||||
e.lastIter = c.iter
|
||||
|
||||
c.metaMtx.Unlock()
|
||||
}
|
||||
|
||||
func (c *scrapeCache) getMetadata(metric string) (MetricMetadata, bool) {
|
||||
c.metaMtx.Lock()
|
||||
defer c.metaMtx.Unlock()
|
||||
|
@ -693,6 +713,7 @@ func (c *scrapeCache) getMetadata(metric string) (MetricMetadata, bool) {
|
|||
Metric: metric,
|
||||
Type: m.typ,
|
||||
Help: m.help,
|
||||
Unit: m.unit,
|
||||
}, true
|
||||
}
|
||||
|
||||
|
@ -707,6 +728,7 @@ func (c *scrapeCache) listMetadata() []MetricMetadata {
|
|||
Metric: m,
|
||||
Type: e.typ,
|
||||
Help: e.help,
|
||||
Unit: e.unit,
|
||||
})
|
||||
}
|
||||
return res
|
||||
|
@ -789,7 +811,7 @@ mainLoop:
|
|||
b := sl.buffers.Get(sl.lastScrapeSize).([]byte)
|
||||
buf := bytes.NewBuffer(b)
|
||||
|
||||
scrapeErr := sl.scraper.scrape(scrapeCtx, buf)
|
||||
contentType, scrapeErr := sl.scraper.scrape(scrapeCtx, buf)
|
||||
cancel()
|
||||
|
||||
if scrapeErr == nil {
|
||||
|
@ -809,12 +831,12 @@ mainLoop:
|
|||
|
||||
// A failed scrape is the same as an empty scrape,
|
||||
// we still call sl.append to trigger stale markers.
|
||||
total, added, appErr := sl.append(b, start)
|
||||
total, added, appErr := sl.append(b, contentType, start)
|
||||
if appErr != nil {
|
||||
level.Warn(sl.l).Log("msg", "append failed", "err", appErr)
|
||||
// The append failed, probably due to a parse error or sample limit.
|
||||
// Call sl.append again with an empty scrape to trigger stale markers.
|
||||
if _, _, err := sl.append([]byte{}, start); err != nil {
|
||||
if _, _, err := sl.append([]byte{}, "", start); err != nil {
|
||||
level.Warn(sl.l).Log("msg", "append failed", "err", err)
|
||||
}
|
||||
}
|
||||
|
@ -885,7 +907,7 @@ func (sl *scrapeLoop) endOfRunStaleness(last time.Time, ticker *time.Ticker, int
|
|||
// Call sl.append again with an empty scrape to trigger stale markers.
|
||||
// If the target has since been recreated and scraped, the
|
||||
// stale markers will be out of order and ignored.
|
||||
if _, _, err := sl.append([]byte{}, staleTime); err != nil {
|
||||
if _, _, err := sl.append([]byte{}, "", staleTime); err != nil {
|
||||
level.Error(sl.l).Log("msg", "stale append failed", "err", err)
|
||||
}
|
||||
if err := sl.reportStale(staleTime); err != nil {
|
||||
|
@ -921,10 +943,10 @@ func (s samples) Less(i, j int) bool {
|
|||
return s[i].t < s[j].t
|
||||
}
|
||||
|
||||
func (sl *scrapeLoop) append(b []byte, ts time.Time) (total, added int, err error) {
|
||||
func (sl *scrapeLoop) append(b []byte, contentType string, ts time.Time) (total, added int, err error) {
|
||||
var (
|
||||
app = sl.appender()
|
||||
p = textparse.New(b)
|
||||
p = textparse.New(b, contentType)
|
||||
defTime = timestamp.FromTime(ts)
|
||||
numOutOfOrder = 0
|
||||
numDuplicates = 0
|
||||
|
@ -948,6 +970,9 @@ loop:
|
|||
case textparse.EntryHelp:
|
||||
sl.cache.setHelp(p.Help())
|
||||
continue
|
||||
case textparse.EntryUnit:
|
||||
sl.cache.setUnit(p.Unit())
|
||||
continue
|
||||
case textparse.EntryComment:
|
||||
continue
|
||||
default:
|
||||
|
|
|
@ -615,13 +615,13 @@ func TestScrapeLoopMetadata(t *testing.T) {
|
|||
)
|
||||
defer cancel()
|
||||
|
||||
total, _, err := sl.append([]byte(`
|
||||
# TYPE test_metric counter
|
||||
total, _, err := sl.append([]byte(`# TYPE test_metric counter
|
||||
# HELP test_metric some help text
|
||||
# other comment
|
||||
# UNIT test_metric metric
|
||||
test_metric 1
|
||||
# TYPE test_metric_no_help gauge
|
||||
# HELP test_metric_no_type other help text`), time.Now())
|
||||
# HELP test_metric_no_type other help text
|
||||
# EOF`), "application/openmetrics-text", time.Now())
|
||||
testutil.Ok(t, err)
|
||||
testutil.Equals(t, 1, total)
|
||||
|
||||
|
@ -629,16 +629,19 @@ test_metric 1
|
|||
testutil.Assert(t, ok, "expected metadata to be present")
|
||||
testutil.Assert(t, textparse.MetricTypeCounter == md.Type, "unexpected metric type")
|
||||
testutil.Equals(t, "some help text", md.Help)
|
||||
testutil.Equals(t, "metric", md.Unit)
|
||||
|
||||
md, ok = cache.getMetadata("test_metric_no_help")
|
||||
testutil.Assert(t, ok, "expected metadata to be present")
|
||||
testutil.Assert(t, textparse.MetricTypeGauge == md.Type, "unexpected metric type")
|
||||
testutil.Equals(t, "", md.Help)
|
||||
testutil.Equals(t, "", md.Unit)
|
||||
|
||||
md, ok = cache.getMetadata("test_metric_no_type")
|
||||
testutil.Assert(t, ok, "expected metadata to be present")
|
||||
testutil.Assert(t, textparse.MetricTypeUntyped == md.Type, "unexpected metric type")
|
||||
testutil.Assert(t, textparse.MetricTypeUnknown == md.Type, "unexpected metric type")
|
||||
testutil.Equals(t, "other help text", md.Help)
|
||||
testutil.Equals(t, "", md.Unit)
|
||||
}
|
||||
|
||||
func TestScrapeLoopRunCreatesStaleMarkersOnFailedScrape(t *testing.T) {
|
||||
|
@ -824,7 +827,7 @@ func TestScrapeLoopAppend(t *testing.T) {
|
|||
|
||||
now := time.Now()
|
||||
|
||||
_, _, err := sl.append([]byte(test.scrapeLabels), now)
|
||||
_, _, err := sl.append([]byte(test.scrapeLabels), "", now)
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected append error: %s", err)
|
||||
}
|
||||
|
@ -870,7 +873,7 @@ func TestScrapeLoopAppendSampleLimit(t *testing.T) {
|
|||
beforeMetricValue := beforeMetric.GetCounter().GetValue()
|
||||
|
||||
now := time.Now()
|
||||
_, _, err = sl.append([]byte("metric_a 1\nmetric_b 1\nmetric_c 1\n"), now)
|
||||
_, _, err = sl.append([]byte("metric_a 1\nmetric_b 1\nmetric_c 1\n"), "", now)
|
||||
if err != errSampleLimit {
|
||||
t.Fatalf("Did not see expected sample limit error: %s", err)
|
||||
}
|
||||
|
@ -922,11 +925,11 @@ func TestScrapeLoop_ChangingMetricString(t *testing.T) {
|
|||
)
|
||||
|
||||
now := time.Now()
|
||||
_, _, err = sl.append([]byte(`metric_a{a="1",b="1"} 1`), now)
|
||||
_, _, err = sl.append([]byte(`metric_a{a="1",b="1"} 1`), "", now)
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected append error: %s", err)
|
||||
}
|
||||
_, _, err = sl.append([]byte(`metric_a{b="1",a="1"} 2`), now.Add(time.Minute))
|
||||
_, _, err = sl.append([]byte(`metric_a{b="1",a="1"} 2`), "", now.Add(time.Minute))
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected append error: %s", err)
|
||||
}
|
||||
|
@ -961,11 +964,11 @@ func TestScrapeLoopAppendStaleness(t *testing.T) {
|
|||
)
|
||||
|
||||
now := time.Now()
|
||||
_, _, err := sl.append([]byte("metric_a 1\n"), now)
|
||||
_, _, err := sl.append([]byte("metric_a 1\n"), "", now)
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected append error: %s", err)
|
||||
}
|
||||
_, _, err = sl.append([]byte(""), now.Add(time.Second))
|
||||
_, _, err = sl.append([]byte(""), "", now.Add(time.Second))
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected append error: %s", err)
|
||||
}
|
||||
|
@ -1006,11 +1009,11 @@ func TestScrapeLoopAppendNoStalenessIfTimestamp(t *testing.T) {
|
|||
)
|
||||
|
||||
now := time.Now()
|
||||
_, _, err := sl.append([]byte("metric_a 1 1000\n"), now)
|
||||
_, _, err := sl.append([]byte("metric_a 1 1000\n"), "", now)
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected append error: %s", err)
|
||||
}
|
||||
_, _, err = sl.append([]byte(""), now.Add(time.Second))
|
||||
_, _, err = sl.append([]byte(""), "", now.Add(time.Second))
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected append error: %s", err)
|
||||
}
|
||||
|
@ -1120,7 +1123,7 @@ func TestScrapeLoopAppendGracefullyIfAmendOrOutOfOrderOrOutOfBounds(t *testing.T
|
|||
)
|
||||
|
||||
now := time.Unix(1, 0)
|
||||
_, _, err := sl.append([]byte("out_of_order 1\namend 1\nnormal 1\nout_of_bounds 1\n"), now)
|
||||
_, _, err := sl.append([]byte("out_of_order 1\namend 1\nnormal 1\nout_of_bounds 1\n"), "", now)
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected append error: %s", err)
|
||||
}
|
||||
|
@ -1153,7 +1156,7 @@ func TestScrapeLoopOutOfBoundsTimeError(t *testing.T) {
|
|||
)
|
||||
|
||||
now := time.Now().Add(20 * time.Minute)
|
||||
total, added, err := sl.append([]byte("normal 1\n"), now)
|
||||
total, added, err := sl.append([]byte("normal 1\n"), "", now)
|
||||
if total != 1 {
|
||||
t.Error("expected 1 metric")
|
||||
return
|
||||
|
@ -1177,8 +1180,8 @@ func TestTargetScraperScrapeOK(t *testing.T) {
|
|||
server := httptest.NewServer(
|
||||
http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
accept := r.Header.Get("Accept")
|
||||
if !strings.HasPrefix(accept, "text/plain;") {
|
||||
t.Errorf("Expected Accept header to prefer text/plain, got %q", accept)
|
||||
if !strings.HasPrefix(accept, "application/openmetrics-text;") {
|
||||
t.Errorf("Expected Accept header to prefer application/openmetrics-text, got %q", accept)
|
||||
}
|
||||
|
||||
timeout := r.Header.Get("X-Prometheus-Scrape-Timeout-Seconds")
|
||||
|
@ -1209,7 +1212,7 @@ func TestTargetScraperScrapeOK(t *testing.T) {
|
|||
}
|
||||
var buf bytes.Buffer
|
||||
|
||||
if err := ts.scrape(context.Background(), &buf); err != nil {
|
||||
if _, err := ts.scrape(context.Background(), &buf); err != nil {
|
||||
t.Fatalf("Unexpected scrape error: %s", err)
|
||||
}
|
||||
require.Equal(t, "metric_a 1\nmetric_b 2\n", buf.String())
|
||||
|
@ -1249,7 +1252,7 @@ func TestTargetScrapeScrapeCancel(t *testing.T) {
|
|||
}()
|
||||
|
||||
go func() {
|
||||
if err := ts.scrape(ctx, ioutil.Discard); err != context.Canceled {
|
||||
if _, err := ts.scrape(ctx, ioutil.Discard); err != context.Canceled {
|
||||
errc <- fmt.Errorf("Expected context cancelation error but got: %s", err)
|
||||
}
|
||||
close(errc)
|
||||
|
@ -1291,7 +1294,7 @@ func TestTargetScrapeScrapeNotFound(t *testing.T) {
|
|||
client: http.DefaultClient,
|
||||
}
|
||||
|
||||
if err := ts.scrape(context.Background(), ioutil.Discard); !strings.Contains(err.Error(), "404") {
|
||||
if _, err := ts.scrape(context.Background(), ioutil.Discard); !strings.Contains(err.Error(), "404") {
|
||||
t.Fatalf("Expected \"404 NotFound\" error but got: %s", err)
|
||||
}
|
||||
}
|
||||
|
@ -1319,9 +1322,9 @@ func (ts *testScraper) report(start time.Time, duration time.Duration, err error
|
|||
ts.lastError = err
|
||||
}
|
||||
|
||||
func (ts *testScraper) scrape(ctx context.Context, w io.Writer) error {
|
||||
func (ts *testScraper) scrape(ctx context.Context, w io.Writer) (string, error) {
|
||||
if ts.scrapeFunc != nil {
|
||||
return ts.scrapeFunc(ctx, w)
|
||||
return "", ts.scrapeFunc(ctx, w)
|
||||
}
|
||||
return ts.scrapeErr
|
||||
return "", ts.scrapeErr
|
||||
}
|
||||
|
|
|
@ -85,6 +85,7 @@ type MetricMetadata struct {
|
|||
Metric string
|
||||
Type textparse.MetricType
|
||||
Help string
|
||||
Unit string
|
||||
}
|
||||
|
||||
func (t *Target) MetadataList() []MetricMetadata {
|
||||
|
|
|
@ -573,6 +573,7 @@ Outer:
|
|||
Metric: md.Metric,
|
||||
Type: md.Type,
|
||||
Help: md.Help,
|
||||
Unit: md.Unit,
|
||||
})
|
||||
}
|
||||
continue
|
||||
|
@ -583,6 +584,7 @@ Outer:
|
|||
Target: t.Labels(),
|
||||
Type: md.Type,
|
||||
Help: md.Help,
|
||||
Unit: md.Unit,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -598,6 +600,7 @@ type metricMetadata struct {
|
|||
Metric string `json:"metric,omitempty"`
|
||||
Type textparse.MetricType `json:"type"`
|
||||
Help string `json:"help"`
|
||||
Unit string `json:"unit"`
|
||||
}
|
||||
|
||||
// AlertmanagerDiscovery has all the active Alertmanagers.
|
||||
|
|
Loading…
Reference in a new issue