// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//go:generate go get -u modernc.org/golex
//go:generate golex -o=promlex.l.go promlex.l

package textparse

import (
	"errors"
	"fmt"
	"io"
	"math"
	"strconv"
	"strings"
	"unicode/utf8"
	"unsafe"

	"github.com/prometheus/prometheus/model/exemplar"
	"github.com/prometheus/prometheus/model/histogram"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/value"
)

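// promlexer tokenizes the Prometheus text exposition format. Its Lex method
// and the lexer states referenced below (sLValue, sMeta2, sComment) are
// generated from promlex.l via the go:generate directives above.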
type promlexer struct {
	b     []byte
	i     int
	start int
	err   error
	state int
}

type token int

const (
	tInvalid   token = -1
	tEOF       token = 0
	tLinebreak token = iota
	tWhitespace
	tHelp
	tType
	tUnit
	tEOFWord
	tText
	tComment
	tBlank
	tMName
	tBraceOpen
	tBraceClose
	tLName
	tLValue
	tComma
	tEqual
	tTimestamp
	tValue
)

func (t token) String() string {
	switch t {
	case tInvalid:
		return "INVALID"
	case tEOF:
		return "EOF"
	case tLinebreak:
		return "LINEBREAK"
	case tWhitespace:
		return "WHITESPACE"
	case tHelp:
		return "HELP"
	case tType:
		return "TYPE"
	case tUnit:
		return "UNIT"
	case tEOFWord:
		return "EOFWORD"
	case tText:
		return "TEXT"
	case tComment:
		return "COMMENT"
	case tBlank:
		return "BLANK"
	case tMName:
		return "MNAME"
	case tBraceOpen:
		return "BOPEN"
	case tBraceClose:
		return "BCLOSE"
	case tLName:
		return "LNAME"
	case tLValue:
		return "LVALUE"
	case tEqual:
		return "EQUAL"
	case tComma:
		return "COMMA"
	case tTimestamp:
		return "TIMESTAMP"
	case tValue:
		return "VALUE"
	}
	return fmt.Sprintf("<invalid: %d>", t)
}

// buf returns the buffer of the current token.
func (l *promlexer) buf() []byte {
	return l.b[l.start:l.i]
}

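// cur returns the character at the current position without advancing.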
func (l *promlexer) cur() byte {
	return l.b[l.i]
}

// next advances the promlexer to the next character.
func (l *promlexer) next() byte {
	l.i++
	if l.i >= len(l.b) {
		l.err = io.EOF
		return byte(tEOF)
	}
	// Lex struggles with null bytes. If we are in a label value or help string, where
	// they are allowed, consume them here immediately.
	for l.b[l.i] == 0 && (l.state == sLValue || l.state == sMeta2 || l.state == sComment) {
		l.i++
	}
	return l.b[l.i]
}

func (l *promlexer) Error(es string) {
	l.err = errors.New(es)
}

// PromParser parses samples from a byte slice in the official Prometheus
// text exposition format.
type PromParser struct {
	l       *promlexer
	builder labels.ScratchBuilder
	series  []byte
	text    []byte
	mtype   MetricType
	val     float64
	ts      int64
	hasTS   bool
	start   int
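	// offsets holds byte offsets into the lexer buffer. For a series entry,
	// offsets[0] marks the end of the metric name; each following group of
	// four values records the start and end of a label name followed by the
	// start and end of its (unquoted) label value. For HELP and TYPE entries
	// it instead holds the start and end of the metric name.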
	offsets []int
}

// NewPromParser returns a new parser of the byte slice.
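// A typical consumption loop looks roughly like this (illustrative sketch;
// error handling and the remaining entry kinds are elided):
//
//	p := NewPromParser(input)
//	for {
//		et, err := p.Next()
//		if err == io.EOF {
//			break
//		}
//		if et == EntrySeries {
//			seriesBytes, ts, v := p.Series()
//			_, _, _ = seriesBytes, ts, v
//		}
//	}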
func NewPromParser(b []byte) Parser {
	return &PromParser{l: &promlexer{b: append(b, '\n')}}
}

// Series returns the bytes of the series, the timestamp if set, and the value
// of the current sample.
func (p *PromParser) Series() ([]byte, *int64, float64) {
	if p.hasTS {
		return p.series, &p.ts, p.val
	}
	return p.series, nil, p.val
}

// Histogram returns (nil, nil, nil, nil) for now because the Prometheus text
// format does not support sparse histograms yet.
func (p *PromParser) Histogram() ([]byte, *int64, *histogram.Histogram, *histogram.FloatHistogram) {
	return nil, nil, nil, nil
}

// Help returns the metric name and help text in the current entry.
// Must only be called after Next returned a help entry.
// The returned byte slices become invalid after the next call to Next.
func (p *PromParser) Help() ([]byte, []byte) {
	m := p.l.b[p.offsets[0]:p.offsets[1]]

	// Replacer causes allocations. Replace only when necessary.
	if strings.IndexByte(yoloString(p.text), byte('\\')) >= 0 {
		return m, []byte(helpReplacer.Replace(string(p.text)))
	}
	return m, p.text
}

// Type returns the metric name and type in the current entry.
// Must only be called after Next returned a type entry.
// The returned byte slices become invalid after the next call to Next.
func (p *PromParser) Type() ([]byte, MetricType) {
	return p.l.b[p.offsets[0]:p.offsets[1]], p.mtype
}

// Unit returns the metric name and unit in the current entry.
// Must only be called after Next returned a unit entry.
// The returned byte slices become invalid after the next call to Next.
func (p *PromParser) Unit() ([]byte, []byte) {
	// The Prometheus format does not have units.
	return nil, nil
}

// Comment returns the text of the current comment.
// Must only be called after Next returned a comment entry.
// The returned byte slice becomes invalid after the next call to Next.
func (p *PromParser) Comment() []byte {
	return p.text
}

// Metric writes the labels of the current sample into the passed labels.
// It returns the string from which the metric was parsed.
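// For example, given the (illustrative) exposition line
//
//	http_requests_total{code="200"} 1027 1395066363000
//
// it fills l with {__name__="http_requests_total", code="200"} and returns
// the http_requests_total{code="200"} portion of the line.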
func (p *PromParser) Metric(l *labels.Labels) string {
	// Copy the buffer to a string: this is only necessary for the return value.
	s := string(p.series)

	p.builder.Reset()
	p.builder.Add(labels.MetricName, s[:p.offsets[0]-p.start])

	for i := 1; i < len(p.offsets); i += 4 {
		a := p.offsets[i] - p.start
		b := p.offsets[i+1] - p.start
		c := p.offsets[i+2] - p.start
		d := p.offsets[i+3] - p.start

		value := s[c:d]
		// Replacer causes allocations. Replace only when necessary.
		if strings.IndexByte(s[c:d], byte('\\')) >= 0 {
			value = lvalReplacer.Replace(value)
		}
		p.builder.Add(s[a:b], value)
	}

	p.builder.Sort()
	*l = p.builder.Labels()

	return s
}

// Exemplar implements the Parser interface. However, since the classic
// Prometheus text format does not support exemplars, this implementation simply
// returns false and does nothing else.
func (p *PromParser) Exemplar(*exemplar.Exemplar) bool {
	return false
}

// nextToken returns the next token from the promlexer. It skips over tabs
// and spaces.
func (p *PromParser) nextToken() token {
	for {
		if tok := p.l.Lex(); tok != tWhitespace {
			return tok
		}
	}
}

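// parseError formats a parse error that quotes the offending token and the
// input consumed so far for the current entry.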
func (p *PromParser) parseError(exp string, got token) error {
	e := p.l.i + 1
	if len(p.l.b) < e {
		e = len(p.l.b)
	}
	return fmt.Errorf("%s, got %q (%q) while parsing: %q", exp, p.l.b[p.l.start:e], got, p.l.b[p.start:e])
}

// Next advances the parser to the next sample. It returns (EntryInvalid, io.EOF)
// when no more samples can be read and (EntryInvalid, err) if an error occurred.
func (p *PromParser) Next() (Entry, error) {
	var err error

	p.start = p.l.i
	p.offsets = p.offsets[:0]

	switch t := p.nextToken(); t {
	case tEOF:
		return EntryInvalid, io.EOF
	case tLinebreak:
		// Allow full blank lines.
		return p.Next()

	case tHelp, tType:
		switch t2 := p.nextToken(); t2 {
		case tMName:
			p.offsets = append(p.offsets, p.l.start, p.l.i)
		default:
			return EntryInvalid, p.parseError("expected metric name after "+t.String(), t2)
		}
		switch t2 := p.nextToken(); t2 {
		case tText:
			if len(p.l.buf()) > 1 {
				p.text = p.l.buf()[1:]
			} else {
				p.text = []byte{}
			}
		default:
			return EntryInvalid, fmt.Errorf("expected text in %s", t.String())
		}
		switch t {
		case tType:
			switch s := yoloString(p.text); s {
			case "counter":
				p.mtype = MetricTypeCounter
			case "gauge":
				p.mtype = MetricTypeGauge
			case "histogram":
				p.mtype = MetricTypeHistogram
			case "summary":
				p.mtype = MetricTypeSummary
			case "untyped":
				p.mtype = MetricTypeUnknown
			default:
				return EntryInvalid, fmt.Errorf("invalid metric type %q", s)
			}
		case tHelp:
			if !utf8.Valid(p.text) {
				return EntryInvalid, fmt.Errorf("help text %q is not a valid utf8 string", p.text)
			}
		}
		if t := p.nextToken(); t != tLinebreak {
			return EntryInvalid, p.parseError("linebreak expected after metadata", t)
		}
		switch t {
		case tHelp:
			return EntryHelp, nil
		case tType:
			return EntryType, nil
		}
	case tComment:
		p.text = p.l.buf()
		if t := p.nextToken(); t != tLinebreak {
			return EntryInvalid, p.parseError("linebreak expected after comment", t)
		}
		return EntryComment, nil

	case tMName:
		p.offsets = append(p.offsets, p.l.i)
		p.series = p.l.b[p.start:p.l.i]

		t2 := p.nextToken()
		if t2 == tBraceOpen {
			if err := p.parseLVals(); err != nil {
				return EntryInvalid, err
			}
			p.series = p.l.b[p.start:p.l.i]
			t2 = p.nextToken()
		}
		if t2 != tValue {
			return EntryInvalid, p.parseError("expected value after metric", t2)
		}
		if p.val, err = parseFloat(yoloString(p.l.buf())); err != nil {
			return EntryInvalid, fmt.Errorf("%v while parsing: %q", err, p.l.b[p.start:p.l.i])
		}
		// Ensure canonical NaN value.
		if math.IsNaN(p.val) {
			p.val = math.Float64frombits(value.NormalNaN)
		}
		p.hasTS = false
		switch t := p.nextToken(); t {
		case tLinebreak:
			break
		case tTimestamp:
			p.hasTS = true
			if p.ts, err = strconv.ParseInt(yoloString(p.l.buf()), 10, 64); err != nil {
				return EntryInvalid, fmt.Errorf("%v while parsing: %q", err, p.l.b[p.start:p.l.i])
			}
			if t2 := p.nextToken(); t2 != tLinebreak {
				return EntryInvalid, p.parseError("expected next entry after timestamp", t2)
			}
		default:
			return EntryInvalid, p.parseError("expected timestamp or new record", t)
		}
		return EntrySeries, nil

	default:
		err = p.parseError("expected a valid start token", t)
	}
	return EntryInvalid, err
}

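// parseLVals parses the label set following a metric name, appending the
// offsets of each label name and its unquoted value to p.offsets.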
func (p *PromParser) parseLVals() error {
	t := p.nextToken()
	for {
		switch t {
		case tBraceClose:
			return nil
		case tLName:
		default:
			return p.parseError("expected label name", t)
		}
		p.offsets = append(p.offsets, p.l.start, p.l.i)

		if t := p.nextToken(); t != tEqual {
			return p.parseError("expected equal", t)
		}
		if t := p.nextToken(); t != tLValue {
			return p.parseError("expected label value", t)
		}
		if !utf8.Valid(p.l.buf()) {
			return fmt.Errorf("invalid UTF-8 label value: %q", p.l.buf())
		}

		// The promlexer ensures the value string is quoted. Strip first
		// and last character.
		p.offsets = append(p.offsets, p.l.start+1, p.l.i-1)

		// Free trailing commas are allowed.
		if t = p.nextToken(); t == tComma {
			t = p.nextToken()
		}
	}
}

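// lvalReplacer and helpReplacer undo the escaping the text format applies to
// label values (\", \\, \n) and to HELP text (\\, \n), respectively.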
var lvalReplacer = strings.NewReplacer(
	`\"`, "\"",
	`\\`, "\\",
	`\n`, "\n",
)

var helpReplacer = strings.NewReplacer(
	`\\`, "\\",
	`\n`, "\n",
)

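// yoloString converts a byte slice to a string without copying. The result
// must not outlive the underlying buffer and is only safe to use while the
// buffer is not modified.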
func yoloString(b []byte) string {
	return *((*string)(unsafe.Pointer(&b)))
}

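// parseFloat is like strconv.ParseFloat but rejects the hexadecimal floats
// and digit separators accepted since Go 1.13, which are not valid in the
// text exposition format.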
func parseFloat(s string) (float64, error) {
	// Keep to pre-Go 1.13 float formats.
	if strings.ContainsAny(s, "pP_") {
		return 0, fmt.Errorf("unsupported character in float")
	}
	return strconv.ParseFloat(s, 64)
}