Mirror of https://github.com/prometheus/prometheus.git
promql: Clean up parser struct (#6360)
* promql: Clean up parser struct

The parser struct used to have two somewhat misused fields:

    peekCount int
    token     [3]item

By reading the code carefully one notices that peekCount always has the value 0 or 1 and that only the first element of token is ever accessed.

To make this clearer, this commit replaces the token array with a single variable and the peekCount int with a boolean.

Signed-off-by: Tobias Guggenmos <tguggenm@redhat.com>
parent 9497764011
commit c63259b83c
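For readers who want the lookahead pattern in isolation: below is a minimal, self-contained sketch of the single-token buffer the commit switches to. The `item`, `lexer`, and `main` pieces are simplified stand-ins invented for illustration; only the `parser` fields and the `next`/`peek`/`backup` logic follow the shape of the change, and comment skipping and error handling are omitted.

```go
package main

import "fmt"

// item is a toy stand-in for the tokens produced by the lexer.
type item struct {
	typ string
	val string
}

// lexer is a hypothetical, simplified token source used only for this sketch.
type lexer struct {
	items []item
	pos   int
}

// nextItem returns the next token, or an EOF item once the input is exhausted.
func (l *lexer) nextItem() item {
	if l.pos >= len(l.items) {
		return item{typ: "EOF"}
	}
	it := l.items[l.pos]
	l.pos++
	return it
}

// parser keeps at most one buffered token; peeking records whether that
// buffered token has been peeked at but not yet consumed.
type parser struct {
	lex     *lexer
	token   item
	peeking bool
}

// next returns the next token, consuming a previously peeked one first.
func (p *parser) next() item {
	if !p.peeking {
		p.token = p.lex.nextItem()
	}
	p.peeking = false
	return p.token
}

// peek returns but does not consume the next token.
func (p *parser) peek() item {
	if p.peeking {
		return p.token
	}
	p.peeking = true
	p.token = p.lex.nextItem()
	return p.token
}

// backup backs the input stream up one token by re-arming the buffer.
func (p *parser) backup() {
	p.peeking = true
}

func main() {
	p := &parser{lex: &lexer{items: []item{{"IDENT", "up"}, {"LBRACE", "{"}}}}
	fmt.Println(p.peek()) // {IDENT up}: buffered, not consumed
	fmt.Println(p.next()) // {IDENT up}: consumes the buffered token
	p.backup()
	fmt.Println(p.next()) // {IDENT up}: backup exposed it again
	fmt.Println(p.next()) // {LBRACE {}
}
```

The key invariant is that at most one token is ever buffered ahead of the parser, so a boolean flag is enough to record whether that buffered token still needs to be handed out.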
@@ -32,9 +32,9 @@ import (
 )
 
 type parser struct {
-	lex       *lexer
-	token     [3]item
-	peekCount int
+	lex     *lexer
+	token   item
+	peeking bool
 }
 
 // ParseErr wraps a parsing error with line and position context.
@@ -245,41 +245,42 @@ func (p *parser) typecheck(node Node) (err error) {
 
 // next returns the next token.
 func (p *parser) next() item {
-	if p.peekCount > 0 {
-		p.peekCount--
-	} else {
+	if !p.peeking {
 		t := p.lex.nextItem()
 		// Skip comments.
 		for t.typ == ItemComment {
 			t = p.lex.nextItem()
 		}
-		p.token[0] = t
+		p.token = t
 	}
-	if p.token[p.peekCount].typ == ItemError {
-		p.errorf("%s", p.token[p.peekCount].val)
+	p.peeking = false
+
+	if p.token.typ == ItemError {
+		p.errorf("%s", p.token.val)
 	}
-	return p.token[p.peekCount]
+	return p.token
 }
 
 // peek returns but does not consume the next token.
 func (p *parser) peek() item {
-	if p.peekCount > 0 {
-		return p.token[p.peekCount-1]
+	if p.peeking {
+		return p.token
 	}
-	p.peekCount = 1
+	p.peeking = true
 
 	t := p.lex.nextItem()
 	// Skip comments.
 	for t.typ == ItemComment {
 		t = p.lex.nextItem()
 	}
-	p.token[0] = t
-	return p.token[0]
+	p.token = t
+	return p.token
 }
 
 // backup backs the input stream up one token.
 func (p *parser) backup() {
-	p.peekCount++
+	p.peeking = true
 }
 
 // errorf formats the error and terminates processing.