mirror of https://github.com/prometheus/prometheus.git
Merge pull request #567 from prometheus/floats
Support scientific notation and special float values.
commit cce2f30a8b
rules/lexer.l

@@ -55,6 +55,11 @@ L [a-zA-Z_]
 M [a-zA-Z_:]
 U [smhdwy]
 
+FLOAT [-+]?({D}*\.?{D}+|{D}+\.?{D}*){EXPONENT}?|[+-]?[iI][nN][fF]|[nN][aA][nN]
+EXPONENT [eE][-+]?[0-9]+
+
+STR \"(\\.|[^\\"])*\"|\'(\\.|[^\\'])*\'
+
 %x S_COMMENTS
 
 %yyc c
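The new FLOAT definition accepts plain decimals (including a leading or trailing bare dot), an optional exponent, and case-insensitive Inf/NaN spellings; EXPONENT and STR are supporting definitions. As an illustration only (not part of the change), the definitions can be hand-translated into a Go regexp, assuming the usual D [0-9] digit class defined earlier in lexer.l:

package main

import (
	"fmt"
	"regexp"
)

// Hand-translated Go regexp equivalent of the new FLOAT/EXPONENT lexer
// definitions, assuming D is the [0-9] digit class defined earlier in
// rules/lexer.l. This only illustrates which tokens the pattern is meant
// to accept; the real tokenizer is generated from rules/lexer.l.
const (
	digit    = `[0-9]`
	exponent = `[eE][-+]?[0-9]+`
	float    = `[-+]?(` + digit + `*\.?` + digit + `+|` + digit + `+\.?` + digit + `*)(` + exponent + `)?` +
		`|[+-]?[iI][nN][fF]|[nN][aA][nN]`
)

func main() {
	re := regexp.MustCompile(`^(` + float + `)$`)
	for _, tok := range []string{"12.34e6", ".2", "2.", "+Inf", "-inf", "nan", "1e", "abc"} {
		fmt.Printf("%-8s matches FLOAT: %v\n", tok, re.MatchString(tok))
	}
}

The anchored check reports true for the first six tokens and false for "1e" and "abc".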
@@ -88,19 +93,18 @@ avg|sum|max|min|count lval.str = strings.ToUpper(lexer.token()); return AGGR_
 [+\-] lval.str = lexer.token(); return ADDITIVE_OP
 [*/%] lval.str = lexer.token(); return MULT_OP
 
-{D}+{U} lval.str = lexer.token(); return DURATION
-{L}({L}|{D})* lval.str = lexer.token(); return IDENTIFIER
-{M}({M}|{D})* lval.str = lexer.token(); return METRICNAME
-
-\-?{D}+(\.{D}*)? num, err := strconv.ParseFloat(lexer.token(), 64);
+{FLOAT} num, err := strconv.ParseFloat(lexer.token(), 64);
         if (err != nil && err.(*strconv.NumError).Err == strconv.ErrSyntax) {
           panic("Invalid float")
         }
         lval.num = clientmodel.SampleValue(num)
         return NUMBER
 
-\"(\\.|[^\\"])*\" lval.str = lexer.token()[1:len(lexer.token()) - 1]; return STRING
-\'(\\.|[^\\'])*\' lval.str = lexer.token()[1:len(lexer.token()) - 1]; return STRING
+{D}+{U} lval.str = lexer.token(); return DURATION
+{L}({L}|{D})* lval.str = lexer.token(); return IDENTIFIER
+{M}({M}|{D})* lval.str = lexer.token(); return METRICNAME
+
+{STR} lval.str = lexer.token()[1:len(lexer.token()) - 1]; return STRING
 
 [{}\[\]()=,] return int(lexer.buf[0])
 [\t\n\r ] /* gobble up any whitespace */
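The single {FLOAT} rule replaces the old \-?{D}+(\.{D}*)? rule and feeds every numeric literal, including the Inf/NaN spellings, through strconv.ParseFloat, panicking only on ErrSyntax. A standalone sketch of the standard-library behaviour this relies on (the token list is illustrative): scientific notation and case-insensitive Inf/NaN parse cleanly, and an over-range literal comes back as +/-Inf with ErrRange rather than ErrSyntax, which is why the oversized constant in the tests further down evaluates to +Inf instead of tripping the panic.

package main

import (
	"errors"
	"fmt"
	"strconv"
)

// Demonstrates the strconv.ParseFloat behaviour the {FLOAT} action relies on:
// scientific notation and case-insensitive Inf/NaN parse without error, and an
// out-of-range literal yields +/-Inf with ErrRange (not ErrSyntax), so the
// panic("Invalid float") branch is reserved for genuinely malformed tokens.
func main() {
	for _, tok := range []string{"12.34e6", "-0.2e-6", "inF", "nan", "1e999"} {
		num, err := strconv.ParseFloat(tok, 64)
		var numErr *strconv.NumError
		syntaxErr := errors.As(err, &numErr) && numErr.Err == strconv.ErrSyntax
		fmt.Printf("%-8s => %v (syntax error: %v)\n", tok, num, syntaxErr)
	}
}

The rule's own err.(*strconv.NumError) assertion is safe because it is guarded by err != nil; errors.As is used here only to keep the sketch self-contained.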
rules/lexer.l.go (1858 changed lines): file diff suppressed because it is too large.
@@ -36,7 +36,7 @@ var (
 	testEvalTime = testStartTime.Add(testSampleInterval * 10)
 	fixturesPath = "fixtures"
 
-	reSample = regexp.MustCompile(`^(.*) \=\> (\-?\d+\.?\d*e?\d*|[+-]Inf|NaN) \@\[(\d+)\]$`)
+	reSample = regexp.MustCompile(`^(.*)(?: \=\>|:) (\-?\d+\.?\d*e?\d*|[+-]Inf|NaN) \@\[(\d+)\]$`)
 	minNormal = math.Float64frombits(0x0010000000000000) // The smallest positive normal value of type float64.
 )
 
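The only change to the test file's var block is reSample, which now also matches the scalar output form "scalar: <value> @[<ts>]" in addition to "<metric> => <value> @[<ts>]", with the value alternation already covering scientific notation, +/-Inf and NaN. A small demonstration against representative output lines (the 1234567890 timestamps are placeholders invented for this sketch):

package main

import (
	"fmt"
	"regexp"
)

// The updated reSample pattern, applied to a few expected-output lines of the
// two shapes it has to recognise; the timestamps are made-up placeholder values.
var reSample = regexp.MustCompile(`^(.*)(?: \=\>|:) (\-?\d+\.?\d*e?\d*|[+-]Inf|NaN) \@\[(\d+)\]$`)

func main() {
	lines := []string{
		`{instance="ins2", job="job2"} => 0.11666666666666667 @[1234567890]`,
		`scalar: 12340000 @[1234567890]`,
		`scalar: +Inf @[1234567890]`,
		`scalar: NaN @[1234567890]`,
	}
	for _, l := range lines {
		if m := reSample.FindStringSubmatch(l); m != nil {
			fmt.Printf("entity=%q value=%q timestamp=%q\n", m[1], m[2], m[3])
		}
	}
}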
@@ -902,6 +902,58 @@ func TestExpressions(t *testing.T) {
 			`{instance="ins2", job="job2"} => 0.11666666666666667 @[%v]`,
 		},
 	},
+	{
+		expr:   `12.34e6`,
+		output: []string{`scalar: 12340000 @[%v]`},
+	},
+	{
+		expr:   `12.34e+6`,
+		output: []string{`scalar: 12340000 @[%v]`},
+	},
+	{
+		expr:   `12.34e-6`,
+		output: []string{`scalar: 0.00001234 @[%v]`},
+	},
+	{
+		expr:   `.2`,
+		output: []string{`scalar: 0.2 @[%v]`},
+	},
+	{
+		expr:   `+0.2`,
+		output: []string{`scalar: 0.2 @[%v]`},
+	},
+	{
+		expr:   `-0.2e-6`,
+		output: []string{`scalar: -0.0000002 @[%v]`},
+	},
+	{
+		expr:   `+Inf`,
+		output: []string{`scalar: +Inf @[%v]`},
+	},
+	{
+		expr:   `inF`,
+		output: []string{`scalar: +Inf @[%v]`},
+	},
+	{
+		expr:   `-inf`,
+		output: []string{`scalar: -Inf @[%v]`},
+	},
+	{
+		expr:   `NaN`,
+		output: []string{`scalar: NaN @[%v]`},
+	},
+	{
+		expr:   `nan`,
+		output: []string{`scalar: NaN @[%v]`},
+	},
+	{
+		expr:   `2.`,
+		output: []string{`scalar: 2 @[%v]`},
+	},
+	{
expr: `999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999`,
+		output: []string{`scalar: +Inf @[%v]`},
+	},
 }
 
 storage, closer := newTestStorage(t)
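The expected strings in the new cases are the plain, non-exponent renderings of the parsed values. Assuming the scalar value is formatted with strconv.FormatFloat(v, 'f', -1, 64), which is a guess that happens to reproduce every expected string in this table, the round trip looks like this:

package main

import (
	"fmt"
	"math"
	"strconv"
)

// Assumed output formatting: strconv.FormatFloat with the 'f' format and -1
// precision. Printed in order: 12340000, 0.00001234, -0.0000002, 2, +Inf, NaN,
// matching the scalar strings asserted in the test cases above.
func main() {
	for _, v := range []float64{12.34e6, 12.34e-6, -0.2e-6, 2, math.Inf(1), math.NaN()} {
		fmt.Println(strconv.FormatFloat(v, 'f', -1, 64))
	}
}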