Merge pull request #2728 from prometheus/uptsdb

vendor: update prometheus/tsdb
This commit is contained in:
Fabian Reinartz 2017-05-16 18:42:28 +02:00 committed by GitHub
commit 73e7ff1edd
26 changed files with 830 additions and 372 deletions

View file

@ -72,6 +72,7 @@ func Main() int {
log.Infoln("Starting prometheus", version.Info()) log.Infoln("Starting prometheus", version.Info())
log.Infoln("Build context", version.BuildContext()) log.Infoln("Build context", version.BuildContext())
log.Infoln("Host details", Uname())
var ( var (
// sampleAppender = storage.Fanout{} // sampleAppender = storage.Fanout{}

View file

@ -0,0 +1,23 @@
// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// +build !linux
package main
import "runtime"
// Uname reports a minimal platform identifier for any OS other than linux,
// consisting solely of the operating system name wrapped in parentheses.
func Uname() string {
	id := make([]byte, 0, len(runtime.GOOS)+2)
	id = append(id, '(')
	id = append(id, runtime.GOOS...)
	id = append(id, ')')
	return string(id)
}

View file

@ -0,0 +1,35 @@
// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"log"
"syscall"
)
// Uname returns the uname of the host machine, formatted as a parenthesized,
// space-separated list of the utsname fields (sysname, release, version,
// machine, nodename, domainname). It terminates the process if the uname
// syscall fails, since host identification is expected to always succeed.
func Uname() string {
	buf := syscall.Utsname{}
	if err := syscall.Uname(&buf); err != nil {
		// Include the underlying error so a startup failure is diagnosable,
		// rather than the bare "Error!" which gave no context at all.
		log.Fatalf("Error getting system uname: %v", err)
	}
	str := "(" + charsToString(buf.Sysname[:])
	str += " " + charsToString(buf.Release[:])
	str += " " + charsToString(buf.Version[:])
	str += " " + charsToString(buf.Machine[:])
	str += " " + charsToString(buf.Nodename[:])
	str += " " + charsToString(buf.Domainname[:]) + ")"
	return str
}

View file

@ -0,0 +1,25 @@
// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// +build 386 amd64 arm64 mips64 mips64le mips mipsle
// +build linux
package main
// charsToString converts a NUL-terminated C char array (as found in
// syscall.Utsname fields on architectures where char is signed) into a Go
// string, stopping at the first NUL byte. Converting the whole array, as the
// previous version did, embedded trailing NUL bytes (and any stale data after
// them) in the returned string.
func charsToString(ca []int8) string {
	s := make([]byte, 0, len(ca))
	for _, c := range ca {
		if c == 0 {
			break
		}
		s = append(s, byte(c))
	}
	return string(s)
}

View file

@ -0,0 +1,25 @@
// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// +build arm ppc64 ppc64le s390x
// +build linux
package main
// charsToString converts a NUL-terminated C char array (as found in
// syscall.Utsname fields on architectures where char is unsigned) into a Go
// string, stopping at the first NUL byte. Converting the whole array, as the
// previous version did, embedded trailing NUL bytes (and any stale data after
// them) in the returned string.
func charsToString(ca []uint8) string {
	s := make([]byte, 0, len(ca))
	for _, c := range ca {
		if c == 0 {
			break
		}
		s = append(s, c)
	}
	return string(s)
}

View file

@ -15,6 +15,7 @@ package rules
import ( import (
"fmt" "fmt"
"net/url"
"sync" "sync"
"time" "time"
@ -151,7 +152,7 @@ const resolvedRetention = 15 * time.Minute
// Eval evaluates the rule expression and then creates pending alerts and fires // Eval evaluates the rule expression and then creates pending alerts and fires
// or removes previously pending alerts accordingly. // or removes previously pending alerts accordingly.
func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, externalURLPath string) (promql.Vector, error) { func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, externalURL *url.URL) (promql.Vector, error) {
query, err := engine.NewInstantQuery(r.vector.String(), ts) query, err := engine.NewInstantQuery(r.vector.String(), ts)
if err != nil { if err != nil {
return nil, err return nil, err
@ -194,7 +195,7 @@ func (r *AlertingRule) Eval(ctx context.Context, ts time.Time, engine *promql.En
tmplData, tmplData,
model.Time(timestamp.FromTime(ts)), model.Time(timestamp.FromTime(ts)),
engine, engine,
externalURLPath, externalURL,
) )
result, err := tmpl.Expand() result, err := tmpl.Expand()
if err != nil { if err != nil {

View file

@ -112,7 +112,7 @@ const (
type Rule interface { type Rule interface {
Name() string Name() string
// eval evaluates the rule, including any associated recording or alerting actions. // eval evaluates the rule, including any associated recording or alerting actions.
Eval(context.Context, time.Time, *promql.Engine, string) (promql.Vector, error) Eval(context.Context, time.Time, *promql.Engine, *url.URL) (promql.Vector, error)
// String returns a human-readable string representation of the rule. // String returns a human-readable string representation of the rule.
String() string String() string
// HTMLSnippet returns a human-readable string representation of the rule, // HTMLSnippet returns a human-readable string representation of the rule,
@ -270,7 +270,7 @@ func (g *Group) Eval() {
evalTotal.WithLabelValues(rtyp).Inc() evalTotal.WithLabelValues(rtyp).Inc()
vector, err := rule.Eval(g.opts.Context, now, g.opts.QueryEngine, g.opts.ExternalURL.Path) vector, err := rule.Eval(g.opts.Context, now, g.opts.QueryEngine, g.opts.ExternalURL)
if err != nil { if err != nil {
// Canceled queries are intentional termination of queries. This normally // Canceled queries are intentional termination of queries. This normally
// happens on shutdown and thus we skip logging of any errors here. // happens on shutdown and thus we skip logging of any errors here.

View file

@ -109,7 +109,7 @@ func TestAlertingRule(t *testing.T) {
for i, test := range tests { for i, test := range tests {
evalTime := baseTime.Add(test.time) evalTime := baseTime.Add(test.time)
res, err := rule.Eval(suite.Context(), evalTime, suite.QueryEngine(), "") res, err := rule.Eval(suite.Context(), evalTime, suite.QueryEngine(), nil)
if err != nil { if err != nil {
t.Fatalf("Error during alerting rule evaluation: %s", err) t.Fatalf("Error during alerting rule evaluation: %s", err)
} }

View file

@ -16,6 +16,7 @@ package rules
import ( import (
"fmt" "fmt"
"html/template" "html/template"
"net/url"
"time" "time"
"golang.org/x/net/context" "golang.org/x/net/context"
@ -47,7 +48,7 @@ func (rule RecordingRule) Name() string {
} }
// Eval evaluates the rule and then overrides the metric names and labels accordingly. // Eval evaluates the rule and then overrides the metric names and labels accordingly.
func (rule RecordingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, _ string) (promql.Vector, error) { func (rule RecordingRule) Eval(ctx context.Context, ts time.Time, engine *promql.Engine, _ *url.URL) (promql.Vector, error) {
query, err := engine.NewInstantQuery(rule.vector.String(), ts) query, err := engine.NewInstantQuery(rule.vector.String(), ts)
if err != nil { if err != nil {
return nil, err return nil, err

View file

@ -64,7 +64,7 @@ func TestRuleEval(t *testing.T) {
for _, test := range suite { for _, test := range suite {
rule := NewRecordingRule(test.name, test.expr, test.labels) rule := NewRecordingRule(test.name, test.expr, test.labels)
result, err := rule.Eval(ctx, now, engine, "") result, err := rule.Eval(ctx, now, engine, nil)
if err != nil { if err != nil {
t.Fatalf("Error evaluating %s", test.name) t.Fatalf("Error evaluating %s", test.name)
} }

View file

@ -18,6 +18,7 @@ import (
"errors" "errors"
"fmt" "fmt"
"math" "math"
"net/url"
"regexp" "regexp"
"sort" "sort"
"strings" "strings"
@ -110,7 +111,7 @@ type Expander struct {
} }
// NewTemplateExpander returns a template expander ready to use. // NewTemplateExpander returns a template expander ready to use.
func NewTemplateExpander(ctx context.Context, text string, name string, data interface{}, timestamp model.Time, queryEngine *promql.Engine, pathPrefix string) *Expander { func NewTemplateExpander(ctx context.Context, text string, name string, data interface{}, timestamp model.Time, queryEngine *promql.Engine, externalURL *url.URL) *Expander {
return &Expander{ return &Expander{
text: text, text: text,
name: name, name: name,
@ -246,7 +247,10 @@ func NewTemplateExpander(ctx context.Context, text string, name string, data int
return fmt.Sprint(t) return fmt.Sprint(t)
}, },
"pathPrefix": func() string { "pathPrefix": func() string {
return pathPrefix return externalURL.Path
},
"externalURL": func() string {
return externalURL.String()
}, },
}, },
} }

View file

@ -15,6 +15,7 @@ package template
import ( import (
"math" "math"
"net/url"
"testing" "testing"
"github.com/prometheus/common/model" "github.com/prometheus/common/model"
@ -198,6 +199,16 @@ func TestTemplateExpansion(t *testing.T) {
output: "x", output: "x",
html: true, html: true,
}, },
{
// pathPrefix.
text: "{{ pathPrefix }}",
output: "/path/prefix",
},
{
// externalURL.
text: "{{ externalURL }}",
output: "http://testhost:9090/path/prefix",
},
} }
time := model.Time(0) time := model.Time(0)
@ -221,10 +232,15 @@ func TestTemplateExpansion(t *testing.T) {
engine := promql.NewEngine(storage, nil) engine := promql.NewEngine(storage, nil)
extURL, err := url.Parse("http://testhost:9090/path/prefix")
if err != nil {
panic(err)
}
for i, s := range scenarios { for i, s := range scenarios {
var result string var result string
var err error var err error
expander := NewTemplateExpander(context.Background(), s.text, "test", s.input, time, engine, "") expander := NewTemplateExpander(context.Background(), s.text, "test", s.input, time, engine, extURL)
if s.html { if s.html {
result, err = expander.ExpandHTML(nil) result, err = expander.ExpandHTML(nil)
} else { } else {

View file

@ -1,7 +1,7 @@
govalidator govalidator
=========== ===========
[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/asaskevich/govalidator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![GoDoc](https://godoc.org/github.com/asaskevich/govalidator?status.png)](https://godoc.org/github.com/asaskevich/govalidator) [![Coverage Status](https://img.shields.io/coveralls/asaskevich/govalidator.svg)](https://coveralls.io/r/asaskevich/govalidator?branch=master) [![wercker status](https://app.wercker.com/status/1ec990b09ea86c910d5f08b0e02c6043/s "wercker status")](https://app.wercker.com/project/bykey/1ec990b09ea86c910d5f08b0e02c6043) [![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/asaskevich/govalidator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![GoDoc](https://godoc.org/github.com/asaskevich/govalidator?status.png)](https://godoc.org/github.com/asaskevich/govalidator) [![Coverage Status](https://img.shields.io/coveralls/asaskevich/govalidator.svg)](https://coveralls.io/r/asaskevich/govalidator?branch=master) [![wercker status](https://app.wercker.com/status/1ec990b09ea86c910d5f08b0e02c6043/s "wercker status")](https://app.wercker.com/project/bykey/1ec990b09ea86c910d5f08b0e02c6043)
[![Build Status](https://travis-ci.org/asaskevich/govalidator.svg?branch=master)](https://travis-ci.org/asaskevich/govalidator) [![Build Status](https://travis-ci.org/asaskevich/govalidator.svg?branch=master)](https://travis-ci.org/asaskevich/govalidator) [![Go Report Card](https://goreportcard.com/badge/github.com/asaskevich/govalidator)](https://goreportcard.com/report/github.com/asaskevich/govalidator) [![GoSearch](http://go-search.org/badge?id=github.com%2Fasaskevich%2Fgovalidator)](http://go-search.org/view?id=github.com%2Fasaskevich%2Fgovalidator)
A package of validators and sanitizers for strings, structs and collections. Based on [validator.js](https://github.com/chriso/validator.js). A package of validators and sanitizers for strings, structs and collections. Based on [validator.js](https://github.com/chriso/validator.js).
@ -96,28 +96,27 @@ govalidator.CustomTypeTagMap.Set("customByteArrayValidator", CustomTypeValidator
func Abs(value float64) float64 func Abs(value float64) float64
func BlackList(str, chars string) string func BlackList(str, chars string) string
func ByteLength(str string, params ...string) bool func ByteLength(str string, params ...string) bool
func StringLength(str string, params ...string) bool
func StringMatches(s string, params ...string) bool
func CamelCaseToUnderscore(str string) string func CamelCaseToUnderscore(str string) string
func Contains(str, substring string) bool func Contains(str, substring string) bool
func Count(array []interface{}, iterator ConditionIterator) int func Count(array []interface{}, iterator ConditionIterator) int
func Each(array []interface{}, iterator Iterator) func Each(array []interface{}, iterator Iterator)
func ErrorByField(e error, field string) string func ErrorByField(e error, field string) string
func ErrorsByField(e error) map[string]string
func Filter(array []interface{}, iterator ConditionIterator) []interface{} func Filter(array []interface{}, iterator ConditionIterator) []interface{}
func Find(array []interface{}, iterator ConditionIterator) interface{} func Find(array []interface{}, iterator ConditionIterator) interface{}
func GetLine(s string, index int) (string, error) func GetLine(s string, index int) (string, error)
func GetLines(s string) []string func GetLines(s string) []string
func IsHost(s string) bool
func InRange(value, left, right float64) bool func InRange(value, left, right float64) bool
func IsASCII(str string) bool func IsASCII(str string) bool
func IsAlpha(str string) bool func IsAlpha(str string) bool
func IsAlphanumeric(str string) bool func IsAlphanumeric(str string) bool
func IsBase64(str string) bool func IsBase64(str string) bool
func IsByteLength(str string, min, max int) bool func IsByteLength(str string, min, max int) bool
func IsCIDR(str string) bool
func IsCreditCard(str string) bool func IsCreditCard(str string) bool
func IsDNSName(str string) bool
func IsDataURI(str string) bool func IsDataURI(str string) bool
func IsDialString(str string) bool func IsDialString(str string) bool
func IsDNSName(str string) bool
func IsDivisibleBy(str, num string) bool func IsDivisibleBy(str, num string) bool
func IsEmail(str string) bool func IsEmail(str string) bool
func IsFilePath(str string) (bool, int) func IsFilePath(str string) (bool, int)
@ -126,6 +125,7 @@ func IsFullWidth(str string) bool
func IsHalfWidth(str string) bool func IsHalfWidth(str string) bool
func IsHexadecimal(str string) bool func IsHexadecimal(str string) bool
func IsHexcolor(str string) bool func IsHexcolor(str string) bool
func IsHost(str string) bool
func IsIP(str string) bool func IsIP(str string) bool
func IsIPv4(str string) bool func IsIPv4(str string) bool
func IsIPv6(str string) bool func IsIPv6(str string) bool
@ -134,6 +134,8 @@ func IsISBN10(str string) bool
func IsISBN13(str string) bool func IsISBN13(str string) bool
func IsISO3166Alpha2(str string) bool func IsISO3166Alpha2(str string) bool
func IsISO3166Alpha3(str string) bool func IsISO3166Alpha3(str string) bool
func IsISO4217(str string) bool
func IsIn(str string, params ...string) bool
func IsInt(str string) bool func IsInt(str string) bool
func IsJSON(str string) bool func IsJSON(str string) bool
func IsLatitude(str string) bool func IsLatitude(str string) bool
@ -151,11 +153,13 @@ func IsNumeric(str string) bool
func IsPort(str string) bool func IsPort(str string) bool
func IsPositive(value float64) bool func IsPositive(value float64) bool
func IsPrintableASCII(str string) bool func IsPrintableASCII(str string) bool
func IsRFC3339(str string) bool
func IsRGBcolor(str string) bool func IsRGBcolor(str string) bool
func IsRequestURI(rawurl string) bool func IsRequestURI(rawurl string) bool
func IsRequestURL(rawurl string) bool func IsRequestURL(rawurl string) bool
func IsSSN(str string) bool func IsSSN(str string) bool
func IsSemver(str string) bool func IsSemver(str string) bool
func IsTime(str string, format string) bool
func IsURL(str string) bool func IsURL(str string) bool
func IsUTFDigit(str string) bool func IsUTFDigit(str string) bool
func IsUTFLetter(str string) bool func IsUTFLetter(str string) bool
@ -172,12 +176,20 @@ func LeftTrim(str, chars string) string
func Map(array []interface{}, iterator ResultIterator) []interface{} func Map(array []interface{}, iterator ResultIterator) []interface{}
func Matches(str, pattern string) bool func Matches(str, pattern string) bool
func NormalizeEmail(str string) (string, error) func NormalizeEmail(str string) (string, error)
func PadBoth(str string, padStr string, padLen int) string
func PadLeft(str string, padStr string, padLen int) string
func PadRight(str string, padStr string, padLen int) string
func Range(str string, params ...string) bool
func RemoveTags(s string) string func RemoveTags(s string) string
func ReplacePattern(str, pattern, replace string) string func ReplacePattern(str, pattern, replace string) string
func Reverse(s string) string func Reverse(s string) string
func RightTrim(str, chars string) string func RightTrim(str, chars string) string
func RuneLength(str string, params ...string) bool
func SafeFileName(str string) string func SafeFileName(str string) string
func SetFieldsRequiredByDefault(value bool)
func Sign(value float64) float64 func Sign(value float64) float64
func StringLength(str string, params ...string) bool
func StringMatches(s string, params ...string) bool
func StripLow(str string, keepNewLines bool) string func StripLow(str string, keepNewLines bool) string
func ToBoolean(str string) (bool, error) func ToBoolean(str string) (bool, error)
func ToFloat(str string) (float64, error) func ToFloat(str string) (float64, error)
@ -190,10 +202,12 @@ func UnderscoreToCamelCase(s string) string
func ValidateStruct(s interface{}) (bool, error) func ValidateStruct(s interface{}) (bool, error)
func WhiteList(str, chars string) string func WhiteList(str, chars string) string
type ConditionIterator type ConditionIterator
type CustomTypeValidator
type Error type Error
func (e Error) Error() string func (e Error) Error() string
type Errors type Errors
func (es Errors) Error() string func (es Errors) Error() string
func (es Errors) Errors() []error
type ISO3166Entry type ISO3166Entry
type Iterator type Iterator
type ParamValidator type ParamValidator
@ -253,59 +267,65 @@ For completely custom validators (interface-based), see below.
Here is a list of available validators for struct fields (validator - used function): Here is a list of available validators for struct fields (validator - used function):
```go ```go
"alpha": IsAlpha,
"alphanum": IsAlphanumeric,
"ascii": IsASCII,
"base64": IsBase64,
"creditcard": IsCreditCard,
"datauri": IsDataURI,
"dialstring": IsDialString,
"dns": IsDNSName,
"email": IsEmail, "email": IsEmail,
"float": IsFloat, "url": IsURL,
"fullwidth": IsFullWidth, "dialstring": IsDialString,
"halfwidth": IsHalfWidth, "requrl": IsRequestURL,
"requri": IsRequestURI,
"alpha": IsAlpha,
"utfletter": IsUTFLetter,
"alphanum": IsAlphanumeric,
"utfletternum": IsUTFLetterNumeric,
"numeric": IsNumeric,
"utfnumeric": IsUTFNumeric,
"utfdigit": IsUTFDigit,
"hexadecimal": IsHexadecimal, "hexadecimal": IsHexadecimal,
"hexcolor": IsHexcolor, "hexcolor": IsHexcolor,
"host": IsHost,
"int": IsInt,
"ip": IsIP,
"ipv4": IsIPv4,
"ipv6": IsIPv6,
"isbn10": IsISBN10,
"isbn13": IsISBN13,
"json": IsJSON,
"latitude": IsLatitude,
"longitude": IsLongitude,
"lowercase": IsLowerCase,
"mac": IsMAC,
"multibyte": IsMultibyte,
"null": IsNull,
"numeric": IsNumeric,
"port": IsPort,
"printableascii": IsPrintableASCII,
"requri": IsRequestURI,
"requrl": IsRequestURL,
"rgbcolor": IsRGBcolor, "rgbcolor": IsRGBcolor,
"ssn": IsSSN, "lowercase": IsLowerCase,
"semver": IsSemver,
"uppercase": IsUpperCase, "uppercase": IsUpperCase,
"url": IsURL, "int": IsInt,
"utfdigit": IsUTFDigit, "float": IsFloat,
"utfletter": IsUTFLetter, "null": IsNull,
"utfletternum": IsUTFLetterNumeric,
"utfnumeric": IsUTFNumeric,
"uuid": IsUUID, "uuid": IsUUID,
"uuidv3": IsUUIDv3, "uuidv3": IsUUIDv3,
"uuidv4": IsUUIDv4, "uuidv4": IsUUIDv4,
"uuidv5": IsUUIDv5, "uuidv5": IsUUIDv5,
"creditcard": IsCreditCard,
"isbn10": IsISBN10,
"isbn13": IsISBN13,
"json": IsJSON,
"multibyte": IsMultibyte,
"ascii": IsASCII,
"printableascii": IsPrintableASCII,
"fullwidth": IsFullWidth,
"halfwidth": IsHalfWidth,
"variablewidth": IsVariableWidth, "variablewidth": IsVariableWidth,
"base64": IsBase64,
"datauri": IsDataURI,
"ip": IsIP,
"port": IsPort,
"ipv4": IsIPv4,
"ipv6": IsIPv6,
"dns": IsDNSName,
"host": IsHost,
"mac": IsMAC,
"latitude": IsLatitude,
"longitude": IsLongitude,
"ssn": IsSSN,
"semver": IsSemver,
"rfc3339": IsRFC3339,
"ISO3166Alpha2": IsISO3166Alpha2,
"ISO3166Alpha3": IsISO3166Alpha3,
``` ```
Validators with parameters Validators with parameters
```go ```go
"range(min|max)": Range,
"length(min|max)": ByteLength, "length(min|max)": ByteLength,
"runelength(min|max)": RuneLength,
"matches(pattern)": StringMatches, "matches(pattern)": StringMatches,
"in(string1|string2|...|stringN)": IsIn,
``` ```
And here is small example of usage: And here is small example of usage:

View file

@ -4,7 +4,7 @@ import "regexp"
// Basic regular expressions for validating strings // Basic regular expressions for validating strings
const ( const (
Email string = "^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$" Email string = 
"^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$"
CreditCard string = "^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\\d{3})\\d{11})$" CreditCard string = "^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\\d{3})\\d{11})$"
ISBN10 string = "^(?:[0-9]{9}X|[0-9]{10})$" ISBN10 string = "^(?:[0-9]{9}X|[0-9]{10})$"
ISBN13 string = "^(?:[0-9]{13})$" ISBN13 string = "^(?:[0-9]{13})$"
@ -14,7 +14,7 @@ const (
UUID string = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$" UUID string = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$"
Alpha string = "^[a-zA-Z]+$" Alpha string = "^[a-zA-Z]+$"
Alphanumeric string = "^[a-zA-Z0-9]+$" Alphanumeric string = "^[a-zA-Z0-9]+$"
Numeric string = "^[-+]?[0-9]+$" Numeric string = "^[0-9]+$"
Int string = "^(?:[-+]?(?:0|[1-9][0-9]*))$" Int string = "^(?:[-+]?(?:0|[1-9][0-9]*))$"
Float string = "^(?:[-+]?(?:[0-9]+))?(?:\\.[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$" Float string = "^(?:[-+]?(?:[0-9]+))?(?:\\.[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$"
Hexadecimal string = "^[0-9a-fA-F]+$" Hexadecimal string = "^[0-9a-fA-F]+$"
@ -29,7 +29,7 @@ const (
DataURI string = "^data:.+\\/(.+);base64$" DataURI string = "^data:.+\\/(.+);base64$"
Latitude string = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$" Latitude string = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$"
Longitude string = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$" Longitude string = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$"
DNSName string = `^([a-zA-Z0-9]{1}[a-zA-Z0-9_-]{1,62}){1}(\.[a-zA-Z0-9]{1}[a-zA-Z0-9_-]{1,62})*$` DNSName string = `^([a-zA-Z0-9]{1}[a-zA-Z0-9_-]{0,62}){1}(\.[a-zA-Z0-9]{1}[a-zA-Z0-9_-]{0,62})*$`
IP string = `(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))` IP string = `(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))`
URLSchema string = `((ftp|tcp|udp|wss?|https?):\/\/)` URLSchema string = `((ftp|tcp|udp|wss?|https?):\/\/)`
URLUsername string = `(\S+(:\S*)?@)` URLUsername string = `(\S+(:\S*)?@)`
@ -37,11 +37,11 @@ const (
URLPath string = `((\/|\?|#)[^\s]*)` URLPath string = `((\/|\?|#)[^\s]*)`
URLPort string = `(:(\d{1,5}))` URLPort string = `(:(\d{1,5}))`
URLIP string = `([1-9]\d?|1\d\d|2[01]\d|22[0-3])(\.(1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-4]))` URLIP string = `([1-9]\d?|1\d\d|2[01]\d|22[0-3])(\.(1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-4]))`
URLSubdomain string = `((www\.)|([a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*))` URLSubdomain string = `((www\.)|([a-zA-Z0-9]([-\.][-\._a-zA-Z0-9]+)*))`
URL string = `^` + URLSchema + `?` + URLUsername + `?` + `((` + URLIP + `|(\[` + IP + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + `?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))` + URLPort + `?` + URLPath + `?$` URL string = `^` + URLSchema + `?` + URLUsername + `?` + `((` + URLIP + `|(\[` + IP + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-_]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + `?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))\.?` + URLPort + `?` + URLPath + `?$`
SSN string = `^\d{3}[- ]?\d{2}[- ]?\d{4}$` SSN string = `^\d{3}[- ]?\d{2}[- ]?\d{4}$`
WinPath string = `^[a-zA-Z]:\\(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$` WinPath string = `^[a-zA-Z]:\\(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$`
UnixPath string = `^((?:\/[a-zA-Z0-9\.\:]+(?:_[a-zA-Z0-9\:\.]+)*(?:\-[\:a-zA-Z0-9\.]+)*)+\/?)$` UnixPath string = `^(/[^/\x00]*)+/?$`
Semver string = "^v?(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)(-(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(\\.(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\\+[0-9a-zA-Z-]+(\\.[0-9a-zA-Z-]+)*)?$" Semver string = "^v?(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)(-(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(\\.(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\\+[0-9a-zA-Z-]+(\\.[0-9a-zA-Z-]+)*)?$"
tagName string = "valid" tagName string = "valid"
) )

View file

@ -29,15 +29,21 @@ type stringValues []reflect.Value
// ParamTagMap is a map of functions accept variants parameters // ParamTagMap is a map of functions accept variants parameters
var ParamTagMap = map[string]ParamValidator{ var ParamTagMap = map[string]ParamValidator{
"length": ByteLength, "length": ByteLength,
"range": Range,
"runelength": RuneLength,
"stringlength": StringLength, "stringlength": StringLength,
"matches": StringMatches, "matches": StringMatches,
"in": isInRaw,
} }
// ParamTagRegexMap maps param tags to their respective regexes. // ParamTagRegexMap maps param tags to their respective regexes.
var ParamTagRegexMap = map[string]*regexp.Regexp{ var ParamTagRegexMap = map[string]*regexp.Regexp{
"range": regexp.MustCompile("^range\\((\\d+)\\|(\\d+)\\)$"),
"length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"), "length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"),
"runelength": regexp.MustCompile("^runelength\\((\\d+)\\|(\\d+)\\)$"),
"stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"), "stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"),
"matches": regexp.MustCompile(`matches\(([^)]+)\)`), "in": regexp.MustCompile(`^in\((.*)\)`),
"matches": regexp.MustCompile(`^matches\((.+)\)$`),
} }
type customTypeTagMap struct { type customTypeTagMap struct {
@ -113,6 +119,10 @@ var TagMap = map[string]Validator{
"longitude": IsLongitude, "longitude": IsLongitude,
"ssn": IsSSN, "ssn": IsSSN,
"semver": IsSemver, "semver": IsSemver,
"rfc3339": IsRFC3339,
"ISO3166Alpha2": IsISO3166Alpha2,
"ISO3166Alpha3": IsISO3166Alpha3,
"ISO4217": IsISO4217,
} }
// ISO3166Entry stores country codes // ISO3166Entry stores country codes
@ -376,3 +386,33 @@ var ISO3166List = []ISO3166Entry{
{"Yemen", "Yémen (le)", "YE", "YEM", "887"}, {"Yemen", "Yémen (le)", "YE", "YEM", "887"},
{"Zambia", "Zambie (la)", "ZM", "ZMB", "894"}, {"Zambia", "Zambie (la)", "ZM", "ZMB", "894"},
} }
// ISO4217List is the list of ISO currency codes
var ISO4217List = []string{
"AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN",
"BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BOV", "BRL", "BSD", "BTN", "BWP", "BYN", "BZD",
"CAD", "CDF", "CHE", "CHF", "CHW", "CLF", "CLP", "CNY", "COP", "COU", "CRC", "CUC", "CUP", "CVE", "CZK",
"DJF", "DKK", "DOP", "DZD",
"EGP", "ERN", "ETB", "EUR",
"FJD", "FKP",
"GBP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD",
"HKD", "HNL", "HRK", "HTG", "HUF",
"IDR", "ILS", "INR", "IQD", "IRR", "ISK",
"JMD", "JOD", "JPY",
"KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT",
"LAK", "LBP", "LKR", "LRD", "LSL", "LYD",
"MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MXV", "MYR", "MZN",
"NAD", "NGN", "NIO", "NOK", "NPR", "NZD",
"OMR",
"PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG",
"QAR",
"RON", "RSD", "RUB", "RWF",
"SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SRD", "SSP", "STD", "SVC", "SYP", "SZL",
"THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TWD", "TZS",
"UAH", "UGX", "USD", "USN", "UYI", "UYU", "UZS",
"VEF", "VND", "VUV",
"WST",
"XAF", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XCD", "XDR", "XOF", "XPD", "XPF", "XPT", "XSU", "XTS", "XUA", "XXX",
"YER",
"ZAR", "ZMW", "ZWL",
}

View file

@ -4,10 +4,12 @@ import (
"errors" "errors"
"fmt" "fmt"
"html" "html"
"math"
"path" "path"
"regexp" "regexp"
"strings" "strings"
"unicode" "unicode"
"unicode/utf8"
) )
// Contains check if the string contains the substring. // Contains check if the string contains the substring.
@ -211,3 +213,56 @@ func Truncate(str string, length int, ending string) string {
return str return str
} }
// PadLeft pad left side of string if size of string is less then indicated pad length
func PadLeft(str string, padStr string, padLen int) string {
return buildPadStr(str, padStr, padLen, true, false)
}
// PadRight pad right side of string if size of string is less then indicated pad length
func PadRight(str string, padStr string, padLen int) string {
return buildPadStr(str, padStr, padLen, false, true)
}
// PadBoth pad sides of string if size of string is less then indicated pad length
func PadBoth(str string, padStr string, padLen int) string {
return buildPadStr(str, padStr, padLen, true, true)
}
// PadString either left, right or both sides, not the padding string can be unicode and more then one
// character
func buildPadStr(str string, padStr string, padLen int, padLeft bool, padRight bool) string {
// When padded length is less then the current string size
if padLen < utf8.RuneCountInString(str) {
return str
}
padLen -= utf8.RuneCountInString(str)
targetLen := padLen
targetLenLeft := targetLen
targetLenRight := targetLen
if padLeft && padRight {
targetLenLeft = padLen / 2
targetLenRight = padLen - targetLenLeft
}
strToRepeatLen := utf8.RuneCountInString(padStr)
repeatTimes := int(math.Ceil(float64(targetLen) / float64(strToRepeatLen)))
repeatedString := strings.Repeat(padStr, repeatTimes)
leftSide := ""
if padLeft {
leftSide = repeatedString[0:targetLenLeft]
}
rightSide := ""
if padRight {
rightSide = repeatedString[0:targetLenRight]
}
return leftSide + str + rightSide
}

View file

@ -11,13 +11,16 @@ import (
"sort" "sort"
"strconv" "strconv"
"strings" "strings"
"time"
"unicode" "unicode"
"unicode/utf8" "unicode/utf8"
) )
var fieldsRequiredByDefault bool var fieldsRequiredByDefault bool
const maxURLRuneCount = 2083
const minURLRuneCount = 3
// SetFieldsRequiredByDefault causes validation to fail when struct fields // SetFieldsRequiredByDefault causes validation to fail when struct fields
// do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`). // do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`).
// This struct definition will fail govalidator.ValidateStruct() (and the field values do not matter): // This struct definition will fail govalidator.ValidateStruct() (and the field values do not matter):
@ -44,7 +47,7 @@ func IsEmail(str string) bool {
// IsURL check if the string is an URL. // IsURL check if the string is an URL.
func IsURL(str string) bool { func IsURL(str string) bool {
if str == "" || len(str) >= 2083 || len(str) <= 3 || strings.HasPrefix(str, ".") { if str == "" || utf8.RuneCountInString(str) >= maxURLRuneCount || len(str) <= minURLRuneCount || strings.HasPrefix(str, ".") {
return false return false
} }
u, err := url.Parse(str) u, err := url.Parse(str)
@ -62,7 +65,7 @@ func IsURL(str string) bool {
} }
// IsRequestURL check if the string rawurl, assuming // IsRequestURL check if the string rawurl, assuming
// it was recieved in an HTTP request, is a valid // it was received in an HTTP request, is a valid
// URL confirm to RFC 3986 // URL confirm to RFC 3986
func IsRequestURL(rawurl string) bool { func IsRequestURL(rawurl string) bool {
url, err := url.ParseRequestURI(rawurl) url, err := url.ParseRequestURI(rawurl)
@ -76,7 +79,7 @@ func IsRequestURL(rawurl string) bool {
} }
// IsRequestURI check if the string rawurl, assuming // IsRequestURI check if the string rawurl, assuming
// it was recieved in an HTTP request, is an // it was received in an HTTP request, is an
// absolute URI or an absolute path. // absolute URI or an absolute path.
func IsRequestURI(rawurl string) bool { func IsRequestURI(rawurl string) bool {
_, err := url.ParseRequestURI(rawurl) _, err := url.ParseRequestURI(rawurl)
@ -458,7 +461,7 @@ func IsDNSName(str string) bool {
// constraints already violated // constraints already violated
return false return false
} }
return rxDNSName.MatchString(str) return !IsIP(str) && rxDNSName.MatchString(str)
} }
// IsDialString validates the given string for usage with the various Dial() functions // IsDialString validates the given string for usage with the various Dial() functions
@ -535,6 +538,17 @@ func IsLongitude(str string) bool {
return rxLongitude.MatchString(str) return rxLongitude.MatchString(str)
} }
func toJSONName(tag string) string {
if tag == "" {
return ""
}
// JSON name always comes first. If there's no options then split[0] is
// JSON name, if JSON name is not set, then split[0] is an empty string.
split := strings.SplitN(tag, ",", 2)
return split[0]
}
// ValidateStruct use tags for fields. // ValidateStruct use tags for fields.
// result will be equal to `false` if there are any errors. // result will be equal to `false` if there are any errors.
func ValidateStruct(s interface{}) (bool, error) { func ValidateStruct(s interface{}) (bool, error) {
@ -558,11 +572,39 @@ func ValidateStruct(s interface{}) (bool, error) {
if typeField.PkgPath != "" { if typeField.PkgPath != "" {
continue // Private field continue // Private field
} }
resultField, err2 := typeCheck(valueField, typeField, val) structResult := true
if valueField.Kind() == reflect.Struct {
var err error
structResult, err = ValidateStruct(valueField.Interface())
if err != nil {
errs = append(errs, err)
}
}
resultField, err2 := typeCheck(valueField, typeField, val, nil)
if err2 != nil { if err2 != nil {
// Replace structure name with JSON name if there is a tag on the variable
jsonTag := toJSONName(typeField.Tag.Get("json"))
if jsonTag != "" {
switch jsonError := err2.(type) {
case Error:
jsonError.Name = jsonTag
err2 = jsonError
case Errors:
for _, e := range jsonError.Errors() {
switch tempErr := e.(type) {
case Error:
tempErr.Name = jsonTag
_ = tempErr
}
}
err2 = jsonError
}
}
errs = append(errs, err2) errs = append(errs, err2)
} }
result = result && resultField result = result && resultField && structResult
} }
if len(errs) > 0 { if len(errs) > 0 {
err = errs err = errs
@ -594,7 +636,7 @@ func isValidTag(s string) bool {
} }
for _, c := range s { for _, c := range s {
switch { switch {
case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): case strings.ContainsRune("\\'\"!#$%&()*+-./:<=>?@[]^_{|}~ ", c):
// Backslash and quote chars are reserved, but // Backslash and quote chars are reserved, but
// otherwise any punctuation chars are allowed // otherwise any punctuation chars are allowed
// in a tag name. // in a tag name.
@ -620,6 +662,28 @@ func IsSemver(str string) bool {
return rxSemver.MatchString(str) return rxSemver.MatchString(str)
} }
// IsTime check if string is valid according to given format
func IsTime(str string, format string) bool {
_, err := time.Parse(format, str)
return err == nil
}
// IsRFC3339 check if string is valid timestamp value according to RFC3339
func IsRFC3339(str string) bool {
return IsTime(str, time.RFC3339)
}
// IsISO4217 check if string is valid ISO currency code
func IsISO4217(str string) bool {
for _, currency := range ISO4217List {
if str == currency {
return true
}
}
return false
}
// ByteLength check string's length // ByteLength check string's length
func ByteLength(str string, params ...string) bool { func ByteLength(str string, params ...string) bool {
if len(params) == 2 { if len(params) == 2 {
@ -631,6 +695,12 @@ func ByteLength(str string, params ...string) bool {
return false return false
} }
// RuneLength check string's length
// Alias for StringLength
func RuneLength(str string, params ...string) bool {
return StringLength(str, params...)
}
// StringMatches checks if a string matches a given pattern. // StringMatches checks if a string matches a given pattern.
func StringMatches(s string, params ...string) bool { func StringMatches(s string, params ...string) bool {
if len(params) == 1 { if len(params) == 1 {
@ -653,6 +723,41 @@ func StringLength(str string, params ...string) bool {
return false return false
} }
// Range check string's length
func Range(str string, params ...string) bool {
if len(params) == 2 {
value, _ := ToFloat(str)
min, _ := ToFloat(params[0])
max, _ := ToFloat(params[1])
return InRange(value, min, max)
}
return false
}
func isInRaw(str string, params ...string) bool {
if len(params) == 1 {
rawParams := params[0]
parsedParams := strings.Split(rawParams, "|")
return IsIn(str, parsedParams...)
}
return false
}
// IsIn check if string str is a member of the set of strings params
func IsIn(str string, params ...string) bool {
for _, param := range params {
if str == param {
return true
}
}
return false
}
func checkRequired(v reflect.Value, t reflect.StructField, options tagOptionsMap) (bool, error) { func checkRequired(v reflect.Value, t reflect.StructField, options tagOptionsMap) (bool, error) {
if requiredOption, isRequired := options["required"]; isRequired { if requiredOption, isRequired := options["required"]; isRequired {
if len(requiredOption) > 0 { if len(requiredOption) > 0 {
@ -666,7 +771,7 @@ func checkRequired(v reflect.Value, t reflect.StructField, options tagOptionsMap
return true, nil return true, nil
} }
func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, error) { func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value, options tagOptionsMap) (isValid bool, resultErr error) {
if !v.IsValid() { if !v.IsValid() {
return false, nil return false, nil
} }
@ -684,12 +789,22 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
return true, nil return true, nil
} }
options := parseTagIntoMap(tag) isRootType := false
if options == nil {
isRootType = true
options = parseTagIntoMap(tag)
}
if isEmptyValue(v) {
// an empty value is not validated, check only required
return checkRequired(v, t, options)
}
var customTypeErrors Errors var customTypeErrors Errors
var customTypeValidatorsExist bool
for validatorName, customErrorMessage := range options { for validatorName, customErrorMessage := range options {
if validatefunc, ok := CustomTypeTagMap.Get(validatorName); ok { if validatefunc, ok := CustomTypeTagMap.Get(validatorName); ok {
customTypeValidatorsExist = true delete(options, validatorName)
if result := validatefunc(v.Interface(), o.Interface()); !result { if result := validatefunc(v.Interface(), o.Interface()); !result {
if len(customErrorMessage) > 0 { if len(customErrorMessage) > 0 {
customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: fmt.Errorf(customErrorMessage), CustomErrorMessageExists: true}) customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: fmt.Errorf(customErrorMessage), CustomErrorMessageExists: true})
@ -699,16 +814,26 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
} }
} }
} }
if customTypeValidatorsExist {
if len(customTypeErrors.Errors()) > 0 { if len(customTypeErrors.Errors()) > 0 {
return false, customTypeErrors return false, customTypeErrors
}
return true, nil
} }
if isEmptyValue(v) { if isRootType {
// an empty value is not validated, check only required // Ensure that we've checked the value by all specified validators before report that the value is valid
return checkRequired(v, t, options) defer func() {
delete(options, "optional")
delete(options, "required")
if isValid && resultErr == nil && len(options) != 0 {
for validator := range options {
isValid = false
resultErr = Error{t.Name, fmt.Errorf(
"The following validator is invalid or can't be applied to the field: %q", validator), false}
return
}
}
}()
} }
switch v.Kind() { switch v.Kind() {
@ -718,10 +843,12 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
reflect.Float32, reflect.Float64, reflect.Float32, reflect.Float64,
reflect.String: reflect.String:
// for each tag option check the map of validator functions // for each tag option check the map of validator functions
for validator, customErrorMessage := range options { for validatorSpec, customErrorMessage := range options {
var negate bool var negate bool
validator := validatorSpec
customMsgExists := (len(customErrorMessage) > 0) customMsgExists := (len(customErrorMessage) > 0)
// Check wether the tag looks like '!something' or 'something'
// Check whether the tag looks like '!something' or 'something'
if validator[0] == '!' { if validator[0] == '!' {
validator = string(validator[1:]) validator = string(validator[1:])
negate = true negate = true
@ -730,38 +857,47 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
// Check for param validators // Check for param validators
for key, value := range ParamTagRegexMap { for key, value := range ParamTagRegexMap {
ps := value.FindStringSubmatch(validator) ps := value.FindStringSubmatch(validator)
if len(ps) > 0 { if len(ps) == 0 {
if validatefunc, ok := ParamTagMap[key]; ok { continue
switch v.Kind() { }
case reflect.String:
field := fmt.Sprint(v) // make value into string, then validate with regex
if result := validatefunc(field, ps[1:]...); (!result && !negate) || (result && negate) {
var err error
if !negate {
if customMsgExists {
err = fmt.Errorf(customErrorMessage)
} else {
err = fmt.Errorf("%s does not validate as %s", field, validator)
}
} else { validatefunc, ok := ParamTagMap[key]
if customMsgExists { if !ok {
err = fmt.Errorf(customErrorMessage) continue
} else { }
err = fmt.Errorf("%s does validate as %s", field, validator)
} delete(options, validatorSpec)
}
return false, Error{t.Name, err, customMsgExists} switch v.Kind() {
case reflect.String:
field := fmt.Sprint(v) // make value into string, then validate with regex
if result := validatefunc(field, ps[1:]...); (!result && !negate) || (result && negate) {
var err error
if !negate {
if customMsgExists {
err = fmt.Errorf(customErrorMessage)
} else {
err = fmt.Errorf("%s does not validate as %s", field, validator)
}
} else {
if customMsgExists {
err = fmt.Errorf(customErrorMessage)
} else {
err = fmt.Errorf("%s does validate as %s", field, validator)
} }
default:
// type not yet supported, fail
return false, Error{t.Name, fmt.Errorf("Validator %s doesn't support kind %s", validator, v.Kind()), false}
} }
return false, Error{t.Name, err, customMsgExists}
} }
default:
// type not yet supported, fail
return false, Error{t.Name, fmt.Errorf("Validator %s doesn't support kind %s", validator, v.Kind()), false}
} }
} }
if validatefunc, ok := TagMap[validator]; ok { if validatefunc, ok := TagMap[validator]; ok {
delete(options, validatorSpec)
switch v.Kind() { switch v.Kind() {
case reflect.String: case reflect.String:
field := fmt.Sprint(v) // make value into string, then validate with regex field := fmt.Sprint(v) // make value into string, then validate with regex
@ -813,7 +949,7 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
var resultItem bool var resultItem bool
var err error var err error
if v.Index(i).Kind() != reflect.Struct { if v.Index(i).Kind() != reflect.Struct {
resultItem, err = typeCheck(v.Index(i), t, o) resultItem, err = typeCheck(v.Index(i), t, o, options)
if err != nil { if err != nil {
return false, err return false, err
} }
@ -832,7 +968,7 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
var resultItem bool var resultItem bool
var err error var err error
if v.Index(i).Kind() != reflect.Struct { if v.Index(i).Kind() != reflect.Struct {
resultItem, err = typeCheck(v.Index(i), t, o) resultItem, err = typeCheck(v.Index(i), t, o, options)
if err != nil { if err != nil {
return false, err return false, err
} }
@ -856,7 +992,7 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e
if v.IsNil() { if v.IsNil() {
return true, nil return true, nil
} }
return typeCheck(v.Elem(), t, o) return typeCheck(v.Elem(), t, o, options)
case reflect.Struct: case reflect.Struct:
return ValidateStruct(v.Interface()) return ValidateStruct(v.Interface())
default: default:

View file

@ -1,4 +1,4 @@
box: wercker/golang box: golang
build: build:
steps: steps:
- setup-go-workspace - setup-go-workspace

View file

@ -48,8 +48,8 @@ type Block interface {
Queryable Queryable
} }
// HeadBlock is a regular block that can still be appended to. // headBlock is a regular block that can still be appended to.
type HeadBlock interface { type headBlock interface {
Block Block
Appendable Appendable
} }

View file

@ -118,7 +118,7 @@ type DB struct {
// or the general layout. // or the general layout.
// Must never be held when acquiring a blocks's mutex! // Must never be held when acquiring a blocks's mutex!
headmtx sync.RWMutex headmtx sync.RWMutex
heads []HeadBlock heads []headBlock
compactor Compactor compactor Compactor
@ -401,7 +401,7 @@ func (db *DB) reloadBlocks() error {
var ( var (
metas []*BlockMeta metas []*BlockMeta
blocks []Block blocks []Block
heads []HeadBlock heads []headBlock
seqBlocks = make(map[int]Block, len(dirs)) seqBlocks = make(map[int]Block, len(dirs))
) )
@ -418,7 +418,7 @@ func (db *DB) reloadBlocks() error {
if meta.Compaction.Generation == 0 { if meta.Compaction.Generation == 0 {
if !ok { if !ok {
b, err = openHeadBlock(dirs[i], db.logger) b, err = db.openHeadBlock(dirs[i])
if err != nil { if err != nil {
return errors.Wrapf(err, "load head at %s", dirs[i]) return errors.Wrapf(err, "load head at %s", dirs[i])
} }
@ -426,7 +426,7 @@ func (db *DB) reloadBlocks() error {
if meta.ULID != b.Meta().ULID { if meta.ULID != b.Meta().ULID {
return errors.Errorf("head block ULID changed unexpectedly") return errors.Errorf("head block ULID changed unexpectedly")
} }
heads = append(heads, b.(HeadBlock)) heads = append(heads, b.(headBlock))
} else { } else {
if !ok || meta.ULID != b.Meta().ULID { if !ok || meta.ULID != b.Meta().ULID {
b, err = newPersistedBlock(dirs[i]) b, err = newPersistedBlock(dirs[i])
@ -559,7 +559,7 @@ func (a *dbAppender) appenderFor(t int64) (*metaAppender, error) {
if len(a.heads) == 0 || t >= a.heads[len(a.heads)-1].meta.MaxTime { if len(a.heads) == 0 || t >= a.heads[len(a.heads)-1].meta.MaxTime {
a.db.headmtx.Lock() a.db.headmtx.Lock()
var newHeads []HeadBlock var newHeads []headBlock
if err := a.db.ensureHead(t); err != nil { if err := a.db.ensureHead(t); err != nil {
a.db.headmtx.Unlock() a.db.headmtx.Unlock()
@ -670,9 +670,9 @@ func (a *dbAppender) Rollback() error {
} }
// appendable returns a copy of a slice of HeadBlocks that can still be appended to. // appendable returns a copy of a slice of HeadBlocks that can still be appended to.
func (db *DB) appendable() []HeadBlock { func (db *DB) appendable() []headBlock {
var i int var i int
app := make([]HeadBlock, 0, db.opts.AppendableBlocks) app := make([]headBlock, 0, db.opts.AppendableBlocks)
if len(db.heads) > db.opts.AppendableBlocks { if len(db.heads) > db.opts.AppendableBlocks {
i = len(db.heads) - db.opts.AppendableBlocks i = len(db.heads) - db.opts.AppendableBlocks
@ -709,16 +709,37 @@ func (db *DB) blocksForInterval(mint, maxt int64) []Block {
return bs return bs
} }
// openHeadBlock opens the head block at dir.
func (db *DB) openHeadBlock(dir string) (*HeadBlock, error) {
var (
wdir = filepath.Join(dir, "wal")
l = log.With(db.logger, "wal", wdir)
)
wal, err := OpenSegmentWAL(wdir, l, 5*time.Second)
if err != nil {
return nil, errors.Wrap(err, "open WAL %s")
}
h, err := OpenHeadBlock(dir, log.With(db.logger, "block", dir), wal)
if err != nil {
return nil, errors.Wrapf(err, "open head block %s", dir)
}
return h, nil
}
// cut starts a new head block to append to. The completed head block // cut starts a new head block to append to. The completed head block
// will still be appendable for the configured grace period. // will still be appendable for the configured grace period.
func (db *DB) cut(mint int64) (HeadBlock, error) { func (db *DB) cut(mint int64) (headBlock, error) {
maxt := mint + int64(db.opts.MinBlockDuration) maxt := mint + int64(db.opts.MinBlockDuration)
dir, seq, err := nextSequenceFile(db.dir, "b-") dir, seq, err := nextSequenceFile(db.dir, "b-")
if err != nil { if err != nil {
return nil, err return nil, err
} }
newHead, err := createHeadBlock(dir, seq, db.logger, mint, maxt) if err := TouchHeadBlock(dir, seq, mint, maxt); err != nil {
return nil, errors.Wrapf(err, "touch head block %s", dir)
}
newHead, err := db.openHeadBlock(dir)
if err != nil { if err != nil {
return nil, err return nil, err
} }

View file

@ -47,11 +47,11 @@ var (
ErrOutOfBounds = errors.New("out of bounds") ErrOutOfBounds = errors.New("out of bounds")
) )
// headBlock handles reads and writes of time series data within a time window. // HeadBlock handles reads and writes of time series data within a time window.
type headBlock struct { type HeadBlock struct {
mtx sync.RWMutex mtx sync.RWMutex
dir string dir string
wal *WAL wal WAL
activeWriters uint64 activeWriters uint64
closed bool closed bool
@ -69,19 +69,21 @@ type headBlock struct {
meta BlockMeta meta BlockMeta
} }
func createHeadBlock(dir string, seq int, l log.Logger, mint, maxt int64) (*headBlock, error) { // TouchHeadBlock atomically touches a new head block in dir for
// samples in the range [mint,maxt).
func TouchHeadBlock(dir string, seq int, mint, maxt int64) error {
// Make head block creation appear atomic. // Make head block creation appear atomic.
tmp := dir + ".tmp" tmp := dir + ".tmp"
if err := os.MkdirAll(tmp, 0777); err != nil { if err := os.MkdirAll(tmp, 0777); err != nil {
return nil, err return err
} }
entropy := rand.New(rand.NewSource(time.Now().UnixNano())) entropy := rand.New(rand.NewSource(time.Now().UnixNano()))
ulid, err := ulid.New(ulid.Now(), entropy) ulid, err := ulid.New(ulid.Now(), entropy)
if err != nil { if err != nil {
return nil, err return err
} }
if err := writeMetaFile(tmp, &BlockMeta{ if err := writeMetaFile(tmp, &BlockMeta{
@ -90,38 +92,33 @@ func createHeadBlock(dir string, seq int, l log.Logger, mint, maxt int64) (*head
MinTime: mint, MinTime: mint,
MaxTime: maxt, MaxTime: maxt,
}); err != nil { }); err != nil {
return nil, err return err
} }
if err := renameFile(tmp, dir); err != nil { return renameFile(tmp, dir)
return nil, err
}
return openHeadBlock(dir, l)
} }
// openHeadBlock creates a new empty head block. // OpenHeadBlock opens the head block in dir.
func openHeadBlock(dir string, l log.Logger) (*headBlock, error) { func OpenHeadBlock(dir string, l log.Logger, wal WAL) (*HeadBlock, error) {
wal, err := OpenWAL(dir, log.With(l, "component", "wal"), 5*time.Second)
if err != nil {
return nil, err
}
meta, err := readMetaFile(dir) meta, err := readMetaFile(dir)
if err != nil { if err != nil {
return nil, err return nil, err
} }
h := &headBlock{ h := &HeadBlock{
dir: dir, dir: dir,
wal: wal, wal: wal,
series: []*memSeries{}, series: []*memSeries{nil}, // 0 is not a valid posting, filled with nil.
hashes: map[uint64][]*memSeries{}, hashes: map[uint64][]*memSeries{},
values: map[string]stringset{}, values: map[string]stringset{},
postings: &memPostings{m: make(map[term][]uint32)}, postings: &memPostings{m: make(map[term][]uint32)},
meta: *meta, meta: *meta,
} }
return h, h.init()
}
r := wal.Reader() func (h *HeadBlock) init() error {
r := h.wal.Reader()
Outer:
for r.Next() { for r.Next() {
series, samples := r.At() series, samples := r.At()
@ -130,37 +127,32 @@ Outer:
h.meta.Stats.NumSeries++ h.meta.Stats.NumSeries++
} }
for _, s := range samples { for _, s := range samples {
if int(s.ref) >= len(h.series) { if int(s.Ref) >= len(h.series) {
l.Log("msg", "unknown series reference, abort WAL restore", "got", s.ref, "max", len(h.series)-1) return errors.Errorf("unknown series reference %d (max %d); abort WAL restore", s.Ref, len(h.series))
break Outer
} }
h.series[s.ref].append(s.t, s.v) h.series[s.Ref].append(s.T, s.V)
if !h.inBounds(s.t) { if !h.inBounds(s.T) {
return nil, errors.Wrap(ErrOutOfBounds, "consume WAL") return errors.Wrap(ErrOutOfBounds, "consume WAL")
} }
h.meta.Stats.NumSamples++ h.meta.Stats.NumSamples++
} }
} }
if err := r.Err(); err != nil { return errors.Wrap(r.Err(), "consume WAL")
return nil, errors.Wrap(err, "consume WAL")
}
return h, nil
} }
// inBounds returns true if the given timestamp is within the valid // inBounds returns true if the given timestamp is within the valid
// time bounds of the block. // time bounds of the block.
func (h *headBlock) inBounds(t int64) bool { func (h *HeadBlock) inBounds(t int64) bool {
return t >= h.meta.MinTime && t <= h.meta.MaxTime return t >= h.meta.MinTime && t <= h.meta.MaxTime
} }
func (h *headBlock) String() string { func (h *HeadBlock) String() string {
return fmt.Sprintf("(%d, %s)", h.meta.Sequence, h.meta.ULID) return fmt.Sprintf("(%d, %s)", h.meta.Sequence, h.meta.ULID)
} }
// Close syncs all data and closes underlying resources of the head block. // Close syncs all data and closes underlying resources of the head block.
func (h *headBlock) Close() error { func (h *HeadBlock) Close() error {
h.mtx.Lock() h.mtx.Lock()
defer h.mtx.Unlock() defer h.mtx.Unlock()
@ -184,7 +176,7 @@ func (h *headBlock) Close() error {
return nil return nil
} }
func (h *headBlock) Meta() BlockMeta { func (h *HeadBlock) Meta() BlockMeta {
m := BlockMeta{ m := BlockMeta{
ULID: h.meta.ULID, ULID: h.meta.ULID,
Sequence: h.meta.Sequence, Sequence: h.meta.Sequence,
@ -200,12 +192,12 @@ func (h *headBlock) Meta() BlockMeta {
return m return m
} }
func (h *headBlock) Dir() string { return h.dir } func (h *HeadBlock) Dir() string { return h.dir }
func (h *headBlock) Persisted() bool { return false } func (h *HeadBlock) Persisted() bool { return false }
func (h *headBlock) Index() IndexReader { return &headIndexReader{h} } func (h *HeadBlock) Index() IndexReader { return &headIndexReader{h} }
func (h *headBlock) Chunks() ChunkReader { return &headChunkReader{h} } func (h *HeadBlock) Chunks() ChunkReader { return &headChunkReader{h} }
func (h *headBlock) Querier(mint, maxt int64) Querier { func (h *HeadBlock) Querier(mint, maxt int64) Querier {
h.mtx.RLock() h.mtx.RLock()
defer h.mtx.RUnlock() defer h.mtx.RUnlock()
@ -244,7 +236,7 @@ func (h *headBlock) Querier(mint, maxt int64) Querier {
} }
} }
func (h *headBlock) Appender() Appender { func (h *HeadBlock) Appender() Appender {
atomic.AddUint64(&h.activeWriters, 1) atomic.AddUint64(&h.activeWriters, 1)
h.mtx.RLock() h.mtx.RLock()
@ -252,36 +244,36 @@ func (h *headBlock) Appender() Appender {
if h.closed { if h.closed {
panic(fmt.Sprintf("block %s already closed", h.dir)) panic(fmt.Sprintf("block %s already closed", h.dir))
} }
return &headAppender{headBlock: h, samples: getHeadAppendBuffer()} return &headAppender{HeadBlock: h, samples: getHeadAppendBuffer()}
} }
func (h *headBlock) Busy() bool { func (h *HeadBlock) Busy() bool {
return atomic.LoadUint64(&h.activeWriters) > 0 return atomic.LoadUint64(&h.activeWriters) > 0
} }
var headPool = sync.Pool{} var headPool = sync.Pool{}
func getHeadAppendBuffer() []refdSample { func getHeadAppendBuffer() []RefSample {
b := headPool.Get() b := headPool.Get()
if b == nil { if b == nil {
return make([]refdSample, 0, 512) return make([]RefSample, 0, 512)
} }
return b.([]refdSample) return b.([]RefSample)
} }
func putHeadAppendBuffer(b []refdSample) { func putHeadAppendBuffer(b []RefSample) {
headPool.Put(b[:0]) headPool.Put(b[:0])
} }
type headAppender struct { type headAppender struct {
*headBlock *HeadBlock
newSeries map[uint64]hashedLabels newSeries map[uint64]hashedLabels
newHashes map[uint64]uint64 newHashes map[uint64]uint64
refmap map[uint64]uint64 refmap map[uint64]uint64
newLabels []labels.Labels newLabels []labels.Labels
samples []refdSample samples []RefSample
} }
type hashedLabels struct { type hashedLabels struct {
@ -289,12 +281,6 @@ type hashedLabels struct {
labels labels.Labels labels labels.Labels
} }
type refdSample struct {
ref uint64
t int64
v float64
}
func (a *headAppender) Add(lset labels.Labels, t int64, v float64) (uint64, error) { func (a *headAppender) Add(lset labels.Labels, t int64, v float64) (uint64, error) {
if !a.inBounds(t) { if !a.inBounds(t) {
return 0, ErrOutOfBounds return 0, ErrOutOfBounds
@ -369,10 +355,10 @@ func (a *headAppender) AddFast(ref uint64, t int64, v float64) error {
} }
} }
a.samples = append(a.samples, refdSample{ a.samples = append(a.samples, RefSample{
ref: ref, Ref: ref,
t: t, T: t,
v: v, V: v,
}) })
return nil return nil
} }
@ -418,8 +404,8 @@ func (a *headAppender) Commit() error {
for i := range a.samples { for i := range a.samples {
s := &a.samples[i] s := &a.samples[i]
if s.ref&(1<<32) > 0 { if s.Ref&(1<<32) > 0 {
s.ref = a.refmap[s.ref] s.Ref = a.refmap[s.Ref]
} }
} }
@ -433,7 +419,7 @@ func (a *headAppender) Commit() error {
total := uint64(len(a.samples)) total := uint64(len(a.samples))
for _, s := range a.samples { for _, s := range a.samples {
if !a.series[s.ref].append(s.t, s.v) { if !a.series[s.Ref].append(s.T, s.V) {
total-- total--
} }
} }
@ -454,7 +440,7 @@ func (a *headAppender) Rollback() error {
} }
type headChunkReader struct { type headChunkReader struct {
*headBlock *HeadBlock
} }
// Chunk returns the chunk for the reference number. // Chunk returns the chunk for the reference number.
@ -490,7 +476,7 @@ func (c *safeChunk) Iterator() chunks.Iterator {
// func (c *safeChunk) Encoding() chunks.Encoding { panic("illegal") } // func (c *safeChunk) Encoding() chunks.Encoding { panic("illegal") }
type headIndexReader struct { type headIndexReader struct {
*headBlock *HeadBlock
} }
// LabelValues returns the possible label values // LabelValues returns the possible label values
@ -558,7 +544,7 @@ func (h *headIndexReader) LabelIndices() ([][]string, error) {
// get retrieves the chunk with the hash and label set and creates // get retrieves the chunk with the hash and label set and creates
// a new one if it doesn't exist yet. // a new one if it doesn't exist yet.
func (h *headBlock) get(hash uint64, lset labels.Labels) *memSeries { func (h *HeadBlock) get(hash uint64, lset labels.Labels) *memSeries {
series := h.hashes[hash] series := h.hashes[hash]
for _, s := range series { for _, s := range series {
@ -569,11 +555,13 @@ func (h *headBlock) get(hash uint64, lset labels.Labels) *memSeries {
return nil return nil
} }
func (h *headBlock) create(hash uint64, lset labels.Labels) *memSeries { func (h *HeadBlock) create(hash uint64, lset labels.Labels) *memSeries {
s := &memSeries{ s := &memSeries{
lset: lset, lset: lset,
ref: uint32(len(h.series)), ref: uint32(len(h.series)),
} }
// create the initial chunk and appender
s.cut()
// Allocate empty space until we can insert at the given index. // Allocate empty space until we can insert at the given index.
h.series = append(h.series, s) h.series = append(h.series, s)
@ -636,7 +624,7 @@ func (s *memSeries) append(t int64, v float64) bool {
var c *memChunk var c *memChunk
if s.app == nil || s.head().samples > 130 { if s.head().samples > 130 {
c = s.cut() c = s.cut()
c.minTime = t c.minTime = t
} else { } else {

View file

@ -33,7 +33,7 @@ func (p *memPostings) get(t term) Postings {
if l == nil { if l == nil {
return emptyPostings return emptyPostings
} }
return &listPostings{list: l, idx: -1} return newListPostings(l)
} }
// add adds a document to the index. The caller has to ensure that no // add adds a document to the index. The caller has to ensure that no
@ -70,18 +70,13 @@ func (e errPostings) Seek(uint32) bool { return false }
func (e errPostings) At() uint32 { return 0 } func (e errPostings) At() uint32 { return 0 }
func (e errPostings) Err() error { return e.err } func (e errPostings) Err() error { return e.err }
func expandPostings(p Postings) (res []uint32, err error) { var emptyPostings = errPostings{}
for p.Next() {
res = append(res, p.At())
}
return res, p.Err()
}
// Intersect returns a new postings list over the intersection of the // Intersect returns a new postings list over the intersection of the
// input postings. // input postings.
func Intersect(its ...Postings) Postings { func Intersect(its ...Postings) Postings {
if len(its) == 0 { if len(its) == 0 {
return errPostings{err: nil} return emptyPostings
} }
a := its[0] a := its[0]
@ -91,8 +86,6 @@ func Intersect(its ...Postings) Postings {
return a return a
} }
var emptyPostings = errPostings{}
type intersectPostings struct { type intersectPostings struct {
a, b Postings a, b Postings
aok, bok bool aok, bok bool
@ -100,41 +93,44 @@ type intersectPostings struct {
} }
func newIntersectPostings(a, b Postings) *intersectPostings { func newIntersectPostings(a, b Postings) *intersectPostings {
it := &intersectPostings{a: a, b: b} return &intersectPostings{a: a, b: b}
it.aok = it.a.Next()
it.bok = it.b.Next()
return it
} }
func (it *intersectPostings) At() uint32 { func (it *intersectPostings) At() uint32 {
return it.cur return it.cur
} }
func (it *intersectPostings) Next() bool { func (it *intersectPostings) doNext(id uint32) bool {
for { for {
if !it.aok || !it.bok { if !it.b.Seek(id) {
return false return false
} }
av, bv := it.a.At(), it.b.At() if vb := it.b.At(); vb != id {
if !it.a.Seek(vb) {
if av < bv { return false
it.aok = it.a.Seek(bv) }
} else if bv < av { id = it.a.At()
it.bok = it.b.Seek(av) if vb != id {
} else { continue
it.cur = av }
it.aok = it.a.Next()
it.bok = it.b.Next()
return true
} }
it.cur = id
return true
} }
} }
func (it *intersectPostings) Next() bool {
if !it.a.Next() {
return false
}
return it.doNext(it.a.At())
}
func (it *intersectPostings) Seek(id uint32) bool { func (it *intersectPostings) Seek(id uint32) bool {
it.aok = it.a.Seek(id) if !it.a.Seek(id) {
it.bok = it.b.Seek(id) return false
return it.Next() }
return it.doNext(it.a.At())
} }
func (it *intersectPostings) Err() error { func (it *intersectPostings) Err() error {
@ -158,17 +154,14 @@ func Merge(its ...Postings) Postings {
} }
type mergedPostings struct { type mergedPostings struct {
a, b Postings a, b Postings
aok, bok bool initialized bool
cur uint32 aok, bok bool
cur uint32
} }
func newMergedPostings(a, b Postings) *mergedPostings { func newMergedPostings(a, b Postings) *mergedPostings {
it := &mergedPostings{a: a, b: b} return &mergedPostings{a: a, b: b}
it.aok = it.a.Next()
it.bok = it.b.Next()
return it
} }
func (it *mergedPostings) At() uint32 { func (it *mergedPostings) At() uint32 {
@ -176,6 +169,12 @@ func (it *mergedPostings) At() uint32 {
} }
func (it *mergedPostings) Next() bool { func (it *mergedPostings) Next() bool {
if !it.initialized {
it.aok = it.a.Next()
it.bok = it.b.Next()
it.initialized = true
}
if !it.aok && !it.bok { if !it.aok && !it.bok {
return false return false
} }
@ -196,23 +195,25 @@ func (it *mergedPostings) Next() bool {
if acur < bcur { if acur < bcur {
it.cur = acur it.cur = acur
it.aok = it.a.Next() it.aok = it.a.Next()
return true } else if acur > bcur {
}
if bcur < acur {
it.cur = bcur it.cur = bcur
it.bok = it.b.Next() it.bok = it.b.Next()
return true } else {
it.cur = acur
it.aok = it.a.Next()
it.bok = it.b.Next()
} }
it.cur = acur
it.aok = it.a.Next()
it.bok = it.b.Next()
return true return true
} }
func (it *mergedPostings) Seek(id uint32) bool { func (it *mergedPostings) Seek(id uint32) bool {
if it.cur >= id {
return true
}
it.aok = it.a.Seek(id) it.aok = it.a.Seek(id)
it.bok = it.b.Seek(id) it.bok = it.b.Seek(id)
it.initialized = true
return it.Next() return it.Next()
} }
@ -227,28 +228,44 @@ func (it *mergedPostings) Err() error {
// listPostings implements the Postings interface over a plain list. // listPostings implements the Postings interface over a plain list.
type listPostings struct { type listPostings struct {
list []uint32 list []uint32
idx int cur uint32
} }
func newListPostings(list []uint32) *listPostings { func newListPostings(list []uint32) *listPostings {
return &listPostings{list: list, idx: -1} return &listPostings{list: list}
} }
func (it *listPostings) At() uint32 { func (it *listPostings) At() uint32 {
return it.list[it.idx] return it.cur
} }
func (it *listPostings) Next() bool { func (it *listPostings) Next() bool {
it.idx++ if len(it.list) > 0 {
return it.idx < len(it.list) it.cur = it.list[0]
it.list = it.list[1:]
return true
}
it.cur = 0
return false
} }
func (it *listPostings) Seek(x uint32) bool { func (it *listPostings) Seek(x uint32) bool {
// If the current value satisfies, then return.
if it.cur >= x {
return true
}
// Do binary search between current position and end. // Do binary search between current position and end.
it.idx += sort.Search(len(it.list)-it.idx, func(i int) bool { i := sort.Search(len(it.list), func(i int) bool {
return it.list[i+it.idx] >= x return it.list[i] >= x
}) })
return it.idx < len(it.list) if i < len(it.list) {
it.cur = it.list[i]
it.list = it.list[i+1:]
return true
}
it.list = nil
return false
} }
func (it *listPostings) Err() error { func (it *listPostings) Err() error {
@ -259,32 +276,44 @@ func (it *listPostings) Err() error {
// big endian numbers. // big endian numbers.
type bigEndianPostings struct { type bigEndianPostings struct {
list []byte list []byte
idx int cur uint32
} }
func newBigEndianPostings(list []byte) *bigEndianPostings { func newBigEndianPostings(list []byte) *bigEndianPostings {
return &bigEndianPostings{list: list, idx: -1} return &bigEndianPostings{list: list}
} }
func (it *bigEndianPostings) At() uint32 { func (it *bigEndianPostings) At() uint32 {
idx := 4 * it.idx return it.cur
return binary.BigEndian.Uint32(it.list[idx : idx+4])
} }
func (it *bigEndianPostings) Next() bool { func (it *bigEndianPostings) Next() bool {
it.idx++ if len(it.list) >= 4 {
return it.idx*4 < len(it.list) it.cur = binary.BigEndian.Uint32(it.list)
it.list = it.list[4:]
return true
}
return false
} }
func (it *bigEndianPostings) Seek(x uint32) bool { func (it *bigEndianPostings) Seek(x uint32) bool {
if it.cur >= x {
return true
}
num := len(it.list) / 4 num := len(it.list) / 4
// Do binary search between current position and end. // Do binary search between current position and end.
it.idx += sort.Search(num-it.idx, func(i int) bool { i := sort.Search(num, func(i int) bool {
idx := 4 * (it.idx + i) return binary.BigEndian.Uint32(it.list[i*4:]) >= x
val := binary.BigEndian.Uint32(it.list[idx : idx+4])
return val >= x
}) })
return it.idx*4 < len(it.list) if i < num {
j := i * 4
it.cur = binary.BigEndian.Uint32(it.list[j:])
it.list = it.list[j+4:]
return true
}
it.list = nil
return false
} }
func (it *bigEndianPostings) Err() error { func (it *bigEndianPostings) Err() error {

View file

@ -135,21 +135,9 @@ type blockQuerier struct {
} }
func (q *blockQuerier) Select(ms ...labels.Matcher) SeriesSet { func (q *blockQuerier) Select(ms ...labels.Matcher) SeriesSet {
var ( pr := newPostingsReader(q.index)
its []Postings
absent []string
)
for _, m := range ms {
// If the matcher checks absence of a label, don't select them
// but propagate the check into the series set.
if _, ok := m.(*labels.EqualMatcher); ok && m.Matches("") {
absent = append(absent, m.Name())
continue
}
its = append(its, q.selectSingle(m))
}
p := Intersect(its...) p, absent := pr.Select(ms...)
if q.postingsMapper != nil { if q.postingsMapper != nil {
p = q.postingsMapper(p) p = q.postingsMapper(p)
@ -172,50 +160,6 @@ func (q *blockQuerier) Select(ms ...labels.Matcher) SeriesSet {
} }
} }
func (q *blockQuerier) selectSingle(m labels.Matcher) Postings {
// Fast-path for equal matching.
if em, ok := m.(*labels.EqualMatcher); ok {
it, err := q.index.Postings(em.Name(), em.Value())
if err != nil {
return errPostings{err: err}
}
return it
}
tpls, err := q.index.LabelValues(m.Name())
if err != nil {
return errPostings{err: err}
}
// TODO(fabxc): use interface upgrading to provide fast solution
// for equality and prefix matches. Tuples are lexicographically sorted.
var res []string
for i := 0; i < tpls.Len(); i++ {
vals, err := tpls.At(i)
if err != nil {
return errPostings{err: err}
}
if m.Matches(vals[0]) {
res = append(res, vals[0])
}
}
if len(res) == 0 {
return emptyPostings
}
var rit []Postings
for _, v := range res {
it, err := q.index.Postings(m.Name(), v)
if err != nil {
return errPostings{err: err}
}
rit = append(rit, it)
}
return Merge(rit...)
}
func (q *blockQuerier) LabelValues(name string) ([]string, error) { func (q *blockQuerier) LabelValues(name string) ([]string, error) {
tpls, err := q.index.LabelValues(name) tpls, err := q.index.LabelValues(name)
if err != nil { if err != nil {
@ -241,6 +185,81 @@ func (q *blockQuerier) Close() error {
return nil return nil
} }
// postingsReader is used to select matching postings from an IndexReader.
type postingsReader struct {
index IndexReader
}
func newPostingsReader(i IndexReader) *postingsReader {
return &postingsReader{index: i}
}
func (r *postingsReader) Select(ms ...labels.Matcher) (Postings, []string) {
var (
its []Postings
absent []string
)
for _, m := range ms {
// If the matcher checks absence of a label, don't select them
// but propagate the check into the series set.
if _, ok := m.(*labels.EqualMatcher); ok && m.Matches("") {
absent = append(absent, m.Name())
continue
}
its = append(its, r.selectSingle(m))
}
p := Intersect(its...)
return p, absent
}
func (r *postingsReader) selectSingle(m labels.Matcher) Postings {
// Fast-path for equal matching.
if em, ok := m.(*labels.EqualMatcher); ok {
it, err := r.index.Postings(em.Name(), em.Value())
if err != nil {
return errPostings{err: err}
}
return it
}
// TODO(fabxc): use interface upgrading to provide fast solution
// for prefix matches. Tuples are lexicographically sorted.
tpls, err := r.index.LabelValues(m.Name())
if err != nil {
return errPostings{err: err}
}
var res []string
for i := 0; i < tpls.Len(); i++ {
vals, err := tpls.At(i)
if err != nil {
return errPostings{err: err}
}
if m.Matches(vals[0]) {
res = append(res, vals[0])
}
}
if len(res) == 0 {
return emptyPostings
}
var rit []Postings
for _, v := range res {
it, err := r.index.Postings(m.Name(), v)
if err != nil {
return errPostings{err: err}
}
rit = append(rit, it)
}
return Merge(rit...)
}
func mergeStrings(a, b []string) []string { func mergeStrings(a, b []string) []string {
maxl := len(a) maxl := len(a)
if len(b) > len(a) { if len(b) > len(a) {

View file

@ -49,9 +49,8 @@ const (
WALEntrySamples WALEntryType = 3 WALEntrySamples WALEntryType = 3
) )
// WAL is a write ahead log for series data. It can only be written to. // SegmentWAL is a write ahead log for series data.
// Use WALReader to read back from a write ahead log. type SegmentWAL struct {
type WAL struct {
mtx sync.Mutex mtx sync.Mutex
dirFile *os.File dirFile *os.File
@ -69,6 +68,28 @@ type WAL struct {
donec chan struct{} donec chan struct{}
} }
// WAL is a write ahead log that can log new series labels and samples.
// It must be completely read before new entries are logged.
type WAL interface {
Reader() WALReader
Log([]labels.Labels, []RefSample) error
Close() error
}
// WALReader reads entries from a WAL.
type WALReader interface {
At() ([]labels.Labels, []RefSample)
Next() bool
Err() error
}
// RefSample is a timestamp/value pair associated with a reference to a series.
type RefSample struct {
Ref uint64
T int64
V float64
}
const ( const (
walDirName = "wal" walDirName = "wal"
walSegmentSizeBytes = 256 * 1024 * 1024 // 256 MB walSegmentSizeBytes = 256 * 1024 * 1024 // 256 MB
@ -83,9 +104,9 @@ func init() {
castagnoliTable = crc32.MakeTable(crc32.Castagnoli) castagnoliTable = crc32.MakeTable(crc32.Castagnoli)
} }
// OpenWAL opens or creates a write ahead log in the given directory. // OpenSegmentWAL opens or creates a write ahead log in the given directory.
// The WAL must be read completely before new data is written. // The WAL must be read completely before new data is written.
func OpenWAL(dir string, logger log.Logger, flushInterval time.Duration) (*WAL, error) { func OpenSegmentWAL(dir string, logger log.Logger, flushInterval time.Duration) (*SegmentWAL, error) {
dir = filepath.Join(dir, walDirName) dir = filepath.Join(dir, walDirName)
if err := os.MkdirAll(dir, 0777); err != nil { if err := os.MkdirAll(dir, 0777); err != nil {
@ -99,7 +120,7 @@ func OpenWAL(dir string, logger log.Logger, flushInterval time.Duration) (*WAL,
logger = log.NewNopLogger() logger = log.NewNopLogger()
} }
w := &WAL{ w := &SegmentWAL{
dirFile: df, dirFile: df,
logger: logger, logger: logger,
flushInterval: flushInterval, flushInterval: flushInterval,
@ -119,12 +140,12 @@ func OpenWAL(dir string, logger log.Logger, flushInterval time.Duration) (*WAL,
// Reader returns a new reader over the the write ahead log data. // Reader returns a new reader over the the write ahead log data.
// It must be completely consumed before writing to the WAL. // It must be completely consumed before writing to the WAL.
func (w *WAL) Reader() *WALReader { func (w *SegmentWAL) Reader() WALReader {
return NewWALReader(w.logger, w) return newWALReader(w, w.logger)
} }
// Log writes a batch of new series labels and samples to the log. // Log writes a batch of new series labels and samples to the log.
func (w *WAL) Log(series []labels.Labels, samples []refdSample) error { func (w *SegmentWAL) Log(series []labels.Labels, samples []RefSample) error {
if err := w.encodeSeries(series); err != nil { if err := w.encodeSeries(series); err != nil {
return err return err
} }
@ -139,7 +160,7 @@ func (w *WAL) Log(series []labels.Labels, samples []refdSample) error {
// initSegments finds all existing segment files and opens them in the // initSegments finds all existing segment files and opens them in the
// appropriate file modes. // appropriate file modes.
func (w *WAL) initSegments() error { func (w *SegmentWAL) initSegments() error {
fns, err := sequenceFiles(w.dirFile.Name(), "") fns, err := sequenceFiles(w.dirFile.Name(), "")
if err != nil { if err != nil {
return err return err
@ -180,7 +201,7 @@ func (w *WAL) initSegments() error {
// cut finishes the currently active segments and opens the next one. // cut finishes the currently active segments and opens the next one.
// The encoder is reset to point to the new segment. // The encoder is reset to point to the new segment.
func (w *WAL) cut() error { func (w *SegmentWAL) cut() error {
// Sync current tail to disk and close. // Sync current tail to disk and close.
if tf := w.tail(); tf != nil { if tf := w.tail(); tf != nil {
if err := w.sync(); err != nil { if err := w.sync(); err != nil {
@ -229,7 +250,7 @@ func (w *WAL) cut() error {
return nil return nil
} }
func (w *WAL) tail() *os.File { func (w *SegmentWAL) tail() *os.File {
if len(w.files) == 0 { if len(w.files) == 0 {
return nil return nil
} }
@ -237,14 +258,14 @@ func (w *WAL) tail() *os.File {
} }
// Sync flushes the changes to disk. // Sync flushes the changes to disk.
func (w *WAL) Sync() error { func (w *SegmentWAL) Sync() error {
w.mtx.Lock() w.mtx.Lock()
defer w.mtx.Unlock() defer w.mtx.Unlock()
return w.sync() return w.sync()
} }
func (w *WAL) sync() error { func (w *SegmentWAL) sync() error {
if w.cur == nil { if w.cur == nil {
return nil return nil
} }
@ -254,7 +275,7 @@ func (w *WAL) sync() error {
return fileutil.Fdatasync(w.tail()) return fileutil.Fdatasync(w.tail())
} }
func (w *WAL) run(interval time.Duration) { func (w *SegmentWAL) run(interval time.Duration) {
var tick <-chan time.Time var tick <-chan time.Time
if interval > 0 { if interval > 0 {
@ -277,7 +298,7 @@ func (w *WAL) run(interval time.Duration) {
} }
// Close syncs all data and closes the underlying resources. // Close syncs all data and closes the underlying resources.
func (w *WAL) Close() error { func (w *SegmentWAL) Close() error {
close(w.stopc) close(w.stopc)
<-w.donec <-w.donec
@ -305,7 +326,7 @@ const (
walPageBytes = 16 * minSectorSize walPageBytes = 16 * minSectorSize
) )
func (w *WAL) entry(et WALEntryType, flag byte, buf []byte) error { func (w *SegmentWAL) entry(et WALEntryType, flag byte, buf []byte) error {
w.mtx.Lock() w.mtx.Lock()
defer w.mtx.Unlock() defer w.mtx.Unlock()
@ -369,7 +390,7 @@ func putWALBuffer(b []byte) {
walBuffers.Put(b) walBuffers.Put(b)
} }
func (w *WAL) encodeSeries(series []labels.Labels) error { func (w *SegmentWAL) encodeSeries(series []labels.Labels) error {
if len(series) == 0 { if len(series) == 0 {
return nil return nil
} }
@ -395,7 +416,7 @@ func (w *WAL) encodeSeries(series []labels.Labels) error {
return w.entry(WALEntrySeries, walSeriesSimple, buf) return w.entry(WALEntrySeries, walSeriesSimple, buf)
} }
func (w *WAL) encodeSamples(samples []refdSample) error { func (w *SegmentWAL) encodeSamples(samples []RefSample) error {
if len(samples) == 0 { if len(samples) == 0 {
return nil return nil
} }
@ -409,67 +430,65 @@ func (w *WAL) encodeSamples(samples []refdSample) error {
// TODO(fabxc): optimize for all samples having the same timestamp. // TODO(fabxc): optimize for all samples having the same timestamp.
first := samples[0] first := samples[0]
binary.BigEndian.PutUint64(b, first.ref) binary.BigEndian.PutUint64(b, first.Ref)
buf = append(buf, b[:8]...) buf = append(buf, b[:8]...)
binary.BigEndian.PutUint64(b, uint64(first.t)) binary.BigEndian.PutUint64(b, uint64(first.T))
buf = append(buf, b[:8]...) buf = append(buf, b[:8]...)
for _, s := range samples { for _, s := range samples {
n := binary.PutVarint(b, int64(s.ref)-int64(first.ref)) n := binary.PutVarint(b, int64(s.Ref)-int64(first.Ref))
buf = append(buf, b[:n]...) buf = append(buf, b[:n]...)
n = binary.PutVarint(b, s.t-first.t) n = binary.PutVarint(b, s.T-first.T)
buf = append(buf, b[:n]...) buf = append(buf, b[:n]...)
binary.BigEndian.PutUint64(b, math.Float64bits(s.v)) binary.BigEndian.PutUint64(b, math.Float64bits(s.V))
buf = append(buf, b[:8]...) buf = append(buf, b[:8]...)
} }
return w.entry(WALEntrySamples, walSamplesSimple, buf) return w.entry(WALEntrySamples, walSamplesSimple, buf)
} }
// WALReader decodes and emits write ahead log entries. // walReader decodes and emits write ahead log entries.
type WALReader struct { type walReader struct {
logger log.Logger logger log.Logger
wal *WAL wal *SegmentWAL
cur int cur int
buf []byte buf []byte
crc32 hash.Hash32 crc32 hash.Hash32
err error err error
labels []labels.Labels labels []labels.Labels
samples []refdSample samples []RefSample
} }
// NewWALReader returns a new WALReader over the sequence of the given ReadClosers. func newWALReader(w *SegmentWAL, l log.Logger) *walReader {
func NewWALReader(logger log.Logger, w *WAL) *WALReader { if l == nil {
if logger == nil { l = log.NewNopLogger()
logger = log.NewNopLogger()
} }
r := &WALReader{ return &walReader{
logger: logger, logger: l,
wal: w, wal: w,
buf: make([]byte, 0, 128*4096), buf: make([]byte, 0, 128*4096),
crc32: crc32.New(crc32.MakeTable(crc32.Castagnoli)), crc32: crc32.New(crc32.MakeTable(crc32.Castagnoli)),
} }
return r
} }
// At returns the last decoded entry of labels or samples. // At returns the last decoded entry of labels or samples.
// The returned slices are only valid until the next call to Next(). Their elements // The returned slices are only valid until the next call to Next(). Their elements
// have to be copied to preserve them. // have to be copied to preserve them.
func (r *WALReader) At() ([]labels.Labels, []refdSample) { func (r *walReader) At() ([]labels.Labels, []RefSample) {
return r.labels, r.samples return r.labels, r.samples
} }
// Err returns the last error the reader encountered. // Err returns the last error the reader encountered.
func (r *WALReader) Err() error { func (r *walReader) Err() error {
return r.err return r.err
} }
// nextEntry retrieves the next entry. It is also used as a testing hook. // nextEntry retrieves the next entry. It is also used as a testing hook.
func (r *WALReader) nextEntry() (WALEntryType, byte, []byte, error) { func (r *walReader) nextEntry() (WALEntryType, byte, []byte, error) {
if r.cur >= len(r.wal.files) { if r.cur >= len(r.wal.files) {
return 0, 0, nil, io.EOF return 0, 0, nil, io.EOF
} }
@ -492,7 +511,7 @@ func (r *WALReader) nextEntry() (WALEntryType, byte, []byte, error) {
// Next returns decodes the next entry pair and returns true // Next returns decodes the next entry pair and returns true
// if it was succesful. // if it was succesful.
func (r *WALReader) Next() bool { func (r *walReader) Next() bool {
r.labels = r.labels[:0] r.labels = r.labels[:0]
r.samples = r.samples[:0] r.samples = r.samples[:0]
@ -549,12 +568,12 @@ func (r *WALReader) Next() bool {
return r.err == nil return r.err == nil
} }
func (r *WALReader) current() *os.File { func (r *walReader) current() *os.File {
return r.wal.files[r.cur] return r.wal.files[r.cur]
} }
// truncate the WAL after the last valid entry. // truncate the WAL after the last valid entry.
func (r *WALReader) truncate(lastOffset int64) error { func (r *walReader) truncate(lastOffset int64) error {
r.logger.Log("msg", "WAL corruption detected; truncating", r.logger.Log("msg", "WAL corruption detected; truncating",
"err", r.err, "file", r.current().Name(), "pos", lastOffset) "err", r.err, "file", r.current().Name(), "pos", lastOffset)
@ -582,7 +601,7 @@ func walCorruptionErrf(s string, args ...interface{}) error {
return walCorruptionErr(errors.Errorf(s, args...)) return walCorruptionErr(errors.Errorf(s, args...))
} }
func (r *WALReader) entry(cr io.Reader) (WALEntryType, byte, []byte, error) { func (r *walReader) entry(cr io.Reader) (WALEntryType, byte, []byte, error) {
r.crc32.Reset() r.crc32.Reset()
tr := io.TeeReader(cr, r.crc32) tr := io.TeeReader(cr, r.crc32)
@ -629,7 +648,7 @@ func (r *WALReader) entry(cr io.Reader) (WALEntryType, byte, []byte, error) {
return etype, flag, buf, nil return etype, flag, buf, nil
} }
func (r *WALReader) decodeSeries(flag byte, b []byte) error { func (r *walReader) decodeSeries(flag byte, b []byte) error {
for len(b) > 0 { for len(b) > 0 {
l, n := binary.Uvarint(b) l, n := binary.Uvarint(b)
if n < 1 { if n < 1 {
@ -659,7 +678,7 @@ func (r *WALReader) decodeSeries(flag byte, b []byte) error {
return nil return nil
} }
func (r *WALReader) decodeSamples(flag byte, b []byte) error { func (r *walReader) decodeSamples(flag byte, b []byte) error {
if len(b) < 16 { if len(b) < 16 {
return errors.Wrap(errInvalidSize, "header length") return errors.Wrap(errInvalidSize, "header length")
} }
@ -670,7 +689,7 @@ func (r *WALReader) decodeSamples(flag byte, b []byte) error {
b = b[16:] b = b[16:]
for len(b) > 0 { for len(b) > 0 {
var smpl refdSample var smpl RefSample
dref, n := binary.Varint(b) dref, n := binary.Varint(b)
if n < 1 { if n < 1 {
@ -678,19 +697,19 @@ func (r *WALReader) decodeSamples(flag byte, b []byte) error {
} }
b = b[n:] b = b[n:]
smpl.ref = uint64(int64(baseRef) + dref) smpl.Ref = uint64(int64(baseRef) + dref)
dtime, n := binary.Varint(b) dtime, n := binary.Varint(b)
if n < 1 { if n < 1 {
return errors.Wrap(errInvalidSize, "sample timestamp delta") return errors.Wrap(errInvalidSize, "sample timestamp delta")
} }
b = b[n:] b = b[n:]
smpl.t = baseTime + dtime smpl.T = baseTime + dtime
if len(b) < 8 { if len(b) < 8 {
return errors.Wrapf(errInvalidSize, "sample value bits %d", len(b)) return errors.Wrapf(errInvalidSize, "sample value bits %d", len(b))
} }
smpl.v = float64(math.Float64frombits(binary.BigEndian.Uint64(b))) smpl.V = float64(math.Float64frombits(binary.BigEndian.Uint64(b)))
b = b[8:] b = b[8:]
r.samples = append(r.samples, smpl) r.samples = append(r.samples, smpl)

20
vendor/vendor.json vendored
View file

@ -59,10 +59,10 @@
"revisionTime": "2016-09-30T00:14:02Z" "revisionTime": "2016-09-30T00:14:02Z"
}, },
{ {
"checksumSHA1": "BdLdZP/C2uOO3lqk9X3NCKFpXa4=", "checksumSHA1": "ddYc7mKe3g1x1UUKBrGR4vArJs8=",
"path": "github.com/asaskevich/govalidator", "path": "github.com/asaskevich/govalidator",
"revision": "7b3beb6df3c42abd3509abfc3bcacc0fbfb7c877", "revision": "065ea97278837088c52c0cd0d963473f61b2d98c",
"revisionTime": "2016-10-01T16:31:30Z" "revisionTime": "2017-05-13T08:31:01Z"
}, },
{ {
"checksumSHA1": "WNfR3yhLjRC5/uccgju/bwrdsxQ=", "checksumSHA1": "WNfR3yhLjRC5/uccgju/bwrdsxQ=",
@ -661,22 +661,22 @@
"revisionTime": "2016-04-11T19:08:41Z" "revisionTime": "2016-04-11T19:08:41Z"
}, },
{ {
"checksumSHA1": "0wu/AzUWMurN/T5VBKCrvhf7c7E=", "checksumSHA1": "T+9Tl4utHkpYSdVFRpdfLloShTM=",
"path": "github.com/prometheus/tsdb", "path": "github.com/prometheus/tsdb",
"revision": "44769c1654f699931b2d3a2928326ac2d02d9149", "revision": "c8438cfc8113a39f75e398bf00c481d3cb1069f6",
"revisionTime": "2017-05-09T10:52:47Z" "revisionTime": "2017-05-14T09:51:56Z"
}, },
{ {
"checksumSHA1": "9EH3v+JdbikCUJAgD4VEOPIaWfs=", "checksumSHA1": "9EH3v+JdbikCUJAgD4VEOPIaWfs=",
"path": "github.com/prometheus/tsdb/chunks", "path": "github.com/prometheus/tsdb/chunks",
"revision": "44769c1654f699931b2d3a2928326ac2d02d9149", "revision": "c8438cfc8113a39f75e398bf00c481d3cb1069f6",
"revisionTime": "2017-05-09T10:52:47Z" "revisionTime": "2017-05-14T09:51:56Z"
}, },
{ {
"checksumSHA1": "3RHZcB/ZvIae9K0tJxNlajJg0jA=", "checksumSHA1": "3RHZcB/ZvIae9K0tJxNlajJg0jA=",
"path": "github.com/prometheus/tsdb/labels", "path": "github.com/prometheus/tsdb/labels",
"revision": "44769c1654f699931b2d3a2928326ac2d02d9149", "revision": "c8438cfc8113a39f75e398bf00c481d3cb1069f6",
"revisionTime": "2017-05-09T10:52:47Z" "revisionTime": "2017-05-14T09:51:56Z"
}, },
{ {
"checksumSHA1": "+49Vr4Me28p3cR+gxX5SUQHbbas=", "checksumSHA1": "+49Vr4Me28p3cR+gxX5SUQHbbas=",

View file

@ -327,7 +327,7 @@ func (h *Handler) consoles(w http.ResponseWriter, r *http.Request) {
Path: strings.TrimLeft(name, "/"), Path: strings.TrimLeft(name, "/"),
} }
tmpl := template.NewTemplateExpander(h.context, string(text), "__console_"+name, data, h.now(), h.queryEngine, h.options.ExternalURL.Path) tmpl := template.NewTemplateExpander(h.context, string(text), "__console_"+name, data, h.now(), h.queryEngine, h.options.ExternalURL)
filenames, err := filepath.Glob(h.options.ConsoleLibrariesPath + "/*.lib") filenames, err := filepath.Glob(h.options.ConsoleLibrariesPath + "/*.lib")
if err != nil { if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError) http.Error(w, err.Error(), http.StatusInternalServerError)
@ -522,7 +522,7 @@ func (h *Handler) executeTemplate(w http.ResponseWriter, name string, data inter
http.Error(w, err.Error(), http.StatusInternalServerError) http.Error(w, err.Error(), http.StatusInternalServerError)
} }
tmpl := template.NewTemplateExpander(h.context, text, name, data, h.now(), h.queryEngine, h.options.ExternalURL.Path) tmpl := template.NewTemplateExpander(h.context, text, name, data, h.now(), h.queryEngine, h.options.ExternalURL)
tmpl.Funcs(tmplFuncs(h.consolesPath(), h.options)) tmpl.Funcs(tmplFuncs(h.consolesPath(), h.options))
result, err := tmpl.ExpandHTML(nil) result, err := tmpl.ExpandHTML(nil)