Merge pull request #480 from grafana/sync-upstream-14-apr-2023

Sync with Prometheus up to 2023-04-14 (7309ac27)
commit 21131bf6ad
George Krajcsovits, 2023-04-18 10:21:17 +02:00, committed via GitHub
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
67 changed files with 2843 additions and 1657 deletions


@@ -89,7 +89,6 @@ func main() {
 	app.HelpFlag.Short('h')

 	checkCmd := app.Command("check", "Check the resources for validity.")
-	checkCmd.Flag("http.config.file", "HTTP client configuration file for promtool to connect to Prometheus.").PlaceHolder("<filename>").ExistingFileVar(&httpConfigFilePath)

 	sdCheckCmd := checkCmd.Command("service-discovery", "Perform service discovery for the given job name and report the results, including relabeling.")
 	sdConfigFile := sdCheckCmd.Arg("config-file", "The prometheus config file.").Required().ExistingFile()
@@ -117,16 +116,12 @@ func main() {
 	).Required().ExistingFiles()

 	checkServerHealthCmd := checkCmd.Command("healthy", "Check if the Prometheus server is healthy.")
-	serverHealthURLArg := checkServerHealthCmd.Arg(
-		"server",
-		"The URL of the Prometheus server to check (e.g. http://localhost:9090)",
-	).URL()
+	checkServerHealthCmd.Flag("http.config.file", "HTTP client configuration file for promtool to connect to Prometheus.").PlaceHolder("<filename>").ExistingFileVar(&httpConfigFilePath)
+	checkServerHealthCmd.Flag("url", "The URL for the Prometheus server.").Default("http://localhost:9090").URLVar(&serverURL)

 	checkServerReadyCmd := checkCmd.Command("ready", "Check if the Prometheus server is ready.")
-	serverReadyURLArg := checkServerReadyCmd.Arg(
-		"server",
-		"The URL of the Prometheus server to check (e.g. http://localhost:9090)",
-	).URL()
+	checkServerReadyCmd.Flag("http.config.file", "HTTP client configuration file for promtool to connect to Prometheus.").PlaceHolder("<filename>").ExistingFileVar(&httpConfigFilePath)
+	checkServerReadyCmd.Flag("url", "The URL for the Prometheus server.").Default("http://localhost:9090").URLVar(&serverURL)

 	checkRulesCmd := checkCmd.Command("rules", "Check if the rule files are valid or not.")
 	ruleFiles := checkRulesCmd.Arg(
@@ -292,10 +287,10 @@ func main() {
 		os.Exit(CheckConfig(*agentMode, *checkConfigSyntaxOnly, newLintConfig(*checkConfigLint, *checkConfigLintFatal), *configFiles...))

 	case checkServerHealthCmd.FullCommand():
-		os.Exit(checkErr(CheckServerStatus(*serverHealthURLArg, checkHealth, httpRoundTripper)))
+		os.Exit(checkErr(CheckServerStatus(serverURL, checkHealth, httpRoundTripper)))

 	case checkServerReadyCmd.FullCommand():
-		os.Exit(checkErr(CheckServerStatus(*serverReadyURLArg, checkReadiness, httpRoundTripper)))
+		os.Exit(checkErr(CheckServerStatus(serverURL, checkReadiness, httpRoundTripper)))

 	case checkWebConfigCmd.FullCommand():
 		os.Exit(CheckWebConfig(*webConfigFiles...))
@@ -390,12 +385,10 @@ func (ls lintConfig) lintDuplicateRules() bool {
 	return ls.all || ls.duplicateRules
 }

-const promDefaultURL = "http://localhost:9090"
-
 // Check server status - healthy & ready.
 func CheckServerStatus(serverURL *url.URL, checkEndpoint string, roundTripper http.RoundTripper) error {
-	if serverURL == nil {
-		serverURL, _ = url.Parse(promDefaultURL)
+	if serverURL.Scheme == "" {
+		serverURL.Scheme = "http"
 	}

 	config := api.Config{
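With the server URL now supplied via the `--url` flag (default `http://localhost:9090`) rather than a positional argument, `CheckServerStatus` only has to backfill a missing scheme. A minimal sketch of that defaulting behavior, using only the standard library:

```go
package main

import (
	"fmt"
	"net/url"
)

func main() {
	// "//localhost:9090" parses with an empty Scheme, which is the case
	// the new check guards against before building the API client.
	u, err := url.Parse("//localhost:9090")
	if err != nil {
		panic(err)
	}
	if u.Scheme == "" {
		u.Scheme = "http"
	}
	fmt.Println(u) // http://localhost:9090
}
```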


@@ -163,7 +163,7 @@ func (importer *ruleImporter) importRule(ctx context.Context, ruleExpr, ruleName
 			})
 			lb.Set(labels.MetricName, ruleName)
-			lbls := lb.Labels(labels.EmptyLabels())
+			lbls := lb.Labels()

 			for _, value := range sample.Values {
 				if err := app.add(ctx, lbls, timestamp.FromTime(value.Timestamp.Time()), float64(value.Value)); err != nil {


@@ -347,7 +347,7 @@ Outer:
 		for _, s := range got {
 			gotSamples = append(gotSamples, parsedSample{
 				Labels: s.Metric.Copy(),
-				Value:  s.V,
+				Value:  s.F,
 			})
 		}
@@ -447,7 +447,8 @@ func query(ctx context.Context, qs string, t time.Time, engine *promql.Engine, q
 		return v, nil
 	case promql.Scalar:
 		return promql.Vector{promql.Sample{
-			Point:  promql.Point{T: v.T, V: v.V},
+			T:      v.T,
+			F:      v.V,
 			Metric: labels.Labels{},
 		}}, nil
 	default:


@@ -173,16 +173,16 @@ var (
 	// DefaultQueueConfig is the default remote queue configuration.
 	DefaultQueueConfig = QueueConfig{
-		// With a maximum of 200 shards, assuming an average of 100ms remote write
-		// time and 500 samples per batch, we will be able to push 1M samples/s.
-		MaxShards:         200,
+		// With a maximum of 50 shards, assuming an average of 100ms remote write
+		// time and 2000 samples per batch, we will be able to push 1M samples/s.
+		MaxShards:         50,
 		MinShards:         1,
-		MaxSamplesPerSend: 500,
+		MaxSamplesPerSend: 2000,

-		// Each shard will have a max of 2500 samples pending in its channel, plus the pending
-		// samples that have been enqueued. Theoretically we should only ever have about 3000 samples
-		// per shard pending. At 200 shards that's 600k.
-		Capacity:          2500,
+		// Each shard will have a max of 10,000 samples pending in its channel, plus the pending
+		// samples that have been enqueued. Theoretically we should only ever have about 12,000 samples
+		// per shard pending. At 50 shards that's 600k.
+		Capacity:          10000,
 		BatchSendDeadline: model.Duration(5 * time.Second),

 		// Backoff times for retrying a batch of samples on recoverable errors.
@@ -194,7 +194,7 @@ var (
 	DefaultMetadataConfig = MetadataConfig{
 		Send:              true,
 		SendInterval:      model.Duration(1 * time.Minute),
-		MaxSamplesPerSend: 500,
+		MaxSamplesPerSend: 2000,
 	}

 	// DefaultRemoteReadConfig is the default remote read configuration.
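The arithmetic in the rewritten comments checks out; restated as a sketch (this is just the reasoning from the comments above, not code from the diff):

```go
package main

import "fmt"

func main() {
	const (
		maxShards         = 50
		maxSamplesPerSend = 2000
		sendsPerSecond    = 10 // one send per ~100ms remote-write round trip
		capacity          = 10000
	)
	// Peak throughput: 50 shards * 2000 samples * 10 sends/s.
	fmt.Println(maxShards * maxSamplesPerSend * sendsPerSecond) // 1000000 samples/s
	// Worst-case pending data: per shard, a full channel (10,000 samples)
	// plus one enqueued batch (~2000) is about 12,000 samples; at 50 shards
	// that is 600k.
	fmt.Println(maxShards * (capacity + maxSamplesPerSend)) // 600000
}
```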


@@ -58,7 +58,6 @@ Check the resources for validity.
 | Flag | Description |
 | --- | --- |
-| <code class="text-nowrap">--http.config.file</code> | HTTP client configuration file for promtool to connect to Prometheus. |
 | <code class="text-nowrap">--extended</code> | Print extended information related to the cardinality of the metrics. |
@@ -137,11 +136,12 @@ Check if the Prometheus server is healthy.

-###### Arguments
+###### Flags

-| Argument | Description |
-| --- | --- |
-| server | The URL of the Prometheus server to check (e.g. http://localhost:9090) |
+| Flag | Description | Default |
+| --- | --- | --- |
+| <code class="text-nowrap">--http.config.file</code> | HTTP client configuration file for promtool to connect to Prometheus. | |
+| <code class="text-nowrap">--url</code> | The URL for the Prometheus server. | `http://localhost:9090` |
@@ -152,11 +152,12 @@ Check if the Prometheus server is ready.

-###### Arguments
+###### Flags

-| Argument | Description |
-| --- | --- |
-| server | The URL of the Prometheus server to check (e.g. http://localhost:9090) |
+| Flag | Description | Default |
+| --- | --- | --- |
+| <code class="text-nowrap">--http.config.file</code> | HTTP client configuration file for promtool to connect to Prometheus. | |
+| <code class="text-nowrap">--url</code> | The URL for the Prometheus server. | `http://localhost:9090` |


@@ -17,9 +17,7 @@ _Notes about the experimental native histograms:_
   flag](../feature_flags/#native-histograms). As long as no native histograms
   have been ingested into the TSDB, all functions will behave as usual.
 * Functions that do not explicitly mention native histograms in their
-  documentation (see below) effectively treat a native histogram as a float
-  sample of value 0. (This is confusing and will change before native
-  histograms become a stable feature.)
+  documentation (see below) will ignore histogram samples.
 * Functions that do already act on native histograms might still change their
   behavior in the future.
 * If a function requires the same bucket layout between multiple native
@@ -404,6 +402,8 @@ For each timeseries in `v`, `label_join(v instant-vector, dst_label string, sepa
 using `separator` and returns the timeseries with the label `dst_label` containing the joined value.
 There can be any number of `src_labels` in this function.

+`label_join` acts on float and histogram samples in the same way.
+
 This example will return a vector with each time series having a `foo` label with the value `a,b,c` added to it:

 ```
@@ -419,6 +419,8 @@ of `replacement`, together with the original labels in the input. Capturing grou
 regular expression can be referenced with `$1`, `$2`, etc. If the regular expression doesn't
 match then the timeseries is returned unchanged.

+`label_replace` acts on float and histogram samples in the same way.
+
 This example will return timeseries with the values `a:c` at label `service` and `a` at label `foo`:

 ```
@@ -501,10 +503,21 @@ counter resets when your target restarts.

 For each input time series, `resets(v range-vector)` returns the number of
 counter resets within the provided time range as an instant vector. Any
-decrease in the value between two consecutive samples is interpreted as a
-counter reset.
+decrease in the value between two consecutive float samples is interpreted as a
+counter reset. A reset in a native histogram is detected in a more complex way:
+any decrease in any bucket, including the zero bucket, or in the count of
+observations constitutes a counter reset, as does the disappearance of any
+previously populated bucket, an increase in bucket resolution, or a decrease of
+the zero-bucket width.

-`resets` should only be used with counters.
+`resets` should only be used with counters and counter-like native
+histograms.
+
+If the range vector contains a mix of float and histogram samples for the same
+series, counter resets are detected separately and their numbers added up. The
+change from a float to a histogram sample is _not_ considered a counter
+reset. Each float sample is compared to the next float sample, and each
+histogram is compared to the next histogram.
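The histogram rule added above can be illustrated with a simplified sketch. This is not Prometheus's implementation: it is a hypothetical `resetDetected` over two bucket snapshots keyed by upper bound, and it ignores the schema (resolution) and zero-bucket-width conditions that the real check also considers.

```go
package main

import "fmt"

// resetDetected applies the simplified rule: a reset is any decrease in the
// count of observations, any decrease in a bucket, or the disappearance of a
// previously populated bucket.
func resetDetected(prevCount, currCount uint64, prev, curr map[float64]uint64) bool {
	if currCount < prevCount {
		return true // count of observations decreased
	}
	for le, p := range prev {
		c, ok := curr[le]
		if !ok || c < p {
			return true // bucket disappeared or decreased
		}
	}
	return false
}

func main() {
	prev := map[float64]uint64{0.1: 4, 1: 9}
	curr := map[float64]uint64{0.1: 5} // the le=1 bucket disappeared
	fmt.Println(resetDetected(9, 10, prev, curr)) // true
}
```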
 ## `round()`
@@ -526,7 +539,7 @@ have exactly one element, `scalar` will return `NaN`.
 ## `sort()`

 `sort(v instant-vector)` returns vector elements sorted by their sample values,
-in ascending order.
+in ascending order. Native histograms are sorted by their sum of observations.

 ## `sort_desc()`
@@ -545,7 +558,8 @@ expression is to be evaluated.
 ## `timestamp()`

 `timestamp(v instant-vector)` returns the timestamp of each of the samples of
-the given vector as the number of seconds since January 1, 1970 UTC.
+the given vector as the number of seconds since January 1, 1970 UTC. It also
+works with histogram samples.

 ## `vector()`
@@ -569,12 +583,15 @@ over time and return an instant vector with per-series aggregation results:
 * `quantile_over_time(scalar, range-vector)`: the φ-quantile (0 ≤ φ ≤ 1) of the values in the specified interval.
 * `stddev_over_time(range-vector)`: the population standard deviation of the values in the specified interval.
 * `stdvar_over_time(range-vector)`: the population standard variance of the values in the specified interval.
-* `last_over_time(range-vector)`: the most recent point value in specified interval.
+* `last_over_time(range-vector)`: the most recent point value in the specified interval.
 * `present_over_time(range-vector)`: the value 1 for any series in the specified interval.

 Note that all values in the specified interval have the same weight in the
 aggregation even if the values are not equally spaced throughout the interval.

+`count_over_time`, `last_over_time`, and `present_over_time` handle native
+histograms as expected. All other functions ignore histogram samples.
+
 ## Trigonometric Functions

 The trigonometric functions work in radians:

go.mod

@@ -5,7 +5,7 @@ go 1.19
 require (
 	github.com/Azure/azure-sdk-for-go v65.0.0+incompatible
 	github.com/Azure/go-autorest/autorest v0.11.28
-	github.com/Azure/go-autorest/autorest/adal v0.9.22
+	github.com/Azure/go-autorest/autorest/adal v0.9.23
 	github.com/DmitriyVTitov/size v1.5.0
 	github.com/alecthomas/kingpin/v2 v2.3.2
 	github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137

go.sum

@@ -45,8 +45,8 @@ github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSW
 github.com/Azure/go-autorest/autorest v0.11.28 h1:ndAExarwr5Y+GaHE6VCaY1kyS/HwwGGyuimVhWsHOEM=
 github.com/Azure/go-autorest/autorest v0.11.28/go.mod h1:MrkzG3Y3AH668QyF9KRk5neJnGgmhQ6krbhR8Q5eMvA=
 github.com/Azure/go-autorest/autorest/adal v0.9.18/go.mod h1:XVVeme+LZwABT8K5Lc3hA4nAe8LDBVle26gTrguhhPQ=
-github.com/Azure/go-autorest/autorest/adal v0.9.22 h1:/GblQdIudfEM3AWWZ0mrYJQSd7JS4S/Mbzh6F0ov0Xc=
-github.com/Azure/go-autorest/autorest/adal v0.9.22/go.mod h1:XuAbAEUv2Tta//+voMI038TrJBqjKam0me7qR+L8Cmk=
+github.com/Azure/go-autorest/autorest/adal v0.9.23 h1:Yepx8CvFxwNKpH6ja7RZ+sKX+DWYNldbLiALMC3BTz8=
+github.com/Azure/go-autorest/autorest/adal v0.9.23/go.mod h1:5pcMqFkdPhviJdlEy3kC/v1ZLnQl0MH6XA5YCcMhy4c=
 github.com/Azure/go-autorest/autorest/date v0.3.0 h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw=
 github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74=
 github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k=
@@ -834,6 +834,7 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
 golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
 golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
 golang.org/x/crypto v0.0.0-20220829220503-c86fa9a7ed90/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
+golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
 golang.org/x/crypto v0.7.0 h1:AvwMYaRytfdeVt3u6mLaxYtErKYjxA2OXjJ1HHq6t3A=
 golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@@ -917,6 +918,7 @@ golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qx
 golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
 golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
+golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
 golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ=
 golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
@@ -1009,11 +1011,13 @@ golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBc
 golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0 h1:MVltZSvRTcU2ljQOhs94SXPftV6DCNnZViHeQps87pQ=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
 golang.org/x/term v0.6.0 h1:clScbb1cHjoCkyRbWwBEUZ5H/tIFu5TAXIqaZD0Gcjw=
 golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U=
 golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -1025,6 +1029,7 @@ golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
 golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
 golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68=
 golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
 golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=


@@ -570,24 +570,18 @@ func contains(s []Label, n string) bool {
 	return false
 }

-// Labels returns the labels from the builder, adding them to res if non-nil.
-// Argument res can be the same as b.base, if caller wants to overwrite that slice.
+// Labels returns the labels from the builder.
 // If no modifications were made, the original labels are returned.
-func (b *Builder) Labels(res Labels) Labels {
+func (b *Builder) Labels() Labels {
 	if len(b.del) == 0 && len(b.add) == 0 {
 		return b.base
 	}

-	if res == nil {
-		// In the general case, labels are removed, modified or moved
-		// rather than added.
-		res = make(Labels, 0, len(b.base))
-	} else {
-		res = res[:0]
-	}
-	// Justification that res can be the same slice as base: in this loop
-	// we move forward through base, and either skip an element or assign
-	// it to res at its current position or an earlier position.
+	expectedSize := len(b.base) + len(b.add) - len(b.del)
+	if expectedSize < 1 {
+		expectedSize = 1
+	}
+	res := make(Labels, 0, expectedSize)
 	for _, l := range b.base {
 		if slices.Contains(b.del, l.Name) || contains(b.add, l.Name) {
 			continue
@@ -637,3 +631,9 @@ func (b *ScratchBuilder) Labels() Labels {
 	// Copy the slice, so the next use of ScratchBuilder doesn't overwrite.
 	return append([]Label{}, b.add...)
 }
+
+// Write the newly-built Labels out to ls.
+// Callers must ensure that there are no other references to ls, or any strings fetched from it.
+func (b *ScratchBuilder) Overwrite(ls *Labels) {
+	*ls = append((*ls)[:0], b.add...)
+}
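After this change the two builders divide the work: `Builder.Labels()` takes no argument and always allocates its own result, while `ScratchBuilder.Overwrite` writes into a caller-owned slice. A short usage sketch, assuming the package's existing `FromStrings`, `NewBuilder`, and `NewScratchBuilder`/`Add`/`Sort` helpers:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	// Builder: no res argument any more; Labels() sizes its own result.
	b := labels.NewBuilder(labels.FromStrings("__name__", "up", "job", "api"))
	b.Set("instance", "localhost:9090")
	b.Del("__name__")
	fmt.Println(b.Labels())

	// ScratchBuilder: Overwrite reuses the destination's backing array when
	// possible, so the destination must not be shared with other readers.
	var reused labels.Labels
	sb := labels.NewScratchBuilder(2)
	sb.Add("__name__", "up")
	sb.Add("job", "api")
	sb.Sort()
	sb.Overwrite(&reused)
	fmt.Println(reused)
}
```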


@@ -158,7 +158,7 @@ func (ls Labels) MatchLabels(on bool, names ...string) Labels {
 		b.Del(MetricName)
 		b.Del(names...)
 	}
-	return b.Labels(EmptyLabels())
+	return b.Labels()
 }

 // Hash returns a hash value for the label set.
@@ -624,10 +624,9 @@ func contains(s []Label, n string) bool {
 	return false
 }

-// Labels returns the labels from the builder, adding them to res if non-nil.
-// Argument res can be the same as b.base, if caller wants to overwrite that slice.
+// Labels returns the labels from the builder.
 // If no modifications were made, the original labels are returned.
-func (b *Builder) Labels(res Labels) Labels {
+func (b *Builder) Labels() Labels {
 	if len(b.del) == 0 && len(b.add) == 0 {
 		return b.base
 	}
@@ -637,7 +636,7 @@ func (b *Builder) Labels(res Labels) Labels {
 	a, d := 0, 0

 	bufSize := len(b.base.data) + labelsSize(b.add)
-	buf := make([]byte, 0, bufSize) // TODO: see if we can re-use the buffer from res.
+	buf := make([]byte, 0, bufSize)
 	for pos := 0; pos < len(b.base.data); {
 		oldPos := pos
 		var lName string
@@ -812,7 +811,7 @@ func (b *ScratchBuilder) Labels() Labels {
 }

 // Write the newly-built Labels out to ls, reusing an internal buffer.
-// Callers must ensure that there are no other references to ls.
+// Callers must ensure that there are no other references to ls, or any strings fetched from it.
 func (b *ScratchBuilder) Overwrite(ls *Labels) {
 	size := labelsSize(b.add)
 	if size <= cap(b.overwriteBuffer) {


@@ -596,7 +596,7 @@ func TestBuilder(t *testing.T) {
 				b.Keep(tcase.keep...)
 			}
 			b.Del(tcase.del...)
-			require.Equal(t, tcase.want, b.Labels(EmptyLabels()))
+			require.Equal(t, tcase.want, b.Labels())

 			// Check what happens when we call Range and mutate the builder.
 			b.Range(func(l Label) {
@@ -604,7 +604,7 @@ func TestBuilder(t *testing.T) {
 					b.Del(l.Name)
 				}
 			})
-			require.Equal(t, tcase.want.BytesWithoutLabels(nil, "aaa", "bbb"), b.Labels(tcase.base).Bytes(nil))
+			require.Equal(t, tcase.want.BytesWithoutLabels(nil, "aaa", "bbb"), b.Labels().Bytes(nil))
 		})
 	}
 }
@@ -669,7 +669,7 @@ func BenchmarkLabels_Hash(b *testing.B) {
 					// Label ~20B name, 50B value.
 					b.Set(fmt.Sprintf("abcdefghijabcdefghijabcdefghij%d", i), fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i))
 				}
-				return b.Labels(EmptyLabels())
+				return b.Labels()
 			}(),
 		},
 		{
@@ -680,7 +680,7 @@ func BenchmarkLabels_Hash(b *testing.B) {
 					// Label ~50B name, 50B value.
 					b.Set(fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i), fmt.Sprintf("abcdefghijabcdefghijabcdefghijabcdefghijabcdefghij%d", i))
 				}
-				return b.Labels(EmptyLabels())
+				return b.Labels()
 			}(),
 		},
 		{
@@ -729,7 +729,7 @@ func BenchmarkBuilder(b *testing.B) {
 		for _, l := range m {
 			builder.Set(l.Name, l.Value)
 		}
-		l = builder.Labels(EmptyLabels())
+		l = builder.Labels()
 	}
 	require.Equal(b, 9, l.Len())
 }


@@ -211,7 +211,7 @@ func Process(lbls labels.Labels, cfgs ...*Config) (ret labels.Labels, keep bool)
 	if !ProcessBuilder(lb, cfgs...) {
 		return labels.EmptyLabels(), false
 	}
-	return lb.Labels(lbls), true
+	return lb.Labels(), true
 }

 // ProcessBuilder is like Process, but the caller passes a labels.Builder


@@ -359,7 +359,7 @@ func (n *Manager) Send(alerts ...*Alert) {
 			}
 		})

-		a.Labels = lb.Labels(a.Labels)
+		a.Labels = lb.Labels()
 	}

 	alerts = n.relabelAlerts(alerts)


@@ -189,7 +189,8 @@ func (q *query) Cancel() {
 // Close implements the Query interface.
 func (q *query) Close() {
 	for _, s := range q.matrix {
-		putPointSlice(s.Points)
+		putFPointSlice(s.Floats)
+		putHPointSlice(s.Histograms)
 	}
 }
@@ -680,11 +681,15 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *parser.Eval
 		for i, s := range mat {
 			// Point might have a different timestamp, force it to the evaluation
 			// timestamp as that is when we ran the evaluation.
-			vector[i] = Sample{Metric: s.Metric, Point: Point{V: s.Points[0].V, H: s.Points[0].H, T: start}}
+			if len(s.Histograms) > 0 {
+				vector[i] = Sample{Metric: s.Metric, H: s.Histograms[0].H, T: start}
+			} else {
+				vector[i] = Sample{Metric: s.Metric, F: s.Floats[0].F, T: start}
+			}
 		}
 		return vector, warnings, nil
 	case parser.ValueTypeScalar:
-		return Scalar{V: mat[0].Points[0].V, T: start}, warnings, nil
+		return Scalar{V: mat[0].Floats[0].F, T: start}, warnings, nil
 	case parser.ValueTypeMatrix:
 		return mat, warnings, nil
 	default:
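For orientation across the remaining engine hunks: this sync splits the old `promql.Point{T, V, H}` into separate float and histogram points, and `Series`/`Sample` carry them as the hunks show. Roughly the shape implied by this diff (fields abridged; the actual definitions live in the Prometheus promql package):

```go
package promqlsketch

import (
	"github.com/prometheus/prometheus/model/histogram"
	"github.com/prometheus/prometheus/model/labels"
)

// FPoint is a float sample at a timestamp; HPoint is a histogram sample.
type FPoint struct {
	T int64
	F float64
}

type HPoint struct {
	T int64
	H *histogram.FloatHistogram
}

// A Series now keeps float and histogram points in separate slices.
type Series struct {
	Metric     labels.Labels
	Floats     []FPoint
	Histograms []HPoint
}

// A Sample inlines what used to be its embedded Point; H == nil marks a
// float sample.
type Sample struct {
	T      int64
	F      float64
	H      *histogram.FloatHistogram
	Metric labels.Labels
}
```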
@@ -940,9 +945,10 @@ type errWithWarnings struct {
 func (e errWithWarnings) Error() string { return e.err.Error() }

-// An evaluator evaluates given expressions over given fixed timestamps. It
-// is attached to an engine through which it connects to a querier and reports
-// errors. On timeout or cancellation of its context it terminates.
+// An evaluator evaluates the given expressions over the given fixed
+// timestamps. It is attached to an engine through which it connects to a
+// querier and reports errors. On timeout or cancellation of its context it
+// terminates.
 type evaluator struct {
 	ctx context.Context
@@ -1137,17 +1143,35 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper)
 			}

 			for si, series := range matrixes[i] {
-				for _, point := range series.Points {
+				for _, point := range series.Floats {
 					if point.T == ts {
 						if ev.currentSamples < ev.maxSamples {
-							vectors[i] = append(vectors[i], Sample{Metric: series.Metric, Point: point})
+							vectors[i] = append(vectors[i], Sample{Metric: series.Metric, F: point.F, T: ts})
 							if prepSeries != nil {
 								bufHelpers[i] = append(bufHelpers[i], seriesHelpers[i][si])
 							}
 							// Move input vectors forward so we don't have to re-scan the same
 							// past points at the next step.
-							matrixes[i][si].Points = series.Points[1:]
+							matrixes[i][si].Floats = series.Floats[1:]
+							ev.currentSamples++
+						} else {
+							ev.error(ErrTooManySamples(env))
+						}
+					}
+					break
+				}
+				for _, point := range series.Histograms {
+					if point.T == ts {
+						if ev.currentSamples < ev.maxSamples {
+							vectors[i] = append(vectors[i], Sample{Metric: series.Metric, H: point.H, T: ts})
+							if prepSeries != nil {
+								bufHelpers[i] = append(bufHelpers[i], seriesHelpers[i][si])
+							}
+							// Move input vectors forward so we don't have to re-scan the same
+							// past points at the next step.
+							matrixes[i][si].Histograms = series.Histograms[1:]
 							ev.currentSamples++
 						} else {
 							ev.error(ErrTooManySamples(env))
@@ -1184,8 +1208,11 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper)
 	if ev.endTimestamp == ev.startTimestamp {
 		mat := make(Matrix, len(result))
 		for i, s := range result {
-			s.Point.T = ts
-			mat[i] = Series{Metric: s.Metric, Points: []Point{s.Point}}
+			if s.H == nil {
+				mat[i] = Series{Metric: s.Metric, Floats: []FPoint{{T: ts, F: s.F}}}
+			} else {
+				mat[i] = Series{Metric: s.Metric, Histograms: []HPoint{{T: ts, H: s.H}}}
+			}
 		}
 		ev.currentSamples = originalNumSamples + mat.TotalSamples()
 		ev.samplesStats.UpdatePeak(ev.currentSamples)
@@ -1197,22 +1224,28 @@ func (ev *evaluator) rangeEval(prepSeries func(labels.Labels, *EvalSeriesHelper)
 			h := sample.Metric.Hash()
 			ss, ok := seriess[h]
 			if !ok {
-				ss = Series{
-					Metric: sample.Metric,
-					Points: getPointSlice(numSteps),
-				}
+				ss = Series{Metric: sample.Metric}
+			}
+			if sample.H == nil {
+				if ss.Floats == nil {
+					ss.Floats = getFPointSlice(numSteps)
+				}
+				ss.Floats = append(ss.Floats, FPoint{T: ts, F: sample.F})
+			} else {
+				if ss.Histograms == nil {
+					ss.Histograms = getHPointSlice(numSteps)
+				}
+				ss.Histograms = append(ss.Histograms, HPoint{T: ts, H: sample.H})
 			}
-			sample.Point.T = ts
-			ss.Points = append(ss.Points, sample.Point)
 			seriess[h] = ss
 		}
 	}

 	// Reuse the original point slices.
 	for _, m := range origMatrixes {
 		for _, s := range m {
-			putPointSlice(s.Points)
+			putFPointSlice(s.Floats)
+			putHPointSlice(s.Histograms)
 		}
 	}
 	// Assemble the output matrix. By the time we get here we know we don't have too many samples.
@@ -1253,7 +1286,7 @@ func (ev *evaluator) evalSubquery(subq *parser.SubqueryExpr) (*parser.MatrixSele
 	}
 	totalSamples := 0
 	for _, s := range mat {
-		totalSamples += len(s.Points)
+		totalSamples += len(s.Floats) + len(s.Histograms)
 		vs.Series = append(vs.Series, NewStorageSeries(s))
 	}
 	return ms, totalSamples, ws
@@ -1297,7 +1330,7 @@ func (ev *evaluator) eval(expr parser.Expr) (parser.Value, storage.Warnings) {
 		return ev.rangeEval(initSeries, func(v []parser.Value, sh [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
 			var param float64
 			if e.Param != nil {
-				param = v[0].(Vector)[0].V
+				param = v[0].(Vector)[0].F
 			}
 			return ev.aggregation(e.Op, sortedGrouping, e.Without, param, v[1].(Vector), sh[1], enh), nil
 		}, e.Param, e.Expr)
@@ -1396,7 +1429,8 @@
 			stepRange = ev.interval
 		}
 		// Reuse objects across steps to save memory allocations.
-		points := getPointSlice(16)
+		var floats []FPoint
+		var histograms []HPoint
 		inMatrix := make(Matrix, 1)
 		inArgs[matrixArgIndex] = inMatrix
 		enh := &EvalNodeHelper{Out: make(Vector, 0, 1)}
@@ -1404,8 +1438,13 @@
 		it := storage.NewBuffer(selRange)
 		var chkIter chunkenc.Iterator
 		for i, s := range selVS.Series {
-			ev.currentSamples -= len(points)
-			points = points[:0]
+			ev.currentSamples -= len(floats) + len(histograms)
+			if floats != nil {
+				floats = floats[:0]
+			}
+			if histograms != nil {
+				histograms = histograms[:0]
+			}
 			chkIter = s.Iterator(chkIter)
 			it.Reset(chkIter)
 			metric := selVS.Series[i].Labels()
@@ -1418,7 +1457,6 @@
 			}
 			ss := Series{
 				Metric: metric,
-				Points: getPointSlice(numSteps),
 			}
 			inMatrix[0].Metric = selVS.Series[i].Labels()
 			for ts, step := ev.startTimestamp, -1; ts <= ev.endTimestamp; ts += ev.interval {
@@ -1428,44 +1466,54 @@
 				// when looking up the argument, as there will be no gaps.
 				for j := range e.Args {
 					if j != matrixArgIndex {
-						otherInArgs[j][0].V = otherArgs[j][0].Points[step].V
+						otherInArgs[j][0].F = otherArgs[j][0].Floats[step].F
 					}
 				}
 				maxt := ts - offset
 				mint := maxt - selRange
 				// Evaluate the matrix selector for this series for this step.
-				points = ev.matrixIterSlice(it, mint, maxt, points)
-				if len(points) == 0 {
+				floats, histograms = ev.matrixIterSlice(it, mint, maxt, floats, histograms)
+				if len(floats)+len(histograms) == 0 {
 					continue
 				}
-				inMatrix[0].Points = points
+				inMatrix[0].Floats = floats
+				inMatrix[0].Histograms = histograms
 				enh.Ts = ts
 				// Make the function call.
 				outVec := call(inArgs, e.Args, enh)
-				ev.samplesStats.IncrementSamplesAtStep(step, int64(len(points)))
+				ev.samplesStats.IncrementSamplesAtStep(step, int64(len(floats)+len(histograms)))
 				enh.Out = outVec[:0]
 				if len(outVec) > 0 {
-					ss.Points = append(ss.Points, Point{V: outVec[0].Point.V, H: outVec[0].Point.H, T: ts})
+					if outVec[0].H == nil {
+						if ss.Floats == nil {
+							ss.Floats = getFPointSlice(numSteps)
+						}
+						ss.Floats = append(ss.Floats, FPoint{F: outVec[0].F, T: ts})
+					} else {
+						if ss.Histograms == nil {
+							ss.Histograms = getHPointSlice(numSteps)
+						}
+						ss.Histograms = append(ss.Histograms, HPoint{H: outVec[0].H, T: ts})
+					}
 				}
 				// Only buffer stepRange milliseconds from the second step on.
 				it.ReduceDelta(stepRange)
 			}
-			if len(ss.Points) > 0 {
-				if ev.currentSamples+len(ss.Points) <= ev.maxSamples {
+			if len(ss.Floats)+len(ss.Histograms) > 0 {
+				if ev.currentSamples+len(ss.Floats)+len(ss.Histograms) <= ev.maxSamples {
 					mat = append(mat, ss)
-					ev.currentSamples += len(ss.Points)
+					ev.currentSamples += len(ss.Floats) + len(ss.Histograms)
 				} else {
 					ev.error(ErrTooManySamples(env))
 				}
-			} else {
-				putPointSlice(ss.Points)
 			}
 			ev.samplesStats.UpdatePeak(ev.currentSamples)
 		}
 		ev.samplesStats.UpdatePeak(ev.currentSamples)

-		ev.currentSamples -= len(points)
-		putPointSlice(points)
+		ev.currentSamples -= len(floats) + len(histograms)
+		putFPointSlice(floats)
+		putHPointSlice(histograms)

 		// The absent_over_time function returns 0 or 1 series. So far, the matrix
 		// contains multiple series. The following code will create a new series
@@ -1474,7 +1522,7 @@
 		steps := int(1 + (ev.endTimestamp-ev.startTimestamp)/ev.interval)
 		// Iterate once to look for a complete series.
 		for _, s := range mat {
-			if len(s.Points) == steps {
+			if len(s.Floats)+len(s.Histograms) == steps {
 				return Matrix{}, warnings
 			}
 		}
@@ -1482,7 +1530,10 @@
 		found := map[int64]struct{}{}
 		for i, s := range mat {
-			for _, p := range s.Points {
+			for _, p := range s.Floats {
+				found[p.T] = struct{}{}
+			}
+			for _, p := range s.Histograms {
 				found[p.T] = struct{}{}
 			}
 			if i > 0 && len(found) == steps {
@@ -1490,17 +1541,17 @@
 			}
 		}

-		newp := make([]Point, 0, steps-len(found))
+		newp := make([]FPoint, 0, steps-len(found))
 		for ts := ev.startTimestamp; ts <= ev.endTimestamp; ts += ev.interval {
 			if _, ok := found[ts]; !ok {
-				newp = append(newp, Point{T: ts, V: 1})
+				newp = append(newp, FPoint{T: ts, F: 1})
 			}
 		}

 		return Matrix{
 			Series{
 				Metric: createLabelsForAbsentFunction(e.Args[0]),
-				Points: newp,
+				Floats: newp,
 			},
 		}, warnings
 	}
@@ -1520,8 +1571,8 @@
 		if e.Op == parser.SUB {
 			for i := range mat {
 				mat[i].Metric = dropMetricName(mat[i].Metric)
-				for j := range mat[i].Points {
-					mat[i].Points[j].V = -mat[i].Points[j].V
+				for j := range mat[i].Floats {
+					mat[i].Floats[j].F = -mat[i].Floats[j].F
 				}
 			}
 			if mat.ContainsSameLabelset() {
@@ -1534,8 +1585,8 @@
 		switch lt, rt := e.LHS.Type(), e.RHS.Type(); {
 		case lt == parser.ValueTypeScalar && rt == parser.ValueTypeScalar:
 			return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
-				val := scalarBinop(e.Op, v[0].(Vector)[0].Point.V, v[1].(Vector)[0].Point.V)
-				return append(enh.Out, Sample{Point: Point{V: val}}), nil
+				val := scalarBinop(e.Op, v[0].(Vector)[0].F, v[1].(Vector)[0].F)
+				return append(enh.Out, Sample{F: val}), nil
 			}, e.LHS, e.RHS)
 		case lt == parser.ValueTypeVector && rt == parser.ValueTypeVector:
 			// Function to compute the join signature for each series.
@@ -1565,18 +1616,18 @@
 		case lt == parser.ValueTypeVector && rt == parser.ValueTypeScalar:
 			return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
-				return ev.VectorscalarBinop(e.Op, v[0].(Vector), Scalar{V: v[1].(Vector)[0].Point.V}, false, e.ReturnBool, enh), nil
+				return ev.VectorscalarBinop(e.Op, v[0].(Vector), Scalar{V: v[1].(Vector)[0].F}, false, e.ReturnBool, enh), nil
 			}, e.LHS, e.RHS)

 		case lt == parser.ValueTypeScalar && rt == parser.ValueTypeVector:
 			return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
-				return ev.VectorscalarBinop(e.Op, v[1].(Vector), Scalar{V: v[0].(Vector)[0].Point.V}, true, e.ReturnBool, enh), nil
+				return ev.VectorscalarBinop(e.Op, v[1].(Vector), Scalar{V: v[0].(Vector)[0].F}, true, e.ReturnBool, enh), nil
 			}, e.LHS, e.RHS)
 		}

 	case *parser.NumberLiteral:
 		return ev.rangeEval(nil, func(v []parser.Value, _ [][]EvalSeriesHelper, enh *EvalNodeHelper) (Vector, storage.Warnings) {
-			return append(enh.Out, Sample{Point: Point{V: e.Val}, Metric: labels.EmptyLabels()}), nil
+			return append(enh.Out, Sample{F: e.Val, Metric: labels.EmptyLabels()}), nil
 		})

 	case *parser.StringLiteral:
@@ -1595,15 +1646,24 @@
 			it.Reset(chkIter)
 			ss := Series{
 				Metric: e.Series[i].Labels(),
-				Points: getPointSlice(numSteps),
 			}

 			for ts, step := ev.startTimestamp, -1; ts <= ev.endTimestamp; ts += ev.interval {
 				step++
-				_, v, h, ok := ev.vectorSelectorSingle(it, e, ts)
+				_, f, h, ok := ev.vectorSelectorSingle(it, e, ts)
 				if ok {
 					if ev.currentSamples < ev.maxSamples {
-						ss.Points = append(ss.Points, Point{V: v, H: h, T: ts})
+						if h == nil {
+							if ss.Floats == nil {
+								ss.Floats = getFPointSlice(numSteps)
+							}
+							ss.Floats = append(ss.Floats, FPoint{F: f, T: ts})
+						} else {
+							if ss.Histograms == nil {
+								ss.Histograms = getHPointSlice(numSteps)
+							}
+							ss.Histograms = append(ss.Histograms, HPoint{H: h, T: ts})
+						}
 						ev.samplesStats.IncrementSamplesAtStep(step, 1)
 						ev.currentSamples++
 					} else {
@@ -1612,10 +1672,8 @@
 				}
 			}

-			if len(ss.Points) > 0 {
+			if len(ss.Floats)+len(ss.Histograms) > 0 {
 				mat = append(mat, ss)
-			} else {
-				putPointSlice(ss.Points)
 			}
 		}
 		ev.samplesStats.UpdatePeak(ev.currentSamples)
@@ -1706,15 +1764,21 @@
 		panic(fmt.Errorf("unexpected result in StepInvariantExpr evaluation: %T", expr))
 	}
 	for i := range mat {
-		if len(mat[i].Points) != 1 {
+		if len(mat[i].Floats)+len(mat[i].Histograms) != 1 {
 			panic(fmt.Errorf("unexpected number of samples"))
 		}
 		for ts := ev.startTimestamp + ev.interval; ts <= ev.endTimestamp; ts = ts + ev.interval {
-			mat[i].Points = append(mat[i].Points, Point{
-				T: ts,
-				V: mat[i].Points[0].V,
-				H: mat[i].Points[0].H,
-			})
+			if len(mat[i].Floats) > 0 {
+				mat[i].Floats = append(mat[i].Floats, FPoint{
+					T: ts,
+					F: mat[i].Floats[0].F,
+				})
+			} else {
+				mat[i].Histograms = append(mat[i].Histograms, HPoint{
+					T: ts,
+					H: mat[i].Histograms[0].H,
+				})
+			}
 			ev.currentSamples++
 			if ev.currentSamples > ev.maxSamples {
 				ev.error(ErrTooManySamples(env))
@@ -1741,11 +1805,13 @@ func (ev *evaluator) vectorSelector(node *parser.VectorSelector, ts int64) (Vect
 		chkIter = s.Iterator(chkIter)
 		it.Reset(chkIter)

-		t, v, h, ok := ev.vectorSelectorSingle(it, node, ts)
+		t, f, h, ok := ev.vectorSelectorSingle(it, node, ts)
 		if ok {
 			vec = append(vec, Sample{
 				Metric: node.Series[i].Labels(),
-				Point:  Point{V: v, H: h, T: t},
+				T:      t,
+				F:      f,
+				H:      h,
 			})

 			ev.currentSamples++
@@ -1795,17 +1861,35 @@ func (ev *evaluator) vectorSelectorSingle(it *storage.MemoizedSeriesIterator, no
 	return t, v, h, true
 }

-var pointPool zeropool.Pool[[]Point]
+var (
+	fPointPool zeropool.Pool[[]FPoint]
+	hPointPool zeropool.Pool[[]HPoint]
+)

-func getPointSlice(sz int) []Point {
-	if p := pointPool.Get(); p != nil {
+func getFPointSlice(sz int) []FPoint {
+	if p := fPointPool.Get(); p != nil {
 		return p
 	}
-	return make([]Point, 0, sz)
+	return make([]FPoint, 0, sz)
}

-func putPointSlice(p []Point) {
-	pointPool.Put(p[:0])
+func putFPointSlice(p []FPoint) {
+	if p != nil {
+		fPointPool.Put(p[:0])
+	}
+}
+
+func getHPointSlice(sz int) []HPoint {
+	if p := hPointPool.Get(); p != nil {
+		return p
+	}
+	return make([]HPoint, 0, sz)
+}
+
+func putHPointSlice(p []HPoint) {
+	if p != nil {
+		hPointPool.Put(p[:0])
+	}
 }

 // matrixSelector evaluates a *parser.MatrixSelector expression.
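The split pools keep the same get/put pattern as before, now with one pool per point type (`zeropool` is Prometheus's generic pool utility, which avoids the interface boxing of a bare `sync.Pool`). A self-contained sketch of the pattern using only the standard library, not the code from this diff:

```go
package main

import "sync"

// slicePool mimics the get/put pattern above with a plain sync.Pool.
type slicePool[T any] struct{ p sync.Pool }

func (sp *slicePool[T]) get(sz int) []T {
	if v, ok := sp.p.Get().(*[]T); ok {
		return (*v)[:0]
	}
	return make([]T, 0, sz)
}

func (sp *slicePool[T]) put(s []T) {
	if s == nil {
		return // mirrors the nil check in putFPointSlice/putHPointSlice
	}
	sp.p.Put(&s) // store a pointer so sync.Pool doesn't allocate
}

func main() {
	var pool slicePool[float64]
	buf := pool.get(16)
	buf = append(buf, 1, 2, 3)
	pool.put(buf)
	_ = pool.get(16) // likely reuses buf's backing array
}
```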
@@ -1837,13 +1921,15 @@ func (ev *evaluator) matrixSelector(node *parser.MatrixSelector) (Matrix, storag
 			Metric: series[i].Labels(),
 		}

-		ss.Points = ev.matrixIterSlice(it, mint, maxt, getPointSlice(16))
-		ev.samplesStats.IncrementSamplesAtTimestamp(ev.startTimestamp, int64(len(ss.Points)))
+		ss.Floats, ss.Histograms = ev.matrixIterSlice(it, mint, maxt, nil, nil)
+		totalLen := int64(len(ss.Floats)) + int64(len(ss.Histograms))
+		ev.samplesStats.IncrementSamplesAtTimestamp(ev.startTimestamp, totalLen)

-		if len(ss.Points) > 0 {
+		if totalLen > 0 {
 			matrix = append(matrix, ss)
 		} else {
-			putPointSlice(ss.Points)
+			putFPointSlice(ss.Floats)
+			putHPointSlice(ss.Histograms)
 		}
 	}
 	return matrix, ws
@@ -1857,24 +1943,54 @@
 // values). Any such points falling before mint are discarded; points that fall
 // into the [mint, maxt] range are retained; only points with later timestamps
 // are populated from the iterator.
-func (ev *evaluator) matrixIterSlice(it *storage.BufferedSeriesIterator, mint, maxt int64, out []Point) []Point {
-	if len(out) > 0 && out[len(out)-1].T >= mint {
+func (ev *evaluator) matrixIterSlice(
+	it *storage.BufferedSeriesIterator, mint, maxt int64,
+	floats []FPoint, histograms []HPoint,
+) ([]FPoint, []HPoint) {
+	mintFloats, mintHistograms := mint, mint
+
+	// First floats...
+	if len(floats) > 0 && floats[len(floats)-1].T >= mint {
 		// There is an overlap between previous and current ranges, retain common
 		// points. In most such cases:
 		//   (a) the overlap is significantly larger than the eval step; and/or
 		//   (b) the number of samples is relatively small.
 		// so a linear search will be as fast as a binary search.
 		var drop int
-		for drop = 0; out[drop].T < mint; drop++ {
+		for drop = 0; floats[drop].T < mint; drop++ {
 		}
 		ev.currentSamples -= drop
-		copy(out, out[drop:])
-		out = out[:len(out)-drop]
+		copy(floats, floats[drop:])
+		floats = floats[:len(floats)-drop]
 		// Only append points with timestamps after the last timestamp we have.
-		mint = out[len(out)-1].T + 1
+		mintFloats = floats[len(floats)-1].T + 1
 	} else {
-		ev.currentSamples -= len(out)
-		out = out[:0]
+		ev.currentSamples -= len(floats)
+		if floats != nil {
+			floats = floats[:0]
+		}
+	}
+
+	// ...then the same for histograms. TODO(beorn7): Use generics?
+	if len(histograms) > 0 && histograms[len(histograms)-1].T >= mint {
+		// There is an overlap between previous and current ranges, retain common
+		// points. In most such cases:
+		//   (a) the overlap is significantly larger than the eval step; and/or
+		//   (b) the number of samples is relatively small.
+		// so a linear search will be as fast as a binary search.
+		var drop int
+		for drop = 0; histograms[drop].T < mint; drop++ {
+		}
+		ev.currentSamples -= drop
+		copy(histograms, histograms[drop:])
+		histograms = histograms[:len(histograms)-drop]
+		// Only append points with timestamps after the last timestamp we have.
+		mintHistograms = histograms[len(histograms)-1].T + 1
+	} else {
+		ev.currentSamples -= len(histograms)
+		if histograms != nil {
+			histograms = histograms[:0]
+		}
 	}

 	soughtValueType := it.Seek(maxt)
@@ -1896,25 +2012,31 @@ loop:
 				continue loop
 			}
 			// Values in the buffer are guaranteed to be smaller than maxt.
-			if t >= mint {
+			if t >= mintHistograms {
 				if ev.currentSamples >= ev.maxSamples {
 					ev.error(ErrTooManySamples(env))
 				}
 				ev.currentSamples++
-				out = append(out, Point{T: t, H: h})
+				if histograms == nil {
+					histograms = getHPointSlice(16)
+				}
+				histograms = append(histograms, HPoint{T: t, H: h})
 			}
 		case chunkenc.ValFloat:
-			t, v := buf.At()
-			if value.IsStaleNaN(v) {
+			t, f := buf.At()
+			if value.IsStaleNaN(f) {
 				continue loop
 			}
 			// Values in the buffer are guaranteed to be smaller than maxt.
-			if t >= mint {
+			if t >= mintFloats {
 				if ev.currentSamples >= ev.maxSamples {
 					ev.error(ErrTooManySamples(env))
 				}
 				ev.currentSamples++
-				out = append(out, Point{T: t, V: v})
+				if floats == nil {
+					floats = getFPointSlice(16)
+				}
+				floats = append(floats, FPoint{T: t, F: f})
 			}
 		}
 	}
@@ -1926,21 +2048,27 @@ loop:
 			if ev.currentSamples >= ev.maxSamples {
 				ev.error(ErrTooManySamples(env))
 			}
-			out = append(out, Point{T: t, H: h})
+			if histograms == nil {
+				histograms = getHPointSlice(16)
+			}
+			histograms = append(histograms, HPoint{T: t, H: h})
 			ev.currentSamples++
 		}
 	case chunkenc.ValFloat:
-		t, v := it.At()
-		if t == maxt && !value.IsStaleNaN(v) {
+		t, f := it.At()
+		if t == maxt && !value.IsStaleNaN(f) {
 			if ev.currentSamples >= ev.maxSamples {
 				ev.error(ErrTooManySamples(env))
 			}
-			out = append(out, Point{T: t, V: v})
+			if floats == nil {
+				floats = getFPointSlice(16)
+			}
+			floats = append(floats, FPoint{T: t, F: f})
 			ev.currentSamples++
 		}
 	}
 	ev.samplesStats.UpdatePeak(ev.currentSamples)
-	return out
+	return floats, histograms
 }
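The `getFPointSlice(16)` / `getHPointSlice(16)` calls above allocate lazily, so a series containing only floats never pays for a histogram buffer (and vice versa). A rough sketch of that pattern using a `sync.Pool`; the pool shown here is a hypothetical stand-in, the engine's actual pooling may differ:

    package main

    import (
    	"fmt"
    	"sync"
    )

    type FPoint struct {
    	T int64
    	F float64
    }

    // fPointPool recycles point slices between evaluations to reduce garbage.
    var fPointPool = sync.Pool{
    	New: func() any { return make([]FPoint, 0, 16) },
    }

    func getFPointSlice(int) []FPoint { return fPointPool.Get().([]FPoint)[:0] }

    func putFPointSlice(s []FPoint) { fPointPool.Put(s[:0]) }

    func main() {
    	var floats []FPoint // stays nil until the first float sample shows up
    	for t := int64(1); t <= 3; t++ {
    		if floats == nil {
    			floats = getFPointSlice(16) // allocate only when needed
    		}
    		floats = append(floats, FPoint{T: t, F: float64(t)})
    	}
    	fmt.Println(len(floats)) // 3
    	putFPointSlice(floats)
    }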
 func (ev *evaluator) VectorAnd(lhs, rhs Vector, matching *parser.VectorMatching, lhsh, rhsh []EvalSeriesHelper, enh *EvalNodeHelper) Vector {
@@ -2086,18 +2214,18 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching *
 		}
 		// Account for potentially swapped sidedness.
-		vl, vr := ls.V, rs.V
+		fl, fr := ls.F, rs.F
 		hl, hr := ls.H, rs.H
 		if matching.Card == parser.CardOneToMany {
-			vl, vr = vr, vl
+			fl, fr = fr, fl
 			hl, hr = hr, hl
 		}
-		value, histogramValue, keep := vectorElemBinop(op, vl, vr, hl, hr)
+		floatValue, histogramValue, keep := vectorElemBinop(op, fl, fr, hl, hr)
 		if returnBool {
 			if keep {
-				value = 1.0
+				floatValue = 1.0
 			} else {
-				value = 0.0
+				floatValue = 0.0
 			}
 		} else if !keep {
 			continue
@@ -2131,7 +2259,8 @@ func (ev *evaluator) VectorBinop(op parser.ItemType, lhs, rhs Vector, matching *
 		// Both lhs and rhs are of same type.
 		enh.Out = append(enh.Out, Sample{
 			Metric: metric,
-			Point:  Point{V: value, H: histogramValue},
+			F:      floatValue,
+			H:      histogramValue,
 		})
 	}
 }
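With `bool` comparison operators, `keep` no longer filters the sample; the result value is forced to 1.0/0.0 instead. Illustrated in isolation with a hypothetical helper (not the engine's `vectorElemBinop`):

    package main

    import "fmt"

    // applyBool mimics the returnBool branch above: with `bool`, every
    // sample survives, but its value collapses to 1 (kept) or 0 (dropped).
    func applyBool(value float64, keep, returnBool bool) (float64, bool) {
    	if returnBool {
    		if keep {
    			return 1.0, true
    		}
    		return 0.0, true
    	}
    	return value, keep // without bool, !keep filters the sample out
    }

    func main() {
    	fmt.Println(applyBool(42, false, true))  // 0 true: emitted as 0
    	fmt.Println(applyBool(42, false, false)) // 42 false: filtered
    }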
@@ -2192,7 +2321,7 @@ func resultMetric(lhs, rhs labels.Labels, op parser.ItemType, matching *parser.V
 		}
 	}
-	ret := enh.lb.Labels(labels.EmptyLabels())
+	ret := enh.lb.Labels()
 	enh.resultMetric[str] = ret
 	return ret
 }
@@ -2200,7 +2329,7 @@ func resultMetric(lhs, rhs labels.Labels, op parser.ItemType, matching *parser.V
 // VectorscalarBinop evaluates a binary operation between a Vector and a Scalar.
 func (ev *evaluator) VectorscalarBinop(op parser.ItemType, lhs Vector, rhs Scalar, swap, returnBool bool, enh *EvalNodeHelper) Vector {
 	for _, lhsSample := range lhs {
-		lv, rv := lhsSample.V, rhs.V
+		lv, rv := lhsSample.F, rhs.V
 		// lhs always contains the Vector. If the original position was different
 		// swap for calculating the value.
 		if swap {
@@ -2221,7 +2350,7 @@ func (ev *evaluator) VectorscalarBinop(op parser.ItemType, lhs Vector, rhs Scala
 			keep = true
 		}
 		if keep {
-			lhsSample.V = value
+			lhsSample.F = value
 			if shouldDropMetricName(op) || returnBool {
 				lhsSample.Metric = enh.DropMetricName(lhsSample.Metric)
 			}
@@ -2232,7 +2361,7 @@ func (ev *evaluator) VectorscalarBinop(op parser.ItemType, lhs Vector, rhs Scala
 }
 func dropMetricName(l labels.Labels) labels.Labels {
-	return labels.NewBuilder(l).Del(labels.MetricName).Labels(labels.EmptyLabels())
+	return labels.NewBuilder(l).Del(labels.MetricName).Labels()
 }
 // scalarBinop evaluates a binary operation between two Scalars.
@@ -2313,7 +2442,7 @@ type groupedAggregation struct {
 	hasFloat       bool // Has at least 1 float64 sample aggregated.
 	hasHistogram   bool // Has at least 1 histogram sample aggregated.
 	labels         labels.Labels
-	value          float64
+	floatValue     float64
 	histogramValue *histogram.FloatHistogram
 	mean           float64
 	groupCount     int
@@ -2365,8 +2494,8 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 		if op == parser.COUNT_VALUES {
 			enh.resetBuilder(metric)
-			enh.lb.Set(valueLabel, strconv.FormatFloat(s.V, 'f', -1, 64))
-			metric = enh.lb.Labels(labels.EmptyLabels())
+			enh.lb.Set(valueLabel, strconv.FormatFloat(s.F, 'f', -1, 64))
+			metric = enh.lb.Labels()
 			// We've changed the metric so we have to recompute the grouping key.
 			recomputeGroupingKey = true
@@ -2388,17 +2517,17 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 			if without {
 				enh.lb.Del(grouping...)
 				enh.lb.Del(labels.MetricName)
-				m = enh.lb.Labels(labels.EmptyLabels())
+				m = enh.lb.Labels()
 			} else if len(grouping) > 0 {
 				enh.lb.Keep(grouping...)
-				m = enh.lb.Labels(labels.EmptyLabels())
+				m = enh.lb.Labels()
 			} else {
 				m = labels.EmptyLabels()
 			}
 			newAgg := &groupedAggregation{
 				labels:     m,
-				value:      s.V,
-				mean:       s.V,
+				floatValue: s.F,
+				mean:       s.F,
 				groupCount: 1,
 			}
 			if s.H == nil {
@@ -2420,21 +2549,21 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 			}
 			switch op {
 			case parser.STDVAR, parser.STDDEV:
-				result[groupingKey].value = 0
+				result[groupingKey].floatValue = 0
 			case parser.TOPK, parser.QUANTILE:
 				result[groupingKey].heap = make(vectorByValueHeap, 1, resultSize)
 				result[groupingKey].heap[0] = Sample{
-					Point:  Point{V: s.V},
+					F:      s.F,
 					Metric: s.Metric,
 				}
 			case parser.BOTTOMK:
 				result[groupingKey].reverseHeap = make(vectorByReverseValueHeap, 1, resultSize)
 				result[groupingKey].reverseHeap[0] = Sample{
-					Point:  Point{V: s.V},
+					F:      s.F,
 					Metric: s.Metric,
 				}
 			case parser.GROUP:
-				result[groupingKey].value = 1
+				result[groupingKey].floatValue = 1
 			}
 			continue
 		}
@@ -2459,19 +2588,19 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 				// point in copying the histogram in that case.
 			} else {
 				group.hasFloat = true
-				group.value += s.V
+				group.floatValue += s.F
 			}
 		case parser.AVG:
 			group.groupCount++
 			if math.IsInf(group.mean, 0) {
-				if math.IsInf(s.V, 0) && (group.mean > 0) == (s.V > 0) {
+				if math.IsInf(s.F, 0) && (group.mean > 0) == (s.F > 0) {
 					// The `mean` and `s.V` values are `Inf` of the same sign. They
 					// can't be subtracted, but the value of `mean` is correct
 					// already.
 					break
 				}
-				if !math.IsInf(s.V, 0) && !math.IsNaN(s.V) {
+				if !math.IsInf(s.F, 0) && !math.IsNaN(s.F) {
 					// At this stage, the mean is an infinite. If the added
 					// value is neither an Inf or a Nan, we can keep that mean
 					// value.
@@ -2482,19 +2611,19 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 				}
 			}
 			// Divide each side of the `-` by `group.groupCount` to avoid float64 overflows.
-			group.mean += s.V/float64(group.groupCount) - group.mean/float64(group.groupCount)
+			group.mean += s.F/float64(group.groupCount) - group.mean/float64(group.groupCount)
 		case parser.GROUP:
 			// Do nothing. Required to avoid the panic in `default:` below.
 		case parser.MAX:
-			if group.value < s.V || math.IsNaN(group.value) {
-				group.value = s.V
+			if group.floatValue < s.F || math.IsNaN(group.floatValue) {
+				group.floatValue = s.F
 			}
 		case parser.MIN:
-			if group.value > s.V || math.IsNaN(group.value) {
-				group.value = s.V
+			if group.floatValue > s.F || math.IsNaN(group.floatValue) {
+				group.floatValue = s.F
 			}
 		case parser.COUNT, parser.COUNT_VALUES:
@@ -2502,21 +2631,21 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 		case parser.STDVAR, parser.STDDEV:
 			group.groupCount++
-			delta := s.V - group.mean
+			delta := s.F - group.mean
 			group.mean += delta / float64(group.groupCount)
-			group.value += delta * (s.V - group.mean)
+			group.floatValue += delta * (s.F - group.mean)
 		case parser.TOPK:
 			// We build a heap of up to k elements, with the smallest element at heap[0].
 			if int64(len(group.heap)) < k {
 				heap.Push(&group.heap, &Sample{
-					Point:  Point{V: s.V},
+					F:      s.F,
 					Metric: s.Metric,
 				})
-			} else if group.heap[0].V < s.V || (math.IsNaN(group.heap[0].V) && !math.IsNaN(s.V)) {
+			} else if group.heap[0].F < s.F || (math.IsNaN(group.heap[0].F) && !math.IsNaN(s.F)) {
 				// This new element is bigger than the previous smallest element - overwrite that.
 				group.heap[0] = Sample{
-					Point:  Point{V: s.V},
+					F:      s.F,
 					Metric: s.Metric,
 				}
 				if k > 1 {
@@ -2528,13 +2657,13 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 			// We build a heap of up to k elements, with the biggest element at heap[0].
 			if int64(len(group.reverseHeap)) < k {
 				heap.Push(&group.reverseHeap, &Sample{
-					Point:  Point{V: s.V},
+					F:      s.F,
 					Metric: s.Metric,
 				})
-			} else if group.reverseHeap[0].V > s.V || (math.IsNaN(group.reverseHeap[0].V) && !math.IsNaN(s.V)) {
+			} else if group.reverseHeap[0].F > s.F || (math.IsNaN(group.reverseHeap[0].F) && !math.IsNaN(s.F)) {
 				// This new element is smaller than the previous biggest element - overwrite that.
 				group.reverseHeap[0] = Sample{
-					Point:  Point{V: s.V},
+					F:      s.F,
 					Metric: s.Metric,
 				}
 				if k > 1 {
@@ -2554,16 +2683,16 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 	for _, aggr := range orderedResult {
 		switch op {
 		case parser.AVG:
-			aggr.value = aggr.mean
+			aggr.floatValue = aggr.mean
 		case parser.COUNT, parser.COUNT_VALUES:
-			aggr.value = float64(aggr.groupCount)
+			aggr.floatValue = float64(aggr.groupCount)
 		case parser.STDVAR:
-			aggr.value = aggr.value / float64(aggr.groupCount)
+			aggr.floatValue = aggr.floatValue / float64(aggr.groupCount)
 		case parser.STDDEV:
-			aggr.value = math.Sqrt(aggr.value / float64(aggr.groupCount))
+			aggr.floatValue = math.Sqrt(aggr.floatValue / float64(aggr.groupCount))
 		case parser.TOPK:
 			// The heap keeps the lowest value on top, so reverse it.
@@ -2573,7 +2702,7 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 			for _, v := range aggr.heap {
 				enh.Out = append(enh.Out, Sample{
 					Metric: v.Metric,
-					Point:  Point{V: v.V},
+					F:      v.F,
 				})
 			}
 			continue // Bypass default append.
@@ -2586,13 +2715,13 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 			for _, v := range aggr.reverseHeap {
 				enh.Out = append(enh.Out, Sample{
 					Metric: v.Metric,
-					Point:  Point{V: v.V},
+					F:      v.F,
 				})
 			}
 			continue // Bypass default append.
 		case parser.QUANTILE:
-			aggr.value = quantile(q, aggr.heap)
+			aggr.floatValue = quantile(q, aggr.heap)
 		case parser.SUM:
 			if aggr.hasFloat && aggr.hasHistogram {
@@ -2605,7 +2734,8 @@ func (ev *evaluator) aggregation(op parser.ItemType, grouping []string, without
 		enh.Out = append(enh.Out, Sample{
 			Metric: aggr.labels,
-			Point:  Point{V: aggr.value, H: aggr.histogramValue},
+			F:      aggr.floatValue,
+			H:      aggr.histogramValue,
 		})
 	}
 	return enh.Out
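Two numerical techniques recur in this aggregation code: AVG updates the mean incrementally (dividing before adding, so a large group cannot overflow a float64 sum), and STDDEV/STDVAR keep a Welford-style running mean plus a squared-delta accumulator. A standalone sketch of the same idea:

    package main

    import (
    	"fmt"
    	"math"
    )

    func main() {
    	data := []float64{2, 4, 4, 4, 5, 5, 7, 9}
    	var n, mean, m2 float64
    	for _, x := range data {
    		n++
    		// Incremental mean: dividing before adding keeps intermediate
    		// values small instead of accumulating a huge raw sum.
    		delta := x - mean
    		mean += delta / n
    		// Welford: m2 accumulates squared deltas around the running mean.
    		m2 += delta * (x - mean)
    	}
    	fmt.Println(mean)              // 5
    	fmt.Println(m2 / n)            // population variance (STDVAR): 4
    	fmt.Println(math.Sqrt(m2 / n)) // standard deviation (STDDEV): 2
    }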

View file

@@ -662,7 +662,8 @@ load 10s
 			Query: "metric",
 			Result: Vector{
 				Sample{
-					Point:  Point{V: 1, T: 1000},
+					F:      1,
+					T:      1000,
 					Metric: labels.FromStrings("__name__", "metric"),
 				},
 			},
@@ -672,7 +673,7 @@ load 10s
 			Query: "metric[20s]",
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 1, T: 0}, {V: 2, T: 10000}},
+					Floats: []FPoint{{F: 1, T: 0}, {F: 2, T: 10000}},
 					Metric: labels.FromStrings("__name__", "metric"),
 				},
 			},
@@ -683,7 +684,7 @@ load 10s
 			Query: "1",
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 1, T: 0}, {V: 1, T: 1000}, {V: 1, T: 2000}},
+					Floats: []FPoint{{F: 1, T: 0}, {F: 1, T: 1000}, {F: 1, T: 2000}},
 					Metric: labels.EmptyLabels(),
 				},
 			},
@@ -695,7 +696,7 @@ load 10s
 			Query: "metric",
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 1, T: 0}, {V: 1, T: 1000}, {V: 1, T: 2000}},
+					Floats: []FPoint{{F: 1, T: 0}, {F: 1, T: 1000}, {F: 1, T: 2000}},
 					Metric: labels.FromStrings("__name__", "metric"),
 				},
 			},
@@ -707,7 +708,7 @@ load 10s
 			Query: "metric",
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 1, T: 0}, {V: 1, T: 5000}, {V: 2, T: 10000}},
+					Floats: []FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
 					Metric: labels.FromStrings("__name__", "metric"),
 				},
 			},
@@ -1462,20 +1463,20 @@ load 1ms
 			query: `metric_neg @ 0`,
 			start: 100,
 			result: Vector{
-				Sample{Point: Point{V: 1, T: 100000}, Metric: lblsneg},
+				Sample{F: 1, T: 100000, Metric: lblsneg},
 			},
 		}, {
 			query: `metric_neg @ -200`,
 			start: 100,
 			result: Vector{
-				Sample{Point: Point{V: 201, T: 100000}, Metric: lblsneg},
+				Sample{F: 201, T: 100000, Metric: lblsneg},
 			},
 		}, {
 			query: `metric{job="2"} @ 50`,
 			start: -2, end: 2, interval: 1,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 10, T: -2000}, {V: 10, T: -1000}, {V: 10, T: 0}, {V: 10, T: 1000}, {V: 10, T: 2000}},
+					Floats: []FPoint{{F: 10, T: -2000}, {F: 10, T: -1000}, {F: 10, T: 0}, {F: 10, T: 1000}, {F: 10, T: 2000}},
 					Metric: lbls2,
 				},
 			},
@@ -1484,11 +1485,11 @@ load 1ms
 			start: 10,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 28, T: 280000}, {V: 29, T: 290000}, {V: 30, T: 300000}},
+					Floats: []FPoint{{F: 28, T: 280000}, {F: 29, T: 290000}, {F: 30, T: 300000}},
 					Metric: lbls1,
 				},
 				Series{
-					Points: []Point{{V: 56, T: 280000}, {V: 58, T: 290000}, {V: 60, T: 300000}},
+					Floats: []FPoint{{F: 56, T: 280000}, {F: 58, T: 290000}, {F: 60, T: 300000}},
 					Metric: lbls2,
 				},
 			},
@@ -1497,7 +1498,7 @@ load 1ms
 			start: 100,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 3, T: -2000}, {V: 2, T: -1000}, {V: 1, T: 0}},
+					Floats: []FPoint{{F: 3, T: -2000}, {F: 2, T: -1000}, {F: 1, T: 0}},
 					Metric: lblsneg,
 				},
 			},
@@ -1506,7 +1507,7 @@ load 1ms
 			start: 100,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 504, T: -503000}, {V: 503, T: -502000}, {V: 502, T: -501000}, {V: 501, T: -500000}},
+					Floats: []FPoint{{F: 504, T: -503000}, {F: 503, T: -502000}, {F: 502, T: -501000}, {F: 501, T: -500000}},
 					Metric: lblsneg,
 				},
 			},
@@ -1515,7 +1516,7 @@ load 1ms
 			start: 100,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 2342, T: 2342}, {V: 2343, T: 2343}, {V: 2344, T: 2344}, {V: 2345, T: 2345}},
+					Floats: []FPoint{{F: 2342, T: 2342}, {F: 2343, T: 2343}, {F: 2344, T: 2344}, {F: 2345, T: 2345}},
 					Metric: lblsms,
 				},
 			},
@@ -1524,11 +1525,11 @@ load 1ms
 			start: 100,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 20, T: 200000}, {V: 22, T: 225000}, {V: 25, T: 250000}, {V: 27, T: 275000}, {V: 30, T: 300000}},
+					Floats: []FPoint{{F: 20, T: 200000}, {F: 22, T: 225000}, {F: 25, T: 250000}, {F: 27, T: 275000}, {F: 30, T: 300000}},
 					Metric: lbls1,
 				},
 				Series{
-					Points: []Point{{V: 40, T: 200000}, {V: 44, T: 225000}, {V: 50, T: 250000}, {V: 54, T: 275000}, {V: 60, T: 300000}},
+					Floats: []FPoint{{F: 40, T: 200000}, {F: 44, T: 225000}, {F: 50, T: 250000}, {F: 54, T: 275000}, {F: 60, T: 300000}},
 					Metric: lbls2,
 				},
 			},
@@ -1537,7 +1538,7 @@ load 1ms
 			start: 100,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 51, T: -50000}, {V: 26, T: -25000}, {V: 1, T: 0}},
+					Floats: []FPoint{{F: 51, T: -50000}, {F: 26, T: -25000}, {F: 1, T: 0}},
 					Metric: lblsneg,
 				},
 			},
@@ -1546,7 +1547,7 @@ load 1ms
 			start: 100,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 151, T: -150000}, {V: 126, T: -125000}, {V: 101, T: -100000}},
+					Floats: []FPoint{{F: 151, T: -150000}, {F: 126, T: -125000}, {F: 101, T: -100000}},
 					Metric: lblsneg,
 				},
 			},
@@ -1555,7 +1556,7 @@ load 1ms
 			start: 100,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 2250, T: 2250}, {V: 2275, T: 2275}, {V: 2300, T: 2300}, {V: 2325, T: 2325}},
+					Floats: []FPoint{{F: 2250, T: 2250}, {F: 2275, T: 2275}, {F: 2300, T: 2300}, {F: 2325, T: 2325}},
 					Metric: lblsms,
 				},
 			},
@@ -1564,7 +1565,7 @@ load 1ms
 			start: 50, end: 80, interval: 10,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 995, T: 50000}, {V: 994, T: 60000}, {V: 993, T: 70000}, {V: 992, T: 80000}},
+					Floats: []FPoint{{F: 995, T: 50000}, {F: 994, T: 60000}, {F: 993, T: 70000}, {F: 992, T: 80000}},
 					Metric: lblstopk3,
 				},
 			},
@@ -1573,7 +1574,7 @@ load 1ms
 			start: 50, end: 80, interval: 10,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 10, T: 50000}, {V: 12, T: 60000}, {V: 14, T: 70000}, {V: 16, T: 80000}},
+					Floats: []FPoint{{F: 10, T: 50000}, {F: 12, T: 60000}, {F: 14, T: 70000}, {F: 16, T: 80000}},
 					Metric: lblstopk2,
 				},
 			},
@@ -1582,7 +1583,7 @@ load 1ms
 			start: 70, end: 100, interval: 10,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 993, T: 70000}, {V: 992, T: 80000}, {V: 991, T: 90000}, {V: 990, T: 100000}},
+					Floats: []FPoint{{F: 993, T: 70000}, {F: 992, T: 80000}, {F: 991, T: 90000}, {F: 990, T: 100000}},
 					Metric: lblstopk3,
 				},
 			},
@@ -1591,7 +1592,7 @@ load 1ms
 			start: 100, end: 130, interval: 10,
 			result: Matrix{
 				Series{
-					Points: []Point{{V: 990, T: 100000}, {V: 989, T: 110000}, {V: 988, T: 120000}, {V: 987, T: 130000}},
+					Floats: []FPoint{{F: 990, T: 100000}, {F: 989, T: 110000}, {F: 988, T: 120000}, {F: 987, T: 130000}},
 					Metric: lblstopk3,
 				},
 			},
@@ -1602,15 +1603,15 @@ load 1ms
 			start: 0, end: 7 * 60, interval: 60,
 			result: Matrix{
 				Series{
-					Points: []Point{
-						{V: 3600, T: 0},
-						{V: 3600, T: 60 * 1000},
-						{V: 3600, T: 2 * 60 * 1000},
-						{V: 3600, T: 3 * 60 * 1000},
-						{V: 3600, T: 4 * 60 * 1000},
-						{V: 3600, T: 5 * 60 * 1000},
-						{V: 3600, T: 6 * 60 * 1000},
-						{V: 3600, T: 7 * 60 * 1000},
+					Floats: []FPoint{
+						{F: 3600, T: 0},
+						{F: 3600, T: 60 * 1000},
+						{F: 3600, T: 2 * 60 * 1000},
+						{F: 3600, T: 3 * 60 * 1000},
+						{F: 3600, T: 4 * 60 * 1000},
+						{F: 3600, T: 5 * 60 * 1000},
+						{F: 3600, T: 6 * 60 * 1000},
+						{F: 3600, T: 7 * 60 * 1000},
 					},
 					Metric: labels.EmptyLabels(),
 				},
@@ -1723,7 +1724,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 1, T: 0}, {V: 2, T: 10000}},
+							Floats: []FPoint{{F: 1, T: 0}, {F: 2, T: 10000}},
 							Metric: labels.FromStrings("__name__", "metric"),
 						},
 					},
@@ -1737,7 +1738,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 1, T: 0}, {V: 1, T: 5000}, {V: 2, T: 10000}},
+							Floats: []FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
 							Metric: labels.FromStrings("__name__", "metric"),
 						},
 					},
@@ -1751,7 +1752,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 1, T: 0}, {V: 1, T: 5000}, {V: 2, T: 10000}},
+							Floats: []FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
 							Metric: labels.FromStrings("__name__", "metric"),
 						},
 					},
@@ -1765,7 +1766,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 1, T: 0}, {V: 1, T: 5000}, {V: 2, T: 10000}},
+							Floats: []FPoint{{F: 1, T: 0}, {F: 1, T: 5000}, {F: 2, T: 10000}},
 							Metric: labels.FromStrings("__name__", "metric"),
 						},
 					},
@@ -1779,7 +1780,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 2, T: 15000}, {V: 2, T: 20000}, {V: 2, T: 25000}, {V: 2, T: 30000}},
+							Floats: []FPoint{{F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}, {F: 2, T: 30000}},
 							Metric: labels.FromStrings("__name__", "metric"),
 						},
 					},
@@ -1793,7 +1794,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 2, T: 10000}, {V: 2, T: 15000}, {V: 2, T: 20000}, {V: 2, T: 25000}, {V: 2, T: 30000}},
+							Floats: []FPoint{{F: 2, T: 10000}, {F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}, {F: 2, T: 30000}},
 							Metric: labels.FromStrings("__name__", "metric"),
 						},
 					},
@@ -1807,7 +1808,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 2, T: 10000}, {V: 2, T: 15000}, {V: 2, T: 20000}, {V: 2, T: 25000}},
+							Floats: []FPoint{{F: 2, T: 10000}, {F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}},
 							Metric: labels.FromStrings("__name__", "metric"),
 						},
 					},
@@ -1821,7 +1822,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 2, T: 10000}, {V: 2, T: 15000}, {V: 2, T: 20000}, {V: 2, T: 25000}},
+							Floats: []FPoint{{F: 2, T: 10000}, {F: 2, T: 15000}, {F: 2, T: 20000}, {F: 2, T: 25000}},
 							Metric: labels.FromStrings("__name__", "metric"),
 						},
 					},
@@ -1844,7 +1845,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 9990, T: 9990000}, {V: 10000, T: 10000000}, {V: 100, T: 10010000}, {V: 130, T: 10020000}},
+							Floats: []FPoint{{F: 9990, T: 9990000}, {F: 10000, T: 10000000}, {F: 100, T: 10010000}, {F: 130, T: 10020000}},
 							Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production"),
 						},
 					},
@@ -1858,7 +1859,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 9840, T: 9840000}, {V: 9900, T: 9900000}, {V: 9960, T: 9960000}, {V: 130, T: 10020000}, {V: 310, T: 10080000}},
+							Floats: []FPoint{{F: 9840, T: 9840000}, {F: 9900, T: 9900000}, {F: 9960, T: 9960000}, {F: 130, T: 10020000}, {F: 310, T: 10080000}},
 							Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production"),
 						},
 					},
@@ -1872,7 +1873,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 8640, T: 8640000}, {V: 8700, T: 8700000}, {V: 8760, T: 8760000}, {V: 8820, T: 8820000}, {V: 8880, T: 8880000}},
+							Floats: []FPoint{{F: 8640, T: 8640000}, {F: 8700, T: 8700000}, {F: 8760, T: 8760000}, {F: 8820, T: 8820000}, {F: 8880, T: 8880000}},
 							Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production"),
 						},
 					},
@@ -1886,19 +1887,19 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 3, T: 7985000}, {V: 3, T: 7990000}, {V: 3, T: 7995000}, {V: 3, T: 8000000}},
+							Floats: []FPoint{{F: 3, T: 7985000}, {F: 3, T: 7990000}, {F: 3, T: 7995000}, {F: 3, T: 8000000}},
 							Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "canary"),
 						},
 						Series{
-							Points: []Point{{V: 4, T: 7985000}, {V: 4, T: 7990000}, {V: 4, T: 7995000}, {V: 4, T: 8000000}},
+							Floats: []FPoint{{F: 4, T: 7985000}, {F: 4, T: 7990000}, {F: 4, T: 7995000}, {F: 4, T: 8000000}},
 							Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "canary"),
 						},
 						Series{
-							Points: []Point{{V: 1, T: 7985000}, {V: 1, T: 7990000}, {V: 1, T: 7995000}, {V: 1, T: 8000000}},
+							Floats: []FPoint{{F: 1, T: 7985000}, {F: 1, T: 7990000}, {F: 1, T: 7995000}, {F: 1, T: 8000000}},
 							Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "production"),
 						},
 						Series{
-							Points: []Point{{V: 2, T: 7985000}, {V: 2, T: 7990000}, {V: 2, T: 7995000}, {V: 2, T: 8000000}},
+							Floats: []FPoint{{F: 2, T: 7985000}, {F: 2, T: 7990000}, {F: 2, T: 7995000}, {F: 2, T: 8000000}},
 							Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "production"),
 						},
 					},
@@ -1912,7 +1913,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 270, T: 90000}, {V: 300, T: 100000}, {V: 330, T: 110000}, {V: 360, T: 120000}},
+							Floats: []FPoint{{F: 270, T: 90000}, {F: 300, T: 100000}, {F: 330, T: 110000}, {F: 360, T: 120000}},
 							Metric: labels.EmptyLabels(),
 						},
 					},
@@ -1926,7 +1927,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 800, T: 80000}, {V: 900, T: 90000}, {V: 1000, T: 100000}, {V: 1100, T: 110000}, {V: 1200, T: 120000}},
+							Floats: []FPoint{{F: 800, T: 80000}, {F: 900, T: 90000}, {F: 1000, T: 100000}, {F: 1100, T: 110000}, {F: 1200, T: 120000}},
 							Metric: labels.EmptyLabels(),
 						},
 					},
@@ -1940,7 +1941,7 @@ func TestSubquerySelector(t *testing.T) {
 					nil,
 					Matrix{
 						Series{
-							Points: []Point{{V: 1000, T: 100000}, {V: 1000, T: 105000}, {V: 1100, T: 110000}, {V: 1100, T: 115000}, {V: 1200, T: 120000}},
+							Floats: []FPoint{{F: 1000, T: 100000}, {F: 1000, T: 105000}, {F: 1100, T: 110000}, {F: 1100, T: 115000}, {F: 1200, T: 120000}},
 							Metric: labels.EmptyLabels(),
 						},
 					},
@@ -2996,7 +2997,7 @@ func TestRangeQuery(t *testing.T) {
 			Query: "sum_over_time(bar[30s])",
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 0, T: 0}, {V: 11, T: 60000}, {V: 1100, T: 120000}},
+					Floats: []FPoint{{F: 0, T: 0}, {F: 11, T: 60000}, {F: 1100, T: 120000}},
 					Metric: labels.EmptyLabels(),
 				},
 			},
@@ -3011,7 +3012,7 @@ func TestRangeQuery(t *testing.T) {
 			Query: "sum_over_time(bar[30s])",
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 0, T: 0}, {V: 11, T: 60000}, {V: 1100, T: 120000}},
+					Floats: []FPoint{{F: 0, T: 0}, {F: 11, T: 60000}, {F: 1100, T: 120000}},
 					Metric: labels.EmptyLabels(),
 				},
 			},
@@ -3026,7 +3027,7 @@ func TestRangeQuery(t *testing.T) {
 			Query: "sum_over_time(bar[30s])",
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 0, T: 0}, {V: 11, T: 60000}, {V: 1100, T: 120000}, {V: 110000, T: 180000}, {V: 11000000, T: 240000}},
+					Floats: []FPoint{{F: 0, T: 0}, {F: 11, T: 60000}, {F: 1100, T: 120000}, {F: 110000, T: 180000}, {F: 11000000, T: 240000}},
 					Metric: labels.EmptyLabels(),
 				},
 			},
@@ -3041,7 +3042,7 @@ func TestRangeQuery(t *testing.T) {
 			Query: "sum_over_time(bar[30s])",
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 5, T: 0}, {V: 59, T: 60000}, {V: 9, T: 120000}, {V: 956, T: 180000}},
+					Floats: []FPoint{{F: 5, T: 0}, {F: 59, T: 60000}, {F: 9, T: 120000}, {F: 956, T: 180000}},
 					Metric: labels.EmptyLabels(),
 				},
 			},
@@ -3056,7 +3057,7 @@ func TestRangeQuery(t *testing.T) {
 			Query: "metric",
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 1, T: 0}, {V: 3, T: 60000}, {V: 5, T: 120000}},
+					Floats: []FPoint{{F: 1, T: 0}, {F: 3, T: 60000}, {F: 5, T: 120000}},
 					Metric: labels.FromStrings("__name__", "metric"),
 				},
 			},
@@ -3071,7 +3072,7 @@ func TestRangeQuery(t *testing.T) {
 			Query: "metric",
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 1, T: 0}, {V: 3, T: 60000}, {V: 5, T: 120000}},
+					Floats: []FPoint{{F: 1, T: 0}, {F: 3, T: 60000}, {F: 5, T: 120000}},
 					Metric: labels.FromStrings("__name__", "metric"),
 				},
 			},
@@ -3087,14 +3088,14 @@ func TestRangeQuery(t *testing.T) {
 			Query: `foo > 2 or bar`,
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 1, T: 0}, {V: 3, T: 60000}, {V: 5, T: 120000}},
+					Floats: []FPoint{{F: 1, T: 0}, {F: 3, T: 60000}, {F: 5, T: 120000}},
 					Metric: labels.FromStrings(
 						"__name__", "bar",
 						"job", "2",
 					),
 				},
 				Series{
-					Points: []Point{{V: 3, T: 60000}, {V: 5, T: 120000}},
+					Floats: []FPoint{{F: 3, T: 60000}, {F: 5, T: 120000}},
 					Metric: labels.FromStrings(
 						"__name__", "foo",
 						"job", "1",
@@ -3113,11 +3114,11 @@ func TestRangeQuery(t *testing.T) {
 			Query: `foo > 2 or bar`,
 			Result: Matrix{
 				Series{
-					Points: []Point{{V: 1, T: 0}, {V: 3, T: 60000}, {V: 5, T: 120000}},
+					Floats: []FPoint{{F: 1, T: 0}, {F: 3, T: 60000}, {F: 5, T: 120000}},
 					Metric: labels.FromStrings("__name__", "bar", "job", "2"),
 				},
 				Series{
-					Points: []Point{{V: 3, T: 60000}, {V: 5, T: 120000}},
+					Floats: []FPoint{{F: 3, T: 60000}, {F: 5, T: 120000}},
 					Metric: labels.FromStrings("__name__", "foo", "job", "1"),
 				},
 			},
@@ -3286,9 +3287,9 @@ func TestNativeHistogram_HistogramCountAndSum(t *testing.T) {
 				require.Len(t, vector, 1)
 				require.Nil(t, vector[0].H)
 				if floatHisto {
-					require.Equal(t, float64(h.ToFloat().Count), vector[0].V)
+					require.Equal(t, float64(h.ToFloat().Count), vector[0].F)
 				} else {
-					require.Equal(t, float64(h.Count), vector[0].V)
+					require.Equal(t, float64(h.Count), vector[0].F)
 				}
 				queryString = fmt.Sprintf("histogram_sum(%s)", seriesName)
@@ -3304,9 +3305,9 @@ func TestNativeHistogram_HistogramCountAndSum(t *testing.T) {
 				require.Len(t, vector, 1)
 				require.Nil(t, vector[0].H)
 				if floatHisto {
-					require.Equal(t, h.ToFloat().Sum, vector[0].V)
+					require.Equal(t, h.ToFloat().Sum, vector[0].F)
 				} else {
-					require.Equal(t, h.Sum, vector[0].V)
+					require.Equal(t, h.Sum, vector[0].F)
 				}
 			})
 		}
@@ -3539,7 +3540,7 @@ func TestNativeHistogram_HistogramQuantile(t *testing.T) {
 				require.Len(t, vector, 1)
 				require.Nil(t, vector[0].H)
-				require.True(t, almostEqual(sc.value, vector[0].V))
+				require.True(t, almostEqual(sc.value, vector[0].F))
 			})
 		}
 		idx++
@@ -3971,10 +3972,10 @@ func TestNativeHistogram_HistogramFraction(t *testing.T) {
 				require.Len(t, vector, 1)
 				require.Nil(t, vector[0].H)
 				if math.IsNaN(sc.value) {
-					require.True(t, math.IsNaN(vector[0].V))
+					require.True(t, math.IsNaN(vector[0].F))
 					return
 				}
-				require.Equal(t, sc.value, vector[0].V)
+				require.Equal(t, sc.value, vector[0].F)
 			})
 		}
 		idx++
@@ -4110,24 +4111,18 @@ func TestNativeHistogram_Sum_Count_AddOperator(t *testing.T) {
 			// sum().
 			queryString := fmt.Sprintf("sum(%s)", seriesName)
-			queryAndCheck(queryString, []Sample{
-				{Point{T: ts, H: &c.expected}, labels.EmptyLabels()},
-			})
+			queryAndCheck(queryString, []Sample{{T: ts, H: &c.expected, Metric: labels.EmptyLabels()}})
 			// + operator.
 			queryString = fmt.Sprintf(`%s{idx="0"}`, seriesName)
 			for idx := 1; idx < len(c.histograms); idx++ {
 				queryString += fmt.Sprintf(` + ignoring(idx) %s{idx="%d"}`, seriesName, idx)
 			}
-			queryAndCheck(queryString, []Sample{
-				{Point{T: ts, H: &c.expected}, labels.EmptyLabels()},
-			})
+			queryAndCheck(queryString, []Sample{{T: ts, H: &c.expected, Metric: labels.EmptyLabels()}})
 			// count().
 			queryString = fmt.Sprintf("count(%s)", seriesName)
-			queryAndCheck(queryString, []Sample{
-				{Point{T: ts, V: 3}, labels.EmptyLabels()},
-			})
+			queryAndCheck(queryString, []Sample{{T: ts, F: 3, Metric: labels.EmptyLabels()}})
 		})
 		idx0++
 	}
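For orientation, the test expectations above all move from the embedded `Point` to inlined value fields: instant vectors carry `F`/`H` and `T` directly on `Sample`, while range results carry `Floats []FPoint` (and, where applicable, `Histograms []HPoint`) on `Series`. A minimal stand-alone illustration with stub types (the real promql types carry more fields):

    package main

    import "fmt"

    // Stub types with just the fields the expectations use.
    type FPoint struct {
    	T int64
    	F float64
    }

    type Sample struct {
    	T      int64
    	F      float64
    	Metric string
    }

    type Series struct {
    	Floats []FPoint
    	Metric string
    }

    func main() {
    	// Instant-vector expectation: value and timestamp inlined on Sample.
    	v := Sample{F: 1, T: 1000, Metric: `{__name__="metric"}`}
    	// Range expectation: one FPoint per returned step on Series.Floats.
    	m := Series{Floats: []FPoint{{F: 1, T: 0}, {F: 2, T: 10000}}, Metric: `{__name__="metric"}`}
    	fmt.Println(v, m)
    }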

View file

@@ -54,9 +54,9 @@ type FunctionCall func(vals []parser.Value, args parser.Expressions, enh *EvalNo
 // === time() float64 ===
 func funcTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
-	return Vector{Sample{Point: Point{
-		V: float64(enh.Ts) / 1000,
-	}}}
+	return Vector{Sample{
+		F: float64(enh.Ts) / 1000,
+	}}
 }
 // extrapolatedRate is a utility function for rate/increase/delta.
@@ -67,65 +67,71 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod
 	ms := args[0].(*parser.MatrixSelector)
 	vs := ms.VectorSelector.(*parser.VectorSelector)
 	var (
 		samples    = vals[0].(Matrix)[0]
 		rangeStart = enh.Ts - durationMilliseconds(ms.Range+vs.Offset)
 		rangeEnd   = enh.Ts - durationMilliseconds(vs.Offset)
-		resultValue     float64
+		resultFloat        float64
 		resultHistogram *histogram.FloatHistogram
+		firstT, lastT      int64
+		numSamplesMinusOne int
 	)
-	// No sense in trying to compute a rate without at least two points. Drop
-	// this Vector element.
-	if len(samples.Points) < 2 {
+	// We need either at least two Histograms and no Floats, or at least two
+	// Floats and no Histograms to calculate a rate. Otherwise, drop this
+	// Vector element.
+	if len(samples.Histograms) > 0 && len(samples.Floats) > 0 {
+		// Mix of histograms and floats. TODO(beorn7): Communicate this failure reason.
 		return enh.Out
 	}
-	if samples.Points[0].H != nil {
-		resultHistogram = histogramRate(samples.Points, isCounter)
+	switch {
+	case len(samples.Histograms) > 1:
+		numSamplesMinusOne = len(samples.Histograms) - 1
+		firstT = samples.Histograms[0].T
+		lastT = samples.Histograms[numSamplesMinusOne].T
+		resultHistogram = histogramRate(samples.Histograms, isCounter)
 		if resultHistogram == nil {
-			// Points are a mix of floats and histograms, or the histograms
-			// are not compatible with each other.
-			// TODO(beorn7): find a way of communicating the exact reason
+			// The histograms are not compatible with each other.
+			// TODO(beorn7): Communicate this failure reason.
 			return enh.Out
 		}
-	} else {
-		resultValue = samples.Points[len(samples.Points)-1].V - samples.Points[0].V
-		prevValue := samples.Points[0].V
-		// We have to iterate through everything even in the non-counter
-		// case because we have to check that everything is a float.
-		// TODO(beorn7): Find a way to check that earlier, e.g. by
-		// handing in a []FloatPoint and a []HistogramPoint separately.
-		for _, currPoint := range samples.Points[1:] {
-			if currPoint.H != nil {
-				return nil // Range contains a mix of histograms and floats.
-			}
-			if !isCounter {
-				continue
-			}
-			if currPoint.V < prevValue {
-				resultValue += prevValue
-			}
-			prevValue = currPoint.V
+	case len(samples.Floats) > 1:
+		numSamplesMinusOne = len(samples.Floats) - 1
+		firstT = samples.Floats[0].T
+		lastT = samples.Floats[numSamplesMinusOne].T
+		resultFloat = samples.Floats[numSamplesMinusOne].F - samples.Floats[0].F
+		if !isCounter {
+			break
 		}
+		// Handle counter resets:
+		prevValue := samples.Floats[0].F
+		for _, currPoint := range samples.Floats[1:] {
+			if currPoint.F < prevValue {
+				resultFloat += prevValue
+			}
+			prevValue = currPoint.F
+		}
+	default:
+		// Not enough samples. TODO(beorn7): Communicate this failure reason.
+		return enh.Out
 	}
 	// Duration between first/last samples and boundary of range.
-	durationToStart := float64(samples.Points[0].T-rangeStart) / 1000
-	durationToEnd := float64(rangeEnd-samples.Points[len(samples.Points)-1].T) / 1000
-	sampledInterval := float64(samples.Points[len(samples.Points)-1].T-samples.Points[0].T) / 1000
-	averageDurationBetweenSamples := sampledInterval / float64(len(samples.Points)-1)
+	durationToStart := float64(firstT-rangeStart) / 1000
+	durationToEnd := float64(rangeEnd-lastT) / 1000
+	sampledInterval := float64(lastT-firstT) / 1000
+	averageDurationBetweenSamples := sampledInterval / float64(numSamplesMinusOne)
 	// TODO(beorn7): Do this for histograms, too.
-	if isCounter && resultValue > 0 && samples.Points[0].V >= 0 {
-		// Counters cannot be negative. If we have any slope at
-		// all (i.e. resultValue went up), we can extrapolate
-		// the zero point of the counter. If the duration to the
-		// zero point is shorter than the durationToStart, we
-		// take the zero point as the start of the series,
-		// thereby avoiding extrapolation to negative counter
-		// values.
-		durationToZero := sampledInterval * (samples.Points[0].V / resultValue)
+	if isCounter && resultFloat > 0 && len(samples.Floats) > 0 && samples.Floats[0].F >= 0 {
+		// Counters cannot be negative. If we have any slope at all
+		// (i.e. resultFloat went up), we can extrapolate the zero point
+		// of the counter. If the duration to the zero point is shorter
+		// than the durationToStart, we take the zero point as the start
+		// of the series, thereby avoiding extrapolation to negative
+		// counter values.
+		durationToZero := sampledInterval * (samples.Floats[0].F / resultFloat)
 		if durationToZero < durationToStart {
 			durationToStart = durationToZero
 		}
@@ -153,21 +159,19 @@ func extrapolatedRate(vals []parser.Value, args parser.Expressions, enh *EvalNod
 		factor /= ms.Range.Seconds()
 	}
 	if resultHistogram == nil {
-		resultValue *= factor
+		resultFloat *= factor
 	} else {
 		resultHistogram.Scale(factor)
 	}
-	return append(enh.Out, Sample{
-		Point: Point{V: resultValue, H: resultHistogram},
-	})
+	return append(enh.Out, Sample{F: resultFloat, H: resultHistogram})
 }
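The float branch of `extrapolatedRate` does two things: it adds back the pre-reset value at every counter reset, and it extrapolates the increase toward the window edges, clamped so a counter is never extrapolated below zero. A simplified, self-contained version of that calculation (per-second rate over one window, assuming time-sorted float samples; the 1.1-step extrapolation threshold matches Prometheus' long-standing heuristic but is elided from the hunks above):

    package main

    import "fmt"

    type FPoint struct {
    	T int64 // milliseconds
    	F float64
    }

    // rate sketches the float branch: sum the increase (re-adding the
    // pre-reset value at each counter reset), then extrapolate to the
    // window edges without going below the counter's zero point.
    func rate(samples []FPoint, rangeStart, rangeEnd int64) (float64, bool) {
    	if len(samples) < 2 {
    		return 0, false // a slope needs at least two points
    	}
    	first, last := samples[0], samples[len(samples)-1]
    	increase := last.F - first.F
    	prev := first.F
    	for _, p := range samples[1:] {
    		if p.F < prev { // counter reset: add back what was lost
    			increase += prev
    		}
    		prev = p.F
    	}
    	durationToStart := float64(first.T-rangeStart) / 1000
    	durationToEnd := float64(rangeEnd-last.T) / 1000
    	sampledInterval := float64(last.T-first.T) / 1000
    	avgStep := sampledInterval / float64(len(samples)-1)

    	// Never extrapolate past the counter's zero point...
    	if increase > 0 && first.F >= 0 {
    		if z := sampledInterval * (first.F / increase); z < durationToStart {
    			durationToStart = z
    		}
    	}
    	// ...and at most about half a step beyond the outermost samples.
    	extrapolationThreshold := avgStep * 1.1
    	if durationToStart > extrapolationThreshold {
    		durationToStart = avgStep / 2
    	}
    	if durationToEnd > extrapolationThreshold {
    		durationToEnd = avgStep / 2
    	}
    	factor := (sampledInterval + durationToStart + durationToEnd) / sampledInterval
    	seconds := float64(rangeEnd-rangeStart) / 1000
    	return increase * factor / seconds, true
    }

    func main() {
    	s := []FPoint{{T: 0, F: 0}, {T: 30000, F: 30}, {T: 60000, F: 10}}
    	r, _ := rate(s, 0, 60000)
    	fmt.Println(r) // ≈0.667: increase of 40 (reset included) over 60s
    }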
 // histogramRate is a helper function for extrapolatedRate. It requires
 // points[0] to be a histogram. It returns nil if any other Point in points is
 // not a histogram.
-func histogramRate(points []Point, isCounter bool) *histogram.FloatHistogram {
-	prev := points[0].H // We already know that this is a histogram.
+func histogramRate(points []HPoint, isCounter bool) *histogram.FloatHistogram {
+	prev := points[0].H
 	last := points[len(points)-1].H
 	if last == nil {
 		return nil // Range contains a mix of histograms and floats.
@@ -243,19 +247,19 @@ func instantValue(vals []parser.Value, out Vector, isRate bool) Vector {
 	samples := vals[0].(Matrix)[0]
 	// No sense in trying to compute a rate without at least two points. Drop
 	// this Vector element.
-	if len(samples.Points) < 2 {
+	if len(samples.Floats) < 2 {
 		return out
 	}
-	lastSample := samples.Points[len(samples.Points)-1]
-	previousSample := samples.Points[len(samples.Points)-2]
+	lastSample := samples.Floats[len(samples.Floats)-1]
+	previousSample := samples.Floats[len(samples.Floats)-2]
 	var resultValue float64
-	if isRate && lastSample.V < previousSample.V {
+	if isRate && lastSample.F < previousSample.F {
 		// Counter reset.
-		resultValue = lastSample.V
+		resultValue = lastSample.F
 	} else {
-		resultValue = lastSample.V - previousSample.V
+		resultValue = lastSample.F - previousSample.F
 	}
 	sampledInterval := lastSample.T - previousSample.T
@@ -269,9 +273,7 @@ func instantValue(vals []parser.Value, out Vector, isRate bool) Vector {
 		resultValue /= float64(sampledInterval) / 1000
 	}
-	return append(out, Sample{
-		Point: Point{V: resultValue},
-	})
+	return append(out, Sample{F: resultValue})
 }
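`instantValue` (backing irate/idelta) only ever inspects the last two float samples; with `isRate`, a drop between them counts as a counter reset and the raw last value becomes the increase. In isolation (the zero-interval guard is elided here, as it is in the hunk above):

    package main

    import "fmt"

    type FPoint struct {
    	T int64 // milliseconds
    	F float64
    }

    // instantValue sketches irate/idelta over the last two samples.
    func instantValue(samples []FPoint, isRate bool) (float64, bool) {
    	if len(samples) < 2 {
    		return 0, false
    	}
    	last := samples[len(samples)-1]
    	prev := samples[len(samples)-2]
    	v := last.F - prev.F
    	if isRate && last.F < prev.F {
    		v = last.F // counter reset: the slope restarts from zero
    	}
    	if isRate {
    		v /= float64(last.T-prev.T) / 1000 // per-second
    	}
    	return v, true
    }

    func main() {
    	s := []FPoint{{0, 10}, {10000, 20}, {20000, 5}}
    	fmt.Println(instantValue(s, true))  // 0.5 true: reset, 5 over 10s
    	fmt.Println(instantValue(s, false)) // -15 true: plain idelta
    }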
 // Calculate the trend value at the given index i in raw data d.
@@ -300,10 +302,10 @@ func funcHoltWinters(vals []parser.Value, args parser.Expressions, enh *EvalNode
 	samples := vals[0].(Matrix)[0]
 	// The smoothing factor argument.
-	sf := vals[1].(Vector)[0].V
+	sf := vals[1].(Vector)[0].F
 	// The trend factor argument.
-	tf := vals[2].(Vector)[0].V
+	tf := vals[2].(Vector)[0].F
 	// Check that the input parameters are valid.
 	if sf <= 0 || sf >= 1 {
@@ -313,7 +315,7 @@ func funcHoltWinters(vals []parser.Value, args parser.Expressions, enh *EvalNode
 		panic(fmt.Errorf("invalid trend factor. Expected: 0 < tf < 1, got: %f", tf))
 	}
-	l := len(samples.Points)
+	l := len(samples.Floats)
 	// Can't do the smoothing operation with less than two points.
 	if l < 2 {
@@ -322,15 +324,15 @@ func funcHoltWinters(vals []parser.Value, args parser.Expressions, enh *EvalNode
 	var s0, s1, b float64
 	// Set initial values.
-	s1 = samples.Points[0].V
-	b = samples.Points[1].V - samples.Points[0].V
+	s1 = samples.Floats[0].F
+	b = samples.Floats[1].F - samples.Floats[0].F
 	// Run the smoothing operation.
 	var x, y float64
 	for i := 1; i < l; i++ {
 		// Scale the raw value against the smoothing factor.
-		x = sf * samples.Points[i].V
+		x = sf * samples.Floats[i].F
 		// Scale the last smoothed value with the trend at this point.
 		b = calcTrendValue(i-1, tf, s0, s1, b)
@@ -339,9 +341,7 @@ func funcHoltWinters(vals []parser.Value, args parser.Expressions, enh *EvalNode
 		s0, s1 = s1, x+y
 	}
-	return append(enh.Out, Sample{
-		Point: Point{V: s1},
-	})
+	return append(enh.Out, Sample{F: s1})
 }
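`holt_winters` is double exponential smoothing: each step blends the raw sample (weighted by `sf`) with the previous level plus trend (weighted by `1-sf`), while the trend itself blends the latest level change with the previous trend (weighted by `tf`). Extracted as a plain function; the `y = (1-sf)*(s1+b)` step is elided from the hunk above but matches the upstream code:

    package main

    import "fmt"

    // calcTrendValue blends the latest level change with the previous
    // trend estimate, weighted by the trend factor tf.
    func calcTrendValue(i int, tf, s0, s1, b float64) float64 {
    	if i == 0 {
    		return b
    	}
    	return tf*(s1-s0) + (1-tf)*b
    }

    // holtWinters runs double exponential smoothing and returns the last
    // smoothed value, as the PromQL function does. Assumes len(data) >= 2.
    func holtWinters(data []float64, sf, tf float64) float64 {
    	s1 := data[0]
    	b := data[1] - data[0]
    	var s0 float64
    	for i := 1; i < len(data); i++ {
    		x := sf * data[i]          // weighted raw value
    		b = calcTrendValue(i-1, tf, s0, s1, b)
    		y := (1 - sf) * (s1 + b)   // weighted level plus trend
    		s0, s1 = s1, x+y
    	}
    	return s1
    }

    func main() {
    	// A perfectly linear series is tracked exactly.
    	fmt.Println(holtWinters([]float64{1, 2, 3, 4, 5}, 0.5, 0.5)) // 5
    }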
 // === sort(node parser.ValueTypeVector) Vector ===
@@ -365,15 +365,15 @@ func funcSortDesc(vals []parser.Value, args parser.Expressions, enh *EvalNodeHel
 // === clamp(Vector parser.ValueTypeVector, min, max Scalar) Vector ===
 func funcClamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
 	vec := vals[0].(Vector)
-	min := vals[1].(Vector)[0].Point.V
-	max := vals[2].(Vector)[0].Point.V
+	min := vals[1].(Vector)[0].F
+	max := vals[2].(Vector)[0].F
 	if max < min {
 		return enh.Out
 	}
 	for _, el := range vec {
 		enh.Out = append(enh.Out, Sample{
 			Metric: enh.DropMetricName(el.Metric),
-			Point:  Point{V: math.Max(min, math.Min(max, el.V))},
+			F:      math.Max(min, math.Min(max, el.F)),
 		})
 	}
 	return enh.Out
@@ -382,11 +382,11 @@ func funcClamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper
 // === clamp_max(Vector parser.ValueTypeVector, max Scalar) Vector ===
 func funcClampMax(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
 	vec := vals[0].(Vector)
-	max := vals[1].(Vector)[0].Point.V
+	max := vals[1].(Vector)[0].F
 	for _, el := range vec {
 		enh.Out = append(enh.Out, Sample{
 			Metric: enh.DropMetricName(el.Metric),
-			Point:  Point{V: math.Min(max, el.V)},
+			F:      math.Min(max, el.F),
 		})
 	}
 	return enh.Out
@@ -395,11 +395,11 @@ func funcClampMax(vals []parser.Value, args parser.Expressions, enh *EvalNodeHel
 // === clamp_min(Vector parser.ValueTypeVector, min Scalar) Vector ===
 func funcClampMin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
 	vec := vals[0].(Vector)
-	min := vals[1].(Vector)[0].Point.V
+	min := vals[1].(Vector)[0].F
 	for _, el := range vec {
 		enh.Out = append(enh.Out, Sample{
 			Metric: enh.DropMetricName(el.Metric),
-			Point:  Point{V: math.Max(min, el.V)},
+			F:      math.Max(min, el.F),
 		})
 	}
 	return enh.Out
@@ -412,16 +412,16 @@ func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper
 	// Ties are solved by rounding up.
 	toNearest := float64(1)
 	if len(args) >= 2 {
-		toNearest = vals[1].(Vector)[0].Point.V
+		toNearest = vals[1].(Vector)[0].F
 	}
 	// Invert as it seems to cause fewer floating point accuracy issues.
 	toNearestInverse := 1.0 / toNearest
 	for _, el := range vec {
-		v := math.Floor(el.V*toNearestInverse+0.5) / toNearestInverse
+		f := math.Floor(el.F*toNearestInverse+0.5) / toNearestInverse
 		enh.Out = append(enh.Out, Sample{
 			Metric: enh.DropMetricName(el.Metric),
-			Point:  Point{V: v},
+			F:      f,
 		})
 	}
 	return enh.Out
@@ -431,37 +431,38 @@ func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper
 func funcScalar(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
 	v := vals[0].(Vector)
 	if len(v) != 1 {
-		return append(enh.Out, Sample{
-			Point: Point{V: math.NaN()},
-		})
+		return append(enh.Out, Sample{F: math.NaN()})
 	}
-	return append(enh.Out, Sample{
-		Point: Point{V: v[0].V},
-	})
+	return append(enh.Out, Sample{F: v[0].F})
 }
-func aggrOverTime(vals []parser.Value, enh *EvalNodeHelper, aggrFn func([]Point) float64) Vector {
+func aggrOverTime(vals []parser.Value, enh *EvalNodeHelper, aggrFn func(Series) float64) Vector {
 	el := vals[0].(Matrix)[0]
-	return append(enh.Out, Sample{
-		Point: Point{V: aggrFn(el.Points)},
-	})
+	return append(enh.Out, Sample{F: aggrFn(el)})
 }
 // === avg_over_time(Matrix parser.ValueTypeMatrix) Vector ===
 func funcAvgOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
-	return aggrOverTime(vals, enh, func(values []Point) float64 {
+	if len(vals[0].(Matrix)[0].Floats) == 0 {
+		// TODO(beorn7): The passed values only contain
+		// histograms. avg_over_time ignores histograms for now. If
+		// there are only histograms, we have to return without adding
+		// anything to enh.Out.
+		return enh.Out
+	}
+	return aggrOverTime(vals, enh, func(s Series) float64 {
 		var mean, count, c float64
-		for _, v := range values {
+		for _, f := range s.Floats {
 			count++
 			if math.IsInf(mean, 0) {
-				if math.IsInf(v.V, 0) && (mean > 0) == (v.V > 0) {
-					// The `mean` and `v.V` values are `Inf` of the same sign. They
+				if math.IsInf(f.F, 0) && (mean > 0) == (f.F > 0) {
+					// The `mean` and `f.F` values are `Inf` of the same sign. They
 					// can't be subtracted, but the value of `mean` is correct
 					// already.
 					continue
 				}
-				if !math.IsInf(v.V, 0) && !math.IsNaN(v.V) {
+				if !math.IsInf(f.F, 0) && !math.IsNaN(f.F) {
 					// At this stage, the mean is an infinite. If the added
 					// value is neither an Inf or a Nan, we can keep that mean
 					// value.
@@ -471,7 +472,7 @@ func funcAvgOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
 				continue
 			}
 		}
-		mean, c = kahanSumInc(v.V/count-mean/count, mean, c)
+		mean, c = kahanSumInc(f.F/count-mean/count, mean, c)
 	}
 	if math.IsInf(mean, 0) {
@@ -483,8 +484,8 @@ func funcAvgOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
 // === count_over_time(Matrix parser.ValueTypeMatrix) Vector ===
 func funcCountOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
-	return aggrOverTime(vals, enh, func(values []Point) float64 {
-		return float64(len(values))
+	return aggrOverTime(vals, enh, func(s Series) float64 {
+		return float64(len(s.Floats) + len(s.Histograms))
 	})
 }
@@ -492,19 +493,42 @@ func funcCountOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNo
 func funcLastOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
 	el := vals[0].(Matrix)[0]
+	var f FPoint
+	if len(el.Floats) > 0 {
+		f = el.Floats[len(el.Floats)-1]
+	}
+	var h HPoint
+	if len(el.Histograms) > 0 {
+		h = el.Histograms[len(el.Histograms)-1]
+	}
+	if h.H == nil || h.T < f.T {
+		return append(enh.Out, Sample{
+			Metric: el.Metric,
+			F:      f.F,
+		})
+	}
 	return append(enh.Out, Sample{
 		Metric: el.Metric,
-		Point:  Point{V: el.Points[len(el.Points)-1].V},
+		H:      h.H,
 	})
 }
 // === max_over_time(Matrix parser.ValueTypeMatrix) Vector ===
 func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
-	return aggrOverTime(vals, enh, func(values []Point) float64 {
-		max := values[0].V
-		for _, v := range values {
-			if v.V > max || math.IsNaN(max) {
-				max = v.V
+	if len(vals[0].(Matrix)[0].Floats) == 0 {
+		// TODO(beorn7): The passed values only contain
+		// histograms. max_over_time ignores histograms for now. If
+		// there are only histograms, we have to return without adding
+		// anything to enh.Out.
+		return enh.Out
+	}
+	return aggrOverTime(vals, enh, func(s Series) float64 {
+		max := s.Floats[0].F
+		for _, f := range s.Floats {
+			if f.F > max || math.IsNaN(max) {
+				max = f.F
 			}
 		}
 		return max
@@ -513,11 +537,18 @@ func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
 // === min_over_time(Matrix parser.ValueTypeMatrix) Vector ===
 func funcMinOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
-	return aggrOverTime(vals, enh, func(values []Point) float64 {
-		min := values[0].V
-		for _, v := range values {
-			if v.V < min || math.IsNaN(min) {
-				min = v.V
+	if len(vals[0].(Matrix)[0].Floats) == 0 {
+		// TODO(beorn7): The passed values only contain
+		// histograms. min_over_time ignores histograms for now. If
+		// there are only histograms, we have to return without adding
+		// anything to enh.Out.
+		return enh.Out
+	}
+	return aggrOverTime(vals, enh, func(s Series) float64 {
+		min := s.Floats[0].F
+		for _, f := range s.Floats {
+			if f.F < min || math.IsNaN(min) {
+				min = f.F
 			}
 		}
 		return min
@ -526,10 +557,17 @@ func funcMinOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
// === sum_over_time(Matrix parser.ValueTypeMatrix) Vector === // === sum_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcSumOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcSumOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return aggrOverTime(vals, enh, func(values []Point) float64 { if len(vals[0].(Matrix)[0].Floats) == 0 {
// TODO(beorn7): The passed values only contain
// histograms. sum_over_time ignores histograms for now. If
// there are only histograms, we have to return without adding
// anything to enh.Out.
return enh.Out
}
return aggrOverTime(vals, enh, func(s Series) float64 {
var sum, c float64 var sum, c float64
for _, v := range values { for _, f := range s.Floats {
sum, c = kahanSumInc(v.V, sum, c) sum, c = kahanSumInc(f.F, sum, c)
} }
if math.IsInf(sum, 0) { if math.IsInf(sum, 0) {
return sum return sum
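
The kahanSumInc helper used here implements compensated (Neumaier-style) summation: a second accumulator carries the low-order bits that a plain float64 addition would drop. A self-contained sketch, with the helper body reconstructed from its use in this diff:

package main

import (
	"fmt"
	"math"
)

func kahanSumInc(inc, sum, c float64) (newSum, newC float64) {
	t := sum + inc
	switch {
	case math.Abs(sum) >= math.Abs(inc):
		// sum is bigger: low-order digits of inc are lost; recover them in c.
		c += (sum - t) + inc
	default:
		// inc is bigger: low-order digits of sum are lost instead.
		c += (inc - t) + sum
	}
	return t, c
}

func main() {
	var sum, c float64
	for _, v := range []float64{1e16, 1, -1e16} {
		sum, c = kahanSumInc(v, sum, c)
	}
	fmt.Println(sum + c) // 1; naive summation would print 0
}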
@ -540,29 +578,41 @@ func funcSumOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
// === quantile_over_time(Matrix parser.ValueTypeMatrix) Vector === // === quantile_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcQuantileOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcQuantileOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
q := vals[0].(Vector)[0].V q := vals[0].(Vector)[0].F
el := vals[1].(Matrix)[0] el := vals[1].(Matrix)[0]
if len(el.Floats) == 0 {
values := make(vectorByValueHeap, 0, len(el.Points)) // TODO(beorn7): The passed values only contain
for _, v := range el.Points { // histograms. quantile_over_time ignores histograms for now. If
values = append(values, Sample{Point: Point{V: v.V}}) // there are only histograms, we have to return without adding
// anything to enh.Out.
return enh.Out
} }
return append(enh.Out, Sample{
Point: Point{V: quantile(q, values)}, values := make(vectorByValueHeap, 0, len(el.Floats))
}) for _, f := range el.Floats {
values = append(values, Sample{F: f.F})
}
return append(enh.Out, Sample{F: quantile(q, values)})
} }
// === stddev_over_time(Matrix parser.ValueTypeMatrix) Vector === // === stddev_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return aggrOverTime(vals, enh, func(values []Point) float64 { if len(vals[0].(Matrix)[0].Floats) == 0 {
// TODO(beorn7): The passed values only contain
// histograms. stddev_over_time ignores histograms for now. If
// there are only histograms, we have to return without adding
// anything to enh.Out.
return enh.Out
}
return aggrOverTime(vals, enh, func(s Series) float64 {
var count float64 var count float64
var mean, cMean float64 var mean, cMean float64
var aux, cAux float64 var aux, cAux float64
for _, v := range values { for _, f := range s.Floats {
count++ count++
delta := v.V - (mean + cMean) delta := f.F - (mean + cMean)
mean, cMean = kahanSumInc(delta/count, mean, cMean) mean, cMean = kahanSumInc(delta/count, mean, cMean)
aux, cAux = kahanSumInc(delta*(v.V-(mean+cMean)), aux, cAux) aux, cAux = kahanSumInc(delta*(f.F-(mean+cMean)), aux, cAux)
} }
return math.Sqrt((aux + cAux) / count) return math.Sqrt((aux + cAux) / count)
}) })
@ -570,15 +620,22 @@ func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalN
// === stdvar_over_time(Matrix parser.ValueTypeMatrix) Vector === // === stdvar_over_time(Matrix parser.ValueTypeMatrix) Vector ===
func funcStdvarOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcStdvarOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return aggrOverTime(vals, enh, func(values []Point) float64 { if len(vals[0].(Matrix)[0].Floats) == 0 {
// TODO(beorn7): The passed values only contain
// histograms. stdvar_over_time ignores histograms for now. If
// there are only histograms, we have to return without adding
// anything to enh.Out.
return enh.Out
}
return aggrOverTime(vals, enh, func(s Series) float64 {
var count float64 var count float64
var mean, cMean float64 var mean, cMean float64
var aux, cAux float64 var aux, cAux float64
for _, v := range values { for _, f := range s.Floats {
count++ count++
delta := v.V - (mean + cMean) delta := f.F - (mean + cMean)
mean, cMean = kahanSumInc(delta/count, mean, cMean) mean, cMean = kahanSumInc(delta/count, mean, cMean)
aux, cAux = kahanSumInc(delta*(v.V-(mean+cMean)), aux, cAux) aux, cAux = kahanSumInc(delta*(f.F-(mean+cMean)), aux, cAux)
} }
return (aux + cAux) / count return (aux + cAux) / count
}) })
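
Both stddev_over_time and stdvar_over_time use the same single-pass (Welford-style) computation: a running mean plus an auxiliary sum of squared deviations, Kahan-compensated in the real code. A condensed sketch with plain additions:

package main

import (
	"fmt"
	"math"
)

func stddev(values []float64) float64 {
	var count, mean, aux float64
	for _, v := range values {
		count++
		delta := v - mean
		mean += delta / count
		// aux accumulates the sum of squared deviations incrementally.
		aux += delta * (v - mean)
	}
	return math.Sqrt(aux / count) // stdvar_over_time would return aux / count
}

func main() {
	fmt.Println(stddev([]float64{2, 4, 4, 4, 5, 5, 7, 9})) // 2 (population stddev)
}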
@ -592,7 +649,7 @@ func funcAbsent(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelpe
return append(enh.Out, return append(enh.Out,
Sample{ Sample{
Metric: createLabelsForAbsentFunction(args[0]), Metric: createLabelsForAbsentFunction(args[0]),
Point: Point{V: 1}, F: 1,
}) })
} }
@ -602,25 +659,24 @@ func funcAbsent(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelpe
// Due to engine optimization, this function is only called when this condition is true. // Due to engine optimization, this function is only called when this condition is true.
// Then, the engine post-processes the results to get the expected output. // Then, the engine post-processes the results to get the expected output.
func funcAbsentOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcAbsentOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return append(enh.Out, return append(enh.Out, Sample{F: 1})
Sample{
Point: Point{V: 1},
})
} }
// === present_over_time(Vector parser.ValueTypeMatrix) Vector === // === present_over_time(Vector parser.ValueTypeMatrix) Vector ===
func funcPresentOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcPresentOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return aggrOverTime(vals, enh, func(values []Point) float64 { return aggrOverTime(vals, enh, func(s Series) float64 {
return 1 return 1
}) })
} }
func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float64) Vector { func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float64) Vector {
for _, el := range vals[0].(Vector) { for _, el := range vals[0].(Vector) {
enh.Out = append(enh.Out, Sample{ if el.H == nil { // Process only float samples.
Metric: enh.DropMetricName(el.Metric), enh.Out = append(enh.Out, Sample{
Point: Point{V: f(el.V)}, Metric: enh.DropMetricName(el.Metric),
}) F: f(el.F),
})
}
} }
return enh.Out return enh.Out
} }
@ -741,9 +797,7 @@ func funcDeg(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper)
// === pi() Scalar === // === pi() Scalar ===
func funcPi(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcPi(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
return Vector{Sample{Point: Point{ return Vector{Sample{F: math.Pi}}
V: math.Pi,
}}}
} }
// === sgn(Vector parser.ValueTypeVector) Vector === // === sgn(Vector parser.ValueTypeVector) Vector ===
@ -764,7 +818,7 @@ func funcTimestamp(vals []parser.Value, args parser.Expressions, enh *EvalNodeHe
for _, el := range vec { for _, el := range vec {
enh.Out = append(enh.Out, Sample{ enh.Out = append(enh.Out, Sample{
Metric: enh.DropMetricName(el.Metric), Metric: enh.DropMetricName(el.Metric),
Point: Point{V: float64(el.T) / 1000}, F: float64(el.T) / 1000,
}) })
} }
return enh.Out return enh.Out
@ -793,7 +847,7 @@ func kahanSumInc(inc, sum, c float64) (newSum, newC float64) {
// linearRegression performs a least-square linear regression analysis on the // linearRegression performs a least-square linear regression analysis on the
// provided SamplePairs. It returns the slope, and the intercept value at the // provided SamplePairs. It returns the slope, and the intercept value at the
// provided time. // provided time.
func linearRegression(samples []Point, interceptTime int64) (slope, intercept float64) { func linearRegression(samples []FPoint, interceptTime int64) (slope, intercept float64) {
var ( var (
n float64 n float64
sumX, cX float64 sumX, cX float64
@ -803,18 +857,18 @@ func linearRegression(samples []Point, interceptTime int64) (slope, intercept fl
initY float64 initY float64
constY bool constY bool
) )
initY = samples[0].V initY = samples[0].F
constY = true constY = true
for i, sample := range samples { for i, sample := range samples {
// Set constY to false if any new y values are encountered. // Set constY to false if any new y values are encountered.
if constY && i > 0 && sample.V != initY { if constY && i > 0 && sample.F != initY {
constY = false constY = false
} }
n += 1.0 n += 1.0
x := float64(sample.T-interceptTime) / 1e3 x := float64(sample.T-interceptTime) / 1e3
sumX, cX = kahanSumInc(x, sumX, cX) sumX, cX = kahanSumInc(x, sumX, cX)
sumY, cY = kahanSumInc(sample.V, sumY, cY) sumY, cY = kahanSumInc(sample.F, sumY, cY)
sumXY, cXY = kahanSumInc(x*sample.V, sumXY, cXY) sumXY, cXY = kahanSumInc(x*sample.F, sumXY, cXY)
sumX2, cX2 = kahanSumInc(x*x, sumX2, cX2) sumX2, cX2 = kahanSumInc(x*x, sumX2, cX2)
} }
if constY { if constY {
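
A condensed sketch of the least-squares fit above: accumulate Σx, Σy, Σxy, Σx² over the samples (Kahan-compensated in the real code, plain sums here), short-circuit constant-y series to slope 0, and otherwise solve the normal equations. The solve step is a simplification of the real body, which the hunk truncates:

package main

import "fmt"

type FPoint struct {
	T int64
	F float64
}

func linearRegression(samples []FPoint, interceptTime int64) (slope, intercept float64) {
	var n, sumX, sumY, sumXY, sumX2 float64
	initY, constY := samples[0].F, true
	for i, s := range samples {
		// Set constY to false if any new y values are encountered.
		if constY && i > 0 && s.F != initY {
			constY = false
		}
		n++
		x := float64(s.T-interceptTime) / 1e3 // seconds relative to interceptTime
		sumX += x
		sumY += s.F
		sumXY += x * s.F
		sumX2 += x * x
	}
	if constY {
		return 0, initY // avoids numerical noise when y never changes
	}
	covXY := sumXY - sumX*sumY/n
	varX := sumX2 - sumX*sumX/n
	slope = covXY / varX
	intercept = sumY/n - slope*sumX/n
	return slope, intercept
}

func main() {
	// y = 2x + 1 sampled at t = 0s, 1s, 2s.
	s := []FPoint{{0, 1}, {1000, 3}, {2000, 5}}
	fmt.Println(linearRegression(s, 0)) // 2 1
}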
@ -842,33 +896,29 @@ func funcDeriv(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper
// No sense in trying to compute a derivative without at least two points. // No sense in trying to compute a derivative without at least two points.
// Drop this Vector element. // Drop this Vector element.
if len(samples.Points) < 2 { if len(samples.Floats) < 2 {
return enh.Out return enh.Out
} }
// We pass in an arbitrary timestamp that is near the values in use // We pass in an arbitrary timestamp that is near the values in use
// to avoid floating point accuracy issues, see // to avoid floating point accuracy issues, see
// https://github.com/prometheus/prometheus/issues/2674 // https://github.com/prometheus/prometheus/issues/2674
slope, _ := linearRegression(samples.Points, samples.Points[0].T) slope, _ := linearRegression(samples.Floats, samples.Floats[0].T)
return append(enh.Out, Sample{ return append(enh.Out, Sample{F: slope})
Point: Point{V: slope},
})
} }
// === predict_linear(node parser.ValueTypeMatrix, k parser.ValueTypeScalar) Vector === // === predict_linear(node parser.ValueTypeMatrix, k parser.ValueTypeScalar) Vector ===
func funcPredictLinear(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcPredictLinear(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
samples := vals[0].(Matrix)[0] samples := vals[0].(Matrix)[0]
duration := vals[1].(Vector)[0].V duration := vals[1].(Vector)[0].F
// No sense in trying to predict anything without at least two points. // No sense in trying to predict anything without at least two points.
// Drop this Vector element. // Drop this Vector element.
if len(samples.Points) < 2 { if len(samples.Floats) < 2 {
return enh.Out return enh.Out
} }
slope, intercept := linearRegression(samples.Points, enh.Ts) slope, intercept := linearRegression(samples.Floats, enh.Ts)
return append(enh.Out, Sample{ return append(enh.Out, Sample{F: slope*duration + intercept})
Point: Point{V: slope*duration + intercept},
})
} }
// === histogram_count(Vector parser.ValueTypeVector) Vector === // === histogram_count(Vector parser.ValueTypeVector) Vector ===
@ -882,7 +932,7 @@ func funcHistogramCount(vals []parser.Value, args parser.Expressions, enh *EvalN
} }
enh.Out = append(enh.Out, Sample{ enh.Out = append(enh.Out, Sample{
Metric: enh.DropMetricName(sample.Metric), Metric: enh.DropMetricName(sample.Metric),
Point: Point{V: sample.H.Count}, F: sample.H.Count,
}) })
} }
return enh.Out return enh.Out
@ -899,7 +949,7 @@ func funcHistogramSum(vals []parser.Value, args parser.Expressions, enh *EvalNod
} }
enh.Out = append(enh.Out, Sample{ enh.Out = append(enh.Out, Sample{
Metric: enh.DropMetricName(sample.Metric), Metric: enh.DropMetricName(sample.Metric),
Point: Point{V: sample.H.Sum}, F: sample.H.Sum,
}) })
} }
return enh.Out return enh.Out
@ -907,8 +957,8 @@ func funcHistogramSum(vals []parser.Value, args parser.Expressions, enh *EvalNod
// === histogram_fraction(lower, upper parser.ValueTypeScalar, Vector parser.ValueTypeVector) Vector === // === histogram_fraction(lower, upper parser.ValueTypeScalar, Vector parser.ValueTypeVector) Vector ===
func funcHistogramFraction(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcHistogramFraction(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
lower := vals[0].(Vector)[0].V lower := vals[0].(Vector)[0].F
upper := vals[1].(Vector)[0].V upper := vals[1].(Vector)[0].F
inVec := vals[2].(Vector) inVec := vals[2].(Vector)
for _, sample := range inVec { for _, sample := range inVec {
@ -918,7 +968,7 @@ func funcHistogramFraction(vals []parser.Value, args parser.Expressions, enh *Ev
} }
enh.Out = append(enh.Out, Sample{ enh.Out = append(enh.Out, Sample{
Metric: enh.DropMetricName(sample.Metric), Metric: enh.DropMetricName(sample.Metric),
Point: Point{V: histogramFraction(lower, upper, sample.H)}, F: histogramFraction(lower, upper, sample.H),
}) })
} }
return enh.Out return enh.Out
@ -926,7 +976,7 @@ func funcHistogramFraction(vals []parser.Value, args parser.Expressions, enh *Ev
// === histogram_quantile(k parser.ValueTypeScalar, Vector parser.ValueTypeVector) Vector === // === histogram_quantile(k parser.ValueTypeScalar, Vector parser.ValueTypeVector) Vector ===
func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
q := vals[0].(Vector)[0].V q := vals[0].(Vector)[0].F
inVec := vals[1].(Vector) inVec := vals[1].(Vector)
if enh.signatureToMetricWithBuckets == nil { if enh.signatureToMetricWithBuckets == nil {
@ -960,12 +1010,12 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev
if !ok { if !ok {
sample.Metric = labels.NewBuilder(sample.Metric). sample.Metric = labels.NewBuilder(sample.Metric).
Del(excludedLabels...). Del(excludedLabels...).
Labels(labels.EmptyLabels()) Labels()
mb = &metricWithBuckets{sample.Metric, nil} mb = &metricWithBuckets{sample.Metric, nil}
enh.signatureToMetricWithBuckets[string(enh.lblBuf)] = mb enh.signatureToMetricWithBuckets[string(enh.lblBuf)] = mb
} }
mb.buckets = append(mb.buckets, bucket{upperBound, sample.V}) mb.buckets = append(mb.buckets, bucket{upperBound, sample.F})
} }
@ -985,7 +1035,7 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev
enh.Out = append(enh.Out, Sample{ enh.Out = append(enh.Out, Sample{
Metric: enh.DropMetricName(sample.Metric), Metric: enh.DropMetricName(sample.Metric),
Point: Point{V: histogramQuantile(q, sample.H)}, F: histogramQuantile(q, sample.H),
}) })
} }
@ -993,7 +1043,7 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev
if len(mb.buckets) > 0 { if len(mb.buckets) > 0 {
enh.Out = append(enh.Out, Sample{ enh.Out = append(enh.Out, Sample{
Metric: mb.metric, Metric: mb.metric,
Point: Point{V: bucketQuantile(q, mb.buckets)}, F: bucketQuantile(q, mb.buckets),
}) })
} }
} }
@ -1003,40 +1053,55 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev
// === resets(Matrix parser.ValueTypeMatrix) Vector === // === resets(Matrix parser.ValueTypeMatrix) Vector ===
func funcResets(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcResets(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
samples := vals[0].(Matrix)[0] floats := vals[0].(Matrix)[0].Floats
histograms := vals[0].(Matrix)[0].Histograms
resets := 0 resets := 0
prev := samples.Points[0].V
for _, sample := range samples.Points[1:] { if len(floats) > 1 {
current := sample.V prev := floats[0].F
if current < prev { for _, sample := range floats[1:] {
resets++ current := sample.F
if current < prev {
resets++
}
prev = current
} }
prev = current
} }
return append(enh.Out, Sample{ if len(histograms) > 1 {
Point: Point{V: float64(resets)}, prev := histograms[0].H
}) for _, sample := range histograms[1:] {
current := sample.H
if current.DetectReset(prev) {
resets++
}
prev = current
}
}
return append(enh.Out, Sample{F: float64(resets)})
} }
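
The float branch of funcResets treats any sample lower than its predecessor as a counter reset; the new histogram branch delegates the same question to FloatHistogram.DetectReset. A float-only sketch:

package main

import "fmt"

func countResets(floats []float64) int {
	resets := 0
	if len(floats) > 1 {
		prev := floats[0]
		for _, current := range floats[1:] {
			// A drop relative to the previous sample is a reset.
			if current < prev {
				resets++
			}
			prev = current
		}
	}
	return resets
}

func main() {
	fmt.Println(countResets([]float64{1, 5, 2, 3, 0})) // 2: drops at 5→2 and 3→0
}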
// === changes(Matrix parser.ValueTypeMatrix) Vector === // === changes(Matrix parser.ValueTypeMatrix) Vector ===
func funcChanges(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector { func funcChanges(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) Vector {
samples := vals[0].(Matrix)[0] floats := vals[0].(Matrix)[0].Floats
changes := 0 changes := 0
prev := samples.Points[0].V
for _, sample := range samples.Points[1:] { if len(floats) == 0 {
current := sample.V // TODO(beorn7): Only histogram values, still need to add support.
return enh.Out
}
prev := floats[0].F
for _, sample := range floats[1:] {
current := sample.F
if current != prev && !(math.IsNaN(current) && math.IsNaN(prev)) { if current != prev && !(math.IsNaN(current) && math.IsNaN(prev)) {
changes++ changes++
} }
prev = current prev = current
} }
return append(enh.Out, Sample{ return append(enh.Out, Sample{F: float64(changes)})
Point: Point{V: float64(changes)},
})
} }
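
The NaN guard in funcChanges is worth spelling out: since NaN != NaN is always true, consecutive NaNs would otherwise each count as a change. A minimal sketch:

package main

import (
	"fmt"
	"math"
)

func countChanges(floats []float64) int {
	changes := 0
	prev := floats[0]
	for _, current := range floats[1:] {
		// Two NaNs in a row are treated as "no change".
		if current != prev && !(math.IsNaN(current) && math.IsNaN(prev)) {
			changes++
		}
		prev = current
	}
	return changes
}

func main() {
	nan := math.NaN()
	fmt.Println(countChanges([]float64{1, 1, nan, nan, 2})) // 2: 1→NaN and NaN→2
}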
// === label_replace(Vector parser.ValueTypeVector, dst_label, replacement, src_labelname, regex parser.ValueTypeString) Vector === // === label_replace(Vector parser.ValueTypeVector, dst_label, replacement, src_labelname, regex parser.ValueTypeString) Vector ===
@ -1080,14 +1145,15 @@ func funcLabelReplace(vals []parser.Value, args parser.Expressions, enh *EvalNod
if len(res) > 0 { if len(res) > 0 {
lb.Set(dst, string(res)) lb.Set(dst, string(res))
} }
outMetric = lb.Labels(labels.EmptyLabels()) outMetric = lb.Labels()
enh.Dmn[h] = outMetric enh.Dmn[h] = outMetric
} }
} }
enh.Out = append(enh.Out, Sample{ enh.Out = append(enh.Out, Sample{
Metric: outMetric, Metric: outMetric,
Point: Point{V: el.Point.V}, F: el.F,
H: el.H,
}) })
} }
return enh.Out return enh.Out
@ -1098,7 +1164,7 @@ func funcVector(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelpe
return append(enh.Out, return append(enh.Out,
Sample{ Sample{
Metric: labels.Labels{}, Metric: labels.Labels{},
Point: Point{V: vals[0].(Vector)[0].V}, F: vals[0].(Vector)[0].F,
}) })
} }
@ -1148,13 +1214,14 @@ func funcLabelJoin(vals []parser.Value, args parser.Expressions, enh *EvalNodeHe
lb.Set(dst, strval) lb.Set(dst, strval)
} }
outMetric = lb.Labels(labels.EmptyLabels()) outMetric = lb.Labels()
enh.Dmn[h] = outMetric enh.Dmn[h] = outMetric
} }
enh.Out = append(enh.Out, Sample{ enh.Out = append(enh.Out, Sample{
Metric: outMetric, Metric: outMetric,
Point: Point{V: el.Point.V}, F: el.F,
H: el.H,
}) })
} }
return enh.Out return enh.Out
@ -1166,15 +1233,15 @@ func dateWrapper(vals []parser.Value, enh *EvalNodeHelper, f func(time.Time) flo
return append(enh.Out, return append(enh.Out,
Sample{ Sample{
Metric: labels.Labels{}, Metric: labels.Labels{},
Point: Point{V: f(time.Unix(enh.Ts/1000, 0).UTC())}, F: f(time.Unix(enh.Ts/1000, 0).UTC()),
}) })
} }
for _, el := range vals[0].(Vector) { for _, el := range vals[0].(Vector) {
t := time.Unix(int64(el.V), 0).UTC() t := time.Unix(int64(el.F), 0).UTC()
enh.Out = append(enh.Out, Sample{ enh.Out = append(enh.Out, Sample{
Metric: enh.DropMetricName(el.Metric), Metric: enh.DropMetricName(el.Metric),
Point: Point{V: f(t)}, F: f(t),
}) })
} }
return enh.Out return enh.Out
@ -1332,10 +1399,20 @@ func (s vectorByValueHeap) Len() int {
} }
func (s vectorByValueHeap) Less(i, j int) bool { func (s vectorByValueHeap) Less(i, j int) bool {
if math.IsNaN(s[i].V) { // We compare histograms based on their sum of observations.
// TODO(beorn7): Is that what we want?
vi, vj := s[i].F, s[j].F
if s[i].H != nil {
vi = s[i].H.Sum
}
if s[j].H != nil {
vj = s[j].H.Sum
}
if math.IsNaN(vi) {
return true return true
} }
return s[i].V < s[j].V return vi < vj
} }
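
The new ordering rule compares mixed samples by letting a histogram's Sum stand in for the float value, with NaN sorting toward the front. A sketch under simplified types (a non-nil sum marks a histogram sample, standing in for H.Sum):

package main

import (
	"fmt"
	"math"
	"sort"
)

type sample struct {
	f   float64
	sum *float64 // non-nil marks a histogram sample; stand-in for H.Sum
}

func value(s sample) float64 {
	if s.sum != nil {
		return *s.sum
	}
	return s.f
}

func main() {
	hSum := 2.5
	v := []sample{{f: 3}, {sum: &hSum}, {f: math.NaN()}}
	sort.Slice(v, func(i, j int) bool {
		vi, vj := value(v[i]), value(v[j])
		if math.IsNaN(vi) {
			return true // NaN sorts first, as in vectorByValueHeap.Less
		}
		return vi < vj
	})
	for _, s := range v {
		fmt.Println(value(s)) // NaN, then 2.5, then 3
	}
}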
func (s vectorByValueHeap) Swap(i, j int) { func (s vectorByValueHeap) Swap(i, j int) {
@ -1361,10 +1438,20 @@ func (s vectorByReverseValueHeap) Len() int {
} }
func (s vectorByReverseValueHeap) Less(i, j int) bool { func (s vectorByReverseValueHeap) Less(i, j int) bool {
if math.IsNaN(s[i].V) { // We compare histograms based on their sum of observations.
// TODO(beorn7): Is that what we want?
vi, vj := s[i].F, s[j].F
if s[i].H != nil {
vi = s[i].H.Sum
}
if s[j].H != nil {
vj = s[j].H.Sum
}
if math.IsNaN(vi) {
return true return true
} }
return s[i].V > s[j].V return vi > vj
} }
func (s vectorByReverseValueHeap) Swap(i, j int) { func (s vectorByReverseValueHeap) Swap(i, j int) {
@ -1414,7 +1501,7 @@ func createLabelsForAbsentFunction(expr parser.Expr) labels.Labels {
} }
} }
return b.Labels(labels.EmptyLabels()) return b.Labels()
} }
func stringFromArg(e parser.Expr) string { func stringFromArg(e parser.Expr) string {

View file

@ -64,7 +64,7 @@ func TestDeriv(t *testing.T) {
vec, _ := result.Vector() vec, _ := result.Vector()
require.Equal(t, 1, len(vec), "Expected 1 result, got %d", len(vec)) require.Equal(t, 1, len(vec), "Expected 1 result, got %d", len(vec))
require.Equal(t, 0.0, vec[0].V, "Expected 0.0 as value, got %f", vec[0].V) require.Equal(t, 0.0, vec[0].F, "Expected 0.0 as value, got %f", vec[0].F)
} }
func TestFunctionList(t *testing.T) { func TestFunctionList(t *testing.T) {

View file

@ -567,7 +567,7 @@ label_matcher : IDENTIFIER match_op STRING
*/ */
metric : metric_identifier label_set metric : metric_identifier label_set
{ b := labels.NewBuilder($2); b.Set(labels.MetricName, $1.Val); $$ = b.Labels(labels.EmptyLabels()) } { b := labels.NewBuilder($2); b.Set(labels.MetricName, $1.Val); $$ = b.Labels() }
| label_set | label_set
{$$ = $1} {$$ = $1}
; ;

View file

@ -1494,7 +1494,7 @@ yydefault:
{ {
b := labels.NewBuilder(yyDollar[2].labels) b := labels.NewBuilder(yyDollar[2].labels)
b.Set(labels.MetricName, yyDollar[1].item.Val) b.Set(labels.MetricName, yyDollar[1].item.Val)
yyVAL.labels = b.Labels(labels.EmptyLabels()) yyVAL.labels = b.Labels()
} }
case 96: case 96:
yyDollar = yyS[yypt-1 : yypt+1] yyDollar = yyS[yypt-1 : yypt+1]

View file

@ -382,5 +382,5 @@ func quantile(q float64, values vectorByValueHeap) float64 {
upperIndex := math.Min(n-1, lowerIndex+1) upperIndex := math.Min(n-1, lowerIndex+1)
weight := rank - math.Floor(rank) weight := rank - math.Floor(rank)
return values[int(lowerIndex)].V*(1-weight) + values[int(upperIndex)].V*weight return values[int(lowerIndex)].F*(1-weight) + values[int(upperIndex)].F*weight
} }
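
The interpolation in this hunk splits the target rank q*(n-1) into two neighbouring indices and linearly blends the two values. A simplified sketch over a plain sorted slice (the real function additionally clamps q outside [0,1] to ±Inf and reads .F from heap entries):

package main

import (
	"fmt"
	"math"
)

func quantile(q float64, values []float64) float64 {
	n := float64(len(values))
	rank := q * (n - 1)
	lowerIndex := math.Max(0, math.Floor(rank))
	upperIndex := math.Min(n-1, lowerIndex+1)
	weight := rank - math.Floor(rank)
	return values[int(lowerIndex)]*(1-weight) + values[int(upperIndex)]*weight
}

func main() {
	fmt.Println(quantile(0.5, []float64{1, 2, 3, 4}))  // 2.5
	fmt.Println(quantile(0.25, []float64{1, 2, 3, 4})) // 1.75
}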

View file

@ -281,7 +281,7 @@ func (*evalCmd) testCmd() {}
type loadCmd struct { type loadCmd struct {
gap time.Duration gap time.Duration
metrics map[uint64]labels.Labels metrics map[uint64]labels.Labels
defs map[uint64][]Point defs map[uint64][]FPoint
exemplars map[uint64][]exemplar.Exemplar exemplars map[uint64][]exemplar.Exemplar
} }
@ -289,7 +289,7 @@ func newLoadCmd(gap time.Duration) *loadCmd {
return &loadCmd{ return &loadCmd{
gap: gap, gap: gap,
metrics: map[uint64]labels.Labels{}, metrics: map[uint64]labels.Labels{},
defs: map[uint64][]Point{}, defs: map[uint64][]FPoint{},
exemplars: map[uint64][]exemplar.Exemplar{}, exemplars: map[uint64][]exemplar.Exemplar{},
} }
} }
@ -302,13 +302,13 @@ func (cmd loadCmd) String() string {
func (cmd *loadCmd) set(m labels.Labels, vals ...parser.SequenceValue) { func (cmd *loadCmd) set(m labels.Labels, vals ...parser.SequenceValue) {
h := m.Hash() h := m.Hash()
samples := make([]Point, 0, len(vals)) samples := make([]FPoint, 0, len(vals))
ts := testStartTime ts := testStartTime
for _, v := range vals { for _, v := range vals {
if !v.Omitted { if !v.Omitted {
samples = append(samples, Point{ samples = append(samples, FPoint{
T: ts.UnixNano() / int64(time.Millisecond/time.Nanosecond), T: ts.UnixNano() / int64(time.Millisecond/time.Nanosecond),
V: v.Value, F: v.Value,
}) })
} }
ts = ts.Add(cmd.gap) ts = ts.Add(cmd.gap)
@ -323,7 +323,7 @@ func (cmd *loadCmd) append(a storage.Appender) error {
m := cmd.metrics[h] m := cmd.metrics[h]
for _, s := range smpls { for _, s := range smpls {
if _, err := a.Append(0, m, s.T, s.V); err != nil { if _, err := a.Append(0, m, s.T, s.F); err != nil {
return err return err
} }
} }
@ -399,8 +399,8 @@ func (ev *evalCmd) compareResult(result parser.Value) error {
if ev.ordered && exp.pos != pos+1 { if ev.ordered && exp.pos != pos+1 {
return fmt.Errorf("expected metric %s with %v at position %d but was at %d", v.Metric, exp.vals, exp.pos, pos+1) return fmt.Errorf("expected metric %s with %v at position %d but was at %d", v.Metric, exp.vals, exp.pos, pos+1)
} }
if !almostEqual(exp.vals[0].Value, v.V) { if !almostEqual(exp.vals[0].Value, v.F) {
return fmt.Errorf("expected %v for %s but got %v", exp.vals[0].Value, v.Metric, v.V) return fmt.Errorf("expected %v for %s but got %v", exp.vals[0].Value, v.Metric, v.F)
} }
seen[fp] = true seen[fp] = true
@ -409,7 +409,7 @@ func (ev *evalCmd) compareResult(result parser.Value) error {
if !seen[fp] { if !seen[fp] {
fmt.Println("vector result", len(val), ev.expr) fmt.Println("vector result", len(val), ev.expr)
for _, ss := range val { for _, ss := range val {
fmt.Println(" ", ss.Metric, ss.Point) fmt.Println(" ", ss.Metric, ss.T, ss.F)
} }
return fmt.Errorf("expected metric %s with %v not found", ev.metrics[fp], expVals) return fmt.Errorf("expected metric %s with %v not found", ev.metrics[fp], expVals)
} }
@ -576,15 +576,15 @@ func (t *Test) exec(tc testCommand) error {
mat := rangeRes.Value.(Matrix) mat := rangeRes.Value.(Matrix)
vec := make(Vector, 0, len(mat)) vec := make(Vector, 0, len(mat))
for _, series := range mat { for _, series := range mat {
for _, point := range series.Points { for _, point := range series.Floats {
if point.T == timeMilliseconds(iq.evalTime) { if point.T == timeMilliseconds(iq.evalTime) {
vec = append(vec, Sample{Metric: series.Metric, Point: point}) vec = append(vec, Sample{Metric: series.Metric, T: point.T, F: point.F})
break break
} }
} }
} }
if _, ok := res.Value.(Scalar); ok { if _, ok := res.Value.(Scalar); ok {
err = cmd.compareResult(Scalar{V: vec[0].Point.V}) err = cmd.compareResult(Scalar{V: vec[0].F})
} else { } else {
err = cmd.compareResult(vec) err = cmd.compareResult(vec)
} }
@ -763,7 +763,7 @@ func (ll *LazyLoader) appendTill(ts int64) error {
ll.loadCmd.defs[h] = smpls[i:] ll.loadCmd.defs[h] = smpls[i:]
break break
} }
if _, err := app.Append(0, m, s.T, s.V); err != nil { if _, err := app.Append(0, m, s.T, s.F); err != nil {
return err return err
} }
if i == len(smpls)-1 { if i == len(smpls)-1 {

View file

@ -47,8 +47,8 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
series: []Series{ series: []Series{
{ {
Metric: labels.FromStrings("__name__", "metric1"), Metric: labels.FromStrings("__name__", "metric1"),
Points: []Point{ Floats: []FPoint{
{0, 1, nil}, {10000, 2, nil}, {20000, 3, nil}, {30000, 4, nil}, {40000, 5, nil}, {0, 1}, {10000, 2}, {20000, 3}, {30000, 4}, {40000, 5},
}, },
}, },
}, },
@ -58,8 +58,8 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
series: []Series{ series: []Series{
{ {
Metric: labels.FromStrings("__name__", "metric1"), Metric: labels.FromStrings("__name__", "metric1"),
Points: []Point{ Floats: []FPoint{
{0, 1, nil}, {10000, 2, nil}, {20000, 3, nil}, {30000, 4, nil}, {40000, 5, nil}, {0, 1}, {10000, 2}, {20000, 3}, {30000, 4}, {40000, 5},
}, },
}, },
}, },
@ -69,8 +69,8 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
series: []Series{ series: []Series{
{ {
Metric: labels.FromStrings("__name__", "metric1"), Metric: labels.FromStrings("__name__", "metric1"),
Points: []Point{ Floats: []FPoint{
{0, 1, nil}, {10000, 2, nil}, {20000, 3, nil}, {30000, 4, nil}, {40000, 5, nil}, {50000, 6, nil}, {60000, 7, nil}, {0, 1}, {10000, 2}, {20000, 3}, {30000, 4}, {40000, 5}, {50000, 6}, {60000, 7},
}, },
}, },
}, },
@ -89,14 +89,14 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
series: []Series{ series: []Series{
{ {
Metric: labels.FromStrings("__name__", "metric1"), Metric: labels.FromStrings("__name__", "metric1"),
Points: []Point{ Floats: []FPoint{
{0, 1, nil}, {10000, 1, nil}, {20000, 1, nil}, {30000, 1, nil}, {40000, 1, nil}, {50000, 1, nil}, {0, 1}, {10000, 1}, {20000, 1}, {30000, 1}, {40000, 1}, {50000, 1},
}, },
}, },
{ {
Metric: labels.FromStrings("__name__", "metric2"), Metric: labels.FromStrings("__name__", "metric2"),
Points: []Point{ Floats: []FPoint{
{0, 1, nil}, {10000, 2, nil}, {20000, 3, nil}, {30000, 4, nil}, {40000, 5, nil}, {50000, 6, nil}, {60000, 7, nil}, {70000, 8, nil}, {0, 1}, {10000, 2}, {20000, 3}, {30000, 4}, {40000, 5}, {50000, 6}, {60000, 7}, {70000, 8},
}, },
}, },
}, },
@ -146,7 +146,7 @@ func TestLazyLoader_WithSamplesTill(t *testing.T) {
it := storageSeries.Iterator(nil) it := storageSeries.Iterator(nil)
for it.Next() == chunkenc.ValFloat { for it.Next() == chunkenc.ValFloat {
t, v := it.At() t, v := it.At()
got.Points = append(got.Points, Point{T: t, V: v}) got.Floats = append(got.Floats, FPoint{T: t, F: v})
} }
require.NoError(t, it.Err()) require.NoError(t, it.Err())

View file

@ -17,6 +17,7 @@ import (
"encoding/json" "encoding/json"
"errors" "errors"
"fmt" "fmt"
"math"
"strconv" "strconv"
"strings" "strings"
@ -64,76 +65,72 @@ func (s Scalar) MarshalJSON() ([]byte, error) {
// Series is a stream of data points belonging to a metric. // Series is a stream of data points belonging to a metric.
type Series struct { type Series struct {
Metric labels.Labels Metric labels.Labels `json:"metric"`
Points []Point Floats []FPoint `json:"values,omitempty"`
Histograms []HPoint `json:"histograms,omitempty"`
} }
func (s Series) String() string { func (s Series) String() string {
vals := make([]string, len(s.Points)) // TODO(beorn7): This currently renders floats first and then
for i, v := range s.Points { // histograms, each sorted by timestamp. Maybe, in mixed series, that's
vals[i] = v.String() // fine. Maybe, however, primary sorting by timestamp is preferred, in
// which case this has to be changed.
vals := make([]string, 0, len(s.Floats)+len(s.Histograms))
for _, f := range s.Floats {
vals = append(vals, f.String())
}
for _, h := range s.Histograms {
vals = append(vals, h.String())
} }
return fmt.Sprintf("%s =>\n%s", s.Metric, strings.Join(vals, "\n")) return fmt.Sprintf("%s =>\n%s", s.Metric, strings.Join(vals, "\n"))
} }
// MarshalJSON is mirrored in web/api/v1/api.go for efficiency reasons. // FPoint represents a single float data point for a given timestamp.
// This implementation is still provided for debug purposes and usage type FPoint struct {
// without jsoniter.
func (s Series) MarshalJSON() ([]byte, error) {
// Note that this is rather inefficient because it re-creates the whole
// series, just separated by Histogram Points and Value Points. For API
// purposes, there is a more efficient jsoniter implementation in
// web/api/v1/api.go.
series := struct {
M labels.Labels `json:"metric"`
V []Point `json:"values,omitempty"`
H []Point `json:"histograms,omitempty"`
}{
M: s.Metric,
}
for _, p := range s.Points {
if p.H == nil {
series.V = append(series.V, p)
continue
}
series.H = append(series.H, p)
}
return json.Marshal(series)
}
// Point represents a single data point for a given timestamp.
// If H is not nil, then this is a histogram point and only (T, H) is valid.
// If H is nil, then only (T, V) is valid.
type Point struct {
T int64 T int64
V float64 F float64
H *histogram.FloatHistogram
} }
func (p Point) String() string { func (p FPoint) String() string {
var s string s := strconv.FormatFloat(p.F, 'f', -1, 64)
if p.H != nil {
s = p.H.String()
} else {
s = strconv.FormatFloat(p.V, 'f', -1, 64)
}
return fmt.Sprintf("%s @[%v]", s, p.T) return fmt.Sprintf("%s @[%v]", s, p.T)
} }
// MarshalJSON implements json.Marshaler. // MarshalJSON implements json.Marshaler.
// //
// JSON marshaling is only needed for the HTTP API. Since Point is such a // JSON marshaling is only needed for the HTTP API. Since FPoint is such a
// frequently marshaled type, it gets an optimized treatment directly in // frequently marshaled type, it gets an optimized treatment directly in
// web/api/v1/api.go. Therefore, this method is unused within Prometheus. It is // web/api/v1/api.go. Therefore, this method is unused within Prometheus. It is
// still provided here as convenience for debugging and for other users of this // still provided here as convenience for debugging and for other users of this
// code. Also note that the different marshaling implementations might lead to // code. Also note that the different marshaling implementations might lead to
// slightly different results in terms of formatting and rounding of the // slightly different results in terms of formatting and rounding of the
// timestamp. // timestamp.
func (p Point) MarshalJSON() ([]byte, error) { func (p FPoint) MarshalJSON() ([]byte, error) {
if p.H == nil { v := strconv.FormatFloat(p.F, 'f', -1, 64)
v := strconv.FormatFloat(p.V, 'f', -1, 64) return json.Marshal([...]interface{}{float64(p.T) / 1000, v})
return json.Marshal([...]interface{}{float64(p.T) / 1000, v}) }
}
// HPoint represents a single histogram data point for a given timestamp.
// H must never be nil.
type HPoint struct {
T int64
H *histogram.FloatHistogram
}
func (p HPoint) String() string {
return fmt.Sprintf("%s @[%v]", p.H.String(), p.T)
}
// MarshalJSON implements json.Marshaler.
//
// JSON marshaling is only needed for the HTTP API. Since HPoint is such a
// frequently marshaled type, it gets an optimized treatment directly in
// web/api/v1/api.go. Therefore, this method is unused within Prometheus. It is
// still provided here as convenience for debugging and for other users of this
// code. Also note that the different marshaling implementations might lead to
// slightly different results in terms of formatting and rounding of the
// timestamp.
func (p HPoint) MarshalJSON() ([]byte, error) {
h := struct { h := struct {
Count string `json:"count"` Count string `json:"count"`
Sum string `json:"sum"` Sum string `json:"sum"`
@ -171,42 +168,54 @@ func (p Point) MarshalJSON() ([]byte, error) {
return json.Marshal([...]interface{}{float64(p.T) / 1000, h}) return json.Marshal([...]interface{}{float64(p.T) / 1000, h})
} }
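
The Point → FPoint/HPoint split means a Series now carries floats and histograms in separate, individually typed slices, rendered floats-first as the TODO above notes. An illustrative sketch with simplified stand-in types (string metric, a bare Sum in place of *histogram.FloatHistogram):

package main

import (
	"fmt"
	"strconv"
)

type FPoint struct {
	T int64
	F float64
}

type HPoint struct {
	T   int64
	Sum float64 // stand-in for the *histogram.FloatHistogram payload
}

type Series struct {
	Metric     string
	Floats     []FPoint
	Histograms []HPoint
}

func (s Series) String() string {
	// Floats first, then histograms, mirroring the ordering note in the diff.
	out := s.Metric + " =>"
	for _, f := range s.Floats {
		out += "\n" + strconv.FormatFloat(f.F, 'f', -1, 64) + " @[" + strconv.FormatInt(f.T, 10) + "]"
	}
	for _, h := range s.Histograms {
		out += fmt.Sprintf("\nhistogram(sum=%g) @[%d]", h.Sum, h.T)
	}
	return out
}

func main() {
	s := Series{
		Metric:     `http_requests_total{job="api"}`,
		Floats:     []FPoint{{T: 1000, F: 1}, {T: 2000, F: 2}},
		Histograms: []HPoint{{T: 3000, Sum: 12.5}},
	}
	fmt.Println(s)
}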
// Sample is a single sample belonging to a metric. // Sample is a single sample belonging to a metric. It represents either a float
// sample or a histogram sample. If H is nil, it is a float sample. Otherwise,
// it is a histogram sample.
type Sample struct { type Sample struct {
Point T int64
F float64
H *histogram.FloatHistogram
Metric labels.Labels Metric labels.Labels
} }
func (s Sample) String() string { func (s Sample) String() string {
return fmt.Sprintf("%s => %s", s.Metric, s.Point) var str string
if s.H == nil {
p := FPoint{T: s.T, F: s.F}
str = p.String()
} else {
p := HPoint{T: s.T, H: s.H}
str = p.String()
}
return fmt.Sprintf("%s => %s", s.Metric, str)
} }
// MarshalJSON is mirrored in web/api/v1/api.go with jsoniter because Point // MarshalJSON is mirrored in web/api/v1/api.go with jsoniter because FPoint and
// wouldn't be marshaled with jsoniter in all cases otherwise. // HPoint wouldn't be marshaled with jsoniter otherwise.
func (s Sample) MarshalJSON() ([]byte, error) { func (s Sample) MarshalJSON() ([]byte, error) {
if s.Point.H == nil { if s.H == nil {
v := struct { f := struct {
M labels.Labels `json:"metric"` M labels.Labels `json:"metric"`
V Point `json:"value"` F FPoint `json:"value"`
}{ }{
M: s.Metric, M: s.Metric,
V: s.Point, F: FPoint{T: s.T, F: s.F},
} }
return json.Marshal(v) return json.Marshal(f)
} }
h := struct { h := struct {
M labels.Labels `json:"metric"` M labels.Labels `json:"metric"`
H Point `json:"histogram"` H HPoint `json:"histogram"`
}{ }{
M: s.Metric, M: s.Metric,
H: s.Point, H: HPoint{T: s.T, H: s.H},
} }
return json.Marshal(h) return json.Marshal(h)
} }
// Vector is basically only an alias for model.Samples, but the // Vector is basically only an alias for []Sample, but the contract is that
// contract is that in a Vector, all Samples have the same timestamp. // in a Vector, all Samples have the same timestamp.
type Vector []Sample type Vector []Sample
func (vec Vector) String() string { func (vec Vector) String() string {
@ -258,7 +267,7 @@ func (m Matrix) String() string {
func (m Matrix) TotalSamples() int { func (m Matrix) TotalSamples() int {
numSamples := 0 numSamples := 0
for _, series := range m { for _, series := range m {
numSamples += len(series.Points) numSamples += len(series.Floats) + len(series.Histograms)
} }
return numSamples return numSamples
} }
@ -362,7 +371,8 @@ func (ss *StorageSeries) Labels() labels.Labels {
return ss.series.Metric return ss.series.Metric
} }
// Iterator returns a new iterator of the data of the series. // Iterator returns a new iterator of the data of the series. In case of
// multiple samples with the same timestamp, it returns the float samples first.
func (ss *StorageSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator { func (ss *StorageSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator {
if ssi, ok := it.(*storageSeriesIterator); ok { if ssi, ok := it.(*storageSeriesIterator); ok {
ssi.reset(ss.series) ssi.reset(ss.series)
@ -372,44 +382,51 @@ func (ss *StorageSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator {
} }
type storageSeriesIterator struct { type storageSeriesIterator struct {
points []Point floats []FPoint
curr int histograms []HPoint
iFloats, iHistograms int
currT int64
currF float64
currH *histogram.FloatHistogram
} }
func newStorageSeriesIterator(series Series) *storageSeriesIterator { func newStorageSeriesIterator(series Series) *storageSeriesIterator {
return &storageSeriesIterator{ return &storageSeriesIterator{
points: series.Points, floats: series.Floats,
curr: -1, histograms: series.Histograms,
iFloats: -1,
iHistograms: 0,
currT: math.MinInt64,
} }
} }
func (ssi *storageSeriesIterator) reset(series Series) { func (ssi *storageSeriesIterator) reset(series Series) {
ssi.points = series.Points ssi.floats = series.Floats
ssi.curr = -1 ssi.histograms = series.Histograms
ssi.iFloats = -1
ssi.iHistograms = 0
ssi.currT = math.MinInt64
ssi.currF = 0
ssi.currH = nil
} }
func (ssi *storageSeriesIterator) Seek(t int64) chunkenc.ValueType { func (ssi *storageSeriesIterator) Seek(t int64) chunkenc.ValueType {
i := ssi.curr if ssi.iFloats >= len(ssi.floats) && ssi.iHistograms >= len(ssi.histograms) {
if i < 0 { return chunkenc.ValNone
i = 0
} }
for ; i < len(ssi.points); i++ { for ssi.currT < t {
p := ssi.points[i] if ssi.Next() == chunkenc.ValNone {
if p.T >= t { return chunkenc.ValNone
ssi.curr = i
if p.H != nil {
return chunkenc.ValFloatHistogram
}
return chunkenc.ValFloat
} }
} }
ssi.curr = len(ssi.points) - 1 if ssi.currH != nil {
return chunkenc.ValNone return chunkenc.ValFloatHistogram
}
return chunkenc.ValFloat
} }
func (ssi *storageSeriesIterator) At() (t int64, v float64) { func (ssi *storageSeriesIterator) At() (t int64, v float64) {
p := ssi.points[ssi.curr] return ssi.currT, ssi.currF
return p.T, p.V
} }
func (ssi *storageSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { func (ssi *storageSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
@ -417,25 +434,59 @@ func (ssi *storageSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
} }
func (ssi *storageSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { func (ssi *storageSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
p := ssi.points[ssi.curr] return ssi.currT, ssi.currH
return p.T, p.H
} }
func (ssi *storageSeriesIterator) AtT() int64 { func (ssi *storageSeriesIterator) AtT() int64 {
p := ssi.points[ssi.curr] return ssi.currT
return p.T
} }
func (ssi *storageSeriesIterator) Next() chunkenc.ValueType { func (ssi *storageSeriesIterator) Next() chunkenc.ValueType {
ssi.curr++ if ssi.currH != nil {
if ssi.curr >= len(ssi.points) { ssi.iHistograms++
return chunkenc.ValNone } else {
ssi.iFloats++
} }
p := ssi.points[ssi.curr] var (
if p.H != nil { pickH, pickF = false, false
floatsExhausted = ssi.iFloats >= len(ssi.floats)
histogramsExhausted = ssi.iHistograms >= len(ssi.histograms)
)
switch {
case floatsExhausted:
if histogramsExhausted { // Both exhausted!
return chunkenc.ValNone
}
pickH = true
case histogramsExhausted: // and floats not exhausted.
pickF = true
// From here on, we have to look at timestamps.
case ssi.histograms[ssi.iHistograms].T < ssi.floats[ssi.iFloats].T:
// Next histogram comes before next float.
pickH = true
default:
// In all other cases, we pick float so that we first iterate
// through floats if the timestamp is the same.
pickF = true
}
switch {
case pickF:
p := ssi.floats[ssi.iFloats]
ssi.currT = p.T
ssi.currF = p.F
ssi.currH = nil
return chunkenc.ValFloat
case pickH:
p := ssi.histograms[ssi.iHistograms]
ssi.currT = p.T
ssi.currF = 0
ssi.currH = p.H
return chunkenc.ValFloatHistogram return chunkenc.ValFloatHistogram
default:
panic("storageSeriesIterater.Next failed to pick value type")
} }
return chunkenc.ValFloat
} }
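
The rewritten Next is a two-cursor merge over the float and histogram slices: always yield the earlier timestamp, and break ties in favour of the float sample. A sketch of just that merge order over bare timestamps:

package main

import "fmt"

func mergeTimestamps(floats, histograms []int64) []string {
	var out []string
	i, j := 0, 0
	for i < len(floats) || j < len(histograms) {
		switch {
		case i >= len(floats):
			out = append(out, fmt.Sprintf("h@%d", histograms[j]))
			j++
		case j >= len(histograms):
			out = append(out, fmt.Sprintf("f@%d", floats[i]))
			i++
		case histograms[j] < floats[i]:
			out = append(out, fmt.Sprintf("h@%d", histograms[j]))
			j++
		default: // ties go to the float, as in the diff
			out = append(out, fmt.Sprintf("f@%d", floats[i]))
			i++
		}
	}
	return out
}

func main() {
	fmt.Println(mergeTimestamps([]int64{1, 3, 5}, []int64{2, 3})) // [f@1 h@2 f@3 h@3 f@5]
}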
func (ssi *storageSeriesIterator) Err() error { func (ssi *storageSeriesIterator) Err() error {

View file

@ -234,8 +234,9 @@ func (r *AlertingRule) sample(alert *Alert, ts time.Time) promql.Sample {
lb.Set(alertStateLabel, alert.State.String()) lb.Set(alertStateLabel, alert.State.String())
s := promql.Sample{ s := promql.Sample{
Metric: lb.Labels(labels.EmptyLabels()), Metric: lb.Labels(),
Point: promql.Point{T: timestamp.FromTime(ts), V: 1}, T: timestamp.FromTime(ts),
F: 1,
} }
return s return s
} }
@ -252,8 +253,9 @@ func (r *AlertingRule) forStateSample(alert *Alert, ts time.Time, v float64) pro
lb.Set(labels.AlertName, r.name) lb.Set(labels.AlertName, r.name)
s := promql.Sample{ s := promql.Sample{
Metric: lb.Labels(labels.EmptyLabels()), Metric: lb.Labels(),
Point: promql.Point{T: timestamp.FromTime(ts), V: v}, T: timestamp.FromTime(ts),
F: v,
} }
return s return s
} }
@ -339,7 +341,7 @@ func (r *AlertingRule) Eval(ctx context.Context, evalDelay time.Duration, ts tim
// Provide the alert information to the template. // Provide the alert information to the template.
l := smpl.Metric.Map() l := smpl.Metric.Map()
tmplData := template.AlertTemplateData(l, r.externalLabels, r.externalURL, smpl.V) tmplData := template.AlertTemplateData(l, r.externalLabels, r.externalURL, smpl.F)
// Inject some convenience variables that are easier to remember for users // Inject some convenience variables that are easier to remember for users
// who are not used to Go's templating system. // who are not used to Go's templating system.
defs := []string{ defs := []string{
@ -381,7 +383,7 @@ func (r *AlertingRule) Eval(ctx context.Context, evalDelay time.Duration, ts tim
}) })
annotations := sb.Labels() annotations := sb.Labels()
lbs := lb.Labels(labels.EmptyLabels()) lbs := lb.Labels()
h := lbs.Hash() h := lbs.Hash()
resultFPs[h] = struct{}{} resultFPs[h] = struct{}{}
@ -394,7 +396,7 @@ func (r *AlertingRule) Eval(ctx context.Context, evalDelay time.Duration, ts tim
Annotations: annotations, Annotations: annotations,
ActiveAt: ts, ActiveAt: ts,
State: StatePending, State: StatePending,
Value: smpl.V, Value: smpl.F,
} }
} }

View file

@ -109,7 +109,7 @@ func TestAlertingRuleLabelsUpdate(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "critical", "severity", "critical",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
}, },
{ {
@ -122,7 +122,7 @@ func TestAlertingRuleLabelsUpdate(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "warning", "severity", "warning",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
}, },
{ {
@ -135,7 +135,7 @@ func TestAlertingRuleLabelsUpdate(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "critical", "severity", "critical",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
}, },
{ {
@ -148,7 +148,7 @@ func TestAlertingRuleLabelsUpdate(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "critical", "severity", "critical",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
}, },
} }
@ -157,8 +157,9 @@ func TestAlertingRuleLabelsUpdate(t *testing.T) {
for i, result := range results { for i, result := range results {
t.Logf("case %d", i) t.Logf("case %d", i)
evalTime := baseTime.Add(time.Duration(i) * time.Minute) evalTime := baseTime.Add(time.Duration(i) * time.Minute)
result[0].Point.T = timestamp.FromTime(evalTime) result[0].T = timestamp.FromTime(evalTime)
res, err := rule.Eval(suite.Context(), 0, evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0) res, err := rule.Eval(suite.Context(), 0, evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0)
require.NoError(t, err) require.NoError(t, err)
var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples. var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples.
@ -225,7 +226,7 @@ func TestAlertingRuleExternalLabelsInTemplate(t *testing.T) {
"job", "app-server", "job", "app-server",
"templated_label", "There are 0 external Labels, of which foo is .", "templated_label", "There are 0 external Labels, of which foo is .",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings( Metric: labels.FromStrings(
@ -236,13 +237,13 @@ func TestAlertingRuleExternalLabelsInTemplate(t *testing.T) {
"job", "app-server", "job", "app-server",
"templated_label", "There are 2 external Labels, of which foo is bar.", "templated_label", "There are 2 external Labels, of which foo is bar.",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
} }
evalTime := time.Unix(0, 0) evalTime := time.Unix(0, 0)
result[0].Point.T = timestamp.FromTime(evalTime) result[0].T = timestamp.FromTime(evalTime)
result[1].Point.T = timestamp.FromTime(evalTime) result[1].T = timestamp.FromTime(evalTime)
var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples. var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples.
res, err := ruleWithoutExternalLabels.Eval( res, err := ruleWithoutExternalLabels.Eval(
@ -321,7 +322,7 @@ func TestAlertingRuleExternalURLInTemplate(t *testing.T) {
"job", "app-server", "job", "app-server",
"templated_label", "The external URL is .", "templated_label", "The external URL is .",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings( Metric: labels.FromStrings(
@ -332,13 +333,13 @@ func TestAlertingRuleExternalURLInTemplate(t *testing.T) {
"job", "app-server", "job", "app-server",
"templated_label", "The external URL is http://localhost:1234.", "templated_label", "The external URL is http://localhost:1234.",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
} }
evalTime := time.Unix(0, 0) evalTime := time.Unix(0, 0)
result[0].Point.T = timestamp.FromTime(evalTime) result[0].T = timestamp.FromTime(evalTime)
result[1].Point.T = timestamp.FromTime(evalTime) result[1].T = timestamp.FromTime(evalTime)
var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples. var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples.
res, err := ruleWithoutExternalURL.Eval( res, err := ruleWithoutExternalURL.Eval(
@ -405,12 +406,12 @@ func TestAlertingRuleEmptyLabelFromTemplate(t *testing.T) {
"instance", "0", "instance", "0",
"job", "app-server", "job", "app-server",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
} }
evalTime := time.Unix(0, 0) evalTime := time.Unix(0, 0)
result[0].Point.T = timestamp.FromTime(evalTime) result[0].T = timestamp.FromTime(evalTime)
var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples. var filteredRes promql.Vector // After removing 'ALERTS_FOR_STATE' samples.
res, err := rule.Eval( res, err := rule.Eval(
@ -760,7 +761,7 @@ func TestKeepFiringFor(t *testing.T) {
"instance", "0", "instance", "0",
"job", "app-server", "job", "app-server",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
}, },
{ {
@ -772,7 +773,7 @@ func TestKeepFiringFor(t *testing.T) {
"instance", "0", "instance", "0",
"job", "app-server", "job", "app-server",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
}, },
{ {
@ -784,7 +785,7 @@ func TestKeepFiringFor(t *testing.T) {
"instance", "0", "instance", "0",
"job", "app-server", "job", "app-server",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
}, },
{ {
@ -796,7 +797,7 @@ func TestKeepFiringFor(t *testing.T) {
"instance", "0", "instance", "0",
"job", "app-server", "job", "app-server",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
}, },
// From now on the alert should keep firing. // From now on the alert should keep firing.
@ -809,7 +810,7 @@ func TestKeepFiringFor(t *testing.T) {
"instance", "0", "instance", "0",
"job", "app-server", "job", "app-server",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
}, },
} }
@ -818,7 +819,7 @@ func TestKeepFiringFor(t *testing.T) {
for i, result := range results { for i, result := range results {
t.Logf("case %d", i) t.Logf("case %d", i)
evalTime := baseTime.Add(time.Duration(i) * time.Minute) evalTime := baseTime.Add(time.Duration(i) * time.Minute)
result[0].Point.T = timestamp.FromTime(evalTime) result[0].T = timestamp.FromTime(evalTime)
res, err := rule.Eval(suite.Context(), 0, evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0) res, err := rule.Eval(suite.Context(), 0, evalTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0)
require.NoError(t, err) require.NoError(t, err)
@ -871,11 +872,11 @@ func TestPendingAndKeepFiringFor(t *testing.T) {
"instance", "0", "instance", "0",
"job", "app-server", "job", "app-server",
), ),
Point: promql.Point{V: 1}, F: 1,
} }
baseTime := time.Unix(0, 0) baseTime := time.Unix(0, 0)
result.Point.T = timestamp.FromTime(baseTime) result.T = timestamp.FromTime(baseTime)
res, err := rule.Eval(suite.Context(), 0, baseTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0) res, err := rule.Eval(suite.Context(), 0, baseTime, EngineQueryFunc(suite.QueryEngine(), suite.Storage()), nil, 0)
require.NoError(t, err) require.NoError(t, err)

View file

@ -202,7 +202,8 @@ func EngineQueryFunc(engine *promql.Engine, q storage.Queryable) QueryFunc {
return v, nil return v, nil
case promql.Scalar: case promql.Scalar:
return promql.Vector{promql.Sample{ return promql.Vector{promql.Sample{
Point: promql.Point{T: v.T, V: v.V}, T: v.T,
F: v.V,
Metric: labels.Labels{}, Metric: labels.Labels{},
}}, nil }}, nil
default: default:
@ -256,7 +257,8 @@ type Group struct {
opts *ManagerOptions opts *ManagerOptions
mtx sync.Mutex mtx sync.Mutex
evaluationTime time.Duration evaluationTime time.Duration
lastEvaluation time.Time lastEvaluation time.Time // Wall-clock time of most recent evaluation.
lastEvalTimestamp time.Time // Time slot used for most recent evaluation.
shouldRestore bool shouldRestore bool
@ -269,13 +271,19 @@ type Group struct {
metrics *Metrics metrics *Metrics
ruleGroupPostProcessFunc RuleGroupPostProcessFunc // Rule group evaluation iteration function,
// defaults to DefaultEvalIterationFunc.
evalIterationFunc GroupEvalIterationFunc
alignEvaluationTimeOnInterval bool alignEvaluationTimeOnInterval bool
} }
// This function will be used before each rule group evaluation if not nil. // GroupEvalIterationFunc is used to implement and extend rule group
// Use this function type if the rule group post processing is needed. // evaluation iteration logic. It is configured in Group.evalIterationFunc,
type RuleGroupPostProcessFunc func(g *Group, lastEvalTimestamp time.Time, log log.Logger) error // and periodically invoked at each group evaluation interval to
// evaluate the rules in the group at that point in time.
// DefaultEvalIterationFunc is the default implementation.
type GroupEvalIterationFunc func(ctx context.Context, g *Group, evalTimestamp time.Time)
type GroupOptions struct { type GroupOptions struct {
Name, File string Name, File string
@ -287,7 +295,7 @@ type GroupOptions struct {
Opts *ManagerOptions Opts *ManagerOptions
EvaluationDelay *time.Duration EvaluationDelay *time.Duration
done chan struct{} done chan struct{}
RuleGroupPostProcessFunc RuleGroupPostProcessFunc EvalIterationFunc GroupEvalIterationFunc
AlignEvaluationTimeOnInterval bool AlignEvaluationTimeOnInterval bool
} }
@ -309,6 +317,11 @@ func NewGroup(o GroupOptions) *Group {
metrics.GroupSamples.WithLabelValues(key) metrics.GroupSamples.WithLabelValues(key)
metrics.GroupInterval.WithLabelValues(key).Set(o.Interval.Seconds()) metrics.GroupInterval.WithLabelValues(key).Set(o.Interval.Seconds())
evalIterationFunc := o.EvalIterationFunc
if evalIterationFunc == nil {
evalIterationFunc = DefaultEvalIterationFunc
}
return &Group{ return &Group{
name: o.Name, name: o.Name,
file: o.File, file: o.File,
@ -325,7 +338,7 @@ func NewGroup(o GroupOptions) *Group {
terminated: make(chan struct{}), terminated: make(chan struct{}),
logger: log.With(o.Opts.Logger, "file", o.File, "group", o.Name), logger: log.With(o.Opts.Logger, "file", o.File, "group", o.Name),
metrics: metrics, metrics: metrics,
ruleGroupPostProcessFunc: o.RuleGroupPostProcessFunc, evalIterationFunc: evalIterationFunc,
alignEvaluationTimeOnInterval: o.AlignEvaluationTimeOnInterval, alignEvaluationTimeOnInterval: o.AlignEvaluationTimeOnInterval,
} }
} }
@ -355,6 +368,8 @@ func (g *Group) Limit() int { return g.limit }
// If it's empty or nil, then the owning user/tenant is considered to be the source tenant. // If it's empty or nil, then the owning user/tenant is considered to be the source tenant.
func (g *Group) SourceTenants() []string { return g.sourceTenants } func (g *Group) SourceTenants() []string { return g.sourceTenants }
func (g *Group) Logger() log.Logger { return g.logger }
func (g *Group) run(ctx context.Context) { func (g *Group) run(ctx context.Context) {
defer close(g.terminated) defer close(g.terminated)
@ -373,18 +388,6 @@ func (g *Group) run(ctx context.Context) {
}, },
}) })
iter := func() {
g.metrics.IterationsScheduled.WithLabelValues(GroupKey(g.file, g.name)).Inc()
start := time.Now()
g.Eval(ctx, evalTimestamp)
timeSinceStart := time.Since(start)
g.metrics.IterationDuration.Observe(timeSinceStart.Seconds())
g.setEvaluationTime(timeSinceStart)
g.setLastEvaluation(start)
}
// The assumption here is that since the ticker was started after having // The assumption here is that since the ticker was started after having
// waited for `evalTimestamp` to pass, the ticks will trigger soon // waited for `evalTimestamp` to pass, the ticks will trigger soon
// after each `evalTimestamp + N * g.interval` occurrence. // after each `evalTimestamp + N * g.interval` occurrence.
@ -414,7 +417,7 @@ func (g *Group) run(ctx context.Context) {
}(time.Now()) }(time.Now())
}() }()
iter() g.evalIterationFunc(ctx, g, evalTimestamp)
if g.shouldRestore { if g.shouldRestore {
// If we have to restore, we wait for another Eval to finish. // If we have to restore, we wait for another Eval to finish.
// The reason behind this is, during first eval (or before it) // The reason behind this is, during first eval (or before it)
@ -430,7 +433,7 @@ func (g *Group) run(ctx context.Context) {
g.metrics.IterationsScheduled.WithLabelValues(GroupKey(g.file, g.name)).Add(float64(missed)) g.metrics.IterationsScheduled.WithLabelValues(GroupKey(g.file, g.name)).Add(float64(missed))
} }
evalTimestamp = evalTimestamp.Add((missed + 1) * g.interval) evalTimestamp = evalTimestamp.Add((missed + 1) * g.interval)
iter() g.evalIterationFunc(ctx, g, evalTimestamp)
} }
g.RestoreForState(time.Now()) g.RestoreForState(time.Now())
@ -453,21 +456,29 @@ func (g *Group) run(ctx context.Context) {
} }
evalTimestamp = evalTimestamp.Add((missed + 1) * g.interval) evalTimestamp = evalTimestamp.Add((missed + 1) * g.interval)
useRuleGroupPostProcessFunc(g, evalTimestamp.Add(-(missed+1)*g.interval)) g.evalIterationFunc(ctx, g, evalTimestamp)
iter()
} }
} }
} }
} }
func useRuleGroupPostProcessFunc(g *Group, lastEvalTimestamp time.Time) { // DefaultEvalIterationFunc is the default implementation of
if g.ruleGroupPostProcessFunc != nil { // GroupEvalIterationFunc that is periodically invoked to evaluate the rules
err := g.ruleGroupPostProcessFunc(g, lastEvalTimestamp, g.logger) // in a group at a given point in time and updates Group state and metrics
if err != nil { // accordingly. Custom GroupEvalIterationFunc implementations are recommended
level.Warn(g.logger).Log("msg", "ruleGroupPostProcessFunc failed", "err", err) // to invoke this function as well, to ensure correct Group state and metrics
} // are maintained.
} func DefaultEvalIterationFunc(ctx context.Context, g *Group, evalTimestamp time.Time) {
g.metrics.IterationsScheduled.WithLabelValues(GroupKey(g.file, g.name)).Inc()
start := time.Now()
g.Eval(ctx, evalTimestamp)
timeSinceStart := time.Since(start)
g.metrics.IterationDuration.Observe(timeSinceStart.Seconds())
g.setEvaluationTime(timeSinceStart)
g.setLastEvaluation(start)
g.setLastEvalTimestamp(evalTimestamp)
} }
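
A hedged sketch of extending evaluation with the new hook: a custom GroupEvalIterationFunc runs extra logic and then delegates to DefaultEvalIterationFunc so Group state and metrics stay correct, as the comment above recommends. The import path is illustrative and may differ in this fork; in real use the function would be passed via GroupOptions.EvalIterationFunc or Manager.Update (see the hunks above and below).

package main

import (
	"context"
	"log"
	"time"

	"github.com/prometheus/prometheus/rules"
)

// loggingEvalIterationFunc logs each scheduled evaluation, then delegates to
// the default implementation so metrics and timestamps are still updated.
func loggingEvalIterationFunc(ctx context.Context, g *rules.Group, evalTimestamp time.Time) {
	log.Printf("evaluating group %s at %s", g.Name(), evalTimestamp)
	rules.DefaultEvalIterationFunc(ctx, g, evalTimestamp)
}

func main() {
	// Wiring omitted: pass loggingEvalIterationFunc where a
	// GroupEvalIterationFunc is accepted.
	_ = loggingEvalIterationFunc
}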
func (g *Group) stop() { func (g *Group) stop() {
@ -547,6 +558,20 @@ func (g *Group) setLastEvaluation(ts time.Time) {
g.lastEvaluation = ts g.lastEvaluation = ts
} }
// GetLastEvalTimestamp returns the timestamp of the last evaluation.
func (g *Group) GetLastEvalTimestamp() time.Time {
g.mtx.Lock()
defer g.mtx.Unlock()
return g.lastEvalTimestamp
}
// setLastEvalTimestamp updates lastEvalTimestamp to the timestamp of the last evaluation.
func (g *Group) setLastEvalTimestamp(ts time.Time) {
g.mtx.Lock()
defer g.mtx.Unlock()
g.lastEvalTimestamp = ts
}
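
A hypothetical consumer of the new accessor (the package name and helper below are illustrative, not part of this change): GetLastEvalTimestamp returns the zero time until setLastEvalTimestamp runs, which distinguishes a group that has never completed an evaluation, e.g. when a custom iteration function skips DefaultEvalIterationFunc, as a test case further below exercises.

    package ruleutil

    import "github.com/prometheus/prometheus/rules"

    // neverEvaluated reports whether the group has not completed a single
    // evaluation yet: the timestamp stays at its zero value until
    // DefaultEvalIterationFunc (or an equivalent) records one.
    func neverEvaluated(g *rules.Group) bool {
    	return g.GetLastEvalTimestamp().IsZero()
    }
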
// EvalTimestamp returns the immediately preceding consistently slotted evaluation time. // EvalTimestamp returns the immediately preceding consistently slotted evaluation time.
func (g *Group) EvalTimestamp(startTime int64) time.Time { func (g *Group) EvalTimestamp(startTime int64) time.Time {
var offset int64 var offset int64
@ -689,7 +714,7 @@ func (g *Group) Eval(ctx context.Context, ts time.Time) {
if s.H != nil { if s.H != nil {
_, err = app.AppendHistogram(0, s.Metric, s.T, nil, s.H) _, err = app.AppendHistogram(0, s.Metric, s.T, nil, s.H)
} else { } else {
_, err = app.Append(0, s.Metric, s.T, s.V) _, err = app.Append(0, s.Metric, s.T, s.F)
} }
if err != nil { if err != nil {
@ -1060,11 +1085,11 @@ func (m *Manager) Stop() {
// Update the rule manager's state as the config requires. If // Update the rule manager's state as the config requires. If
// loading the new rules failed the old rule set is restored. // loading the new rules failed the old rule set is restored.
func (m *Manager) Update(interval time.Duration, files []string, externalLabels labels.Labels, externalURL string, ruleGroupPostProcessFunc RuleGroupPostProcessFunc) error { func (m *Manager) Update(interval time.Duration, files []string, externalLabels labels.Labels, externalURL string, groupEvalIterationFunc GroupEvalIterationFunc) error {
m.mtx.Lock() m.mtx.Lock()
defer m.mtx.Unlock() defer m.mtx.Unlock()
groups, errs := m.LoadGroups(interval, externalLabels, externalURL, ruleGroupPostProcessFunc, files...) groups, errs := m.LoadGroups(interval, externalLabels, externalURL, groupEvalIterationFunc, files...)
if errs != nil { if errs != nil {
for _, e := range errs { for _, e := range errs {
@ -1154,7 +1179,7 @@ func (FileLoader) Parse(query string) (parser.Expr, error) { return parser.Parse
// LoadGroups reads groups from a list of files. // LoadGroups reads groups from a list of files.
func (m *Manager) LoadGroups( func (m *Manager) LoadGroups(
interval time.Duration, externalLabels labels.Labels, externalURL string, ruleGroupPostProcessFunc RuleGroupPostProcessFunc, filenames ...string, interval time.Duration, externalLabels labels.Labels, externalURL string, groupEvalIterationFunc GroupEvalIterationFunc, filenames ...string,
) (map[string]*Group, []error) { ) (map[string]*Group, []error) {
groups := make(map[string]*Group) groups := make(map[string]*Group)
@ -1212,7 +1237,7 @@ func (m *Manager) LoadGroups(
Opts: m.opts, Opts: m.opts,
EvaluationDelay: (*time.Duration)(rg.EvaluationDelay), EvaluationDelay: (*time.Duration)(rg.EvaluationDelay),
done: m.done, done: m.done,
RuleGroupPostProcessFunc: ruleGroupPostProcessFunc, EvalIterationFunc: groupEvalIterationFunc,
AlignEvaluationTimeOnInterval: rg.AlignEvaluationTimeOnInterval, AlignEvaluationTimeOnInterval: rg.AlignEvaluationTimeOnInterval,
}) })
} }
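
Callers now thread a GroupEvalIterationFunc through Update and LoadGroups. A sketch of a reload call under the new signature; the interval, external URL, and package name are placeholders, and nil selects DefaultEvalIterationFunc for every group (the first case of the test below depends on this):

    package ruleutil

    import (
    	"time"

    	"github.com/prometheus/prometheus/model/labels"
    	"github.com/prometheus/prometheus/rules"
    )

    // reload applies a new set of rule files to an existing manager.
    func reload(m *rules.Manager, files []string) error {
    	return m.Update(
    		time.Minute,             // evaluation interval
    		files,                   // rule files to load
    		labels.EmptyLabels(),    // no external labels
    		"http://localhost:9090", // external URL (placeholder)
    		nil,                     // nil => DefaultEvalIterationFunc
    	)
    }
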


@ -82,7 +82,7 @@ func TestAlertingRule(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "critical", "severity", "critical",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings( Metric: labels.FromStrings(
@ -94,7 +94,7 @@ func TestAlertingRule(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "critical", "severity", "critical",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings( Metric: labels.FromStrings(
@ -106,7 +106,7 @@ func TestAlertingRule(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "critical", "severity", "critical",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings( Metric: labels.FromStrings(
@ -118,7 +118,7 @@ func TestAlertingRule(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "critical", "severity", "critical",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
} }
@ -227,7 +227,7 @@ func TestForStateAddSamples(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "critical", "severity", "critical",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings( Metric: labels.FromStrings(
@ -238,7 +238,7 @@ func TestForStateAddSamples(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "critical", "severity", "critical",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings( Metric: labels.FromStrings(
@ -249,7 +249,7 @@ func TestForStateAddSamples(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "critical", "severity", "critical",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings( Metric: labels.FromStrings(
@ -260,7 +260,7 @@ func TestForStateAddSamples(t *testing.T) {
"job", "app-server", "job", "app-server",
"severity", "critical", "severity", "critical",
), ),
Point: promql.Point{V: 1}, F: 1,
}, },
} }
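
The rewrites above and below track an upstream promql change: Point is split into FPoint and HPoint, and Sample now carries T and F (or, for native histograms, H) directly instead of an embedded Point. A before/after sketch; the package name is illustrative:

    package promqltest

    import (
    	"github.com/prometheus/prometheus/model/labels"
    	"github.com/prometheus/prometheus/promql"
    )

    // expectedSample builds a sample in the flattened form used throughout
    // these tests. Before this sync the same value was written as
    // promql.Sample{Metric: m, Point: promql.Point{T: ts, V: v}}.
    func expectedSample(m labels.Labels, ts int64, v float64) promql.Sample {
    	return promql.Sample{Metric: m, T: ts, F: v}
    }
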
@ -331,8 +331,8 @@ func TestForStateAddSamples(t *testing.T) {
for i := range test.result { for i := range test.result {
test.result[i].T = timestamp.FromTime(evalTime.Add(-evalDelay)) test.result[i].T = timestamp.FromTime(evalTime.Add(-evalDelay))
// Updating the expected 'for' state. // Updating the expected 'for' state.
if test.result[i].V >= 0 { if test.result[i].F >= 0 {
test.result[i].V = forState test.result[i].F = forState
} }
} }
require.Equal(t, len(test.result), len(filteredRes), "%d. Number of samples in expected and actual output don't match (%d vs. %d)", i, len(test.result), len(res)) require.Equal(t, len(test.result), len(filteredRes), "%d. Number of samples in expected and actual output don't match (%d vs. %d)", i, len(test.result), len(res))
@ -598,11 +598,11 @@ func TestStaleness(t *testing.T) {
metricSample, ok := samples[metric] metricSample, ok := samples[metric]
require.True(t, ok, "Series %s not returned.", metric) require.True(t, ok, "Series %s not returned.", metric)
require.True(t, value.IsStaleNaN(metricSample[2].V), "Appended second sample not as expected. Wanted: stale NaN Got: %x", math.Float64bits(metricSample[2].V)) require.True(t, value.IsStaleNaN(metricSample[2].F), "Appended second sample not as expected. Wanted: stale NaN Got: %x", math.Float64bits(metricSample[2].F))
metricSample[2].V = 42 // require.Equal cannot handle NaN. metricSample[2].F = 42 // require.Equal cannot handle NaN.
want := map[string][]promql.Point{ want := map[string][]promql.FPoint{
metric: {{T: 0, V: 2}, {T: 1000, V: 3}, {T: 2000, V: 42}}, metric: {{T: 0, F: 2}, {T: 1000, F: 3}, {T: 2000, F: 42}},
} }
require.Equal(t, want, samples) require.Equal(t, want, samples)
@ -610,18 +610,18 @@ func TestStaleness(t *testing.T) {
} }
// Convert a SeriesSet into a form usable with require.Equal. // Convert a SeriesSet into a form usable with require.Equal.
func readSeriesSet(ss storage.SeriesSet) (map[string][]promql.Point, error) { func readSeriesSet(ss storage.SeriesSet) (map[string][]promql.FPoint, error) {
result := map[string][]promql.Point{} result := map[string][]promql.FPoint{}
var it chunkenc.Iterator var it chunkenc.Iterator
for ss.Next() { for ss.Next() {
series := ss.At() series := ss.At()
points := []promql.Point{} points := []promql.FPoint{}
it := series.Iterator(it) it := series.Iterator(it)
for it.Next() == chunkenc.ValFloat { for it.Next() == chunkenc.ValFloat {
t, v := it.At() t, v := it.At()
points = append(points, promql.Point{T: t, V: v}) points = append(points, promql.FPoint{T: t, F: v})
} }
name := series.Labels().String() name := series.Labels().String()
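
The loop above decodes floats only. A sketch of the same walk extended to all value types, using the iterator accessors seen elsewhere in this diff (AtHistogram and AtFloatHistogram exist; the helper itself is illustrative):

    package storagetest

    import (
    	"github.com/prometheus/prometheus/promql"
    	"github.com/prometheus/prometheus/storage"
    	"github.com/prometheus/prometheus/tsdb/chunkenc"
    )

    // readSeries walks every value type instead of stopping at the first
    // non-float sample.
    func readSeries(s storage.Series) (floats []promql.FPoint, histograms int) {
    	it := s.Iterator(nil)
    	for typ := it.Next(); typ != chunkenc.ValNone; typ = it.Next() {
    		switch typ {
    		case chunkenc.ValFloat:
    			t, f := it.At()
    			floats = append(floats, promql.FPoint{T: t, F: f})
    		case chunkenc.ValHistogram, chunkenc.ValFloatHistogram:
    			// AtHistogram()/AtFloatHistogram() would decode these;
    			// this sketch only counts them.
    			histograms++
    		}
    	}
    	return floats, histograms
    }
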
@ -722,7 +722,7 @@ func TestDeletedRuleMarkedStale(t *testing.T) {
metricSample, ok := samples[metric] metricSample, ok := samples[metric]
require.True(t, ok, "Series %s not returned.", metric) require.True(t, ok, "Series %s not returned.", metric)
require.True(t, value.IsStaleNaN(metricSample[0].V), "Appended sample not as expected. Wanted: stale NaN Got: %x", math.Float64bits(metricSample[0].V)) require.True(t, value.IsStaleNaN(metricSample[0].F), "Appended sample not as expected. Wanted: stale NaN Got: %x", math.Float64bits(metricSample[0].F))
} }
func TestUpdate(t *testing.T) { func TestUpdate(t *testing.T) {
@ -1386,7 +1386,7 @@ func countStaleNaN(t *testing.T, st storage.Storage) int {
require.True(t, ok, "Series %s not returned.", metric) require.True(t, ok, "Series %s not returned.", metric)
for _, s := range metricSample { for _, s := range metricSample {
if value.IsStaleNaN(s.V) { if value.IsStaleNaN(s.F) {
c++ c++
} }
} }
@ -1704,7 +1704,7 @@ groups:
m.Stop() m.Stop()
} }
func TestUpdateMissedEvalMetrics(t *testing.T) { func TestRuleGroupEvalIterationFunc(t *testing.T) {
suite, err := promql.NewTest(t, ` suite, err := promql.NewTest(t, `
load 5m load 5m
http_requests{instance="0"} 75 85 50 0 0 25 0 0 40 0 120 http_requests{instance="0"} 75 85 50 0 0 25 0 0 40 0 120
@ -1721,26 +1721,40 @@ func TestUpdateMissedEvalMetrics(t *testing.T) {
testValue := 1 testValue := 1
overrideFunc := func(g *Group, lastEvalTimestamp time.Time, log log.Logger) error { evalIterationFunc := func(ctx context.Context, g *Group, evalTimestamp time.Time) {
testValue = 2 testValue = 2
return nil DefaultEvalIterationFunc(ctx, g, evalTimestamp)
testValue = 3
}
skipEvalIterationFunc := func(ctx context.Context, g *Group, evalTimestamp time.Time) {
testValue = 4
} }
type testInput struct { type testInput struct {
overrideFunc func(g *Group, lastEvalTimestamp time.Time, logger log.Logger) error evalIterationFunc GroupEvalIterationFunc
expectedValue int expectedValue int
lastEvalTimestampIsZero bool
} }
tests := []testInput{ tests := []testInput{
// testValue should still have value of 1 since overrideFunc is nil. // testValue should still have value of 1 since the default iteration function will be called.
{ {
overrideFunc: nil, evalIterationFunc: nil,
expectedValue: 1, expectedValue: 1,
lastEvalTimestampIsZero: false,
}, },
// testValue should be incremented to 2 since overrideFunc is called. // testValue should be incremented to 3 since evalIterationFunc is called.
{ {
overrideFunc: overrideFunc, evalIterationFunc: evalIterationFunc,
expectedValue: 2, expectedValue: 3,
lastEvalTimestampIsZero: false,
},
// testValue should be incremented to 4 since skipEvalIterationFunc is called.
{
evalIterationFunc: skipEvalIterationFunc,
expectedValue: 4,
lastEvalTimestampIsZero: true,
}, },
} }
@ -1782,12 +1796,12 @@ func TestUpdateMissedEvalMetrics(t *testing.T) {
} }
group := NewGroup(GroupOptions{ group := NewGroup(GroupOptions{
Name: "default", Name: "default",
Interval: time.Second, Interval: time.Second,
Rules: []Rule{rule}, Rules: []Rule{rule},
ShouldRestore: true, ShouldRestore: true,
Opts: opts, Opts: opts,
RuleGroupPostProcessFunc: tst.overrideFunc, EvalIterationFunc: tst.evalIterationFunc,
}) })
go func() { go func() {
@ -1796,10 +1810,18 @@ func TestUpdateMissedEvalMetrics(t *testing.T) {
time.Sleep(3 * time.Second) time.Sleep(3 * time.Second)
group.stop() group.stop()
require.Equal(t, tst.expectedValue, testValue) require.Equal(t, tst.expectedValue, testValue)
if tst.lastEvalTimestampIsZero {
require.Zero(t, group.GetLastEvalTimestamp())
} else {
oneMinute, _ := time.ParseDuration("1m")
require.WithinDuration(t, time.Now(), group.GetLastEvalTimestamp(), oneMinute)
}
} }
for _, tst := range tests { for i, tst := range tests {
t.Logf("case %d", i)
testFunc(tst) testFunc(tst)
} }
} }


@ -93,7 +93,7 @@ func (rule *RecordingRule) Eval(ctx context.Context, evalDelay time.Duration, ts
lb.Set(l.Name, l.Value) lb.Set(l.Name, l.Value)
}) })
sample.Metric = lb.Labels(sample.Metric) sample.Metric = lb.Labels()
} }
// Check that the rule does not produce identical metrics after applying // Check that the rule does not produce identical metrics after applying


@ -46,11 +46,13 @@ var ruleEvalTestScenarios = []struct {
expected: promql.Vector{ expected: promql.Vector{
promql.Sample{ promql.Sample{
Metric: labels.FromStrings("__name__", "test_rule", "label_a", "1", "label_b", "3"), Metric: labels.FromStrings("__name__", "test_rule", "label_a", "1", "label_b", "3"),
Point: promql.Point{V: 1, T: timestamp.FromTime(ruleEvaluationTime)}, F: 1,
T: timestamp.FromTime(ruleEvaluationTime),
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings("__name__", "test_rule", "label_a", "2", "label_b", "4"), Metric: labels.FromStrings("__name__", "test_rule", "label_a", "2", "label_b", "4"),
Point: promql.Point{V: 10, T: timestamp.FromTime(ruleEvaluationTime)}, F: 10,
T: timestamp.FromTime(ruleEvaluationTime),
}, },
}, },
}, },
@ -61,11 +63,13 @@ var ruleEvalTestScenarios = []struct {
expected: promql.Vector{ expected: promql.Vector{
promql.Sample{ promql.Sample{
Metric: labels.FromStrings("__name__", "test_rule", "label_a", "1", "label_b", "3", "extra_from_rule", "foo"), Metric: labels.FromStrings("__name__", "test_rule", "label_a", "1", "label_b", "3", "extra_from_rule", "foo"),
Point: promql.Point{V: 1, T: timestamp.FromTime(ruleEvaluationTime)}, F: 1,
T: timestamp.FromTime(ruleEvaluationTime),
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings("__name__", "test_rule", "label_a", "2", "label_b", "4", "extra_from_rule", "foo"), Metric: labels.FromStrings("__name__", "test_rule", "label_a", "2", "label_b", "4", "extra_from_rule", "foo"),
Point: promql.Point{V: 10, T: timestamp.FromTime(ruleEvaluationTime)}, F: 10,
T: timestamp.FromTime(ruleEvaluationTime),
}, },
}, },
}, },
@ -76,11 +80,13 @@ var ruleEvalTestScenarios = []struct {
expected: promql.Vector{ expected: promql.Vector{
promql.Sample{ promql.Sample{
Metric: labels.FromStrings("__name__", "test_rule", "label_a", "from_rule", "label_b", "3"), Metric: labels.FromStrings("__name__", "test_rule", "label_a", "from_rule", "label_b", "3"),
Point: promql.Point{V: 1, T: timestamp.FromTime(ruleEvaluationTime)}, F: 1,
T: timestamp.FromTime(ruleEvaluationTime),
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings("__name__", "test_rule", "label_a", "from_rule", "label_b", "4"), Metric: labels.FromStrings("__name__", "test_rule", "label_a", "from_rule", "label_b", "4"),
Point: promql.Point{V: 10, T: timestamp.FromTime(ruleEvaluationTime)}, F: 10,
T: timestamp.FromTime(ruleEvaluationTime),
}, },
}, },
}, },
@ -91,11 +97,13 @@ var ruleEvalTestScenarios = []struct {
expected: promql.Vector{ expected: promql.Vector{
promql.Sample{ promql.Sample{
Metric: labels.FromStrings("__name__", "test_rule", "label_a", "1", "label_b", "3"), Metric: labels.FromStrings("__name__", "test_rule", "label_a", "1", "label_b", "3"),
Point: promql.Point{V: 2, T: timestamp.FromTime(ruleEvaluationTime)}, F: 2,
T: timestamp.FromTime(ruleEvaluationTime),
}, },
promql.Sample{ promql.Sample{
Metric: labels.FromStrings("__name__", "test_rule", "label_a", "2", "label_b", "4"), Metric: labels.FromStrings("__name__", "test_rule", "label_a", "2", "label_b", "4"),
Point: promql.Point{V: 20, T: timestamp.FromTime(ruleEvaluationTime)}, F: 20,
T: timestamp.FromTime(ruleEvaluationTime),
}, },
}, },
}, },


@ -692,7 +692,7 @@ func mutateSampleLabels(lset labels.Labels, target *Target, honor bool, rc []*re
} }
} }
res := lb.Labels(labels.EmptyLabels()) res := lb.Labels()
if len(rc) > 0 { if len(rc) > 0 {
res, _ = relabel.Process(res, rc...) res, _ = relabel.Process(res, rc...)
@ -726,7 +726,7 @@ func mutateReportSampleLabels(lset labels.Labels, target *Target) labels.Labels
lb.Set(l.Name, l.Value) lb.Set(l.Name, l.Value)
}) })
return lb.Labels(labels.EmptyLabels()) return lb.Labels()
} }
// appender returns an appender for ingested samples from the target. // appender returns an appender for ingested samples from the target.


@ -380,7 +380,7 @@ func PopulateLabels(lb *labels.Builder, cfg *config.ScrapeConfig, noDefaultPort
} }
} }
preRelabelLabels := lb.Labels(labels.EmptyLabels()) preRelabelLabels := lb.Labels()
keep := relabel.ProcessBuilder(lb, cfg.RelabelConfigs...) keep := relabel.ProcessBuilder(lb, cfg.RelabelConfigs...)
// Check if the target was dropped. // Check if the target was dropped.
@ -476,7 +476,7 @@ func PopulateLabels(lb *labels.Builder, cfg *config.ScrapeConfig, noDefaultPort
lb.Set(model.InstanceLabel, addr) lb.Set(model.InstanceLabel, addr)
} }
res = lb.Labels(labels.EmptyLabels()) res = lb.Labels()
err = res.Validate(func(l labels.Label) error { err = res.Validate(func(l labels.Label) error {
// Check label values are valid, drop the target if not. // Check label values are valid, drop the target if not.
if !model.LabelValue(l.Value).IsValid() { if !model.LabelValue(l.Value).IsValid() {


@ -140,7 +140,7 @@ func newTestTarget(targetURL string, deadline time.Duration, lbls labels.Labels)
lb.Set(model.AddressLabel, strings.TrimPrefix(targetURL, "http://")) lb.Set(model.AddressLabel, strings.TrimPrefix(targetURL, "http://"))
lb.Set(model.MetricsPathLabel, "/metrics") lb.Set(model.MetricsPathLabel, "/metrics")
return &Target{labels: lb.Labels(labels.EmptyLabels())} return &Target{labels: lb.Labels()}
} }
func TestNewHTTPBearerToken(t *testing.T) { func TestNewHTTPBearerToken(t *testing.T) {

View file

@ -19,6 +19,7 @@ import (
"github.com/prometheus/prometheus/model/histogram" "github.com/prometheus/prometheus/model/histogram"
"github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/prometheus/prometheus/tsdb/tsdbutil"
) )
// BufferedSeriesIterator wraps an iterator with a look-back buffer. // BufferedSeriesIterator wraps an iterator with a look-back buffer.
@ -43,7 +44,7 @@ func NewBuffer(delta int64) *BufferedSeriesIterator {
func NewBufferIterator(it chunkenc.Iterator, delta int64) *BufferedSeriesIterator { func NewBufferIterator(it chunkenc.Iterator, delta int64) *BufferedSeriesIterator {
// TODO(codesome): based on encoding, allocate different buffer. // TODO(codesome): based on encoding, allocate different buffer.
bit := &BufferedSeriesIterator{ bit := &BufferedSeriesIterator{
buf: newSampleRing(delta, 16), buf: newSampleRing(delta, 0, chunkenc.ValNone),
delta: delta, delta: delta,
} }
bit.Reset(it) bit.Reset(it)
@ -68,11 +69,8 @@ func (b *BufferedSeriesIterator) ReduceDelta(delta int64) bool {
// PeekBack returns the nth previous element of the iterator. If there is none buffered, // PeekBack returns the nth previous element of the iterator. If there is none buffered,
// ok is false. // ok is false.
func (b *BufferedSeriesIterator) PeekBack(n int) ( func (b *BufferedSeriesIterator) PeekBack(n int) (sample tsdbutil.Sample, ok bool) {
t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram, ok bool, return b.buf.nthLast(n)
) {
s, ok := b.buf.nthLast(n)
return s.t, s.v, s.h, s.fh, ok
} }
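
PeekBack now returns a single tsdbutil.Sample plus ok instead of the old five-value tuple; the test change near the end of this diff shows the new call shape. A sketch (the helper and package name are illustrative):

    package storagetest

    import (
    	"fmt"

    	"github.com/prometheus/prometheus/storage"
    	"github.com/prometheus/prometheus/tsdb/chunkenc"
    )

    // printPrev shows the single-sample return; before this sync the call
    // read: t, v, h, fh, ok := it.PeekBack(1).
    func printPrev(it *storage.BufferedSeriesIterator) {
    	s, ok := it.PeekBack(1)
    	if !ok {
    		return
    	}
    	if s.Type() == chunkenc.ValFloat { // the wrong accessor would panic
    		fmt.Printf("previous sample: %g @ %d\n", s.F(), s.T())
    	}
    }
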
// Buffer returns an iterator over the buffered data. Invalidates previously // Buffer returns an iterator over the buffered data. Invalidates previously
@ -122,14 +120,14 @@ func (b *BufferedSeriesIterator) Next() chunkenc.ValueType {
case chunkenc.ValNone: case chunkenc.ValNone:
return chunkenc.ValNone return chunkenc.ValNone
case chunkenc.ValFloat: case chunkenc.ValFloat:
t, v := b.it.At() t, f := b.it.At()
b.buf.add(sample{t: t, v: v}) b.buf.addF(fSample{t: t, f: f})
case chunkenc.ValHistogram: case chunkenc.ValHistogram:
t, h := b.it.AtHistogram() t, h := b.it.AtHistogram()
b.buf.add(sample{t: t, h: h}) b.buf.addH(hSample{t: t, h: h})
case chunkenc.ValFloatHistogram: case chunkenc.ValFloatHistogram:
t, fh := b.it.AtFloatHistogram() t, fh := b.it.AtFloatHistogram()
b.buf.add(sample{t: t, fh: fh}) b.buf.addFH(fhSample{t: t, fh: fh})
default: default:
panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType)) panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType))
} }
@ -166,56 +164,122 @@ func (b *BufferedSeriesIterator) Err() error {
return b.it.Err() return b.it.Err()
} }
// TODO(beorn7): Consider having different sample types for different value types. type fSample struct {
type sample struct { t int64
t int64 f float64
v float64
h *histogram.Histogram
fh *histogram.FloatHistogram
} }
func (s sample) T() int64 { func (s fSample) T() int64 {
return s.t return s.t
} }
func (s sample) V() float64 { func (s fSample) F() float64 {
return s.v return s.f
} }
func (s sample) H() *histogram.Histogram { func (s fSample) H() *histogram.Histogram {
panic("H() called for fSample")
}
func (s fSample) FH() *histogram.FloatHistogram {
panic("FH() called for fSample")
}
func (s fSample) Type() chunkenc.ValueType {
return chunkenc.ValFloat
}
type hSample struct {
t int64
h *histogram.Histogram
}
func (s hSample) T() int64 {
return s.t
}
func (s hSample) F() float64 {
panic("F() called for hSample")
}
func (s hSample) H() *histogram.Histogram {
return s.h return s.h
} }
func (s sample) FH() *histogram.FloatHistogram { func (s hSample) FH() *histogram.FloatHistogram {
return s.h.ToFloat()
}
func (s hSample) Type() chunkenc.ValueType {
return chunkenc.ValHistogram
}
type fhSample struct {
t int64
fh *histogram.FloatHistogram
}
func (s fhSample) T() int64 {
return s.t
}
func (s fhSample) F() float64 {
panic("F() called for fhSample")
}
func (s fhSample) H() *histogram.Histogram {
panic("H() called for fhSample")
}
func (s fhSample) FH() *histogram.FloatHistogram {
return s.fh return s.fh
} }
func (s sample) Type() chunkenc.ValueType { func (s fhSample) Type() chunkenc.ValueType {
switch { return chunkenc.ValFloatHistogram
case s.h != nil:
return chunkenc.ValHistogram
case s.fh != nil:
return chunkenc.ValFloatHistogram
default:
return chunkenc.ValFloat
}
} }
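
The three concrete types replace the old catch-all sample struct, and mismatched accessors panic by design, so consumers dispatch on Type() first, exactly as sampleRingIterator.Next does below. A sketch of that pattern, written as if it lived in this package (the sample types are unexported; an fmt import is assumed):

    // describeSample dispatches on Type() before touching a value accessor,
    // since calling the wrong accessor panics.
    func describeSample(s tsdbutil.Sample) string {
    	switch s.Type() {
    	case chunkenc.ValFloat:
    		return fmt.Sprintf("float %g @ %d", s.F(), s.T())
    	case chunkenc.ValHistogram:
    		return fmt.Sprintf("histogram (count=%d) @ %d", s.H().Count, s.T())
    	case chunkenc.ValFloatHistogram:
    		return fmt.Sprintf("float histogram (count=%g) @ %d", s.FH().Count, s.T())
    	default:
    		return "no sample"
    	}
    }
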
type sampleRing struct { type sampleRing struct {
delta int64 delta int64
buf []sample // lookback buffer // Lookback buffers. We use buf for mixed samples, but one of the three
i int // position of most recent element in ring buffer // concrete ones for homogeneous samples. (Only one of the four bufs is
f int // position of first element in ring buffer // allowed to be populated!) This avoids the overhead of the interface
l int // number of elements in buffer // wrapper for the happy (and by far most common) case of homogeneous
// samples.
buf []tsdbutil.Sample
fBuf []fSample
hBuf []hSample
fhBuf []fhSample
i int // Position of most recent element in ring buffer.
f int // Position of first element in ring buffer.
l int // Number of elements in buffer.
it sampleRingIterator it sampleRingIterator
} }
func newSampleRing(delta int64, sz int) *sampleRing { // newSampleRing creates a new sampleRing. If you do not know the preferred
r := &sampleRing{delta: delta, buf: make([]sample, sz)} // value type yet, use a size of 0 (in which case the provided typ doesn't
// matter). On the first add, a buffer of size 16 will be allocated with the
// preferred type being the type of the first added sample.
func newSampleRing(delta int64, size int, typ chunkenc.ValueType) *sampleRing {
r := &sampleRing{delta: delta}
r.reset() r.reset()
if size <= 0 {
// Will initialize on first add.
return r
}
switch typ {
case chunkenc.ValFloat:
r.fBuf = make([]fSample, size)
case chunkenc.ValHistogram:
r.hBuf = make([]hSample, size)
case chunkenc.ValFloatHistogram:
r.fhBuf = make([]fhSample, size)
default:
r.buf = make([]tsdbutil.Sample, size)
}
return r return r
} }
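
Construction under the new signature, mirroring the newSampleRing(delta, 0, chunkenc.ValNone) call in NewBufferIterator above. A sketch, as if inside this package (newSampleRing is unexported; a time import is assumed):

    func exampleRings() (lazy, floats *sampleRing) {
    	delta := (5 * time.Minute).Milliseconds()
    	// Size 0: nothing is allocated until the first add; typ is ignored.
    	lazy = newSampleRing(delta, 0, chunkenc.ValNone)
    	// Known float-only workload: preallocate the specialized fSample buffer.
    	floats = newSampleRing(delta, 16, chunkenc.ValFloat)
    	return lazy, floats
    }
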
@ -236,7 +300,7 @@ type sampleRingIterator struct {
r *sampleRing r *sampleRing
i int i int
t int64 t int64
v float64 f float64
h *histogram.Histogram h *histogram.Histogram
fh *histogram.FloatHistogram fh *histogram.FloatHistogram
} }
@ -246,17 +310,34 @@ func (it *sampleRingIterator) Next() chunkenc.ValueType {
if it.i >= it.r.l { if it.i >= it.r.l {
return chunkenc.ValNone return chunkenc.ValNone
} }
s := it.r.at(it.i)
it.t = s.t
switch { switch {
case s.h != nil: case len(it.r.fBuf) > 0:
s := it.r.atF(it.i)
it.t = s.t
it.f = s.f
return chunkenc.ValFloat
case len(it.r.hBuf) > 0:
s := it.r.atH(it.i)
it.t = s.t
it.h = s.h it.h = s.h
return chunkenc.ValHistogram return chunkenc.ValHistogram
case s.fh != nil: case len(it.r.fhBuf) > 0:
s := it.r.atFH(it.i)
it.t = s.t
it.fh = s.fh it.fh = s.fh
return chunkenc.ValFloatHistogram return chunkenc.ValFloatHistogram
}
s := it.r.at(it.i)
it.t = s.T()
switch s.Type() {
case chunkenc.ValHistogram:
it.h = s.H()
return chunkenc.ValHistogram
case chunkenc.ValFloatHistogram:
it.fh = s.FH()
return chunkenc.ValFloatHistogram
default: default:
it.v = s.v it.f = s.F()
return chunkenc.ValFloat return chunkenc.ValFloat
} }
} }
@ -270,7 +351,7 @@ func (it *sampleRingIterator) Err() error {
} }
func (it *sampleRingIterator) At() (int64, float64) { func (it *sampleRingIterator) At() (int64, float64) {
return it.t, it.v return it.t, it.f
} }
func (it *sampleRingIterator) AtHistogram() (int64, *histogram.Histogram) { func (it *sampleRingIterator) AtHistogram() (int64, *histogram.Histogram) {
@ -288,22 +369,182 @@ func (it *sampleRingIterator) AtT() int64 {
return it.t return it.t
} }
func (r *sampleRing) at(i int) sample { func (r *sampleRing) at(i int) tsdbutil.Sample {
j := (r.f + i) % len(r.buf) j := (r.f + i) % len(r.buf)
return r.buf[j] return r.buf[j]
} }
// add adds a sample to the ring buffer and frees all samples that fall func (r *sampleRing) atF(i int) fSample {
// out of the delta range. j := (r.f + i) % len(r.fBuf)
func (r *sampleRing) add(s sample) { return r.fBuf[j]
l := len(r.buf) }
// Grow the ring buffer if it fits no more elements.
if l == r.l {
buf := make([]sample, 2*l)
copy(buf[l+r.f:], r.buf[r.f:])
copy(buf, r.buf[:r.f])
r.buf = buf func (r *sampleRing) atH(i int) hSample {
j := (r.f + i) % len(r.hBuf)
return r.hBuf[j]
}
func (r *sampleRing) atFH(i int) fhSample {
j := (r.f + i) % len(r.fhBuf)
return r.fhBuf[j]
}
// add adds a sample to the ring buffer and frees all samples that fall out of
// the delta range. Note that this method works for any sample
// implementation. If you know you are dealing with one of the implementations
// from this package (fSample, hSample, fhSample), call one of the specialized
// methods addF, addH, or addFH for better performance.
func (r *sampleRing) add(s tsdbutil.Sample) {
if len(r.buf) == 0 {
// Nothing added to the interface buf yet. Let's check if we can
// stay specialized.
switch s := s.(type) {
case fSample:
if len(r.hBuf)+len(r.fhBuf) == 0 {
r.fBuf = addF(s, r.fBuf, r)
return
}
case hSample:
if len(r.fBuf)+len(r.fhBuf) == 0 {
r.hBuf = addH(s, r.hBuf, r)
return
}
case fhSample:
if len(r.fBuf)+len(r.hBuf) == 0 {
r.fhBuf = addFH(s, r.fhBuf, r)
return
}
}
// The new sample isn't a fit for the already existing
// ones. Copy the latter into the interface buffer where needed.
switch {
case len(r.fBuf) > 0:
for _, s := range r.fBuf {
r.buf = append(r.buf, s)
}
r.fBuf = nil
case len(r.hBuf) > 0:
for _, s := range r.hBuf {
r.buf = append(r.buf, s)
}
r.hBuf = nil
case len(r.fhBuf) > 0:
for _, s := range r.fhBuf {
r.buf = append(r.buf, s)
}
r.fhBuf = nil
}
}
r.buf = addSample(s, r.buf, r)
}
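
Concretely: a ring that has only ever seen fSamples keeps appending to fBuf; the first hSample that arrives through add finds fBuf non-empty, falls through the type switch, copies every buffered fSample into the interface-typed buf, nils out fBuf, and from then on the ring pays the interface-wrapper cost for all samples.
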
// addF is a version of the add method specialized for fSample.
func (r *sampleRing) addF(s fSample) {
switch {
case len(r.buf) > 0:
// Already have interface samples. Add to the interface buf.
r.buf = addSample(s, r.buf, r)
case len(r.hBuf)+len(r.fhBuf) > 0:
// Already have specialized samples that are not fSamples.
// Need to call the checked add method for conversion.
r.add(s)
default:
r.fBuf = addF(s, r.fBuf, r)
}
}
// addH is a version of the add method specialized for hSample.
func (r *sampleRing) addH(s hSample) {
switch {
case len(r.buf) > 0:
// Already have interface samples. Add to the interface buf.
r.buf = addSample(s, r.buf, r)
case len(r.fBuf)+len(r.fhBuf) > 0:
// Already have samples that are not hSamples.
// Need to call the checked add method for conversion.
r.add(s)
default:
r.hBuf = addH(s, r.hBuf, r)
}
}
// addFH is a version of the add method specialized for fhSample.
func (r *sampleRing) addFH(s fhSample) {
switch {
case len(r.buf) > 0:
// Already have interface samples. Add to the interface buf.
r.buf = addSample(s, r.buf, r)
case len(r.fBuf)+len(r.hBuf) > 0:
// Already have samples that are not fhSamples.
// Need to call the checked add method for conversion.
r.add(s)
default:
r.fhBuf = addFH(s, r.fhBuf, r)
}
}
// genericAdd is a generic implementation of adding a tsdbutil.Sample
// implementation to a buffer of a sample ring. However, the Go compiler
// currently (go1.20) decides to not expand the code during compile time, but
// creates dynamic code to handle the different types. That has a significant
// overhead during runtime, noticeable in PromQL benchmarks. For example, the
// "RangeQuery/expr=rate(a_hundred[1d]),steps=.*" benchmarks show about 7%
// longer runtime, 9% higher allocation size, and 10% more allocations.
// Therefore, genericAdd has been manually implemented for all the types
// (addSample, addF, addH, addFH) below.
//
// func genericAdd[T tsdbutil.Sample](s T, buf []T, r *sampleRing) []T {
// l := len(buf)
// // Grow the ring buffer if it fits no more elements.
// if l == 0 {
// buf = make([]T, 16)
// l = 16
// }
// if l == r.l {
// newBuf := make([]T, 2*l)
// copy(newBuf[l+r.f:], buf[r.f:])
// copy(newBuf, buf[:r.f])
//
// buf = newBuf
// r.i = r.f
// r.f += l
// l = 2 * l
// } else {
// r.i++
// if r.i >= l {
// r.i -= l
// }
// }
//
// buf[r.i] = s
// r.l++
//
// // Free head of the buffer of samples that just fell out of the range.
// tmin := s.T() - r.delta
// for buf[r.f].T() < tmin {
// r.f++
// if r.f >= l {
// r.f -= l
// }
// r.l--
// }
// return buf
// }
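
To make the grow branch concrete: with a full ring of l = 4 and r.f = 2 (logical order 2, 3, 0, 1 from oldest to newest), the two copies place buf[2:4] at newBuf[6:8] and buf[0:2] at newBuf[0:2]; r.i becomes 2 and r.f becomes 6, so the incoming sample is written at index 2 and the logical order becomes 6, 7, 0, 1, 2, oldest to newest, in a buffer of length 8.
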
// addSample is a handcoded specialization of genericAdd (see above).
func addSample(s tsdbutil.Sample, buf []tsdbutil.Sample, r *sampleRing) []tsdbutil.Sample {
l := len(buf)
// Grow the ring buffer if it fits no more elements.
if l == 0 {
buf = make([]tsdbutil.Sample, 16)
l = 16
}
if l == r.l {
newBuf := make([]tsdbutil.Sample, 2*l)
copy(newBuf[l+r.f:], buf[r.f:])
copy(newBuf, buf[:r.f])
buf = newBuf
r.i = r.f r.i = r.f
r.f += l r.f += l
l = 2 * l l = 2 * l
@ -314,18 +555,136 @@ func (r *sampleRing) add(s sample) {
} }
} }
r.buf[r.i] = s buf[r.i] = s
r.l++ r.l++
// Free head of the buffer of samples that just fell out of the range. // Free head of the buffer of samples that just fell out of the range.
tmin := s.t - r.delta tmin := s.T() - r.delta
for r.buf[r.f].t < tmin { for buf[r.f].T() < tmin {
r.f++ r.f++
if r.f >= l { if r.f >= l {
r.f -= l r.f -= l
} }
r.l-- r.l--
} }
return buf
}
// addF is a handcoded specialization of genericAdd (see above).
func addF(s fSample, buf []fSample, r *sampleRing) []fSample {
l := len(buf)
// Grow the ring buffer if it fits no more elements.
if l == 0 {
buf = make([]fSample, 16)
l = 16
}
if l == r.l {
newBuf := make([]fSample, 2*l)
copy(newBuf[l+r.f:], buf[r.f:])
copy(newBuf, buf[:r.f])
buf = newBuf
r.i = r.f
r.f += l
l = 2 * l
} else {
r.i++
if r.i >= l {
r.i -= l
}
}
buf[r.i] = s
r.l++
// Free head of the buffer of samples that just fell out of the range.
tmin := s.T() - r.delta
for buf[r.f].T() < tmin {
r.f++
if r.f >= l {
r.f -= l
}
r.l--
}
return buf
}
// addH is a handcoded specialization of genericAdd (see above).
func addH(s hSample, buf []hSample, r *sampleRing) []hSample {
l := len(buf)
// Grow the ring buffer if it fits no more elements.
if l == 0 {
buf = make([]hSample, 16)
l = 16
}
if l == r.l {
newBuf := make([]hSample, 2*l)
copy(newBuf[l+r.f:], buf[r.f:])
copy(newBuf, buf[:r.f])
buf = newBuf
r.i = r.f
r.f += l
l = 2 * l
} else {
r.i++
if r.i >= l {
r.i -= l
}
}
buf[r.i] = s
r.l++
// Free head of the buffer of samples that just fell out of the range.
tmin := s.T() - r.delta
for buf[r.f].T() < tmin {
r.f++
if r.f >= l {
r.f -= l
}
r.l--
}
return buf
}
// addFH is a handcoded specialization of genericAdd (see above).
func addFH(s fhSample, buf []fhSample, r *sampleRing) []fhSample {
l := len(buf)
// Grow the ring buffer if it fits no more elements.
if l == 0 {
buf = make([]fhSample, 16)
l = 16
}
if l == r.l {
newBuf := make([]fhSample, 2*l)
copy(newBuf[l+r.f:], buf[r.f:])
copy(newBuf, buf[:r.f])
buf = newBuf
r.i = r.f
r.f += l
l = 2 * l
} else {
r.i++
if r.i >= l {
r.i -= l
}
}
buf[r.i] = s
r.l++
// Free head of the buffer of samples that just fell out of the range.
tmin := s.T() - r.delta
for buf[r.f].T() < tmin {
r.f++
if r.f >= l {
r.f -= l
}
r.l--
}
return buf
} }
// reduceDelta lowers the buffered time delta, dropping any samples that are // reduceDelta lowers the buffered time delta, dropping any samples that are
@ -340,39 +699,98 @@ func (r *sampleRing) reduceDelta(delta int64) bool {
return true return true
} }
switch {
case len(r.fBuf) > 0:
genericReduceDelta(r.fBuf, r)
case len(r.hBuf) > 0:
genericReduceDelta(r.hBuf, r)
case len(r.fhBuf) > 0:
genericReduceDelta(r.fhBuf, r)
default:
genericReduceDelta(r.buf, r)
}
return true
}
func genericReduceDelta[T tsdbutil.Sample](buf []T, r *sampleRing) {
// Free head of the buffer of samples that just fell out of the range. // Free head of the buffer of samples that just fell out of the range.
l := len(r.buf) l := len(buf)
tmin := r.buf[r.i].t - delta tmin := buf[r.i].T() - r.delta
for r.buf[r.f].t < tmin { for buf[r.f].T() < tmin {
r.f++ r.f++
if r.f >= l { if r.f >= l {
r.f -= l r.f -= l
} }
r.l-- r.l--
} }
return true
} }
// nthLast returns the nth most recent element added to the ring. // nthLast returns the nth most recent element added to the ring.
func (r *sampleRing) nthLast(n int) (sample, bool) { func (r *sampleRing) nthLast(n int) (tsdbutil.Sample, bool) {
if n > r.l { if n > r.l {
return sample{}, false return fSample{}, false
}
i := r.l - n
switch {
case len(r.fBuf) > 0:
return r.atF(i), true
case len(r.hBuf) > 0:
return r.atH(i), true
case len(r.fhBuf) > 0:
return r.atFH(i), true
default:
return r.at(i), true
} }
return r.at(r.l - n), true
} }
func (r *sampleRing) samples() []sample { func (r *sampleRing) samples() []tsdbutil.Sample {
res := make([]sample, r.l) res := make([]tsdbutil.Sample, r.l)
k := r.f + r.l k := r.f + r.l
var j int var j int
if k > len(r.buf) {
k = len(r.buf)
j = r.l - k + r.f
}
n := copy(res, r.buf[r.f:k]) switch {
copy(res[n:], r.buf[:j]) case len(r.buf) > 0:
if k > len(r.buf) {
k = len(r.buf)
j = r.l - k + r.f
}
n := copy(res, r.buf[r.f:k])
copy(res[n:], r.buf[:j])
case len(r.fBuf) > 0:
if k > len(r.fBuf) {
k = len(r.fBuf)
j = r.l - k + r.f
}
resF := make([]fSample, r.l)
n := copy(resF, r.fBuf[r.f:k])
copy(resF[n:], r.fBuf[:j])
for i, s := range resF {
res[i] = s
}
case len(r.hBuf) > 0:
if k > len(r.hBuf) {
k = len(r.hBuf)
j = r.l - k + r.f
}
resH := make([]hSample, r.l)
n := copy(resH, r.hBuf[r.f:k])
copy(resH[n:], r.hBuf[:j])
for i, s := range resH {
res[i] = s
}
case len(r.fhBuf) > 0:
if k > len(r.fhBuf) {
k = len(r.fhBuf)
j = r.l - k + r.f
}
resFH := make([]fhSample, r.l)
n := copy(resFH, r.fhBuf[r.f:k])
copy(resFH[n:], r.fhBuf[:j])
for i, s := range resFH {
res[i] = s
}
}
return res return res
} }


@ -56,13 +56,13 @@ func TestSampleRing(t *testing.T) {
}, },
} }
for _, c := range cases { for _, c := range cases {
r := newSampleRing(c.delta, c.size) r := newSampleRing(c.delta, c.size, chunkenc.ValFloat)
input := []sample{} input := []fSample{}
for _, t := range c.input { for _, t := range c.input {
input = append(input, sample{ input = append(input, fSample{
t: t, t: t,
v: float64(rand.Intn(100)), f: float64(rand.Intn(100)),
}) })
} }
@ -73,7 +73,7 @@ func TestSampleRing(t *testing.T) {
for _, sold := range input[:i] { for _, sold := range input[:i] {
found := false found := false
for _, bs := range buffered { for _, bs := range buffered {
if bs.t == sold.t && bs.v == sold.v { if bs.T() == sold.t && bs.F() == sold.f {
found = true found = true
break break
} }
@ -92,12 +92,12 @@ func TestSampleRing(t *testing.T) {
func TestBufferedSeriesIterator(t *testing.T) { func TestBufferedSeriesIterator(t *testing.T) {
var it *BufferedSeriesIterator var it *BufferedSeriesIterator
bufferEq := func(exp []sample) { bufferEq := func(exp []fSample) {
var b []sample var b []fSample
bit := it.Buffer() bit := it.Buffer()
for bit.Next() == chunkenc.ValFloat { for bit.Next() == chunkenc.ValFloat {
t, v := bit.At() t, f := bit.At()
b = append(b, sample{t: t, v: v}) b = append(b, fSample{t: t, f: f})
} }
require.Equal(t, exp, b, "buffer mismatch") require.Equal(t, exp, b, "buffer mismatch")
} }
@ -107,21 +107,21 @@ func TestBufferedSeriesIterator(t *testing.T) {
require.Equal(t, ev, v, "value mismatch") require.Equal(t, ev, v, "value mismatch")
} }
prevSampleEq := func(ets int64, ev float64, eok bool) { prevSampleEq := func(ets int64, ev float64, eok bool) {
ts, v, _, _, ok := it.PeekBack(1) s, ok := it.PeekBack(1)
require.Equal(t, eok, ok, "exist mismatch") require.Equal(t, eok, ok, "exist mismatch")
require.Equal(t, ets, ts, "timestamp mismatch") require.Equal(t, ets, s.T(), "timestamp mismatch")
require.Equal(t, ev, v, "value mismatch") require.Equal(t, ev, s.F(), "value mismatch")
} }
it = NewBufferIterator(NewListSeriesIterator(samples{ it = NewBufferIterator(NewListSeriesIterator(samples{
sample{t: 1, v: 2}, fSample{t: 1, f: 2},
sample{t: 2, v: 3}, fSample{t: 2, f: 3},
sample{t: 3, v: 4}, fSample{t: 3, f: 4},
sample{t: 4, v: 5}, fSample{t: 4, f: 5},
sample{t: 5, v: 6}, fSample{t: 5, f: 6},
sample{t: 99, v: 8}, fSample{t: 99, f: 8},
sample{t: 100, v: 9}, fSample{t: 100, f: 9},
sample{t: 101, v: 10}, fSample{t: 101, f: 10},
}), 2) }), 2)
require.Equal(t, chunkenc.ValFloat, it.Seek(-123), "seek failed") require.Equal(t, chunkenc.ValFloat, it.Seek(-123), "seek failed")
@ -132,24 +132,24 @@ func TestBufferedSeriesIterator(t *testing.T) {
require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
sampleEq(2, 3) sampleEq(2, 3)
prevSampleEq(1, 2, true) prevSampleEq(1, 2, true)
bufferEq([]sample{{t: 1, v: 2}}) bufferEq([]fSample{{t: 1, f: 2}})
require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed")
sampleEq(5, 6) sampleEq(5, 6)
prevSampleEq(4, 5, true) prevSampleEq(4, 5, true)
bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}}) bufferEq([]fSample{{t: 2, f: 3}, {t: 3, f: 4}, {t: 4, f: 5}})
require.Equal(t, chunkenc.ValFloat, it.Seek(5), "seek failed") require.Equal(t, chunkenc.ValFloat, it.Seek(5), "seek failed")
sampleEq(5, 6) sampleEq(5, 6)
prevSampleEq(4, 5, true) prevSampleEq(4, 5, true)
bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}}) bufferEq([]fSample{{t: 2, f: 3}, {t: 3, f: 4}, {t: 4, f: 5}})
require.Equal(t, chunkenc.ValFloat, it.Seek(101), "seek failed") require.Equal(t, chunkenc.ValFloat, it.Seek(101), "seek failed")
sampleEq(101, 10) sampleEq(101, 10)
prevSampleEq(100, 9, true) prevSampleEq(100, 9, true)
bufferEq([]sample{{t: 99, v: 8}, {t: 100, v: 9}}) bufferEq([]fSample{{t: 99, f: 8}, {t: 100, f: 9}})
require.Equal(t, chunkenc.ValNone, it.Next(), "next succeeded unexpectedly") require.Equal(t, chunkenc.ValNone, it.Next(), "next succeeded unexpectedly")
require.Equal(t, chunkenc.ValNone, it.Seek(1024), "seek succeeded unexpectedly") require.Equal(t, chunkenc.ValNone, it.Seek(1024), "seek succeeded unexpectedly")


@ -38,14 +38,14 @@ func TestMemoizedSeriesIterator(t *testing.T) {
} }
it = NewMemoizedIterator(NewListSeriesIterator(samples{ it = NewMemoizedIterator(NewListSeriesIterator(samples{
sample{t: 1, v: 2}, fSample{t: 1, f: 2},
sample{t: 2, v: 3}, fSample{t: 2, f: 3},
sample{t: 3, v: 4}, fSample{t: 3, f: 4},
sample{t: 4, v: 5}, fSample{t: 4, f: 5},
sample{t: 5, v: 6}, fSample{t: 5, f: 6},
sample{t: 99, v: 8}, fSample{t: 99, f: 8},
sample{t: 100, v: 9}, fSample{t: 100, f: 9},
sample{t: 101, v: 10}, fSample{t: 101, f: 10},
}), 2) }), 2)
require.Equal(t, it.Seek(-123), chunkenc.ValFloat, "seek failed") require.Equal(t, it.Seek(-123), chunkenc.ValFloat, "seek failed")


@ -62,116 +62,116 @@ func TestMergeQuerierWithChainMerger(t *testing.T) {
{ {
name: "one querier, two series", name: "one querier, two series",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
), ),
}, },
{ {
name: "two queriers, one different series each", name: "two queriers, one different series each",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
}, { }, {
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
), ),
}, },
{ {
name: "two time unsorted queriers, two series each", name: "two time unsorted queriers, two series each",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{5, 5}, fSample{6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{3, 3}, fSample{4, 4}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}, []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
), ),
), ),
}, },
{ {
name: "five queriers, only two queriers have two time unsorted series each", name: "five queriers, only two queriers have two time unsorted series each",
querierSeries: [][]Series{{}, {}, { querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{5, 5}, fSample{6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{3, 3}, fSample{4, 4}}),
}, {}}, }, {}},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}, []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
), ),
), ),
}, },
{ {
name: "two queriers, only two queriers have two time unsorted series each, with 3 noop and one nil querier together", name: "two queriers, only two queriers have two time unsorted series each, with 3 noop and one nil querier together",
querierSeries: [][]Series{{}, {}, { querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{5, 5}, fSample{6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{3, 3}, fSample{4, 4}}),
}, {}}, }, {}},
extraQueriers: []Querier{NoopQuerier(), NoopQuerier(), nil, NoopQuerier()}, extraQueriers: []Querier{NoopQuerier(), NoopQuerier(), nil, NoopQuerier()},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}, fSample{6, 6}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}, []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
), ),
), ),
}, },
{ {
name: "two queriers, with two series, one is overlapping", name: "two queriers, with two series, one is overlapping",
querierSeries: [][]Series{{}, {}, { querierSeries: [][]Series{{}, {}, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 21, nil, nil}, sample{3, 31, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{2, 21}, fSample{3, 31}, fSample{5, 5}, fSample{6, 6}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
}, { }, {
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 22, nil, nil}, sample{3, 32, nil, nil}}), NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 22}, fSample{3, 32}}),
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{3, 3}, fSample{4, 4}}),
}, {}}, }, {}},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries( NewListSeries(
labels.FromStrings("bar", "baz"), labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 21, nil, nil}, sample{3, 31, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}, fSample{2, 21}, fSample{3, 31}, fSample{5, 5}, fSample{6, 6}},
), ),
NewListSeries( NewListSeries(
labels.FromStrings("foo", "bar"), labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}, []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}},
), ),
), ),
}, },
{ {
name: "two queries, one with NaN samples series", name: "two queries, one with NaN samples series",
querierSeries: [][]Series{{ querierSeries: [][]Series{{
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, math.NaN()}}),
}, { }, {
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{1, 1}}),
}}, }},
expected: NewMockSeriesSet( expected: NewMockSeriesSet(
NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}, sample{1, 1, nil, nil}}), NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, math.NaN()}, fSample{1, 1}}),
), ),
}, },
} { } {
@ -245,108 +245,108 @@ func TestMergeChunkQuerierWithNoVerticalChunkSeriesMerger(t *testing.T) {
{ {
name: "one querier, two series", name: "one querier, two series",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}}, []tsdbutil.Sample{fSample{2, 2}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}}, []tsdbutil.Sample{fSample{2, 2}}),
), ),
}, },
{ {
name: "two secondaries, one different series each", name: "two secondaries, one different series each",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}}, []tsdbutil.Sample{fSample{2, 2}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}}, []tsdbutil.Sample{fSample{2, 2}}),
), ),
}, },
{ {
name: "two secondaries, two not in time order series each", name: "two secondaries, two not in time order series each",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{5, 5}}, []tsdbutil.Sample{fSample{6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}}, []tsdbutil.Sample{fSample{2, 2}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{3, 3}}, []tsdbutil.Sample{fSample{4, 4}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}},
[]tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{fSample{3, 3}},
[]tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{fSample{5, 5}},
[]tsdbutil.Sample{sample{6, 6, nil, nil}}, []tsdbutil.Sample{fSample{6, 6}},
), ),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}},
[]tsdbutil.Sample{sample{2, 2, nil, nil}}, []tsdbutil.Sample{fSample{2, 2}},
[]tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{fSample{3, 3}},
[]tsdbutil.Sample{sample{4, 4, nil, nil}}, []tsdbutil.Sample{fSample{4, 4}},
), ),
), ),
}, },
{ {
name: "five secondaries, only two have two not in time order series each", name: "five secondaries, only two have two not in time order series each",
chkQuerierSeries: [][]ChunkSeries{{}, {}, { chkQuerierSeries: [][]ChunkSeries{{}, {}, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{5, 5}}, []tsdbutil.Sample{fSample{6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}}, []tsdbutil.Sample{fSample{2, 2}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{3, 3}}, []tsdbutil.Sample{fSample{4, 4}}),
}, {}}, }, {}},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}},
[]tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{fSample{3, 3}},
[]tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{fSample{5, 5}},
[]tsdbutil.Sample{sample{6, 6, nil, nil}}, []tsdbutil.Sample{fSample{6, 6}},
), ),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}},
[]tsdbutil.Sample{sample{2, 2, nil, nil}}, []tsdbutil.Sample{fSample{2, 2}},
[]tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{fSample{3, 3}},
[]tsdbutil.Sample{sample{4, 4, nil, nil}}, []tsdbutil.Sample{fSample{4, 4}},
), ),
), ),
}, },
{ {
name: "two secondaries, with two not in time order series each, with 3 noop queries and one nil together", name: "two secondaries, with two not in time order series each, with 3 noop queries and one nil together",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{5, 5}}, []tsdbutil.Sample{fSample{6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}}, []tsdbutil.Sample{fSample{2, 2}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}}),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{3, 3}}, []tsdbutil.Sample{fSample{4, 4}}),
}}, }},
extraQueriers: []ChunkQuerier{NoopChunkedQuerier(), NoopChunkedQuerier(), nil, NoopChunkedQuerier()}, extraQueriers: []ChunkQuerier{NoopChunkedQuerier(), NoopChunkedQuerier(), nil, NoopChunkedQuerier()},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}},
[]tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{fSample{3, 3}},
[]tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{fSample{5, 5}},
[]tsdbutil.Sample{sample{6, 6, nil, nil}}, []tsdbutil.Sample{fSample{6, 6}},
), ),
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"),
[]tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}},
[]tsdbutil.Sample{sample{2, 2, nil, nil}}, []tsdbutil.Sample{fSample{2, 2}},
[]tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{fSample{3, 3}},
[]tsdbutil.Sample{sample{4, 4, nil, nil}}, []tsdbutil.Sample{fSample{4, 4}},
), ),
), ),
}, },
{ {
name: "two queries, one with NaN samples series", name: "two queries, one with NaN samples series",
chkQuerierSeries: [][]ChunkSeries{{ chkQuerierSeries: [][]ChunkSeries{{
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, math.NaN()}}),
}, { }, {
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{1, 1}}),
}}, }},
expected: NewMockChunkSeriesSet( expected: NewMockChunkSeriesSet(
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{fSample{0, math.NaN()}}, []tsdbutil.Sample{fSample{1, 1}}),
), ),
}, },
} { } {
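For context on the change running through these test tables: the old catch-all sample{t, v, h, fh} struct is replaced by one concrete type per sample flavor (fSample, hSample, fhSample), so float-only cases no longer carry nil histogram fields. A minimal sketch of what such a typed sample could look like, as an illustration only: the field names follow their usage in this diff (t, f, h, fh) and the accessor names follow the tsdbutil.Sample usage seen later (T(), F()); the real definitions live elsewhere in this package and may differ.

// Hedged sketch, not the definitions from this commit.
type fSample struct {
	t int64
	f float64
}

func (s fSample) T() int64                      { return s.t }
func (s fSample) F() float64                    { return s.f }
func (s fSample) H() *histogram.Histogram       { return nil }
func (s fSample) FH() *histogram.FloatHistogram { return nil }

// hSample and fhSample are analogous, carrying a *histogram.Histogram in h
// and a *histogram.FloatHistogram in fh respectively.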
@@ -385,12 +385,12 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
m := NewCompactingChunkSeriesMerger(ChainedSeriesMerge) m := NewCompactingChunkSeriesMerger(ChainedSeriesMerge)
// histogramSample returns a histogram that is unique to the ts. // histogramSample returns a histogram that is unique to the ts.
histogramSample := func(ts int64) sample { histogramSample := func(ts int64) hSample {
return sample{t: ts, h: tsdbutil.GenerateTestHistogram(int(ts + 1))} return hSample{t: ts, h: tsdbutil.GenerateTestHistogram(int(ts + 1))}
} }
floatHistogramSample := func(ts int64) sample { floatHistogramSample := func(ts int64) fhSample {
return sample{t: ts, fh: tsdbutil.GenerateTestFloatHistogram(int(ts + 1))} return fhSample{t: ts, fh: tsdbutil.GenerateTestFloatHistogram(int(ts + 1))}
} }
for _, tc := range []struct { for _, tc := range []struct {
@@ -408,9 +408,9 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
{ {
name: "single series", name: "single series",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}}),
}, },
{ {
name: "two empty series", name: "two empty series",
@@ -423,55 +423,55 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
{ {
name: "two non overlapping", name: "two non overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{7, 7}, fSample{9, 9}}, []tsdbutil.Sample{fSample{10, 10}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}, fSample{5, 5}}, []tsdbutil.Sample{fSample{7, 7}, fSample{9, 9}}, []tsdbutil.Sample{fSample{10, 10}}),
}, },
{ {
name: "two overlapping", name: "two overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{8, 8, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}, fSample{8, 8}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{7, 7}, fSample{9, 9}}, []tsdbutil.Sample{fSample{10, 10}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{7, 7, nil, nil}, sample{8, 8, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}, fSample{7, 7}, fSample{8, 8}, fSample{9, 9}}, []tsdbutil.Sample{fSample{10, 10}}),
}, },
{ {
name: "two duplicated", name: "two duplicated",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
}, },
{ {
name: "three overlapping", name: "three overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{6, 6, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{4, 4, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{0, 0}, fSample{4, 4}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}, fSample{5, 5}, fSample{6, 6}}),
}, },
{ {
name: "three in chained overlap", name: "three in chained overlap",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{4, 4, nil, nil}, sample{6, 66, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{4, 4}, fSample{6, 66}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{6, 6, nil, nil}, sample{10, 10, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{6, 6}, fSample{10, 10}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil}, sample{6, 66, nil, nil}, sample{10, 10, nil, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}, fSample{5, 5}, fSample{6, 66}, fSample{10, 10}}),
}, },
{ {
name: "three in chained overlap complex", name: "three in chained overlap complex",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}, sample{15, 15, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{0, 0}, fSample{5, 5}}, []tsdbutil.Sample{fSample{10, 10}, fSample{15, 15}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{20, 20, nil, nil}}, []tsdbutil.Sample{sample{25, 25, nil, nil}, sample{30, 30, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{2, 2}, fSample{20, 20}}, []tsdbutil.Sample{fSample{25, 25}, fSample{30, 30}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{18, 18, nil, nil}, sample{26, 26, nil, nil}}, []tsdbutil.Sample{sample{31, 31, nil, nil}, sample{35, 35, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{18, 18}, fSample{26, 26}}, []tsdbutil.Sample{fSample{31, 31}, fSample{35, 35}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{0, 0, nil, nil}, sample{2, 2, nil, nil}, sample{5, 5, nil, nil}, sample{10, 10, nil, nil}, sample{15, 15, nil, nil}, sample{18, 18, nil, nil}, sample{20, 20, nil, nil}, sample{25, 25, nil, nil}, sample{26, 26, nil, nil}, sample{30, 30, nil, nil}}, []tsdbutil.Sample{fSample{0, 0}, fSample{2, 2}, fSample{5, 5}, fSample{10, 10}, fSample{15, 15}, fSample{18, 18}, fSample{20, 20}, fSample{25, 25}, fSample{26, 26}, fSample{30, 30}},
[]tsdbutil.Sample{sample{31, 31, nil, nil}, sample{35, 35, nil, nil}}, []tsdbutil.Sample{fSample{31, 31}, fSample{35, 35}},
), ),
}, },
{ {
@@ -511,13 +511,13 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
name: "histogram chunks overlapping with float chunks", name: "histogram chunks overlapping with float chunks",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{histogramSample(0), histogramSample(5)}, []tsdbutil.Sample{histogramSample(10), histogramSample(15)}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{histogramSample(0), histogramSample(5)}, []tsdbutil.Sample{histogramSample(10), histogramSample(15)}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{12, 12, nil, nil}}, []tsdbutil.Sample{sample{14, 14, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{12, 12}}, []tsdbutil.Sample{fSample{14, 14}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{histogramSample(0)}, []tsdbutil.Sample{histogramSample(0)},
[]tsdbutil.Sample{sample{1, 1, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}},
[]tsdbutil.Sample{histogramSample(5), histogramSample(10)}, []tsdbutil.Sample{histogramSample(5), histogramSample(10)},
[]tsdbutil.Sample{sample{12, 12, nil, nil}, sample{14, 14, nil, nil}}, []tsdbutil.Sample{fSample{12, 12}, fSample{14, 14}},
[]tsdbutil.Sample{histogramSample(15)}, []tsdbutil.Sample{histogramSample(15)},
), ),
}, },
@@ -537,13 +537,13 @@ func TestCompactingChunkSeriesMerger(t *testing.T) {
name: "float histogram chunks overlapping with float chunks", name: "float histogram chunks overlapping with float chunks",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{floatHistogramSample(0), floatHistogramSample(5)}, []tsdbutil.Sample{floatHistogramSample(10), floatHistogramSample(15)}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{floatHistogramSample(0), floatHistogramSample(5)}, []tsdbutil.Sample{floatHistogramSample(10), floatHistogramSample(15)}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{12, 12, nil, nil}}, []tsdbutil.Sample{sample{14, 14, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{12, 12}}, []tsdbutil.Sample{fSample{14, 14}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{floatHistogramSample(0)}, []tsdbutil.Sample{floatHistogramSample(0)},
[]tsdbutil.Sample{sample{1, 1, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}},
[]tsdbutil.Sample{floatHistogramSample(5), floatHistogramSample(10)}, []tsdbutil.Sample{floatHistogramSample(5), floatHistogramSample(10)},
[]tsdbutil.Sample{sample{12, 12, nil, nil}, sample{14, 14, nil, nil}}, []tsdbutil.Sample{fSample{12, 12}, fSample{14, 14}},
[]tsdbutil.Sample{floatHistogramSample(15)}, []tsdbutil.Sample{floatHistogramSample(15)},
), ),
}, },
@@ -592,9 +592,9 @@ func TestConcatenatingChunkSeriesMerger(t *testing.T) {
{ {
name: "single series", name: "single series",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}}),
}, },
{ {
name: "two empty series", name: "two empty series",
@@ -607,70 +607,70 @@ func TestConcatenatingChunkSeriesMerger(t *testing.T) {
{ {
name: "two non overlapping", name: "two non overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{7, 7}, fSample{9, 9}}, []tsdbutil.Sample{fSample{10, 10}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}, fSample{5, 5}}, []tsdbutil.Sample{fSample{7, 7}, fSample{9, 9}}, []tsdbutil.Sample{fSample{10, 10}}),
}, },
{ {
name: "two overlapping", name: "two overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{8, 8, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}, fSample{8, 8}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{7, 7}, fSample{9, 9}}, []tsdbutil.Sample{fSample{10, 10}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{8, 8, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}}, []tsdbutil.Sample{fSample{3, 3}, fSample{8, 8}},
[]tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}, []tsdbutil.Sample{fSample{7, 7}, fSample{9, 9}}, []tsdbutil.Sample{fSample{10, 10}},
), ),
}, },
{ {
name: "two duplicated", name: "two duplicated",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
[]tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
), ),
}, },
{ {
name: "three overlapping", name: "three overlapping",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{6, 6, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{4, 4, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{0, 0}, fSample{4, 4}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
[]tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{6, 6, nil, nil}}, []tsdbutil.Sample{fSample{2, 2}, fSample{3, 3}, fSample{6, 6}},
[]tsdbutil.Sample{sample{0, 0, nil, nil}, sample{4, 4, nil, nil}}, []tsdbutil.Sample{fSample{0, 0}, fSample{4, 4}},
), ),
}, },
{ {
name: "three in chained overlap", name: "three in chained overlap",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{4, 4, nil, nil}, sample{6, 66, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{4, 4}, fSample{6, 66}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{6, 6, nil, nil}, sample{10, 10, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{6, 6}, fSample{10, 10}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{5, 5}},
[]tsdbutil.Sample{sample{4, 4, nil, nil}, sample{6, 66, nil, nil}}, []tsdbutil.Sample{fSample{4, 4}, fSample{6, 66}},
[]tsdbutil.Sample{sample{6, 6, nil, nil}, sample{10, 10, nil, nil}}, []tsdbutil.Sample{fSample{6, 6}, fSample{10, 10}},
), ),
}, },
{ {
name: "three in chained overlap complex", name: "three in chained overlap complex",
input: []ChunkSeries{ input: []ChunkSeries{
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}, sample{15, 15, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{0, 0}, fSample{5, 5}}, []tsdbutil.Sample{fSample{10, 10}, fSample{15, 15}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{20, 20, nil, nil}}, []tsdbutil.Sample{sample{25, 25, nil, nil}, sample{30, 30, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{2, 2}, fSample{20, 20}}, []tsdbutil.Sample{fSample{25, 25}, fSample{30, 30}}),
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{18, 18, nil, nil}, sample{26, 26, nil, nil}}, []tsdbutil.Sample{sample{31, 31, nil, nil}, sample{35, 35, nil, nil}}), NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{fSample{18, 18}, fSample{26, 26}}, []tsdbutil.Sample{fSample{31, 31}, fSample{35, 35}}),
}, },
expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"),
[]tsdbutil.Sample{sample{0, 0, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}, sample{15, 15, nil, nil}}, []tsdbutil.Sample{fSample{0, 0}, fSample{5, 5}}, []tsdbutil.Sample{fSample{10, 10}, fSample{15, 15}},
[]tsdbutil.Sample{sample{2, 2, nil, nil}, sample{20, 20, nil, nil}}, []tsdbutil.Sample{sample{25, 25, nil, nil}, sample{30, 30, nil, nil}}, []tsdbutil.Sample{fSample{2, 2}, fSample{20, 20}}, []tsdbutil.Sample{fSample{25, 25}, fSample{30, 30}},
[]tsdbutil.Sample{sample{18, 18, nil, nil}, sample{26, 26, nil, nil}}, []tsdbutil.Sample{sample{31, 31, nil, nil}, sample{35, 35, nil, nil}}, []tsdbutil.Sample{fSample{18, 18}, fSample{26, 26}}, []tsdbutil.Sample{fSample{31, 31}, fSample{35, 35}},
), ),
}, },
{ {
@@ -807,38 +807,38 @@ func TestChainSampleIterator(t *testing.T) {
}{ }{
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}), NewListSeriesIterator(samples{fSample{0, 0}, fSample{1, 1}}),
}, },
expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, expected: []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}), NewListSeriesIterator(samples{fSample{0, 0}, fSample{1, 1}}),
NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeriesIterator(samples{fSample{2, 2}, fSample{3, 3}}),
}, },
expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, expected: []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{3, 3, nil, nil}}), NewListSeriesIterator(samples{fSample{0, 0}, fSample{3, 3}}),
NewListSeriesIterator(samples{sample{1, 1, nil, nil}, sample{4, 4, nil, nil}}), NewListSeriesIterator(samples{fSample{1, 1}, fSample{4, 4}}),
NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{5, 5, nil, nil}}), NewListSeriesIterator(samples{fSample{2, 2}, fSample{5, 5}}),
}, },
expected: []tsdbutil.Sample{ expected: []tsdbutil.Sample{
sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil}, fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}, fSample{4, 4}, fSample{5, 5},
}, },
}, },
// Overlap. // Overlap.
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}), NewListSeriesIterator(samples{fSample{0, 0}, fSample{1, 1}}),
NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{2, 2, nil, nil}}), NewListSeriesIterator(samples{fSample{0, 0}, fSample{2, 2}}),
NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeriesIterator(samples{fSample{2, 2}, fSample{3, 3}}),
NewListSeriesIterator(samples{}), NewListSeriesIterator(samples{}),
NewListSeriesIterator(samples{}), NewListSeriesIterator(samples{}),
NewListSeriesIterator(samples{}), NewListSeriesIterator(samples{}),
}, },
expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, expected: []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}},
}, },
} { } {
merged := ChainSampleIteratorFromIterators(nil, tc.input) merged := ChainSampleIteratorFromIterators(nil, tc.input)
@@ -856,42 +856,42 @@ func TestChainSampleIteratorSeek(t *testing.T) {
}{ }{
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), NewListSeriesIterator(samples{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
}, },
seek: 1, seek: 1,
expected: []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, expected: []tsdbutil.Sample{fSample{1, 1}, fSample{2, 2}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}), NewListSeriesIterator(samples{fSample{0, 0}, fSample{1, 1}}),
NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeriesIterator(samples{fSample{2, 2}, fSample{3, 3}}),
}, },
seek: 2, seek: 2,
expected: []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, expected: []tsdbutil.Sample{fSample{2, 2}, fSample{3, 3}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{3, 3, nil, nil}}), NewListSeriesIterator(samples{fSample{0, 0}, fSample{3, 3}}),
NewListSeriesIterator(samples{sample{1, 1, nil, nil}, sample{4, 4, nil, nil}}), NewListSeriesIterator(samples{fSample{1, 1}, fSample{4, 4}}),
NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{5, 5, nil, nil}}), NewListSeriesIterator(samples{fSample{2, 2}, fSample{5, 5}}),
}, },
seek: 2, seek: 2,
expected: []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil}}, expected: []tsdbutil.Sample{fSample{2, 2}, fSample{3, 3}, fSample{4, 4}, fSample{5, 5}},
}, },
{ {
input: []chunkenc.Iterator{ input: []chunkenc.Iterator{
NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeriesIterator(samples{fSample{0, 0}, fSample{2, 2}, fSample{3, 3}}),
NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), NewListSeriesIterator(samples{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}}),
}, },
seek: 0, seek: 0,
expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, expected: []tsdbutil.Sample{fSample{0, 0}, fSample{1, 1}, fSample{2, 2}, fSample{3, 3}},
}, },
} { } {
merged := ChainSampleIteratorFromIterators(nil, tc.input) merged := ChainSampleIteratorFromIterators(nil, tc.input)
actual := []tsdbutil.Sample{} actual := []tsdbutil.Sample{}
if merged.Seek(tc.seek) == chunkenc.ValFloat { if merged.Seek(tc.seek) == chunkenc.ValFloat {
t, v := merged.At() t, f := merged.At()
actual = append(actual, sample{t, v, nil, nil}) actual = append(actual, fSample{t, f})
} }
s, err := ExpandSamples(merged, nil) s, err := ExpandSamples(merged, nil)
require.NoError(t, err) require.NoError(t, err)
@@ -906,7 +906,7 @@ func makeSeries(numSeries, numSamples int) []Series {
labels := labels.FromStrings("foo", fmt.Sprintf("bar%d", j)) labels := labels.FromStrings("foo", fmt.Sprintf("bar%d", j))
samples := []tsdbutil.Sample{} samples := []tsdbutil.Sample{}
for k := 0; k < numSamples; k++ { for k := 0; k < numSamples; k++ {
samples = append(samples, sample{t: int64(k), v: float64(k)}) samples = append(samples, fSample{t: int64(k), f: float64(k)})
} }
series = append(series, NewListSeries(labels, samples)) series = append(series, NewListSeries(labels, samples))
} }

View file

@@ -17,6 +17,7 @@ import (
"errors" "errors"
"fmt" "fmt"
"io" "io"
"math"
"net/http" "net/http"
"sort" "sort"
"strings" "strings"
@@ -173,7 +174,7 @@ func FromQueryResult(sortSeries bool, res *prompb.QueryResult) storage.SeriesSet
return errSeriesSet{err: err} return errSeriesSet{err: err}
} }
lbls := labelProtosToLabels(ts.Labels) lbls := labelProtosToLabels(ts.Labels)
series = append(series, &concreteSeries{labels: lbls, samples: ts.Samples}) series = append(series, &concreteSeries{labels: lbls, floats: ts.Samples, histograms: ts.Histograms})
} }
if sortSeries { if sortSeries {
@@ -359,8 +360,9 @@ func (c *concreteSeriesSet) Warnings() storage.Warnings { return nil }
// concreteSeries implements storage.Series. // concreteSeries implements storage.Series.
type concreteSeries struct { type concreteSeries struct {
labels labels.Labels labels labels.Labels
samples []prompb.Sample floats []prompb.Sample
histograms []prompb.Histogram
} }
func (c *concreteSeries) Labels() labels.Labels { func (c *concreteSeries) Labels() labels.Labels {
@@ -372,84 +374,167 @@ func (c *concreteSeries) Iterator(it chunkenc.Iterator) chunkenc.Iterator {
csi.reset(c) csi.reset(c)
return csi return csi
} }
return newConcreteSeriersIterator(c) return newConcreteSeriesIterator(c)
} }
// concreteSeriesIterator implements storage.SeriesIterator. // concreteSeriesIterator implements storage.SeriesIterator.
type concreteSeriesIterator struct { type concreteSeriesIterator struct {
cur int floatsCur int
series *concreteSeries histogramsCur int
curValType chunkenc.ValueType
series *concreteSeries
} }
func newConcreteSeriersIterator(series *concreteSeries) chunkenc.Iterator { func newConcreteSeriesIterator(series *concreteSeries) chunkenc.Iterator {
return &concreteSeriesIterator{ return &concreteSeriesIterator{
cur: -1, floatsCur: -1,
series: series, histogramsCur: -1,
curValType: chunkenc.ValNone,
series: series,
} }
} }
func (c *concreteSeriesIterator) reset(series *concreteSeries) { func (c *concreteSeriesIterator) reset(series *concreteSeries) {
c.cur = -1 c.floatsCur = -1
c.histogramsCur = -1
c.curValType = chunkenc.ValNone
c.series = series c.series = series
} }
// Seek implements storage.SeriesIterator. // Seek implements storage.SeriesIterator.
func (c *concreteSeriesIterator) Seek(t int64) chunkenc.ValueType { func (c *concreteSeriesIterator) Seek(t int64) chunkenc.ValueType {
if c.cur == -1 { if c.floatsCur == -1 {
c.cur = 0 c.floatsCur = 0
} }
if c.cur >= len(c.series.samples) { if c.histogramsCur == -1 {
c.histogramsCur = 0
}
if c.floatsCur >= len(c.series.floats) && c.histogramsCur >= len(c.series.histograms) {
return chunkenc.ValNone return chunkenc.ValNone
} }
// No-op check. // No-op check.
if s := c.series.samples[c.cur]; s.Timestamp >= t { if (c.curValType == chunkenc.ValFloat && c.series.floats[c.floatsCur].Timestamp >= t) ||
return chunkenc.ValFloat ((c.curValType == chunkenc.ValHistogram || c.curValType == chunkenc.ValFloatHistogram) && c.series.histograms[c.histogramsCur].Timestamp >= t) {
return c.curValType
} }
// Do binary search between current position and end.
c.cur += sort.Search(len(c.series.samples)-c.cur, func(n int) bool { c.curValType = chunkenc.ValNone
return c.series.samples[n+c.cur].Timestamp >= t
// Binary search between current position and end for both float and histogram samples.
c.floatsCur += sort.Search(len(c.series.floats)-c.floatsCur, func(n int) bool {
return c.series.floats[n+c.floatsCur].Timestamp >= t
}) })
if c.cur < len(c.series.samples) { c.histogramsCur += sort.Search(len(c.series.histograms)-c.histogramsCur, func(n int) bool {
return chunkenc.ValFloat return c.series.histograms[n+c.histogramsCur].Timestamp >= t
})
if c.floatsCur < len(c.series.floats) && c.histogramsCur < len(c.series.histograms) {
// If float samples and histogram samples have overlapping timestamps, prefer the float samples.
if c.series.floats[c.floatsCur].Timestamp <= c.series.histograms[c.histogramsCur].Timestamp {
c.curValType = chunkenc.ValFloat
} else {
c.curValType = getHistogramValType(&c.series.histograms[c.histogramsCur])
}
// When the timestamps do not overlap, the cursor for the non-selected sample type has advanced too
// far; we decrement it back down here.
if c.series.floats[c.floatsCur].Timestamp != c.series.histograms[c.histogramsCur].Timestamp {
if c.curValType == chunkenc.ValFloat {
c.histogramsCur--
} else {
c.floatsCur--
}
}
} else if c.floatsCur < len(c.series.floats) {
c.curValType = chunkenc.ValFloat
} else if c.histogramsCur < len(c.series.histograms) {
c.curValType = getHistogramValType(&c.series.histograms[c.histogramsCur])
} }
return chunkenc.ValNone
// TODO(beorn7): Add histogram support. return c.curValType
}
func getHistogramValType(h *prompb.Histogram) chunkenc.ValueType {
if _, isInt := h.GetCount().(*prompb.Histogram_CountInt); isInt {
return chunkenc.ValHistogram
}
return chunkenc.ValFloatHistogram
} }
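To make the equal-timestamp rule above concrete: when Seek lands on a float sample and a histogram sample at the same timestamp, the float wins and neither cursor is rolled back. A hedged, minimal sketch with invented data; it assumes the prompb, labels, tsdbutil, chunkenc, and fmt packages are available as in the surrounding files, and uses the GenerateTestHistogram and HistogramToHistogramProto helpers seen elsewhere in this diff:

s := &concreteSeries{
	labels: labels.FromStrings("foo", "bar"),
	floats: []prompb.Sample{{Value: 1, Timestamp: 8}},
	histograms: []prompb.Histogram{
		HistogramToHistogramProto(8, tsdbutil.GenerateTestHistogram(1)),
	},
}
it := s.Iterator(nil)
// Both cursors land on ts=8; the tie resolves to the float sample.
fmt.Println(it.Seek(8) == chunkenc.ValFloat) // true

This matches the overlap behavior exercised in the _FloatAndHistogramSamples test further down.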
// At implements chunkenc.Iterator. // At implements chunkenc.Iterator.
func (c *concreteSeriesIterator) At() (t int64, v float64) { func (c *concreteSeriesIterator) At() (t int64, v float64) {
s := c.series.samples[c.cur] if c.curValType != chunkenc.ValFloat {
panic("iterator is not on a float sample")
}
s := c.series.floats[c.floatsCur]
return s.Timestamp, s.Value return s.Timestamp, s.Value
} }
// AtHistogram always returns (0, nil) because there is no support for histogram // AtHistogram implements chunkenc.Iterator.
// values yet.
// TODO(beorn7): Fix that for histogram support in remote storage.
func (c *concreteSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { func (c *concreteSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
return 0, nil if c.curValType != chunkenc.ValHistogram {
panic("iterator is not on an integer histogram sample")
}
h := c.series.histograms[c.histogramsCur]
return h.Timestamp, HistogramProtoToHistogram(h)
} }
// AtFloatHistogram always returns (0, nil) because there is no support for histogram // AtFloatHistogram implements chunkenc.Iterator.
// values yet.
// TODO(beorn7): Fix that for histogram support in remote storage.
func (c *concreteSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { func (c *concreteSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) {
return 0, nil switch c.curValType {
case chunkenc.ValHistogram:
fh := c.series.histograms[c.histogramsCur]
return fh.Timestamp, HistogramProtoToFloatHistogram(fh)
case chunkenc.ValFloatHistogram:
fh := c.series.histograms[c.histogramsCur]
return fh.Timestamp, FloatHistogramProtoToFloatHistogram(fh)
default:
panic("iterator is not on a histogram sample")
}
} }
// AtT implements chunkenc.Iterator. // AtT implements chunkenc.Iterator.
func (c *concreteSeriesIterator) AtT() int64 { func (c *concreteSeriesIterator) AtT() int64 {
s := c.series.samples[c.cur] if c.curValType == chunkenc.ValHistogram || c.curValType == chunkenc.ValFloatHistogram {
return s.Timestamp return c.series.histograms[c.histogramsCur].Timestamp
}
return c.series.floats[c.floatsCur].Timestamp
} }
const noTS = int64(math.MaxInt64)
// Next implements chunkenc.Iterator. // Next implements chunkenc.Iterator.
func (c *concreteSeriesIterator) Next() chunkenc.ValueType { func (c *concreteSeriesIterator) Next() chunkenc.ValueType {
c.cur++ peekFloatTS := noTS
if c.cur < len(c.series.samples) { if c.floatsCur+1 < len(c.series.floats) {
return chunkenc.ValFloat peekFloatTS = c.series.floats[c.floatsCur+1].Timestamp
} }
return chunkenc.ValNone peekHistTS := noTS
// TODO(beorn7): Add histogram support. if c.histogramsCur+1 < len(c.series.histograms) {
peekHistTS = c.series.histograms[c.histogramsCur+1].Timestamp
}
c.curValType = chunkenc.ValNone
if peekFloatTS < peekHistTS {
c.floatsCur++
c.curValType = chunkenc.ValFloat
} else if peekHistTS < peekFloatTS {
c.histogramsCur++
c.curValType = chunkenc.ValHistogram
} else if peekFloatTS == noTS && peekHistTS == noTS {
// This only happens when the iterator is exhausted; we set the cursors off the end to prevent
// Seek() from returning anything afterwards.
c.floatsCur = len(c.series.floats)
c.histogramsCur = len(c.series.histograms)
} else {
// Prefer float samples to histogram samples if there's a conflict. We advance the cursor for histograms
// anyway; otherwise the histogram sample will get selected on the next call to Next().
c.floatsCur++
c.histogramsCur++
c.curValType = chunkenc.ValFloat
}
return c.curValType
} }
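Next above is a two-way merge over the float and histogram slices ordered by timestamp, with noTS (math.MaxInt64) as the exhausted sentinel and ties resolved in favor of floats, skipping the conflicting histogram. A hedged sketch of the resulting iteration order, with invented data and the same assumed imports as in the previous sketch:

s := &concreteSeries{
	labels: labels.FromStrings("foo", "bar"),
	floats: []prompb.Sample{{Value: 42, Timestamp: 2}},
	histograms: []prompb.Histogram{
		HistogramToHistogramProto(1, tsdbutil.GenerateTestHistogram(1)),
		HistogramToHistogramProto(2, tsdbutil.GenerateTestHistogram(2)),
	},
}
it := s.Iterator(nil)
for vt := it.Next(); vt != chunkenc.ValNone; vt = it.Next() {
	fmt.Println(it.AtT(), vt)
}
// Yields ts=1 as a histogram, then ts=2 as a float; the histogram at ts=2
// is skipped because its cursor is advanced past the conflict.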
// Err implements chunkenc.Iterator. // Err implements chunkenc.Iterator.
@@ -557,10 +642,10 @@ func HistogramProtoToHistogram(hp prompb.Histogram) *histogram.Histogram {
} }
} }
// HistogramProtoToFloatHistogram extracts a (normal integer) Histogram from the // FloatHistogramProtoToFloatHistogram extracts a float histogram from the
// provided proto message to a Float Histogram. The caller has to make sure that // provided proto message as a FloatHistogram. The caller has to make sure that
// the proto message represents an float histogram and not a integer histogram. // the proto message represents a float histogram and not an integer histogram.
func HistogramProtoToFloatHistogram(hp prompb.Histogram) *histogram.FloatHistogram { func FloatHistogramProtoToFloatHistogram(hp prompb.Histogram) *histogram.FloatHistogram {
return &histogram.FloatHistogram{ return &histogram.FloatHistogram{
CounterResetHint: histogram.CounterResetHint(hp.ResetHint), CounterResetHint: histogram.CounterResetHint(hp.ResetHint),
Schema: hp.Schema, Schema: hp.Schema,
@@ -575,6 +660,24 @@ func HistogramProtoToFloatHistogram(hp prompb.Histogram) *histogram.FloatHistogr
} }
} }
// HistogramProtoToFloatHistogram extracts and converts a (normal integer) histogram from the provided proto message
// to a float histogram. The caller has to make sure that the proto message represents an integer histogram and not a
// float histogram.
func HistogramProtoToFloatHistogram(hp prompb.Histogram) *histogram.FloatHistogram {
return &histogram.FloatHistogram{
CounterResetHint: histogram.CounterResetHint(hp.ResetHint),
Schema: hp.Schema,
ZeroThreshold: hp.ZeroThreshold,
ZeroCount: float64(hp.GetZeroCountInt()),
Count: float64(hp.GetCountInt()),
Sum: hp.Sum,
PositiveSpans: spansProtoToSpans(hp.GetPositiveSpans()),
PositiveBuckets: deltasToCounts(hp.GetPositiveDeltas()),
NegativeSpans: spansProtoToSpans(hp.GetNegativeSpans()),
NegativeBuckets: deltasToCounts(hp.GetNegativeDeltas()),
}
}
func spansProtoToSpans(s []prompb.BucketSpan) []histogram.Span { func spansProtoToSpans(s []prompb.BucketSpan) []histogram.Span {
spans := make([]histogram.Span, len(s)) spans := make([]histogram.Span, len(s))
for i := 0; i < len(s); i++ { for i := 0; i < len(s); i++ {
@@ -584,6 +687,16 @@ func spansProtoToSpans(s []prompb.BucketSpan) []histogram.Span {
return spans return spans
} }
func deltasToCounts(deltas []int64) []float64 {
counts := make([]float64, len(deltas))
var cur float64
for i, d := range deltas {
cur += float64(d)
counts[i] = cur
}
return counts
}
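deltasToCounts above undoes the delta encoding used for integer histogram buckets: each output element is the running sum of the deltas so far, widened to float64. A quick worked example (fmt assumed imported):

// 2, 2+(-1)=1, 1+3=4
fmt.Println(deltasToCounts([]int64{2, -1, 3})) // [2 1 4]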
func HistogramToHistogramProto(timestamp int64, h *histogram.Histogram) prompb.Histogram { func HistogramToHistogramProto(timestamp int64, h *histogram.Histogram) prompb.Histogram {
return prompb.Histogram{ return prompb.Histogram{
Count: &prompb.Histogram_CountInt{CountInt: h.Count}, Count: &prompb.Histogram_CountInt{CountInt: h.Count},

View file

@@ -29,6 +29,7 @@ import (
"github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage"
"github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunkenc"
"github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/chunks"
"github.com/prometheus/prometheus/tsdb/tsdbutil"
) )
var testHistogram = histogram.Histogram{ var testHistogram = histogram.Histogram{
@@ -174,12 +175,12 @@ func TestValidateLabelsAndMetricName(t *testing.T) {
func TestConcreteSeriesSet(t *testing.T) { func TestConcreteSeriesSet(t *testing.T) {
series1 := &concreteSeries{ series1 := &concreteSeries{
labels: labels.FromStrings("foo", "bar"), labels: labels.FromStrings("foo", "bar"),
samples: []prompb.Sample{{Value: 1, Timestamp: 2}}, floats: []prompb.Sample{{Value: 1, Timestamp: 2}},
} }
series2 := &concreteSeries{ series2 := &concreteSeries{
labels: labels.FromStrings("foo", "baz"), labels: labels.FromStrings("foo", "baz"),
samples: []prompb.Sample{{Value: 3, Timestamp: 4}}, floats: []prompb.Sample{{Value: 3, Timestamp: 4}},
} }
c := &concreteSeriesSet{ c := &concreteSeriesSet{
series: []storage.Series{series1, series2}, series: []storage.Series{series1, series2},
@@ -206,10 +207,10 @@ func TestConcreteSeriesClonesLabels(t *testing.T) {
require.Equal(t, lbls, gotLabels) require.Equal(t, lbls, gotLabels)
} }
func TestConcreteSeriesIterator(t *testing.T) { func TestConcreteSeriesIterator_FloatSamples(t *testing.T) {
series := &concreteSeries{ series := &concreteSeries{
labels: labels.FromStrings("foo", "bar"), labels: labels.FromStrings("foo", "bar"),
samples: []prompb.Sample{ floats: []prompb.Sample{
{Value: 1, Timestamp: 1}, {Value: 1, Timestamp: 1},
{Value: 1.5, Timestamp: 1}, {Value: 1.5, Timestamp: 1},
{Value: 2, Timestamp: 2}, {Value: 2, Timestamp: 2},
@@ -255,6 +256,165 @@ func TestConcreteSeriesIterator(t *testing.T) {
require.Equal(t, chunkenc.ValNone, it.Seek(2)) require.Equal(t, chunkenc.ValNone, it.Seek(2))
} }
func TestConcreteSeriesIterator_HistogramSamples(t *testing.T) {
histograms := tsdbutil.GenerateTestHistograms(5)
histProtos := make([]prompb.Histogram, len(histograms))
for i, h := range histograms {
// Results in ts sequence of 1, 1, 2, 3, 4.
var ts int64
if i == 0 {
ts = 1
} else {
ts = int64(i)
}
histProtos[i] = HistogramToHistogramProto(ts, h)
}
series := &concreteSeries{
labels: labels.FromStrings("foo", "bar"),
histograms: histProtos,
}
it := series.Iterator(nil)
// Seek to the first sample with ts=1.
require.Equal(t, chunkenc.ValHistogram, it.Seek(1))
ts, v := it.AtHistogram()
require.Equal(t, int64(1), ts)
require.Equal(t, histograms[0], v)
// Advance one further; the next sample still has ts=1.
require.Equal(t, chunkenc.ValHistogram, it.Next())
ts, v = it.AtHistogram()
require.Equal(t, int64(1), ts)
require.Equal(t, histograms[1], v)
// Seek again to 1 and make sure we stay where we are.
require.Equal(t, chunkenc.ValHistogram, it.Seek(1))
ts, v = it.AtHistogram()
require.Equal(t, int64(1), ts)
require.Equal(t, histograms[1], v)
// Another seek.
require.Equal(t, chunkenc.ValHistogram, it.Seek(3))
ts, v = it.AtHistogram()
require.Equal(t, int64(3), ts)
require.Equal(t, histograms[3], v)
// And we don't go back.
require.Equal(t, chunkenc.ValHistogram, it.Seek(2))
ts, v = it.AtHistogram()
require.Equal(t, int64(3), ts)
require.Equal(t, histograms[3], v)
// Seek beyond the end.
require.Equal(t, chunkenc.ValNone, it.Seek(5))
// And we don't go back. (This exposes issue #10027.)
require.Equal(t, chunkenc.ValNone, it.Seek(2))
}
func TestConcreteSeriesIterator_FloatAndHistogramSamples(t *testing.T) {
// Series starts as histograms, then transitions to floats at ts=8 (with an overlap from ts=8 to ts=10), then
// transitions back to histograms at ts=16.
histograms := tsdbutil.GenerateTestHistograms(15)
histProtos := make([]prompb.Histogram, len(histograms))
for i, h := range histograms {
if i < 10 {
histProtos[i] = HistogramToHistogramProto(int64(i+1), h)
} else {
histProtos[i] = HistogramToHistogramProto(int64(i+6), h)
}
}
series := &concreteSeries{
labels: labels.FromStrings("foo", "bar"),
floats: []prompb.Sample{
{Value: 1, Timestamp: 8},
{Value: 2, Timestamp: 9},
{Value: 3, Timestamp: 10},
{Value: 4, Timestamp: 11},
{Value: 5, Timestamp: 12},
{Value: 6, Timestamp: 13},
{Value: 7, Timestamp: 14},
{Value: 8, Timestamp: 15},
},
histograms: histProtos,
}
it := series.Iterator(nil)
var (
ts int64
v float64
h *histogram.Histogram
fh *histogram.FloatHistogram
)
require.Equal(t, chunkenc.ValHistogram, it.Next())
ts, h = it.AtHistogram()
require.Equal(t, int64(1), ts)
require.Equal(t, histograms[0], h)
require.Equal(t, chunkenc.ValHistogram, it.Next())
ts, h = it.AtHistogram()
require.Equal(t, int64(2), ts)
require.Equal(t, histograms[1], h)
// Seek to the first float/histogram sample overlap at ts=8 (should prefer the float sample).
require.Equal(t, chunkenc.ValFloat, it.Seek(8))
ts, v = it.At()
require.Equal(t, int64(8), ts)
require.Equal(t, 1., v)
// Attempting to seek backwards should do nothing.
require.Equal(t, chunkenc.ValFloat, it.Seek(1))
ts, v = it.At()
require.Equal(t, int64(8), ts)
require.Equal(t, 1., v)
// Seeking to 8 again should also do nothing.
require.Equal(t, chunkenc.ValFloat, it.Seek(8))
ts, v = it.At()
require.Equal(t, int64(8), ts)
require.Equal(t, 1., v)
// Again, should prefer the float sample.
require.Equal(t, chunkenc.ValFloat, it.Next())
ts, v = it.At()
require.Equal(t, int64(9), ts)
require.Equal(t, 2., v)
// Seek to ts=11 where there are only float samples.
require.Equal(t, chunkenc.ValFloat, it.Seek(11))
ts, v = it.At()
require.Equal(t, int64(11), ts)
require.Equal(t, 4., v)
// Seek to ts=15 right before the transition back to histogram samples.
require.Equal(t, chunkenc.ValFloat, it.Seek(15))
ts, v = it.At()
require.Equal(t, int64(15), ts)
require.Equal(t, 8., v)
require.Equal(t, chunkenc.ValHistogram, it.Next())
ts, h = it.AtHistogram()
require.Equal(t, int64(16), ts)
require.Equal(t, histograms[10], h)
// Getting a float histogram from an int histogram works.
require.Equal(t, chunkenc.ValHistogram, it.Next())
ts, fh = it.AtFloatHistogram()
require.Equal(t, int64(17), ts)
expected := HistogramProtoToFloatHistogram(HistogramToHistogramProto(int64(17), histograms[11]))
require.Equal(t, expected, fh)
// Keep calling Next() until the end.
for i := 0; i < 3; i++ {
require.Equal(t, chunkenc.ValHistogram, it.Next())
}
// The iterator is exhausted.
require.Equal(t, chunkenc.ValNone, it.Next())
require.Equal(t, chunkenc.ValNone, it.Next())
// Should also not be able to seek backwards again.
require.Equal(t, chunkenc.ValNone, it.Seek(1))
}
func TestFromQueryResultWithDuplicates(t *testing.T) { func TestFromQueryResultWithDuplicates(t *testing.T) {
ts1 := prompb.TimeSeries{ ts1 := prompb.TimeSeries{
Labels: []prompb.Label{ Labels: []prompb.Label{
@@ -368,7 +528,7 @@ func TestNilHistogramProto(t *testing.T) {
// This function will panic if it improperly handles nil // This function will panic if it improperly handles nil
// values, causing the test to fail. // values, causing the test to fail.
HistogramProtoToHistogram(prompb.Histogram{}) HistogramProtoToHistogram(prompb.Histogram{})
HistogramProtoToFloatHistogram(prompb.Histogram{}) FloatHistogramProtoToFloatHistogram(prompb.Histogram{})
} }
func exampleHistogram() histogram.Histogram { func exampleHistogram() histogram.Histogram {
@@ -563,7 +723,7 @@ func TestFloatHistogramToProtoConvert(t *testing.T) {
require.Equal(t, p, FloatHistogramToHistogramProto(1337, &h)) require.Equal(t, p, FloatHistogramToHistogramProto(1337, &h))
require.Equal(t, h, *HistogramProtoToFloatHistogram(p)) require.Equal(t, h, *FloatHistogramProtoToFloatHistogram(p))
} }
} }

View file

@@ -278,5 +278,5 @@ func (sf seriesFilter) Labels() labels.Labels {
b := labels.NewBuilder(sf.Series.Labels()) b := labels.NewBuilder(sf.Series.Labels())
// todo: check if this is too inefficient. // todo: check if this is too inefficient.
b.Del(sf.toFilter...) b.Del(sf.toFilter...)
return b.Labels(labels.EmptyLabels()) return b.Labels()
} }
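The one-line change above tracks a labels.Builder API change included in this sync: Builder.Labels no longer takes a result/scratch Labels argument. A hedged usage sketch (label names invented; fmt assumed imported):

b := labels.NewBuilder(labels.FromStrings("job", "api", "tmp", "x"))
b.Del("tmp")
lbls := b.Labels() // previously b.Labels(labels.EmptyLabels())
fmt.Println(lbls)  // {job="api"}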

View file

@@ -126,7 +126,7 @@ func (h *writeHandler) write(ctx context.Context, req *prompb.WriteRequest) (err
for _, hp := range ts.Histograms { for _, hp := range ts.Histograms {
if hp.GetCountFloat() > 0 || hp.GetZeroCountFloat() > 0 { // It is a float histogram. if hp.GetCountFloat() > 0 || hp.GetZeroCountFloat() > 0 { // It is a float histogram.
fhs := HistogramProtoToFloatHistogram(hp) fhs := FloatHistogramProtoToFloatHistogram(hp)
_, err = app.AppendHistogram(0, labels, hp.Timestamp, nil, fhs) _, err = app.AppendHistogram(0, labels, hp.Timestamp, nil, fhs)
} else { } else {
hs := HistogramProtoToHistogram(hp) hs := HistogramProtoToHistogram(hp)

View file

@@ -69,7 +69,7 @@ func TestRemoteWriteHandler(t *testing.T) {
for _, hp := range ts.Histograms { for _, hp := range ts.Histograms {
if hp.GetCountFloat() > 0 || hp.GetZeroCountFloat() > 0 { // It is a float histogram. if hp.GetCountFloat() > 0 || hp.GetZeroCountFloat() > 0 { // It is a float histogram.
fh := HistogramProtoToFloatHistogram(hp) fh := FloatHistogramProtoToFloatHistogram(hp)
require.Equal(t, mockHistogram{labels, hp.Timestamp, nil, fh}, appendable.histograms[k]) require.Equal(t, mockHistogram{labels, hp.Timestamp, nil, fh}, appendable.histograms[k])
} else { } else {
h := HistogramProtoToHistogram(hp) h := HistogramProtoToHistogram(hp)

View file

@@ -109,7 +109,7 @@ func (it *listSeriesIterator) Reset(samples Samples) {
func (it *listSeriesIterator) At() (int64, float64) {
s := it.samples.Get(it.idx)
- return s.T(), s.V()
+ return s.T(), s.F()
}
func (it *listSeriesIterator) AtHistogram() (int64, *histogram.Histogram) {
@@ -376,10 +376,17 @@ func (e errChunksIterator) Err() error { return e.err }
// ExpandSamples iterates over all samples in the iterator, buffering all in a slice.
// Optionally it takes a samples constructor, useful when you want to compare sample slices with different
// sample implementations. If nil, the sample type from this package will be used.
- func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample) ([]tsdbutil.Sample, error) {
+ func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample) ([]tsdbutil.Sample, error) {
if newSampleFn == nil {
- newSampleFn = func(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample {
- return sample{t, v, h, fh}
+ newSampleFn = func(t int64, f float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample {
+ switch {
+ case h != nil:
+ return hSample{t, h}
+ case fh != nil:
+ return fhSample{t, fh}
+ default:
+ return fSample{t, f}
+ }
}
}
@@ -389,12 +396,12 @@ func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, v float64,
case chunkenc.ValNone:
return result, iter.Err()
case chunkenc.ValFloat:
- t, v := iter.At()
+ t, f := iter.At()
// NaNs can't be compared normally, so substitute for another value.
- if math.IsNaN(v) {
- v = -42
+ if math.IsNaN(f) {
+ f = -42
}
- result = append(result, newSampleFn(t, v, nil, nil))
+ result = append(result, newSampleFn(t, f, nil, nil))
case chunkenc.ValHistogram:
t, h := iter.AtHistogram()
result = append(result, newSampleFn(t, 0, h, nil))
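Editor's note: the default constructor now fans out to one concrete type per sample kind (fSample, hSample, fhSample) instead of a single catch-all sample struct. A minimal sketch of relying on that default:

// Sketch: with a nil constructor, ExpandSamples falls back to the
// package's fSample/hSample/fhSample types via the switch above.
func expandAll(s storage.Series) ([]tsdbutil.Sample, error) {
	return ExpandSamples(s.Iterator(nil), nil)
}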

View file

@@ -25,11 +25,11 @@ import (
func TestListSeriesIterator(t *testing.T) {
it := NewListSeriesIterator(samples{
- sample{0, 0, nil, nil},
- sample{1, 1, nil, nil},
- sample{1, 1.5, nil, nil},
- sample{2, 2, nil, nil},
- sample{3, 3, nil, nil},
+ fSample{0, 0},
+ fSample{1, 1},
+ fSample{1, 1.5},
+ fSample{2, 2},
+ fSample{3, 3},
})
// Seek to the first sample with ts=1.
@@ -78,20 +78,20 @@ func TestChunkSeriesSetToSeriesSet(t *testing.T) {
{
lbs: labels.FromStrings("__name__", "up", "instance", "localhost:8080"),
samples: []tsdbutil.Sample{
- sample{t: 1, v: 1},
- sample{t: 2, v: 2},
- sample{t: 3, v: 3},
- sample{t: 4, v: 4},
+ fSample{t: 1, f: 1},
+ fSample{t: 2, f: 2},
+ fSample{t: 3, f: 3},
+ fSample{t: 4, f: 4},
},
}, {
lbs: labels.FromStrings("__name__", "up", "instance", "localhost:8081"),
samples: []tsdbutil.Sample{
- sample{t: 1, v: 2},
- sample{t: 2, v: 3},
- sample{t: 3, v: 4},
- sample{t: 4, v: 5},
- sample{t: 5, v: 6},
- sample{t: 6, v: 7},
+ fSample{t: 1, f: 2},
+ fSample{t: 2, f: 3},
+ fSample{t: 3, f: 4},
+ fSample{t: 4, f: 5},
+ fSample{t: 5, f: 6},
+ fSample{t: 6, f: 7},
},
},
}
@@ -114,7 +114,7 @@ func TestChunkSeriesSetToSeriesSet(t *testing.T) {
j := 0
for iter.Next() == chunkenc.ValFloat {
ts, v := iter.At()
- require.EqualValues(t, series[i].samples[j], sample{t: ts, v: v})
+ require.EqualValues(t, series[i].samples[j], fSample{t: ts, f: v})
j++
}
}

View file

@@ -93,7 +93,7 @@ func query(ctx context.Context, q string, ts time.Time, queryFn QueryFunc) (quer
result := make(queryResult, len(vector))
for n, v := range vector {
s := sample{
- Value: v.V,
+ Value: v.F,
Labels: v.Metric.Map(),
}
result[n] = &s

View file

@@ -70,7 +70,7 @@ func TestTemplateExpansion(t *testing.T) {
{
text: "{{ query \"1.5\" | first | value }}",
output: "1.5",
- queryResult: promql.Vector{{Point: promql.Point{T: 0, V: 1.5}}},
+ queryResult: promql.Vector{{T: 0, F: 1.5}},
},
{
// Get value from query.
@@ -78,7 +78,8 @@ func TestTemplateExpansion(t *testing.T) {
queryResult: promql.Vector{
{
Metric: labels.FromStrings(labels.MetricName, "metric", "instance", "a"),
- Point: promql.Point{T: 0, V: 11},
+ T: 0,
+ F: 11,
},
},
output: "11",
@@ -90,7 +91,8 @@ func TestTemplateExpansion(t *testing.T) {
queryResult: promql.Vector{
{
Metric: labels.FromStrings(labels.MetricName, "metric", "instance", "a"),
- Point: promql.Point{T: 0, V: 11},
+ T: 0,
+ F: 11,
},
},
output: "a",
@@ -101,7 +103,8 @@ func TestTemplateExpansion(t *testing.T) {
queryResult: promql.Vector{
{
Metric: labels.FromStrings(labels.MetricName, "metric", "__value__", "a"),
- Point: promql.Point{T: 0, V: 11},
+ T: 0,
+ F: 11,
},
},
output: "a",
@@ -112,7 +115,8 @@ func TestTemplateExpansion(t *testing.T) {
queryResult: promql.Vector{
{
Metric: labels.FromStrings(labels.MetricName, "metric", "instance", "a"),
- Point: promql.Point{T: 0, V: 11},
+ T: 0,
+ F: 11,
},
},
output: "",
@@ -123,7 +127,8 @@ func TestTemplateExpansion(t *testing.T) {
queryResult: promql.Vector{
{
Metric: labels.FromStrings(labels.MetricName, "metric", "instance", "a"),
- Point: promql.Point{T: 0, V: 11},
+ T: 0,
+ F: 11,
},
},
output: "",
@@ -133,7 +138,8 @@ func TestTemplateExpansion(t *testing.T) {
queryResult: promql.Vector{
{
Metric: labels.FromStrings(labels.MetricName, "metric", "instance", "a"),
- Point: promql.Point{T: 0, V: 11},
+ T: 0,
+ F: 11,
},
},
output: "",
@@ -145,10 +151,12 @@ func TestTemplateExpansion(t *testing.T) {
queryResult: promql.Vector{
{
Metric: labels.FromStrings(labels.MetricName, "metric", "instance", "b"),
- Point: promql.Point{T: 0, V: 21},
+ T: 0,
+ F: 21,
}, {
Metric: labels.FromStrings(labels.MetricName, "metric", "instance", "a"),
- Point: promql.Point{T: 0, V: 11},
+ T: 0,
+ F: 11,
},
},
output: "a:11: b:21: ",

View file

@@ -133,13 +133,13 @@ func TestCommit(t *testing.T) {
for i := 0; i < numDatapoints; i++ {
sample := tsdbutil.GenerateSamples(0, 1)
- ref, err := app.Append(0, lset, sample[0].T(), sample[0].V())
+ ref, err := app.Append(0, lset, sample[0].T(), sample[0].F())
require.NoError(t, err)
e := exemplar.Exemplar{
Labels: lset,
Ts: sample[0].T() + int64(i),
- Value: sample[0].V(),
+ Value: sample[0].F(),
HasTs: true,
}
_, err = app.AppendExemplar(ref, lset, e)
@@ -248,7 +248,7 @@ func TestRollback(t *testing.T) {
for i := 0; i < numDatapoints; i++ {
sample := tsdbutil.GenerateSamples(0, 1)
- _, err := app.Append(0, lset, sample[0].T(), sample[0].V())
+ _, err := app.Append(0, lset, sample[0].T(), sample[0].F())
require.NoError(t, err)
}
}
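Editor's note: beyond the V()→F() accessor rename, this hunk shows the full append-plus-exemplar flow. A compact sketch of the same sequence, using only the calls visible here (app, lset and the sample s are assumed to exist as in the test):

// Append a sample, then attach an exemplar to the returned series ref.
func appendWithExemplar(app storage.Appender, lset labels.Labels, s tsdbutil.Sample) error {
	ref, err := app.Append(0, lset, s.T(), s.F())
	if err != nil {
		return err
	}
	_, err = app.AppendExemplar(ref, lset, exemplar.Exemplar{
		Labels: lset,  // exemplar labels; the test reuses the series labels
		Ts:     s.T(), // exemplar timestamp
		Value:  s.F(), // exemplar value
		HasTs:  true,  // the timestamp is explicitly set
	})
	return err
}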

View file

@@ -356,14 +356,14 @@ func TestReadIndexFormatV1(t *testing.T) {
q, err := NewBlockQuerier(block, 0, 1000)
require.NoError(t, err)
require.Equal(t, query(t, q, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")),
- map[string][]tsdbutil.Sample{`{foo="bar"}`: {sample{t: 1, v: 2}}})
+ map[string][]tsdbutil.Sample{`{foo="bar"}`: {sample{t: 1, f: 2}}})
q, err = NewBlockQuerier(block, 0, 1000)
require.NoError(t, err)
require.Equal(t, query(t, q, labels.MustNewMatcher(labels.MatchNotRegexp, "foo", "^.?$")),
map[string][]tsdbutil.Sample{
- `{foo="bar"}`: {sample{t: 1, v: 2}},
- `{foo="baz"}`: {sample{t: 3, v: 4}},
+ `{foo="bar"}`: {sample{t: 1, f: 2}},
+ `{foo="baz"}`: {sample{t: 3, f: 4}},
})
}
@@ -571,7 +571,7 @@ func createHeadWithOOOSamples(tb testing.TB, w *wlog.WL, series []storage.Series
count++
t, v := it.At()
if count%oooSampleFrequency == 0 {
- os = append(os, sample{t: t, v: v})
+ os = append(os, sample{t: t, f: v})
continue
}
ref, err = app.Append(ref, lset, t, v)
@@ -592,7 +592,7 @@ func createHeadWithOOOSamples(tb testing.TB, w *wlog.WL, series []storage.Series
for i, lset := range oooSampleLabels {
ref := storage.SeriesRef(0)
for _, sample := range oooSamples[i] {
- ref, err = app.Append(ref, lset, sample.T(), sample.V())
+ ref, err = app.Append(ref, lset, sample.T(), sample.F())
require.NoError(tb, err)
oooSamplesAppended++
}
@@ -616,7 +616,7 @@ const (
// genSeries generates series of float64 samples with a given number of labels and values.
func genSeries(totalSeries, labelCount int, mint, maxt int64) []storage.Series {
return genSeriesFromSampleGenerator(totalSeries, labelCount, mint, maxt, 1, func(ts int64) tsdbutil.Sample {
- return sample{t: ts, v: rand.Float64()}
+ return sample{t: ts, f: rand.Float64()}
})
}
@@ -660,7 +660,7 @@ func genHistogramAndFloatSeries(totalSeries, labelCount int, mint, maxt, step in
count++
var s sample
if floatSample {
- s = sample{t: ts, v: rand.Float64()}
+ s = sample{t: ts, f: rand.Float64()}
} else {
h := &histogram.Histogram{
Count: 5 + uint64(ts*4),
@@ -732,7 +732,7 @@ func populateSeries(lbls []map[string]string, mint, maxt int64) []storage.Series
}
samples := make([]tsdbutil.Sample, 0, maxt-mint+1)
for t := mint; t <= maxt; t++ {
- samples = append(samples, sample{t: t, v: rand.Float64()})
+ samples = append(samples, sample{t: t, f: rand.Float64()})
}
series = append(series, storage.NewListSeries(labels.FromMap(lbl), samples))
}

View file

@@ -52,8 +52,8 @@ func TestBlockWriter(t *testing.T) {
q, err := NewBlockQuerier(b, math.MinInt64, math.MaxInt64)
require.NoError(t, err)
series := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "", ".*"))
- sample1 := []tsdbutil.Sample{sample{t: ts1, v: v1}}
- sample2 := []tsdbutil.Sample{sample{t: ts2, v: v2}}
+ sample1 := []tsdbutil.Sample{sample{t: ts1, f: v1}}
+ sample2 := []tsdbutil.Sample{sample{t: ts2, f: v2}}
expectedSeries := map[string][]tsdbutil.Sample{"{a=\"b\"}": sample1, "{c=\"d\"}": sample2}
require.Equal(t, expectedSeries, series)

View file

@@ -96,7 +96,7 @@ type Iterator interface {
// timestamp equal or greater than t. If the current sample found by a
// previous `Next` or `Seek` operation already has this property, Seek
// has no effect. If a sample has been found, Seek returns the type of
- // its value. Otherwise, it returns ValNone, after with the iterator is
+ // its value. Otherwise, it returns ValNone, after which the iterator is
// exhausted.
Seek(t int64) ValueType
// At returns the current timestamp/value pair if the value is a float.
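Editor's note: the one-word doc fix restates the Seek contract: once ValNone is returned the iterator is exhausted for good, which the new remote-read test at the top of this section asserts explicitly (a backwards Seek after exhaustion stays ValNone). A consumption sketch of that contract; it and minT are assumed, and the error handling is a placeholder:

// Drain a chunkenc.Iterator by value type; ValNone ends iteration for good.
for typ := it.Seek(minT); typ != chunkenc.ValNone; typ = it.Next() {
	switch typ {
	case chunkenc.ValFloat:
		t, f := it.At()
		_, _ = t, f
	case chunkenc.ValHistogram:
		t, h := it.AtHistogram()
		_, _ = t, h
	case chunkenc.ValFloatHistogram:
		t, fh := it.AtFloatHistogram()
		_, _ = t, fh
	}
}
// Only after ValNone does Err() distinguish exhaustion from failure.
if err := it.Err(); err != nil {
	panic(err) // placeholder for real error handling
}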

View file

@@ -83,7 +83,7 @@ type Compactor interface {
// LeveledCompactor implements the Compactor interface.
type LeveledCompactor struct {
- metrics *compactorMetrics
+ metrics *CompactorMetrics
logger log.Logger
ranges []int64
chunkPool chunkenc.Pool
@@ -95,47 +95,47 @@ type LeveledCompactor struct {
concurrencyOpts LeveledCompactorConcurrencyOptions
}
- type compactorMetrics struct {
- ran prometheus.Counter
- populatingBlocks prometheus.Gauge
- overlappingBlocks prometheus.Counter
- duration prometheus.Histogram
- chunkSize prometheus.Histogram
- chunkSamples prometheus.Histogram
- chunkRange prometheus.Histogram
+ type CompactorMetrics struct {
+ Ran prometheus.Counter
+ PopulatingBlocks prometheus.Gauge
+ OverlappingBlocks prometheus.Counter
+ Duration prometheus.Histogram
+ ChunkSize prometheus.Histogram
+ ChunkSamples prometheus.Histogram
+ ChunkRange prometheus.Histogram
}
- func newCompactorMetrics(r prometheus.Registerer) *compactorMetrics {
- m := &compactorMetrics{}
- m.ran = prometheus.NewCounter(prometheus.CounterOpts{
+ func newCompactorMetrics(r prometheus.Registerer) *CompactorMetrics {
+ m := &CompactorMetrics{}
+ m.Ran = prometheus.NewCounter(prometheus.CounterOpts{
Name: "prometheus_tsdb_compactions_total",
Help: "Total number of compactions that were executed for the partition.",
})
- m.populatingBlocks = prometheus.NewGauge(prometheus.GaugeOpts{
+ m.PopulatingBlocks = prometheus.NewGauge(prometheus.GaugeOpts{
Name: "prometheus_tsdb_compaction_populating_block",
Help: "Set to 1 when a block is currently being written to the disk.",
})
- m.overlappingBlocks = prometheus.NewCounter(prometheus.CounterOpts{
+ m.OverlappingBlocks = prometheus.NewCounter(prometheus.CounterOpts{
Name: "prometheus_tsdb_vertical_compactions_total",
Help: "Total number of compactions done on overlapping blocks.",
})
- m.duration = prometheus.NewHistogram(prometheus.HistogramOpts{
+ m.Duration = prometheus.NewHistogram(prometheus.HistogramOpts{
Name: "prometheus_tsdb_compaction_duration_seconds",
Help: "Duration of compaction runs",
Buckets: prometheus.ExponentialBuckets(1, 2, 14),
})
- m.chunkSize = prometheus.NewHistogram(prometheus.HistogramOpts{
+ m.ChunkSize = prometheus.NewHistogram(prometheus.HistogramOpts{
Name: "prometheus_tsdb_compaction_chunk_size_bytes",
Help: "Final size of chunks on their first compaction",
Buckets: prometheus.ExponentialBuckets(32, 1.5, 12),
})
- m.chunkSamples = prometheus.NewHistogram(prometheus.HistogramOpts{
+ m.ChunkSamples = prometheus.NewHistogram(prometheus.HistogramOpts{
Name: "prometheus_tsdb_compaction_chunk_samples",
Help: "Final number of samples on their first compaction",
Buckets: prometheus.ExponentialBuckets(4, 1.5, 12),
})
- m.chunkRange = prometheus.NewHistogram(prometheus.HistogramOpts{
+ m.ChunkRange = prometheus.NewHistogram(prometheus.HistogramOpts{
Name: "prometheus_tsdb_compaction_chunk_range_seconds",
Help: "Final time range of chunks on their first compaction",
Buckets: prometheus.ExponentialBuckets(100, 4, 10),
@@ -143,13 +143,13 @@ func newCompactorMetrics(r prometheus.Registerer) *compactorMetrics {
if r != nil {
r.MustRegister(
- m.ran,
- m.populatingBlocks,
- m.overlappingBlocks,
- m.duration,
- m.chunkRange,
- m.chunkSamples,
- m.chunkSize,
+ m.Ran,
+ m.PopulatingBlocks,
+ m.OverlappingBlocks,
+ m.Duration,
+ m.ChunkRange,
+ m.ChunkSamples,
+ m.ChunkSize,
)
}
return m
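Editor's note: exporting compactorMetrics as CompactorMetrics, with exported fields, lets code outside the constructor read the compaction metrics directly; the tests later in this diff do exactly that. An in-package sketch mirroring them (db is assumed to be a *DB as in the tests, and fmt is assumed imported):

// Read the exported compaction metrics via client_golang's test utilities.
ran := prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran)
populating := prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.PopulatingBlocks)
fmt.Printf("compactions run: %v, populating now: %v\n", ran, populating)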
@@ -428,13 +428,13 @@ func CompactBlockMetas(uid ulid.ULID, blocks ...*BlockMeta) *BlockMeta {
// and returns slice of block IDs. Position of returned block ID in the result slice corresponds to the shard index.
// If given output block has no series, corresponding block ID will be zero ULID value.
func (c *LeveledCompactor) CompactWithSplitting(dest string, dirs []string, open []*Block, shardCount uint64) (result []ulid.ULID, _ error) {
- return c.compact(dest, dirs, open, shardCount)
+ return c.CompactWithBlockPopulator(dest, dirs, open, DefaultBlockPopulator{}, shardCount)
}
// Compact creates a new block in the compactor's directory from the blocks in the
// provided directories.
func (c *LeveledCompactor) Compact(dest string, dirs []string, open []*Block) (uid ulid.ULID, err error) {
- ulids, err := c.compact(dest, dirs, open, 1)
+ ulids, err := c.CompactWithBlockPopulator(dest, dirs, open, DefaultBlockPopulator{}, 1)
if err != nil {
return ulid.ULID{}, err
}
@@ -453,7 +453,7 @@ type shardedBlock struct {
indexw IndexWriter
}
- func (c *LeveledCompactor) compact(dest string, dirs []string, open []*Block, shardCount uint64) (_ []ulid.ULID, err error) {
+ func (c *LeveledCompactor) CompactWithBlockPopulator(dest string, dirs []string, open []*Block, blockPopulator BlockPopulator, shardCount uint64) (_ []ulid.ULID, err error) {
if shardCount == 0 {
shardCount = 1
}
@@ -487,7 +487,7 @@ func (c *LeveledCompactor) compact(dest string, dirs []string, open []*Block, sh
outBlocks[ix] = shardedBlock{meta: CompactBlockMetas(ulid.MustNew(outBlocksTime, rand.Reader), metas...)}
}
- err = c.write(dest, outBlocks, blocks...)
+ err = c.write(dest, outBlocks, blockPopulator, blocks...)
if err == nil {
ulids := make([]ulid.ULID, len(outBlocks))
allOutputBlocksAreEmpty := true
@@ -619,7 +619,7 @@ func (c *LeveledCompactor) compactOOO(dest string, oooHead *OOOCompactionHead, s
}
// Block intervals are half-open: [b.MinTime, b.MaxTime), so a block's MaxTime is one greater than the timestamp of the last sample it includes.
- err := c.write(dest, outBlocks[ix], oooHead.CloneForTimeRange(mint, maxt-1))
+ err := c.write(dest, outBlocks[ix], DefaultBlockPopulator{}, oooHead.CloneForTimeRange(mint, maxt-1))
if err != nil {
// We need to delete all blocks in case there was an error.
for _, obs := range outBlocks {
@@ -691,7 +691,7 @@ func (c *LeveledCompactor) Write(dest string, b BlockReader, mint, maxt int64, p
}
}
- err := c.write(dest, []shardedBlock{{meta: meta}}, b)
+ err := c.write(dest, []shardedBlock{{meta: meta}}, DefaultBlockPopulator{}, b)
if err != nil {
return uid, err
}
@@ -736,7 +736,7 @@ func (w *instrumentedChunkWriter) WriteChunks(chunks ...chunks.Meta) error {
}
// write creates new output blocks that are the union of the provided blocks into dir.
- func (c *LeveledCompactor) write(dest string, outBlocks []shardedBlock, blocks ...BlockReader) (err error) {
+ func (c *LeveledCompactor) write(dest string, outBlocks []shardedBlock, blockPopulator BlockPopulator, blocks ...BlockReader) (err error) {
var closers []io.Closer
defer func(t time.Time) {
@@ -760,8 +760,8 @@ func (c *LeveledCompactor) write(dest string, outBlocks []shardedBlock, blocks .
}
}
}
- c.metrics.ran.Inc()
- c.metrics.duration.Observe(time.Since(t).Seconds())
+ c.metrics.Ran.Inc()
+ c.metrics.Duration.Observe(time.Since(t).Seconds())
}(time.Now())
for ix := range outBlocks {
@@ -794,9 +794,9 @@ func (c *LeveledCompactor) write(dest string, outBlocks []shardedBlock, blocks .
if outBlocks[ix].meta.Compaction.Level == 1 {
chunkw = &instrumentedChunkWriter{
ChunkWriter: chunkw,
- size: c.metrics.chunkSize,
- samples: c.metrics.chunkSamples,
- trange: c.metrics.chunkRange,
+ size: c.metrics.ChunkSize,
+ samples: c.metrics.ChunkSamples,
+ trange: c.metrics.ChunkRange,
}
}
@@ -814,7 +814,7 @@ func (c *LeveledCompactor) write(dest string, outBlocks []shardedBlock, blocks .
}
// We use MinTime and MaxTime from the first output block, because ALL output blocks have the same min/max times set.
- if err := c.populateBlock(blocks, outBlocks[0].meta.MinTime, outBlocks[0].meta.MaxTime, outBlocks); err != nil {
+ if err := blockPopulator.PopulateBlock(c.ctx, c.metrics, c.logger, c.chunkPool, c.mergeFunc, c.concurrencyOpts, blocks, outBlocks[0].meta.MinTime, outBlocks[0].meta.MaxTime, outBlocks); err != nil {
return errors.Wrap(err, "populate block")
}
@@ -938,12 +938,18 @@ func timeFromMillis(ms int64) time.Time {
return time.Unix(0, ms*int64(time.Millisecond))
}
- // populateBlock fills the index and chunk writers of output blocks with new data gathered as the union
- // of the provided blocks.
+ type BlockPopulator interface {
+ PopulateBlock(ctx context.Context, metrics *CompactorMetrics, logger log.Logger, chunkPool chunkenc.Pool, mergeFunc storage.VerticalChunkSeriesMergeFunc, concurrencyOpts LeveledCompactorConcurrencyOptions, blocks []BlockReader, minT, maxT int64, outBlocks []shardedBlock) error
+ }
+ type DefaultBlockPopulator struct{}
+ // PopulateBlock fills the index and chunk writers with new data gathered as the union
+ // of the provided blocks. It returns meta information for the new block.
// It expects the input blocks to be sorted by mint.
// If there is more than 1 output block, each output block will only contain series that hash into its shard
// (based on total number of output blocks).
- func (c *LeveledCompactor) populateBlock(blocks []BlockReader, minT, maxT int64, outBlocks []shardedBlock) (err error) {
+ func (c DefaultBlockPopulator) PopulateBlock(ctx context.Context, metrics *CompactorMetrics, logger log.Logger, chunkPool chunkenc.Pool, mergeFunc storage.VerticalChunkSeriesMergeFunc, concurrencyOpts LeveledCompactorConcurrencyOptions, blocks []BlockReader, minT, maxT int64, outBlocks []shardedBlock) (err error) {
if len(blocks) == 0 {
return errors.New("cannot populate block(s) from no readers")
}
@@ -961,23 +967,23 @@ func (c *LeveledCompactor) populateBlock(blocks []BlockReader, minT, maxT int64,
errs.Add(errors.Wrap(cerr, "close"))
}
err = errs.Err()
- c.metrics.populatingBlocks.Set(0)
+ metrics.PopulatingBlocks.Set(0)
}()
- c.metrics.populatingBlocks.Set(1)
+ metrics.PopulatingBlocks.Set(1)
globalMaxt := blocks[0].Meta().MaxTime
for i, b := range blocks {
select {
- case <-c.ctx.Done():
- return c.ctx.Err()
+ case <-ctx.Done():
+ return ctx.Err()
default:
}
if !overlapping {
if i > 0 && b.Meta().MinTime < globalMaxt {
- c.metrics.overlappingBlocks.Inc()
+ metrics.OverlappingBlocks.Inc()
overlapping = true
- level.Info(c.logger).Log("msg", "Found overlapping blocks during compaction")
+ level.Info(logger).Log("msg", "Found overlapping blocks during compaction")
}
if b.Meta().MaxTime > globalMaxt {
globalMaxt = b.Meta().MaxTime
@@ -1009,7 +1015,7 @@ func (c *LeveledCompactor) populateBlock(blocks []BlockReader, minT, maxT int64,
}
all = indexr.SortedPostings(all)
// Blocks meta is half open: [min, max), so subtract 1 to ensure we don't hold samples with exact meta.MaxTime timestamp.
- sets = append(sets, newBlockChunkSeriesSet(b.Meta().ULID, indexr, chunkr, tombsr, all, minT, maxT-1, false))
+ sets = append(sets, NewBlockChunkSeriesSet(b.Meta().ULID, indexr, chunkr, tombsr, all, minT, maxT-1, false))
if len(outBlocks) > 1 {
// To iterate series when populating symbols, we cannot reuse postings we just got, but need to get a new copy.
@@ -1021,7 +1027,7 @@ func (c *LeveledCompactor) populateBlock(blocks []BlockReader, minT, maxT int64,
}
all = indexr.SortedPostings(all)
// Blocks meta is half open: [min, max), so subtract 1 to ensure we don't hold samples with exact meta.MaxTime timestamp.
- symbolsSets = append(symbolsSets, newBlockChunkSeriesSet(b.Meta().ULID, indexr, chunkr, tombsr, all, minT, maxT-1, false))
+ symbolsSets = append(symbolsSets, NewBlockChunkSeriesSet(b.Meta().ULID, indexr, chunkr, tombsr, all, minT, maxT-1, false))
} else {
syms := indexr.Symbols()
if i == 0 {
@@ -1042,17 +1048,17 @@ func (c *LeveledCompactor) populateBlock(blocks []BlockReader, minT, maxT int64,
return errors.Wrap(symbols.Err(), "next symbol")
}
} else {
- if err := c.populateSymbols(symbolsSets, outBlocks); err != nil {
+ if err := populateSymbols(ctx, mergeFunc, concurrencyOpts, symbolsSets, outBlocks); err != nil {
return err
}
}
// Semaphore for number of blocks that can be closed at once.
- sema := semaphore.NewWeighted(int64(c.concurrencyOpts.MaxClosingBlocks))
+ sema := semaphore.NewWeighted(int64(concurrencyOpts.MaxClosingBlocks))
blockWriters := make([]*asyncBlockWriter, len(outBlocks))
for ix := range outBlocks {
- blockWriters[ix] = newAsyncBlockWriter(c.chunkPool, outBlocks[ix].chunkw, outBlocks[ix].indexw, sema)
+ blockWriters[ix] = newAsyncBlockWriter(chunkPool, outBlocks[ix].chunkw, outBlocks[ix].indexw, sema)
}
defer func() {
// Stop all async writers.
@@ -1072,14 +1078,14 @@ func (c *LeveledCompactor) populateBlock(blocks []BlockReader, minT, maxT int64,
if len(sets) > 1 {
// Merge series using specified chunk series merger.
// The default one is the compacting series merger.
- set = storage.NewMergeChunkSeriesSet(sets, c.mergeFunc)
+ set = storage.NewMergeChunkSeriesSet(sets, mergeFunc)
}
// Iterate over all sorted chunk series.
for set.Next() {
select {
- case <-c.ctx.Done():
- return c.ctx.Err()
+ case <-ctx.Done():
+ return ctx.Err()
default:
}
s := set.At()
@@ -1101,7 +1107,7 @@ func (c *LeveledCompactor) populateBlock(blocks []BlockReader, minT, maxT int64,
continue
}
- debugOutOfOrderChunks(chks, c.logger)
+ debugOutOfOrderChunks(chks, logger)
obIx := uint64(0)
if len(outBlocks) > 1 {
@@ -1139,12 +1145,12 @@ const inMemorySymbolsLimit = 1_000_000
// populateSymbols writes symbols to output blocks. We need to iterate through all series to find
// which series belongs to which block. We collect symbols per sharded block, and then add sorted symbols to
// the block's index.
- func (c *LeveledCompactor) populateSymbols(sets []storage.ChunkSeriesSet, outBlocks []shardedBlock) error {
+ func populateSymbols(ctx context.Context, mergeFunc storage.VerticalChunkSeriesMergeFunc, concurrencyOpts LeveledCompactorConcurrencyOptions, sets []storage.ChunkSeriesSet, outBlocks []shardedBlock) error {
if len(outBlocks) == 0 {
return errors.New("no output block")
}
- flushers := newSymbolFlushers(c.concurrencyOpts.SymbolsFlushersCount)
+ flushers := newSymbolFlushers(concurrencyOpts.SymbolsFlushersCount)
defer flushers.close() // Make sure to stop flushers before exiting to avoid leaking goroutines.
batchers := make([]*symbolsBatcher, len(outBlocks))
@@ -1161,11 +1167,11 @@ func (c *LeveledCompactor) populateSymbols(sets []storage.ChunkSeriesSet, outBlo
seriesSet := sets[0]
if len(sets) > 1 {
- seriesSet = storage.NewMergeChunkSeriesSet(sets, c.mergeFunc)
+ seriesSet = storage.NewMergeChunkSeriesSet(sets, mergeFunc)
}
for seriesSet.Next() {
- if err := c.ctx.Err(); err != nil {
+ if err := ctx.Err(); err != nil {
return err
}
@@ -1203,7 +1209,7 @@ func (c *LeveledCompactor) populateSymbols(sets []storage.ChunkSeriesSet, outBlo
}
for ix := range outBlocks {
- if err := c.ctx.Err(); err != nil {
+ if err := ctx.Err(); err != nil {
return err
}
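Editor's note: the net effect of this file's changes is that populateBlock becomes the PopulateBlock method of DefaultBlockPopulator, every dependency it used to reach through the compactor (ctx, metrics, logger, chunk pool, merge function, concurrency options) becomes an explicit parameter, and CompactWithBlockPopulator lets callers swap the implementation. A sketch of a custom in-package populator; the timing wrapper itself is invented, only the interface is from the diff:

// Hypothetical populator that times the default implementation.
type timedBlockPopulator struct{}

func (timedBlockPopulator) PopulateBlock(ctx context.Context, metrics *CompactorMetrics, logger log.Logger, chunkPool chunkenc.Pool, mergeFunc storage.VerticalChunkSeriesMergeFunc, concurrencyOpts LeveledCompactorConcurrencyOptions, blocks []BlockReader, minT, maxT int64, outBlocks []shardedBlock) error {
	start := time.Now()
	err := DefaultBlockPopulator{}.PopulateBlock(ctx, metrics, logger, chunkPool, mergeFunc, concurrencyOpts, blocks, minT, maxT, outBlocks)
	level.Info(logger).Log("msg", "PopulateBlock finished", "duration", time.Since(start), "err", err)
	return err
}

// Plugged in via: c.CompactWithBlockPopulator(dest, dirs, open, timedBlockPopulator{}, shardCount)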

View file

@@ -450,7 +450,7 @@ func TestCompactionFailWillCleanUpTempDir(t *testing.T) {
{meta: &BlockMeta{ULID: ulid.MustNew(ulid.Now(), crand.Reader)}},
}
- require.Error(t, compactor.write(tmpdir, shardedBlocks, erringBReader{}))
+ require.Error(t, compactor.write(tmpdir, shardedBlocks, DefaultBlockPopulator{}, erringBReader{}))
// We rely on the fact that blockDir and tmpDir will be updated by compactor.write.
for _, b := range shardedBlocks {
@@ -1155,7 +1155,9 @@ func TestCompaction_populateBlock(t *testing.T) {
iw := &mockIndexWriter{}
ob := shardedBlock{meta: meta, indexw: iw, chunkw: nopChunkWriter{}}
- err = c.populateBlock(blocks, meta.MinTime, meta.MaxTime, []shardedBlock{ob})
+ blockPopulator := DefaultBlockPopulator{}
+ err = blockPopulator.PopulateBlock(c.ctx, c.metrics, c.logger, c.chunkPool, c.mergeFunc, c.concurrencyOpts, blocks, meta.MinTime, meta.MaxTime, []shardedBlock{ob})
if tc.expErr != nil {
require.Error(t, err)
require.Equal(t, tc.expErr.Error(), err.Error())
@@ -1176,7 +1178,7 @@ func TestCompaction_populateBlock(t *testing.T) {
s sample
)
for iter.Next() == chunkenc.ValFloat {
- s.t, s.v = iter.At()
+ s.t, s.f = iter.At()
if firstTs == math.MaxInt64 {
firstTs = s.t
}
@@ -1383,14 +1385,14 @@ func TestCancelCompactions(t *testing.T) {
db, err := open(tmpdir, log.NewNopLogger(), nil, DefaultOptions(), []int64{1, 2000}, nil)
require.NoError(t, err)
require.Equal(t, 3, len(db.Blocks()), "initial block count mismatch")
- require.Equal(t, 0.0, prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.ran), "initial compaction counter mismatch")
+ require.Equal(t, 0.0, prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran), "initial compaction counter mismatch")
db.compactc <- struct{}{} // Trigger a compaction.
- for prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.populatingBlocks) <= 0 {
+ for prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.PopulatingBlocks) <= 0 {
time.Sleep(3 * time.Millisecond)
}
start := time.Now()
- for prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.ran) != 1 {
+ for prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran) != 1 {
time.Sleep(3 * time.Millisecond)
}
timeCompactionUninterrupted = time.Since(start)
@@ -1402,10 +1404,10 @@ func TestCancelCompactions(t *testing.T) {
db, err := open(tmpdirCopy, log.NewNopLogger(), nil, DefaultOptions(), []int64{1, 2000}, nil)
require.NoError(t, err)
require.Equal(t, 3, len(db.Blocks()), "initial block count mismatch")
- require.Equal(t, 0.0, prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.ran), "initial compaction counter mismatch")
+ require.Equal(t, 0.0, prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran), "initial compaction counter mismatch")
db.compactc <- struct{}{} // Trigger a compaction.
- for prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.populatingBlocks) <= 0 {
+ for prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.PopulatingBlocks) <= 0 {
time.Sleep(3 * time.Millisecond)
}
@@ -1486,7 +1488,7 @@ func TestDeleteCompactionBlockAfterFailedReload(t *testing.T) {
require.NoError(t, os.RemoveAll(lastBlockIndex)) // Corrupt the block by removing the index file.
require.Equal(t, 0.0, prom_testutil.ToFloat64(db.metrics.reloadsFailed), "initial 'failed db reloadBlocks' count metrics mismatch")
- require.Equal(t, 0.0, prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.ran), "initial `compactions` count metric mismatch")
+ require.Equal(t, 0.0, prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran), "initial `compactions` count metric mismatch")
require.Equal(t, 0.0, prom_testutil.ToFloat64(db.metrics.compactionsFailed), "initial `compactions failed` count metric mismatch")
// Do the compaction and check the metrics.
@@ -1494,7 +1496,7 @@ func TestDeleteCompactionBlockAfterFailedReload(t *testing.T) {
// the new block created from the compaction should be deleted.
require.Error(t, db.Compact())
require.Equal(t, 1.0, prom_testutil.ToFloat64(db.metrics.reloadsFailed), "'failed db reloadBlocks' count metrics mismatch")
- require.Equal(t, 1.0, prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.ran), "`compaction` count metric mismatch")
+ require.Equal(t, 1.0, prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran), "`compaction` count metric mismatch")
require.Equal(t, 1.0, prom_testutil.ToFloat64(db.metrics.compactionsFailed), "`compactions failed` count metric mismatch")
actBlocks, err = blockDirs(db.Dir())
@@ -1666,7 +1668,7 @@ func TestHeadCompactionWithHistograms(t *testing.T) {
for tsMinute := from; tsMinute <= to; tsMinute++ {
_, err := app.Append(0, lbls, minute(tsMinute), float64(tsMinute))
require.NoError(t, err)
- *exp = append(*exp, sample{t: minute(tsMinute), v: float64(tsMinute)})
+ *exp = append(*exp, sample{t: minute(tsMinute), f: float64(tsMinute)})
}
require.NoError(t, app.Commit())
}
@@ -1891,20 +1893,20 @@ func TestSparseHistogramSpaceSavings(t *testing.T) {
for it.Next() {
numOldSeriesPerHistogram++
b := it.At()
- lbls := labels.NewBuilder(ah.baseLabels).Set("le", fmt.Sprintf("%.16f", b.Upper)).Labels(labels.EmptyLabels())
+ lbls := labels.NewBuilder(ah.baseLabels).Set("le", fmt.Sprintf("%.16f", b.Upper)).Labels()
refs[itIdx], err = oldApp.Append(refs[itIdx], lbls, ts, float64(b.Count))
require.NoError(t, err)
itIdx++
}
baseName := ah.baseLabels.Get(labels.MetricName)
// _count metric.
- countLbls := labels.NewBuilder(ah.baseLabels).Set(labels.MetricName, baseName+"_count").Labels(labels.EmptyLabels())
+ countLbls := labels.NewBuilder(ah.baseLabels).Set(labels.MetricName, baseName+"_count").Labels()
_, err = oldApp.Append(0, countLbls, ts, float64(h.Count))
require.NoError(t, err)
numOldSeriesPerHistogram++
// _sum metric.
- sumLbls := labels.NewBuilder(ah.baseLabels).Set(labels.MetricName, baseName+"_sum").Labels(labels.EmptyLabels())
+ sumLbls := labels.NewBuilder(ah.baseLabels).Set(labels.MetricName, baseName+"_sum").Labels()
_, err = oldApp.Append(0, sumLbls, ts, h.Sum)
require.NoError(t, err)
numOldSeriesPerHistogram++

View file

@@ -111,7 +111,7 @@ func query(t testing.TB, q storage.Querier, matchers ...*labels.Matcher) map[str
switch typ {
case chunkenc.ValFloat:
ts, v := it.At()
- samples = append(samples, sample{t: ts, v: v})
+ samples = append(samples, sample{t: ts, f: v})
case chunkenc.ValHistogram:
ts, h := it.AtHistogram()
samples = append(samples, sample{t: ts, h: h})
@@ -240,7 +240,7 @@ func TestDataAvailableOnlyAfterCommit(t *testing.T) {
seriesSet = query(t, querier, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar"))
- require.Equal(t, map[string][]tsdbutil.Sample{`{foo="bar"}`: {sample{t: 0, v: 0}}}, seriesSet)
+ require.Equal(t, map[string][]tsdbutil.Sample{`{foo="bar"}`: {sample{t: 0, f: 0}}}, seriesSet)
}
// TestNoPanicAfterWALCorruption ensures that querying the db after a WAL corruption doesn't cause a panic.
@@ -258,7 +258,7 @@ func TestNoPanicAfterWALCorruption(t *testing.T) {
for i := 0; i < 121; i++ {
app := db.Appender(ctx)
_, err := app.Append(0, labels.FromStrings("foo", "bar"), maxt, 0)
- expSamples = append(expSamples, sample{t: maxt, v: 0})
+ expSamples = append(expSamples, sample{t: maxt, f: 0})
require.NoError(t, err)
require.NoError(t, app.Commit())
maxt++
@@ -371,11 +371,11 @@ func TestDBAppenderAddRef(t *testing.T) {
require.Equal(t, map[string][]tsdbutil.Sample{
labels.FromStrings("a", "b").String(): {
- sample{t: 123, v: 0},
- sample{t: 124, v: 1},
- sample{t: 125, v: 0},
- sample{t: 133, v: 1},
- sample{t: 143, v: 2},
+ sample{t: 123, f: 0},
+ sample{t: 124, f: 1},
+ sample{t: 125, f: 0},
+ sample{t: 133, f: 1},
+ sample{t: 143, f: 2},
},
}, res)
}
@@ -1747,7 +1747,7 @@ func expandSeriesSet(ss storage.SeriesSet) ([]labels.Labels, map[string][]sample
it = series.Iterator(it)
for it.Next() == chunkenc.ValFloat {
t, v := it.At()
- samples = append(samples, sample{t: t, v: v})
+ samples = append(samples, sample{t: t, f: v})
}
resultLabels = append(resultLabels, series.Labels())
resultSamples[series.Labels().String()] = samples
@@ -2062,7 +2062,7 @@ func TestNoEmptyBlocks(t *testing.T) {
require.NoError(t, err)
require.Equal(t, len(db.Blocks()), len(actBlocks))
require.Equal(t, 0, len(actBlocks))
- require.Equal(t, 0, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.ran)), "no compaction should be triggered here")
+ require.Equal(t, 0, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran)), "no compaction should be triggered here")
})
t.Run("Test no blocks after deleting all samples from head.", func(t *testing.T) {
@@ -2076,7 +2076,7 @@ func TestNoEmptyBlocks(t *testing.T) {
require.NoError(t, app.Commit())
require.NoError(t, db.Delete(math.MinInt64, math.MaxInt64, defaultMatcher))
require.NoError(t, db.Compact())
- require.Equal(t, 1, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.ran)), "compaction should have been triggered here")
+ require.Equal(t, 1, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran)), "compaction should have been triggered here")
actBlocks, err := blockDirs(db.Dir())
require.NoError(t, err)
@@ -2098,7 +2098,7 @@ func TestNoEmptyBlocks(t *testing.T) {
require.NoError(t, app.Commit())
require.NoError(t, db.Compact())
- require.Equal(t, 2, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.ran)), "compaction should have been triggered here")
+ require.Equal(t, 2, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran)), "compaction should have been triggered here")
actBlocks, err = blockDirs(db.Dir())
require.NoError(t, err)
require.Equal(t, len(db.Blocks()), len(actBlocks))
@@ -2119,7 +2119,7 @@ func TestNoEmptyBlocks(t *testing.T) {
require.NoError(t, app.Commit())
require.NoError(t, db.head.Delete(math.MinInt64, math.MaxInt64, defaultMatcher))
require.NoError(t, db.Compact())
- require.Equal(t, 3, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.ran)), "compaction should have been triggered here")
+ require.Equal(t, 3, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran)), "compaction should have been triggered here")
require.Equal(t, oldBlocks, db.Blocks())
})
@@ -2138,7 +2138,7 @@ func TestNoEmptyBlocks(t *testing.T) {
require.Equal(t, len(blocks)+len(oldBlocks), len(db.Blocks())) // Ensure all blocks are registered.
require.NoError(t, db.Delete(math.MinInt64, math.MaxInt64, defaultMatcher))
require.NoError(t, db.Compact())
- require.Equal(t, 5, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.ran)), "compaction should have been triggered here once for each block that have tombstones")
+ require.Equal(t, 5, int(prom_testutil.ToFloat64(db.compactor.(*LeveledCompactor).metrics.Ran)), "compaction should have been triggered here once for each block that has tombstones")
actBlocks, err := blockDirs(db.Dir())
require.NoError(t, err)
@@ -2624,7 +2624,7 @@ func TestDBCannotSeePartialCommits(t *testing.T) {
values := map[float64]struct{}{}
for _, series := range seriesSet {
- values[series[len(series)-1].v] = struct{}{}
+ values[series[len(series)-1].f] = struct{}{}
}
if len(values) != 1 {
inconsistencies++
@@ -2700,7 +2700,7 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) {
_, seriesSet, ws, err = expandSeriesSet(ss)
require.NoError(t, err)
require.Equal(t, 0, len(ws))
- require.Equal(t, map[string][]sample{`{foo="bar"}`: {{t: 0, v: 0}}}, seriesSet)
+ require.Equal(t, map[string][]sample{`{foo="bar"}`: {{t: 0, f: 0}}}, seriesSet)
}
// TestChunkWriter_ReadAfterWrite ensures that chunk segments are cut at the set segment size and
@@ -4458,6 +4458,115 @@ func TestOOOCompactionWithDisabledWriteLog(t *testing.T) {
verifySamples(db.Blocks()[1], 250, 350)
}
// TestOOOQueryAfterRestartWithSnapshotAndRemovedWBL tests the scenario where the WBL goes
// missing after a restart while snapshot was enabled, but the query still returns the right
// data from the mmap chunks.
func TestOOOQueryAfterRestartWithSnapshotAndRemovedWBL(t *testing.T) {
dir := t.TempDir()
opts := DefaultOptions()
opts.OutOfOrderCapMax = 10
opts.OutOfOrderTimeWindow = 300 * time.Minute.Milliseconds()
opts.EnableMemorySnapshotOnShutdown = true
db, err := Open(dir, nil, nil, opts, nil)
require.NoError(t, err)
db.DisableCompactions() // We want to manually call it.
t.Cleanup(func() {
require.NoError(t, db.Close())
})
series1 := labels.FromStrings("foo", "bar1")
series2 := labels.FromStrings("foo", "bar2")
addSamples := func(fromMins, toMins int64) {
app := db.Appender(context.Background())
for min := fromMins; min <= toMins; min++ {
ts := min * time.Minute.Milliseconds()
_, err := app.Append(0, series1, ts, float64(ts))
require.NoError(t, err)
_, err = app.Append(0, series2, ts, float64(2*ts))
require.NoError(t, err)
}
require.NoError(t, app.Commit())
}
// Add in-order samples.
addSamples(250, 350)
// Add ooo samples that will result in a single block.
addSamples(90, 110) // The sample 110 will not be in m-map chunks.
// Checking that there are some ooo m-map chunks.
for _, lbls := range []labels.Labels{series1, series2} {
ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls)
require.NoError(t, err)
require.False(t, created)
require.Equal(t, 2, len(ms.ooo.oooMmappedChunks))
require.NotNil(t, ms.ooo.oooHeadChunk)
}
// Restart DB.
require.NoError(t, db.Close())
// For some reason the WBL goes missing.
require.NoError(t, os.RemoveAll(path.Join(dir, "wbl")))
db, err = Open(dir, nil, nil, opts, nil)
require.NoError(t, err)
db.DisableCompactions() // We want to manually call it.
// Check ooo m-map chunks again.
for _, lbls := range []labels.Labels{series1, series2} {
ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls)
require.NoError(t, err)
require.False(t, created)
require.Equal(t, 2, len(ms.ooo.oooMmappedChunks))
require.Equal(t, 109*time.Minute.Milliseconds(), ms.ooo.oooMmappedChunks[1].maxTime)
require.Nil(t, ms.ooo.oooHeadChunk) // Because of missing wbl.
}
verifySamples := func(fromMins, toMins int64) {
series1Samples := make([]tsdbutil.Sample, 0, toMins-fromMins+1)
series2Samples := make([]tsdbutil.Sample, 0, toMins-fromMins+1)
for min := fromMins; min <= toMins; min++ {
ts := min * time.Minute.Milliseconds()
series1Samples = append(series1Samples, sample{ts, float64(ts), nil, nil})
series2Samples = append(series2Samples, sample{ts, float64(2 * ts), nil, nil})
}
expRes := map[string][]tsdbutil.Sample{
series1.String(): series1Samples,
series2.String(): series2Samples,
}
q, err := db.Querier(context.Background(), fromMins*time.Minute.Milliseconds(), toMins*time.Minute.Milliseconds())
require.NoError(t, err)
actRes := query(t, q, labels.MustNewMatcher(labels.MatchRegexp, "foo", "bar.*"))
require.Equal(t, expRes, actRes)
}
// Checking for expected ooo data from mmap chunks.
verifySamples(90, 109)
// Compaction should also work fine.
require.Equal(t, len(db.Blocks()), 0)
require.NoError(t, db.CompactOOOHead())
require.Equal(t, len(db.Blocks()), 1) // One block from OOO data.
require.Equal(t, int64(0), db.Blocks()[0].MinTime())
require.Equal(t, 120*time.Minute.Milliseconds(), db.Blocks()[0].MaxTime())
// Checking that ooo chunk is empty in Head.
for _, lbls := range []labels.Labels{series1, series2} {
ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls)
require.NoError(t, err)
require.False(t, created)
require.Nil(t, ms.ooo)
}
verifySamples(90, 109)
}
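Editor's note: the new test drives out-of-order ingestion plus memory snapshots, both of which are plain DB options. A minimal setup sketch mirroring its configuration (the directory path is made up):

// Open a TSDB accepting out-of-order appends up to 5h old, with a
// memory snapshot taken on shutdown, as the test above configures.
opts := DefaultOptions()
opts.OutOfOrderCapMax = 10                                   // samples per OOO head chunk
opts.OutOfOrderTimeWindow = 300 * time.Minute.Milliseconds() // 5h acceptance window
opts.EnableMemorySnapshotOnShutdown = true
db, err := Open("/tmp/tsdb-ooo-demo", nil, nil, opts, nil)
if err != nil {
	panic(err) // placeholder for real error handling
}
defer db.Close()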
func Test_Querier_OOOQuery(t *testing.T) {
opts := DefaultOptions()
opts.OutOfOrderCapMax = 30
@@ -4473,7 +4582,7 @@ func Test_Querier_OOOQuery(t *testing.T) {
for min := fromMins; min <= toMins; min += time.Minute.Milliseconds() {
_, err := app.Append(0, series1, min, float64(min))
if min >= queryMinT && min <= queryMaxT {
- expSamples = append(expSamples, sample{t: min, v: float64(min)})
+ expSamples = append(expSamples, sample{t: min, f: float64(min)})
}
require.NoError(t, err)
totalAppended++
@@ -4558,7 +4667,7 @@ func Test_ChunkQuerier_OOOQuery(t *testing.T) {
for min := fromMins; min <= toMins; min += time.Minute.Milliseconds() {
_, err := app.Append(0, series1, min, float64(min))
if min >= queryMinT && min <= queryMaxT {
- expSamples = append(expSamples, sample{t: min, v: float64(min)})
+ expSamples = append(expSamples, sample{t: min, f: float64(min)})
}
require.NoError(t, err)
totalAppended++
@@ -4628,7 +4737,7 @@ func Test_ChunkQuerier_OOOQuery(t *testing.T) {
it := chunk.Chunk.Iterator(nil)
for it.Next() == chunkenc.ValFloat {
ts, v := it.At()
- gotSamples = append(gotSamples, sample{t: ts, v: v})
+ gotSamples = append(gotSamples, sample{t: ts, f: v})
}
}
require.Equal(t, expSamples, gotSamples)
@@ -4664,7 +4773,7 @@ func TestOOOAppendAndQuery(t *testing.T) {
require.Error(t, err)
} else {
require.NoError(t, err)
- appendedSamples[key] = append(appendedSamples[key], sample{t: min, v: val})
+ appendedSamples[key] = append(appendedSamples[key], sample{t: min, f: val})
totalSamples++
}
}
@@ -4787,7 +4896,7 @@ func TestOOODisabled(t *testing.T) {
failedSamples++
} else {
require.NoError(t, err)
- expSamples[key] = append(expSamples[key], sample{t: min, v: val})
+ expSamples[key] = append(expSamples[key], sample{t: min, f: val})
totalSamples++
}
}
@@ -4850,7 +4959,7 @@ func TestWBLAndMmapReplay(t *testing.T) {
val := rand.Float64()
_, err := app.Append(0, lbls, min, val)
require.NoError(t, err)
- expSamples[key] = append(expSamples[key], sample{t: min, v: val})
+ expSamples[key] = append(expSamples[key], sample{t: min, f: val})
totalSamples++
}
require.NoError(t, app.Commit())
@@ -4893,7 +5002,7 @@ func TestWBLAndMmapReplay(t *testing.T) {
it := chk.Iterator(nil)
for it.Next() == chunkenc.ValFloat {
ts, val := it.At()
- s1MmapSamples = append(s1MmapSamples, sample{t: ts, v: val})
+ s1MmapSamples = append(s1MmapSamples, sample{t: ts, f: val})
}
}
require.Greater(t, len(s1MmapSamples), 0)
@@ -5171,9 +5280,9 @@ func TestWBLCorruption(t *testing.T) {
ts := min * time.Minute.Milliseconds()
_, err := app.Append(0, series1, ts, float64(ts))
require.NoError(t, err)
- allSamples = append(allSamples, sample{t: ts, v: float64(ts)})
+ allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
if afterRestart {
- expAfterRestart = append(expAfterRestart, sample{t: ts, v: float64(ts)})
+ expAfterRestart = append(expAfterRestart, sample{t: ts, f: float64(ts)})
}
}
require.NoError(t, app.Commit())
@@ -5317,9 +5426,9 @@ func TestOOOMmapCorruption(t *testing.T) {
ts := min * time.Minute.Milliseconds()
_, err := app.Append(0, series1, ts, float64(ts))
require.NoError(t, err)
- allSamples = append(allSamples, sample{t: ts, v: float64(ts)})
+ allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
if inMmapAfterCorruption {
- expInMmapChunks = append(expInMmapChunks, sample{t: ts, v: float64(ts)})
+ expInMmapChunks = append(expInMmapChunks, sample{t: ts, f: float64(ts)})
}
}
require.NoError(t, app.Commit())
@@ -5453,7 +5562,7 @@ func TestOutOfOrderRuntimeConfig(t *testing.T) {
_, err := app.Append(0, series1, ts, float64(ts))
if success {
require.NoError(t, err)
- allSamples = append(allSamples, sample{t: ts, v: float64(ts)})
+ allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
} else {
require.Error(t, err)
}
@@ -5667,7 +5776,7 @@ func TestNoGapAfterRestartWithOOO(t *testing.T) {
var expSamples []tsdbutil.Sample
for min := fromMins; min <= toMins; min++ {
ts := min * time.Minute.Milliseconds()
- expSamples = append(expSamples, sample{t: ts, v: float64(ts)})
+ expSamples = append(expSamples, sample{t: ts, f: float64(ts)})
}
expRes := map[string][]tsdbutil.Sample{
@@ -5774,7 +5883,7 @@ func TestWblReplayAfterOOODisableAndRestart(t *testing.T) {
ts := min * time.Minute.Milliseconds()
_, err := app.Append(0, series1, ts, float64(ts))
require.NoError(t, err)
- allSamples = append(allSamples, sample{t: ts, v: float64(ts)})
+ allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
}
require.NoError(t, app.Commit())
}
@@ -5833,7 +5942,7 @@ func TestPanicOnApplyConfig(t *testing.T) {
ts := min * time.Minute.Milliseconds()
_, err := app.Append(0, series1, ts, float64(ts))
require.NoError(t, err)
- allSamples = append(allSamples, sample{t: ts, v: float64(ts)})
+ allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
}
require.NoError(t, app.Commit())
}
@@ -5881,7 +5990,7 @@ func TestDiskFillingUpAfterDisablingOOO(t *testing.T) {
ts := min * time.Minute.Milliseconds()
_, err := app.Append(0, series1, ts, float64(ts))
require.NoError(t, err)
- allSamples = append(allSamples, sample{t: ts, v: float64(ts)})
+ allSamples = append(allSamples, sample{t: ts, f: float64(ts)})
}
require.NoError(t, app.Commit())
}
@@ -5988,7 +6097,7 @@ func testHistogramAppendAndQueryHelper(t *testing.T, floatHistogram bool) {
_, err := app.Append(0, lbls, minute(tsMinute), val)
require.NoError(t, err)
require.NoError(t, app.Commit())
- *exp = append(*exp, sample{t: minute(tsMinute), v: val})
+ *exp = append(*exp, sample{t: minute(tsMinute), f: val})
}
testQuery := func(name, value string, exp map[string][]tsdbutil.Sample) {
@@ -6244,7 +6353,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) {
switch typ {
case chunkenc.ValFloat:
ts, v := it.At()
- slice = append(slice, sample{t: ts, v: v})
+ slice = append(slice, sample{t: ts, f: v})
case chunkenc.ValHistogram:
ts, h := it.AtHistogram()
slice = append(slice, sample{t: ts, h: h})
@@ -6316,7 +6425,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) {
testBlockQuerying(t,
genHistogramSeries(10, 5, minute(0), minute(119), minute(1), floatHistogram),
genSeriesFromSampleGenerator(10, 5, minute(120), minute(239), minute(1), func(ts int64) tsdbutil.Sample {
- return sample{t: ts, v: rand.Float64()}
+ return sample{t: ts, f: rand.Float64()}
}),
genHistogramSeries(10, 5, minute(240), minute(359), minute(1), floatHistogram),
)
@@ -6328,7 +6437,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) {
genHistogramSeries(10, 5, minute(61), minute(120), minute(1), floatHistogram),
genHistogramAndFloatSeries(10, 5, minute(121), minute(180), minute(1), floatHistogram),
genSeriesFromSampleGenerator(10, 5, minute(181), minute(240), minute(1), func(ts int64) tsdbutil.Sample {
- return sample{t: ts, v: rand.Float64()}
+ return sample{t: ts, f: rand.Float64()}
}),
)
})
@@ -6345,7 +6454,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) {
testBlockQuerying(t,
genHistogramSeries(10, 5, minute(0), minute(120), minute(3), floatHistogram),
genSeriesFromSampleGenerator(10, 5, minute(1), minute(120), minute(3), func(ts int64) tsdbutil.Sample {
- return sample{t: ts, v: rand.Float64()}
+ return sample{t: ts, f: rand.Float64()}
}),
genHistogramSeries(10, 5, minute(2), minute(120), minute(3), floatHistogram),
)
@@ -6357,7 +6466,7 @@ func TestQueryHistogramFromBlocksWithCompaction(t *testing.T) {
genHistogramSeries(10, 5, minute(46), minute(100), minute(3), floatHistogram),
genHistogramAndFloatSeries(10, 5, minute(89), minute(140), minute(3), floatHistogram),
genSeriesFromSampleGenerator(10, 5, minute(126), minute(200), minute(3), func(ts int64) tsdbutil.Sample {
- return sample{t: ts, v: rand.Float64()}
+ return sample{t: ts, f: rand.Float64()}
}),
)
})

View file

@@ -618,6 +618,7 @@ func (h *Head) Init(minValidTime int64) error {
snapIdx, snapOffset := -1, 0
refSeries := make(map[chunks.HeadSeriesRef]*memSeries)
+ snapshotLoaded := false
if h.opts.EnableMemorySnapshotOnShutdown {
level.Info(h.logger).Log("msg", "Chunk snapshot is enabled, replaying from the snapshot")
// If there are any WAL files, there should be at least one WAL file with an index that is current or newer
@@ -647,6 +648,7 @@ func (h *Head) Init(minValidTime int64) error {
var err error
snapIdx, snapOffset, refSeries, err = h.loadChunkSnapshot()
if err == nil {
+ snapshotLoaded = true
level.Info(h.logger).Log("msg", "Chunk snapshot loading time", "duration", time.Since(start).String())
}
if err != nil {
@@ -664,26 +666,36 @@ func (h *Head) Init(minValidTime int64) error {
}
mmapChunkReplayStart := time.Now()
- mmappedChunks, oooMmappedChunks, lastMmapRef, err := h.loadMmappedChunks(refSeries)
- if err != nil {
- // TODO(codesome): clear out all m-map chunks here for refSeries.
- level.Error(h.logger).Log("msg", "Loading on-disk chunks failed", "err", err)
- if _, ok := errors.Cause(err).(*chunks.CorruptionErr); ok {
- h.metrics.mmapChunkCorruptionTotal.Inc()
- }
- // Discard snapshot data since we need to replay the WAL for the missed m-map chunks data.
- snapIdx, snapOffset = -1, 0
- // If this fails, data will be recovered from WAL.
- // Hence we wont lose any data (given WAL is not corrupt).
- mmappedChunks, oooMmappedChunks, lastMmapRef, err = h.removeCorruptedMmappedChunks(err)
- if err != nil {
- return err
- }
- }
- level.Info(h.logger).Log("msg", "On-disk memory mappable chunks replay completed", "duration", time.Since(mmapChunkReplayStart).String())
+ var (
+ mmappedChunks map[chunks.HeadSeriesRef][]*mmappedChunk
+ oooMmappedChunks map[chunks.HeadSeriesRef][]*mmappedChunk
+ lastMmapRef chunks.ChunkDiskMapperRef
+ err error
+ )
+ if snapshotLoaded || h.wal != nil {
+ // If snapshot was not loaded and if there is no WAL, then m-map chunks will be discarded
+ // anyway. So we only load m-map chunks when it won't be discarded.
+ mmappedChunks, oooMmappedChunks, lastMmapRef, err = h.loadMmappedChunks(refSeries)
+ if err != nil {
+ // TODO(codesome): clear out all m-map chunks here for refSeries.
+ level.Error(h.logger).Log("msg", "Loading on-disk chunks failed", "err", err)
+ if _, ok := errors.Cause(err).(*chunks.CorruptionErr); ok {
+ h.metrics.mmapChunkCorruptionTotal.Inc()
+ }
+ // Discard snapshot data since we need to replay the WAL for the missed m-map chunks data.
+ snapIdx, snapOffset = -1, 0
+ // If this fails, data will be recovered from WAL.
+ // Hence we wont lose any data (given WAL is not corrupt).
+ mmappedChunks, oooMmappedChunks, lastMmapRef, err = h.removeCorruptedMmappedChunks(err)
+ if err != nil {
+ return err
+ }
+ }
+ level.Info(h.logger).Log("msg", "On-disk memory mappable chunks replay completed", "duration", time.Since(mmapChunkReplayStart).String())
+ }
if h.wal == nil {
level.Info(h.logger).Log("msg", "WAL not found")
return nil
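The effect of the Init change above: m-mapped chunks are now replayed only when they can actually be kept, i.e. when a chunk snapshot was loaded or a WAL exists. A minimal sketch of that rule, as a hypothetical helper that is not part of this change:

// shouldReplayMmappedChunks captures the gating condition the new code adds
// around h.loadMmappedChunks: without a loaded snapshot and without a WAL,
// loaded m-map chunks would be discarded anyway, so loading is skipped.
func shouldReplayMmappedChunks(snapshotLoaded, walExists bool) bool {
    return snapshotLoaded || walExists
}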
@@ -851,6 +863,7 @@ func (h *Head) loadMmappedChunks(refSeries map[chunks.HeadSeriesRef]*memSeries)
numSamples: numSamples,
})
+ h.updateMinOOOMaxOOOTime(mint, maxt)
return nil
}
@@ -1878,7 +1891,7 @@ func (s *stripeSeries) getOrSet(hash uint64, lset labels.Labels, createSeries fu
type sample struct {
t int64
- v float64
+ f float64
h *histogram.Histogram
fh *histogram.FloatHistogram
}
@@ -1888,7 +1901,7 @@ func newSample(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHi
}
func (s sample) T() int64 { return s.t }
- func (s sample) V() float64 { return s.v }
+ func (s sample) F() float64 { return s.f }
func (s sample) H() *histogram.Histogram { return s.h }
func (s sample) FH() *histogram.FloatHistogram { return s.fh }

View file

@@ -437,7 +437,7 @@ func (s *memSeries) appendable(t int64, v float64, headMaxt, minValidTime, oooTi
return false, headMaxt - t, storage.ErrOutOfOrderSample
}
- // appendableHistogram checks whether the given sample is valid for appending to the series.
+ // appendableHistogram checks whether the given histogram is valid for appending to the series.
func (s *memSeries) appendableHistogram(t int64, h *histogram.Histogram) error {
c := s.head()
if c == nil {
@@ -459,7 +459,7 @@ func (s *memSeries) appendableHistogram(t int64, h *histogram.Histogram) error {
return nil
}
- // appendableFloatHistogram checks whether the given sample is valid for appending to the series.
+ // appendableFloatHistogram checks whether the given float histogram is valid for appending to the series.
func (s *memSeries) appendableFloatHistogram(t int64, fh *histogram.FloatHistogram) error {
c := s.head()
if c == nil {

View file

@@ -466,8 +466,8 @@ func TestHead_HighConcurrencyReadAndWrite(t *testing.T) {
if sample.T() != int64(expectedValue) {
return false, fmt.Errorf("expected sample %d to have ts %d, got %d", sampleIdx, expectedValue, sample.T())
}
- if sample.V() != float64(expectedValue) {
- return false, fmt.Errorf("expected sample %d to have value %d, got %f", sampleIdx, expectedValue, sample.V())
+ if sample.F() != float64(expectedValue) {
+ return false, fmt.Errorf("expected sample %d to have value %d, got %f", sampleIdx, expectedValue, sample.F())
}
}
@@ -576,7 +576,7 @@ func TestHead_ReadWAL(t *testing.T) {
expandChunk := func(c chunkenc.Iterator) (x []sample) {
for c.Next() == chunkenc.ValFloat {
t, v := c.At()
- x = append(x, sample{t: t, v: v})
+ x = append(x, sample{t: t, f: v})
}
require.NoError(t, c.Err())
return x
@@ -872,7 +872,7 @@ func TestHeadDeleteSimple(t *testing.T) {
buildSmpls := func(s []int64) []sample {
ss := make([]sample, 0, len(s))
for _, t := range s {
- ss = append(ss, sample{t: t, v: float64(t)})
+ ss = append(ss, sample{t: t, f: float64(t)})
}
return ss
}
@@ -927,7 +927,7 @@ func TestHeadDeleteSimple(t *testing.T) {
app := head.Appender(context.Background())
for _, smpl := range smplsAll {
- _, err := app.Append(0, lblsDefault, smpl.t, smpl.v)
+ _, err := app.Append(0, lblsDefault, smpl.t, smpl.f)
require.NoError(t, err)
}
@@ -941,7 +941,7 @@ func TestHeadDeleteSimple(t *testing.T) {
// Add more samples.
app = head.Appender(context.Background())
for _, smpl := range c.addSamples {
- _, err := app.Append(0, lblsDefault, smpl.t, smpl.v)
+ _, err := app.Append(0, lblsDefault, smpl.t, smpl.f)
require.NoError(t, err)
}
@@ -1929,7 +1929,7 @@ func TestMemSeriesIsolation(t *testing.T) {
require.Equal(t, 0, len(ws))
for _, series := range seriesSet {
- return int(series[len(series)-1].v)
+ return int(series[len(series)-1].f)
}
return -1
}
@@ -3155,7 +3155,7 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
ts++
_, err := app.Append(0, s2, int64(ts), float64(ts))
require.NoError(t, err)
- exp[k2] = append(exp[k2], sample{t: int64(ts), v: float64(ts)})
+ exp[k2] = append(exp[k2], sample{t: int64(ts), f: float64(ts)})
}
require.NoError(t, app.Commit())
app = head.Appender(context.Background())
@@ -3192,7 +3192,7 @@ func TestHistogramInWALAndMmapChunk(t *testing.T) {
ts++
_, err := app.Append(0, s2, int64(ts), float64(ts))
require.NoError(t, err)
- exp[k2] = append(exp[k2], sample{t: int64(ts), v: float64(ts)})
+ exp[k2] = append(exp[k2], sample{t: int64(ts), f: float64(ts)})
}
require.NoError(t, app.Commit())
app = head.Appender(context.Background())
@@ -3879,7 +3879,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
expChunks: 1,
},
{
- samples: []tsdbutil.Sample{sample{t: 200, v: 2}},
+ samples: []tsdbutil.Sample{sample{t: 200, f: 2}},
expChunks: 2,
},
{
@@ -3903,7 +3903,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
expChunks: 6,
},
{
- samples: []tsdbutil.Sample{sample{t: 100, v: 2}},
+ samples: []tsdbutil.Sample{sample{t: 100, f: 2}},
err: storage.ErrOutOfOrderSample,
},
{
@@ -3914,13 +3914,13 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
// Combination of histograms and float64 in the same commit. The behaviour is undefined, but we want to also
// verify how TSDB would behave. Here the histogram is appended at the end, hence will be considered as out of order.
samples: []tsdbutil.Sample{
- sample{t: 400, v: 4},
+ sample{t: 400, f: 4},
sample{t: 500, h: hists[5]}, // This won't be committed.
- sample{t: 600, v: 6},
+ sample{t: 600, f: 6},
},
addToExp: []tsdbutil.Sample{
- sample{t: 400, v: 4},
- sample{t: 600, v: 6},
+ sample{t: 400, f: 4},
+ sample{t: 600, f: 6},
},
expChunks: 7, // Only 1 new chunk for float64.
},
@@ -3928,11 +3928,11 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
// Here the histogram is appended at the end, hence the first histogram is out of order.
samples: []tsdbutil.Sample{
sample{t: 700, h: hists[7]}, // Out of order w.r.t. the next float64 sample that is appended first.
- sample{t: 800, v: 8},
+ sample{t: 800, f: 8},
sample{t: 900, h: hists[9]},
},
addToExp: []tsdbutil.Sample{
- sample{t: 800, v: 8},
+ sample{t: 800, f: 8},
sample{t: 900, h: hists[9].Copy()},
},
expChunks: 8, // float64 added to old chunk, only 1 new for histograms.
@@ -3957,7 +3957,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) {
if s.H() != nil || s.FH() != nil {
_, err = app.AppendHistogram(0, lbls, s.T(), s.H(), s.FH())
} else {
- _, err = app.Append(0, lbls, s.T(), s.V())
+ _, err = app.Append(0, lbls, s.T(), s.F())
}
require.Equal(t, a.err, err)
}
@@ -4123,7 +4123,7 @@ func TestOOOWalReplay(t *testing.T) {
require.NoError(t, app.Commit())
if isOOO {
- expOOOSamples = append(expOOOSamples, sample{t: ts, v: v})
+ expOOOSamples = append(expOOOSamples, sample{t: ts, f: v})
}
}
@@ -4167,7 +4167,7 @@ func TestOOOWalReplay(t *testing.T) {
actOOOSamples := make([]sample, 0, len(expOOOSamples))
for it.Next() == chunkenc.ValFloat {
ts, v := it.At()
- actOOOSamples = append(actOOOSamples, sample{t: ts, v: v})
+ actOOOSamples = append(actOOOSamples, sample{t: ts, f: v})
}
// OOO chunk will be sorted. Hence sort the expected samples.
@@ -4427,7 +4427,7 @@ func TestReplayAfterMmapReplayError(t *testing.T) {
var ref storage.SeriesRef
for i := 0; i < numSamples; i++ {
ref, err = app.Append(ref, lbls, lastTs, float64(lastTs))
- expSamples = append(expSamples, sample{t: lastTs, v: float64(lastTs)})
+ expSamples = append(expSamples, sample{t: lastTs, f: float64(lastTs)})
require.NoError(t, err)
lastTs += itvl
if i%10 == 0 {

View file

@@ -78,7 +78,7 @@ func (o *OOOChunk) ToXOR() (*chunkenc.XORChunk, error) {
return nil, err
}
for _, s := range o.samples {
- app.Append(s.t, s.v)
+ app.Append(s.t, s.f)
}
return x, nil
}
@@ -96,7 +96,7 @@ func (o *OOOChunk) ToXORBetweenTimestamps(mint, maxt int64) (*chunkenc.XORChunk,
if s.t > maxt {
break
}
- app.Append(s.t, s.f)
+ app.Append(s.t, s.f)
}
return x, nil
}
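Both ToXOR and ToXORBetweenTimestamps assume o.samples is already sorted by timestamp and simply re-encode the (t, f) pairs. A self-contained sketch of that encode/decode round trip against the chunkenc package (sample values are illustrative, not from the diff):

package main

import (
    "fmt"

    "github.com/prometheus/prometheus/tsdb/chunkenc"
)

func main() {
    // Encode sorted samples into an XOR-compressed chunk, as ToXOR does.
    c := chunkenc.NewXORChunk()
    app, err := c.Appender()
    if err != nil {
        panic(err)
    }
    for ts := int64(0); ts < 5; ts++ {
        app.Append(ts, float64(ts)*1.5)
    }

    // Decode the chunk again; this mirrors how the tests read chunks back.
    it := c.Iterator(nil)
    for it.Next() == chunkenc.ValFloat {
        ts, f := it.At()
        fmt.Println(ts, f)
    }
}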

View file

@@ -504,8 +504,8 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
queryMaxT: minutes(100),
firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{
- sample{t: minutes(30), v: float64(0)},
- sample{t: minutes(40), v: float64(0)},
+ sample{t: minutes(30), f: float64(0)},
+ sample{t: minutes(40), f: float64(0)},
},
expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@@ -514,8 +514,8 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [--------] (With 2 samples)
expChunksSamples: []tsdbutil.SampleSlice{
{
- sample{t: minutes(30), v: float64(0)},
- sample{t: minutes(40), v: float64(0)},
+ sample{t: minutes(30), f: float64(0)},
+ sample{t: minutes(40), f: float64(0)},
},
},
},
@@ -526,15 +526,15 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{
// opts.OOOCapMax is 5 so these will be mmapped to the first mmapped chunk
- sample{t: minutes(41), v: float64(0)},
- sample{t: minutes(42), v: float64(0)},
- sample{t: minutes(43), v: float64(0)},
- sample{t: minutes(44), v: float64(0)},
- sample{t: minutes(45), v: float64(0)},
+ sample{t: minutes(41), f: float64(0)},
+ sample{t: minutes(42), f: float64(0)},
+ sample{t: minutes(43), f: float64(0)},
+ sample{t: minutes(44), f: float64(0)},
+ sample{t: minutes(45), f: float64(0)},
// The following samples will go to the head chunk, and we want it
// to overlap with the previous chunk
- sample{t: minutes(30), v: float64(1)},
- sample{t: minutes(50), v: float64(1)},
+ sample{t: minutes(30), f: float64(1)},
+ sample{t: minutes(50), f: float64(1)},
},
expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@@ -544,13 +544,13 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [-----------------] (With 7 samples)
expChunksSamples: []tsdbutil.SampleSlice{
{
- sample{t: minutes(30), v: float64(1)},
- sample{t: minutes(41), v: float64(0)},
- sample{t: minutes(42), v: float64(0)},
- sample{t: minutes(43), v: float64(0)},
- sample{t: minutes(44), v: float64(0)},
- sample{t: minutes(45), v: float64(0)},
- sample{t: minutes(50), v: float64(1)},
+ sample{t: minutes(30), f: float64(1)},
+ sample{t: minutes(41), f: float64(0)},
+ sample{t: minutes(42), f: float64(0)},
+ sample{t: minutes(43), f: float64(0)},
+ sample{t: minutes(44), f: float64(0)},
+ sample{t: minutes(45), f: float64(0)},
+ sample{t: minutes(50), f: float64(1)},
},
},
},
@@ -561,26 +561,26 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{
// Chunk 0
- sample{t: minutes(10), v: float64(0)},
- sample{t: minutes(12), v: float64(0)},
- sample{t: minutes(14), v: float64(0)},
- sample{t: minutes(16), v: float64(0)},
- sample{t: minutes(20), v: float64(0)},
+ sample{t: minutes(10), f: float64(0)},
+ sample{t: minutes(12), f: float64(0)},
+ sample{t: minutes(14), f: float64(0)},
+ sample{t: minutes(16), f: float64(0)},
+ sample{t: minutes(20), f: float64(0)},
// Chunk 1
- sample{t: minutes(20), v: float64(1)},
- sample{t: minutes(22), v: float64(1)},
- sample{t: minutes(24), v: float64(1)},
- sample{t: minutes(26), v: float64(1)},
- sample{t: minutes(29), v: float64(1)},
+ sample{t: minutes(20), f: float64(1)},
+ sample{t: minutes(22), f: float64(1)},
+ sample{t: minutes(24), f: float64(1)},
+ sample{t: minutes(26), f: float64(1)},
+ sample{t: minutes(29), f: float64(1)},
// Chunk 2
- sample{t: minutes(30), v: float64(2)},
- sample{t: minutes(32), v: float64(2)},
- sample{t: minutes(34), v: float64(2)},
- sample{t: minutes(36), v: float64(2)},
- sample{t: minutes(40), v: float64(2)},
+ sample{t: minutes(30), f: float64(2)},
+ sample{t: minutes(32), f: float64(2)},
+ sample{t: minutes(34), f: float64(2)},
+ sample{t: minutes(36), f: float64(2)},
+ sample{t: minutes(40), f: float64(2)},
// Head
- sample{t: minutes(40), v: float64(3)},
- sample{t: minutes(50), v: float64(3)},
+ sample{t: minutes(40), f: float64(3)},
+ sample{t: minutes(50), f: float64(3)},
},
expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@@ -592,23 +592,23 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [----------------][-----------------]
expChunksSamples: []tsdbutil.SampleSlice{
{
- sample{t: minutes(10), v: float64(0)},
- sample{t: minutes(12), v: float64(0)},
- sample{t: minutes(14), v: float64(0)},
- sample{t: minutes(16), v: float64(0)},
- sample{t: minutes(20), v: float64(1)},
- sample{t: minutes(22), v: float64(1)},
- sample{t: minutes(24), v: float64(1)},
- sample{t: minutes(26), v: float64(1)},
- sample{t: minutes(29), v: float64(1)},
+ sample{t: minutes(10), f: float64(0)},
+ sample{t: minutes(12), f: float64(0)},
+ sample{t: minutes(14), f: float64(0)},
+ sample{t: minutes(16), f: float64(0)},
+ sample{t: minutes(20), f: float64(1)},
+ sample{t: minutes(22), f: float64(1)},
+ sample{t: minutes(24), f: float64(1)},
+ sample{t: minutes(26), f: float64(1)},
+ sample{t: minutes(29), f: float64(1)},
},
{
- sample{t: minutes(30), v: float64(2)},
- sample{t: minutes(32), v: float64(2)},
- sample{t: minutes(34), v: float64(2)},
- sample{t: minutes(36), v: float64(2)},
- sample{t: minutes(40), v: float64(3)},
- sample{t: minutes(50), v: float64(3)},
+ sample{t: minutes(30), f: float64(2)},
+ sample{t: minutes(32), f: float64(2)},
+ sample{t: minutes(34), f: float64(2)},
+ sample{t: minutes(36), f: float64(2)},
+ sample{t: minutes(40), f: float64(3)},
+ sample{t: minutes(50), f: float64(3)},
},
},
},
@@ -619,26 +619,26 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{
// Chunk 0
- sample{t: minutes(40), v: float64(0)},
- sample{t: minutes(42), v: float64(0)},
- sample{t: minutes(44), v: float64(0)},
- sample{t: minutes(46), v: float64(0)},
- sample{t: minutes(50), v: float64(0)},
+ sample{t: minutes(40), f: float64(0)},
+ sample{t: minutes(42), f: float64(0)},
+ sample{t: minutes(44), f: float64(0)},
+ sample{t: minutes(46), f: float64(0)},
+ sample{t: minutes(50), f: float64(0)},
// Chunk 1
- sample{t: minutes(30), v: float64(1)},
- sample{t: minutes(32), v: float64(1)},
- sample{t: minutes(34), v: float64(1)},
- sample{t: minutes(36), v: float64(1)},
- sample{t: minutes(40), v: float64(1)},
+ sample{t: minutes(30), f: float64(1)},
+ sample{t: minutes(32), f: float64(1)},
+ sample{t: minutes(34), f: float64(1)},
+ sample{t: minutes(36), f: float64(1)},
+ sample{t: minutes(40), f: float64(1)},
// Chunk 2
- sample{t: minutes(20), v: float64(2)},
- sample{t: minutes(22), v: float64(2)},
- sample{t: minutes(24), v: float64(2)},
- sample{t: minutes(26), v: float64(2)},
- sample{t: minutes(29), v: float64(2)},
+ sample{t: minutes(20), f: float64(2)},
+ sample{t: minutes(22), f: float64(2)},
+ sample{t: minutes(24), f: float64(2)},
+ sample{t: minutes(26), f: float64(2)},
+ sample{t: minutes(29), f: float64(2)},
// Head
- sample{t: minutes(10), v: float64(3)},
- sample{t: minutes(20), v: float64(3)},
+ sample{t: minutes(10), f: float64(3)},
+ sample{t: minutes(20), f: float64(3)},
},
expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@@ -650,23 +650,23 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [----------------][-----------------]
expChunksSamples: []tsdbutil.SampleSlice{
{
- sample{t: minutes(10), v: float64(3)},
- sample{t: minutes(20), v: float64(2)},
- sample{t: minutes(22), v: float64(2)},
- sample{t: minutes(24), v: float64(2)},
- sample{t: minutes(26), v: float64(2)},
- sample{t: minutes(29), v: float64(2)},
+ sample{t: minutes(10), f: float64(3)},
+ sample{t: minutes(20), f: float64(2)},
+ sample{t: minutes(22), f: float64(2)},
+ sample{t: minutes(24), f: float64(2)},
+ sample{t: minutes(26), f: float64(2)},
+ sample{t: minutes(29), f: float64(2)},
},
{
- sample{t: minutes(30), v: float64(1)},
- sample{t: minutes(32), v: float64(1)},
- sample{t: minutes(34), v: float64(1)},
- sample{t: minutes(36), v: float64(1)},
- sample{t: minutes(40), v: float64(0)},
- sample{t: minutes(42), v: float64(0)},
- sample{t: minutes(44), v: float64(0)},
- sample{t: minutes(46), v: float64(0)},
- sample{t: minutes(50), v: float64(0)},
+ sample{t: minutes(30), f: float64(1)},
+ sample{t: minutes(32), f: float64(1)},
+ sample{t: minutes(34), f: float64(1)},
+ sample{t: minutes(36), f: float64(1)},
+ sample{t: minutes(40), f: float64(0)},
+ sample{t: minutes(42), f: float64(0)},
+ sample{t: minutes(44), f: float64(0)},
+ sample{t: minutes(46), f: float64(0)},
+ sample{t: minutes(50), f: float64(0)},
},
},
},
@@ -677,26 +677,26 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{
// Chunk 0
- sample{t: minutes(10), v: float64(0)},
- sample{t: minutes(12), v: float64(0)},
- sample{t: minutes(14), v: float64(0)},
- sample{t: minutes(16), v: float64(0)},
- sample{t: minutes(18), v: float64(0)},
+ sample{t: minutes(10), f: float64(0)},
+ sample{t: minutes(12), f: float64(0)},
+ sample{t: minutes(14), f: float64(0)},
+ sample{t: minutes(16), f: float64(0)},
+ sample{t: minutes(18), f: float64(0)},
// Chunk 1
- sample{t: minutes(20), v: float64(1)},
- sample{t: minutes(22), v: float64(1)},
- sample{t: minutes(24), v: float64(1)},
- sample{t: minutes(26), v: float64(1)},
- sample{t: minutes(28), v: float64(1)},
+ sample{t: minutes(20), f: float64(1)},
+ sample{t: minutes(22), f: float64(1)},
+ sample{t: minutes(24), f: float64(1)},
+ sample{t: minutes(26), f: float64(1)},
+ sample{t: minutes(28), f: float64(1)},
// Chunk 2
- sample{t: minutes(30), v: float64(2)},
- sample{t: minutes(32), v: float64(2)},
- sample{t: minutes(34), v: float64(2)},
- sample{t: minutes(36), v: float64(2)},
- sample{t: minutes(38), v: float64(2)},
+ sample{t: minutes(30), f: float64(2)},
+ sample{t: minutes(32), f: float64(2)},
+ sample{t: minutes(34), f: float64(2)},
+ sample{t: minutes(36), f: float64(2)},
+ sample{t: minutes(38), f: float64(2)},
// Head
- sample{t: minutes(40), v: float64(3)},
- sample{t: minutes(42), v: float64(3)},
+ sample{t: minutes(40), f: float64(3)},
+ sample{t: minutes(42), f: float64(3)},
},
expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@@ -708,29 +708,29 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [-------][-------][-------][--------]
expChunksSamples: []tsdbutil.SampleSlice{
{
- sample{t: minutes(10), v: float64(0)},
- sample{t: minutes(12), v: float64(0)},
- sample{t: minutes(14), v: float64(0)},
- sample{t: minutes(16), v: float64(0)},
- sample{t: minutes(18), v: float64(0)},
+ sample{t: minutes(10), f: float64(0)},
+ sample{t: minutes(12), f: float64(0)},
+ sample{t: minutes(14), f: float64(0)},
+ sample{t: minutes(16), f: float64(0)},
+ sample{t: minutes(18), f: float64(0)},
},
{
- sample{t: minutes(20), v: float64(1)},
- sample{t: minutes(22), v: float64(1)},
- sample{t: minutes(24), v: float64(1)},
- sample{t: minutes(26), v: float64(1)},
- sample{t: minutes(28), v: float64(1)},
+ sample{t: minutes(20), f: float64(1)},
+ sample{t: minutes(22), f: float64(1)},
+ sample{t: minutes(24), f: float64(1)},
+ sample{t: minutes(26), f: float64(1)},
+ sample{t: minutes(28), f: float64(1)},
},
{
- sample{t: minutes(30), v: float64(2)},
- sample{t: minutes(32), v: float64(2)},
- sample{t: minutes(34), v: float64(2)},
- sample{t: minutes(36), v: float64(2)},
- sample{t: minutes(38), v: float64(2)},
+ sample{t: minutes(30), f: float64(2)},
+ sample{t: minutes(32), f: float64(2)},
+ sample{t: minutes(34), f: float64(2)},
+ sample{t: minutes(36), f: float64(2)},
+ sample{t: minutes(38), f: float64(2)},
},
{
- sample{t: minutes(40), v: float64(3)},
- sample{t: minutes(42), v: float64(3)},
+ sample{t: minutes(40), f: float64(3)},
+ sample{t: minutes(42), f: float64(3)},
},
},
},
@@ -741,20 +741,20 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{
// Chunk 0
- sample{t: minutes(10), v: float64(0)},
- sample{t: minutes(15), v: float64(0)},
- sample{t: minutes(20), v: float64(0)},
- sample{t: minutes(25), v: float64(0)},
- sample{t: minutes(30), v: float64(0)},
+ sample{t: minutes(10), f: float64(0)},
+ sample{t: minutes(15), f: float64(0)},
+ sample{t: minutes(20), f: float64(0)},
+ sample{t: minutes(25), f: float64(0)},
+ sample{t: minutes(30), f: float64(0)},
// Chunk 1
- sample{t: minutes(20), v: float64(1)},
- sample{t: minutes(25), v: float64(1)},
- sample{t: minutes(30), v: float64(1)},
- sample{t: minutes(35), v: float64(1)},
- sample{t: minutes(42), v: float64(1)},
+ sample{t: minutes(20), f: float64(1)},
+ sample{t: minutes(25), f: float64(1)},
+ sample{t: minutes(30), f: float64(1)},
+ sample{t: minutes(35), f: float64(1)},
+ sample{t: minutes(42), f: float64(1)},
// Chunk 2 Head
- sample{t: minutes(32), v: float64(2)},
- sample{t: minutes(50), v: float64(2)},
+ sample{t: minutes(32), f: float64(2)},
+ sample{t: minutes(50), f: float64(2)},
},
expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@@ -765,15 +765,15 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [-----------------------------------]
expChunksSamples: []tsdbutil.SampleSlice{
{
- sample{t: minutes(10), v: float64(0)},
- sample{t: minutes(15), v: float64(0)},
- sample{t: minutes(20), v: float64(1)},
- sample{t: minutes(25), v: float64(1)},
- sample{t: minutes(30), v: float64(1)},
- sample{t: minutes(32), v: float64(2)},
- sample{t: minutes(35), v: float64(1)},
- sample{t: minutes(42), v: float64(1)},
- sample{t: minutes(50), v: float64(2)},
+ sample{t: minutes(10), f: float64(0)},
+ sample{t: minutes(15), f: float64(0)},
+ sample{t: minutes(20), f: float64(1)},
+ sample{t: minutes(25), f: float64(1)},
+ sample{t: minutes(30), f: float64(1)},
+ sample{t: minutes(32), f: float64(2)},
+ sample{t: minutes(35), f: float64(1)},
+ sample{t: minutes(42), f: float64(1)},
+ sample{t: minutes(50), f: float64(2)},
},
},
},
@@ -784,20 +784,20 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
firstInOrderSampleAt: minutes(120),
inputSamples: tsdbutil.SampleSlice{
// Chunk 0
- sample{t: minutes(10), v: float64(0)},
- sample{t: minutes(15), v: float64(0)},
- sample{t: minutes(20), v: float64(0)},
- sample{t: minutes(25), v: float64(0)},
- sample{t: minutes(30), v: float64(0)},
+ sample{t: minutes(10), f: float64(0)},
+ sample{t: minutes(15), f: float64(0)},
+ sample{t: minutes(20), f: float64(0)},
+ sample{t: minutes(25), f: float64(0)},
+ sample{t: minutes(30), f: float64(0)},
// Chunk 1
- sample{t: minutes(20), v: float64(1)},
- sample{t: minutes(25), v: float64(1)},
- sample{t: minutes(30), v: float64(1)},
- sample{t: minutes(35), v: float64(1)},
- sample{t: minutes(42), v: float64(1)},
+ sample{t: minutes(20), f: float64(1)},
+ sample{t: minutes(25), f: float64(1)},
+ sample{t: minutes(30), f: float64(1)},
+ sample{t: minutes(35), f: float64(1)},
+ sample{t: minutes(42), f: float64(1)},
// Chunk 2 Head
- sample{t: minutes(32), v: float64(2)},
- sample{t: minutes(50), v: float64(2)},
+ sample{t: minutes(32), f: float64(2)},
+ sample{t: minutes(50), f: float64(2)},
},
expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@@ -808,15 +808,15 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// Output Graphically [-----------------------------------]
expChunksSamples: []tsdbutil.SampleSlice{
{
- sample{t: minutes(10), v: float64(0)},
- sample{t: minutes(15), v: float64(0)},
- sample{t: minutes(20), v: float64(1)},
- sample{t: minutes(25), v: float64(1)},
- sample{t: minutes(30), v: float64(1)},
- sample{t: minutes(32), v: float64(2)},
- sample{t: minutes(35), v: float64(1)},
- sample{t: minutes(42), v: float64(1)},
- sample{t: minutes(50), v: float64(2)},
+ sample{t: minutes(10), f: float64(0)},
+ sample{t: minutes(15), f: float64(0)},
+ sample{t: minutes(20), f: float64(1)},
+ sample{t: minutes(25), f: float64(1)},
+ sample{t: minutes(30), f: float64(1)},
+ sample{t: minutes(32), f: float64(2)},
+ sample{t: minutes(35), f: float64(1)},
+ sample{t: minutes(42), f: float64(1)},
+ sample{t: minutes(50), f: float64(2)},
},
},
},
@@ -833,7 +833,7 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
// OOO few samples for s1.
app = db.Appender(context.Background())
for _, s := range tc.inputSamples {
- appendSample(app, s1, s.T(), s.V())
+ appendSample(app, s1, s.T(), s.F())
}
require.NoError(t, app.Commit())
@@ -855,7 +855,7 @@ func TestOOOHeadChunkReader_Chunk(t *testing.T) {
it := c.Iterator(nil)
for it.Next() == chunkenc.ValFloat {
t, v := it.At()
- resultSamples = append(resultSamples, sample{t: t, v: v})
+ resultSamples = append(resultSamples, sample{t: t, f: v})
}
require.Equal(t, tc.expChunksSamples[i], resultSamples)
}
@@ -902,19 +902,19 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
firstInOrderSampleAt: minutes(120),
initialSamples: tsdbutil.SampleSlice{
// Chunk 0
- sample{t: minutes(20), v: float64(0)},
- sample{t: minutes(22), v: float64(0)},
- sample{t: minutes(24), v: float64(0)},
- sample{t: minutes(26), v: float64(0)},
- sample{t: minutes(30), v: float64(0)},
+ sample{t: minutes(20), f: float64(0)},
+ sample{t: minutes(22), f: float64(0)},
+ sample{t: minutes(24), f: float64(0)},
+ sample{t: minutes(26), f: float64(0)},
+ sample{t: minutes(30), f: float64(0)},
// Chunk 1 Head
- sample{t: minutes(25), v: float64(1)},
- sample{t: minutes(35), v: float64(1)},
+ sample{t: minutes(25), f: float64(1)},
+ sample{t: minutes(35), f: float64(1)},
},
samplesAfterSeriesCall: tsdbutil.SampleSlice{
- sample{t: minutes(10), v: float64(1)},
- sample{t: minutes(32), v: float64(1)},
- sample{t: minutes(50), v: float64(1)},
+ sample{t: minutes(10), f: float64(1)},
+ sample{t: minutes(32), f: float64(1)},
+ sample{t: minutes(50), f: float64(1)},
},
expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@@ -926,14 +926,14 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
// Output Graphically [------------] (With 8 samples, samples newer than lastmint or older than lastmaxt are omitted but the ones in between are kept)
expChunksSamples: []tsdbutil.SampleSlice{
{
- sample{t: minutes(20), v: float64(0)},
- sample{t: minutes(22), v: float64(0)},
- sample{t: minutes(24), v: float64(0)},
- sample{t: minutes(25), v: float64(1)},
- sample{t: minutes(26), v: float64(0)},
- sample{t: minutes(30), v: float64(0)},
- sample{t: minutes(32), v: float64(1)}, // This sample was added after Series() but before Chunk() and its in between the lastmint and maxt so it should be kept
- sample{t: minutes(35), v: float64(1)},
+ sample{t: minutes(20), f: float64(0)},
+ sample{t: minutes(22), f: float64(0)},
+ sample{t: minutes(24), f: float64(0)},
+ sample{t: minutes(25), f: float64(1)},
+ sample{t: minutes(26), f: float64(0)},
+ sample{t: minutes(30), f: float64(0)},
+ sample{t: minutes(32), f: float64(1)}, // This sample was added after Series() but before Chunk() and its in between the lastmint and maxt so it should be kept
+ sample{t: minutes(35), f: float64(1)},
},
},
},
@@ -944,22 +944,22 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
firstInOrderSampleAt: minutes(120),
initialSamples: tsdbutil.SampleSlice{
// Chunk 0
- sample{t: minutes(20), v: float64(0)},
- sample{t: minutes(22), v: float64(0)},
- sample{t: minutes(24), v: float64(0)},
- sample{t: minutes(26), v: float64(0)},
- sample{t: minutes(30), v: float64(0)},
+ sample{t: minutes(20), f: float64(0)},
+ sample{t: minutes(22), f: float64(0)},
+ sample{t: minutes(24), f: float64(0)},
+ sample{t: minutes(26), f: float64(0)},
+ sample{t: minutes(30), f: float64(0)},
// Chunk 1 Head
- sample{t: minutes(25), v: float64(1)},
- sample{t: minutes(35), v: float64(1)},
+ sample{t: minutes(25), f: float64(1)},
+ sample{t: minutes(35), f: float64(1)},
},
samplesAfterSeriesCall: tsdbutil.SampleSlice{
- sample{t: minutes(10), v: float64(1)},
- sample{t: minutes(32), v: float64(1)},
- sample{t: minutes(50), v: float64(1)},
+ sample{t: minutes(10), f: float64(1)},
+ sample{t: minutes(32), f: float64(1)},
+ sample{t: minutes(50), f: float64(1)},
// Chunk 1 gets mmapped and Chunk 2, the new head is born
- sample{t: minutes(25), v: float64(2)},
- sample{t: minutes(31), v: float64(2)},
+ sample{t: minutes(25), f: float64(2)},
+ sample{t: minutes(31), f: float64(2)},
},
expChunkError: false,
// ts (in minutes) 0 10 20 30 40 50 60 70 80 90 100
@@ -972,14 +972,14 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
// Output Graphically [------------] (8 samples) It has 5 from Chunk 0 and 3 from Chunk 1
expChunksSamples: []tsdbutil.SampleSlice{
{
- sample{t: minutes(20), v: float64(0)},
- sample{t: minutes(22), v: float64(0)},
- sample{t: minutes(24), v: float64(0)},
- sample{t: minutes(25), v: float64(1)},
- sample{t: minutes(26), v: float64(0)},
- sample{t: minutes(30), v: float64(0)},
- sample{t: minutes(32), v: float64(1)}, // This sample was added after Series() but before Chunk() and its in between the lastmint and maxt so it should be kept
- sample{t: minutes(35), v: float64(1)},
+ sample{t: minutes(20), f: float64(0)},
+ sample{t: minutes(22), f: float64(0)},
+ sample{t: minutes(24), f: float64(0)},
+ sample{t: minutes(25), f: float64(1)},
+ sample{t: minutes(26), f: float64(0)},
+ sample{t: minutes(30), f: float64(0)},
+ sample{t: minutes(32), f: float64(1)}, // This sample was added after Series() but before Chunk() and its in between the lastmint and maxt so it should be kept
+ sample{t: minutes(35), f: float64(1)},
},
},
},
@@ -996,7 +996,7 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
// OOO few samples for s1.
app = db.Appender(context.Background())
for _, s := range tc.initialSamples {
- appendSample(app, s1, s.T(), s.V())
+ appendSample(app, s1, s.T(), s.F())
}
require.NoError(t, app.Commit())
@@ -1013,7 +1013,7 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
// OOO few samples for s1.
app = db.Appender(context.Background())
for _, s := range tc.samplesAfterSeriesCall {
- appendSample(app, s1, s.T(), s.V())
+ appendSample(app, s1, s.T(), s.F())
}
require.NoError(t, app.Commit())
@@ -1026,7 +1026,7 @@ func TestOOOHeadChunkReader_Chunk_ConsistentQueryResponseDespiteOfHeadExpanding(
it := c.Iterator(nil)
for it.Next() == chunkenc.ValFloat {
ts, v := it.At()
- resultSamples = append(resultSamples, sample{t: ts, v: v})
+ resultSamples = append(resultSamples, sample{t: ts, f: v})
}
require.Equal(t, tc.expChunksSamples[i], resultSamples)
}

View file

@@ -52,7 +52,7 @@ func TestOOOInsert(t *testing.T) {
chunk := NewOOOChunk()
chunk.samples = makeEvenSampleSlice(numPreExisting)
newSample := samplify(valOdd(insertPos))
- chunk.Insert(newSample.t, newSample.v)
+ chunk.Insert(newSample.t, newSample.f)
var expSamples []sample
// Our expected new samples slice, will be first the original samples.
@@ -81,9 +81,9 @@ func TestOOOInsertDuplicate(t *testing.T) {
chunk.samples = makeEvenSampleSlice(num)
dupSample := chunk.samples[dupPos]
- dupSample.v = 0.123
- ok := chunk.Insert(dupSample.t, dupSample.v)
+ dupSample.f = 0.123
+ ok := chunk.Insert(dupSample.t, dupSample.f)
expSamples := makeEvenSampleSlice(num) // We expect no change.
require.False(t, ok)
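These two tests pin down the Insert contract: samples stay sorted by timestamp, and a sample whose timestamp is already present is rejected without modifying the slice. A standalone sketch of that contract, using a local stand-in type rather than the real OOOChunk:

import "sort"

type s struct {
    t int64
    f float64
}

// insert keeps samples ordered by timestamp and reports whether the sample
// was added; a sample with an already-present timestamp is dropped, matching
// what TestOOOInsertDuplicate expects from OOOChunk.Insert.
func insert(samples []s, t int64, f float64) ([]s, bool) {
    i := sort.Search(len(samples), func(i int) bool { return samples[i].t >= t })
    if i < len(samples) && samples[i].t == t {
        return samples, false
    }
    samples = append(samples, s{})
    copy(samples[i+1:], samples[i:])
    samples[i] = s{t: t, f: f}
    return samples, true
}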

View file

@@ -171,7 +171,7 @@ func (q *blockChunkQuerier) Select(sortSeries bool, hints *storage.SelectHints,
if sortSeries {
p = q.index.SortedPostings(p)
}
- return newBlockChunkSeriesSet(q.blockID, q.index, q.chunks, q.tombstones, p, mint, maxt, disableTrimming)
+ return NewBlockChunkSeriesSet(q.blockID, q.index, q.chunks, q.tombstones, p, mint, maxt, disableTrimming)
}
// PostingsForMatchers assembles a single postings iterator against the index reader
@@ -387,7 +387,7 @@ func (s *seriesData) Labels() labels.Labels { return s.labels }
// blockBaseSeriesSet allows to iterate over all series in the single block.
// Iterated series are trimmed with given min and max time as well as tombstones.
- // See newBlockSeriesSet and newBlockChunkSeriesSet to use it for either sample or chunk iterating.
+ // See newBlockSeriesSet and NewBlockChunkSeriesSet to use it for either sample or chunk iterating.
type blockBaseSeriesSet struct {
blockID ulid.ULID
p index.Postings
@@ -873,7 +873,7 @@ type blockChunkSeriesSet struct {
blockBaseSeriesSet
}
- func newBlockChunkSeriesSet(id ulid.ULID, i IndexReader, c ChunkReader, t tombstones.Reader, p index.Postings, mint, maxt int64, disableTrimming bool) storage.ChunkSeriesSet {
+ func NewBlockChunkSeriesSet(id ulid.ULID, i IndexReader, c ChunkReader, t tombstones.Reader, p index.Postings, mint, maxt int64, disableTrimming bool) storage.ChunkSeriesSet {
return &blockChunkSeriesSet{
blockBaseSeriesSet{
blockID: id,

View file

@@ -132,7 +132,7 @@ func createIdxChkReaders(t *testing.T, tc []seriesSamples) (IndexReader, ChunkRe
chunk := chunkenc.NewXORChunk()
app, _ := chunk.Appender()
for _, smpl := range chk {
- app.Append(smpl.t, smpl.v)
+ app.Append(smpl.t, smpl.f)
}
chkReader[chunkRef] = chunk
chunkRef++
@@ -519,7 +519,7 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) {
for _, s := range testData {
for _, chk := range s.chunks {
for _, sample := range chk {
- _, err = app.Append(0, labels.FromMap(s.lset), sample.t, sample.v)
+ _, err = app.Append(0, labels.FromMap(s.lset), sample.t, sample.f)
require.NoError(t, err)
}
}
@@ -922,7 +922,7 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) {
if tc.seekSuccess {
// After successful seek iterator is ready. Grab the value.
t, v := it.At()
- r = append(r, sample{t: t, v: v})
+ r = append(r, sample{t: t, f: v})
}
}
expandedResult, err := storage.ExpandSamples(it, newSample)
@@ -1094,8 +1094,8 @@ func TestDeletedIterator(t *testing.T) {
act := make([]sample, 1000)
for i := 0; i < 1000; i++ {
act[i].t = int64(i)
- act[i].v = rand.Float64()
- app.Append(act[i].t, act[i].v)
+ act[i].f = rand.Float64()
+ app.Append(act[i].t, act[i].f)
}
cases := []struct {
@@ -1130,7 +1130,7 @@ func TestDeletedIterator(t *testing.T) {
ts, v := it.At()
require.Equal(t, act[i].t, ts)
- require.Equal(t, act[i].v, v)
+ require.Equal(t, act[i].f, v)
}
// There has been an extra call to Next().
i++
@@ -1154,8 +1154,8 @@ func TestDeletedIterator_WithSeek(t *testing.T) {
act := make([]sample, 1000)
for i := 0; i < 1000; i++ {
act[i].t = int64(i)
- act[i].v = float64(i)
- app.Append(act[i].t, act[i].v)
+ act[i].f = float64(i)
+ app.Append(act[i].t, act[i].f)
}
cases := []struct {

View file

@@ -28,7 +28,7 @@ type Samples interface {
type Sample interface {
T() int64
- V() float64
+ F() float64
H() *histogram.Histogram
FH() *histogram.FloatHistogram
Type() chunkenc.ValueType
@@ -69,7 +69,7 @@ func ChunkFromSamplesGeneric(s Samples) chunks.Meta {
for i := 0; i < s.Len(); i++ {
switch sampleType {
case chunkenc.ValFloat:
- ca.Append(s.Get(i).T(), s.Get(i).V())
+ ca.Append(s.Get(i).T(), s.Get(i).F())
case chunkenc.ValHistogram:
ca.AppendHistogram(s.Get(i).T(), s.Get(i).H())
case chunkenc.ValFloatHistogram:
@@ -87,7 +87,7 @@ func ChunkFromSamplesGeneric(s Samples) chunks.Meta {
type sample struct {
t int64
- v float64
+ f float64
h *histogram.Histogram
fh *histogram.FloatHistogram
}
@@ -96,8 +96,8 @@ func (s sample) T() int64 {
return s.t
}
- func (s sample) V() float64 {
- return s.v
+ func (s sample) F() float64 {
+ return s.f
}
func (s sample) H() *histogram.Histogram {
@@ -123,7 +123,7 @@ func (s sample) Type() chunkenc.ValueType {
func PopulatedChunk(numSamples int, minTime int64) chunks.Meta {
samples := make([]Sample, numSamples)
for i := 0; i < numSamples; i++ {
- samples[i] = sample{t: minTime + int64(i*1000), v: 1.0}
+ samples[i] = sample{t: minTime + int64(i*1000), f: 1.0}
}
return ChunkFromSamples(samples)
}
@@ -133,7 +133,7 @@ func GenerateSamples(start, numSamples int) []Sample {
return generateSamples(start, numSamples, func(i int) Sample {
return sample{
t: int64(i),
- v: float64(i),
+ f: float64(i),
}
})
}
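The V() to F() rename above runs through the whole Sample interface and its helper implementation. A hypothetical same-package test illustrating the renamed field and accessor together with ChunkFromSamples (assumes the usual testing, require and chunkenc imports):

func TestSampleF(t *testing.T) {
    smpls := []Sample{
        sample{t: 100, f: 1.5},
        sample{t: 200, f: 2.5},
    }
    meta := ChunkFromSamples(smpls)

    // Read the encoded chunk back and compare against the F() accessor.
    it := meta.Chunk.Iterator(nil)
    for i := 0; it.Next() == chunkenc.ValFloat; i++ {
        ts, f := it.At()
        require.Equal(t, smpls[i].T(), ts)
        require.Equal(t, smpls[i].F(), f)
    }
}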

View file

@ -18,6 +18,8 @@ import (
"strconv" "strconv"
jsoniter "github.com/json-iterator/go" jsoniter "github.com/json-iterator/go"
"github.com/prometheus/prometheus/model/histogram"
) )
// MarshalTimestamp marshals a point timestamp using the passed jsoniter stream. // MarshalTimestamp marshals a point timestamp using the passed jsoniter stream.
@ -42,13 +44,13 @@ func MarshalTimestamp(t int64, stream *jsoniter.Stream) {
} }
} }
// MarshalValue marshals a point value using the passed jsoniter stream. // MarshalFloat marshals a float value using the passed jsoniter stream.
func MarshalValue(v float64, stream *jsoniter.Stream) { func MarshalFloat(f float64, stream *jsoniter.Stream) {
stream.WriteRaw(`"`) stream.WriteRaw(`"`)
// Taken from https://github.com/json-iterator/go/blob/master/stream_float.go#L71 as a workaround // Taken from https://github.com/json-iterator/go/blob/master/stream_float.go#L71 as a workaround
// to https://github.com/json-iterator/go/issues/365 (jsoniter, to follow json standard, doesn't allow inf/nan). // to https://github.com/json-iterator/go/issues/365 (jsoniter, to follow json standard, doesn't allow inf/nan).
buf := stream.Buffer() buf := stream.Buffer()
abs := math.Abs(v) abs := math.Abs(f)
fmt := byte('f') fmt := byte('f')
// Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right. // Note: Must use float32 comparisons for underlying float32 value to get precise cutoffs right.
if abs != 0 { if abs != 0 {
@ -56,7 +58,80 @@ func MarshalValue(v float64, stream *jsoniter.Stream) {
fmt = 'e' fmt = 'e'
} }
} }
buf = strconv.AppendFloat(buf, v, fmt, -1, 64) buf = strconv.AppendFloat(buf, f, fmt, -1, 64)
stream.SetBuffer(buf) stream.SetBuffer(buf)
stream.WriteRaw(`"`) stream.WriteRaw(`"`)
} }
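The strconv.AppendFloat detour exists because jsoniter, following the JSON standard, refuses to write NaN or the infinities; Prometheus instead quotes every float, so the special values survive as strings. A small usage sketch of the renamed function, assuming nothing beyond the jsonutil package above:

package main

import (
	"fmt"
	"math"

	jsoniter "github.com/json-iterator/go"

	"github.com/prometheus/prometheus/util/jsonutil"
)

func main() {
	cfg := jsoniter.ConfigCompatibleWithStandardLibrary
	stream := cfg.BorrowStream(nil)
	defer cfg.ReturnStream(stream)

	jsonutil.MarshalFloat(math.Inf(1), stream)
	fmt.Println(string(stream.Buffer())) // prints: "+Inf" (quotes included)
}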
// MarshalHistogram marshals a histogram value using the passed jsoniter stream.
// It writes something like:
//
// {
// "count": "42",
// "sum": "34593.34",
// "buckets": [
// [ 3, "-0.25", "0.25", "3"],
// [ 0, "0.25", "0.5", "12"],
// [ 0, "0.5", "1", "21"],
// [ 0, "2", "4", "6"]
// ]
// }
//
// The 1st element in each bucket array determines whether the boundaries are
// inclusive (AKA closed) or exclusive (AKA open):
//
// 0: lower exclusive, upper inclusive
// 1: lower inclusive, upper exclusive
// 2: both exclusive
// 3: both inclusive
//
// The 2nd and 3rd elements are the lower and upper boundary. The 4th element is
// the bucket count.
func MarshalHistogram(h *histogram.FloatHistogram, stream *jsoniter.Stream) {
stream.WriteObjectStart()
stream.WriteObjectField(`count`)
MarshalFloat(h.Count, stream)
stream.WriteMore()
stream.WriteObjectField(`sum`)
MarshalFloat(h.Sum, stream)
bucketFound := false
it := h.AllBucketIterator()
for it.Next() {
bucket := it.At()
if bucket.Count == 0 {
continue // No need to expose empty buckets in JSON.
}
stream.WriteMore()
if !bucketFound {
stream.WriteObjectField(`buckets`)
stream.WriteArrayStart()
}
bucketFound = true
boundaries := 2 // Exclusive on both sides AKA open interval.
if bucket.LowerInclusive {
if bucket.UpperInclusive {
boundaries = 3 // Inclusive on both sides AKA closed interval.
} else {
boundaries = 1 // Inclusive only on lower end AKA right open.
}
} else {
if bucket.UpperInclusive {
boundaries = 0 // Inclusive only on upper end AKA left open.
}
}
stream.WriteArrayStart()
stream.WriteInt(boundaries)
stream.WriteMore()
MarshalFloat(bucket.Lower, stream)
stream.WriteMore()
MarshalFloat(bucket.Upper, stream)
stream.WriteMore()
MarshalFloat(bucket.Count, stream)
stream.WriteArrayEnd()
}
if bucketFound {
stream.WriteArrayEnd()
}
stream.WriteObjectEnd()
}
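For reference, a sketch of calling the new MarshalHistogram on a float histogram with only the zero bucket populated; the expected output is inferred from the doc comment above and from the codec test expectations later in this change:

package main

import (
	"fmt"

	jsoniter "github.com/json-iterator/go"

	"github.com/prometheus/prometheus/model/histogram"
	"github.com/prometheus/prometheus/util/jsonutil"
)

func main() {
	h := &histogram.FloatHistogram{
		ZeroThreshold: 0.001,
		ZeroCount:     12,
		Count:         12,
		Sum:           20,
	}

	cfg := jsoniter.ConfigCompatibleWithStandardLibrary
	stream := cfg.BorrowStream(nil)
	defer cfg.ReturnStream(stream)

	jsonutil.MarshalHistogram(h, stream)
	// Expected shape: {"count":"12","sum":"20","buckets":[[3,"-0.001","0.001","12"]]}
	// (the zero bucket is inclusive on both ends, hence boundary flag 3).
	fmt.Println(string(stream.Buffer()))
}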

View file

@ -1100,10 +1100,10 @@ func testEndpoints(t *testing.T, api *API, tr *testTargetRetriever, es storage.E
ResultType: parser.ValueTypeMatrix, ResultType: parser.ValueTypeMatrix,
Result: promql.Matrix{ Result: promql.Matrix{
promql.Series{ promql.Series{
Points: []promql.Point{ Floats: []promql.FPoint{
{V: 0, T: timestamp.FromTime(start)}, {F: 0, T: timestamp.FromTime(start)},
{V: 1, T: timestamp.FromTime(start.Add(1 * time.Second))}, {F: 1, T: timestamp.FromTime(start.Add(1 * time.Second))},
{V: 2, T: timestamp.FromTime(start.Add(2 * time.Second))}, {F: 2, T: timestamp.FromTime(start.Add(2 * time.Second))},
}, },
// No Metric returned - use zero value for comparison. // No Metric returned - use zero value for comparison.
}, },
@ -3208,15 +3208,15 @@ func BenchmarkRespond(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
request, err := http.NewRequest(http.MethodGet, "/does-not-matter", nil) request, err := http.NewRequest(http.MethodGet, "/does-not-matter", nil)
require.NoError(b, err) require.NoError(b, err)
points := []promql.Point{} points := []promql.FPoint{}
for i := 0; i < 10000; i++ { for i := 0; i < 10000; i++ {
points = append(points, promql.Point{V: float64(i * 1000000), T: int64(i)}) points = append(points, promql.FPoint{F: float64(i * 1000000), T: int64(i)})
} }
response := &QueryData{ response := &QueryData{
ResultType: parser.ValueTypeMatrix, ResultType: parser.ValueTypeMatrix,
Result: promql.Matrix{ Result: promql.Matrix{
promql.Series{ promql.Series{
Points: points, Floats: points,
Metric: labels.EmptyLabels(), Metric: labels.EmptyLabels(),
}, },
}, },

View file

@ -19,7 +19,6 @@ import (
jsoniter "github.com/json-iterator/go" jsoniter "github.com/json-iterator/go"
"github.com/prometheus/prometheus/model/exemplar" "github.com/prometheus/prometheus/model/exemplar"
"github.com/prometheus/prometheus/model/histogram"
"github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql"
"github.com/prometheus/prometheus/util/jsonutil" "github.com/prometheus/prometheus/util/jsonutil"
) )
@ -27,7 +26,8 @@ import (
func init() { func init() {
jsoniter.RegisterTypeEncoderFunc("promql.Series", marshalSeriesJSON, marshalSeriesJSONIsEmpty) jsoniter.RegisterTypeEncoderFunc("promql.Series", marshalSeriesJSON, marshalSeriesJSONIsEmpty)
jsoniter.RegisterTypeEncoderFunc("promql.Sample", marshalSampleJSON, marshalSampleJSONIsEmpty) jsoniter.RegisterTypeEncoderFunc("promql.Sample", marshalSampleJSON, marshalSampleJSONIsEmpty)
jsoniter.RegisterTypeEncoderFunc("promql.Point", marshalPointJSON, marshalPointJSONIsEmpty) jsoniter.RegisterTypeEncoderFunc("promql.FPoint", marshalFPointJSON, marshalPointJSONIsEmpty)
jsoniter.RegisterTypeEncoderFunc("promql.HPoint", marshalHPointJSON, marshalPointJSONIsEmpty)
jsoniter.RegisterTypeEncoderFunc("exemplar.Exemplar", marshalExemplarJSON, marshalExemplarJSONEmpty) jsoniter.RegisterTypeEncoderFunc("exemplar.Exemplar", marshalExemplarJSON, marshalExemplarJSONEmpty)
} }
@ -60,7 +60,7 @@ func (j JSONCodec) Encode(resp *Response) ([]byte, error) {
// < more values> // < more values>
// ], // ],
// "histograms": [ // "histograms": [
// [ 1435781451.781, { < histogram, see below > } ], // [ 1435781451.781, { < histogram, see jsonutil.MarshalHistogram > } ],
// < more histograms > // < more histograms >
// ], // ],
// }, // },
@ -75,41 +75,26 @@ func marshalSeriesJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) {
} }
stream.SetBuffer(append(stream.Buffer(), m...)) stream.SetBuffer(append(stream.Buffer(), m...))
// We make two passes through the series here: In the first marshaling for i, p := range s.Floats {
// all value points, in the second marshaling all histogram stream.WriteMore()
// points. That's probably cheaper than just one pass in which we copy if i == 0 {
// out histogram Points into a newly allocated slice for separate stream.WriteObjectField(`values`)
// marshaling. (Could be benchmarked, though.) stream.WriteArrayStart()
var foundValue, foundHistogram bool
for _, p := range s.Points {
if p.H == nil {
stream.WriteMore()
if !foundValue {
stream.WriteObjectField(`values`)
stream.WriteArrayStart()
}
foundValue = true
marshalPointJSON(unsafe.Pointer(&p), stream)
} else {
foundHistogram = true
} }
marshalFPointJSON(unsafe.Pointer(&p), stream)
} }
if foundValue { if len(s.Floats) > 0 {
stream.WriteArrayEnd() stream.WriteArrayEnd()
} }
if foundHistogram { for i, p := range s.Histograms {
firstHistogram := true stream.WriteMore()
for _, p := range s.Points { if i == 0 {
if p.H != nil { stream.WriteObjectField(`histograms`)
stream.WriteMore() stream.WriteArrayStart()
if firstHistogram {
stream.WriteObjectField(`histograms`)
stream.WriteArrayStart()
}
firstHistogram = false
marshalPointJSON(unsafe.Pointer(&p), stream)
}
} }
marshalHPointJSON(unsafe.Pointer(&p), stream)
}
if len(s.Histograms) > 0 {
stream.WriteArrayEnd() stream.WriteArrayEnd()
} }
stream.WriteObjectEnd() stream.WriteObjectEnd()
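The simpler loop structure above falls out of the Point split: promql.Series now carries float samples and histogram samples in separate, individually timestamped slices, so the old two-pass scan over a mixed Points slice is gone. A sketch of the new shape, with field names as used by the tests in this change:

package example

import (
	"github.com/prometheus/prometheus/model/histogram"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"
)

// newSeries builds a series holding both sample kinds; the encoder renders
// Floats under "values" and Histograms under "histograms".
func newSeries() promql.Series {
	return promql.Series{
		Metric: labels.FromStrings("__name__", "foo"),
		Floats: []promql.FPoint{{T: 1000, F: 1}, {T: 2000, F: 2}},
		Histograms: []promql.HPoint{
			{T: 3000, H: &histogram.FloatHistogram{Count: 10, Sum: 20}},
		},
	}
}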
@ -127,7 +112,7 @@ func marshalSeriesJSONIsEmpty(ptr unsafe.Pointer) bool {
// "job" : "prometheus", // "job" : "prometheus",
// "instance" : "localhost:9090" // "instance" : "localhost:9090"
// }, // },
// "value": [ 1435781451.781, "1" ] // "value": [ 1435781451.781, "1.234" ]
// }, // },
// //
// For histogram samples, it writes something like this: // For histogram samples, it writes something like this:
@ -138,7 +123,7 @@ func marshalSeriesJSONIsEmpty(ptr unsafe.Pointer) bool {
// "job" : "prometheus", // "job" : "prometheus",
// "instance" : "localhost:9090" // "instance" : "localhost:9090"
// }, // },
// "histogram": [ 1435781451.781, { < histogram, see below > } ] // "histogram": [ 1435781451.781, { < histogram, see jsonutil.MarshalHistogram > } ]
// }, // },
func marshalSampleJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) { func marshalSampleJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) {
s := *((*promql.Sample)(ptr)) s := *((*promql.Sample)(ptr))
@ -151,12 +136,20 @@ func marshalSampleJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) {
} }
stream.SetBuffer(append(stream.Buffer(), m...)) stream.SetBuffer(append(stream.Buffer(), m...))
stream.WriteMore() stream.WriteMore()
if s.Point.H == nil { if s.H == nil {
stream.WriteObjectField(`value`) stream.WriteObjectField(`value`)
} else { } else {
stream.WriteObjectField(`histogram`) stream.WriteObjectField(`histogram`)
} }
marshalPointJSON(unsafe.Pointer(&s.Point), stream) stream.WriteArrayStart()
jsonutil.MarshalTimestamp(s.T, stream)
stream.WriteMore()
if s.H == nil {
jsonutil.MarshalFloat(s.F, stream)
} else {
jsonutil.MarshalHistogram(s.H, stream)
}
stream.WriteArrayEnd()
stream.WriteObjectEnd() stream.WriteObjectEnd()
} }
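promql.Sample is flattened the same way: the embedded Point is gone and T, F, and H are direct fields, with a non-nil H marking a histogram sample, as the branch above shows. A sketch with illustrative values:

package example

import (
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/promql"
)

// A float sample; for a histogram sample, leave F zero and set H to a
// *histogram.FloatHistogram instead.
var s = promql.Sample{
	Metric: labels.FromStrings("job", "prometheus"),
	T:      1435781451781,
	F:      1.234,
}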
@ -164,17 +157,23 @@ func marshalSampleJSONIsEmpty(ptr unsafe.Pointer) bool {
return false return false
} }
// marshalPointJSON writes `[ts, "val"]`. // marshalFPointJSON writes `[ts, "1.234"]`.
func marshalPointJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) { func marshalFPointJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) {
p := *((*promql.Point)(ptr)) p := *((*promql.FPoint)(ptr))
stream.WriteArrayStart() stream.WriteArrayStart()
jsonutil.MarshalTimestamp(p.T, stream) jsonutil.MarshalTimestamp(p.T, stream)
stream.WriteMore() stream.WriteMore()
if p.H == nil { jsonutil.MarshalFloat(p.F, stream)
jsonutil.MarshalValue(p.V, stream) stream.WriteArrayEnd()
} else { }
marshalHistogram(p.H, stream)
} // marshalHPointJSON writes `[ts, { < histogram, see jsonutil.MarshalHistogram > } ]`.
func marshalHPointJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) {
p := *((*promql.HPoint)(ptr))
stream.WriteArrayStart()
jsonutil.MarshalTimestamp(p.T, stream)
stream.WriteMore()
jsonutil.MarshalHistogram(p.H, stream)
stream.WriteArrayEnd() stream.WriteArrayEnd()
} }
@ -182,78 +181,6 @@ func marshalPointJSONIsEmpty(ptr unsafe.Pointer) bool {
return false return false
} }
// marshalHistogramJSON writes something like:
//
// {
// "count": "42",
// "sum": "34593.34",
// "buckets": [
// [ 3, "-0.25", "0.25", "3"],
// [ 0, "0.25", "0.5", "12"],
// [ 0, "0.5", "1", "21"],
// [ 0, "2", "4", "6"]
// ]
// }
//
// The 1st element in each bucket array determines if the boundaries are
// inclusive (AKA closed) or exclusive (AKA open):
//
// 0: lower exclusive, upper inclusive
// 1: lower inclusive, upper exclusive
// 2: both exclusive
// 3: both inclusive
//
// The 2nd and 3rd elements are the lower and upper boundary. The 4th element is
// the bucket count.
func marshalHistogram(h *histogram.FloatHistogram, stream *jsoniter.Stream) {
stream.WriteObjectStart()
stream.WriteObjectField(`count`)
jsonutil.MarshalValue(h.Count, stream)
stream.WriteMore()
stream.WriteObjectField(`sum`)
jsonutil.MarshalValue(h.Sum, stream)
bucketFound := false
it := h.AllBucketIterator()
for it.Next() {
bucket := it.At()
if bucket.Count == 0 {
continue // No need to expose empty buckets in JSON.
}
stream.WriteMore()
if !bucketFound {
stream.WriteObjectField(`buckets`)
stream.WriteArrayStart()
}
bucketFound = true
boundaries := 2 // Exclusive on both sides AKA open interval.
if bucket.LowerInclusive {
if bucket.UpperInclusive {
boundaries = 3 // Inclusive on both sides AKA closed interval.
} else {
boundaries = 1 // Inclusive only on lower end AKA right open.
}
} else {
if bucket.UpperInclusive {
boundaries = 0 // Inclusive only on upper end AKA left open.
}
}
stream.WriteArrayStart()
stream.WriteInt(boundaries)
stream.WriteMore()
jsonutil.MarshalValue(bucket.Lower, stream)
stream.WriteMore()
jsonutil.MarshalValue(bucket.Upper, stream)
stream.WriteMore()
jsonutil.MarshalValue(bucket.Count, stream)
stream.WriteArrayEnd()
}
if bucketFound {
stream.WriteArrayEnd()
}
stream.WriteObjectEnd()
}
// marshalExemplarJSON writes something like: // marshalExemplarJSON writes something like:
// //
// { // {
@ -277,7 +204,7 @@ func marshalExemplarJSON(ptr unsafe.Pointer, stream *jsoniter.Stream) {
// "value" key. // "value" key.
stream.WriteMore() stream.WriteMore()
stream.WriteObjectField(`value`) stream.WriteObjectField(`value`)
jsonutil.MarshalValue(p.Value, stream) jsonutil.MarshalFloat(p.Value, stream)
// "timestamp" key. // "timestamp" key.
stream.WriteMore() stream.WriteMore()

View file

@ -34,7 +34,7 @@ func TestJsonCodec_Encode(t *testing.T) {
ResultType: parser.ValueTypeMatrix, ResultType: parser.ValueTypeMatrix,
Result: promql.Matrix{ Result: promql.Matrix{
promql.Series{ promql.Series{
Points: []promql.Point{{V: 1, T: 1000}}, Floats: []promql.FPoint{{F: 1, T: 1000}},
Metric: labels.FromStrings("__name__", "foo"), Metric: labels.FromStrings("__name__", "foo"),
}, },
}, },
@ -46,7 +46,7 @@ func TestJsonCodec_Encode(t *testing.T) {
ResultType: parser.ValueTypeMatrix, ResultType: parser.ValueTypeMatrix,
Result: promql.Matrix{ Result: promql.Matrix{
promql.Series{ promql.Series{
Points: []promql.Point{{H: &histogram.FloatHistogram{ Histograms: []promql.HPoint{{H: &histogram.FloatHistogram{
Schema: 2, Schema: 2,
ZeroThreshold: 0.001, ZeroThreshold: 0.001,
ZeroCount: 12, ZeroCount: 12,
@ -69,63 +69,63 @@ func TestJsonCodec_Encode(t *testing.T) {
expected: `{"status":"success","data":{"resultType":"matrix","result":[{"metric":{"__name__":"foo"},"histograms":[[1,{"count":"10","sum":"20","buckets":[[1,"-1.6817928305074288","-1.414213562373095","1"],[1,"-1.414213562373095","-1.189207115002721","2"],[3,"-0.001","0.001","12"],[0,"1.414213562373095","1.6817928305074288","1"],[0,"1.6817928305074288","2","2"],[0,"2.378414230005442","2.82842712474619","2"],[0,"2.82842712474619","3.3635856610148576","1"],[0,"3.3635856610148576","4","1"]]}]]}]}}`, expected: `{"status":"success","data":{"resultType":"matrix","result":[{"metric":{"__name__":"foo"},"histograms":[[1,{"count":"10","sum":"20","buckets":[[1,"-1.6817928305074288","-1.414213562373095","1"],[1,"-1.414213562373095","-1.189207115002721","2"],[3,"-0.001","0.001","12"],[0,"1.414213562373095","1.6817928305074288","1"],[0,"1.6817928305074288","2","2"],[0,"2.378414230005442","2.82842712474619","2"],[0,"2.82842712474619","3.3635856610148576","1"],[0,"3.3635856610148576","4","1"]]}]]}]}}`,
}, },
{ {
response: promql.Point{V: 0, T: 0}, response: promql.FPoint{F: 0, T: 0},
expected: `{"status":"success","data":[0,"0"]}`, expected: `{"status":"success","data":[0,"0"]}`,
}, },
{ {
response: promql.Point{V: 20, T: 1}, response: promql.FPoint{F: 20, T: 1},
expected: `{"status":"success","data":[0.001,"20"]}`, expected: `{"status":"success","data":[0.001,"20"]}`,
}, },
{ {
response: promql.Point{V: 20, T: 10}, response: promql.FPoint{F: 20, T: 10},
expected: `{"status":"success","data":[0.010,"20"]}`, expected: `{"status":"success","data":[0.010,"20"]}`,
}, },
{ {
response: promql.Point{V: 20, T: 100}, response: promql.FPoint{F: 20, T: 100},
expected: `{"status":"success","data":[0.100,"20"]}`, expected: `{"status":"success","data":[0.100,"20"]}`,
}, },
{ {
response: promql.Point{V: 20, T: 1001}, response: promql.FPoint{F: 20, T: 1001},
expected: `{"status":"success","data":[1.001,"20"]}`, expected: `{"status":"success","data":[1.001,"20"]}`,
}, },
{ {
response: promql.Point{V: 20, T: 1010}, response: promql.FPoint{F: 20, T: 1010},
expected: `{"status":"success","data":[1.010,"20"]}`, expected: `{"status":"success","data":[1.010,"20"]}`,
}, },
{ {
response: promql.Point{V: 20, T: 1100}, response: promql.FPoint{F: 20, T: 1100},
expected: `{"status":"success","data":[1.100,"20"]}`, expected: `{"status":"success","data":[1.100,"20"]}`,
}, },
{ {
response: promql.Point{V: 20, T: 12345678123456555}, response: promql.FPoint{F: 20, T: 12345678123456555},
expected: `{"status":"success","data":[12345678123456.555,"20"]}`, expected: `{"status":"success","data":[12345678123456.555,"20"]}`,
}, },
{ {
response: promql.Point{V: 20, T: -1}, response: promql.FPoint{F: 20, T: -1},
expected: `{"status":"success","data":[-0.001,"20"]}`, expected: `{"status":"success","data":[-0.001,"20"]}`,
}, },
{ {
response: promql.Point{V: math.NaN(), T: 0}, response: promql.FPoint{F: math.NaN(), T: 0},
expected: `{"status":"success","data":[0,"NaN"]}`, expected: `{"status":"success","data":[0,"NaN"]}`,
}, },
{ {
response: promql.Point{V: math.Inf(1), T: 0}, response: promql.FPoint{F: math.Inf(1), T: 0},
expected: `{"status":"success","data":[0,"+Inf"]}`, expected: `{"status":"success","data":[0,"+Inf"]}`,
}, },
{ {
response: promql.Point{V: math.Inf(-1), T: 0}, response: promql.FPoint{F: math.Inf(-1), T: 0},
expected: `{"status":"success","data":[0,"-Inf"]}`, expected: `{"status":"success","data":[0,"-Inf"]}`,
}, },
{ {
response: promql.Point{V: 1.2345678e6, T: 0}, response: promql.FPoint{F: 1.2345678e6, T: 0},
expected: `{"status":"success","data":[0,"1234567.8"]}`, expected: `{"status":"success","data":[0,"1234567.8"]}`,
}, },
{ {
response: promql.Point{V: 1.2345678e-6, T: 0}, response: promql.FPoint{F: 1.2345678e-6, T: 0},
expected: `{"status":"success","data":[0,"0.0000012345678"]}`, expected: `{"status":"success","data":[0,"0.0000012345678"]}`,
}, },
{ {
response: promql.Point{V: 1.2345678e-67, T: 0}, response: promql.FPoint{F: 1.2345678e-67, T: 0},
expected: `{"status":"success","data":[0,"1.2345678e-67"]}`, expected: `{"status":"success","data":[0,"1.2345678e-67"]}`,
}, },
{ {

View file

@ -115,37 +115,45 @@ Loop:
var ( var (
t int64 t int64
v float64 f float64
h *histogram.Histogram
fh *histogram.FloatHistogram fh *histogram.FloatHistogram
ok bool
) )
valueType := it.Seek(maxt) valueType := it.Seek(maxt)
switch valueType { switch valueType {
case chunkenc.ValFloat: case chunkenc.ValFloat:
t, v = it.At() t, f = it.At()
case chunkenc.ValFloatHistogram, chunkenc.ValHistogram: case chunkenc.ValFloatHistogram, chunkenc.ValHistogram:
t, fh = it.AtFloatHistogram() t, fh = it.AtFloatHistogram()
default: default:
t, v, h, fh, ok = it.PeekBack(1) sample, ok := it.PeekBack(1)
if !ok { if !ok {
continue Loop continue Loop
} }
if h != nil { t = sample.T()
fh = h.ToFloat() switch sample.Type() {
case chunkenc.ValFloat:
f = sample.F()
case chunkenc.ValHistogram:
fh = sample.H().ToFloat()
case chunkenc.ValFloatHistogram:
fh = sample.FH()
default:
continue Loop
} }
} }
// The exposition formats do not support stale markers, so drop them. This // The exposition formats do not support stale markers, so drop them. This
// is good enough for staleness handling of federated data, as the // is good enough for staleness handling of federated data, as the
// interval-based limits on staleness will do the right thing for supported // interval-based limits on staleness will do the right thing for supported
// use cases (which is to say federating aggregated time series). // use cases (which is to say federating aggregated time series).
if value.IsStaleNaN(v) { if value.IsStaleNaN(f) || (fh != nil && value.IsStaleNaN(fh.Sum)) {
continue continue
} }
vec = append(vec, promql.Sample{ vec = append(vec, promql.Sample{
Metric: s.Labels(), Metric: s.Labels(),
Point: promql.Point{T: t, V: v, H: fh}, T: t,
F: f,
H: fh,
}) })
} }
if ws := set.Warnings(); len(ws) > 0 { if ws := set.Warnings(); len(ws) > 0 {
@ -262,7 +270,7 @@ Loop:
if !isHistogram { if !isHistogram {
lastHistogramWasGauge = false lastHistogramWasGauge = false
protMetric.Untyped = &dto.Untyped{ protMetric.Untyped = &dto.Untyped{
Value: proto.Float64(s.V), Value: proto.Float64(s.F),
} }
} else { } else {
lastHistogramWasGauge = s.H.CounterResetHint == histogram.GaugeType lastHistogramWasGauge = s.H.CounterResetHint == histogram.GaugeType
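The widened staleness check above works because stale markers are one specific NaN bit pattern, and a histogram sample carries that marker in its Sum field. value.IsStaleNaN compares bits exactly, so ordinary NaN samples still federate:

package main

import (
	"fmt"
	"math"

	"github.com/prometheus/prometheus/model/value"
)

func main() {
	stale := math.Float64frombits(value.StaleNaN) // the stale-marker bit pattern
	fmt.Println(value.IsStaleNaN(stale))          // true
	fmt.Println(value.IsStaleNaN(math.NaN()))     // false: a plain NaN is not stale
}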

View file

@ -342,14 +342,16 @@ func TestFederationWithNativeHistograms(t *testing.T) {
if i%3 == 0 { if i%3 == 0 {
_, err = app.Append(0, l, 100*60*1000, float64(i*100)) _, err = app.Append(0, l, 100*60*1000, float64(i*100))
expVec = append(expVec, promql.Sample{ expVec = append(expVec, promql.Sample{
Point: promql.Point{T: 100 * 60 * 1000, V: float64(i * 100)}, T: 100 * 60 * 1000,
F: float64(i * 100),
Metric: expL, Metric: expL,
}) })
} else { } else {
hist.ZeroCount++ hist.ZeroCount++
_, err = app.AppendHistogram(0, l, 100*60*1000, hist.Copy(), nil) _, err = app.AppendHistogram(0, l, 100*60*1000, hist.Copy(), nil)
expVec = append(expVec, promql.Sample{ expVec = append(expVec, promql.Sample{
Point: promql.Point{T: 100 * 60 * 1000, H: hist.ToFloat()}, T: 100 * 60 * 1000,
H: hist.ToFloat(),
Metric: expL, Metric: expL,
}) })
} }
@ -379,6 +381,7 @@ func TestFederationWithNativeHistograms(t *testing.T) {
p := textparse.NewProtobufParser(body) p := textparse.NewProtobufParser(body)
var actVec promql.Vector var actVec promql.Vector
metricFamilies := 0 metricFamilies := 0
l := labels.Labels{}
for { for {
et, err := p.Next() et, err := p.Next()
if err == io.EOF { if err == io.EOF {
@ -389,23 +392,23 @@ func TestFederationWithNativeHistograms(t *testing.T) {
metricFamilies++ metricFamilies++
} }
if et == textparse.EntryHistogram || et == textparse.EntrySeries { if et == textparse.EntryHistogram || et == textparse.EntrySeries {
l := labels.Labels{}
p.Metric(&l) p.Metric(&l)
actVec = append(actVec, promql.Sample{Metric: l})
} }
if et == textparse.EntryHistogram { if et == textparse.EntryHistogram {
_, parsedTimestamp, h, fh := p.Histogram() _, parsedTimestamp, h, fh := p.Histogram()
require.Nil(t, h) require.Nil(t, h)
actVec[len(actVec)-1].Point = promql.Point{ actVec = append(actVec, promql.Sample{
T: *parsedTimestamp, T: *parsedTimestamp,
H: fh, H: fh,
} Metric: l,
})
} else if et == textparse.EntrySeries { } else if et == textparse.EntrySeries {
_, parsedTimestamp, v := p.Series() _, parsedTimestamp, f := p.Series()
actVec[len(actVec)-1].Point = promql.Point{ actVec = append(actVec, promql.Sample{
T: *parsedTimestamp, T: *parsedTimestamp,
V: v, F: f,
} Metric: l,
})
} }
} }