decouple tsdb main.go (and tests) from prometheus/pkg
This commit is contained in:
parent df7cc4dff5, commit 8326e410d0
main.go
@@ -14,6 +14,7 @@
 package main
 
 import (
+	"bufio"
 	"flag"
 	"fmt"
 	"io"
@@ -22,15 +23,13 @@ import (
 	"path/filepath"
 	"runtime"
 	"runtime/pprof"
+	"strings"
 	"sync"
 	"text/tabwriter"
 	"time"
-	"unsafe"
 
 	"github.com/go-kit/kit/log"
 	"github.com/pkg/errors"
-	promlabels "github.com/prometheus/prometheus/pkg/labels"
-	"github.com/prometheus/prometheus/pkg/textparse"
 	"github.com/prometheus/tsdb"
 	"github.com/prometheus/tsdb/labels"
 	"gopkg.in/alecthomas/kingpin.v2"
@@ -302,20 +301,32 @@ func measureTime(stage string, f func()) time.Duration {
 	return time.Since(start)
 }
 
-func readPrometheusLabels(r io.Reader, n int) ([]labels.Labels, error) {
-	b, err := ioutil.ReadAll(r)
-	if err != nil {
-		return nil, err
+func mapToLabels(m map[string]interface{}, l *labels.Labels) {
+	for k, v := range m {
+		*l = append(*l, labels.Label{Name: k, Value: v.(string)})
 	}
+}
 
-	p := textparse.New(b)
-	i := 0
+func readPrometheusLabels(r io.Reader, n int) ([]labels.Labels, error) {
+	scanner := bufio.NewScanner(r)
+
 	var mets []labels.Labels
 	hashes := map[uint64]struct{}{}
+	i := 0
 
-	for p.Next() && i < n {
+	for scanner.Scan() && i < n {
 		m := make(labels.Labels, 0, 10)
-		p.Metric((*promlabels.Labels)(unsafe.Pointer(&m)))
+
+		// Order of the k/v labels matters, so rather than decoding arbitrary json into an
+		// interface{}, parse the line ourselves and remove unnecessary characters.
+		r := strings.NewReplacer("\"", "", "{", "", "}", "")
+		s := r.Replace(scanner.Text())
+
+		labelChunks := strings.Split(s, ",")
+		for _, labelChunk := range labelChunks {
+			split := strings.Split(labelChunk, ":")
+			m = append(m, labels.Label{Name: split[0], Value: split[1]})
+		}
 
 		h := m.Hash()
 		if _, ok := hashes[h]; ok {
@@ -325,7 +336,7 @@ func readPrometheusLabels(r io.Reader, n int) ([]labels.Labels, error) {
 		hashes[h] = struct{}{}
 		i++
 	}
-	return mets, p.Err()
+	return mets, nil
 }
 
 func exitWithError(err error) {
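Pieced together from the hunks above, the new line-based reader in main.go ends up roughly as follows. This is a sketch assembled from the diff, not a verbatim copy of the final file: the duplicate-skip and append lines between the two hunks and the small demo main are assumptions, and the replacer is named rep here only to avoid shadowing the io.Reader parameter r.

package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"

	"github.com/prometheus/tsdb/labels"
)

// readPrometheusLabels reads up to n label sets, one JSON-ish line per series,
// deduplicating identical series by their label hash.
func readPrometheusLabels(r io.Reader, n int) ([]labels.Labels, error) {
	scanner := bufio.NewScanner(r)

	var mets []labels.Labels
	hashes := map[uint64]struct{}{}
	i := 0

	for scanner.Scan() && i < n {
		m := make(labels.Labels, 0, 10)

		// Strip quotes and braces, then split on "," and ":" so the
		// original key order of the line is preserved.
		rep := strings.NewReplacer("\"", "", "{", "", "}", "")
		s := rep.Replace(scanner.Text())

		for _, chunk := range strings.Split(s, ",") {
			split := strings.Split(chunk, ":")
			m = append(m, labels.Label{Name: split[0], Value: split[1]})
		}

		h := m.Hash()
		if _, ok := hashes[h]; ok {
			continue // assumed: the lines between the two hunks skip duplicate series...
		}
		mets = append(mets, m) // ...and collect new ones (not visible in the diff).
		hashes[h] = struct{}{}
		i++
	}
	return mets, nil
}

func main() {
	// Demo input; the middle line duplicates the first and is dropped.
	input := `{"__name__":"up","job":"prometheus"}
{"__name__":"up","job":"prometheus"}
{"__name__":"go_goroutines","job":"prometheus"}`

	lsets, err := readPrometheusLabels(strings.NewReader(input), 10)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(lsets), "unique series") // prints: 2 unique series
}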
head_test.go (33 changed lines)
@@ -14,18 +14,16 @@
 package tsdb
 
 import (
-	"io/ioutil"
+	"bufio"
+	"fmt"
 	"math/rand"
 	"os"
+	"strings"
 	"testing"
-	"unsafe"
 
 	"github.com/pkg/errors"
 	"github.com/prometheus/tsdb/chunks"
 	"github.com/prometheus/tsdb/labels"
 
-	promlabels "github.com/prometheus/prometheus/pkg/labels"
-	"github.com/prometheus/prometheus/pkg/textparse"
 	"github.com/prometheus/tsdb/testutil"
 )
@@ -54,19 +52,26 @@ func readPrometheusLabels(fn string, n int) ([]labels.Labels, error) {
 	}
 	defer f.Close()
 
-	b, err := ioutil.ReadAll(f)
-	if err != nil {
-		return nil, err
-	}
-
-	p := textparse.New(b)
-	i := 0
+	scanner := bufio.NewScanner(f)
+
 	var mets []labels.Labels
 	hashes := map[uint64]struct{}{}
+	i := 0
 
-	for p.Next() && i < n {
+	for scanner.Scan() && i < n {
 		m := make(labels.Labels, 0, 10)
-		p.Metric((*promlabels.Labels)(unsafe.Pointer(&m)))
+
+		// Order of the k/v labels matters, so rather than decoding arbitrary json into an
+		// interface{}, parse the line ourselves and remove unnecessary characters.
+		r := strings.NewReplacer("\"", "", "{", "", "}", "")
+		s := r.Replace(scanner.Text())
+
+		labelChunks := strings.Split(s, ",")
+		for _, labelChunk := range labelChunks {
+			split := strings.Split(labelChunk, ":")
+			fmt.Println("split: ", split)
+			m = append(m, labels.Label{Name: split[0], Value: split[1]})
+		}
 
 		h := m.Hash()
 		if _, ok := hashes[h]; ok {
@@ -76,7 +81,7 @@ func readPrometheusLabels(fn string, n int) ([]labels.Labels, error) {
 		hashes[h] = struct{}{}
 		i++
 	}
-	if err := p.Err(); err != nil {
+	if err != nil {
 		return nil, err
 	}
 	if i != n {
@@ -236,7 +236,7 @@ func TestPersistence_index_e2e(t *testing.T) {
 	testutil.Ok(t, err)
 	defer os.RemoveAll(dir)
 
-	lbls, err := readPrometheusLabels("testdata/20k.series", 20000)
+	lbls, err := readPrometheusLabels("testdata/20kseries.json", 20000)
 	testutil.Ok(t, err)
 
 	// Sort labels as the index writer expects series in sorted order.
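To make the new parsing step concrete: the loop strips quotes and braces from each line and then splits on "," and ":". A standalone sketch is below; the input line is a hypothetical stand-in, since the actual format of testdata/20kseries.json is not visible in this view.

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical input line; the real testdata/20kseries.json entries are
	// assumed to look roughly like this.
	line := `{"__name__":"up","job":"prometheus","quantile":"0.99"}`

	// Same replacer as in the diff: drop quotes and braces, keep key order.
	rep := strings.NewReplacer("\"", "", "{", "", "}", "")
	s := rep.Replace(line)

	for _, chunk := range strings.Split(s, ",") {
		split := strings.Split(chunk, ":")
		fmt.Printf("name=%q value=%q\n", split[0], split[1])
	}
	// Output:
	// name="__name__" value="up"
	// name="job" value="prometheus"
	// name="quantile" value="0.99"
}

Splitting the raw line rather than unmarshalling into a map keeps the labels in the order they appear on the line, which is what the in-diff comment about key/value order is getting at; a value containing ":" would be truncated by this scheme.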
labels package test file (file name not shown in this view)
@@ -14,17 +14,14 @@
 package labels
 
 import (
+	"bufio"
 	"fmt"
-	"io/ioutil"
 	"math/rand"
 	"os"
 	"sort"
+	"strings"
 	"testing"
-	"unsafe"
 
-	"github.com/pkg/errors"
-	promlabels "github.com/prometheus/prometheus/pkg/labels"
-	"github.com/prometheus/prometheus/pkg/textparse"
 	"github.com/prometheus/tsdb/testutil"
 )
@@ -129,19 +126,25 @@ func readPrometheusLabels(fn string, n int) ([]Labels, error) {
 	}
 	defer f.Close()
 
-	b, err := ioutil.ReadAll(f)
-	if err != nil {
-		return nil, err
-	}
-
-	p := textparse.New(b)
-	i := 0
+	scanner := bufio.NewScanner(f)
+
 	var mets []Labels
 	hashes := map[uint64]struct{}{}
+	i := 0
 
-	for p.Next() && i < n {
+	for scanner.Scan() && i < n {
 		m := make(Labels, 0, 10)
-		p.Metric((*promlabels.Labels)(unsafe.Pointer(&m)))
+
+		// Order of the k/v labels matters, so rather than decoding arbitrary json into an
+		// interface{}, parse the line ourselves and remove unnecessary characters.
+		r := strings.NewReplacer("\"", "", "{", "", "}", "")
+		s := r.Replace(scanner.Text())
+
+		labelChunks := strings.Split(s, ",")
+		for _, labelChunk := range labelChunks {
+			split := strings.Split(labelChunk, ":")
+			m = append(m, Label{Name: split[0], Value: split[1]})
+		}
 
 		h := m.Hash()
 		if _, ok := hashes[h]; ok {
@@ -151,12 +154,6 @@ func readPrometheusLabels(fn string, n int) ([]Labels, error) {
 		hashes[h] = struct{}{}
 		i++
 	}
-	if err := p.Err(); err != nil {
-		return nil, err
-	}
-	if i != n {
-		return mets, errors.Errorf("requested %d metrics but found %d", n, i)
-	}
 	return mets, nil
 }
 
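All three copies of readPrometheusLabels dedupe series by their label hash before appending. A small standalone sketch of that mechanism, using the tsdb labels package; the literal series below are made up for illustration.

package main

import (
	"fmt"

	"github.com/prometheus/tsdb/labels"
)

func main() {
	// Two structurally identical label sets hash to the same value...
	a := labels.Labels{{Name: "__name__", Value: "up"}, {Name: "job", Value: "prometheus"}}
	b := labels.Labels{{Name: "__name__", Value: "up"}, {Name: "job", Value: "prometheus"}}
	// ...while a differing value changes the hash.
	c := labels.Labels{{Name: "__name__", Value: "up"}, {Name: "job", Value: "node"}}

	seen := map[uint64]struct{}{}
	for _, ls := range []labels.Labels{a, b, c} {
		if _, ok := seen[ls.Hash()]; ok {
			fmt.Println("duplicate:", ls)
			continue
		}
		seen[ls.Hash()] = struct{}{}
		fmt.Println("new series:", ls)
	}
}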
testdata/20k.series (vendored, 20000 lines): file diff suppressed because it is too large.
testdata/20kseries.json (vendored, new normal file, 20000 lines): file diff suppressed because it is too large.
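The commit also adds a mapToLabels helper to main.go that is not exercised in the hunks shown here. Presumably it turns a JSON-decoded object, such as a line of testdata/20kseries.json, into a labels.Labels slice; the decoding loop below is an assumption for illustration, not code from the commit, and the sample lines are made up.

package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"strings"

	"github.com/prometheus/tsdb/labels"
)

// mapToLabels as added by this commit: flattens a decoded JSON object into labels.
// Go map iteration order is random, so the resulting label order is not stable.
func mapToLabels(m map[string]interface{}, l *labels.Labels) {
	for k, v := range m {
		*l = append(*l, labels.Label{Name: k, Value: v.(string)})
	}
}

func main() {
	// Hypothetical stand-in for a couple of lines from testdata/20kseries.json.
	input := `{"__name__":"up","job":"prometheus"}
{"__name__":"go_goroutines","job":"prometheus"}`

	scanner := bufio.NewScanner(strings.NewReader(input))
	for scanner.Scan() {
		var m map[string]interface{}
		if err := json.Unmarshal(scanner.Bytes(), &m); err != nil {
			fmt.Println("skipping line:", err)
			continue
		}
		var lset labels.Labels
		mapToLabels(m, &lset)
		fmt.Println(lset)
	}
}

Because the map round-trip loses ordering, this helper yields unordered labels, which is presumably why readPrometheusLabels parses the raw line itself when key order matters.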
WAL tests (file name not shown in this view)
@@ -71,7 +71,7 @@ func TestSegmentWAL_Truncate(t *testing.T) {
 		numMetrics = 20000
 		batch      = 100
 	)
-	series, err := readPrometheusLabels("testdata/20k.series", numMetrics)
+	series, err := readPrometheusLabels("testdata/20kseries.json", numMetrics)
 	testutil.Ok(t, err)
 
 	dir, err := ioutil.TempDir("", "test_wal_log_truncate")
@@ -150,7 +150,7 @@ func TestSegmentWAL_Log_Restore(t *testing.T) {
 	)
 	// Generate testing data. It does not make semantical sense but
 	// for the purpose of this test.
-	series, err := readPrometheusLabels("testdata/20k.series", numMetrics)
+	series, err := readPrometheusLabels("testdata/20kseries.json", numMetrics)
 	testutil.Ok(t, err)
 
 	dir, err := ioutil.TempDir("", "test_wal_log_restore")