fix some unchecked errors and remove unused vars. (#592)

Signed-off-by: Krasi Georgiev <kgeorgie@redhat.com>
Krasi Georgiev authored on 2019-04-25 13:07:04 +03:00; committed by GitHub
parent a10c001689
commit 288f67efbf
11 changed files with 26 additions and 29 deletions

@@ -107,7 +107,9 @@ func main() {
         }
         dumpSamples(db, *dumpMinTime, *dumpMaxTime)
     }
-    flag.CommandLine.Set("log.level", "debug")
+    if err := flag.CommandLine.Set("log.level", "debug"); err != nil {
+        exitWithError(err)
+    }
 }
 
 type writeBenchmark struct {
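For reference, flag.CommandLine.Set returns a non-nil error when the flag name is unknown or the value cannot be parsed, so discarding it can hide misconfiguration. A minimal, self-contained sketch of the checked form (not the tsdb code itself; the log.level flag is registered locally here purely for illustration):

package main

import (
	"flag"
	"fmt"
	"os"
)

func main() {
	// Register a flag so the Set call below has something to resolve.
	level := flag.String("log.level", "info", "log verbosity")
	flag.Parse()

	// Set errors on unknown flags or unparsable values; check it
	// instead of dropping it.
	if err := flag.CommandLine.Set("log.level", "debug"); err != nil {
		fmt.Fprintln(os.Stderr, "setting log.level:", err)
		os.Exit(1)
	}
	fmt.Println("log level:", *level)
}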

@@ -27,7 +27,6 @@ import (
     "github.com/go-kit/kit/log"
     "github.com/pkg/errors"
     prom_testutil "github.com/prometheus/client_golang/prometheus/testutil"
-    dto "github.com/prometheus/client_model/go"
     "github.com/prometheus/tsdb/chunks"
     "github.com/prometheus/tsdb/fileutil"
     "github.com/prometheus/tsdb/labels"
@@ -744,7 +743,7 @@ func TestCompaction_populateBlock(t *testing.T) {
         if ok := t.Run(tc.title, func(t *testing.T) {
             blocks := make([]BlockReader, 0, len(tc.inputSeriesSamples))
             for _, b := range tc.inputSeriesSamples {
-                ir, cr, mint, maxt := createIdxChkReaders(b)
+                ir, cr, mint, maxt := createIdxChkReaders(t, b)
                 blocks = append(blocks, &mockBReader{ir: ir, cr: cr, mint: mint, maxt: maxt})
             }
@@ -890,16 +889,14 @@ func TestDisableAutoCompactions(t *testing.T) {
     default:
     }
 
-    m := &dto.Metric{}
     for x := 0; x < 10; x++ {
-        db.metrics.compactionsSkipped.Write(m)
-        if *m.Counter.Value > float64(0) {
+        if prom_testutil.ToFloat64(db.metrics.compactionsSkipped) > 0.0 {
             break
         }
         time.Sleep(10 * time.Millisecond)
     }
 
-    testutil.Assert(t, *m.Counter.Value > float64(0), "No compaction was skipped after the set timeout.")
+    testutil.Assert(t, prom_testutil.ToFloat64(db.metrics.compactionsSkipped) > 0.0, "No compaction was skipped after the set timeout.")
     testutil.Equals(t, 0, len(db.blocks))
 
     // Enable the compaction, trigger it and check that the block is persisted.
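The compaction-test change swaps the hand-rolled dto.Metric read for client_golang's prometheus/testutil.ToFloat64 helper, which collects a single-metric collector and returns its current value. A minimal sketch of that pattern with a standalone counter standing in for db.metrics.compactionsSkipped (assuming client_golang is on the module path):

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
	prom_testutil "github.com/prometheus/client_golang/prometheus/testutil"
)

func main() {
	// A counter standing in for db.metrics.compactionsSkipped.
	skipped := prometheus.NewCounter(prometheus.CounterOpts{
		Name: "example_compactions_skipped_total",
		Help: "Example counter for the ToFloat64 sketch.",
	})
	skipped.Inc()

	// ToFloat64 gathers the collector and returns the sample value,
	// so no intermediate dto.Metric is needed.
	fmt.Println(prom_testutil.ToFloat64(skipped) > 0.0) // true
}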

go.mod

@@ -18,7 +18,7 @@ require (
     github.com/pkg/errors v0.8.0
     github.com/pmezard/go-difflib v1.0.0 // indirect
     github.com/prometheus/client_golang v0.9.1
-    github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910
+    github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910 // indirect
     github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce // indirect
     github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d // indirect
     github.com/stretchr/testify v1.2.2 // indirect

@@ -859,7 +859,7 @@ func TestGCChunkAccess(t *testing.T) {
     _, err = cr.Chunk(chunks[1].Ref)
     testutil.Ok(t, err)
 
-    h.Truncate(1500) // Remove a chunk.
+    testutil.Ok(t, h.Truncate(1500)) // Remove a chunk.
 
     _, err = cr.Chunk(chunks[0].Ref)
     testutil.Equals(t, ErrNotFound, err)
@@ -899,7 +899,7 @@ func TestGCSeriesAccess(t *testing.T) {
     _, err = cr.Chunk(chunks[1].Ref)
     testutil.Ok(t, err)
 
-    h.Truncate(2000) // Remove the series.
+    testutil.Ok(t, h.Truncate(2000)) // Remove the series.
 
     testutil.Equals(t, (*memSeries)(nil), h.series.getByID(1))
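Several of the test changes follow the same shape: a call whose error was previously dropped (h.Truncate, os.MkdirAll, the mockIndex methods, and so on) is wrapped in testutil.Ok so the test fails if the call errors. A minimal sketch of that assertion style, using a local ok helper rather than the tsdb testutil package so the example stays self-contained:

package example

import (
	"os"
	"path/filepath"
	"testing"
)

// ok mirrors the shape of an assertion like testutil.Ok: it fails the
// test immediately when err is non-nil instead of silently dropping it.
func ok(tb testing.TB, err error) {
	tb.Helper()
	if err != nil {
		tb.Fatalf("unexpected error: %s", err)
	}
}

func TestMkdirChecked(t *testing.T) {
	dir := t.TempDir()

	// The error is asserted instead of being discarded.
	ok(t, os.MkdirAll(filepath.Join(dir, "chunks"), 0777))
}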

@@ -53,7 +53,6 @@ const (
 type indexWriterSeries struct {
     labels labels.Labels
     chunks []chunks.Meta // series file offset of chunks
-    offset uint32        // index file offset of series reference
 }
 
 type indexWriterSeriesSlice []*indexWriterSeries

@@ -295,7 +295,7 @@ func TestPersistence_index_e2e(t *testing.T) {
     for i, s := range input {
         err = iw.AddSeries(uint64(i), s.labels, s.chunks...)
         testutil.Ok(t, err)
-        mi.AddSeries(uint64(i), s.labels, s.chunks...)
+        testutil.Ok(t, mi.AddSeries(uint64(i), s.labels, s.chunks...))
 
         for _, l := range s.labels {
             valset, ok := values[l.Name]
@@ -325,7 +325,7 @@ func TestPersistence_index_e2e(t *testing.T) {
     }
 
     err = iw.WritePostings("", "", newListPostings(all...))
     testutil.Ok(t, err)
-    mi.WritePostings("", "", newListPostings(all...))
+    testutil.Ok(t, mi.WritePostings("", "", newListPostings(all...)))
 
     for n, e := range postings.m {
         for v := range e {

@@ -204,9 +204,7 @@ func ReadLabels(fn string, n int) ([]Labels, error) {
         hashes[h] = struct{}{}
         i++
     }
-    if err != nil {
-        return nil, err
-    }
     if i != n {
         return mets, errors.Errorf("requested %d metrics but found %d", n, i)
     }

@@ -194,7 +194,7 @@ type seriesSamples struct {
 // Index: labels -> postings -> chunkMetas -> chunkRef
 // ChunkReader: ref -> vals
-func createIdxChkReaders(tc []seriesSamples) (IndexReader, ChunkReader, int64, int64) {
+func createIdxChkReaders(t *testing.T, tc []seriesSamples) (IndexReader, ChunkReader, int64, int64) {
     sort.Slice(tc, func(i, j int) bool {
         return labels.Compare(labels.FromMap(tc[i].lset), labels.FromMap(tc[i].lset)) < 0
     })
@@ -234,7 +234,7 @@ func createIdxChkReaders(tc []seriesSamples) (IndexReader, ChunkReader, int64, i
         }
         ls := labels.FromMap(s.lset)
-        mi.AddSeries(uint64(i), ls, metas...)
+        testutil.Ok(t, mi.AddSeries(uint64(i), ls, metas...))
 
         postings.Add(uint64(i), ls)
@@ -249,12 +249,12 @@ func createIdxChkReaders(tc []seriesSamples) (IndexReader, ChunkReader, int64, i
     }
 
     for l, vs := range lblIdx {
-        mi.WriteLabelIndex([]string{l}, vs.slice())
+        testutil.Ok(t, mi.WriteLabelIndex([]string{l}, vs.slice()))
     }
 
-    postings.Iter(func(l labels.Label, p index.Postings) error {
+    testutil.Ok(t, postings.Iter(func(l labels.Label, p index.Postings) error {
         return mi.WritePostings(l.Name, l.Value, p)
-    })
+    }))
 
     return mi, chkReader, blockMint, blockMaxt
 }
@@ -363,7 +363,7 @@ func TestBlockQuerier(t *testing.T) {
 Outer:
     for _, c := range cases.queries {
-        ir, cr, _, _ := createIdxChkReaders(cases.data)
+        ir, cr, _, _ := createIdxChkReaders(t, cases.data)
         querier := &blockQuerier{
             index:  ir,
             chunks: cr,
@@ -525,7 +525,7 @@ func TestBlockQuerierDelete(t *testing.T) {
 Outer:
     for _, c := range cases.queries {
-        ir, cr, _, _ := createIdxChkReaders(cases.data)
+        ir, cr, _, _ := createIdxChkReaders(t, cases.data)
         querier := &blockQuerier{
             index:  ir,
             chunks: cr,
@@ -630,7 +630,7 @@ func TestBaseChunkSeries(t *testing.T) {
     for _, tc := range cases {
         mi := newMockIndex()
         for _, s := range tc.series {
-            mi.AddSeries(s.ref, s.lset, s.chunks...)
+            testutil.Ok(t, mi.AddSeries(s.ref, s.lset, s.chunks...))
         }
 
         bcs := &baseChunkSeries{
@@ -1884,7 +1884,7 @@ func TestPostingsForMatchers(t *testing.T) {
             for p.Next() {
                 lbls := labels.Labels{}
-                ir.Series(p.At(), &lbls, &[]chunks.Meta{})
+                testutil.Ok(t, ir.Series(p.At(), &lbls, &[]chunks.Meta{}))
                 if _, ok := exp[lbls.String()]; !ok {
                     t.Errorf("Evaluating %v, unexpected result %s", c.matchers, lbls.String())
                 } else {
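createIdxChkReaders now takes the *testing.T of the calling test, which lets the helper assert on the errors of the mockIndex operations itself instead of returning them to every caller. A hedged sketch of that shape, with a hypothetical buildIndex helper and fakeIndex type used only for illustration:

package example

import (
	"fmt"
	"testing"
)

// fakeIndex is a stand-in for a mock index whose mutating methods
// return errors, in the spirit of the mockIndex used in the querier tests.
type fakeIndex struct {
	series map[uint64]string
}

func (f *fakeIndex) AddSeries(ref uint64, name string) error {
	if _, ok := f.series[ref]; ok {
		return fmt.Errorf("series with reference %d already added", ref)
	}
	f.series[ref] = name
	return nil
}

// buildIndex receives the test's *testing.T so it can fail fast on
// setup errors rather than returning them to every caller.
func buildIndex(t *testing.T, names []string) *fakeIndex {
	t.Helper()
	idx := &fakeIndex{series: map[uint64]string{}}
	for i, n := range names {
		if err := idx.AddSeries(uint64(i), n); err != nil {
			t.Fatalf("adding series %q: %s", n, err)
		}
	}
	return idx
}

func TestBuildIndex(t *testing.T) {
	idx := buildIndex(t, []string{"up", "go_goroutines"})
	if len(idx.series) != 2 {
		t.Fatalf("expected 2 series, got %d", len(idx.series))
	}
}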

@@ -69,7 +69,7 @@ func TestRepairBadIndexVersion(t *testing.T) {
     testutil.NotOk(t, err)
 
     // Touch chunks dir in block.
-    os.MkdirAll(filepath.Join(dbDir, "chunks"), 0777)
+    testutil.Ok(t, os.MkdirAll(filepath.Join(dbDir, "chunks"), 0777))
     defer func() {
         testutil.Ok(t, os.RemoveAll(filepath.Join(dbDir, "chunks")))
     }()

@@ -139,7 +139,8 @@ func TestMemTombstonesConcurrency(t *testing.T) {
     }()
     go func() {
         for x := 0; x < totalRuns; x++ {
-            tomb.Get(uint64(x))
+            _, err := tomb.Get(uint64(x))
+            testutil.Ok(t, err)
         }
         wg.Done()
     }()
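The tombstones change also checks the error returned by tomb.Get inside the reader goroutine. A minimal sketch of that concurrency-test shape, using a trivial mutex-guarded store in place of memTombstones; note the sketch reports failures from the goroutine with t.Errorf, since Fatalf should only be called from the test goroutine:

package example

import (
	"fmt"
	"sync"
	"testing"
)

// store is a stand-in for memTombstones: a mutex-guarded map whose
// reads return an error value that should not be ignored.
type store struct {
	mtx sync.Mutex
	m   map[uint64]int64
}

func (s *store) Set(ref uint64, v int64) {
	s.mtx.Lock()
	defer s.mtx.Unlock()
	s.m[ref] = v
}

func (s *store) Get(ref uint64) (int64, error) {
	s.mtx.Lock()
	defer s.mtx.Unlock()
	v, ok := s.m[ref]
	if !ok {
		return 0, fmt.Errorf("reference %d not found", ref)
	}
	return v, nil
}

func TestStoreConcurrency(t *testing.T) {
	const totalRuns = 100
	s := &store{m: map[uint64]int64{}}
	// Pre-populate so concurrent reads always find their reference.
	for x := 0; x < totalRuns; x++ {
		s.Set(uint64(x), 0)
	}

	var wg sync.WaitGroup
	wg.Add(2)
	go func() {
		defer wg.Done()
		for x := 0; x < totalRuns; x++ {
			s.Set(uint64(x), int64(x))
		}
	}()
	go func() {
		defer wg.Done()
		for x := 0; x < totalRuns; x++ {
			// The error is checked instead of being dropped.
			if _, err := s.Get(uint64(x)); err != nil {
				t.Errorf("get %d: %s", x, err)
			}
		}
	}()
	wg.Wait()
}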

@@ -146,9 +146,9 @@ func TestSegmentWAL_Truncate(t *testing.T) {
     var readSeries []RefSeries
     r := w.Reader()
 
-    r.Read(func(s []RefSeries) {
+    testutil.Ok(t, r.Read(func(s []RefSeries) {
         readSeries = append(readSeries, s...)
-    }, nil, nil)
+    }, nil, nil))
 
     testutil.Equals(t, expected, readSeries)
 }