fix static check errors ()

Fix the tests for the `check_license` and `staticcheck` targets.

The static check also found some actual bugs.

Signed-off-by: Krasi Georgiev <kgeorgie@redhat.com>
Krasi Georgiev 2019-01-02 19:48:42 +03:00 committed by GitHub
parent eb6586f513
commit 48c439d26d
14 changed files with 36 additions and 22 deletions

@@ -15,10 +15,10 @@ go_import_path: github.com/prometheus/tsdb
 before_install:
 - if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then choco install make; fi
 install:
-- go get -v -t ./...
+- make deps
 script:
 # `staticcheck` target is omitted due to linting errors
-- if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then make test; else make check_license style unused test; fi
+- if [[ "$TRAVIS_OS_NAME" == "windows" ]]; then make test; else make; fi

@@ -18,11 +18,15 @@ TSDB_BENCHMARK_NUM_METRICS ?= 1000
 TSDB_BENCHMARK_DATASET ?= "$(TSDB_PROJECT_DIR)/testdata/20kseries.json"
 TSDB_BENCHMARK_OUTPUT_DIR ?= "$(TSDB_CLI_DIR)/benchout"
-STATICCHECK_IGNORE =
 include Makefile.common
+.PHONY: deps
+deps:
+	@echo ">> getting dependencies"
+	GO111MODULE=$(GO111MODULE) $(GO) get $(GOOPTS) -t ./...
 build:
-	@$(GO) build -o $(TSDB_BIN) $(TSDB_CLI_DIR)
+	GO111MODULE=$(GO111MODULE) $(GO) build -o $(TSDB_BIN) $(TSDB_CLI_DIR)
 bench: build
 	@echo ">> running benchmark, writing result to $(TSDB_BENCHMARK_OUTPUT_DIR)"

@@ -31,11 +31,11 @@ func TestLastCheckpoint(t *testing.T) {
 	testutil.Ok(t, err)
 	defer os.RemoveAll(dir)
-	s, k, err := LastCheckpoint(dir)
+	_, _, err = LastCheckpoint(dir)
 	testutil.Equals(t, ErrNotFound, err)
 	testutil.Ok(t, os.MkdirAll(filepath.Join(dir, "checkpoint.0000"), 0777))
-	s, k, err = LastCheckpoint(dir)
+	s, k, err := LastCheckpoint(dir)
 	testutil.Ok(t, err)
 	testutil.Equals(t, filepath.Join(dir, "checkpoint.0000"), s)
 	testutil.Equals(t, 0, k)
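Most of the test changes in this commit follow the same two staticcheck findings: values that are assigned but never read, and returned errors that are never checked. The sketch below is purely illustrative and uses an invented lastCheckpoint helper, not the package's LastCheckpoint; it only shows the corrected idioms (blank identifier for unwanted results, explicit error checks).

// Hypothetical, self-contained illustration; lastCheckpoint is an invented
// stand-in, not the tsdb function of a similar name.
package main

import (
    "errors"
    "fmt"
)

var errNotFound = errors.New("not found")

func lastCheckpoint(dir string) (string, int, error) {
    if dir == "" {
        return "", 0, errNotFound
    }
    return dir + "/checkpoint.0000", 0, nil
}

func main() {
    // Only the error is interesting here, so the other results go to the
    // blank identifier instead of variables staticcheck would flag as unused.
    _, _, err := lastCheckpoint("")
    if err != errNotFound {
        fmt.Println("expected errNotFound, got:", err)
    }

    // When the results are needed, check the error before using them.
    name, idx, err := lastCheckpoint("data")
    if err != nil {
        fmt.Println("unexpected error:", err)
        return
    }
    fmt.Println(name, idx)
}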

@@ -51,7 +51,7 @@ func main() {
 		listPath = listCmd.Arg("db path", "database path (default is "+filepath.Join("benchout", "storage")+")").Default(filepath.Join("benchout", "storage")).String()
 		analyzeCmd = cli.Command("analyze", "analyze churn, label pair cardinality.")
 		analyzePath = analyzeCmd.Arg("db path", "database path (default is "+filepath.Join("benchout", "storage")+")").Default(filepath.Join("benchout", "storage")).String()
-		analyzeBlockId = analyzeCmd.Arg("block id", "block to analyze (default is the last block)").String()
+		analyzeBlockID = analyzeCmd.Arg("block id", "block to analyze (default is the last block)").String()
 		analyzeLimit = analyzeCmd.Flag("limit", "how many items to show in each list").Default("20").Int()
 	)
@@ -76,9 +76,9 @@ func main() {
 	}
 	blocks := db.Blocks()
 	var block *tsdb.Block
-	if *analyzeBlockId != "" {
+	if *analyzeBlockID != "" {
 		for _, b := range blocks {
-			if b.Meta().ULID.String() == *analyzeBlockId {
+			if b.Meta().ULID.String() == *analyzeBlockID {
 				block = b
 				break
 			}
@@ -455,15 +455,17 @@ func analyzeBlock(b *tsdb.Block, limit int) {
 	lbls := labels.Labels{}
 	chks := []chunks.Meta{}
 	for p.Next() {
-		err = ir.Series(p.At(), &lbls, &chks)
+		if err = ir.Series(p.At(), &lbls, &chks); err != nil {
+			exitWithError(err)
+		}
 		// Amount of the block time range not covered by this series.
 		uncovered := uint64(meta.MaxTime-meta.MinTime) - uint64(chks[len(chks)-1].MaxTime-chks[0].MinTime)
 		for _, lbl := range lbls {
 			key := lbl.Name + "=" + lbl.Value
 			labelsUncovered[lbl.Name] += uncovered
 			labelpairsUncovered[key] += uncovered
-			labelpairsCount[key] += 1
-			entries += 1
+			labelpairsCount[key]++
+			entries++
 		}
 	}
 	if p.Err() != nil {

@@ -1522,6 +1522,7 @@ func TestBlockRanges(t *testing.T) {
 	_, err = app.Add(lbl, secondBlockMaxt+3, rand.Float64())
 	testutil.Ok(t, err)
 	_, err = app.Add(lbl, secondBlockMaxt+4, rand.Float64())
+	testutil.Ok(t, err)
 	testutil.Ok(t, app.Commit())
 	testutil.Ok(t, db.Close())

@@ -685,6 +685,7 @@ func (h *Head) getAppendBuffer() []RefSample {
 }
 
 func (h *Head) putAppendBuffer(b []RefSample) {
+	//lint:ignore SA6002 safe to ignore and actually fixing it has some performance penalty.
 	h.appendPool.Put(b[:0])
 }
@@ -697,6 +698,7 @@ func (h *Head) getBytesBuffer() []byte {
 }
 
 func (h *Head) putBytesBuffer(b []byte) {
+	//lint:ignore SA6002 safe to ignore and actually fixing it has some performance penalty.
 	h.bytesPool.Put(b[:0])
 }
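The //lint:ignore SA6002 directives above silence staticcheck's warning that sync.Pool.Put is being handed a slice value: boxing the slice header into an interface{} allocates on every Put. The usual alternative is to pool a pointer to the slice, sketched below for context only; the names here are invented, and the commit deliberately keeps the simpler Put(b[:0]) form because threading pointers through these call sites has its own cost.

// Illustrative only; this is not how head.go is written, just the pattern
// SA6002 nudges you toward.
package main

import (
    "fmt"
    "sync"
)

// The pool stores *[]byte so Get/Put move a pointer instead of copying a
// slice header into an interface{} (the allocation SA6002 complains about).
var bufPool = sync.Pool{
    New: func() interface{} {
        b := make([]byte, 0, 1024)
        return &b
    },
}

func getBuf() *[]byte { return bufPool.Get().(*[]byte) }

func putBuf(b *[]byte) {
    *b = (*b)[:0] // reset length, keep capacity
    bufPool.Put(b)
}

func main() {
    b := getBuf()
    *b = append(*b, "hello"...)
    fmt.Println(len(*b), cap(*b))
    putBuf(b)
}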

@@ -390,8 +390,8 @@ Outer:
 func TestDeleteUntilCurMax(t *testing.T) {
 	numSamples := int64(10)
 	hb, err := NewHead(nil, nil, nil, 1000000)
-	defer hb.Close()
 	testutil.Ok(t, err)
+	defer hb.Close()
 	app := hb.Appender()
 	smpls := make([]float64, numSamples)
 	for i := int64(0); i < numSamples; i++ {
@@ -677,7 +677,7 @@ func TestMemSeries_append(t *testing.T) {
 	ok, chunkCreated = s.append(1000, 3)
 	testutil.Assert(t, ok, "append failed")
-	testutil.Assert(t, ok, "expected new chunk on boundary")
+	testutil.Assert(t, chunkCreated, "expected new chunk on boundary")
 	ok, chunkCreated = s.append(1001, 4)
 	testutil.Assert(t, ok, "append failed")
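The reorder in TestDeleteUntilCurMax follows the staticcheck rule about deferring a cleanup before the constructor's error has been checked (SA5001): if NewHead fails, the deferred Close would still run against an unusable value. A minimal sketch of the same rule, using os.Open as a stand-in rather than the Head API:

// Hypothetical example; os.Open stands in for any constructor that can fail.
package main

import (
    "fmt"
    "os"
)

func main() {
    f, err := os.Open("does-not-exist")
    if err != nil {
        // Had `defer f.Close()` been registered before this check, the
        // deferred call would still run against the nil *os.File.
        fmt.Println("open failed:", err)
        return
    }
    defer f.Close() // safe: f is known to be valid here

    fmt.Println("opened", f.Name())
}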

@@ -339,6 +339,7 @@ func TestPersistence_index_e2e(t *testing.T) {
 		testutil.Ok(t, err)
 		expp, err := mi.Postings(p.Name, p.Value)
+		testutil.Ok(t, err)
 		var lset, explset labels.Labels
 		var chks, expchks []chunks.Meta
@@ -352,6 +353,7 @@ func TestPersistence_index_e2e(t *testing.T) {
 			testutil.Ok(t, err)
 			err = mi.Series(expp.At(), &explset, &expchks)
+			testutil.Ok(t, err)
 			testutil.Equals(t, explset, lset)
 			testutil.Equals(t, expchks, chks)
 		}

@@ -150,7 +150,7 @@ func BenchmarkMapFromLabels(b *testing.B) {
 	b.ReportAllocs()
 	for i := 0; i < b.N; i++ {
-		m = ls.Map()
+		_ = ls.Map()
 	}
 }

@@ -581,11 +581,9 @@ func (s *populatedChunkSeries) Next() bool {
 			// This means that the chunk has be garbage collected. Remove it from the list.
 			if s.err == ErrNotFound {
 				s.err = nil
-
 				// Delete in-place.
-				chks = append(chks[:j], chks[j+1:]...)
+				s.chks = append(chks[:j], chks[j+1:]...)
 			}
-
 			return false
 		}
 	}
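This is one of the genuine bugs the commit message mentions: the result of the in-place delete was written to the local chks rather than the s.chks field, so the assignment had no effect outside the function, and staticcheck flagged it as a value that is never used. A simplified, hypothetical sketch of that bug class (the series type below is invented, not the tsdb one):

// Hypothetical types for illustration only.
package main

import "fmt"

type series struct {
    chks []int
}

// dropAt removes the element at index j. Writing the result back only to the
// parameter `chks` (the buggy form) would update just the local slice header;
// assigning to s.chks records the shorter slice on the struct.
func (s *series) dropAt(chks []int, j int) {
    s.chks = append(chks[:j], chks[j+1:]...)
}

func main() {
    s := &series{chks: []int{10, 20, 30}}
    s.dropAt(s.chks, 1)
    fmt.Println(s.chks) // [10 30]
}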

@@ -65,7 +65,7 @@ func TestRepairBadIndexVersion(t *testing.T) {
 	// Check the current db.
 	// In its current state, lookups should fail with the fixed code.
-	meta, err := readMetaFile(dbDir)
+	_, err := readMetaFile(dbDir)
 	testutil.NotOk(t, err)
 	// Touch chunks dir in block.
@@ -116,7 +116,7 @@ func TestRepairBadIndexVersion(t *testing.T) {
 		{{"a", "2"}, {"b", "1"}},
 	}, res)
-	meta, err = readMetaFile(tmpDbDir)
+	meta, err := readMetaFile(tmpDbDir)
 	testutil.Ok(t, err)
 	testutil.Assert(t, meta.Version == 1, "unexpected meta version %d", meta.Version)
 }

staticcheck.conf (new file)
@@ -0,0 +1,2 @@
+# Enable only "legacy" staticcheck verifications.
+checks = [ "SA*" ]

wal.go
@@ -888,16 +888,19 @@ func (r *walReader) Read(
 		if seriesf != nil {
 			seriesf(v)
 		}
+		//lint:ignore SA6002 safe to ignore and actually fixing it has some performance penalty.
 		seriesPool.Put(v[:0])
 	case []RefSample:
 		if samplesf != nil {
 			samplesf(v)
 		}
+		//lint:ignore SA6002 safe to ignore and actually fixing it has some performance penalty.
 		samplePool.Put(v[:0])
 	case []Stone:
 		if deletesf != nil {
 			deletesf(v)
 		}
+		//lint:ignore SA6002 safe to ignore and actually fixing it has some performance penalty.
 		deletePool.Put(v[:0])
 	default:
 		level.Error(r.logger).Log("msg", "unexpected data type")

@@ -290,7 +290,7 @@ func TestWALRestoreCorrupted_invalidSegment(t *testing.T) {
 	testutil.Ok(t, wal.Close())
-	wal, err = OpenSegmentWAL(dir, log.NewLogfmtLogger(os.Stderr), 0, nil)
+	_, err = OpenSegmentWAL(dir, log.NewLogfmtLogger(os.Stderr), 0, nil)
 	testutil.Ok(t, err)
 	fns, err := fileutil.ReadDir(dir)