diff --git a/discovery/file/file_test.go b/discovery/file/file_test.go
index 3062f01df..a1880220c 100644
--- a/discovery/file/file_test.go
+++ b/discovery/file/file_test.go
@@ -231,7 +231,7 @@ func (t *testRunner) requireTargetGroups(expected, got []*targetgroup.Group) {
 // validTg() maps to fixtures/valid.{json,yml}.
 func validTg(file string) []*targetgroup.Group {
 	return []*targetgroup.Group{
-		&targetgroup.Group{
+		{
 			Targets: []model.LabelSet{
 				{
 					model.AddressLabel: model.LabelValue("localhost:9090"),
@@ -246,7 +246,7 @@ func validTg(file string) []*targetgroup.Group {
 			},
 			Source: fileSource(file, 0),
 		},
-		&targetgroup.Group{
+		{
 			Targets: []model.LabelSet{
 				{
 					model.AddressLabel: model.LabelValue("my.domain"),
@@ -263,7 +263,7 @@ func validTg(file string) []*targetgroup.Group {
 // valid2Tg() maps to fixtures/valid2.{json,yml}.
 func valid2Tg(file string) []*targetgroup.Group {
 	return []*targetgroup.Group{
-		&targetgroup.Group{
+		{
 			Targets: []model.LabelSet{
 				{
 					model.AddressLabel: model.LabelValue("my.domain"),
@@ -274,7 +274,7 @@ func valid2Tg(file string) []*targetgroup.Group {
 			},
 			Source: fileSource(file, 0),
 		},
-		&targetgroup.Group{
+		{
 			Targets: []model.LabelSet{
 				{
 					model.AddressLabel: model.LabelValue("localhost:9090"),
@@ -287,7 +287,7 @@ func valid2Tg(file string) []*targetgroup.Group {
 			},
 			Source: fileSource(file, 1),
 		},
-		&targetgroup.Group{
+		{
 			Targets: []model.LabelSet{
 				{
 					model.AddressLabel: model.LabelValue("example.org:443"),
@@ -431,7 +431,7 @@ func TestUpdateFileWithPartialWrites(t *testing.T) {
 	runner.appendString(sdFile, `: ["localhost:9091"]`)
 	runner.requireUpdate(ref,
 		[]*targetgroup.Group{
-			&targetgroup.Group{
+			{
 				Targets: []model.LabelSet{
 					{
 						model.AddressLabel: model.LabelValue("localhost:9091"),
@@ -442,7 +442,7 @@ func TestUpdateFileWithPartialWrites(t *testing.T) {
 				},
 				Source: fileSource(sdFile, 0),
 			},
-			&targetgroup.Group{
+			{
 				Source: fileSource(sdFile, 1),
 			},
 		},
@@ -467,10 +467,10 @@ func TestRemoveFile(t *testing.T) {
 	runner.requireUpdate(
 		ref,
 		[]*targetgroup.Group{
-			&targetgroup.Group{
+			{
 				Source: fileSource(sdFile, 0),
 			},
-			&targetgroup.Group{
+			{
 				Source: fileSource(sdFile, 1),
 			}},
 	)
diff --git a/discovery/targetgroup/targetgroup_test.go b/discovery/targetgroup/targetgroup_test.go
index 5c28c4d23..2676e1abe 100644
--- a/discovery/targetgroup/targetgroup_test.go
+++ b/discovery/targetgroup/targetgroup_test.go
@@ -38,8 +38,8 @@ func TestTargetGroupStrictJsonUnmarshal(t *testing.T) {
 			json: ` {"labels": {"my":"label"},"targets": ["localhost:9090","localhost:9091"]}`,
 			expectedReply: nil,
 			expectedGroup: Group{Targets: []model.LabelSet{
-				model.LabelSet{"__address__": "localhost:9090"},
-				model.LabelSet{"__address__": "localhost:9091"}}, Labels: model.LabelSet{"my": "label"}},
+				{"__address__": "localhost:9090"},
+				{"__address__": "localhost:9091"}}, Labels: model.LabelSet{"my": "label"}},
 		},
 		{
 			json: ` {"label": {},"targets": []}`,
@@ -83,8 +83,8 @@ func TestTargetGroupYamlMarshal(t *testing.T) {
 		{
 			// targets only exposes addresses.
 			group: Group{Targets: []model.LabelSet{
-				model.LabelSet{"__address__": "localhost:9090"},
-				model.LabelSet{"__address__": "localhost:9091"}},
+				{"__address__": "localhost:9090"},
+				{"__address__": "localhost:9091"}},
 				Labels: model.LabelSet{"foo": "bar", "bar": "baz"}},
 			expectedYaml: "targets:\n- localhost:9090\n- localhost:9091\nlabels:\n bar: baz\n foo: bar\n",
 			expectedErr: nil,
@@ -120,8 +120,8 @@ func TestTargetGroupYamlUnmarshal(t *testing.T) {
 			yaml: "labels:\n my: label\ntargets:\n ['localhost:9090', 'localhost:9191']",
 			expectedReply: nil,
 			expectedGroup: Group{Targets: []model.LabelSet{
-				model.LabelSet{"__address__": "localhost:9090"},
-				model.LabelSet{"__address__": "localhost:9191"}}, Labels: model.LabelSet{"my": "label"}},
+				{"__address__": "localhost:9090"},
+				{"__address__": "localhost:9191"}}, Labels: model.LabelSet{"my": "label"}},
 		},
 		{
 			// incorrect syntax.
@@ -143,8 +143,8 @@ func TestString(t *testing.T) {
 	// String() should return only the source, regardless of other attributes.
 	group1 :=
 		Group{Targets: []model.LabelSet{
-			model.LabelSet{"__address__": "localhost:9090"},
-			model.LabelSet{"__address__": "localhost:9091"}},
+			{"__address__": "localhost:9090"},
+			{"__address__": "localhost:9091"}},
 			Source: "",
 			Labels: model.LabelSet{"foo": "bar", "bar": "baz"}}
 	group2 :=
diff --git a/rules/manager_test.go b/rules/manager_test.go
index 99e305ca6..17cc1870d 100644
--- a/rules/manager_test.go
+++ b/rules/manager_test.go
@@ -654,7 +654,7 @@ func TestCopyState(t *testing.T) {
 	testutil.Equals(t, want, newGroup.seriesInPreviousEval)
 	testutil.Equals(t, oldGroup.rules[0], newGroup.rules[3])
 	testutil.Equals(t, oldGroup.evaluationDuration, newGroup.evaluationDuration)
-	testutil.Equals(t, []labels.Labels{labels.Labels{{Name: "l1", Value: "v3"}}}, newGroup.staleSeries)
+	testutil.Equals(t, []labels.Labels{{{Name: "l1", Value: "v3"}}}, newGroup.staleSeries)
 }
 
 func TestDeletedRuleMarkedStale(t *testing.T) {
diff --git a/scrape/scrape_test.go b/scrape/scrape_test.go
index 5b120cead..3522a130f 100644
--- a/scrape/scrape_test.go
+++ b/scrape/scrape_test.go
@@ -1669,37 +1669,37 @@ func TestScrapeLoopDiscardUnnamedMetrics(t *testing.T) {
 
 func TestReusableConfig(t *testing.T) {
 	variants := []*config.ScrapeConfig{
-		&config.ScrapeConfig{
+		{
 			JobName: "prometheus",
 			ScrapeTimeout: model.Duration(15 * time.Second),
 		},
-		&config.ScrapeConfig{
+		{
 			JobName: "httpd",
 			ScrapeTimeout: model.Duration(15 * time.Second),
 		},
-		&config.ScrapeConfig{
+		{
 			JobName: "prometheus",
 			ScrapeTimeout: model.Duration(5 * time.Second),
 		},
-		&config.ScrapeConfig{
+		{
 			JobName: "prometheus",
 			MetricsPath: "/metrics",
 		},
-		&config.ScrapeConfig{
+		{
 			JobName: "prometheus",
 			MetricsPath: "/metrics2",
 		},
-		&config.ScrapeConfig{
+		{
 			JobName: "prometheus",
 			ScrapeTimeout: model.Duration(5 * time.Second),
 			MetricsPath: "/metrics2",
 		},
-		&config.ScrapeConfig{
+		{
 			JobName: "prometheus",
 			ScrapeInterval: model.Duration(5 * time.Second),
 			MetricsPath: "/metrics2",
 		},
-		&config.ScrapeConfig{
+		{
 			JobName: "prometheus",
 			ScrapeInterval: model.Duration(5 * time.Second),
 			SampleLimit: 1000,
@@ -1708,18 +1708,18 @@ func TestReusableConfig(t *testing.T) {
 	}
 
 	match := [][]int{
-		[]int{0, 2},
-		[]int{4, 5},
-		[]int{4, 6},
-		[]int{4, 7},
-		[]int{5, 6},
-		[]int{5, 7},
-		[]int{6, 7},
+		{0, 2},
+		{4, 5},
+		{4, 6},
+		{4, 7},
+		{5, 6},
+		{5, 7},
+		{6, 7},
 	}
 	noMatch := [][]int{
-		[]int{1, 2},
-		[]int{0, 4},
-		[]int{3, 4},
+		{1, 2},
+		{0, 4},
+		{3, 4},
 	}
 
 	for i, m := range match {
diff --git a/storage/remote/chunked.go b/storage/remote/chunked.go
index baee2350a..670a16834 100644
--- a/storage/remote/chunked.go
+++ b/storage/remote/chunked.go
@@ -10,6 +10,7 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
+
 package remote
 
 import (
diff --git a/storage/remote/chunked_test.go b/storage/remote/chunked_test.go
index d64884c4a..df2c5b27f 100644
--- a/storage/remote/chunked_test.go
+++ b/storage/remote/chunked_test.go
@@ -39,7 +39,7 @@ func TestChunkedReaderCanReadFromChunkedWriter(t *testing.T) {
 		[]byte("test2"),
 		[]byte("test3"),
 		[]byte("test4"),
-		[]byte{}, // This is ignored by writer.
+		{}, // This is ignored by writer.
 		[]byte("test5-after-empty"),
 	}
 
diff --git a/storage/remote/codec_test.go b/storage/remote/codec_test.go
index d694b487c..daf4b496d 100644
--- a/storage/remote/codec_test.go
+++ b/storage/remote/codec_test.go
@@ -164,11 +164,11 @@ func TestConcreteSeriesClonesLabels(t *testing.T) {
 func TestFromQueryResultWithDuplicates(t *testing.T) {
 	ts1 := prompb.TimeSeries{
 		Labels: []prompb.Label{
-			prompb.Label{Name: "foo", Value: "bar"},
-			prompb.Label{Name: "foo", Value: "def"},
+			{Name: "foo", Value: "bar"},
+			{Name: "foo", Value: "def"},
 		},
 		Samples: []prompb.Sample{
-			prompb.Sample{Value: 0.0, Timestamp: 0},
+			{Value: 0.0, Timestamp: 0},
 		},
 	}
 
diff --git a/tsdb/block_test.go b/tsdb/block_test.go
index f812d2970..df18a08ac 100644
--- a/tsdb/block_test.go
+++ b/tsdb/block_test.go
@@ -292,14 +292,14 @@ func TestReadIndexFormatV1(t *testing.T) {
 	q, err := NewBlockQuerier(block, 0, 1000)
 	testutil.Ok(t, err)
 	testutil.Equals(t, query(t, q, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")),
-		map[string][]tsdbutil.Sample{`{foo="bar"}`: []tsdbutil.Sample{sample{t: 1, v: 2}}})
+		map[string][]tsdbutil.Sample{`{foo="bar"}`: {sample{t: 1, v: 2}}})
 
 	q, err = NewBlockQuerier(block, 0, 1000)
 	testutil.Ok(t, err)
 	testutil.Equals(t, query(t, q, labels.MustNewMatcher(labels.MatchNotRegexp, "foo", "^.?$")),
 		map[string][]tsdbutil.Sample{
-			`{foo="bar"}`: []tsdbutil.Sample{sample{t: 1, v: 2}},
-			`{foo="baz"}`: []tsdbutil.Sample{sample{t: 3, v: 4}},
+			`{foo="bar"}`: {sample{t: 1, v: 2}},
+			`{foo="baz"}`: {sample{t: 3, v: 4}},
 		})
 }
 
diff --git a/tsdb/db_test.go b/tsdb/db_test.go
index 29b2bdbea..3ce6a3c2a 100644
--- a/tsdb/db_test.go
+++ b/tsdb/db_test.go
@@ -2619,7 +2619,7 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) {
 	testutil.Ok(t, err)
 	_, seriesSet, err = expandSeriesSet(ss)
 	testutil.Ok(t, err)
-	testutil.Equals(t, map[string][]sample{`{foo="bar"}`: []sample{}}, seriesSet)
+	testutil.Equals(t, map[string][]sample{`{foo="bar"}`: {}}, seriesSet)
 
 	querierAfterCommit, err := db.Querier(context.Background(), 0, 1000000)
 	testutil.Ok(t, err)
@@ -2630,7 +2630,7 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) {
 	testutil.Ok(t, err)
 	_, seriesSet, err = expandSeriesSet(ss)
 	testutil.Ok(t, err)
-	testutil.Equals(t, map[string][]sample{`{foo="bar"}`: []sample{{t: 0, v: 0}}}, seriesSet)
+	testutil.Equals(t, map[string][]sample{`{foo="bar"}`: {{t: 0, v: 0}}}, seriesSet)
 }
 
 // TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and
@@ -2653,7 +2653,7 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
 		// all chunks should fit in a single segment.
 		{
 			chks: [][]chunks.Meta{
-				[]chunks.Meta{
+				{
 					chk1,
 					chk2,
 					chk3,
@@ -2666,7 +2666,7 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
 		// 1:Two chunks can fit in a single segment so the last one should result in a new segment.
 		{
 			chks: [][]chunks.Meta{
-				[]chunks.Meta{
+				{
 					chk1,
 					chk2,
 					chk3,
@@ -2682,7 +2682,7 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
 		// the last segment should still create a new segment.
 		{
 			chks: [][]chunks.Meta{
-				[]chunks.Meta{
+				{
 					chk1,
 					chk2,
 					chk3,
@@ -2696,7 +2696,7 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
 		// it should still be written by ignoring the max segment size.
 		{
 			chks: [][]chunks.Meta{
-				[]chunks.Meta{
+				{
 					chk1,
 				},
 			},
@@ -2709,7 +2709,7 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
 		// Each segment will hold a single chunk.
 		{
 			chks: [][]chunks.Meta{
-				[]chunks.Meta{
+				{
 					chk1,
 					chk2,
 					chk3,
@@ -2722,12 +2722,12 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
 		// 5:Adding multiple batches of chunks.
 		{
 			chks: [][]chunks.Meta{
-				[]chunks.Meta{
+				{
 					chk1,
 					chk2,
 					chk3,
 				},
-				[]chunks.Meta{
+				{
 					chk4,
 					chk5,
 				},
@@ -2739,14 +2739,14 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
 		// 6:Adding multiple batches of chunks.
 		{
 			chks: [][]chunks.Meta{
-				[]chunks.Meta{
+				{
 					chk1,
 				},
-				[]chunks.Meta{
+				{
 					chk2,
 					chk3,
 				},
-				[]chunks.Meta{
+				{
 					chk4,
 				},
 			},
diff --git a/tsdb/index/index_test.go b/tsdb/index/index_test.go
index a7d38a88b..e0a3c1967 100644
--- a/tsdb/index/index_test.go
+++ b/tsdb/index/index_test.go
@@ -234,8 +234,8 @@ func TestIndexRW_Postings(t *testing.T) {
 		return d.Err()
 	}))
 	testutil.Equals(t, map[string][]string{
-		"a": []string{"1"},
-		"b": []string{"1", "2", "3", "4"},
+		"a": {"1"},
+		"b": {"1", "2", "3", "4"},
 	}, labelIndices)
 
 	testutil.Ok(t, ir.Close())
diff --git a/tsdb/index/postings_test.go b/tsdb/index/postings_test.go
index 3e4d30b78..f75d9ea6e 100644
--- a/tsdb/index/postings_test.go
+++ b/tsdb/index/postings_test.go
@@ -852,7 +852,7 @@ func TestMemPostings_Delete(t *testing.T) {
 
 	before := p.Get(allPostingsKey.Name, allPostingsKey.Value)
 	p.Delete(map[uint64]struct{}{
-		2: struct{}{},
+		2: {},
 	})
 	after := p.Get(allPostingsKey.Name, allPostingsKey.Value)
 
diff --git a/tsdb/index/postingsstats.go b/tsdb/index/postingsstats.go
index 5cb17bd0c..2fc2465d9 100644
--- a/tsdb/index/postingsstats.go
+++ b/tsdb/index/postingsstats.go
@@ -10,6 +10,7 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
+
 package index
 
 import (
diff --git a/tsdb/querier_test.go b/tsdb/querier_test.go
index 4de8cf3a6..d03ec2cd0 100644
--- a/tsdb/querier_test.go
+++ b/tsdb/querier_test.go
@@ -461,9 +461,9 @@ func TestBlockQuerierDelete(t *testing.T) {
 			},
 		},
 		tombstones: tombstones.NewTestMemTombstones([]tombstones.Intervals{
-			tombstones.Intervals{{Mint: 1, Maxt: 3}},
-			tombstones.Intervals{{Mint: 1, Maxt: 3}, {Mint: 6, Maxt: 10}},
-			tombstones.Intervals{{Mint: 6, Maxt: 10}},
+			{{Mint: 1, Maxt: 3}},
+			{{Mint: 1, Maxt: 3}, {Mint: 6, Maxt: 10}},
+			{{Mint: 6, Maxt: 10}},
 		}),
 		queries: []query{
 			{
diff --git a/util/promlint/promlint_test.go b/util/promlint/promlint_test.go
index f6f5a39a7..e9039c981 100644
--- a/util/promlint/promlint_test.go
+++ b/util/promlint/promlint_test.go
@@ -739,7 +739,7 @@ func TestLintUnitAbbreviations(t *testing.T) {
 %s 10
 `, n, n, n),
 			problems: []promlint.Problem{
-				promlint.Problem{
+				{
 					Metric: n,
 					Text: "metric names should not contain abbreviated units",
 				},
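Every hunk above applies the same mechanical simplification: when a composite literal's element type is already implied by the enclosing slice, array, or map type, Go allows the inner type name to be omitted, which is the form gofmt -s prefers. A minimal, self-contained sketch of the before/after shapes; the point type and this snippet are illustrative only and are not part of the patch:

    package main

    import "fmt"

    // point is a hypothetical type used only to illustrate the rewrite.
    type point struct{ x, y int }

    func main() {
    	// Before: the element type is repeated inside every entry.
    	verbose := []point{
    		point{x: 1, y: 2},
    		point{x: 3, y: 4},
    	}

    	// After: the redundant element type is elided, matching the
    	// simplified literals introduced throughout this patch.
    	simplified := []point{
    		{x: 1, y: 2},
    		{x: 3, y: 4},
    	}

    	fmt.Println(verbose, simplified)
    }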