Fix some warnings: 'redundant type from array, slice, or map composite literal' (#7109)

Signed-off-by: ZouYu <zouy.fnst@cn.fujitsu.com>
This commit is contained in:
ZouYu 2020-04-15 18:17:41 +08:00 committed by GitHub
parent 386aea7774
commit 2b7437d60e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
14 changed files with 64 additions and 62 deletions

View file

@@ -231,7 +231,7 @@ func (t *testRunner) requireTargetGroups(expected, got []*targetgroup.Group) {
// validTg() maps to fixtures/valid.{json,yml}. // validTg() maps to fixtures/valid.{json,yml}.
func validTg(file string) []*targetgroup.Group { func validTg(file string) []*targetgroup.Group {
return []*targetgroup.Group{ return []*targetgroup.Group{
&targetgroup.Group{ {
Targets: []model.LabelSet{ Targets: []model.LabelSet{
{ {
model.AddressLabel: model.LabelValue("localhost:9090"), model.AddressLabel: model.LabelValue("localhost:9090"),
@@ -246,7 +246,7 @@ func validTg(file string) []*targetgroup.Group {
}, },
Source: fileSource(file, 0), Source: fileSource(file, 0),
}, },
&targetgroup.Group{ {
Targets: []model.LabelSet{ Targets: []model.LabelSet{
{ {
model.AddressLabel: model.LabelValue("my.domain"), model.AddressLabel: model.LabelValue("my.domain"),
@@ -263,7 +263,7 @@ func validTg(file string) []*targetgroup.Group {
// valid2Tg() maps to fixtures/valid2.{json,yml}. // valid2Tg() maps to fixtures/valid2.{json,yml}.
func valid2Tg(file string) []*targetgroup.Group { func valid2Tg(file string) []*targetgroup.Group {
return []*targetgroup.Group{ return []*targetgroup.Group{
&targetgroup.Group{ {
Targets: []model.LabelSet{ Targets: []model.LabelSet{
{ {
model.AddressLabel: model.LabelValue("my.domain"), model.AddressLabel: model.LabelValue("my.domain"),
@@ -274,7 +274,7 @@ func valid2Tg(file string) []*targetgroup.Group {
}, },
Source: fileSource(file, 0), Source: fileSource(file, 0),
}, },
&targetgroup.Group{ {
Targets: []model.LabelSet{ Targets: []model.LabelSet{
{ {
model.AddressLabel: model.LabelValue("localhost:9090"), model.AddressLabel: model.LabelValue("localhost:9090"),
@@ -287,7 +287,7 @@ func valid2Tg(file string) []*targetgroup.Group {
}, },
Source: fileSource(file, 1), Source: fileSource(file, 1),
}, },
&targetgroup.Group{ {
Targets: []model.LabelSet{ Targets: []model.LabelSet{
{ {
model.AddressLabel: model.LabelValue("example.org:443"), model.AddressLabel: model.LabelValue("example.org:443"),
@@ -431,7 +431,7 @@ func TestUpdateFileWithPartialWrites(t *testing.T) {
runner.appendString(sdFile, `: ["localhost:9091"]`) runner.appendString(sdFile, `: ["localhost:9091"]`)
runner.requireUpdate(ref, runner.requireUpdate(ref,
[]*targetgroup.Group{ []*targetgroup.Group{
&targetgroup.Group{ {
Targets: []model.LabelSet{ Targets: []model.LabelSet{
{ {
model.AddressLabel: model.LabelValue("localhost:9091"), model.AddressLabel: model.LabelValue("localhost:9091"),
@@ -442,7 +442,7 @@ func TestUpdateFileWithPartialWrites(t *testing.T) {
}, },
Source: fileSource(sdFile, 0), Source: fileSource(sdFile, 0),
}, },
&targetgroup.Group{ {
Source: fileSource(sdFile, 1), Source: fileSource(sdFile, 1),
}, },
}, },
@@ -467,10 +467,10 @@ func TestRemoveFile(t *testing.T) {
runner.requireUpdate( runner.requireUpdate(
ref, ref,
[]*targetgroup.Group{ []*targetgroup.Group{
&targetgroup.Group{ {
Source: fileSource(sdFile, 0), Source: fileSource(sdFile, 0),
}, },
&targetgroup.Group{ {
Source: fileSource(sdFile, 1), Source: fileSource(sdFile, 1),
}}, }},
) )

View file

@@ -38,8 +38,8 @@ func TestTargetGroupStrictJsonUnmarshal(t *testing.T) {
json: ` {"labels": {"my":"label"},"targets": ["localhost:9090","localhost:9091"]}`, json: ` {"labels": {"my":"label"},"targets": ["localhost:9090","localhost:9091"]}`,
expectedReply: nil, expectedReply: nil,
expectedGroup: Group{Targets: []model.LabelSet{ expectedGroup: Group{Targets: []model.LabelSet{
model.LabelSet{"__address__": "localhost:9090"}, {"__address__": "localhost:9090"},
model.LabelSet{"__address__": "localhost:9091"}}, Labels: model.LabelSet{"my": "label"}}, {"__address__": "localhost:9091"}}, Labels: model.LabelSet{"my": "label"}},
}, },
{ {
json: ` {"label": {},"targets": []}`, json: ` {"label": {},"targets": []}`,
@@ -83,8 +83,8 @@ func TestTargetGroupYamlMarshal(t *testing.T) {
{ {
// targets only exposes addresses. // targets only exposes addresses.
group: Group{Targets: []model.LabelSet{ group: Group{Targets: []model.LabelSet{
model.LabelSet{"__address__": "localhost:9090"}, {"__address__": "localhost:9090"},
model.LabelSet{"__address__": "localhost:9091"}}, {"__address__": "localhost:9091"}},
Labels: model.LabelSet{"foo": "bar", "bar": "baz"}}, Labels: model.LabelSet{"foo": "bar", "bar": "baz"}},
expectedYaml: "targets:\n- localhost:9090\n- localhost:9091\nlabels:\n bar: baz\n foo: bar\n", expectedYaml: "targets:\n- localhost:9090\n- localhost:9091\nlabels:\n bar: baz\n foo: bar\n",
expectedErr: nil, expectedErr: nil,
@@ -120,8 +120,8 @@ func TestTargetGroupYamlUnmarshal(t *testing.T) {
yaml: "labels:\n my: label\ntargets:\n ['localhost:9090', 'localhost:9191']", yaml: "labels:\n my: label\ntargets:\n ['localhost:9090', 'localhost:9191']",
expectedReply: nil, expectedReply: nil,
expectedGroup: Group{Targets: []model.LabelSet{ expectedGroup: Group{Targets: []model.LabelSet{
model.LabelSet{"__address__": "localhost:9090"}, {"__address__": "localhost:9090"},
model.LabelSet{"__address__": "localhost:9191"}}, Labels: model.LabelSet{"my": "label"}}, {"__address__": "localhost:9191"}}, Labels: model.LabelSet{"my": "label"}},
}, },
{ {
// incorrect syntax. // incorrect syntax.
@@ -143,8 +143,8 @@ func TestString(t *testing.T) {
// String() should return only the source, regardless of other attributes. // String() should return only the source, regardless of other attributes.
group1 := group1 :=
Group{Targets: []model.LabelSet{ Group{Targets: []model.LabelSet{
model.LabelSet{"__address__": "localhost:9090"}, {"__address__": "localhost:9090"},
model.LabelSet{"__address__": "localhost:9091"}}, {"__address__": "localhost:9091"}},
Source: "<source>", Source: "<source>",
Labels: model.LabelSet{"foo": "bar", "bar": "baz"}} Labels: model.LabelSet{"foo": "bar", "bar": "baz"}}
group2 := group2 :=

View file

@@ -654,7 +654,7 @@ func TestCopyState(t *testing.T) {
testutil.Equals(t, want, newGroup.seriesInPreviousEval) testutil.Equals(t, want, newGroup.seriesInPreviousEval)
testutil.Equals(t, oldGroup.rules[0], newGroup.rules[3]) testutil.Equals(t, oldGroup.rules[0], newGroup.rules[3])
testutil.Equals(t, oldGroup.evaluationDuration, newGroup.evaluationDuration) testutil.Equals(t, oldGroup.evaluationDuration, newGroup.evaluationDuration)
testutil.Equals(t, []labels.Labels{labels.Labels{{Name: "l1", Value: "v3"}}}, newGroup.staleSeries) testutil.Equals(t, []labels.Labels{{{Name: "l1", Value: "v3"}}}, newGroup.staleSeries)
} }
func TestDeletedRuleMarkedStale(t *testing.T) { func TestDeletedRuleMarkedStale(t *testing.T) {

View file

@@ -1669,37 +1669,37 @@ func TestScrapeLoopDiscardUnnamedMetrics(t *testing.T) {
func TestReusableConfig(t *testing.T) { func TestReusableConfig(t *testing.T) {
variants := []*config.ScrapeConfig{ variants := []*config.ScrapeConfig{
&config.ScrapeConfig{ {
JobName: "prometheus", JobName: "prometheus",
ScrapeTimeout: model.Duration(15 * time.Second), ScrapeTimeout: model.Duration(15 * time.Second),
}, },
&config.ScrapeConfig{ {
JobName: "httpd", JobName: "httpd",
ScrapeTimeout: model.Duration(15 * time.Second), ScrapeTimeout: model.Duration(15 * time.Second),
}, },
&config.ScrapeConfig{ {
JobName: "prometheus", JobName: "prometheus",
ScrapeTimeout: model.Duration(5 * time.Second), ScrapeTimeout: model.Duration(5 * time.Second),
}, },
&config.ScrapeConfig{ {
JobName: "prometheus", JobName: "prometheus",
MetricsPath: "/metrics", MetricsPath: "/metrics",
}, },
&config.ScrapeConfig{ {
JobName: "prometheus", JobName: "prometheus",
MetricsPath: "/metrics2", MetricsPath: "/metrics2",
}, },
&config.ScrapeConfig{ {
JobName: "prometheus", JobName: "prometheus",
ScrapeTimeout: model.Duration(5 * time.Second), ScrapeTimeout: model.Duration(5 * time.Second),
MetricsPath: "/metrics2", MetricsPath: "/metrics2",
}, },
&config.ScrapeConfig{ {
JobName: "prometheus", JobName: "prometheus",
ScrapeInterval: model.Duration(5 * time.Second), ScrapeInterval: model.Duration(5 * time.Second),
MetricsPath: "/metrics2", MetricsPath: "/metrics2",
}, },
&config.ScrapeConfig{ {
JobName: "prometheus", JobName: "prometheus",
ScrapeInterval: model.Duration(5 * time.Second), ScrapeInterval: model.Duration(5 * time.Second),
SampleLimit: 1000, SampleLimit: 1000,
@@ -1708,18 +1708,18 @@ func TestReusableConfig(t *testing.T) {
} }
match := [][]int{ match := [][]int{
[]int{0, 2}, {0, 2},
[]int{4, 5}, {4, 5},
[]int{4, 6}, {4, 6},
[]int{4, 7}, {4, 7},
[]int{5, 6}, {5, 6},
[]int{5, 7}, {5, 7},
[]int{6, 7}, {6, 7},
} }
noMatch := [][]int{ noMatch := [][]int{
[]int{1, 2}, {1, 2},
[]int{0, 4}, {0, 4},
[]int{3, 4}, {3, 4},
} }
for i, m := range match { for i, m := range match {

View file

@@ -10,6 +10,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
package remote package remote
import ( import (

View file

@@ -39,7 +39,7 @@ func TestChunkedReaderCanReadFromChunkedWriter(t *testing.T) {
[]byte("test2"), []byte("test2"),
[]byte("test3"), []byte("test3"),
[]byte("test4"), []byte("test4"),
[]byte{}, // This is ignored by writer. {}, // This is ignored by writer.
[]byte("test5-after-empty"), []byte("test5-after-empty"),
} }

View file

@@ -164,11 +164,11 @@ func TestConcreteSeriesClonesLabels(t *testing.T) {
func TestFromQueryResultWithDuplicates(t *testing.T) { func TestFromQueryResultWithDuplicates(t *testing.T) {
ts1 := prompb.TimeSeries{ ts1 := prompb.TimeSeries{
Labels: []prompb.Label{ Labels: []prompb.Label{
prompb.Label{Name: "foo", Value: "bar"}, {Name: "foo", Value: "bar"},
prompb.Label{Name: "foo", Value: "def"}, {Name: "foo", Value: "def"},
}, },
Samples: []prompb.Sample{ Samples: []prompb.Sample{
prompb.Sample{Value: 0.0, Timestamp: 0}, {Value: 0.0, Timestamp: 0},
}, },
} }

View file

@@ -292,14 +292,14 @@ func TestReadIndexFormatV1(t *testing.T) {
q, err := NewBlockQuerier(block, 0, 1000) q, err := NewBlockQuerier(block, 0, 1000)
testutil.Ok(t, err) testutil.Ok(t, err)
testutil.Equals(t, query(t, q, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")), testutil.Equals(t, query(t, q, labels.MustNewMatcher(labels.MatchEqual, "foo", "bar")),
map[string][]tsdbutil.Sample{`{foo="bar"}`: []tsdbutil.Sample{sample{t: 1, v: 2}}}) map[string][]tsdbutil.Sample{`{foo="bar"}`: {sample{t: 1, v: 2}}})
q, err = NewBlockQuerier(block, 0, 1000) q, err = NewBlockQuerier(block, 0, 1000)
testutil.Ok(t, err) testutil.Ok(t, err)
testutil.Equals(t, query(t, q, labels.MustNewMatcher(labels.MatchNotRegexp, "foo", "^.?$")), testutil.Equals(t, query(t, q, labels.MustNewMatcher(labels.MatchNotRegexp, "foo", "^.?$")),
map[string][]tsdbutil.Sample{ map[string][]tsdbutil.Sample{
`{foo="bar"}`: []tsdbutil.Sample{sample{t: 1, v: 2}}, `{foo="bar"}`: {sample{t: 1, v: 2}},
`{foo="baz"}`: []tsdbutil.Sample{sample{t: 3, v: 4}}, `{foo="baz"}`: {sample{t: 3, v: 4}},
}) })
} }

View file

@@ -2619,7 +2619,7 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) {
testutil.Ok(t, err) testutil.Ok(t, err)
_, seriesSet, err = expandSeriesSet(ss) _, seriesSet, err = expandSeriesSet(ss)
testutil.Ok(t, err) testutil.Ok(t, err)
testutil.Equals(t, map[string][]sample{`{foo="bar"}`: []sample{}}, seriesSet) testutil.Equals(t, map[string][]sample{`{foo="bar"}`: {}}, seriesSet)
querierAfterCommit, err := db.Querier(context.Background(), 0, 1000000) querierAfterCommit, err := db.Querier(context.Background(), 0, 1000000)
testutil.Ok(t, err) testutil.Ok(t, err)
@@ -2630,7 +2630,7 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) {
testutil.Ok(t, err) testutil.Ok(t, err)
_, seriesSet, err = expandSeriesSet(ss) _, seriesSet, err = expandSeriesSet(ss)
testutil.Ok(t, err) testutil.Ok(t, err)
testutil.Equals(t, map[string][]sample{`{foo="bar"}`: []sample{{t: 0, v: 0}}}, seriesSet) testutil.Equals(t, map[string][]sample{`{foo="bar"}`: {{t: 0, v: 0}}}, seriesSet)
} }
// TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and // TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and
@@ -2653,7 +2653,7 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
// all chunks should fit in a single segment. // all chunks should fit in a single segment.
{ {
chks: [][]chunks.Meta{ chks: [][]chunks.Meta{
[]chunks.Meta{ {
chk1, chk1,
chk2, chk2,
chk3, chk3,
@@ -2666,7 +2666,7 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
// 1:Two chunks can fit in a single segment so the last one should result in a new segment. // 1:Two chunks can fit in a single segment so the last one should result in a new segment.
{ {
chks: [][]chunks.Meta{ chks: [][]chunks.Meta{
[]chunks.Meta{ {
chk1, chk1,
chk2, chk2,
chk3, chk3,
@@ -2682,7 +2682,7 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
// the last segment should still create a new segment. // the last segment should still create a new segment.
{ {
chks: [][]chunks.Meta{ chks: [][]chunks.Meta{
[]chunks.Meta{ {
chk1, chk1,
chk2, chk2,
chk3, chk3,
@@ -2696,7 +2696,7 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
// it should still be written by ignoring the max segment size. // it should still be written by ignoring the max segment size.
{ {
chks: [][]chunks.Meta{ chks: [][]chunks.Meta{
[]chunks.Meta{ {
chk1, chk1,
}, },
}, },
@@ -2709,7 +2709,7 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
// Each segment will hold a single chunk. // Each segment will hold a single chunk.
{ {
chks: [][]chunks.Meta{ chks: [][]chunks.Meta{
[]chunks.Meta{ {
chk1, chk1,
chk2, chk2,
chk3, chk3,
@@ -2722,12 +2722,12 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
// 5:Adding multiple batches of chunks. // 5:Adding multiple batches of chunks.
{ {
chks: [][]chunks.Meta{ chks: [][]chunks.Meta{
[]chunks.Meta{ {
chk1, chk1,
chk2, chk2,
chk3, chk3,
}, },
[]chunks.Meta{ {
chk4, chk4,
chk5, chk5,
}, },
@@ -2739,14 +2739,14 @@ func TestChunkWriter_ReadAfterWrite(t *testing.T) {
// 6:Adding multiple batches of chunks. // 6:Adding multiple batches of chunks.
{ {
chks: [][]chunks.Meta{ chks: [][]chunks.Meta{
[]chunks.Meta{ {
chk1, chk1,
}, },
[]chunks.Meta{ {
chk2, chk2,
chk3, chk3,
}, },
[]chunks.Meta{ {
chk4, chk4,
}, },
}, },

View file

@@ -234,8 +234,8 @@ func TestIndexRW_Postings(t *testing.T) {
return d.Err() return d.Err()
})) }))
testutil.Equals(t, map[string][]string{ testutil.Equals(t, map[string][]string{
"a": []string{"1"}, "a": {"1"},
"b": []string{"1", "2", "3", "4"}, "b": {"1", "2", "3", "4"},
}, labelIndices) }, labelIndices)
testutil.Ok(t, ir.Close()) testutil.Ok(t, ir.Close())

View file

@@ -852,7 +852,7 @@ func TestMemPostings_Delete(t *testing.T) {
before := p.Get(allPostingsKey.Name, allPostingsKey.Value) before := p.Get(allPostingsKey.Name, allPostingsKey.Value)
p.Delete(map[uint64]struct{}{ p.Delete(map[uint64]struct{}{
2: struct{}{}, 2: {},
}) })
after := p.Get(allPostingsKey.Name, allPostingsKey.Value) after := p.Get(allPostingsKey.Name, allPostingsKey.Value)

View file

@@ -10,6 +10,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and // See the License for the specific language governing permissions and
// limitations under the License. // limitations under the License.
package index package index
import ( import (

View file

@@ -461,9 +461,9 @@ func TestBlockQuerierDelete(t *testing.T) {
}, },
}, },
tombstones: tombstones.NewTestMemTombstones([]tombstones.Intervals{ tombstones: tombstones.NewTestMemTombstones([]tombstones.Intervals{
tombstones.Intervals{{Mint: 1, Maxt: 3}}, {{Mint: 1, Maxt: 3}},
tombstones.Intervals{{Mint: 1, Maxt: 3}, {Mint: 6, Maxt: 10}}, {{Mint: 1, Maxt: 3}, {Mint: 6, Maxt: 10}},
tombstones.Intervals{{Mint: 6, Maxt: 10}}, {{Mint: 6, Maxt: 10}},
}), }),
queries: []query{ queries: []query{
{ {

View file

@@ -739,7 +739,7 @@ func TestLintUnitAbbreviations(t *testing.T) {
%s 10 %s 10
`, n, n, n), `, n, n, n),
problems: []promlint.Problem{ problems: []promlint.Problem{
promlint.Problem{ {
Metric: n, Metric: n,
Text: "metric names should not contain abbreviated units", Text: "metric names should not contain abbreviated units",
}, },