Merge pull request #5464 from prometheus/rel291

Prepare 2.9.1
Brian Brazil 2019-04-16 14:52:27 +01:00 committed by GitHub
commit ad71f2785f
10 changed files with 84 additions and 52 deletions

CHANGELOG.md

@@ -1,3 +1,8 @@
+## 2.9.1 / 2019-04-16
+
+* [BUGFIX] Discovery/kubernetes: fix missing label sanitization #5462
+* [BUGFIX] Remote_write: Prevent reshard concurrent with calling stop #5460
+
 ## 2.9.0 / 2019-04-15
 
 This release uses Go 1.12, which includes a change in how memory is released

VERSION

@@ -1 +1 @@
-2.9.0
+2.9.1

discovery/kubernetes/endpoints_test.go

@@ -357,7 +357,7 @@ func TestEndpointsDiscoveryWithService(t *testing.T) {
 Name: "testendpoints",
 Namespace: "default",
 Labels: map[string]string{
-"app": "test",
+"app/name": "test",
 },
 },
 }
@@ -389,8 +389,8 @@ func TestEndpointsDiscoveryWithService(t *testing.T) {
 Labels: model.LabelSet{
 "__meta_kubernetes_namespace": "default",
 "__meta_kubernetes_endpoints_name": "testendpoints",
-"__meta_kubernetes_service_label_app": "test",
-"__meta_kubernetes_service_labelpresent_app": "true",
+"__meta_kubernetes_service_label_app_name": "test",
+"__meta_kubernetes_service_labelpresent_app_name": "true",
 "__meta_kubernetes_service_name": "testendpoints",
 },
 Source: "endpoints/default/testendpoints",
@@ -410,7 +410,7 @@ func TestEndpointsDiscoveryWithServiceUpdate(t *testing.T) {
 Name: "testendpoints",
 Namespace: "default",
 Labels: map[string]string{
-"app": "test",
+"app/name": "test",
 },
 },
 }
@@ -422,7 +422,7 @@ func TestEndpointsDiscoveryWithServiceUpdate(t *testing.T) {
 Name: "testendpoints",
 Namespace: "default",
 Labels: map[string]string{
-"app": "svc",
+"app/name": "svc",
 "component": "testing",
 },
 },
@@ -455,8 +455,8 @@ func TestEndpointsDiscoveryWithServiceUpdate(t *testing.T) {
 Labels: model.LabelSet{
 "__meta_kubernetes_namespace": "default",
 "__meta_kubernetes_endpoints_name": "testendpoints",
-"__meta_kubernetes_service_label_app": "svc",
-"__meta_kubernetes_service_labelpresent_app": "true",
+"__meta_kubernetes_service_label_app_name": "svc",
+"__meta_kubernetes_service_labelpresent_app_name": "true",
 "__meta_kubernetes_service_name": "testendpoints",
 "__meta_kubernetes_service_label_component": "testing",
 "__meta_kubernetes_service_labelpresent_component": "true",

discovery/kubernetes/ingress_test.go

@@ -36,8 +36,8 @@ func makeIngress(tls TLSMode) *v1beta1.Ingress {
 ObjectMeta: metav1.ObjectMeta{
 Name: "testingress",
 Namespace: "default",
-Labels: map[string]string{"testlabel": "testvalue"},
-Annotations: map[string]string{"testannotation": "testannotationvalue"},
+Labels: map[string]string{"test/label": "testvalue"},
+Annotations: map[string]string{"test/annotation": "testannotationvalue"},
 },
 Spec: v1beta1.IngressSpec{
 TLS: nil,
@@ -120,10 +120,10 @@ func expectedTargetGroups(ns string, tls TLSMode) map[string]*targetgroup.Group
 Labels: model.LabelSet{
 "__meta_kubernetes_ingress_name": "testingress",
 "__meta_kubernetes_namespace": lv(ns),
-"__meta_kubernetes_ingress_label_testlabel": "testvalue",
-"__meta_kubernetes_ingress_labelpresent_testlabel": "true",
-"__meta_kubernetes_ingress_annotation_testannotation": "testannotationvalue",
-"__meta_kubernetes_ingress_annotationpresent_testannotation": "true",
+"__meta_kubernetes_ingress_label_test_label": "testvalue",
+"__meta_kubernetes_ingress_labelpresent_test_label": "true",
+"__meta_kubernetes_ingress_annotation_test_annotation": "testannotationvalue",
+"__meta_kubernetes_ingress_annotationpresent_test_annotation": "true",
 },
 Source: key,
 },

discovery/kubernetes/node_test.go

@@ -59,8 +59,8 @@ func TestNodeDiscoveryBeforeStart(t *testing.T) {
 obj := makeNode(
 "test",
 "1.2.3.4",
-map[string]string{"testlabel": "testvalue"},
-map[string]string{"testannotation": "testannotationvalue"},
+map[string]string{"test-label": "testvalue"},
+map[string]string{"test-annotation": "testannotationvalue"},
 )
 c.CoreV1().Nodes().Create(obj)
 },
@@ -76,10 +76,10 @@ func TestNodeDiscoveryBeforeStart(t *testing.T) {
 },
 Labels: model.LabelSet{
 "__meta_kubernetes_node_name": "test",
-"__meta_kubernetes_node_label_testlabel": "testvalue",
-"__meta_kubernetes_node_labelpresent_testlabel": "true",
-"__meta_kubernetes_node_annotation_testannotation": "testannotationvalue",
-"__meta_kubernetes_node_annotationpresent_testannotation": "true",
+"__meta_kubernetes_node_label_test_label": "testvalue",
+"__meta_kubernetes_node_labelpresent_test_label": "true",
+"__meta_kubernetes_node_annotation_test_annotation": "testannotationvalue",
+"__meta_kubernetes_node_annotationpresent_test_annotation": "true",
 },
 Source: "node/test",
 },

discovery/kubernetes/pod.go

@@ -188,7 +188,7 @@ func podLabels(pod *apiv1.Pod) model.LabelSet {
 for k, v := range pod.Labels {
 ln := strutil.SanitizeLabelName(k)
-ls[model.LabelName(podLabelPrefix+k)] = lv(v)
+ls[model.LabelName(podLabelPrefix+ln)] = lv(v)
 ls[model.LabelName(podLabelPresentPrefix+ln)] = presentValue
 }
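The one-character change above is the heart of #5462: `ln` already holds the sanitized key, but the value label was still built from the raw key `k`, so a Kubernetes label such as `app/name` produced an invalid `__meta_kubernetes_pod_label_*` name while its `labelpresent` twin was sanitized. A rough, self-contained sketch of the sanitization behaviour (the real helper is `strutil.SanitizeLabelName`; the regexp here is an assumption about its implementation):

```go
package main

import (
	"fmt"
	"regexp"
)

// invalidLabelChars is an assumed equivalent of the pattern behind
// strutil.SanitizeLabelName: every character outside [a-zA-Z0-9_] is replaced.
var invalidLabelChars = regexp.MustCompile(`[^a-zA-Z0-9_]`)

// sanitizeLabelName is a stand-in for strutil.SanitizeLabelName.
func sanitizeLabelName(name string) string {
	return invalidLabelChars.ReplaceAllString(name, "_")
}

func main() {
	// A Kubernetes label key like "app/name" becomes "app_name", which is why
	// the expected meta labels in the tests change from ..._label_app to
	// ..._label_app_name.
	fmt.Println("__meta_kubernetes_pod_label_" + sanitizeLabelName("app/name"))
}
```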

discovery/kubernetes/pod_test.go

@@ -33,8 +33,8 @@ func makeMultiPortPods() *v1.Pod {
 ObjectMeta: metav1.ObjectMeta{
 Name: "testpod",
 Namespace: "default",
-Labels: map[string]string{"testlabel": "testvalue"},
-Annotations: map[string]string{"testannotation": "testannotationvalue"},
+Labels: map[string]string{"test/label": "testvalue"},
+Annotations: map[string]string{"test/annotation": "testannotationvalue"},
 UID: types.UID("abc123"),
 OwnerReferences: []metav1.OwnerReference{
 {
@@ -180,10 +180,10 @@ func TestPodDiscoveryBeforeRun(t *testing.T) {
 Labels: model.LabelSet{
 "__meta_kubernetes_pod_name": "testpod",
 "__meta_kubernetes_namespace": "default",
-"__meta_kubernetes_pod_label_testlabel": "testvalue",
-"__meta_kubernetes_pod_labelpresent_testlabel": "true",
-"__meta_kubernetes_pod_annotation_testannotation": "testannotationvalue",
-"__meta_kubernetes_pod_annotationpresent_testannotation": "true",
+"__meta_kubernetes_pod_label_test_label": "testvalue",
+"__meta_kubernetes_pod_labelpresent_test_label": "true",
+"__meta_kubernetes_pod_annotation_test_annotation": "testannotationvalue",
+"__meta_kubernetes_pod_annotationpresent_test_annotation": "true",
 "__meta_kubernetes_pod_node_name": "testnode",
 "__meta_kubernetes_pod_ip": "1.2.3.4",
 "__meta_kubernetes_pod_host_ip": "2.3.4.5",

discovery/kubernetes/service_test.go

@@ -28,8 +28,8 @@ func makeMultiPortService() *v1.Service {
 ObjectMeta: metav1.ObjectMeta{
 Name: "testservice",
 Namespace: "default",
-Labels: map[string]string{"testlabel": "testvalue"},
-Annotations: map[string]string{"testannotation": "testannotationvalue"},
+Labels: map[string]string{"test-label": "testvalue"},
+Annotations: map[string]string{"test-annotation": "testannotationvalue"},
 },
 Spec: v1.ServiceSpec{
 Ports: []v1.ServicePort{
@@ -188,10 +188,10 @@ func TestServiceDiscoveryUpdate(t *testing.T) {
 Labels: model.LabelSet{
 "__meta_kubernetes_service_name": "testservice",
 "__meta_kubernetes_namespace": "default",
-"__meta_kubernetes_service_label_testlabel": "testvalue",
-"__meta_kubernetes_service_labelpresent_testlabel": "true",
-"__meta_kubernetes_service_annotation_testannotation": "testannotationvalue",
-"__meta_kubernetes_service_annotationpresent_testannotation": "true",
+"__meta_kubernetes_service_label_test_label": "testvalue",
+"__meta_kubernetes_service_labelpresent_test_label": "true",
+"__meta_kubernetes_service_annotation_test_annotation": "testannotationvalue",
+"__meta_kubernetes_service_annotationpresent_test_annotation": "true",
 },
 Source: "svc/default/testservice",
 },

storage/remote/queue_manager.go

@@ -322,9 +322,12 @@ func (t *QueueManager) Stop() {
 defer level.Info(t.logger).Log("msg", "Remote storage stopped.")
 close(t.quit)
+t.wg.Wait()
+// Wait for all QueueManager routines to end before stopping shards and WAL watcher. This
+// is to ensure we don't end up executing a reshard and shards.stop() at the same time, which
+// causes a closed channel panic.
 t.shards.stop()
 t.watcher.Stop()
-t.wg.Wait()
 // On shutdown, release the strings in the labels from the intern pool.
 t.seriesMtx.Lock()
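The reordering above is the fix for #5460: a reshard stops and restarts the shards, so if it races with the final `shards.stop()` a send can hit a channel that has already been closed and panic. Waiting for the QueueManager goroutines before tearing the shards down removes that window. A minimal, self-contained sketch of the pattern (the names are illustrative, not the actual QueueManager fields):

```go
package main

import (
	"fmt"
	"sync"
)

func main() {
	reshard := make(chan int)
	quit := make(chan struct{})
	var wg sync.WaitGroup

	// Consumer: applies reshard requests until quit is closed, standing in for
	// the goroutine that drains reshardChan and restarts the shards.
	wg.Add(1)
	go func() {
		defer wg.Done()
		for {
			select {
			case n := <-reshard:
				fmt.Println("resharding to", n)
			case <-quit:
				return
			}
		}
	}()

	// Producer: keeps requesting reshards, standing in for the goroutine that
	// sends desired shard counts on reshardChan.
	wg.Add(1)
	go func() {
		defer wg.Done()
		for i := 1; ; i++ {
			select {
			case reshard <- i:
			case <-quit:
				return
			}
		}
	}()

	// Shutdown: signal quit, then wait for BOTH goroutines to exit before
	// closing the channel they share. Closing reshard while the producer is
	// still running can panic with "send on closed channel" -- the race the
	// commit above removes by moving t.wg.Wait() ahead of t.shards.stop().
	close(quit)
	wg.Wait()
	close(reshard)
	fmt.Println("clean shutdown")
}
```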

storage/remote/queue_manager_test.go

@@ -233,6 +233,30 @@ func TestReshard(t *testing.T) {
 c.waitForExpectedSamples(t)
 }
 
+func TestReshardRaceWithStop(t *testing.T) {
+c := NewTestStorageClient()
+var m *QueueManager
+h := sync.Mutex{}
+
+h.Lock()
+
+go func() {
+for {
+m = NewQueueManager(nil, "", newEWMARate(ewmaWeight, shardUpdateDuration), config.DefaultQueueConfig, nil, nil, c, defaultFlushDeadline)
+m.Start()
+h.Unlock()
+h.Lock()
+m.Stop()
+}
+}()
+
+for i := 1; i < 100; i++ {
+h.Lock()
+m.reshardChan <- i
+h.Unlock()
+}
+}
+
 func createTimeseries(n int) ([]tsdb.RefSample, []tsdb.RefSeries) {
 samples := make([]tsdb.RefSample, 0, n)
 series := make([]tsdb.RefSeries, 0, n)