Merge ec9c9f1383 into 61aa82865d (commit 010bf95e39)
@@ -1443,6 +1443,7 @@ type OTLPConfig struct {
 	PromoteResourceAttributes         []string                  `yaml:"promote_resource_attributes,omitempty"`
 	TranslationStrategy               translationStrategyOption `yaml:"translation_strategy,omitempty"`
 	KeepIdentifyingResourceAttributes bool                      `yaml:"keep_identifying_resource_attributes,omitempty"`
+	ConvertHistogramsToNHCB           bool                      `yaml:"convert_histograms_to_nhcb,omitempty"`
 }

 // UnmarshalYAML implements the yaml.Unmarshaler interface.
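For orientation only (this sketch is not part of the diff): the new yaml tag surfaces under the `otlp:` section of the Prometheus configuration. The snippet below mirrors just this one field in a hypothetical local struct rather than the real OTLPConfig type, and assumes gopkg.in/yaml.v2 for decoding.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

// otlpSection mirrors only the new field for illustration; the real type
// lives in config/config.go.
type otlpSection struct {
	ConvertHistogramsToNHCB bool `yaml:"convert_histograms_to_nhcb,omitempty"`
}

func main() {
	var cfg struct {
		OTLP otlpSection `yaml:"otlp"`
	}
	// Same shape as config/testdata/otlp_convert_histograms_to_nhcb.good.yml.
	data := []byte("otlp:\n  convert_histograms_to_nhcb: true\n")
	if err := yaml.Unmarshal(data, &cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.OTLP.ConvertHistogramsToNHCB) // true
}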
@@ -1569,6 +1569,20 @@ func TestOTLPAllowServiceNameInTargetInfo(t *testing.T) {
 	})
 }

+func TestOTLPConvertHistogramsToNHCB(t *testing.T) {
+	t.Run("good config", func(t *testing.T) {
+		want, err := LoadFile(filepath.Join("testdata", "otlp_convert_histograms_to_nhcb.good.yml"), false, promslog.NewNopLogger())
+		require.NoError(t, err)
+
+		out, err := yaml.Marshal(want)
+		require.NoError(t, err)
+		var got Config
+		require.NoError(t, yaml.UnmarshalStrict(out, &got))
+
+		require.True(t, got.OTLPConfig.ConvertHistogramsToNHCB)
+	})
+}
+
 func TestOTLPAllowUTF8(t *testing.T) {
 	t.Run("good config", func(t *testing.T) {
 		fpath := filepath.Join("testdata", "otlp_allow_utf8.good.yml")
config/testdata/otlp_convert_histograms_to_nhcb.good.yml (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
+otlp:
+  convert_histograms_to_nhcb: true
@@ -403,6 +403,7 @@ type Histogram struct {
 	// timestamp is in ms format, see model/timestamp/timestamp.go for
 	// conversion from time.Time to Prometheus timestamp.
 	Timestamp            int64     `protobuf:"varint,15,opt,name=timestamp,proto3" json:"timestamp,omitempty"`
+	CustomValues         []float64 `protobuf:"fixed64,16,rep,packed,name=custom_values,json=customValues,proto3" json:"custom_values,omitempty"`
 	XXX_NoUnkeyedLiteral struct{}  `json:"-"`
 	XXX_unrecognized     []byte    `json:"-"`
 	XXX_sizecache        int32     `json:"-"`
@@ -588,6 +589,13 @@ func (m *Histogram) GetTimestamp() int64 {
 	return 0
 }

+func (m *Histogram) GetCustomValues() []float64 {
+	if m != nil {
+		return m.CustomValues
+	}
+	return nil
+}
+
 // XXX_OneofWrappers is for the internal use of the proto package.
 func (*Histogram) XXX_OneofWrappers() []interface{} {
 	return []interface{}{
@@ -1146,76 +1154,77 @@ func init() {
 func init() { proto.RegisterFile("types.proto", fileDescriptor_d938547f84707355) }

 var fileDescriptor_d938547f84707355 = []byte{
-	// 1092 bytes of a gzipped FileDescriptorProto
+	// 1114 bytes of a gzipped FileDescriptorProto
 	// [regenerated gzipped descriptor bytes omitted]
 }

 func (m *MetricMetadata) Marshal() (dAtA []byte, err error) {
@@ -1385,6 +1394,18 @@ func (m *Histogram) MarshalToSizedBuffer(dAtA []byte) (int, error) {
 		i -= len(m.XXX_unrecognized)
 		copy(dAtA[i:], m.XXX_unrecognized)
 	}
+	if len(m.CustomValues) > 0 {
+		for iNdEx := len(m.CustomValues) - 1; iNdEx >= 0; iNdEx-- {
+			f1 := math.Float64bits(float64(m.CustomValues[iNdEx]))
+			i -= 8
+			encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(f1))
+		}
+		i = encodeVarintTypes(dAtA, i, uint64(len(m.CustomValues)*8))
+		i--
+		dAtA[i] = 0x1
+		i--
+		dAtA[i] = 0x82
+	}
 	if m.Timestamp != 0 {
 		i = encodeVarintTypes(dAtA, i, uint64(m.Timestamp))
 		i--
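The two tag bytes written above (0x82 and then 0x1, emitted in that order because the generated marshaller fills the buffer back to front) are the varint-encoded protobuf key for field 16 with wire type 2 (length-delimited packed doubles). A small standard-library sketch, not part of the diff, verifying the arithmetic:

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	// Protobuf key = (field number << 3) | wire type.
	// Field 16 (custom_values), wire type 2 (length-delimited): 16<<3|2 = 130.
	key := uint64(16<<3 | 2)
	buf := make([]byte, binary.MaxVarintLen64)
	n := binary.PutUvarint(buf, key)
	fmt.Printf("% x\n", buf[:n]) // 82 01 — the 0x82 and 0x1 bytes in the generated code
}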
@@ -1397,30 +1418,30 @@ func (m *Histogram) MarshalToSizedBuffer(dAtA []byte) (int, error) {
 	}
 	if len(m.PositiveCounts) > 0 {
 		for iNdEx := len(m.PositiveCounts) - 1; iNdEx >= 0; iNdEx-- {
-			f1 := math.Float64bits(float64(m.PositiveCounts[iNdEx]))
+			f2 := math.Float64bits(float64(m.PositiveCounts[iNdEx]))
 			i -= 8
-			encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(f1))
+			encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(f2))
 		}
 		i = encodeVarintTypes(dAtA, i, uint64(len(m.PositiveCounts)*8))
 		i--
 		dAtA[i] = 0x6a
 	}
 	if len(m.PositiveDeltas) > 0 {
-		var j2 int
-		dAtA4 := make([]byte, len(m.PositiveDeltas)*10)
+		var j3 int
+		dAtA5 := make([]byte, len(m.PositiveDeltas)*10)
 		for _, num := range m.PositiveDeltas {
-			x3 := (uint64(num) << 1) ^ uint64((num >> 63))
-			for x3 >= 1<<7 {
-				dAtA4[j2] = uint8(uint64(x3)&0x7f | 0x80)
-				j2++
-				x3 >>= 7
+			x4 := (uint64(num) << 1) ^ uint64((num >> 63))
+			for x4 >= 1<<7 {
+				dAtA5[j3] = uint8(uint64(x4)&0x7f | 0x80)
+				j3++
+				x4 >>= 7
 			}
-			dAtA4[j2] = uint8(x3)
-			j2++
+			dAtA5[j3] = uint8(x4)
+			j3++
 		}
-		i -= j2
-		copy(dAtA[i:], dAtA4[:j2])
-		i = encodeVarintTypes(dAtA, i, uint64(j2))
+		i -= j3
+		copy(dAtA[i:], dAtA5[:j3])
+		i = encodeVarintTypes(dAtA, i, uint64(j3))
 		i--
 		dAtA[i] = 0x62
 	}
@@ -1440,30 +1461,30 @@ func (m *Histogram) MarshalToSizedBuffer(dAtA []byte) (int, error) {
 	}
 	if len(m.NegativeCounts) > 0 {
 		for iNdEx := len(m.NegativeCounts) - 1; iNdEx >= 0; iNdEx-- {
-			f5 := math.Float64bits(float64(m.NegativeCounts[iNdEx]))
+			f6 := math.Float64bits(float64(m.NegativeCounts[iNdEx]))
 			i -= 8
-			encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(f5))
+			encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(f6))
 		}
 		i = encodeVarintTypes(dAtA, i, uint64(len(m.NegativeCounts)*8))
 		i--
 		dAtA[i] = 0x52
 	}
 	if len(m.NegativeDeltas) > 0 {
-		var j6 int
-		dAtA8 := make([]byte, len(m.NegativeDeltas)*10)
+		var j7 int
+		dAtA9 := make([]byte, len(m.NegativeDeltas)*10)
 		for _, num := range m.NegativeDeltas {
-			x7 := (uint64(num) << 1) ^ uint64((num >> 63))
-			for x7 >= 1<<7 {
-				dAtA8[j6] = uint8(uint64(x7)&0x7f | 0x80)
-				j6++
-				x7 >>= 7
+			x8 := (uint64(num) << 1) ^ uint64((num >> 63))
+			for x8 >= 1<<7 {
+				dAtA9[j7] = uint8(uint64(x8)&0x7f | 0x80)
+				j7++
+				x8 >>= 7
 			}
-			dAtA8[j6] = uint8(x7)
-			j6++
+			dAtA9[j7] = uint8(x8)
+			j7++
 		}
-		i -= j6
-		copy(dAtA[i:], dAtA8[:j6])
-		i = encodeVarintTypes(dAtA, i, uint64(j6))
+		i -= j7
+		copy(dAtA[i:], dAtA9[:j7])
+		i = encodeVarintTypes(dAtA, i, uint64(j7))
 		i--
 		dAtA[i] = 0x4a
 	}
@@ -2133,6 +2154,9 @@ func (m *Histogram) Size() (n int) {
 	if m.Timestamp != 0 {
 		n += 1 + sovTypes(uint64(m.Timestamp))
 	}
+	if len(m.CustomValues) > 0 {
+		n += 2 + sovTypes(uint64(len(m.CustomValues)*8)) + len(m.CustomValues)*8
+	}
 	if m.XXX_unrecognized != nil {
 		n += len(m.XXX_unrecognized)
 	}
@@ -3248,6 +3272,60 @@ func (m *Histogram) Unmarshal(dAtA []byte) error {
 					break
 				}
 			}
+		case 16:
+			if wireType == 1 {
+				var v uint64
+				if (iNdEx + 8) > l {
+					return io.ErrUnexpectedEOF
+				}
+				v = uint64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
+				iNdEx += 8
+				v2 := float64(math.Float64frombits(v))
+				m.CustomValues = append(m.CustomValues, v2)
+			} else if wireType == 2 {
+				var packedLen int
+				for shift := uint(0); ; shift += 7 {
+					if shift >= 64 {
+						return ErrIntOverflowTypes
+					}
+					if iNdEx >= l {
+						return io.ErrUnexpectedEOF
+					}
+					b := dAtA[iNdEx]
+					iNdEx++
+					packedLen |= int(b&0x7F) << shift
+					if b < 0x80 {
+						break
+					}
+				}
+				if packedLen < 0 {
+					return ErrInvalidLengthTypes
+				}
+				postIndex := iNdEx + packedLen
+				if postIndex < 0 {
+					return ErrInvalidLengthTypes
+				}
+				if postIndex > l {
+					return io.ErrUnexpectedEOF
+				}
+				var elementCount int
+				elementCount = packedLen / 8
+				if elementCount != 0 && len(m.CustomValues) == 0 {
+					m.CustomValues = make([]float64, 0, elementCount)
+				}
+				for iNdEx < postIndex {
+					var v uint64
+					if (iNdEx + 8) > l {
+						return io.ErrUnexpectedEOF
+					}
+					v = uint64(encoding_binary.LittleEndian.Uint64(dAtA[iNdEx:]))
+					iNdEx += 8
+					v2 := float64(math.Float64frombits(v))
+					m.CustomValues = append(m.CustomValues, v2)
+				}
+			} else {
+				return fmt.Errorf("proto: wrong wireType = %d for field CustomValues", wireType)
+			}
 		default:
 			iNdEx = preIndex
 			skippy, err := skipTypes(dAtA[iNdEx:])
@@ -107,6 +107,8 @@ message Histogram {
   // timestamp is in ms format, see model/timestamp/timestamp.go for
   // conversion from time.Time to Prometheus timestamp.
   int64 timestamp = 15;
+
+  repeated double custom_values = 16;
 }

 // A BucketSpan defines a number of consecutive buckets with their
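To illustrate how the new field is meant to be used (a sketch based on the test expectations further down in this diff, not part of the change itself): a native histogram with custom buckets carries the explicit bucket upper bounds in custom_values, the bucket counts as sparse spans plus deltas, and the custom-bucket schema value used by the translator code below. The concrete numbers here match the "convert hist to custom buckets hist" test case.

package main

import (
	"fmt"

	"github.com/prometheus/prometheus/prompb"
)

func main() {
	// Explicit buckets with upper bounds {0, 1} and counts {1, 1}, total count 2,
	// expressed as a native histogram with custom buckets (NHCB).
	h := prompb.Histogram{
		Count:  &prompb.Histogram_CountInt{CountInt: 2},
		Sum:    10.1,
		Schema: -53, // custom-bucket schema used by the translator in this diff
		// Sparse layout over the bucket counts: one span of length 2,
		// deltas {1, 0} encode the counts {1, 1}.
		PositiveSpans:  []prompb.BucketSpan{{Offset: 0, Length: 2}},
		PositiveDeltas: []int64{1, 0},
		// The new field: explicit bucket upper bounds carried alongside the counts.
		CustomValues: []float64{0, 1},
		Timestamp:    500,
	}
	fmt.Println(h.GetCustomValues())
}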
@@ -89,8 +89,8 @@ func exponentialToNativeHistogram(p pmetric.ExponentialHistogramDataPoint) (prom
 		scale = 8
 	}

-	pSpans, pDeltas := convertBucketsLayout(p.Positive(), scaleDown)
-	nSpans, nDeltas := convertBucketsLayout(p.Negative(), scaleDown)
+	pSpans, pDeltas := convertBucketsLayout(p.Positive().BucketCounts().AsRaw(), p.Positive().Offset(), scaleDown, true)
+	nSpans, nDeltas := convertBucketsLayout(p.Negative().BucketCounts().AsRaw(), p.Negative().Offset(), scaleDown, true)

 	h := prompb.Histogram{
 		// The counter reset detection must be compatible with Prometheus to
@@ -133,19 +133,25 @@ func exponentialToNativeHistogram(p pmetric.ExponentialHistogramDataPoint) (prom
 	return h, annots, nil
 }

-// convertBucketsLayout translates OTel Exponential Histogram dense buckets
-// representation to Prometheus Native Histogram sparse bucket representation.
+// convertBucketsLayout translates OTel Explicit or Exponential Histogram dense buckets
+// representation to Prometheus Native Histogram sparse bucket representation. This is used
+// for translating Exponential Histograms into Native Histograms, and Explicit Histograms
+// into Native Histograms with Custom Buckets.
 //
 // The translation logic is taken from the client_golang `histogram.go#makeBuckets`
 // function, see `makeBuckets` https://github.com/prometheus/client_golang/blob/main/prometheus/histogram.go
-// The bucket indexes conversion was adjusted, since OTel exp. histogram bucket
+//
+// scaleDown is the factor by which the buckets are scaled down. In other words 2^scaleDown buckets will be merged into one.
+//
+// When converting from OTel Exponential Histograms to Native Histograms, the
+// bucket indexes conversion is adjusted, since OTel exp. histogram bucket
 // index 0 corresponds to the range (1, base] while Prometheus bucket index 0
 // to the range (base 1].
 //
-// scaleDown is the factor by which the buckets are scaled down. In other words 2^scaleDown buckets will be merged into one.
-func convertBucketsLayout(buckets pmetric.ExponentialHistogramDataPointBuckets, scaleDown int32) ([]prompb.BucketSpan, []int64) {
-	bucketCounts := buckets.BucketCounts()
-	if bucketCounts.Len() == 0 {
+// When converting from OTel Explicit Histograms to Native Histograms with Custom Buckets,
+// the bucket indexes are not scaled, and the indices are not adjusted by 1.
+func convertBucketsLayout(bucketCounts []uint64, offset int32, scaleDown int32, adjustOffset bool) ([]prompb.BucketSpan, []int64) {
+	if len(bucketCounts) == 0 {
 		return nil, nil
 	}

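As a rough illustration of the sparse encoding this function produces (a self-contained sketch for the unscaled, single-span case only; the real function also handles gaps and scale-down merging, and this helper name is made up): consecutive non-empty buckets become one span, and counts are stored as deltas from the previous bucket. The output matches the "leading empty buckets" test case added further down.

package main

import "fmt"

// sketchLayout mimics convertBucketsLayout for the simple case of a single
// contiguous run of buckets starting at `offset`, with no scaling and no gaps:
// one span covering all buckets, and counts encoded as deltas.
func sketchLayout(bucketCounts []uint64, offset int32) (spanOffset int32, length uint32, deltas []int64) {
	prev := int64(0)
	for _, c := range bucketCounts {
		deltas = append(deltas, int64(c)-prev)
		prev = int64(c)
	}
	return offset, uint32(len(bucketCounts)), deltas
}

func main() {
	// Explicit buckets {0, 0, 1, 1, 2, 3}: the two leading zero buckets are
	// dropped by getBucketOffset, leaving counts {1, 1, 2, 3} at offset 2.
	off, length, deltas := sketchLayout([]uint64{1, 1, 2, 3}, 2)
	fmt.Println(off, length, deltas) // 2 4 [1 0 1 1]
}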
@@ -164,24 +170,28 @@ func convertBucketsLayout(buckets pmetric.ExponentialHistogramDataPointBuckets,

 	// Let the compiler figure out that this is const during this function by
 	// moving it into a local variable.
-	numBuckets := bucketCounts.Len()
+	numBuckets := len(bucketCounts)
+
+	bucketIdx := offset>>scaleDown + 1
+
+	initialOffset := offset
+	if adjustOffset {
+		initialOffset = initialOffset>>scaleDown + 1
+	}

-	// The offset is scaled and adjusted by 1 as described above.
-	bucketIdx := buckets.Offset()>>scaleDown + 1
 	spans = append(spans, prompb.BucketSpan{
-		Offset: bucketIdx,
+		Offset: initialOffset,
 		Length: 0,
 	})

 	for i := 0; i < numBuckets; i++ {
-		// The offset is scaled and adjusted by 1 as described above.
-		nextBucketIdx := (int32(i)+buckets.Offset())>>scaleDown + 1
+		nextBucketIdx := (int32(i)+offset)>>scaleDown + 1
 		if bucketIdx == nextBucketIdx { // We have not collected enough buckets to merge yet.
-			count += int64(bucketCounts.At(i))
+			count += int64(bucketCounts[i])
 			continue
 		}
 		if count == 0 {
-			count = int64(bucketCounts.At(i))
+			count = int64(bucketCounts[i])
 			continue
 		}

|
@ -202,11 +212,12 @@ func convertBucketsLayout(buckets pmetric.ExponentialHistogramDataPointBuckets,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
appendDelta(count)
|
appendDelta(count)
|
||||||
count = int64(bucketCounts.At(i))
|
count = int64(bucketCounts[i])
|
||||||
bucketIdx = nextBucketIdx
|
bucketIdx = nextBucketIdx
|
||||||
}
|
}
|
||||||
|
|
||||||
// Need to use the last item's index. The offset is scaled and adjusted by 1 as described above.
|
// Need to use the last item's index. The offset is scaled and adjusted by 1 as described above.
|
||||||
gap := (int32(numBuckets)+buckets.Offset()-1)>>scaleDown + 1 - bucketIdx
|
gap := (int32(numBuckets)+offset-1)>>scaleDown + 1 - bucketIdx
|
||||||
if gap > 2 {
|
if gap > 2 {
|
||||||
// We have to create a new span, because we have found a gap
|
// We have to create a new span, because we have found a gap
|
||||||
// of more than two buckets. The constant 2 is copied from the logic in
|
// of more than two buckets. The constant 2 is copied from the logic in
|
||||||
|
@@ -226,3 +237,93 @@ func convertBucketsLayout(buckets pmetric.ExponentialHistogramDataPointBuckets,

 	return spans, deltas
 }
+
+func (c *PrometheusConverter) addCustomBucketsHistogramDataPoints(ctx context.Context, dataPoints pmetric.HistogramDataPointSlice,
+	resource pcommon.Resource, settings Settings, promName string) (annotations.Annotations, error) {
+	var annots annotations.Annotations
+
+	for x := 0; x < dataPoints.Len(); x++ {
+		if err := c.everyN.checkContext(ctx); err != nil {
+			return annots, err
+		}
+
+		pt := dataPoints.At(x)
+
+		histogram, ws, err := explicitHistogramToCustomBucketsHistogram(pt)
+		annots.Merge(ws)
+		if err != nil {
+			return annots, err
+		}
+
+		lbls := createAttributes(
+			resource,
+			pt.Attributes(),
+			settings,
+			nil,
+			true,
+			model.MetricNameLabel,
+			promName,
+		)
+
+		ts, _ := c.getOrCreateTimeSeries(lbls)
+		ts.Histograms = append(ts.Histograms, histogram)
+
+		exemplars, err := getPromExemplars[pmetric.HistogramDataPoint](ctx, &c.everyN, pt)
+		if err != nil {
+			return annots, err
+		}
+		ts.Exemplars = append(ts.Exemplars, exemplars...)
+	}
+
+	return annots, nil
+}
+
+func explicitHistogramToCustomBucketsHistogram(p pmetric.HistogramDataPoint) (prompb.Histogram, annotations.Annotations, error) {
+	var annots annotations.Annotations
+
+	buckets := p.BucketCounts().AsRaw()
+	offset := getBucketOffset(buckets)
+	bucketCounts := buckets[offset:]
+	positiveSpans, positiveDeltas := convertBucketsLayout(bucketCounts, int32(offset), 0, false)
+
+	h := prompb.Histogram{
+		// The counter reset detection must be compatible with Prometheus to
+		// safely set ResetHint to NO. This is not ensured currently.
+		// Sending a sample that triggers counter reset but with ResetHint==NO
+		// would lead to Prometheus panic as it does not double check the hint.
+		// Thus we're explicitly saying UNKNOWN here, which is always safe.
+		// TODO: using created time stamp should be accurate, but we
+		// need to know here if it was used for the detection.
+		// Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/pull/28663#issuecomment-1810577303
+		// Counter reset detection in Prometheus: https://github.com/prometheus/prometheus/blob/f997c72f294c0f18ca13fa06d51889af04135195/tsdb/chunkenc/histogram.go#L232
+		ResetHint: prompb.Histogram_UNKNOWN,
+		Schema:    -53,
+
+		PositiveSpans:  positiveSpans,
+		PositiveDeltas: positiveDeltas,
+		CustomValues:   p.ExplicitBounds().AsRaw(),
+
+		Timestamp: convertTimeStamp(p.Timestamp()),
+	}
+
+	if p.Flags().NoRecordedValue() {
+		h.Sum = math.Float64frombits(value.StaleNaN)
+		h.Count = &prompb.Histogram_CountInt{CountInt: value.StaleNaN}
+	} else {
+		if p.HasSum() {
+			h.Sum = p.Sum()
+		}
+		h.Count = &prompb.Histogram_CountInt{CountInt: p.Count()}
+		if p.Count() == 0 && h.Sum != 0 {
+			annots.Add(fmt.Errorf("histogram data point has zero count, but non-zero sum: %f", h.Sum))
+		}
+	}
+	return h, annots, nil
+}
+
+func getBucketOffset(buckets []uint64) (offset int) {
+	for offset < len(buckets) && buckets[offset] == 0 {
+		offset++
+	}
+	return offset
+}
@@ -380,7 +380,7 @@ func TestConvertBucketsLayout(t *testing.T) {
 	for _, tt := range tests {
 		for scaleDown, wantLayout := range tt.wantLayout {
 			t.Run(fmt.Sprintf("%s-scaleby-%d", tt.name, scaleDown), func(t *testing.T) {
-				gotSpans, gotDeltas := convertBucketsLayout(tt.buckets(), scaleDown)
+				gotSpans, gotDeltas := convertBucketsLayout(tt.buckets().BucketCounts().AsRaw(), tt.buckets().Offset(), scaleDown, true)
 				assert.Equal(t, wantLayout.wantSpans, gotSpans)
 				assert.Equal(t, wantLayout.wantDeltas, gotDeltas)
 			})
@@ -410,7 +410,7 @@ func BenchmarkConvertBucketLayout(b *testing.B) {
 		}
 		b.Run(fmt.Sprintf("gap %d", scenario.gap), func(b *testing.B) {
 			for i := 0; i < b.N; i++ {
-				convertBucketsLayout(buckets, 0)
+				convertBucketsLayout(buckets.BucketCounts().AsRaw(), buckets.Offset(), 0, true)
 			}
 		})
 	}
@@ -582,6 +582,14 @@ func TestExponentialToNativeHistogram(t *testing.T) {
 	}
 }

+func validateHistogramCount(t *testing.T, h pmetric.HistogramDataPoint) {
+	actualCount := uint64(0)
+	for _, bucket := range h.BucketCounts().AsRaw() {
+		actualCount += bucket
+	}
+	require.Equal(t, h.Count(), actualCount, "histogram count mismatch")
+}
+
 func validateExponentialHistogramCount(t *testing.T, h pmetric.ExponentialHistogramDataPoint) {
 	actualCount := uint64(0)
 	for _, bucket := range h.Positive().BucketCounts().AsRaw() {
@@ -772,3 +780,373 @@ func TestPrometheusConverter_addExponentialHistogramDataPoints(t *testing.T) {
 		})
 	}
 }
+
+func TestConvertExplicitHistogramBucketsToNHCBLayout(t *testing.T) {
+	tests := []struct {
+		name       string
+		buckets    []uint64
+		wantLayout expectedBucketLayout
+	}{
+		{
+			name:    "zero offset",
+			buckets: []uint64{4, 3, 2, 1},
+			wantLayout: expectedBucketLayout{
+				wantSpans: []prompb.BucketSpan{
+					{
+						Offset: 0,
+						Length: 4,
+					},
+				},
+				wantDeltas: []int64{4, -1, -1, -1},
+			},
+		},
+		{
+			name:    "leading empty buckets",
+			buckets: []uint64{0, 0, 1, 1, 2, 3},
+			wantLayout: expectedBucketLayout{
+				wantSpans: []prompb.BucketSpan{
+					{
+						Offset: 2,
+						Length: 4,
+					},
+				},
+				wantDeltas: []int64{1, 0, 1, 1},
+			},
+		},
+		{
+			name:    "trailing empty buckets",
+			buckets: []uint64{0, 0, 1, 1, 2, 3, 0, 0}, // TODO: add tests for 3 trailing buckets
+			wantLayout: expectedBucketLayout{
+				wantSpans: []prompb.BucketSpan{
+					{
+						Offset: 2,
+						Length: 6,
+					},
+				},
+				wantDeltas: []int64{1, 0, 1, 1, -3, 0},
+			},
+		},
+		{
+			name:    "bucket gap of 2",
+			buckets: []uint64{1, 2, 0, 0, 2},
+			wantLayout: expectedBucketLayout{
+				wantSpans: []prompb.BucketSpan{
+					{
+						Offset: 0,
+						Length: 5,
+					},
+				},
+				wantDeltas: []int64{1, 1, -2, 0, 2},
+			},
+		},
+		{
+			name:    "bucket gap > 2",
+			buckets: []uint64{1, 2, 0, 0, 0, 2, 4, 4},
+			wantLayout: expectedBucketLayout{
+				wantSpans: []prompb.BucketSpan{
+					{
+						Offset: 0,
+						Length: 2,
+					},
+					{
+						Offset: 3,
+						Length: 3,
+					},
+				},
+				wantDeltas: []int64{1, 1, 0, 2, 0},
+			},
+		},
+		{
+			name:    "multiple bucket gaps",
+			buckets: []uint64{0, 0, 1, 2, 0, 0, 0, 2, 4, 4, 0, 0},
+			wantLayout: expectedBucketLayout{
+				wantSpans: []prompb.BucketSpan{
+					{
+						Offset: 2,
+						Length: 2,
+					},
+					{
+						Offset: 3,
+						Length: 5,
+					},
+				},
+				wantDeltas: []int64{1, 1, 0, 2, 0, -4, 0},
+			},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			buckets := tt.buckets
+			offset := getBucketOffset(buckets)
+			bucketCounts := buckets[offset:]
+
+			gotSpans, gotDeltas := convertBucketsLayout(bucketCounts, int32(offset), 0, false)
+			assert.Equal(t, tt.wantLayout.wantSpans, gotSpans)
+			assert.Equal(t, tt.wantLayout.wantDeltas, gotDeltas)
+		})
+	}
+}
+
+func BenchmarkConvertHistogramBucketsToNHCBLayout(b *testing.B) {
+	scenarios := []struct {
+		gap int
+	}{
+		{gap: 0},
+		{gap: 1},
+		{gap: 2},
+		{gap: 3},
+	}
+
+	for _, scenario := range scenarios {
+		var buckets []uint64
+		for i := 0; i < 1000; i++ {
+			if i%(scenario.gap+1) == 0 {
+				buckets = append(buckets, uint64(10))
+			} else {
+				buckets = append(buckets, uint64(0))
+			}
+		}
+		b.Run(fmt.Sprintf("gap %d", scenario.gap), func(b *testing.B) {
+			for i := 0; i < b.N; i++ {
+				offset := getBucketOffset(buckets)
+				convertBucketsLayout(buckets, int32(offset), 0, false)
+			}
+		})
+	}
+}
+
+func TestHistogramToCustomBucketsHistogram(t *testing.T) {
+	tests := []struct {
+		name           string
+		hist           func() pmetric.HistogramDataPoint
+		wantNativeHist func() prompb.Histogram
+		wantErrMessage string
+	}{
+		{
+			name: "convert hist to custom buckets hist",
+			hist: func() pmetric.HistogramDataPoint {
+				pt := pmetric.NewHistogramDataPoint()
+				pt.SetStartTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(100)))
+				pt.SetTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(500)))
+				pt.SetCount(2)
+				pt.SetSum(10.1)
+
+				pt.BucketCounts().FromRaw([]uint64{1, 1})
+				pt.ExplicitBounds().FromRaw([]float64{0, 1})
+				return pt
+			},
+			wantNativeHist: func() prompb.Histogram {
+				return prompb.Histogram{
+					Count:          &prompb.Histogram_CountInt{CountInt: 2},
+					Sum:            10.1,
+					Schema:         -53,
+					PositiveSpans:  []prompb.BucketSpan{{Offset: 0, Length: 2}},
+					PositiveDeltas: []int64{1, 0},
+					CustomValues:   []float64{0, 1},
+					Timestamp:      500,
+				}
+			},
+		},
+		{
+			name: "convert hist to custom buckets hist with no sum",
+			hist: func() pmetric.HistogramDataPoint {
+				pt := pmetric.NewHistogramDataPoint()
+				pt.SetStartTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(100)))
+				pt.SetTimestamp(pcommon.NewTimestampFromTime(time.UnixMilli(500)))
+				pt.SetCount(4)
+
+				pt.BucketCounts().FromRaw([]uint64{2, 2})
+				pt.ExplicitBounds().FromRaw([]float64{0, 1})
+				return pt
+			},
+			wantNativeHist: func() prompb.Histogram {
+				return prompb.Histogram{
+					Count:          &prompb.Histogram_CountInt{CountInt: 4},
+					Schema:         -53,
+					PositiveSpans:  []prompb.BucketSpan{{Offset: 0, Length: 2}},
+					PositiveDeltas: []int64{2, 0},
+					CustomValues:   []float64{0, 1},
+					Timestamp:      500,
+				}
+			},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			validateHistogramCount(t, tt.hist())
+			got, annots, err := explicitHistogramToCustomBucketsHistogram(tt.hist())
+			if tt.wantErrMessage != "" {
+				assert.ErrorContains(t, err, tt.wantErrMessage)
+				return
+			}
+
+			require.NoError(t, err)
+			require.Empty(t, annots)
+			assert.Equal(t, tt.wantNativeHist(), got)
+			validateNativeHistogramCount(t, got)
+		})
+	}
+}
+
+func TestPrometheusConverter_addCustomBucketsHistogramDataPoints(t *testing.T) {
+	tests := []struct {
+		name       string
+		metric     func() pmetric.Metric
+		wantSeries func() map[uint64]*prompb.TimeSeries
+	}{
+		{
+			name: "histogram data points with same labels",
+			metric: func() pmetric.Metric {
+				metric := pmetric.NewMetric()
+				metric.SetName("test_hist_to_nhcb")
+				metric.SetEmptyHistogram().SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
+
+				pt := metric.Histogram().DataPoints().AppendEmpty()
+				pt.SetCount(3)
+				pt.SetSum(3)
+				pt.BucketCounts().FromRaw([]uint64{2, 0, 1})
+				pt.ExplicitBounds().FromRaw([]float64{5, 10})
+				pt.Exemplars().AppendEmpty().SetDoubleValue(1)
+				pt.Attributes().PutStr("attr", "test_attr")
+
+				pt = metric.Histogram().DataPoints().AppendEmpty()
+				pt.SetCount(11)
+				pt.SetSum(5)
+				pt.BucketCounts().FromRaw([]uint64{3, 8, 0})
+				pt.ExplicitBounds().FromRaw([]float64{0, 1})
+				pt.Exemplars().AppendEmpty().SetDoubleValue(2)
+				pt.Attributes().PutStr("attr", "test_attr")
+
+				return metric
+			},
+			wantSeries: func() map[uint64]*prompb.TimeSeries {
+				labels := []prompb.Label{
+					{Name: model.MetricNameLabel, Value: "test_hist_to_nhcb"},
+					{Name: "attr", Value: "test_attr"},
+				}
+				return map[uint64]*prompb.TimeSeries{
+					timeSeriesSignature(labels): {
+						Labels: labels,
+						Histograms: []prompb.Histogram{
+							{
+								Count:          &prompb.Histogram_CountInt{CountInt: 3},
+								Sum:            3,
+								Schema:         -53,
+								PositiveSpans:  []prompb.BucketSpan{{Offset: 0, Length: 3}},
+								PositiveDeltas: []int64{2, -2, 1},
+								CustomValues:   []float64{5, 10},
+							},
+							{
+								Count:          &prompb.Histogram_CountInt{CountInt: 11},
+								Sum:            5,
+								Schema:         -53,
+								PositiveSpans:  []prompb.BucketSpan{{Offset: 0, Length: 3}},
+								PositiveDeltas: []int64{3, 5, -8},
+								CustomValues:   []float64{0, 1},
+							},
+						},
+						Exemplars: []prompb.Exemplar{
+							{Value: 1},
+							{Value: 2},
+						},
+					},
+				}
+			},
+		},
+		{
+			name: "histogram data points with different labels",
+			metric: func() pmetric.Metric {
+				metric := pmetric.NewMetric()
+				metric.SetName("test_hist_to_nhcb")
+				metric.SetEmptyHistogram().SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
+
+				pt := metric.Histogram().DataPoints().AppendEmpty()
+				pt.SetCount(6)
+				pt.SetSum(3)
+				pt.BucketCounts().FromRaw([]uint64{4, 2})
+				pt.ExplicitBounds().FromRaw([]float64{0, 1})
+				pt.Exemplars().AppendEmpty().SetDoubleValue(1)
+				pt.Attributes().PutStr("attr", "test_attr")
+
+				pt = metric.Histogram().DataPoints().AppendEmpty()
+				pt.SetCount(11)
+				pt.SetSum(5)
+				pt.BucketCounts().FromRaw([]uint64{3, 8})
+				pt.ExplicitBounds().FromRaw([]float64{0, 1})
+				pt.Exemplars().AppendEmpty().SetDoubleValue(2)
+				pt.Attributes().PutStr("attr", "test_attr_two")
+
+				return metric
+			},
+			wantSeries: func() map[uint64]*prompb.TimeSeries {
+				labels := []prompb.Label{
+					{Name: model.MetricNameLabel, Value: "test_hist_to_nhcb"},
+					{Name: "attr", Value: "test_attr"},
+				}
+				labelsAnother := []prompb.Label{
+					{Name: model.MetricNameLabel, Value: "test_hist_to_nhcb"},
+					{Name: "attr", Value: "test_attr_two"},
+				}
+
+				return map[uint64]*prompb.TimeSeries{
+					timeSeriesSignature(labels): {
+						Labels: labels,
+						Histograms: []prompb.Histogram{
+							{
+								Count:          &prompb.Histogram_CountInt{CountInt: 6},
+								Sum:            3,
+								Schema:         -53,
+								PositiveSpans:  []prompb.BucketSpan{{Offset: 0, Length: 2}},
+								PositiveDeltas: []int64{4, -2},
+								CustomValues:   []float64{0, 1},
+							},
+						},
+						Exemplars: []prompb.Exemplar{
+							{Value: 1},
+						},
+					},
+					timeSeriesSignature(labelsAnother): {
+						Labels: labelsAnother,
+						Histograms: []prompb.Histogram{
+							{
+								Count:          &prompb.Histogram_CountInt{CountInt: 11},
+								Sum:            5,
+								Schema:         -53,
+								PositiveSpans:  []prompb.BucketSpan{{Offset: 0, Length: 2}},
+								PositiveDeltas: []int64{3, 5},
+								CustomValues:   []float64{0, 1},
+							},
+						},
+						Exemplars: []prompb.Exemplar{
+							{Value: 2},
+						},
+					},
+				}
+			},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			metric := tt.metric()
+
+			converter := NewPrometheusConverter()
+			annots, err := converter.addCustomBucketsHistogramDataPoints(
+				context.Background(),
+				metric.Histogram().DataPoints(),
+				pcommon.NewResource(),
+				Settings{
+					ExportCreatedMetric:     true,
+					ConvertHistogramsToNHCB: true,
+				},
+				prometheustranslator.BuildCompliantMetricName(metric, "", true),
+			)
+
+			require.NoError(t, err)
+			require.Empty(t, annots)
+
+			assert.Equal(t, tt.wantSeries(), converter.unique)
+			assert.Empty(t, converter.conflicts)
+		})
+	}
+}
@@ -40,6 +40,7 @@ type Settings struct {
 	AllowUTF8                         bool
 	PromoteResourceAttributes         []string
 	KeepIdentifyingResourceAttributes bool
+	ConvertHistogramsToNHCB           bool
 }

 // PrometheusConverter converts from OTel write format to Prometheus remote write format.
@@ -142,12 +143,23 @@ func (c *PrometheusConverter) FromMetrics(ctx context.Context, md pmetric.Metric
 				errs = multierr.Append(errs, fmt.Errorf("empty data points. %s is dropped", metric.Name()))
 				break
 			}
+			if settings.ConvertHistogramsToNHCB {
+				ws, err := c.addCustomBucketsHistogramDataPoints(ctx, dataPoints, resource, settings, promName)
+				annots.Merge(ws)
+				if err != nil {
+					errs = multierr.Append(errs, err)
+					if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
+						return
+					}
+				}
+			} else {
 			if err := c.addHistogramDataPoints(ctx, dataPoints, resource, settings, promName); err != nil {
 				errs = multierr.Append(errs, err)
 				if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
 					return
 				}
 			}
+			}
 		case pmetric.MetricTypeExponentialHistogram:
 			dataPoints := metric.ExponentialHistogram().DataPoints()
 			if dataPoints.Len() == 0 {
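A minimal end-to-end sketch of the new path (modelled on the tests below, not part of the diff; it assumes the prometheusremotewrite package APIs shown in this change): build an OTLP explicit-bucket histogram, convert it with ConvertHistogramsToNHCB enabled, and the resulting series carries a single native-histogram sample instead of classic _bucket/_count/_sum samples.

package prometheusremotewrite

// Hypothetical example for illustration; in practice this would live in a
// _test.go file of the package.

import (
	"context"
	"fmt"
	"time"

	"go.opentelemetry.io/collector/pdata/pcommon"
	"go.opentelemetry.io/collector/pdata/pmetric"
)

func ExampleConvertHistogramsToNHCB() {
	md := pmetric.NewMetrics()
	m := md.ResourceMetrics().AppendEmpty().ScopeMetrics().AppendEmpty().Metrics().AppendEmpty()
	m.SetName("request_duration_seconds")
	m.SetEmptyHistogram().SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)

	dp := m.Histogram().DataPoints().AppendEmpty()
	dp.SetTimestamp(pcommon.NewTimestampFromTime(time.Now()))
	dp.SetCount(3)
	dp.SetSum(1.5)
	dp.BucketCounts().FromRaw([]uint64{2, 1})
	dp.ExplicitBounds().FromRaw([]float64{0.5, 1})

	converter := NewPrometheusConverter()
	_, err := converter.FromMetrics(context.Background(), md, Settings{ConvertHistogramsToNHCB: true})
	if err != nil {
		panic(err)
	}

	series := converter.TimeSeries()
	fmt.Println(len(series[0].Histograms)) // 1 — one NHCB sample, no classic samples
}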
@@ -19,18 +19,17 @@ package prometheusremotewrite
 import (
 	"context"
 	"fmt"
-	"github.com/google/go-cmp/cmp"
-	"github.com/prometheus/prometheus/model/labels"
-	"github.com/prometheus/prometheus/prompb"
-	prometheustranslator "github.com/prometheus/prometheus/storage/remote/otlptranslator/prometheus"
 	"testing"
 	"time"

+	"github.com/google/go-cmp/cmp"
 	"github.com/stretchr/testify/require"
 	"go.opentelemetry.io/collector/pdata/pcommon"
 	"go.opentelemetry.io/collector/pdata/pmetric"
 	"go.opentelemetry.io/collector/pdata/pmetric/pmetricotlp"
+
+	"github.com/prometheus/prometheus/model/labels"
+	"github.com/prometheus/prometheus/prompb"
+	prometheustranslator "github.com/prometheus/prometheus/storage/remote/otlptranslator/prometheus"
 )

 func TestFromMetrics(t *testing.T) {
@@ -95,6 +94,51 @@ func TestFromMetrics(t *testing.T) {
 		})
 	}

+	for _, convertHistogramsToNHCB := range []bool{false, true} {
+		t.Run(fmt.Sprintf("successful/convertHistogramsToNHCB=%v", convertHistogramsToNHCB), func(t *testing.T) {
+			request := pmetricotlp.NewExportRequest()
+			rm := request.Metrics().ResourceMetrics().AppendEmpty()
+			generateAttributes(rm.Resource().Attributes(), "resource", 10)
+
+			metrics := rm.ScopeMetrics().AppendEmpty().Metrics()
+			ts := pcommon.NewTimestampFromTime(time.Now())
+
+			m := metrics.AppendEmpty()
+			m.SetEmptyHistogram()
+			m.SetName("histogram-1")
+			m.Histogram().SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
+			h := m.Histogram().DataPoints().AppendEmpty()
+			h.SetTimestamp(ts)
+
+			h.SetCount(15)
+			h.SetSum(155)
+
+			generateAttributes(h.Attributes(), "series", 1)
+
+			converter := NewPrometheusConverter()
+			annots, err := converter.FromMetrics(
+				context.Background(),
+				request.Metrics(),
+				Settings{ConvertHistogramsToNHCB: convertHistogramsToNHCB},
+			)
+			require.NoError(t, err)
+			require.Empty(t, annots)
+
+			series := converter.TimeSeries()
+
+			if convertHistogramsToNHCB {
+				require.Len(t, series[0].Histograms, 1)
+				require.Len(t, series[0].Samples, 0)
+			} else {
+				require.Len(t, series, 3)
+				for i := range series {
+					require.Len(t, series[i].Samples, 1)
+					require.Nil(t, series[i].Histograms)
+				}
+			}
+		})
+	}
+
 	t.Run("context cancellation", func(t *testing.T) {
 		converter := NewPrometheusConverter()
 		ctx, cancel := context.WithCancel(context.Background())
@@ -151,6 +195,43 @@ func TestFromMetrics(t *testing.T) {
 			"exponential histogram data point has zero count, but non-zero sum: 155.000000",
 		}, ws)
 	})

+	t.Run("explicit histogram to NHCB warnings for zero count and non-zero sum", func(t *testing.T) {
+		request := pmetricotlp.NewExportRequest()
+		rm := request.Metrics().ResourceMetrics().AppendEmpty()
+		generateAttributes(rm.Resource().Attributes(), "resource", 10)
+
+		metrics := rm.ScopeMetrics().AppendEmpty().Metrics()
+		ts := pcommon.NewTimestampFromTime(time.Now())
+
+		for i := 1; i <= 10; i++ {
+			m := metrics.AppendEmpty()
+			m.SetEmptyHistogram()
+			m.SetName(fmt.Sprintf("histogram-%d", i))
+			m.Histogram().SetAggregationTemporality(pmetric.AggregationTemporalityCumulative)
+			h := m.Histogram().DataPoints().AppendEmpty()
+			h.SetTimestamp(ts)
+
+			h.SetCount(0)
+			h.SetSum(155)
+
+			generateAttributes(h.Attributes(), "series", 10)
+		}
+
+		converter := NewPrometheusConverter()
+		annots, err := converter.FromMetrics(
+			context.Background(),
+			request.Metrics(),
+			Settings{ConvertHistogramsToNHCB: true},
+		)
+		require.NoError(t, err)
+		require.NotEmpty(t, annots)
+		ws, infos := annots.AsStrings("", 0, 0)
+		require.Empty(t, infos)
+		require.Equal(t, []string{
+			"histogram data point has zero count, but non-zero sum: 155.000000",
+		}, ws)
+	})
 }

 func BenchmarkPrometheusConverter_FromMetrics(b *testing.B) {
@@ -581,6 +581,7 @@ func (rw *rwExporter) ConsumeMetrics(ctx context.Context, md pmetric.Metrics) er
 		AllowUTF8:                         otlpCfg.TranslationStrategy == config.NoUTF8EscapingWithSuffixes,
 		PromoteResourceAttributes:         otlpCfg.PromoteResourceAttributes,
 		KeepIdentifyingResourceAttributes: otlpCfg.KeepIdentifyingResourceAttributes,
+		ConvertHistogramsToNHCB:           otlpCfg.ConvertHistogramsToNHCB,
 	})
 	if err != nil {
 		rw.logger.Warn("Error translating OTLP metrics to Prometheus write request", "err", err)