util/httputil: reduce heap allocations in newCompressedResponseWriter (#16001)

* util/httputil: Benchmark newCompressedResponseWriter

This benchmark illustrates that newCompressedResponseWriter incurs a
prohibitive amount of heap allocation when handling a request that
contains a malicious Accept-Encoding header.
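
For context, the cost comes from the pre-existing `strings.Split` call, which eagerly materializes one (empty) element per comma before any encoding is inspected. A minimal sketch of the effect, not part of this commit:

```go
package main

import (
	"fmt"
	"net/http"
	"strings"
)

func main() {
	// Worst case: an Accept-Encoding value that is nothing but commas,
	// up to the default request-header size limit (1 MiB).
	malicious := strings.Repeat(",", http.DefaultMaxHeaderBytes)

	// strings.Split eagerly allocates a slice holding one (empty)
	// element per comma, plus one.
	parts := strings.Split(malicious, ",")

	// 1048577 elements; at 16 bytes per string header on 64-bit
	// platforms, the slice's backing array alone is roughly 16 MiB.
	fmt.Println(len(parts))
}
```

That slice accounts for essentially all of the ~16 MB/op reported in the benchmark results below.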

Signed-off-by: jub0bs <jcretel-infosec+github@protonmail.com>

* util/httputil: Improve newCompressedResponseWriter

This change dramatically reduces the amount of heap memory allocated when
handling a request that contains a malicious Accept-Encoding header.

Below are some benchmark results; for conciseness, I've omitted the name of the
benchmark function (BenchmarkNewCompressionHandler_MaliciousAcceptEncoding):

```
goos: darwin
goarch: amd64
pkg: github.com/prometheus/prometheus/util/httputil
cpu: Intel(R) Core(TM) i7-6700HQ CPU @ 2.60GHz
│     old     │                 new                 │
│   sec/op    │   sec/op     vs base                │
  18.60m ± 2%   13.54m ± 3%  -27.17% (p=0.000 n=10)

│       old        │                 new                 │
│       B/op       │    B/op     vs base                 │
  16785442.50 ± 0%   32.00 ± 0%  -100.00% (p=0.000 n=10)

│    old     │                new                 │
│ allocs/op  │ allocs/op   vs base                │
  2.000 ± 0%   1.000 ± 0%  -50.00% (p=0.000 n=10)
```
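
The change replaces the eager split with an incremental scan based on `strings.Cut`, which returns substrings of the original header value and allocates nothing. A rough sketch of the pattern follows; it is not the exact code from the diff, and the helper name `forEachListElement` is made up for illustration:

```go
package main

import (
	"fmt"
	"strings"
)

// forEachListElement visits each comma-separated element of raw without
// allocating: strings.Cut returns substrings that share raw's backing array.
func forEachListElement(raw string, visit func(string)) {
	for {
		element, rest, commaFound := strings.Cut(raw, ",")
		visit(strings.TrimSpace(element))
		if !commaFound {
			return
		}
		raw = rest
	}
}

func main() {
	forEachListElement("gzip, deflate,br", func(e string) {
		fmt.Printf("%q\n", e) // "gzip", "deflate", "br"
	})
}
```

The actual change (see the diff below) inlines this loop directly into newCompressedResponseWriter.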

Signed-off-by: jub0bs <jcretel-infosec+github@protonmail.com>

---------

Signed-off-by: jub0bs <jcretel-infosec+github@protonmail.com>
jub0bs 2025-02-11 14:14:55 +01:00 committed by GitHub
parent b74cebf6bf
commit 329ec6831a
2 changed files with 22 additions and 2 deletions


```diff
@@ -56,8 +56,13 @@ func (c *compressedResponseWriter) Close() {
 
 // Constructs a new compressedResponseWriter based on client request headers.
 func newCompressedResponseWriter(writer http.ResponseWriter, req *http.Request) *compressedResponseWriter {
-    encodings := strings.Split(req.Header.Get(acceptEncodingHeader), ",")
-    for _, encoding := range encodings {
+    raw := req.Header.Get(acceptEncodingHeader)
+    var (
+        encoding   string
+        commaFound bool
+    )
+    for {
+        encoding, raw, commaFound = strings.Cut(raw, ",")
         switch strings.TrimSpace(encoding) {
         case gzipEncoding:
             writer.Header().Set(contentEncodingHeader, gzipEncoding)
@@ -72,6 +77,9 @@ func newCompressedResponseWriter(writer http.ResponseWriter, req *http.Request)
                 writer: zlib.NewWriter(writer),
             }
         }
+        if !commaFound {
+            break
+        }
     }
     return &compressedResponseWriter{
         ResponseWriter: writer,
```
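
A note on the shape of the loop: `strings.Cut` reports via its third result whether a comma was present, so the `commaFound` check after the `switch` is what ends the scan once the last element has been examined, while the `return` statements inside the `switch` still exit early as soon as a supported encoding is matched.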


```diff
@@ -18,6 +18,7 @@ import (
     "io"
     "net/http"
     "net/http/httptest"
+    "strings"
     "testing"
 
     "github.com/klauspost/compress/gzip"
@@ -72,6 +73,17 @@ func TestCompressionHandler_PlainText(t *testing.T) {
     require.Equal(t, expected, actual, "expected response with content")
 }
 
+func BenchmarkNewCompressionHandler_MaliciousAcceptEncoding(b *testing.B) {
+    rec := httptest.NewRecorder()
+    req := httptest.NewRequest(http.MethodGet, "/whatever", nil)
+    req.Header.Set("Accept-Encoding", strings.Repeat(",", http.DefaultMaxHeaderBytes))
+    b.ReportAllocs()
+    b.ResetTimer()
+    for range b.N {
+        newCompressedResponseWriter(rec, req)
+    }
+}
+
 func TestCompressionHandler_Gzip(t *testing.T) {
     tearDown := setup()
     defer tearDown()
```
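
To reproduce the comparison above, the benchmark can be run on both the old and the new revision, for example with `go test -run '^$' -bench NewCompressionHandler_MaliciousAcceptEncoding -count 10 ./util/httputil`, and the two result sets compared with `benchstat`, which is presumably what produced the tables in the commit message.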