Optimize zstd decompression memory usage

This commit is contained in:
Nicolás Pazos 2023-11-06 17:14:03 -03:00 committed by Callum Styan
parent 1abce2acfc
commit 6f957dba95

View file

@@ -164,10 +164,11 @@ func (s *s2Compression) Decompress(data []byte) ([]byte, error) {
} }
type zstdCompression struct { type zstdCompression struct {
level zstd.EncoderLevel level zstd.EncoderLevel
buf bytes.Buffer buf bytes.Buffer
r *reZstd.Decoder rawBuf []byte
w *reZstd.Encoder r *reZstd.Decoder
w *reZstd.Encoder
} }
func (z *zstdCompression) Compress(data []byte) ([]byte, error) { func (z *zstdCompression) Compress(data []byte) ([]byte, error) {
@@ -178,7 +179,8 @@ func (z *zstdCompression) Compress(data []byte) ([]byte, error) {
return nil, err return nil, err
} }
} }
// NOTE: from my observations EncodeAll takes a bit less CPU but considerably more memory.
// Taking this decision deliberately.
z.buf.Reset() z.buf.Reset()
z.w.Reset(&z.buf) z.w.Reset(&z.buf)
_, err = z.w.Write(data) _, err = z.w.Write(data)
@@ -200,17 +202,20 @@ func (z *zstdCompression) Decompress(data []byte) ([]byte, error) {
return nil, err return nil, err
} }
} }
// NOTE: interestingly, I'm seeing much better memory usage using DecodeAll, for the same CPU
err = z.r.Reset(bytes.NewReader(data)) z.rawBuf = z.rawBuf[:0]
if err != nil { z.rawBuf, err = z.r.DecodeAll(data, z.rawBuf)
return nil, err return z.rawBuf, err
} // err = z.r.Reset(bytes.NewReader(data))
z.buf.Reset() // if err != nil {
_, err = io.Copy(&z.buf, z.r) // return nil, err
if err != nil { // }
return nil, err // z.buf.Reset()
} // _, err = io.Copy(&z.buf, z.r)
return z.buf.Bytes(), nil // if err != nil {
// return nil, err
// }
// return z.buf.Bytes(), nil
} }
type lzwCompression struct { type lzwCompression struct {