Refactor benchmarks and add a benchmark driven by an on-disk dataset

This commit is contained in:
Nicolás Pazos 2023-11-03 16:17:35 -03:00 committed by Callum Styan
parent c62a862aa6
commit 6afb30a06f

View file

@ -1,6 +1,7 @@
package remote
import (
	"os"
	"path/filepath"
	"testing"
	"time"
)
@ -46,83 +47,37 @@ func TestCompressions(t *testing.T) {
}
}
// BenchmarkCompressions_V11 measures every supported compression algorithm
// against a synthetic v1.1 (reduced) write request, reporting per-op
// compressed size, compress/decompress time, and compression ratio.
func BenchmarkCompressions_V11(b *testing.B) {
	data := makeUncompressedReducedWriteRequestBenchData(b)
	cases := []struct {
		name string
		algo CompAlgorithm
	}{
		{"Snappy", Snappy},
		{"SnappyAlt", SnappyAlt},
		{"S2", S2},
		{"ZstdFast", ZstdFast},
		{"ZstdDefault", ZstdDefault},
		{"ZstdBestComp", ZstdBestComp},
		{"Lzw", Lzw},
		{"FlateFast", FlateFast},
		{"FlateComp", FlateComp},
		{"BrotliFast", BrotliFast},
		// {"BrotliComp", BrotliComp},
		{"BrotliDefault", BrotliDefault},
	}
	compressors := map[CompAlgorithm]Compression{}
	decompressors := map[CompAlgorithm]Compression{}
	for _, tc := range cases {
		// createComp reads the package-level UseAlgorithm, so set it before
		// each construction.
		UseAlgorithm = tc.algo
		compressors[tc.algo] = createComp()
		decompressors[tc.algo] = createComp()
		// Warm up each codec twice so one-time initialization cost stays out
		// of the timed loop below.
		for warm := 0; warm < 2; warm++ {
			compressed, err := compressors[tc.algo].Compress(data)
			if err != nil {
				b.Fatal(err)
			}
			if _, err = decompressors[tc.algo].Decompress(compressed); err != nil {
				b.Fatal(err)
			}
		}
	}
	for _, tc := range cases {
		tc := tc
		b.Run(tc.name, func(b *testing.B) {
			comp := compressors[tc.algo]
			decomp := decompressors[tc.algo]
			var (
				sizeSum     int
				compNanos   int
				decompNanos int
				ratioSum    float64
			)
			for i := 0; i < b.N; i++ {
				t0 := time.Now()
				compressed, err := comp.Compress(data)
				if err != nil {
					b.Fatal(err)
				}
				compNanos += int(time.Since(t0))
				sizeSum += len(compressed)
				ratioSum += float64(len(data)) / float64(len(compressed))
				t0 = time.Now()
				if _, err = decomp.Decompress(compressed); err != nil {
					b.Fatal(err)
				}
				decompNanos += int(time.Since(t0))
			}
			iters := float64(b.N)
			b.ReportMetric(float64(sizeSum)/iters, "compressedSize/op")
			b.ReportMetric(float64(compNanos)/iters, "nsCompress/op")
			b.ReportMetric(float64(decompNanos)/iters, "nsDecompress/op")
			b.ReportMetric(ratioSum/iters, "compressionX/op")
		})
	}
}
// BenchmarkCompressions_V1 runs the shared compression benchmark over a single
// synthetic v1 write request that attempts to be representative.
func BenchmarkCompressions_V1(b *testing.B) {
	payload := makeUncompressedWriteRequestBenchData(b)
	benchmarkCompressionsForData(b, [][]byte{payload})
}
func BenchmarkCompressions_V11(b *testing.B) {
// Synthetic data, attempts to be representative
data := makeUncompressedWriteRequestBenchData(b)
benchmarkCompressionsForData(b, [][]byte{data})
}
// BenchmarkCompressions_V11_FileDataSet runs the shared compression benchmark
// over a captured v1.1 dataset. The dataset (exactly 10 raw request files)
// must be present in /home/nicolas/rw11data/v11_raw/; when it is not, the
// benchmark is skipped instead of failing, so the suite stays runnable on
// machines without the local dataset.
func BenchmarkCompressions_V11_FileDataSet(b *testing.B) {
	const dir = "/home/nicolas/rw11data/v11_raw/"
	if _, err := os.Stat(dir); err != nil {
		b.Skipf("v1.1 dataset not available: %v", err)
	}
	datas := readAllFiles(dir)
	if len(datas) != 10 {
		b.Fatalf("unexpected number of dataset files: got %d, want 10", len(datas))
	}
	benchmarkCompressionsForData(b, datas)
}
// BenchmarkCompressions_V1_FileDataSet runs the shared compression benchmark
// over a captured v1 dataset. The dataset (exactly 10 raw request files) must
// be present in /home/nicolas/rw11data/v1_raw/; when it is not, the benchmark
// is skipped instead of failing, so the suite stays runnable on machines
// without the local dataset.
func BenchmarkCompressions_V1_FileDataSet(b *testing.B) {
	const dir = "/home/nicolas/rw11data/v1_raw/"
	if _, err := os.Stat(dir); err != nil {
		b.Skipf("v1 dataset not available: %v", err)
	}
	datas := readAllFiles(dir)
	if len(datas) != 10 {
		b.Fatalf("unexpected number of dataset files: got %d, want 10", len(datas))
	}
	benchmarkCompressionsForData(b, datas)
}
func benchmarkCompressionsForData(b *testing.B, datas [][]byte) {
bc := []struct {
name string
algo CompAlgorithm
@ -135,10 +90,11 @@ func BenchmarkCompressions_V1(b *testing.B) {
{"ZstdBestComp", ZstdBestComp},
{"Lzw", Lzw},
{"FlateFast", FlateFast},
{"FlateDefault", FlateDefault},
{"FlateComp", FlateComp},
{"BrotliFast", BrotliFast},
// {"BrotliComp", BrotliComp},
{"BrotliDefault", BrotliDefault},
// {"BrotliComp", BrotliComp},
}
comps := make(map[CompAlgorithm]Compression)
decomps := make(map[CompAlgorithm]Compression)
@ -150,13 +106,15 @@ func BenchmarkCompressions_V1(b *testing.B) {
decomps[c.algo] = decomp
// warmup
for i := 0; i < 2; i++ {
compressed, err := comp.Compress(data)
if err != nil {
b.Fatal(err)
}
_, err = decomp.Decompress(compressed)
if err != nil {
b.Fatal(err)
for _, data := range datas {
compressed, err := comp.Compress(data)
if err != nil {
b.Fatal(err)
}
_, err = decomp.Decompress(compressed)
if err != nil {
b.Fatal(err)
}
}
}
}
@ -167,31 +125,50 @@ func BenchmarkCompressions_V1(b *testing.B) {
decomp := decomps[c.algo]
totalSize := 0
totalRawSize := 0
compTime := 0
decompTime := 0
rate := 0.0
var start time.Time
for i := 0; i < b.N; i++ {
start = time.Now()
res, err := comp.Compress(data)
if err != nil {
b.Fatal(err)
for _, data := range datas {
start = time.Now()
res, err := comp.Compress(data)
if err != nil {
b.Fatal(err)
}
compTime += int(time.Since(start))
totalSize += len(res)
totalRawSize += len(data)
start = time.Now()
_, err = decomp.Decompress(res)
if err != nil {
b.Fatal(err)
}
decompTime += int(time.Since(start))
}
compTime += int(time.Since(start))
totalSize += len(res)
rate += float64(len(data)) / float64(len(res))
start = time.Now()
_, err = decomp.Decompress(res)
if err != nil {
b.Fatal(err)
}
decompTime += int(time.Since(start))
}
b.ReportMetric(float64(totalSize)/float64(b.N), "compressedSize/op")
b.ReportMetric(float64(compTime)/float64(b.N), "nsCompress/op")
b.ReportMetric(float64(decompTime)/float64(b.N), "nsDecompress/op")
b.ReportMetric(rate/float64(b.N), "compressionX/op")
rate := float64(totalRawSize) / float64(totalSize)
b.ReportMetric(rate, "compressionX")
})
}
}
func readAllFiles(dir string) (res [][]byte) {
files, err := os.ReadDir(dir)
if err != nil {
panic(err)
}
for _, file := range files {
filename := dir + file.Name()
data, err := os.ReadFile(filename)
if err != nil {
panic(err)
}
res = append(res, data)
}
return
}