@@ -35,7 +35,7 @@ func BenchmarkNewChunkMergeIterator_CreateAndIterate(b *testing.B) {
 			scenario.duplicationFactor,
 			scenario.enc.String())
 
-		chunks := createChunks(b, scenario.numChunks, scenario.numSamplesPerChunk, scenario.duplicationFactor, scenario.enc)
+		chunks := createChunks(b, step, scenario.numChunks, scenario.numSamplesPerChunk, scenario.duplicationFactor, scenario.enc)
 
 		b.Run(name, func(b *testing.B) {
 			b.ReportAllocs()
@@ -55,10 +55,59 @@ func BenchmarkNewChunkMergeIterator_CreateAndIterate(b *testing.B) {
 	}
 }
 
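+// BenchmarkNewChunkMergeIterator_Seek measures Seek on the merged chunk iterator
+// across different scrape intervals and seek step sizes.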
+func BenchmarkNewChunkMergeIterator_Seek(b *testing.B) {
+	scenarios := []struct {
+		numChunks          int
+		numSamplesPerChunk int
+		duplicationFactor  int
+		seekStep           time.Duration
+		scrapeInterval     time.Duration
+		enc                promchunk.Encoding
+	}{
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 30, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 50, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 200, enc: promchunk.PrometheusXorChunk},
+
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 30, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 50, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
+		{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 200, enc: promchunk.PrometheusXorChunk},
+	}
+
+	for _, scenario := range scenarios {
+		name := fmt.Sprintf("scrapeInterval %vs seekStep: %vs",
+			scenario.scrapeInterval.Seconds(),
+			scenario.seekStep.Seconds())
+
+		chunks := createChunks(b, scenario.scrapeInterval, scenario.numChunks, scenario.numSamplesPerChunk, scenario.duplicationFactor, scenario.enc)
+
+		b.Run(name, func(b *testing.B) {
+			b.ReportAllocs()
+
+			for n := 0; n < b.N; n++ {
+				it := NewChunkMergeIterator(chunks, 0, 0)
+				i := int64(0)
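+				// Seek forward in seekStep increments until the iterator is exhausted.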
+				for it.Seek(i*scenario.seekStep.Milliseconds()) != chunkenc.ValNone {
+					i++
+				}
+			}
+		})
+	}
+}
+
 func TestSeekCorrectlyDealWithSinglePointChunks(t *testing.T) {
 	t.Parallel()
-	chunkOne := mkChunk(t, model.Time(1*step/time.Millisecond), 1, promchunk.PrometheusXorChunk)
-	chunkTwo := mkChunk(t, model.Time(10*step/time.Millisecond), 1, promchunk.PrometheusXorChunk)
+	chunkOne := mkChunk(t, step, model.Time(1*step/time.Millisecond), 1, promchunk.PrometheusXorChunk)
+	chunkTwo := mkChunk(t, step, model.Time(10*step/time.Millisecond), 1, promchunk.PrometheusXorChunk)
 	chunks := []chunk.Chunk{chunkOne, chunkTwo}
 
 	sut := NewChunkMergeIterator(chunks, 0, 0)
@@ -72,13 +121,13 @@ func TestSeekCorrectlyDealWithSinglePointChunks(t *testing.T) {
 	require.Equal(t, int64(1*time.Second/time.Millisecond), actual)
 }
 
-func createChunks(b *testing.B, numChunks, numSamplesPerChunk, duplicationFactor int, enc promchunk.Encoding) []chunk.Chunk {
+func createChunks(b *testing.B, step time.Duration, numChunks, numSamplesPerChunk, duplicationFactor int, enc promchunk.Encoding) []chunk.Chunk {
 	result := make([]chunk.Chunk, 0, numChunks)
 
 	for d := 0; d < duplicationFactor; d++ {
 		for c := 0; c < numChunks; c++ {
 			minTime := step * time.Duration(c*numSamplesPerChunk)
-			result = append(result, mkChunk(b, model.Time(minTime.Milliseconds()), numSamplesPerChunk, enc))
+			result = append(result, mkChunk(b, step, model.Time(minTime.Milliseconds()), numSamplesPerChunk, enc))
 		}
 	}
 