Skip to content

Commit f1e0a13

Browse files
committed
Testing multiple scrape intervals
Signed-off-by: Alan Protasio <[email protected]>
1 parent 050f9d8 commit f1e0a13

File tree

2 files changed

+36
-29
lines changed

2 files changed

+36
-29
lines changed

pkg/querier/batch/batch.go

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -106,7 +106,14 @@ func (a *iteratorAdapter) Seek(t int64) bool {
106106
// If the number of samples is < 60 (each chunk has 120 samples) we assume that the samples belong to the current
107107
// chunk and forward the iterator to the right point - this is more efficient than the seek call.
108108
// See: https://github.com/prometheus/prometheus/blob/211ae4f1f0a2cdaae09c4c52735f75345c1817c6/tsdb/head_append.go#L1337
109-
approxNumberOfSamples := model.Time(t).Sub(model.Time(a.curr.Timestamps[a.curr.Length-1])) / (30 * time.Second)
109+
scrapeInterval := 30 * time.Second
110+
111+
// If we have at least 2 samples, we can estimate the scrape interval
112+
if a.curr.Length > 1 {
113+
scrapeInterval = time.Duration(a.curr.Timestamps[1]-a.curr.Timestamps[0]) * time.Millisecond
114+
}
115+
116+
approxNumberOfSamples := model.Time(t).Sub(model.Time(a.curr.Timestamps[a.curr.Length-1])) / scrapeInterval
110117
if approxNumberOfSamples < 60 {
111118
for a.underlying.Next(promchunk.BatchSize) {
112119
a.curr = a.underlying.Batch()

pkg/querier/batch/batch_test.go

Lines changed: 28 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -56,50 +56,50 @@ func BenchmarkNewChunkMergeIterator_CreateAndIterate(b *testing.B) {
5656
}
5757

5858
func BenchmarkNewChunkMergeIterator_Seek(b *testing.B) {
59-
scrapeInterval := 30 * time.Second
60-
6159
scenarios := []struct {
6260
numChunks int
6361
numSamplesPerChunk int
6462
duplicationFactor int
6563
seekStep time.Duration
64+
scrapeInterval time.Duration
6665
enc promchunk.Encoding
6766
}{
68-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval / 2, enc: promchunk.PrometheusXorChunk},
69-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval, enc: promchunk.PrometheusXorChunk},
70-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval * 2, enc: promchunk.PrometheusXorChunk},
71-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval * 10, enc: promchunk.PrometheusXorChunk},
72-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval * 100, enc: promchunk.PrometheusXorChunk},
73-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval * 1000, enc: promchunk.PrometheusXorChunk},
74-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval / 2, enc: promchunk.PrometheusXorChunk},
75-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval, enc: promchunk.PrometheusXorChunk},
76-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval * 2, enc: promchunk.PrometheusXorChunk},
77-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval * 10, enc: promchunk.PrometheusXorChunk},
78-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval * 100, enc: promchunk.PrometheusXorChunk},
79-
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval * 1000, enc: promchunk.PrometheusXorChunk},
80-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval / 2, enc: promchunk.PrometheusXorChunk},
81-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval, enc: promchunk.PrometheusXorChunk},
82-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval * 2, enc: promchunk.PrometheusXorChunk},
83-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval * 10, enc: promchunk.PrometheusXorChunk},
84-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval * 100, enc: promchunk.PrometheusXorChunk},
85-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 1, seekStep: scrapeInterval * 1000, enc: promchunk.PrometheusXorChunk},
86-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval / 2, enc: promchunk.PrometheusXorChunk},
87-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval, enc: promchunk.PrometheusXorChunk},
88-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval * 2, enc: promchunk.PrometheusXorChunk},
89-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval * 10, enc: promchunk.PrometheusXorChunk},
90-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval * 100, enc: promchunk.PrometheusXorChunk},
91-
{numChunks: 100, numSamplesPerChunk: 120, duplicationFactor: 3, seekStep: scrapeInterval * 1000, enc: promchunk.PrometheusXorChunk},
67+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
68+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second, enc: promchunk.PrometheusXorChunk},
69+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
70+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
71+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
72+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 1, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 1000, enc: promchunk.PrometheusXorChunk},
73+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
74+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second, enc: promchunk.PrometheusXorChunk},
75+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
76+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
77+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
78+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 30 * time.Second, seekStep: 30 * time.Second * 1000, enc: promchunk.PrometheusXorChunk},
79+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
80+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second, enc: promchunk.PrometheusXorChunk},
81+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
82+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
83+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
84+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 10 * time.Second, seekStep: 10 * time.Second * 1000, enc: promchunk.PrometheusXorChunk},
85+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second / 2, enc: promchunk.PrometheusXorChunk},
86+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second, enc: promchunk.PrometheusXorChunk},
87+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second * 2, enc: promchunk.PrometheusXorChunk},
88+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second * 10, enc: promchunk.PrometheusXorChunk},
89+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second * 100, enc: promchunk.PrometheusXorChunk},
90+
{numChunks: 1000, numSamplesPerChunk: 120, duplicationFactor: 3, scrapeInterval: 60 * time.Second, seekStep: 60 * time.Second * 1000, enc: promchunk.PrometheusXorChunk},
9291
}
9392

9493
for _, scenario := range scenarios {
95-
name := fmt.Sprintf("chunks: %d samples per chunk: %d duplication factor: %d seekStep %vs encoding: %s",
94+
name := fmt.Sprintf("chunks: %d samples per chunk: %d duplication factor: %d scrapeInterval %vs seekStep: %vs encoding: %s",
9695
scenario.numChunks,
9796
scenario.numSamplesPerChunk,
9897
scenario.duplicationFactor,
98+
scenario.scrapeInterval.Seconds(),
9999
scenario.seekStep.Seconds(),
100100
scenario.enc.String())
101101

102-
chunks := createChunks(b, scrapeInterval, scenario.numChunks, scenario.numSamplesPerChunk, scenario.duplicationFactor, scenario.enc)
102+
chunks := createChunks(b, scenario.scrapeInterval, scenario.numChunks, scenario.numSamplesPerChunk, scenario.duplicationFactor, scenario.enc)
103103

104104
b.Run(name, func(b *testing.B) {
105105
b.ReportAllocs()

0 commit comments

Comments
 (0)