|
@@ -1,8 +1,6 @@
 package batch
 
 import (
-	"time"
-
 	"github.com/cortexproject/cortex/pkg/chunk"
 	"github.com/cortexproject/cortex/pkg/chunk/encoding"
 	promchunk "github.com/cortexproject/cortex/pkg/chunk/encoding"
@@ -44,6 +42,7 @@ type iterator interface {
 	// AtTime returns the start time of the next batch. Must only be called after
 	// Seek or Next have returned true.
 	AtTime() int64
+	MaxTime() int64
 
 	// Batch returns the current batch. Must only be called after Seek or Next
 	// have returned true.
@@ -100,31 +99,16 @@ func (a *iteratorAdapter) Seek(t int64) bool {
 				a.curr.Index++
 			}
 			return true
-		} else {
-			// In this case, t is after the end of the current batch.
-			// Here we assume that the scrape interval is 30s and try to calculate how many samples ahead we are trying to seek.
-			// If the number of samples is < 60 (each chunk has 120 samples) we assume that the samples belongs to the current
-			// chunk and forward the iterator to the right point - this is more efficient than the seek call.
-			// See: https://github.com/prometheus/prometheus/blob/211ae4f1f0a2cdaae09c4c52735f75345c1817c6/tsdb/head_append.go#L1337
-			scrapeInterval := 30 * time.Second
-
-			// We have 2 samples we can estimate the scrape interval
-			if a.curr.Length > 1 {
-				scrapeInterval = time.Duration(a.curr.Timestamps[1]-a.curr.Timestamps[0]) * time.Millisecond
-			}
-
-			approxNumberOfSamples := model.Time(t).Sub(model.Time(a.curr.Timestamps[a.curr.Length-1])) / scrapeInterval
-			if approxNumberOfSamples < 60 {
-				for a.underlying.Next(promchunk.BatchSize) {
-					a.curr = a.underlying.Batch()
-					if t <= a.curr.Timestamps[a.curr.Length-1] {
-						//In this case, some timestamp between current sample and end of batch can fulfill
-						//the seek. Let's find it.
-						for a.curr.Index < a.curr.Length && t > a.curr.Timestamps[a.curr.Index] {
-							a.curr.Index++
-						}
-						return true
+		} else if t <= a.underlying.MaxTime() {
+			for a.underlying.Next(promchunk.BatchSize) {
+				a.curr = a.underlying.Batch()
+				if t <= a.curr.Timestamps[a.curr.Length-1] {
+					//In this case, some timestamp between current sample and end of batch can fulfill
+					//the seek. Let's find it.
+					for a.curr.Index < a.curr.Length && t > a.curr.Timestamps[a.curr.Index] {
+						a.curr.Index++
 					}
+					return true
 				}
 			}
 		}
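
Note on the change above: the old Seek fast-path guessed how far ahead t was by assuming a 30s scrape interval (or estimating it from the first two samples of the current batch) and only walked forward if the estimate stayed within the current chunk. The new code drops that heuristic and instead asks the underlying iterator, via the new MaxTime() method, whether t can still be reached by stepping forward batch-by-batch; if t is beyond MaxTime(), it falls through to whatever Seek did before (not shown in this hunk). Below is a minimal, hypothetical sketch of what a MaxTime() implementation could look like for an iterator backed by time-ordered chunks; the names sliceIterator, chunkRef and through are illustrative and are not taken from the Cortex code.

// Minimal sketch, not the Cortex implementation: one way an underlying
// iterator could satisfy the new MaxTime() method, assuming it is backed by a
// time-ordered slice of chunks that each know their end timestamp.
package main

import "fmt"

// chunkRef is a stand-in for a chunk descriptor holding its time range in
// milliseconds (loosely analogous to a chunk's from/through bounds).
type chunkRef struct {
	from    int64
	through int64
}

// sliceIterator walks a slice of chunks sorted by time.
type sliceIterator struct {
	chunks []chunkRef
	idx    int
}

// MaxTime returns the end timestamp of the last chunk, i.e. the largest
// timestamp this iterator can ever produce. A Seek(t) fast-path can compare t
// against this value to decide whether stepping forward batch-by-batch can
// possibly reach t, instead of estimating from an assumed scrape interval.
func (it *sliceIterator) MaxTime() int64 {
	if len(it.chunks) == 0 {
		return 0
	}
	return it.chunks[len(it.chunks)-1].through
}

func main() {
	it := &sliceIterator{chunks: []chunkRef{{from: 0, through: 60_000}, {from: 60_000, through: 120_000}}}
	fmt.Println(it.MaxTime()) // prints 120000
}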
|