diff --git a/cmd/promtool/backfill_test.go b/cmd/promtool/backfill_test.go index c9493f134c..b76b56543d 100644 --- a/cmd/promtool/backfill_test.go +++ b/cmd/promtool/backfill_test.go @@ -27,6 +27,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/tsdb" + "github.com/prometheus/prometheus/tsdb/chunkenc" ) type backfillSample struct { @@ -52,7 +53,7 @@ func queryAllSeries(t testing.TB, q storage.Querier, expectedMinTime, expectedMa series := ss.At() it := series.Iterator() require.NoError(t, it.Err()) - for it.Next() { + for it.Next() == chunkenc.ValFloat { ts, v := it.At() samples = append(samples, backfillSample{Timestamp: ts, Value: v, Labels: series.Labels()}) } diff --git a/cmd/promtool/rules_test.go b/cmd/promtool/rules_test.go index 10d59d5cc5..29c7f623c6 100644 --- a/cmd/promtool/rules_test.go +++ b/cmd/promtool/rules_test.go @@ -29,6 +29,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/tsdb" + "github.com/prometheus/prometheus/tsdb/chunkenc" ) type mockQueryRangeAPI struct { @@ -148,7 +149,7 @@ func TestBackfillRuleIntegration(t *testing.T) { require.Equal(t, 3, len(series.Labels())) } it := series.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValFloat { samplesCount++ ts, v := it.At() if v == testValue { diff --git a/cmd/promtool/tsdb.go b/cmd/promtool/tsdb.go index 2e5d854dee..414a07176e 100644 --- a/cmd/promtool/tsdb.go +++ b/cmd/promtool/tsdb.go @@ -32,6 +32,7 @@ import ( "time" "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/index" "github.com/alecthomas/units" @@ -646,7 +647,7 @@ func dumpSamples(path string, mint, maxt int64) (err error) { series := ss.At() lbs := series.Labels() it := series.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValFloat { ts, val := it.At() fmt.Printf("%s %g %d\n", lbs, val, ts) } diff 
--git a/model/histogram/float_histogram.go b/model/histogram/float_histogram.go index 954afc5948..576f8a6e98 100644 --- a/model/histogram/float_histogram.go +++ b/model/histogram/float_histogram.go @@ -150,9 +150,9 @@ type FloatBucketIterator interface { } // FloatBucket represents a bucket with lower and upper limit and the count of -// samples in the bucket. It also specifies if each limit is inclusive or -// not. (Mathematically, inclusive limits create a closed interval, and -// non-inclusive limits an open interval.) +// samples in the bucket as a float64. It also specifies if each limit is +// inclusive or not. (Mathematically, inclusive limits create a closed interval, +// and non-inclusive limits an open interval.) // // To represent cumulative buckets, Lower is set to -Inf, and the Count is then // cumulative (including the counts of all buckets for smaller values). diff --git a/model/histogram/histogram.go b/model/histogram/histogram.go index 96a4d1bbdd..0a0d0800ad 100644 --- a/model/histogram/histogram.go +++ b/model/histogram/histogram.go @@ -70,19 +70,19 @@ type Span struct { func (h *Histogram) Copy() *Histogram { c := *h - if h.PositiveSpans != nil { + if len(h.PositiveSpans) != 0 { c.PositiveSpans = make([]Span, len(h.PositiveSpans)) copy(c.PositiveSpans, h.PositiveSpans) } - if h.NegativeSpans != nil { + if len(h.NegativeSpans) != 0 { c.NegativeSpans = make([]Span, len(h.NegativeSpans)) copy(c.NegativeSpans, h.NegativeSpans) } - if h.PositiveBuckets != nil { + if len(h.PositiveBuckets) != 0 { c.PositiveBuckets = make([]int64, len(h.PositiveBuckets)) copy(c.PositiveBuckets, h.PositiveBuckets) } - if h.NegativeBuckets != nil { + if len(h.NegativeBuckets) != 0 { c.NegativeBuckets = make([]int64, len(h.NegativeBuckets)) copy(c.NegativeBuckets, h.NegativeBuckets) } @@ -162,15 +162,15 @@ func (h *Histogram) ToFloat() *FloatHistogram { positiveSpans, negativeSpans []Span positiveBuckets, negativeBuckets []float64 ) - if h.PositiveSpans != nil { + if 
len(h.PositiveSpans) != 0 { positiveSpans = make([]Span, len(h.PositiveSpans)) copy(positiveSpans, h.PositiveSpans) } - if h.NegativeSpans != nil { + if len(h.NegativeSpans) != 0 { negativeSpans = make([]Span, len(h.NegativeSpans)) copy(negativeSpans, h.NegativeSpans) } - if h.PositiveBuckets != nil { + if len(h.PositiveBuckets) != 0 { positiveBuckets = make([]float64, len(h.PositiveBuckets)) var current float64 for i, b := range h.PositiveBuckets { @@ -178,7 +178,7 @@ func (h *Histogram) ToFloat() *FloatHistogram { positiveBuckets[i] = current } } - if h.NegativeBuckets != nil { + if len(h.NegativeBuckets) != 0 { negativeBuckets = make([]float64, len(h.NegativeBuckets)) var current float64 for i, b := range h.NegativeBuckets { diff --git a/promql/engine.go b/promql/engine.go index 3997630131..0e96f458e3 100644 --- a/promql/engine.go +++ b/promql/engine.go @@ -1634,28 +1634,30 @@ func (ev *evaluator) vectorSelector(node *parser.VectorSelector, ts int64) (Vect } // vectorSelectorSingle evaluates an instant vector for the iterator of one time series. 
-func (ev *evaluator) vectorSelectorSingle(it *storage.MemoizedSeriesIterator, node *parser.VectorSelector, ts int64) (int64, float64, *histogram.Histogram, bool) { +func (ev *evaluator) vectorSelectorSingle(it *storage.MemoizedSeriesIterator, node *parser.VectorSelector, ts int64) ( + int64, float64, *histogram.FloatHistogram, bool, +) { refTime := ts - durationMilliseconds(node.Offset) var t int64 var v float64 - var h *histogram.Histogram + var h *histogram.FloatHistogram valueType := it.Seek(refTime) switch valueType { - case storage.ValNone: + case chunkenc.ValNone: if it.Err() != nil { ev.error(it.Err()) } - case storage.ValFloat: + case chunkenc.ValFloat: t, v = it.Values() - case storage.ValHistogram: - t, h = it.HistogramValues() + case chunkenc.ValHistogram, chunkenc.ValFloatHistogram: + t, h = it.FloatHistogramValues() default: panic(fmt.Errorf("unknown value type %v", valueType)) } - if valueType == storage.ValNone || t > refTime { + if valueType == chunkenc.ValNone || t > refTime { var ok bool - t, v, h, ok = it.PeekPrev() + t, v, _, h, ok = it.PeekPrev() if !ok || t < refTime-durationMilliseconds(ev.lookbackDelta) { return 0, 0, nil, false } @@ -1747,19 +1749,23 @@ func (ev *evaluator) matrixIterSlice(it *storage.BufferedSeriesIterator, mint, m out = out[:0] } - ok := it.Seek(maxt) - if !ok { + soughtValueType := it.Seek(maxt) + if soughtValueType == chunkenc.ValNone { if it.Err() != nil { ev.error(it.Err()) } } buf := it.Buffer() - for buf.Next() { - if buf.ChunkEncoding() == chunkenc.EncHistogram { - t, h := buf.AtHistogram() +loop: + for { + switch buf.Next() { + case chunkenc.ValNone: + break loop + case chunkenc.ValFloatHistogram, chunkenc.ValHistogram: + t, h := buf.AtFloatHistogram() if value.IsStaleNaN(h.Sum) { - continue + continue loop } // Values in the buffer are guaranteed to be smaller than maxt. 
if t >= mint { @@ -1769,10 +1775,10 @@ func (ev *evaluator) matrixIterSlice(it *storage.BufferedSeriesIterator, mint, m ev.currentSamples++ out = append(out, Point{T: t, H: h}) } - } else { + case chunkenc.ValFloat: t, v := buf.At() if value.IsStaleNaN(v) { - continue + continue loop } // Values in the buffer are guaranteed to be smaller than maxt. if t >= mint { @@ -1785,25 +1791,24 @@ func (ev *evaluator) matrixIterSlice(it *storage.BufferedSeriesIterator, mint, m } } // The sought sample might also be in the range. - if ok { - if it.ChunkEncoding() == chunkenc.EncHistogram { - t, h := it.HistogramValues() - if t == maxt && !value.IsStaleNaN(h.Sum) { - if ev.currentSamples >= ev.maxSamples { - ev.error(ErrTooManySamples(env)) - } - out = append(out, Point{T: t, H: h}) - ev.currentSamples++ + switch soughtValueType { + case chunkenc.ValFloatHistogram, chunkenc.ValHistogram: + t, h := it.FloatHistogramValues() + if t == maxt && !value.IsStaleNaN(h.Sum) { + if ev.currentSamples >= ev.maxSamples { + ev.error(ErrTooManySamples(env)) } - } else { - t, v := it.Values() - if t == maxt && !value.IsStaleNaN(v) { - if ev.currentSamples >= ev.maxSamples { - ev.error(ErrTooManySamples(env)) - } - out = append(out, Point{T: t, V: v}) - ev.currentSamples++ + out = append(out, Point{T: t, H: h}) + ev.currentSamples++ + } + case chunkenc.ValFloat: + t, v := it.Values() + if t == maxt && !value.IsStaleNaN(v) { + if ev.currentSamples >= ev.maxSamples { + ev.error(ErrTooManySamples(env)) } + out = append(out, Point{T: t, V: v}) + ev.currentSamples++ } } return out diff --git a/promql/test_test.go b/promql/test_test.go index eb7b9e023c..5c16e57a29 100644 --- a/promql/test_test.go +++ b/promql/test_test.go @@ -21,6 +21,7 @@ import ( "github.com/stretchr/testify/require" "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/tsdb/chunkenc" ) func TestLazyLoader_WithSamplesTill(t *testing.T) { @@ -143,7 +144,7 @@ func TestLazyLoader_WithSamplesTill(t 
*testing.T) { Metric: storageSeries.Labels(), } it := storageSeries.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValFloat { t, v := it.At() got.Points = append(got.Points, Point{T: t, V: v}) } diff --git a/promql/value.go b/promql/value.go index 9fdf5e5091..02bfe5c18c 100644 --- a/promql/value.go +++ b/promql/value.go @@ -83,7 +83,7 @@ func (s Series) String() string { type Point struct { T int64 V float64 - H *histogram.Histogram + H *histogram.FloatHistogram } func (p Point) String() string { @@ -98,6 +98,7 @@ func (p Point) String() string { // MarshalJSON implements json.Marshaler. func (p Point) MarshalJSON() ([]byte, error) { + // TODO(beorn7): Support histogram. v := strconv.FormatFloat(p.V, 'f', -1, 64) return json.Marshal([...]interface{}{float64(p.T) / 1000, v}) } @@ -284,19 +285,23 @@ func newStorageSeriesIterator(series Series) *storageSeriesIterator { } } -func (ssi *storageSeriesIterator) Seek(t int64) bool { +func (ssi *storageSeriesIterator) Seek(t int64) chunkenc.ValueType { i := ssi.curr if i < 0 { i = 0 } for ; i < len(ssi.points); i++ { - if ssi.points[i].T >= t { + p := ssi.points[i] + if p.T >= t { ssi.curr = i - return true + if p.H != nil { + return chunkenc.ValFloatHistogram + } + return chunkenc.ValFloat } } ssi.curr = len(ssi.points) - 1 - return false + return chunkenc.ValNone } func (ssi *storageSeriesIterator) At() (t int64, v float64) { @@ -305,17 +310,29 @@ func (ssi *storageSeriesIterator) At() (t int64, v float64) { } func (ssi *storageSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { + panic(errors.New("storageSeriesIterator: AtHistogram not supported")) +} + +func (ssi *storageSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { p := ssi.points[ssi.curr] return p.T, p.H } -func (ssi *storageSeriesIterator) ChunkEncoding() chunkenc.Encoding { - return chunkenc.EncXOR +func (ssi *storageSeriesIterator) AtT() int64 { + p := ssi.points[ssi.curr] + return p.T } -func (ssi 
*storageSeriesIterator) Next() bool { +func (ssi *storageSeriesIterator) Next() chunkenc.ValueType { ssi.curr++ - return ssi.curr < len(ssi.points) + if ssi.curr >= len(ssi.points) { + return chunkenc.ValNone + } + p := ssi.points[ssi.curr] + if p.H != nil { + return chunkenc.ValFloatHistogram + } + return chunkenc.ValFloat } func (ssi *storageSeriesIterator) Err() error { diff --git a/rules/manager.go b/rules/manager.go index 23a63eade4..5bceec6e8b 100644 --- a/rules/manager.go +++ b/rules/manager.go @@ -36,6 +36,7 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/tsdb/chunkenc" ) // RuleHealth describes the health state of a rule. @@ -787,7 +788,7 @@ func (g *Group) RestoreForState(ts time.Time) { var t int64 var v float64 it := s.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValFloat { t, v = it.At() } if it.Err() != nil { diff --git a/rules/manager_test.go b/rules/manager_test.go index 3be0d68850..f8f379b4cc 100644 --- a/rules/manager_test.go +++ b/rules/manager_test.go @@ -37,6 +37,7 @@ import ( "github.com/prometheus/prometheus/promql" "github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/util/teststorage" ) @@ -597,7 +598,7 @@ func readSeriesSet(ss storage.SeriesSet) (map[string][]promql.Point, error) { points := []promql.Point{} it := series.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValFloat { t, v := it.At() points = append(points, promql.Point{T: t, V: v}) } diff --git a/scrape/scrape_test.go b/scrape/scrape_test.go index eca6bf714b..b6fd170044 100644 --- a/scrape/scrape_test.go +++ b/scrape/scrape_test.go @@ -45,6 +45,7 @@ import ( "github.com/prometheus/prometheus/model/timestamp" "github.com/prometheus/prometheus/model/value" "github.com/prometheus/prometheus/storage" + 
"github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/util/teststorage" "github.com/prometheus/prometheus/util/testutil" ) @@ -2755,7 +2756,7 @@ func TestScrapeReportSingleAppender(t *testing.T) { c := 0 for series.Next() { i := series.At().Iterator() - for i.Next() { + for i.Next() != chunkenc.ValNone { c++ } } diff --git a/storage/buffer.go b/storage/buffer.go index 767e204b33..3dad74d089 100644 --- a/storage/buffer.go +++ b/storage/buffer.go @@ -14,6 +14,7 @@ package storage import ( + "fmt" "math" "github.com/prometheus/prometheus/model/histogram" @@ -26,8 +27,8 @@ type BufferedSeriesIterator struct { buf *sampleRing delta int64 - lastTime int64 - ok bool + lastTime int64 + valueType chunkenc.ValueType } // NewBuffer returns a new iterator that buffers the values within the time range @@ -42,7 +43,7 @@ func NewBuffer(delta int64) *BufferedSeriesIterator { func NewBufferIterator(it chunkenc.Iterator, delta int64) *BufferedSeriesIterator { // TODO(codesome): based on encoding, allocate different buffer. bit := &BufferedSeriesIterator{ - buf: newSampleRing(delta, 16, it.ChunkEncoding()), + buf: newSampleRing(delta, 16), delta: delta, } bit.Reset(it) @@ -55,10 +56,9 @@ func NewBufferIterator(it chunkenc.Iterator, delta int64) *BufferedSeriesIterato func (b *BufferedSeriesIterator) Reset(it chunkenc.Iterator) { b.it = it b.lastTime = math.MinInt64 - b.ok = true b.buf.reset() b.buf.delta = b.delta - it.Next() + b.valueType = it.Next() } // ReduceDelta lowers the buffered time delta, for the current SeriesIterator only. @@ -80,7 +80,7 @@ func (b *BufferedSeriesIterator) Buffer() chunkenc.Iterator { } // Seek advances the iterator to the element at time t or greater. 
-func (b *BufferedSeriesIterator) Seek(t int64) bool { +func (b *BufferedSeriesIterator) Seek(t int64) chunkenc.ValueType { t0 := t - b.buf.delta // If the delta would cause us to seek backwards, preserve the buffer @@ -88,54 +88,64 @@ func (b *BufferedSeriesIterator) Seek(t int64) bool { if t0 > b.lastTime { b.buf.reset() - b.ok = b.it.Seek(t0) - if !b.ok { - return false - } - if b.it.ChunkEncoding() == chunkenc.EncHistogram { - b.lastTime, _ = b.HistogramValues() - } else { + b.valueType = b.it.Seek(t0) + switch b.valueType { + case chunkenc.ValNone: + return chunkenc.ValNone + case chunkenc.ValFloat: b.lastTime, _ = b.Values() + case chunkenc.ValHistogram: + b.lastTime, _ = b.HistogramValues() + case chunkenc.ValFloatHistogram: + b.lastTime, _ = b.FloatHistogramValues() + default: + panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType)) } } if b.lastTime >= t { - return true + return b.valueType } - for b.Next() { - if b.lastTime >= t { - return true + for { + if b.valueType = b.Next(); b.valueType == chunkenc.ValNone || b.lastTime >= t { + return b.valueType } } - - return false } // Next advances the iterator to the next element. -func (b *BufferedSeriesIterator) Next() bool { - if !b.ok { - return false - } - +func (b *BufferedSeriesIterator) Next() chunkenc.ValueType { // Add current element to buffer before advancing. 
- if b.it.ChunkEncoding() == chunkenc.EncHistogram { - t, h := b.it.AtHistogram() - b.buf.add(sample{t: t, h: h}) - } else { + switch b.valueType { + case chunkenc.ValNone: + return chunkenc.ValNone + case chunkenc.ValFloat: t, v := b.it.At() b.buf.add(sample{t: t, v: v}) + case chunkenc.ValHistogram: + t, h := b.it.AtHistogram() + b.buf.add(sample{t: t, h: h}) + case chunkenc.ValFloatHistogram: + t, fh := b.it.AtFloatHistogram() + b.buf.add(sample{t: t, fh: fh}) + default: + panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType)) } - b.ok = b.it.Next() - if b.ok { - if b.it.ChunkEncoding() == chunkenc.EncHistogram { - b.lastTime, _ = b.HistogramValues() - } else { - b.lastTime, _ = b.Values() - } + b.valueType = b.it.Next() + switch b.valueType { + case chunkenc.ValNone: + // Do nothing. + case chunkenc.ValFloat: + b.lastTime, _ = b.Values() + case chunkenc.ValHistogram: + b.lastTime, _ = b.HistogramValues() + case chunkenc.ValFloatHistogram: + b.lastTime, _ = b.FloatHistogramValues() + default: + panic(fmt.Errorf("BufferedSeriesIterator: unknown value type %v", b.valueType)) } - - return b.ok + return b.valueType } // Values returns the current element of the iterator. @@ -148,9 +158,9 @@ func (b *BufferedSeriesIterator) HistogramValues() (int64, *histogram.Histogram) return b.it.AtHistogram() } -// ChunkEncoding return the chunk encoding of the underlying iterator. -func (b *BufferedSeriesIterator) ChunkEncoding() chunkenc.Encoding { - return b.it.ChunkEncoding() +// FloatHistogramValues returns the current float-histogram element of the iterator. +func (b *BufferedSeriesIterator) FloatHistogramValues() (int64, *histogram.FloatHistogram) { + return b.it.AtFloatHistogram() } // Err returns the last encountered error. @@ -158,10 +168,12 @@ func (b *BufferedSeriesIterator) Err() error { return b.it.Err() } +// TODO(beorn7): Consider having different sample types for different value types. 
type sample struct { - t int64 - v float64 - h *histogram.Histogram + t int64 + v float64 + h *histogram.Histogram + fh *histogram.FloatHistogram } func (s sample) T() int64 { @@ -176,10 +188,24 @@ func (s sample) H() *histogram.Histogram { return s.h } +func (s sample) FH() *histogram.FloatHistogram { + return s.fh +} + +func (s sample) Type() chunkenc.ValueType { + switch { + case s.h != nil: + return chunkenc.ValHistogram + case s.fh != nil: + return chunkenc.ValFloatHistogram + default: + return chunkenc.ValFloat + } +} + type sampleRing struct { delta int64 - enc chunkenc.Encoding buf []sample // lookback buffer i int // position of most recent element in ring buffer f int // position of first element in ring buffer @@ -188,8 +214,8 @@ type sampleRing struct { it sampleRingIterator } -func newSampleRing(delta int64, sz int, enc chunkenc.Encoding) *sampleRing { - r := &sampleRing{delta: delta, buf: make([]sample, sz), enc: enc} +func newSampleRing(delta int64, sz int) *sampleRing { + r := &sampleRing{delta: delta, buf: make([]sample, sz)} r.reset() return r @@ -213,13 +239,24 @@ type sampleRingIterator struct { i int } -func (it *sampleRingIterator) Next() bool { +func (it *sampleRingIterator) Next() chunkenc.ValueType { it.i++ - return it.i < it.r.l + if it.i >= it.r.l { + return chunkenc.ValNone + } + s := it.r.at(it.i) + switch { + case s.h != nil: + return chunkenc.ValHistogram + case s.fh != nil: + return chunkenc.ValFloatHistogram + default: + return chunkenc.ValFloat + } } -func (it *sampleRingIterator) Seek(int64) bool { - return false +func (it *sampleRingIterator) Seek(int64) chunkenc.ValueType { + return chunkenc.ValNone } func (it *sampleRingIterator) Err() error { @@ -227,30 +264,29 @@ func (it *sampleRingIterator) Err() error { } func (it *sampleRingIterator) At() (int64, float64) { - return it.r.at(it.i) -} - -func (it *sampleRingIterator) AtHistogram() (int64, *histogram.Histogram) { - return it.r.atHistogram(it.i) -} - -func (it 
*sampleRingIterator) ChunkEncoding() chunkenc.Encoding { - return it.r.enc -} - -func (r *sampleRing) at(i int) (int64, float64) { - j := (r.f + i) % len(r.buf) - s := r.buf[j] + s := it.r.at(it.i) return s.t, s.v } -func (r *sampleRing) atHistogram(i int) (int64, *histogram.Histogram) { - j := (r.f + i) % len(r.buf) - s := r.buf[j] +func (it *sampleRingIterator) AtHistogram() (int64, *histogram.Histogram) { + s := it.r.at(it.i) return s.t, s.h } -func (r *sampleRing) atSample(i int) sample { +func (it *sampleRingIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + s := it.r.at(it.i) + if s.fh == nil { + return s.t, s.h.ToFloat() + } + return s.t, s.fh +} + +func (it *sampleRingIterator) AtT() int64 { + s := it.r.at(it.i) + return s.t +} + +func (r *sampleRing) at(i int) sample { j := (r.f + i) % len(r.buf) return r.buf[j] } @@ -320,7 +356,7 @@ func (r *sampleRing) nthLast(n int) (sample, bool) { if n > r.l { return sample{}, false } - return r.atSample(r.l - n), true + return r.at(r.l - n), true } func (r *sampleRing) samples() []sample { diff --git a/storage/buffer_test.go b/storage/buffer_test.go index a108749030..1bcc820583 100644 --- a/storage/buffer_test.go +++ b/storage/buffer_test.go @@ -56,7 +56,7 @@ func TestSampleRing(t *testing.T) { }, } for _, c := range cases { - r := newSampleRing(c.delta, c.size, chunkenc.EncNone) + r := newSampleRing(c.delta, c.size) input := []sample{} for _, t := range c.input { @@ -95,7 +95,7 @@ func TestBufferedSeriesIterator(t *testing.T) { bufferEq := func(exp []sample) { var b []sample bit := it.Buffer() - for bit.Next() { + for bit.Next() == chunkenc.ValFloat { t, v := bit.At() b = append(b, sample{t: t, v: v}) } @@ -124,34 +124,34 @@ func TestBufferedSeriesIterator(t *testing.T) { sample{t: 101, v: 10}, }), 2) - require.True(t, it.Seek(-123), "seek failed") + require.Equal(t, chunkenc.ValFloat, it.Seek(-123), "seek failed") sampleEq(1, 2) prevSampleEq(0, 0, false) bufferEq(nil) - require.True(t, it.Next(), 
"next failed") + require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") sampleEq(2, 3) prevSampleEq(1, 2, true) bufferEq([]sample{{t: 1, v: 2}}) - require.True(t, it.Next(), "next failed") - require.True(t, it.Next(), "next failed") - require.True(t, it.Next(), "next failed") + require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") + require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") + require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") sampleEq(5, 6) prevSampleEq(4, 5, true) bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}}) - require.True(t, it.Seek(5), "seek failed") + require.Equal(t, chunkenc.ValFloat, it.Seek(5), "seek failed") sampleEq(5, 6) prevSampleEq(4, 5, true) bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}}) - require.True(t, it.Seek(101), "seek failed") + require.Equal(t, chunkenc.ValFloat, it.Seek(101), "seek failed") sampleEq(101, 10) prevSampleEq(100, 9, true) bufferEq([]sample{{t: 99, v: 8}, {t: 100, v: 9}}) - require.False(t, it.Next(), "next succeeded unexpectedly") + require.Equal(t, chunkenc.ValNone, it.Next(), "next succeeded unexpectedly") } // At() should not be called once Next() returns false. 
@@ -159,14 +159,19 @@ func TestBufferedSeriesIteratorNoBadAt(t *testing.T) { done := false m := &mockSeriesIterator{ - seek: func(int64) bool { return false }, + seek: func(int64) chunkenc.ValueType { return chunkenc.ValNone }, at: func() (int64, float64) { require.False(t, done, "unexpectedly done") done = true return 0, 0 }, - next: func() bool { return !done }, - err: func() error { return nil }, + next: func() chunkenc.ValueType { + if done { + return chunkenc.ValNone + } + return chunkenc.ValFloat + }, + err: func() error { return nil }, } it := NewBufferIterator(m, 60) @@ -182,30 +187,35 @@ func BenchmarkBufferedSeriesIterator(b *testing.B) { b.ReportAllocs() b.ResetTimer() - for it.Next() { + for it.Next() != chunkenc.ValNone { // scan everything } require.NoError(b, it.Err()) } type mockSeriesIterator struct { - seek func(int64) bool + seek func(int64) chunkenc.ValueType at func() (int64, float64) - next func() bool + next func() chunkenc.ValueType err func() error } -func (m *mockSeriesIterator) Seek(t int64) bool { return m.seek(t) } -func (m *mockSeriesIterator) At() (int64, float64) { return m.at() } +func (m *mockSeriesIterator) Seek(t int64) chunkenc.ValueType { return m.seek(t) } +func (m *mockSeriesIterator) At() (int64, float64) { return m.at() } +func (m *mockSeriesIterator) Next() chunkenc.ValueType { return m.next() } +func (m *mockSeriesIterator) Err() error { return m.err() } + func (m *mockSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { - return 0, nil + return 0, nil // Not really mocked. } -func (m *mockSeriesIterator) ChunkEncoding() chunkenc.Encoding { - return chunkenc.EncXOR +func (m *mockSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + return 0, nil // Not really mocked. +} + +func (m *mockSeriesIterator) AtT() int64 { + return 0 // Not really mocked. 
} -func (m *mockSeriesIterator) Next() bool { return m.next() } -func (m *mockSeriesIterator) Err() error { return m.err() } type fakeSeriesIterator struct { nsamples int64 @@ -225,18 +235,28 @@ func (it *fakeSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { return it.idx * it.step, &histogram.Histogram{} // Value doesn't matter. } -func (it *fakeSeriesIterator) ChunkEncoding() chunkenc.Encoding { - return chunkenc.EncXOR +func (it *fakeSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + return it.idx * it.step, &histogram.FloatHistogram{} // Value doesn't matter. } -func (it *fakeSeriesIterator) Next() bool { +func (it *fakeSeriesIterator) AtT() int64 { + return it.idx * it.step +} + +func (it *fakeSeriesIterator) Next() chunkenc.ValueType { it.idx++ - return it.idx < it.nsamples + if it.idx >= it.nsamples { + return chunkenc.ValNone + } + return chunkenc.ValFloat } -func (it *fakeSeriesIterator) Seek(t int64) bool { +func (it *fakeSeriesIterator) Seek(t int64) chunkenc.ValueType { it.idx = t / it.step - return it.idx < it.nsamples + if it.idx >= it.nsamples { + return chunkenc.ValNone + } + return chunkenc.ValFloat } func (it *fakeSeriesIterator) Err() error { return nil } diff --git a/storage/fanout_test.go b/storage/fanout_test.go index cc228e6666..ee6623397b 100644 --- a/storage/fanout_test.go +++ b/storage/fanout_test.go @@ -23,6 +23,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/util/teststorage" ) @@ -90,7 +91,7 @@ func TestFanout_SelectSorted(t *testing.T) { seriesLabels := series.Labels() labelsResult = seriesLabels iterator := series.Iterator() - for iterator.Next() { + for iterator.Next() == chunkenc.ValFloat { timestamp, value := iterator.At() result[timestamp] = value } @@ -116,7 +117,7 @@ func TestFanout_SelectSorted(t *testing.T) { seriesLabels := series.Labels() labelsResult 
= seriesLabels iterator := series.Iterator() - for iterator.Next() { + for iterator.Next() == chunkenc.ValFloat { timestamp, value := iterator.At() result[timestamp] = value } diff --git a/storage/memoized_iterator.go b/storage/memoized_iterator.go index 7701238cd8..a4001cde24 100644 --- a/storage/memoized_iterator.go +++ b/storage/memoized_iterator.go @@ -20,27 +20,23 @@ import ( "github.com/prometheus/prometheus/tsdb/chunkenc" ) -// ValueType defines the type of a value in the storage. -type ValueType int - -const ( - ValNone ValueType = iota - ValFloat - ValHistogram -) - // MemoizedSeriesIterator wraps an iterator with a buffer to look back the previous element. type MemoizedSeriesIterator struct { it chunkenc.Iterator delta int64 lastTime int64 - valueType ValueType + valueType chunkenc.ValueType // Keep track of the previously returned value. - prevTime int64 - prevValue float64 - prevHistogram *histogram.Histogram + prevTime int64 + prevValue float64 + prevHistogram *histogram.Histogram + prevFloatHistogram *histogram.FloatHistogram + // TODO(beorn7): MemoizedSeriesIterator is currently only used by the + // PromQL engine, which only works with FloatHistograms. For better + // performance, we could change MemoizedSeriesIterator to also only + // handle FloatHistograms. } // NewMemoizedEmptyIterator is like NewMemoizedIterator but it's initialised with an empty iterator. @@ -65,25 +61,20 @@ func (b *MemoizedSeriesIterator) Reset(it chunkenc.Iterator) { b.it = it b.lastTime = math.MinInt64 b.prevTime = math.MinInt64 - it.Next() - if it.ChunkEncoding() == chunkenc.EncHistogram { - b.valueType = ValHistogram - } else { - b.valueType = ValFloat - } + b.valueType = it.Next() } // PeekPrev returns the previous element of the iterator. If there is none buffered, // ok is false. 
-func (b *MemoizedSeriesIterator) PeekPrev() (t int64, v float64, h *histogram.Histogram, ok bool) { +func (b *MemoizedSeriesIterator) PeekPrev() (t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram, ok bool) { if b.prevTime == math.MinInt64 { - return 0, 0, nil, false + return 0, 0, nil, nil, false } - return b.prevTime, b.prevValue, b.prevHistogram, true + return b.prevTime, b.prevValue, b.prevHistogram, b.prevFloatHistogram, true } // Seek advances the iterator to the element at time t or greater. -func (b *MemoizedSeriesIterator) Seek(t int64) ValueType { +func (b *MemoizedSeriesIterator) Seek(t int64) chunkenc.ValueType { t0 := t - b.delta if t0 > b.lastTime { @@ -91,59 +82,47 @@ func (b *MemoizedSeriesIterator) Seek(t int64) ValueType { // more than the delta. b.prevTime = math.MinInt64 - ok := b.it.Seek(t0) - if !ok { - b.valueType = ValNone - return ValNone - } - if b.it.ChunkEncoding() == chunkenc.EncHistogram { - b.valueType = ValHistogram - b.lastTime, _ = b.it.AtHistogram() - } else { - b.valueType = ValFloat - b.lastTime, _ = b.it.At() + b.valueType = b.it.Seek(t0) + if b.valueType == chunkenc.ValNone { + return chunkenc.ValNone } + b.lastTime = b.it.AtT() } - if b.lastTime >= t { return b.valueType } - for b.Next() != ValNone { + for b.Next() != chunkenc.ValNone { if b.lastTime >= t { return b.valueType } } - return ValNone + return chunkenc.ValNone } // Next advances the iterator to the next element. -func (b *MemoizedSeriesIterator) Next() ValueType { - if b.valueType == ValNone { - return ValNone - } - +func (b *MemoizedSeriesIterator) Next() chunkenc.ValueType { // Keep track of the previous element. 
- if b.it.ChunkEncoding() == chunkenc.EncHistogram { - b.prevTime, b.prevHistogram = b.it.AtHistogram() - b.prevValue = 0 - } else { + switch b.valueType { + case chunkenc.ValNone: + return chunkenc.ValNone + case chunkenc.ValFloat: b.prevTime, b.prevValue = b.it.At() b.prevHistogram = nil + b.prevFloatHistogram = nil + case chunkenc.ValHistogram: + b.prevValue = 0 + b.prevTime, b.prevHistogram = b.it.AtHistogram() + _, b.prevFloatHistogram = b.it.AtFloatHistogram() + case chunkenc.ValFloatHistogram: + b.prevValue = 0 + b.prevHistogram = nil + b.prevTime, b.prevFloatHistogram = b.it.AtFloatHistogram() } - ok := b.it.Next() - if ok { - if b.it.ChunkEncoding() == chunkenc.EncHistogram { - b.lastTime, _ = b.it.AtHistogram() - b.valueType = ValHistogram - - } else { - b.lastTime, _ = b.it.At() - b.valueType = ValFloat - } - } else { - b.valueType = ValNone + b.valueType = b.it.Next() + if b.valueType != chunkenc.ValNone { + b.lastTime = b.it.AtT() } return b.valueType } @@ -158,6 +137,11 @@ func (b *MemoizedSeriesIterator) HistogramValues() (int64, *histogram.Histogram) return b.it.AtHistogram() } +// FloatHistogramValues returns the current float-histogram element of the iterator. +func (b *MemoizedSeriesIterator) FloatHistogramValues() (int64, *histogram.FloatHistogram) { + return b.it.AtFloatHistogram() +} + // Err returns the last encountered error. 
func (b *MemoizedSeriesIterator) Err() error { return b.it.Err() diff --git a/storage/memoized_iterator_test.go b/storage/memoized_iterator_test.go index ee5155199f..22c7bbdfcb 100644 --- a/storage/memoized_iterator_test.go +++ b/storage/memoized_iterator_test.go @@ -17,6 +17,8 @@ import ( "testing" "github.com/stretchr/testify/require" + + "github.com/prometheus/prometheus/tsdb/chunkenc" ) func TestMemoizedSeriesIterator(t *testing.T) { @@ -29,7 +31,7 @@ func TestMemoizedSeriesIterator(t *testing.T) { require.Equal(t, ev, v, "value mismatch") } prevSampleEq := func(ets int64, ev float64, eok bool) { - ts, v, _, ok := it.PeekPrev() + ts, v, _, _, ok := it.PeekPrev() require.Equal(t, eok, ok, "exist mismatch") require.Equal(t, ets, ts, "timestamp mismatch") require.Equal(t, ev, v, "value mismatch") @@ -46,29 +48,29 @@ func TestMemoizedSeriesIterator(t *testing.T) { sample{t: 101, v: 10}, }), 2) - require.Equal(t, it.Seek(-123), ValFloat, "seek failed") + require.Equal(t, it.Seek(-123), chunkenc.ValFloat, "seek failed") sampleEq(1, 2) prevSampleEq(0, 0, false) - require.Equal(t, it.Next(), ValFloat, "next failed") + require.Equal(t, it.Next(), chunkenc.ValFloat, "next failed") sampleEq(2, 3) prevSampleEq(1, 2, true) - require.Equal(t, it.Next(), ValFloat, "next failed") - require.Equal(t, it.Next(), ValFloat, "next failed") - require.Equal(t, it.Next(), ValFloat, "next failed") + require.Equal(t, it.Next(), chunkenc.ValFloat, "next failed") + require.Equal(t, it.Next(), chunkenc.ValFloat, "next failed") + require.Equal(t, it.Next(), chunkenc.ValFloat, "next failed") sampleEq(5, 6) prevSampleEq(4, 5, true) - require.Equal(t, it.Seek(5), ValFloat, "seek failed") + require.Equal(t, it.Seek(5), chunkenc.ValFloat, "seek failed") sampleEq(5, 6) prevSampleEq(4, 5, true) - require.Equal(t, it.Seek(101), ValFloat, "seek failed") + require.Equal(t, it.Seek(101), chunkenc.ValFloat, "seek failed") sampleEq(101, 10) prevSampleEq(100, 9, true) - require.Equal(t, it.Next(), 
ValNone, "next succeeded unexpectedly") + require.Equal(t, it.Next(), chunkenc.ValNone, "next succeeded unexpectedly") } func BenchmarkMemoizedSeriesIterator(b *testing.B) { @@ -79,7 +81,7 @@ func BenchmarkMemoizedSeriesIterator(b *testing.B) { b.ReportAllocs() b.ResetTimer() - for it.Next() != ValNone { + for it.Next() != chunkenc.ValNone { // scan everything } require.NoError(b, it.Err()) diff --git a/storage/merge.go b/storage/merge.go index 464e77a175..a745a9c4d6 100644 --- a/storage/merge.go +++ b/storage/merge.go @@ -443,7 +443,7 @@ type chainSampleIterator struct { h samplesIteratorHeap curr chunkenc.Iterator - lastt int64 + lastT int64 } // NewChainSampleIterator returns a single iterator that iterates over the samples from the given iterators in a sorted @@ -453,77 +453,82 @@ func NewChainSampleIterator(iterators []chunkenc.Iterator) chunkenc.Iterator { return &chainSampleIterator{ iterators: iterators, h: nil, - lastt: math.MinInt64, + lastT: math.MinInt64, } } -func (c *chainSampleIterator) Seek(t int64) bool { +func (c *chainSampleIterator) Seek(t int64) chunkenc.ValueType { + // No-op check. 
+ if c.curr != nil && c.lastT >= t { + return c.curr.Seek(c.lastT) + } c.h = samplesIteratorHeap{} for _, iter := range c.iterators { - if iter.Seek(t) { + if iter.Seek(t) != chunkenc.ValNone { heap.Push(&c.h, iter) } } if len(c.h) > 0 { c.curr = heap.Pop(&c.h).(chunkenc.Iterator) - if c.curr.ChunkEncoding() == chunkenc.EncHistogram { - c.lastt, _ = c.curr.AtHistogram() - } else { - c.lastt, _ = c.curr.At() - } - return true + c.lastT = c.curr.AtT() + return c.curr.Seek(c.lastT) } c.curr = nil - return false + return chunkenc.ValNone } func (c *chainSampleIterator) At() (t int64, v float64) { if c.curr == nil { - panic("chainSampleIterator.At() called before first .Next() or after .Next() returned false.") + panic("chainSampleIterator.At called before first .Next or after .Next returned false.") } return c.curr.At() } func (c *chainSampleIterator) AtHistogram() (int64, *histogram.Histogram) { if c.curr == nil { - panic("chainSampleIterator.AtHistogram() called before first .Next() or after .Next() returned false.") + panic("chainSampleIterator.AtHistogram called before first .Next or after .Next returned false.") } return c.curr.AtHistogram() } -func (c *chainSampleIterator) ChunkEncoding() chunkenc.Encoding { +func (c *chainSampleIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { if c.curr == nil { - panic("chainSampleIterator.ChunkEncoding() called before first .Next() or after .Next() returned false.") + panic("chainSampleIterator.AtFloatHistogram called before first .Next or after .Next returned false.") } - return c.curr.ChunkEncoding() + return c.curr.AtFloatHistogram() } -func (c *chainSampleIterator) Next() bool { +func (c *chainSampleIterator) AtT() int64 { + if c.curr == nil { + panic("chainSampleIterator.AtT called before first .Next or after .Next returned false.") + } + return c.curr.AtT() +} + +func (c *chainSampleIterator) Next() chunkenc.ValueType { if c.h == nil { c.h = samplesIteratorHeap{} // We call c.curr.Next() as the first 
thing below. // So, we don't call Next() on it here. c.curr = c.iterators[0] for _, iter := range c.iterators[1:] { - if iter.Next() { + if iter.Next() != chunkenc.ValNone { heap.Push(&c.h, iter) } } } if c.curr == nil { - return false + return chunkenc.ValNone } - var currt int64 + var currT int64 + var currValueType chunkenc.ValueType for { - if c.curr.Next() { - if c.curr.ChunkEncoding() == chunkenc.EncHistogram { - currt, _ = c.curr.AtHistogram() - } else { - currt, _ = c.curr.At() - } - if currt == c.lastt { + currValueType = c.curr.Next() + if currValueType != chunkenc.ValNone { + currT = c.curr.AtT() + if currT == c.lastT { // Ignoring sample for the same timestamp. continue } @@ -534,13 +539,8 @@ func (c *chainSampleIterator) Next() bool { } // Check current iterator with the top of the heap. - var nextt int64 - if c.h[0].ChunkEncoding() == chunkenc.EncHistogram { - nextt, _ = c.h[0].AtHistogram() - } else { - nextt, _ = c.h[0].At() - } - if currt < nextt { + nextT := c.h[0].AtT() + if currT < nextT { // Current iterator has smaller timestamp than the heap. break } @@ -549,22 +549,19 @@ func (c *chainSampleIterator) Next() bool { } else if len(c.h) == 0 { // No iterator left to iterate. 
c.curr = nil - return false + return chunkenc.ValNone } c.curr = heap.Pop(&c.h).(chunkenc.Iterator) - if c.curr.ChunkEncoding() == chunkenc.EncHistogram { - currt, _ = c.curr.AtHistogram() - } else { - currt, _ = c.curr.At() - } - if currt != c.lastt { + currT = c.curr.AtT() + currValueType = c.curr.Seek(currT) + if currT != c.lastT { break } } - c.lastt = currt - return true + c.lastT = currT + return currValueType } func (c *chainSampleIterator) Err() error { @@ -581,18 +578,7 @@ func (h samplesIteratorHeap) Len() int { return len(h) } func (h samplesIteratorHeap) Swap(i, j int) { h[i], h[j] = h[j], h[i] } func (h samplesIteratorHeap) Less(i, j int) bool { - var at, bt int64 - if h[i].ChunkEncoding() == chunkenc.EncHistogram { - at, _ = h[i].AtHistogram() - } else { - at, _ = h[i].At() - } - if h[j].ChunkEncoding() == chunkenc.EncHistogram { - bt, _ = h[j].AtHistogram() - } else { - bt, _ = h[j].At() - } - return at < bt + return h[i].AtT() < h[j].AtT() } func (h *samplesIteratorHeap) Push(x interface{}) { diff --git a/storage/merge_test.go b/storage/merge_test.go index 61844a5663..7e232bc227 100644 --- a/storage/merge_test.go +++ b/storage/merge_test.go @@ -62,116 +62,116 @@ func TestMergeQuerierWithChainMerger(t *testing.T) { { name: "one querier, two series", querierSeries: [][]Series{{ - NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), + NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), }}, expected: NewMockSeriesSet( - NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 
3, nil}}), - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), + NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), ), }, { name: "two queriers, one different series each", querierSeries: [][]Series{{ - NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), + NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), }, { - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), }}, expected: NewMockSeriesSet( - NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), + NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), ), }, { name: "two time unsorted queriers, two series each", querierSeries: [][]Series{{ - NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}, sample{6, 6, nil}}), - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), + NewListSeries(labels.FromStrings("bar", "baz"), 
[]tsdbutil.Sample{sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), }, { - NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}), + NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}), }}, expected: NewMockSeriesSet( NewListSeries( labels.FromStrings("bar", "baz"), - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}, ), NewListSeries( labels.FromStrings("foo", "bar"), - []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}}, + []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}, ), ), }, { name: "five queriers, only two queriers have two time unsorted series each", querierSeries: [][]Series{{}, {}, { - NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}, sample{6, 6, nil}}), - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), + NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), }, { - 
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}), + NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}), }, {}}, expected: NewMockSeriesSet( NewListSeries( labels.FromStrings("bar", "baz"), - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}, ), NewListSeries( labels.FromStrings("foo", "bar"), - []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}}, + []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}, ), ), }, { name: "two queriers, only two queriers have two time unsorted series each, with 3 noop and one nil querier together", querierSeries: [][]Series{{}, {}, { - NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}, sample{6, 6, nil}}), - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), + NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), }, { - NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), - NewListSeries(labels.FromStrings("foo", "bar"), 
[]tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}), + NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}), }, {}}, extraQueriers: []Querier{NoopQuerier(), NoopQuerier(), nil, NoopQuerier()}, expected: NewMockSeriesSet( NewListSeries( labels.FromStrings("bar", "baz"), - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}, ), NewListSeries( labels.FromStrings("foo", "bar"), - []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}}, + []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}, ), ), }, { name: "two queriers, with two series, one is overlapping", querierSeries: [][]Series{{}, {}, { - NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 21, nil}, sample{3, 31, nil}, sample{5, 5, nil}, sample{6, 6, nil}}), - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), + NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 21, nil, nil}, sample{3, 31, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), }, { - NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 22, nil}, sample{3, 32, nil}}), - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}, sample{4, 4, nil}}), + 
NewListSeries(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 22, nil, nil}, sample{3, 32, nil, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}), }, {}}, expected: NewMockSeriesSet( NewListSeries( labels.FromStrings("bar", "baz"), - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 21, nil}, sample{3, 31, nil}, sample{5, 5, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 21, nil, nil}, sample{3, 31, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}, ), NewListSeries( labels.FromStrings("foo", "bar"), - []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}}, + []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}}, ), ), }, { name: "two queries, one with NaN samples series", querierSeries: [][]Series{{ - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}}), }, { - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil, nil}}), }}, expected: NewMockSeriesSet( - NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}, sample{1, 1, nil}}), + NewListSeries(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}, sample{1, 1, nil, nil}}), ), }, } { @@ -245,108 +245,108 @@ func TestMergeChunkQuerierWithNoVerticalChunkSeriesMerger(t *testing.T) { { name: "one querier, two series", chkQuerierSeries: [][]ChunkSeries{{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 
3, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), }}, expected: NewMockChunkSeriesSet( - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), ), }, { name: "two secondaries, one different series each", chkQuerierSeries: [][]ChunkSeries{{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), }, { - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, 
[]tsdbutil.Sample{sample{2, 2, nil, nil}}), }}, expected: NewMockChunkSeriesSet( - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), ), }, { name: "two secondaries, two not in time order series each", chkQuerierSeries: [][]ChunkSeries{{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{6, 6, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), }, { - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{4, 4, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), + 
NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}}), }}, expected: NewMockChunkSeriesSet( NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, - []tsdbutil.Sample{sample{3, 3, nil}}, - []tsdbutil.Sample{sample{5, 5, nil}}, - []tsdbutil.Sample{sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, + []tsdbutil.Sample{sample{3, 3, nil, nil}}, + []tsdbutil.Sample{sample{5, 5, nil, nil}}, + []tsdbutil.Sample{sample{6, 6, nil, nil}}, ), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), - []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, - []tsdbutil.Sample{sample{2, 2, nil}}, - []tsdbutil.Sample{sample{3, 3, nil}}, - []tsdbutil.Sample{sample{4, 4, nil}}, + []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, + []tsdbutil.Sample{sample{2, 2, nil, nil}}, + []tsdbutil.Sample{sample{3, 3, nil, nil}}, + []tsdbutil.Sample{sample{4, 4, nil, nil}}, ), ), }, { name: "five secondaries, only two have two not in time order series each", chkQuerierSeries: [][]ChunkSeries{{}, {}, { - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{6, 6, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), }, { - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, 
[]tsdbutil.Sample{sample{3, 3, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{4, 4, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}}), }, {}}, expected: NewMockChunkSeriesSet( NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, - []tsdbutil.Sample{sample{3, 3, nil}}, - []tsdbutil.Sample{sample{5, 5, nil}}, - []tsdbutil.Sample{sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, + []tsdbutil.Sample{sample{3, 3, nil, nil}}, + []tsdbutil.Sample{sample{5, 5, nil, nil}}, + []tsdbutil.Sample{sample{6, 6, nil, nil}}, ), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), - []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, - []tsdbutil.Sample{sample{2, 2, nil}}, - []tsdbutil.Sample{sample{3, 3, nil}}, - []tsdbutil.Sample{sample{4, 4, nil}}, + []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, + []tsdbutil.Sample{sample{2, 2, nil, nil}}, + []tsdbutil.Sample{sample{3, 3, nil, nil}}, + []tsdbutil.Sample{sample{4, 4, nil, nil}}, ), ), }, { name: "two secondaries, with two not in time order series each, with 3 noop queries and one nil together", chkQuerierSeries: [][]ChunkSeries{{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{5, 5, nil}}, []tsdbutil.Sample{sample{6, 6, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, []tsdbutil.Sample{sample{2, 2, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), 
[]tsdbutil.Sample{sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{6, 6, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, []tsdbutil.Sample{sample{2, 2, nil, nil}}), }, { - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil}}, []tsdbutil.Sample{sample{4, 4, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{4, 4, nil, nil}}), }}, extraQueriers: []ChunkQuerier{NoopChunkedQuerier(), NoopChunkedQuerier(), nil, NoopChunkedQuerier()}, expected: NewMockChunkSeriesSet( NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, - []tsdbutil.Sample{sample{3, 3, nil}}, - []tsdbutil.Sample{sample{5, 5, nil}}, - []tsdbutil.Sample{sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, + []tsdbutil.Sample{sample{3, 3, nil, nil}}, + []tsdbutil.Sample{sample{5, 5, nil, nil}}, + []tsdbutil.Sample{sample{6, 6, nil, nil}}, ), NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), - []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, - []tsdbutil.Sample{sample{2, 2, nil}}, - []tsdbutil.Sample{sample{3, 3, nil}}, - []tsdbutil.Sample{sample{4, 4, nil}}, + []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, + []tsdbutil.Sample{sample{2, 2, nil, nil}}, + []tsdbutil.Sample{sample{3, 3, nil, nil}}, + []tsdbutil.Sample{sample{4, 4, nil, nil}}, ), ), }, { name: "two queries, one with NaN samples 
series", chkQuerierSeries: [][]ChunkSeries{{ - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}}), }, { - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{1, 1, nil, nil}}), }}, expected: NewMockChunkSeriesSet( - NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil}}, []tsdbutil.Sample{sample{1, 1, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("foo", "bar"), []tsdbutil.Sample{sample{0, math.NaN(), nil, nil}}, []tsdbutil.Sample{sample{1, 1, nil, nil}}), ), }, } { @@ -399,9 +399,9 @@ func TestCompactingChunkSeriesMerger(t *testing.T) { { name: "single series", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}}), }, { name: "two empty series", @@ -414,55 +414,55 @@ func TestCompactingChunkSeriesMerger(t *testing.T) { { name: "two non overlapping", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, 
sample{5, 5, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, sample{5, 5, nil}}, []tsdbutil.Sample{sample{7, 7, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}), }, { name: "two overlapping", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, sample{8, 8, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{8, 8, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{7, 7, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}), }, - expected: 
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, []tsdbutil.Sample{sample{3, 3, nil}, sample{7, 7, nil}, sample{8, 8, nil}, sample{9, 9, nil}}, []tsdbutil.Sample{sample{10, 10, nil}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, []tsdbutil.Sample{sample{3, 3, nil, nil}, sample{7, 7, nil, nil}, sample{8, 8, nil, nil}, sample{9, 9, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}}), }, { name: "two duplicated", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), }, { name: "three overlapping", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{6, 6, nil}}), - 
NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil}, sample{4, 4, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{6, 6, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{4, 4, nil, nil}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}, sample{6, 6, nil}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil}, sample{6, 6, nil, nil}}), }, { name: "three in chained overlap", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 5, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{4, 4, nil}, sample{6, 66, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{6, 6, nil}, sample{10, 10, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 5, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{4, 4, nil, nil}, sample{6, 66, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{6, 6, nil, nil}, 
sample{10, 10, nil, nil}}), }, - expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}, sample{6, 66, nil}, sample{10, 10, nil}}), + expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil}, sample{6, 66, nil, nil}, sample{10, 10, nil, nil}}), }, { name: "three in chained overlap complex", input: []ChunkSeries{ - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil}, sample{5, 5, nil}}, []tsdbutil.Sample{sample{10, 10, nil}, sample{15, 15, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil}, sample{20, 20, nil}}, []tsdbutil.Sample{sample{25, 25, nil}, sample{30, 30, nil}}), - NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{18, 18, nil}, sample{26, 26, nil}}, []tsdbutil.Sample{sample{31, 31, nil}, sample{35, 35, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{5, 5, nil, nil}}, []tsdbutil.Sample{sample{10, 10, nil, nil}, sample{15, 15, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{20, 20, nil, nil}}, []tsdbutil.Sample{sample{25, 25, nil, nil}, sample{30, 30, nil, nil}}), + NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), []tsdbutil.Sample{sample{18, 18, nil, nil}, sample{26, 26, nil, nil}}, []tsdbutil.Sample{sample{31, 31, nil, nil}, sample{35, 35, nil, nil}}), }, expected: NewListChunkSeriesFromSamples(labels.FromStrings("bar", "baz"), - []tsdbutil.Sample{sample{0, 0, nil}, sample{2, 2, nil}, sample{5, 5, nil}, sample{10, 10, nil}, sample{15, 15, nil}, sample{18, 18, nil}, 
sample{20, 20, nil}, sample{25, 25, nil}, sample{26, 26, nil}, sample{30, 30, nil}}, - []tsdbutil.Sample{sample{31, 31, nil}, sample{35, 35, nil}}, + []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{2, 2, nil, nil}, sample{5, 5, nil, nil}, sample{10, 10, nil, nil}, sample{15, 15, nil, nil}, sample{18, 18, nil, nil}, sample{20, 20, nil, nil}, sample{25, 25, nil, nil}, sample{26, 26, nil, nil}, sample{30, 30, nil, nil}}, + []tsdbutil.Sample{sample{31, 31, nil, nil}, sample{35, 35, nil, nil}}, ), }, { @@ -598,38 +598,38 @@ func TestChainSampleIterator(t *testing.T) { }{ { input: []chunkenc.Iterator{ - NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}), + NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}), }, - expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}}, + expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}, }, { input: []chunkenc.Iterator{ - NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}), - NewListSeriesIterator(samples{sample{2, 2, nil}, sample{3, 3, nil}}), + NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}), + NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), }, - expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, + expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, }, { input: []chunkenc.Iterator{ - NewListSeriesIterator(samples{sample{0, 0, nil}, sample{3, 3, nil}}), - NewListSeriesIterator(samples{sample{1, 1, nil}, sample{4, 4, nil}}), - NewListSeriesIterator(samples{sample{2, 2, nil}, sample{5, 5, nil}}), + NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{3, 3, nil, nil}}), + NewListSeriesIterator(samples{sample{1, 1, nil, nil}, sample{4, 4, nil, nil}}), + NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{5, 5, nil, nil}}), }, expected: 
[]tsdbutil.Sample{ - sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}, + sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil}, }, }, // Overlap. { input: []chunkenc.Iterator{ - NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}), - NewListSeriesIterator(samples{sample{0, 0, nil}, sample{2, 2, nil}}), - NewListSeriesIterator(samples{sample{2, 2, nil}, sample{3, 3, nil}}), + NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}), + NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{2, 2, nil, nil}}), + NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), NewListSeriesIterator(samples{}), NewListSeriesIterator(samples{}), NewListSeriesIterator(samples{}), }, - expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, + expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, }, } { merged := NewChainSampleIterator(tc.input) @@ -647,42 +647,42 @@ func TestChainSampleIteratorSeek(t *testing.T) { }{ { input: []chunkenc.Iterator{ - NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), + NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), }, seek: 1, - expected: []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}}, + expected: []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}, }, { input: []chunkenc.Iterator{ - NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}}), - NewListSeriesIterator(samples{sample{2, 2, nil}, sample{3, 3, nil}}), + NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}}), + NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), }, seek: 2, - 
expected: []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}}, + expected: []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, }, { input: []chunkenc.Iterator{ - NewListSeriesIterator(samples{sample{0, 0, nil}, sample{3, 3, nil}}), - NewListSeriesIterator(samples{sample{1, 1, nil}, sample{4, 4, nil}}), - NewListSeriesIterator(samples{sample{2, 2, nil}, sample{5, 5, nil}}), + NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{3, 3, nil, nil}}), + NewListSeriesIterator(samples{sample{1, 1, nil, nil}, sample{4, 4, nil, nil}}), + NewListSeriesIterator(samples{sample{2, 2, nil, nil}, sample{5, 5, nil, nil}}), }, seek: 2, - expected: []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{4, 4, nil}, sample{5, 5, nil}}, + expected: []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{4, 4, nil, nil}, sample{5, 5, nil, nil}}, }, { input: []chunkenc.Iterator{ - NewListSeriesIterator(samples{sample{0, 0, nil}, sample{2, 2, nil}, sample{3, 3, nil}}), - NewListSeriesIterator(samples{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}}), + NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}), + NewListSeriesIterator(samples{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}}), }, seek: 0, - expected: []tsdbutil.Sample{sample{0, 0, nil}, sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, + expected: []tsdbutil.Sample{sample{0, 0, nil, nil}, sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, }, } { merged := NewChainSampleIterator(tc.input) actual := []tsdbutil.Sample{} - if merged.Seek(tc.seek) { + if merged.Seek(tc.seek) == chunkenc.ValFloat { t, v := merged.At() - actual = append(actual, sample{t, v, nil}) + actual = append(actual, sample{t, v, nil, nil}) } s, err := ExpandSamples(merged, nil) require.NoError(t, err) diff --git a/storage/remote/codec.go b/storage/remote/codec.go index 393a03a13b..c4c374cc40 100644 --- 
a/storage/remote/codec.go +++ b/storage/remote/codec.go @@ -120,7 +120,8 @@ func ToQueryResult(ss storage.SeriesSet, sampleLimit int) (*prompb.QueryResult, iter := series.Iterator() samples := []prompb.Sample{} - for iter.Next() { + for iter.Next() == chunkenc.ValFloat { + // TODO(beorn7): Add Histogram support. numSamples++ if sampleLimit > 0 && numSamples > sampleLimit { return nil, ss.Warnings(), HTTPError{ @@ -357,14 +358,26 @@ func newConcreteSeriersIterator(series *concreteSeries) chunkenc.Iterator { } // Seek implements storage.SeriesIterator. -func (c *concreteSeriesIterator) Seek(t int64) bool { - c.cur = sort.Search(len(c.series.samples), func(n int) bool { - return c.series.samples[n].Timestamp >= t +func (c *concreteSeriesIterator) Seek(t int64) chunkenc.ValueType { + if c.cur == -1 { + c.cur = 0 + } + // No-op check. + if s := c.series.samples[c.cur]; s.Timestamp >= t { + return chunkenc.ValFloat + } + // Do binary search between current position and end. + c.cur += sort.Search(len(c.series.samples)-c.cur, func(n int) bool { + return c.series.samples[n+c.cur].Timestamp >= t }) - return c.cur < len(c.series.samples) + if c.cur < len(c.series.samples) { + return chunkenc.ValFloat + } + return chunkenc.ValNone + // TODO(beorn7): Add histogram support. } -// At implements storage.SeriesIterator. +// At implements chunkenc.Iterator. func (c *concreteSeriesIterator) At() (t int64, v float64) { s := c.series.samples[c.cur] return s.Timestamp, s.Value @@ -377,17 +390,30 @@ func (c *concreteSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { return 0, nil } -func (c *concreteSeriesIterator) ChunkEncoding() chunkenc.Encoding { - return chunkenc.EncXOR +// AtFloatHistogram always returns (0, nil) because there is no support for histogram +// values yet. +// TODO(beorn7): Fix that for histogram support in remote storage. 
+func (c *concreteSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + return 0, nil } -// Next implements storage.SeriesIterator. -func (c *concreteSeriesIterator) Next() bool { +// AtT implements chunkenc.Iterator. +func (c *concreteSeriesIterator) AtT() int64 { + s := c.series.samples[c.cur] + return s.Timestamp +} + +// Next implements chunkenc.Iterator. +func (c *concreteSeriesIterator) Next() chunkenc.ValueType { c.cur++ - return c.cur < len(c.series.samples) + if c.cur < len(c.series.samples) { + return chunkenc.ValFloat + } + return chunkenc.ValNone + // TODO(beorn7): Add histogram support. } -// Err implements storage.SeriesIterator. +// Err implements chunkenc.Iterator. func (c *concreteSeriesIterator) Err() error { return nil } diff --git a/storage/series.go b/storage/series.go index 1c72df7337..9f09e5ee6b 100644 --- a/storage/series.go +++ b/storage/series.go @@ -96,26 +96,42 @@ func (it *listSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { return s.T(), s.H() } -func (it *listSeriesIterator) ChunkEncoding() chunkenc.Encoding { - return chunkenc.EncXOR +func (it *listSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + s := it.samples.Get(it.idx) + return s.T(), s.FH() } -func (it *listSeriesIterator) Next() bool { +func (it *listSeriesIterator) AtT() int64 { + s := it.samples.Get(it.idx) + return s.T() +} + +func (it *listSeriesIterator) Next() chunkenc.ValueType { it.idx++ - return it.idx < it.samples.Len() + if it.idx >= it.samples.Len() { + return chunkenc.ValNone + } + return it.samples.Get(it.idx).Type() } -func (it *listSeriesIterator) Seek(t int64) bool { +func (it *listSeriesIterator) Seek(t int64) chunkenc.ValueType { if it.idx == -1 { it.idx = 0 } + // No-op check. + if s := it.samples.Get(it.idx); s.T() >= t { + return s.Type() + } // Do binary search between current position and end. 
- it.idx = sort.Search(it.samples.Len()-it.idx, func(i int) bool { + it.idx += sort.Search(it.samples.Len()-it.idx, func(i int) bool { s := it.samples.Get(i + it.idx) return s.T() >= t }) - return it.idx < it.samples.Len() + if it.idx >= it.samples.Len() { + return chunkenc.ValNone + } + return it.samples.Get(it.idx).Type() } func (it *listSeriesIterator) Err() error { return nil } @@ -233,6 +249,7 @@ func NewSeriesToChunkEncoder(series Series) ChunkSeries { } func (s *seriesToChunkEncoder) Iterator() chunks.Iterator { + // TODO(beorn7): Add Histogram support. chk := chunkenc.NewXORChunk() app, err := chk.Appender() if err != nil { @@ -245,7 +262,7 @@ func (s *seriesToChunkEncoder) Iterator() chunks.Iterator { i := 0 seriesIter := s.Series.Iterator() - for seriesIter.Next() { + for seriesIter.Next() == chunkenc.ValFloat { // Create a new chunk if too many samples in the current one. if i >= seriesToChunkEncoderSplit { chks = append(chks, chunks.Meta{ @@ -296,27 +313,34 @@ func (e errChunksIterator) Err() error { return e.err } // ExpandSamples iterates over all samples in the iterator, buffering all in slice. // Optionally it takes samples constructor, useful when you want to compare sample slices with different // sample implementations. if nil, sample type from this package will be used. 
-func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, v float64, h *histogram.Histogram) tsdbutil.Sample) ([]tsdbutil.Sample, error) { +func ExpandSamples(iter chunkenc.Iterator, newSampleFn func(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample) ([]tsdbutil.Sample, error) { if newSampleFn == nil { - newSampleFn = func(t int64, v float64, h *histogram.Histogram) tsdbutil.Sample { return sample{t, v, h} } + newSampleFn = func(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample { + return sample{t, v, h, fh} + } } var result []tsdbutil.Sample - for iter.Next() { - // Only after Next() returned true, it is safe to ask for the ChunkEncoding. - if iter.ChunkEncoding() == chunkenc.EncHistogram { - t, h := iter.AtHistogram() - result = append(result, newSampleFn(t, 0, h)) - } else { + for { + switch iter.Next() { + case chunkenc.ValNone: + return result, iter.Err() + case chunkenc.ValFloat: t, v := iter.At() // NaNs can't be compared normally, so substitute for another value. if math.IsNaN(v) { v = -42 } - result = append(result, newSampleFn(t, v, nil)) + result = append(result, newSampleFn(t, v, nil, nil)) + case chunkenc.ValHistogram: + t, h := iter.AtHistogram() + result = append(result, newSampleFn(t, 0, h, nil)) + case chunkenc.ValFloatHistogram: + t, fh := iter.AtFloatHistogram() + result = append(result, newSampleFn(t, 0, nil, fh)) + } } - return result, iter.Err() } // ExpandChunks iterates over all chunks in the iterator, buffering all in slice. 
diff --git a/tsdb/block_test.go b/tsdb/block_test.go index e677ce2a30..54c37ee993 100644 --- a/tsdb/block_test.go +++ b/tsdb/block_test.go @@ -32,6 +32,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/tsdb/chunkenc" "github.com/prometheus/prometheus/tsdb/chunks" "github.com/prometheus/prometheus/tsdb/fileutil" "github.com/prometheus/prometheus/tsdb/tsdbutil" @@ -179,7 +180,7 @@ func TestCorruptedChunk(t *testing.T) { require.NoError(t, os.RemoveAll(tmpdir)) }() - series := storage.NewListSeries(labels.FromStrings("a", "b"), []tsdbutil.Sample{sample{1, 1, nil}}) + series := storage.NewListSeries(labels.FromStrings("a", "b"), []tsdbutil.Sample{sample{1, 1, nil, nil}}) blockDir := createBlock(t, tmpdir, []storage.Series{series}) files, err := sequenceFiles(chunkDir(blockDir)) require.NoError(t, err) @@ -208,7 +209,7 @@ func TestCorruptedChunk(t *testing.T) { // Check chunk errors during iter time. require.True(t, set.Next()) it := set.At().Iterator() - require.Equal(t, false, it.Next()) + require.Equal(t, chunkenc.ValNone, it.Next()) require.Equal(t, tc.iterErr.Error(), it.Err().Error()) }) } @@ -226,7 +227,7 @@ func TestLabelValuesWithMatchers(t *testing.T) { seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{ {Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "tens", Value: fmt.Sprintf("value%d", i/10)}, - }, []tsdbutil.Sample{sample{100, 0, nil}})) + }, []tsdbutil.Sample{sample{100, 0, nil, nil}})) } blockDir := createBlock(t, tmpdir, seriesEntries) @@ -389,7 +390,7 @@ func BenchmarkLabelValuesWithMatchers(b *testing.B) { {Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "tens", Value: fmt.Sprintf("value%d", i/(metricCount/10))}, {Name: "ninety", Value: fmt.Sprintf("value%d", i/(metricCount/10)/9)}, // "0" for the first 90%, then "1" - }, []tsdbutil.Sample{sample{100, 0, nil}})) + }, []tsdbutil.Sample{sample{100, 0, nil, nil}})) } 
blockDir := createBlock(b, tmpdir, seriesEntries) @@ -427,13 +428,13 @@ func TestLabelNamesWithMatchers(t *testing.T) { for i := 0; i < 100; i++ { seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{ {Name: "unique", Value: fmt.Sprintf("value%d", i)}, - }, []tsdbutil.Sample{sample{100, 0, nil}})) + }, []tsdbutil.Sample{sample{100, 0, nil, nil}})) if i%10 == 0 { seriesEntries = append(seriesEntries, storage.NewListSeries(labels.Labels{ {Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "tens", Value: fmt.Sprintf("value%d", i/10)}, - }, []tsdbutil.Sample{sample{100, 0, nil}})) + }, []tsdbutil.Sample{sample{100, 0, nil, nil}})) } if i%20 == 0 { @@ -441,7 +442,7 @@ func TestLabelNamesWithMatchers(t *testing.T) { {Name: "unique", Value: fmt.Sprintf("value%d", i)}, {Name: "tens", Value: fmt.Sprintf("value%d", i/10)}, {Name: "twenties", Value: fmt.Sprintf("value%d", i/20)}, - }, []tsdbutil.Sample{sample{100, 0, nil}})) + }, []tsdbutil.Sample{sample{100, 0, nil, nil}})) } } @@ -525,7 +526,8 @@ func createHead(tb testing.TB, w *wal.WAL, series []storage.Series, chunkDir str ref := storage.SeriesRef(0) it := s.Iterator() lset := s.Labels() - for it.Next() { + for it.Next() == chunkenc.ValFloat { + // TODO(beorn7): Also treat histograms. t, v := it.At() ref, err = app.Append(ref, lset, t, v) require.NoError(tb, err) diff --git a/tsdb/chunkenc/chunk.go b/tsdb/chunkenc/chunk.go index 1eff428414..b9c90904de 100644 --- a/tsdb/chunkenc/chunk.go +++ b/tsdb/chunkenc/chunk.go @@ -25,6 +25,13 @@ import ( // Encoding is the identifier for a chunk encoding. type Encoding uint8 +// The different available chunk encodings. +const ( + EncNone Encoding = iota + EncXOR + EncHistogram +) + func (e Encoding) String() string { switch e { case EncNone: @@ -46,13 +53,6 @@ func IsValidEncoding(e Encoding) bool { return false } -// The different available chunk encodings. 
-const ( - EncNone Encoding = iota - EncXOR - EncHistogram -) - // Chunk holds a sequence of sample pairs that can be iterated over and appended to. type Chunk interface { // Bytes returns the underlying byte slice of the chunk. @@ -89,26 +89,61 @@ type Appender interface { // Iterator is a simple iterator that can only get the next value. // Iterator iterates over the samples of a time series, in timestamp-increasing order. type Iterator interface { - // Next advances the iterator by one. - // TODO(beorn7): Perhaps this should return if the next value is a float or a histogram - // to make it easier calling the right method (At vs AtHistogram)? - Next() bool - // Seek advances the iterator forward to the first sample with the timestamp equal or greater than t. - // If current sample found by previous `Next` or `Seek` operation already has this property, Seek has no effect. - // Seek returns true, if such sample exists, false otherwise. - // Iterator is exhausted when the Seek returns false. - Seek(t int64) bool - // At returns the current timestamp/value pair. - // Before the iterator has advanced At behaviour is unspecified. + // Next advances the iterator by one and returns the type of the value + // at the new position (or ValNone if the iterator is exhausted). + Next() ValueType + // Seek advances the iterator forward to the first sample with a + // timestamp equal or greater than t. If the current sample found by a + // previous `Next` or `Seek` operation already has this property, Seek + // has no effect. If a sample has been found, Seek returns the type of + // its value. Otherwise, it returns ValNone, after with the iterator is + // exhausted. + Seek(t int64) ValueType + // At returns the current timestamp/value pair if the value is a float. + // Before the iterator has advanced, the behaviour is unspecified. At() (int64, float64) - // AtHistogram returns the current timestamp/histogram pair. 
- // Before the iterator has advanced AtHistogram behaviour is unspecified. + // AtHistogram returns the current timestamp/value pair if the value is + // a histogram with integer counts. Before the iterator has advanced, + // the behaviour is unspecified. AtHistogram() (int64, *histogram.Histogram) - // Err returns the current error. It should be used only after iterator is - // exhausted, that is `Next` or `Seek` returns false. + // AtFloatHistogram returns the current timestamp/value pair if the + // value is a histogram with floating-point counts. It also works if the + // value is a histogram with integer counts, in which case a + // FloatHistogram copy of the histogram is returned. Before the iterator + // has advanced, the behaviour is unspecified. + AtFloatHistogram() (int64, *histogram.FloatHistogram) + // AtT returns the current timestamp. + // Before the iterator has advanced, the behaviour is unspecified. + AtT() int64 + // Err returns the current error. It should be used only after the + // iterator is exhausted, i.e. `Next` or `Seek` have returned ValNone. Err() error - // ChunkEncoding returns the encoding of the chunk that it is iterating. - ChunkEncoding() Encoding +} + +// ValueType defines the type of a value an Iterator points to. +type ValueType uint8 + +// Possible values for ValueType. +const ( + ValNone ValueType = iota // No value at the current position. + ValFloat // A simple float, retrieved with At. + ValHistogram // A histogram, retrieve with AtHistogram, but AtFloatHistogram works, too. + ValFloatHistogram // A floating-point histogram, retrive with AtFloatHistogram. +) + +func (v ValueType) String() string { + switch v { + case ValNone: + return "none" + case ValFloat: + return "float" + case ValHistogram: + return "histogram" + case ValFloatHistogram: + return "floathistogram" + default: + return "unknown" + } } // NewNopIterator returns a new chunk iterator that does not hold any data. 
@@ -118,14 +153,13 @@ func NewNopIterator() Iterator { type nopIterator struct{} -func (nopIterator) Seek(int64) bool { return false } -func (nopIterator) At() (int64, float64) { return math.MinInt64, 0 } -func (nopIterator) AtHistogram() (int64, *histogram.Histogram) { - return math.MinInt64, nil -} -func (nopIterator) Next() bool { return false } -func (nopIterator) Err() error { return nil } -func (nopIterator) ChunkEncoding() Encoding { return EncNone } +func (nopIterator) Next() ValueType { return ValNone } +func (nopIterator) Seek(int64) ValueType { return ValNone } +func (nopIterator) At() (int64, float64) { return math.MinInt64, 0 } +func (nopIterator) AtHistogram() (int64, *histogram.Histogram) { return math.MinInt64, nil } +func (nopIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { return math.MinInt64, nil } +func (nopIterator) AtT() int64 { return math.MinInt64 } +func (nopIterator) Err() error { return nil } // Pool is used to create and reuse chunk references to avoid allocations. type Pool interface { diff --git a/tsdb/chunkenc/chunk_test.go b/tsdb/chunkenc/chunk_test.go index 419f5d301c..3d22f74cca 100644 --- a/tsdb/chunkenc/chunk_test.go +++ b/tsdb/chunkenc/chunk_test.go @@ -71,7 +71,7 @@ func testChunk(t *testing.T, c Chunk) { // 1. Expand iterator in simple case. it1 := c.Iterator(nil) var res1 []pair - for it1.Next() { + for it1.Next() == ValFloat { ts, v := it1.At() res1 = append(res1, pair{t: ts, v: v}) } @@ -81,7 +81,7 @@ func testChunk(t *testing.T, c Chunk) { // 2. Expand second iterator while reusing first one. it2 := c.Iterator(it1) var res2 []pair - for it2.Next() { + for it2.Next() == ValFloat { ts, v := it2.At() res2 = append(res2, pair{t: ts, v: v}) } @@ -93,20 +93,20 @@ func testChunk(t *testing.T, c Chunk) { it3 := c.Iterator(nil) var res3 []pair - require.Equal(t, true, it3.Seek(exp[mid].t)) + require.Equal(t, ValFloat, it3.Seek(exp[mid].t)) // Below ones should not matter. 
- require.Equal(t, true, it3.Seek(exp[mid].t)) - require.Equal(t, true, it3.Seek(exp[mid].t)) + require.Equal(t, ValFloat, it3.Seek(exp[mid].t)) + require.Equal(t, ValFloat, it3.Seek(exp[mid].t)) ts, v = it3.At() res3 = append(res3, pair{t: ts, v: v}) - for it3.Next() { + for it3.Next() == ValFloat { ts, v := it3.At() res3 = append(res3, pair{t: ts, v: v}) } require.NoError(t, it3.Err()) require.Equal(t, exp[mid:], res3) - require.Equal(t, false, it3.Seek(exp[len(exp)-1].t+1)) + require.Equal(t, ValNone, it3.Seek(exp[len(exp)-1].t+1)) } func benchmarkIterator(b *testing.B, newChunk func() Chunk) { @@ -148,7 +148,7 @@ func benchmarkIterator(b *testing.B, newChunk func() Chunk) { for i := 0; i < b.N; { it := chunk.Iterator(it) - for it.Next() { + for it.Next() == ValFloat { _, v := it.At() res = v i++ diff --git a/tsdb/chunkenc/histogram.go b/tsdb/chunkenc/histogram.go index aca1ad6937..949aa32afc 100644 --- a/tsdb/chunkenc/histogram.go +++ b/tsdb/chunkenc/histogram.go @@ -121,7 +121,7 @@ func (c *HistogramChunk) Appender() (Appender, error) { // To get an appender, we must know the state it would have if we had // appended all existing data from scratch. We iterate through the end // and populate via the iterator's state. 
- for it.Next() { + for it.Next() == ValHistogram { } if err := it.Err(); err != nil { return nil, err @@ -385,16 +385,34 @@ func (a *HistogramAppender) AppendHistogram(t int64, h *histogram.Histogram) { a.schema = h.Schema a.zThreshold = h.ZeroThreshold - a.pSpans = make([]histogram.Span, len(h.PositiveSpans)) - copy(a.pSpans, h.PositiveSpans) - a.nSpans = make([]histogram.Span, len(h.NegativeSpans)) - copy(a.nSpans, h.NegativeSpans) + if len(h.PositiveSpans) > 0 { + a.pSpans = make([]histogram.Span, len(h.PositiveSpans)) + copy(a.pSpans, h.PositiveSpans) + } else { + a.pSpans = nil + } + if len(h.NegativeSpans) > 0 { + a.nSpans = make([]histogram.Span, len(h.NegativeSpans)) + copy(a.nSpans, h.NegativeSpans) + } else { + a.nSpans = nil + } numPBuckets, numNBuckets := countSpans(h.PositiveSpans), countSpans(h.NegativeSpans) - a.pBuckets = make([]int64, numPBuckets) - a.nBuckets = make([]int64, numNBuckets) - a.pBucketsDelta = make([]int64, numPBuckets) - a.nBucketsDelta = make([]int64, numNBuckets) + if numPBuckets > 0 { + a.pBuckets = make([]int64, numPBuckets) + a.pBucketsDelta = make([]int64, numPBuckets) + } else { + a.pBuckets = nil + a.pBucketsDelta = nil + } + if numNBuckets > 0 { + a.nBuckets = make([]int64, numNBuckets) + a.nBucketsDelta = make([]int64, numNBuckets) + } else { + a.nBuckets = nil + a.nBucketsDelta = nil + } // Now store the actual data. putVarbitInt(a.b, t) @@ -505,15 +523,20 @@ func (a *HistogramAppender) Recode( } numPositiveBuckets, numNegativeBuckets := countSpans(positiveSpans), countSpans(negativeSpans) - for it.Next() { + for it.Next() == ValHistogram { tOld, hOld := it.AtHistogram() // We have to newly allocate slices for the modified buckets // here because they are kept by the appender until the next // append. // TODO(beorn7): We might be able to optimize this. 
- positiveBuckets := make([]int64, numPositiveBuckets) - negativeBuckets := make([]int64, numNegativeBuckets) + var positiveBuckets, negativeBuckets []int64 + if numPositiveBuckets > 0 { + positiveBuckets = make([]int64, numPositiveBuckets) + } + if numNegativeBuckets > 0 { + negativeBuckets = make([]int64, numNegativeBuckets) + } // Save the modified histogram to the new chunk. hOld.PositiveSpans, hOld.NegativeSpans = positiveSpans, negativeSpans @@ -548,7 +571,8 @@ type histogramIterator struct { t int64 cnt, zCnt uint64 tDelta, cntDelta, zCntDelta int64 - pBuckets, nBuckets []int64 + pBuckets, nBuckets []int64 // Delta between buckets. + pFloatBuckets, nFloatBuckets []float64 // Absolute counts. pBucketsDelta, nBucketsDelta []int64 // The sum is Gorilla xor encoded. @@ -556,34 +580,36 @@ type histogramIterator struct { leading uint8 trailing uint8 + // Track calls to retrieve methods. Once they have been called, we + // cannot recycle the bucket slices anymore because we have returned + // them in the histogram. 
+ atHistogramCalled, atFloatHistogramCalled bool + err error } -func (it *histogramIterator) Seek(t int64) bool { +func (it *histogramIterator) Seek(t int64) ValueType { if it.err != nil { - return false + return ValNone } for t > it.t || it.numRead == 0 { - if !it.Next() { - return false + if it.Next() == ValNone { + return ValNone } } - return true + return ValHistogram } func (it *histogramIterator) At() (int64, float64) { panic("cannot call histogramIterator.At") } -func (it *histogramIterator) ChunkEncoding() Encoding { - return EncHistogram -} - func (it *histogramIterator) AtHistogram() (int64, *histogram.Histogram) { if value.IsStaleNaN(it.sum) { return it.t, &histogram.Histogram{Sum: it.sum} } + it.atHistogramCalled = true return it.t, &histogram.Histogram{ Count: it.cnt, ZeroCount: it.zCnt, @@ -597,6 +623,28 @@ func (it *histogramIterator) AtHistogram() (int64, *histogram.Histogram) { } } +func (it *histogramIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + if value.IsStaleNaN(it.sum) { + return it.t, &histogram.FloatHistogram{Sum: it.sum} + } + it.atFloatHistogramCalled = true + return it.t, &histogram.FloatHistogram{ + Count: float64(it.cnt), + ZeroCount: float64(it.zCnt), + Sum: it.sum, + ZeroThreshold: it.zThreshold, + Schema: it.schema, + PositiveSpans: it.pSpans, + NegativeSpans: it.nSpans, + PositiveBuckets: it.pFloatBuckets, + NegativeBuckets: it.nFloatBuckets, + } +} + +func (it *histogramIterator) AtT() int64 { + return it.t +} + func (it *histogramIterator) Err() error { return it.err } @@ -611,9 +659,24 @@ func (it *histogramIterator) Reset(b []byte) { it.t, it.cnt, it.zCnt = 0, 0, 0 it.tDelta, it.cntDelta, it.zCntDelta = 0, 0, 0 - it.pBuckets = it.pBuckets[:0] + // Recycle slices that have not been returned yet. Otherwise, start from + // scratch. 
+ if it.atHistogramCalled { + it.atHistogramCalled = false + it.pBuckets, it.nBuckets = nil, nil + } else { + it.pBuckets = it.pBuckets[:0] + it.nBuckets = it.nBuckets[:0] + } + if it.atFloatHistogramCalled { + it.atFloatHistogramCalled = false + it.pFloatBuckets, it.nFloatBuckets = nil, nil + } else { + it.pFloatBuckets = it.pFloatBuckets[:0] + it.nFloatBuckets = it.nFloatBuckets[:0] + } + it.nBucketsDelta = it.nBucketsDelta[:0] - it.nBuckets = it.nBuckets[:0] it.pBucketsDelta = it.pBucketsDelta[:0] it.sum = 0 @@ -622,9 +685,9 @@ func (it *histogramIterator) Reset(b []byte) { it.err = nil } -func (it *histogramIterator) Next() bool { +func (it *histogramIterator) Next() ValueType { if it.err != nil || it.numRead == it.numTotal { - return false + return ValNone } if it.numRead == 0 { @@ -634,7 +697,7 @@ func (it *histogramIterator) Next() bool { schema, zeroThreshold, posSpans, negSpans, err := readHistogramChunkLayout(&it.br) if err != nil { it.err = err - return false + return ValNone } it.schema = schema it.zThreshold = zeroThreshold @@ -642,28 +705,32 @@ func (it *histogramIterator) Next() bool { numPBuckets, numNBuckets := countSpans(posSpans), countSpans(negSpans) // Allocate bucket slices as needed, recycling existing slices // in case this iterator was reset and already has slices of a - // sufficient capacity.. + // sufficient capacity. if numPBuckets > 0 { if cap(it.pBuckets) < numPBuckets { it.pBuckets = make([]int64, numPBuckets) - // If cap(it.pBuckets) isn't sufficient, neither is cap(it.pBucketsDelta). + // If cap(it.pBuckets) isn't sufficient, neither is the cap of the others.
it.pBucketsDelta = make([]int64, numPBuckets) + it.pFloatBuckets = make([]float64, numPBuckets) } else { for i := 0; i < numPBuckets; i++ { it.pBuckets = append(it.pBuckets, 0) it.pBucketsDelta = append(it.pBucketsDelta, 0) + it.pFloatBuckets = append(it.pFloatBuckets, 0) } } } if numNBuckets > 0 { if cap(it.nBuckets) < numNBuckets { it.nBuckets = make([]int64, numNBuckets) - // If cap(it.nBuckets) isn't sufficient, neither is cap(it.nBucketsDelta). + // If cap(it.nBuckets) isn't sufficient, neither is the cap of the others. it.nBucketsDelta = make([]int64, numNBuckets) + it.nFloatBuckets = make([]float64, numNBuckets) } else { for i := 0; i < numNBuckets; i++ { it.nBuckets = append(it.nBuckets, 0) it.nBucketsDelta = append(it.nBucketsDelta, 0) + it.nFloatBuckets = append(it.nFloatBuckets, 0) } } } @@ -672,28 +739,28 @@ func (it *histogramIterator) Next() bool { t, err := readVarbitInt(&it.br) if err != nil { it.err = err - return false + return ValNone } it.t = t cnt, err := readVarbitUint(&it.br) if err != nil { it.err = err - return false + return ValNone } it.cnt = cnt zcnt, err := readVarbitUint(&it.br) if err != nil { it.err = err - return false + return ValNone } it.zCnt = zcnt sum, err := it.br.readBits(64) if err != nil { it.err = err - return false + return ValNone } it.sum = math.Float64frombits(sum) @@ -701,28 +768,73 @@ func (it *histogramIterator) Next() bool { v, err := readVarbitInt(&it.br) if err != nil { it.err = err - return false + return ValNone } it.pBuckets[i] = v + // The float buckets hold absolute counts, so sum up the bucket deltas read so far. + if i == 0 { + it.pFloatBuckets[i] = float64(v) + } else { + it.pFloatBuckets[i] = it.pFloatBuckets[i-1] + float64(v) + } } for i := range it.nBuckets { v, err := readVarbitInt(&it.br) if err != nil { it.err = err - return false + return ValNone } it.nBuckets[i] = v + if i == 0 { + it.nFloatBuckets[i] = float64(v) + } else { + it.nFloatBuckets[i] = it.nFloatBuckets[i-1] + float64(v) + } } it.numRead++ - return true + return ValHistogram + } + + // Recycle bucket slices that have not been returned yet. Otherwise, + // copy them.
+ if it.atHistogramCalled { + it.atHistogramCalled = false + if len(it.pBuckets) > 0 { + newBuckets := make([]int64, len(it.pBuckets)) + copy(newBuckets, it.pBuckets) + it.pBuckets = newBuckets + } else { + it.pBuckets = nil + } + if len(it.nBuckets) > 0 { + newBuckets := make([]int64, len(it.nBuckets)) + copy(newBuckets, it.nBuckets) + it.nBuckets = newBuckets + } else { + it.nBuckets = nil + } + } + // FloatBuckets are set from scratch, so simply create empty ones. + if it.atFloatHistogramCalled { + it.atFloatHistogramCalled = false + if len(it.pFloatBuckets) > 0 { + it.pFloatBuckets = make([]float64, len(it.pFloatBuckets)) + } else { + it.pFloatBuckets = nil + } + if len(it.nFloatBuckets) > 0 { + it.nFloatBuckets = make([]float64, len(it.nFloatBuckets)) + } else { + it.nFloatBuckets = nil + } } if it.numRead == 1 { tDelta, err := readVarbitUint(&it.br) if err != nil { it.err = err - return false + return ValNone } it.tDelta = int64(tDelta) it.t += it.tDelta @@ -730,7 +833,7 @@ func (it *histogramIterator) Next() bool { cntDelta, err := readVarbitInt(&it.br) if err != nil { it.err = err - return false + return ValNone } it.cntDelta = cntDelta it.cnt = uint64(int64(it.cnt) + it.cntDelta) @@ -738,49 +841,55 @@ func (it *histogramIterator) Next() bool { zcntDelta, err := readVarbitInt(&it.br) if err != nil { it.err = err - return false + return ValNone } it.zCntDelta = zcntDelta it.zCnt = uint64(int64(it.zCnt) + it.zCntDelta) ok := it.readSum() if !ok { - return false + return ValNone } if value.IsStaleNaN(it.sum) { it.numRead++ - return true + return ValHistogram } + var current int64 for i := range it.pBuckets { delta, err := readVarbitInt(&it.br) if err != nil { it.err = err - return false + return ValNone } it.pBucketsDelta[i] = delta - it.pBuckets[i] = it.pBuckets[i] + delta + it.pBuckets[i] += delta + current += it.pBuckets[i] + it.pFloatBuckets[i] = float64(current) } + current = 0 for i := range it.nBuckets { delta, err := readVarbitInt(&it.br) if err != nil 
{ it.err = err - return false + return ValNone } it.nBucketsDelta[i] = delta - it.nBuckets[i] = it.nBuckets[i] + delta + it.nBuckets[i] += delta + current += it.nBuckets[i] + it.nFloatBuckets[i] = float64(current) } it.numRead++ - return true + return ValHistogram } tDod, err := readVarbitInt(&it.br) if err != nil { it.err = err - return false + return ValNone } it.tDelta = it.tDelta + tDod it.t += it.tDelta @@ -788,7 +897,7 @@ func (it *histogramIterator) Next() bool { cntDod, err := readVarbitInt(&it.br) if err != nil { it.err = err - return false + return ValNone } it.cntDelta = it.cntDelta + cntDod it.cnt = uint64(int64(it.cnt) + it.cntDelta) @@ -796,43 +905,49 @@ func (it *histogramIterator) Next() bool { zcntDod, err := readVarbitInt(&it.br) if err != nil { it.err = err - return false + return ValNone } it.zCntDelta = it.zCntDelta + zcntDod it.zCnt = uint64(int64(it.zCnt) + it.zCntDelta) ok := it.readSum() if !ok { - return false + return ValNone } if value.IsStaleNaN(it.sum) { it.numRead++ - return true + return ValHistogram } + var current int64 for i := range it.pBuckets { dod, err := readVarbitInt(&it.br) if err != nil { it.err = err - return false + return ValNone } - it.pBucketsDelta[i] = it.pBucketsDelta[i] + dod - it.pBuckets[i] = it.pBuckets[i] + it.pBucketsDelta[i] + it.pBucketsDelta[i] += dod + it.pBuckets[i] += it.pBucketsDelta[i] + current += it.pBuckets[i] + it.pFloatBuckets[i] = float64(current) } + current = 0 for i := range it.nBuckets { dod, err := readVarbitInt(&it.br) if err != nil { it.err = err - return false + return ValNone } - it.nBucketsDelta[i] = it.nBucketsDelta[i] + dod - it.nBuckets[i] = it.nBuckets[i] + it.nBucketsDelta[i] + it.nBucketsDelta[i] += dod + it.nBuckets[i] += it.nBucketsDelta[i] + current += it.nBuckets[i] + it.nFloatBuckets[i] = float64(current) } it.numRead++ - return true + return ValHistogram } func (it *histogramIterator) readSum() bool { diff --git a/tsdb/chunkenc/histogram_test.go 
b/tsdb/chunkenc/histogram_test.go index ede26cf0eb..a7b8fbb209 100644 --- a/tsdb/chunkenc/histogram_test.go +++ b/tsdb/chunkenc/histogram_test.go @@ -76,9 +76,9 @@ func TestHistogramChunkSameBuckets(t *testing.T) { it := c.iterator(nil) require.NoError(t, it.Err()) var act []res - for it.Next() { + for it.Next() == ValHistogram { ts, h := it.AtHistogram() - act = append(act, res{t: ts, h: h.Copy()}) + act = append(act, res{t: ts, h: h}) } require.NoError(t, it.Err()) require.Equal(t, exp, act) @@ -188,9 +188,9 @@ func TestHistogramChunkBucketChanges(t *testing.T) { } it := c.Iterator(nil) var act []res - for it.Next() { + for it.Next() == ValHistogram { ts, h := it.AtHistogram() - act = append(act, res{t: ts, h: h.Copy()}) + act = append(act, res{t: ts, h: h}) } require.NoError(t, it.Err()) require.Equal(t, exp, act) diff --git a/tsdb/chunkenc/xor.go b/tsdb/chunkenc/xor.go index fed09a6b67..50d6a1d30f 100644 --- a/tsdb/chunkenc/xor.go +++ b/tsdb/chunkenc/xor.go @@ -97,7 +97,7 @@ func (c *XORChunk) Appender() (Appender, error) { // To get an appender we must know the state it would have if we had // appended all existing data from scratch. // We iterate through the end and populate via the iterator's state. 
- for it.Next() { + for it.Next() != ValNone { } if err := it.Err(); err != nil { return nil, err @@ -238,17 +238,17 @@ type xorIterator struct { err error } -func (it *xorIterator) Seek(t int64) bool { +func (it *xorIterator) Seek(t int64) ValueType { if it.err != nil { - return false + return ValNone } for t > it.t || it.numRead == 0 { - if !it.Next() { - return false + if it.Next() == ValNone { + return ValNone } } - return true + return ValFloat } func (it *xorIterator) At() (int64, float64) { @@ -259,8 +259,12 @@ func (it *xorIterator) AtHistogram() (int64, *histogram.Histogram) { panic("cannot call xorIterator.AtHistogram") } -func (it *xorIterator) ChunkEncoding() Encoding { - return EncXOR +func (it *xorIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + panic("cannot call xorIterator.AtFloatHistogram") +} + +func (it *xorIterator) AtT() int64 { + return it.t } func (it *xorIterator) Err() error { @@ -282,33 +286,33 @@ func (it *xorIterator) Reset(b []byte) { it.err = nil } -func (it *xorIterator) Next() bool { +func (it *xorIterator) Next() ValueType { if it.err != nil || it.numRead == it.numTotal { - return false + return ValNone } if it.numRead == 0 { t, err := binary.ReadVarint(&it.br) if err != nil { it.err = err - return false + return ValNone } v, err := it.br.readBits(64) if err != nil { it.err = err - return false + return ValNone } it.t = t it.val = math.Float64frombits(v) it.numRead++ - return true + return ValFloat } if it.numRead == 1 { tDelta, err := binary.ReadUvarint(&it.br) if err != nil { it.err = err - return false + return ValNone } it.tDelta = tDelta it.t = it.t + int64(it.tDelta) @@ -326,7 +330,7 @@ func (it *xorIterator) Next() bool { } if err != nil { it.err = err - return false + return ValNone } if bit == zero { break @@ -349,7 +353,7 @@ func (it *xorIterator) Next() bool { bits, err := it.br.readBits(64) if err != nil { it.err = err - return false + return ValNone } dod = int64(bits) @@ -362,7 +366,7 @@ func (it 
*xorIterator) Next() bool { } if err != nil { it.err = err - return false + return ValNone } // Account for negative numbers, which come back as high unsigned numbers. @@ -379,15 +383,15 @@ func (it *xorIterator) Next() bool { return it.readValue() } -func (it *xorIterator) readValue() bool { +func (it *xorIterator) readValue() ValueType { val, leading, trailing, err := xorRead(&it.br, it.val, it.leading, it.trailing) if err != nil { it.err = err - return false + return ValNone } it.val, it.leading, it.trailing = val, leading, trailing it.numRead++ - return true + return ValFloat } func xorWrite( diff --git a/tsdb/compact_test.go b/tsdb/compact_test.go index 2c34355b36..3663c03da7 100644 --- a/tsdb/compact_test.go +++ b/tsdb/compact_test.go @@ -977,7 +977,7 @@ func TestCompaction_populateBlock(t *testing.T) { firstTs int64 = math.MaxInt64 s sample ) - for iter.Next() { + for iter.Next() == chunkenc.ValFloat { s.t, s.v = iter.At() if firstTs == math.MaxInt64 { firstTs = s.t @@ -1373,9 +1373,10 @@ func TestHeadCompactionWithHistograms(t *testing.T) { it := s.Iterator() actHists := make([]timedHistogram, 0, len(expHists)) - for it.Next() { + for it.Next() == chunkenc.ValHistogram { + // TODO(beorn7): Test mixed series? 
t, h := it.AtHistogram() - actHists = append(actHists, timedHistogram{t, h.Copy()}) + actHists = append(actHists, timedHistogram{t, h}) } require.Equal(t, expHists, actHists) @@ -1744,7 +1745,7 @@ func TestSparseHistogramCompactionAndQuery(t *testing.T) { for ss.Next() { s := ss.At() it := s.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValHistogram { ts, h := it.AtHistogram() actHists[s.Labels().String()] = append(actHists[s.Labels().String()], timedHistogram{ts, h.Copy()}) } diff --git a/tsdb/db_test.go b/tsdb/db_test.go index 1773579f9f..e92036f1a8 100644 --- a/tsdb/db_test.go +++ b/tsdb/db_test.go @@ -89,7 +89,8 @@ func query(t testing.TB, q storage.Querier, matchers ...*labels.Matcher) map[str samples := []tsdbutil.Sample{} it := series.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValFloat { + // TODO(beorn7): Also handle histograms. t, v := it.At() samples = append(samples, sample{t: t, v: v}) } @@ -420,7 +421,7 @@ Outer: expSamples := make([]tsdbutil.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil}) + expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -536,7 +537,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) { ssMap := query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.Equal(t, map[string][]tsdbutil.Sample{ - labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil}}, + labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}}, }, ssMap) // Append Out of Order Value. 
@@ -553,7 +554,7 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) { ssMap = query(t, q, labels.MustNewMatcher(labels.MatchEqual, "a", "b")) require.Equal(t, map[string][]tsdbutil.Sample{ - labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil}, sample{10, 3, nil}}, + labels.New(labels.Label{Name: "a", Value: "b"}).String(): {sample{0, 1, nil, nil}, sample{10, 3, nil, nil}}, }, ssMap) } @@ -594,7 +595,7 @@ func TestDB_Snapshot(t *testing.T) { sum := 0.0 for seriesSet.Next() { series := seriesSet.At().Iterator() - for series.Next() { + for series.Next() == chunkenc.ValFloat { _, v := series.At() sum += v } @@ -646,7 +647,7 @@ func TestDB_Snapshot_ChunksOutsideOfCompactedRange(t *testing.T) { sum := 0.0 for seriesSet.Next() { series := seriesSet.At().Iterator() - for series.Next() { + for series.Next() == chunkenc.ValFloat { _, v := series.At() sum += v } @@ -716,7 +717,7 @@ Outer: expSamples := make([]tsdbutil.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil}) + expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -821,7 +822,7 @@ func TestDB_e2e(t *testing.T) { for i := 0; i < numDatapoints; i++ { v := rand.Float64() - series = append(series, sample{ts, v, nil}) + series = append(series, sample{ts, v, nil, nil}) _, err := app.Append(0, lset, ts, v) require.NoError(t, err) @@ -1066,7 +1067,7 @@ func TestTombstoneClean(t *testing.T) { expSamples := make([]tsdbutil.Sample, 0, len(c.remaint)) for _, ts := range c.remaint { - expSamples = append(expSamples, sample{ts, smpls[ts], nil}) + expSamples = append(expSamples, sample{ts, smpls[ts], nil, nil}) } expss := newMockSeriesSet([]storage.Series{ @@ -1363,7 +1364,7 @@ func TestSizeRetention(t *testing.T) { series := genSeries(100, 10, m.MinTime, m.MaxTime+1) for _, s := range series { it := s.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValFloat { tim, 
v := it.At() _, err := headApp.Append(0, s.Labels(), tim, v) require.NoError(t, err) @@ -1550,7 +1551,7 @@ func expandSeriesSet(ss storage.SeriesSet) ([]labels.Labels, map[string][]sample series := ss.At() samples := []sample{} it := series.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValFloat { t, v := it.At() samples = append(samples, sample{t: t, v: v}) } @@ -2395,7 +2396,7 @@ func TestDBReadOnly_FlushWAL(t *testing.T) { sum := 0.0 for seriesSet.Next() { series := seriesSet.At().Iterator() - for series.Next() { + for series.Next() == chunkenc.ValFloat { _, v := series.At() sum += v } @@ -2545,11 +2546,11 @@ func TestDBQueryDoesntSeeAppendsAfterCreation(t *testing.T) { // TestChunkWriter_ReadAfterWrite ensures that chunk segment are cut at the set segment size and // that the resulted segments includes the expected chunks data. func TestChunkWriter_ReadAfterWrite(t *testing.T) { - chk1 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil}}) - chk2 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil}}) - chk3 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil}}) - chk4 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil}}) - chk5 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil}}) + chk1 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil, nil}}) + chk2 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil, nil}}) + chk3 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil, nil}}) + chk4 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil, nil}}) + chk5 := tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil, nil}}) chunkSize := len(chk1.Chunk.Bytes()) + chunks.MaxChunkLengthFieldSize + chunks.ChunkEncodingSize + crc32.Size tests := []struct { @@ -2749,11 +2750,11 @@ func TestRangeForTimestamp(t *testing.T) { // Regression test for https://github.com/prometheus/prometheus/pull/6514. 
func TestChunkReader_ConcurrentReads(t *testing.T) { chks := []chunks.Meta{ - tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil}}), - tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil}}), - tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil}}), - tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil}}), - tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil}}), + tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 1, nil, nil}}), + tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 2, nil, nil}}), + tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 3, nil, nil}}), + tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 4, nil, nil}}), + tsdbutil.ChunkFromSamples([]tsdbutil.Sample{sample{1, 5, nil, nil}}), } tempDir, err := ioutil.TempDir("", "test_chunk_writer") @@ -2818,7 +2819,7 @@ func TestCompactHead(t *testing.T) { val := rand.Float64() _, err := app.Append(0, labels.FromStrings("a", "b"), int64(i), val) require.NoError(t, err) - expSamples = append(expSamples, sample{int64(i), val, nil}) + expSamples = append(expSamples, sample{int64(i), val, nil, nil}) } require.NoError(t, app.Commit()) @@ -2843,9 +2844,9 @@ func TestCompactHead(t *testing.T) { for seriesSet.Next() { series := seriesSet.At().Iterator() - for series.Next() { + for series.Next() == chunkenc.ValFloat { time, val := series.At() - actSamples = append(actSamples, sample{int64(time), val, nil}) + actSamples = append(actSamples, sample{int64(time), val, nil, nil}) } require.NoError(t, series.Err()) } @@ -3259,7 +3260,7 @@ func testQuerierShouldNotPanicIfHeadChunkIsTruncatedWhileReadingQueriedChunks(t var sum float64 var firstErr error for _, it := range iterators { - for it.Next() { + for it.Next() == chunkenc.ValFloat { _, v := it.At() sum += v } diff --git a/tsdb/example_test.go b/tsdb/example_test.go index afe73b64ac..501e4fe2ce 100644 --- a/tsdb/example_test.go +++ b/tsdb/example_test.go @@ -23,6 +23,7 @@ import ( 
"github.com/stretchr/testify/require" "github.com/prometheus/prometheus/model/labels" + "github.com/prometheus/prometheus/tsdb/chunkenc" ) func TestExample(t *testing.T) { @@ -44,7 +45,7 @@ func TestExample(t *testing.T) { ts, v := time.Now().Unix(), 123.0 ref, err := app.Append(0, lbls, ts, v) require.NoError(t, err) - appendedSamples = append(appendedSamples, sample{ts, v, nil}) + appendedSamples = append(appendedSamples, sample{ts, v, nil, nil}) // Another append for a second later. // Re-using the ref from above since it's the same series, makes append faster. @@ -52,7 +53,7 @@ func TestExample(t *testing.T) { ts, v = time.Now().Unix(), 124 _, err = app.Append(ref, lbls, ts, v) require.NoError(t, err) - appendedSamples = append(appendedSamples, sample{ts, v, nil}) + appendedSamples = append(appendedSamples, sample{ts, v, nil, nil}) // Commit to storage. err = app.Commit() @@ -79,10 +80,10 @@ func TestExample(t *testing.T) { fmt.Println("series:", series.Labels().String()) it := series.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValFloat { ts, v := it.At() fmt.Println("sample", ts, v) - queriedSamples = append(queriedSamples, sample{ts, v, nil}) + queriedSamples = append(queriedSamples, sample{ts, v, nil, nil}) } require.NoError(t, it.Err()) diff --git a/tsdb/head.go b/tsdb/head.go index 0ad1718079..0461263c8f 100644 --- a/tsdb/head.go +++ b/tsdb/head.go @@ -274,7 +274,6 @@ type headMetrics struct { // Sparse histogram metrics for experiments. // TODO: remove these in the final version. 
histogramSamplesTotal prometheus.Counter - histogramSeries prometheus.Gauge } func newHeadMetrics(h *Head, r prometheus.Registerer) *headMetrics { @@ -377,10 +376,6 @@ func newHeadMetrics(h *Head, r prometheus.Registerer) *headMetrics { Name: "prometheus_tsdb_histogram_samples_total", Help: "Total number of histograms samples added.", }), - histogramSeries: prometheus.NewGauge(prometheus.GaugeOpts{ - Name: "prometheus_tsdb_histogram_series", - Help: "Number of histogram series currently present in the head block.", - }), } if r != nil { @@ -409,7 +404,6 @@ func newHeadMetrics(h *Head, r prometheus.Registerer) *headMetrics { m.mmapChunkCorruptionTotal, m.snapshotReplayErrorTotal, m.histogramSamplesTotal, - m.histogramSeries, // Metrics bound to functions and not needed in tests // can be created and registered on the spot. prometheus.NewGaugeFunc(prometheus.GaugeOpts{ @@ -607,21 +601,6 @@ func (h *Head) Init(minValidTime int64) error { h.updateWALReplayStatusRead(i) } - { - // Set the sparseHistogramSeries metric once replay is done. - // This is a temporary hack. - // TODO: remove this hack and do it while replaying WAL if we keep this metric around. - sparseHistogramSeries := 0 - for _, m := range h.series.series { - for _, ms := range m { - if ms.isHistogramSeries { - sparseHistogramSeries++ - } - } - } - h.metrics.histogramSeries.Set(float64(sparseHistogramSeries)) - } - walReplayDuration := time.Since(start) h.metrics.walTotalReplayDuration.Set(walReplayDuration.Seconds()) level.Info(h.logger).Log( @@ -1142,13 +1121,12 @@ func (h *Head) gc() int64 { // Drop old chunks and remember series IDs and hashes if they can be // deleted entirely. 
- deleted, chunksRemoved, actualMint, sparseHistogramSeriesDeleted := h.series.gc(mint) + deleted, chunksRemoved, actualMint := h.series.gc(mint) seriesRemoved := len(deleted) h.metrics.seriesRemoved.Add(float64(seriesRemoved)) h.metrics.chunksRemoved.Add(float64(chunksRemoved)) h.metrics.chunks.Sub(float64(chunksRemoved)) - h.metrics.histogramSeries.Sub(float64(sparseHistogramSeriesDeleted)) h.numSeries.Sub(uint64(seriesRemoved)) // Remove deleted series IDs from the postings lists. @@ -1366,13 +1344,12 @@ func newStripeSeries(stripeSize int, seriesCallback SeriesLifecycleCallback) *st // note: returning map[chunks.HeadSeriesRef]struct{} would be more accurate, // but the returned map goes into postings.Delete() which expects a map[storage.SeriesRef]struct // and there's no easy way to cast maps. -func (s *stripeSeries) gc(mint int64) (map[storage.SeriesRef]struct{}, int, int64, int) { +func (s *stripeSeries) gc(mint int64) (map[storage.SeriesRef]struct{}, int, int64) { var ( - deleted = map[storage.SeriesRef]struct{}{} - deletedForCallback = []labels.Labels{} - rmChunks = 0 - actualMint int64 = math.MaxInt64 - sparseHistogramSeriesDeleted = 0 + deleted = map[storage.SeriesRef]struct{}{} + deletedForCallback = []labels.Labels{} + rmChunks = 0 + actualMint int64 = math.MaxInt64 ) // Run through all series and truncate old chunks. Mark those with no // chunks left as deleted and store their ID. 
@@ -1404,9 +1381,6 @@ func (s *stripeSeries) gc(mint int64) (map[storage.SeriesRef]struct{}, int, int6 s.locks[j].Lock() } - if series.isHistogramSeries { - sparseHistogramSeriesDeleted++ - } deleted[storage.SeriesRef(series.ref)] = struct{}{} s.hashes[i].del(hash, series.lset) delete(s.series[j], series.ref) @@ -1430,7 +1404,7 @@ func (s *stripeSeries) gc(mint int64) (map[storage.SeriesRef]struct{}, int, int6 actualMint = mint } - return deleted, rmChunks, actualMint, sparseHistogramSeriesDeleted + return deleted, rmChunks, actualMint } func (s *stripeSeries) getByID(id chunks.HeadSeriesRef) *memSeries { @@ -1495,22 +1469,32 @@ func (s *stripeSeries) getOrSet(hash uint64, lset labels.Labels, createSeries fu return series, true, nil } -type histogramSample struct { - t int64 - h *histogram.Histogram -} - type sample struct { - t int64 - v float64 - h *histogram.Histogram + t int64 + v float64 + h *histogram.Histogram + fh *histogram.FloatHistogram } -func newSample(t int64, v float64, h *histogram.Histogram) tsdbutil.Sample { return sample{t, v, h} } +func newSample(t int64, v float64, h *histogram.Histogram, fh *histogram.FloatHistogram) tsdbutil.Sample { + return sample{t, v, h, fh} +} -func (s sample) T() int64 { return s.t } -func (s sample) V() float64 { return s.v } -func (s sample) H() *histogram.Histogram { return s.h } +func (s sample) T() int64 { return s.t } +func (s sample) V() float64 { return s.v } +func (s sample) H() *histogram.Histogram { return s.h } +func (s sample) FH() *histogram.FloatHistogram { return s.fh } + +func (s sample) Type() chunkenc.ValueType { + switch { + case s.h != nil: + return chunkenc.ValHistogram + case s.fh != nil: + return chunkenc.ValFloatHistogram + default: + return chunkenc.ValFloat + } +} // memSeries is the in-memory representation of a series. None of its methods // are goroutine safe and it is the caller's responsibility to lock it. 
@@ -1540,8 +1524,7 @@ type memSeries struct { // We keep the last 4 samples here (in addition to appending them to the chunk) so we don't need coordination between appender and querier. // Even the most compact encoding of a sample takes 2 bits, so the last byte is not contended. - sampleBuf [4]sample - histogramBuf [4]histogramSample + sampleBuf [4]sample pendingCommit bool // Whether there are samples waiting to be committed to this series. @@ -1554,6 +1537,8 @@ type memSeries struct { txs *txRing + // TODO(beorn7): The only reason we track this is to create a staleness + // marker as either histogram or float sample. Perhaps there is a better way. isHistogramSeries bool } diff --git a/tsdb/head_append.go b/tsdb/head_append.go index 91f0dd3581..fb51b38d12 100644 --- a/tsdb/head_append.go +++ b/tsdb/head_append.go @@ -416,7 +416,6 @@ func (a *headAppender) AppendHistogram(ref storage.SeriesRef, lset labels.Labels } s.isHistogramSeries = true if created { - a.head.metrics.histogramSeries.Inc() a.series = append(a.series, record.RefSeries{ Ref: s.ref, Labels: lset, @@ -607,7 +606,6 @@ func (s *memSeries) append(t int64, v float64, appendID uint64, chunkDiskMapper if !sampleInOrder { return sampleInOrder, chunkCreated } - s.app.Append(t, v) s.isHistogramSeries = false @@ -683,10 +681,10 @@ func (s *memSeries) appendHistogram(t int64, h *histogram.Histogram, appendID ui c.maxTime = t - s.histogramBuf[0] = s.histogramBuf[1] - s.histogramBuf[1] = s.histogramBuf[2] - s.histogramBuf[2] = s.histogramBuf[3] - s.histogramBuf[3] = histogramSample{t: t, h: h} + s.sampleBuf[0] = s.sampleBuf[1] + s.sampleBuf[1] = s.sampleBuf[2] + s.sampleBuf[2] = s.sampleBuf[3] + s.sampleBuf[3] = sample{t: t, h: h} if appendID > 0 { s.txs.add(appendID) diff --git a/tsdb/head_read.go b/tsdb/head_read.go index 7ec49d3db8..b2fed619d8 100644 --- a/tsdb/head_read.go +++ b/tsdb/head_read.go @@ -428,8 +428,6 @@ func (s *memSeries) iterator(id chunks.HeadChunkID, isoState *isolationState, ch 
msIter.total = numSamples msIter.stopAfter = stopAfter msIter.buf = s.sampleBuf - msIter.histogramBuf = s.histogramBuf - msIter.isHistogramSeries = s.isHistogramSeries return msIter } return &memSafeIterator{ @@ -438,10 +436,8 @@ func (s *memSeries) iterator(id chunks.HeadChunkID, isoState *isolationState, ch i: -1, stopAfter: stopAfter, }, - total: numSamples, - buf: s.sampleBuf, - histogramBuf: s.histogramBuf, - isHistogramSeries: s.isHistogramSeries, + total: numSamples, + buf: s.sampleBuf, } } @@ -450,52 +446,50 @@ func (s *memSeries) iterator(id chunks.HeadChunkID, isoState *isolationState, ch type memSafeIterator struct { stopIterator - isHistogramSeries bool - total int - buf [4]sample - histogramBuf [4]histogramSample + total int + buf [4]sample } -func (it *memSafeIterator) Seek(t int64) bool { +func (it *memSafeIterator) Seek(t int64) chunkenc.ValueType { if it.Err() != nil { - return false + return chunkenc.ValNone } - var ts int64 - if it.isHistogramSeries { - ts, _ = it.AtHistogram() - } else { - ts, _ = it.At() + var valueType chunkenc.ValueType + var ts int64 = math.MinInt64 + + if it.i > -1 { + ts = it.AtT() } - if it.isHistogramSeries { - for t > ts || it.i == -1 { - if !it.Next() { - return false - } - ts, _ = it.AtHistogram() - } - } else { - for t > ts || it.i == -1 { - if !it.Next() { - return false - } - ts, _ = it.At() + if t <= ts { + // We are already at the right sample, but we have to find out + // its ValueType. 
+ if it.total-it.i > 4 { + return it.Iterator.Seek(ts) } + return it.buf[4-(it.total-it.i)].Type() } - return true + for t > ts || it.i == -1 { + if valueType = it.Next(); valueType == chunkenc.ValNone { + return chunkenc.ValNone + } + ts = it.AtT() + } + + return valueType } -func (it *memSafeIterator) Next() bool { +func (it *memSafeIterator) Next() chunkenc.ValueType { if it.i+1 >= it.stopAfter { - return false + return chunkenc.ValNone } it.i++ if it.total-it.i > 4 { return it.Iterator.Next() } - return true + return it.buf[4-(it.total-it.i)].Type() } func (it *memSafeIterator) At() (int64, float64) { @@ -510,10 +504,29 @@ func (it *memSafeIterator) AtHistogram() (int64, *histogram.Histogram) { if it.total-it.i > 4 { return it.Iterator.AtHistogram() } - s := it.histogramBuf[4-(it.total-it.i)] + s := it.buf[4-(it.total-it.i)] return s.t, s.h } +func (it *memSafeIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + if it.total-it.i > 4 { + return it.Iterator.AtFloatHistogram() + } + s := it.buf[4-(it.total-it.i)] + if s.fh != nil { + return s.t, s.fh + } + return s.t, s.h.ToFloat() +} + +func (it *memSafeIterator) AtT() int64 { + if it.total-it.i > 4 { + return it.Iterator.AtT() + } + s := it.buf[4-(it.total-it.i)] + return s.t +} + // stopIterator wraps an Iterator, but only returns the first // stopAfter values, if initialized with i=-1. 
type stopIterator struct { @@ -522,9 +535,9 @@ type stopIterator struct { i, stopAfter int } -func (it *stopIterator) Next() bool { +func (it *stopIterator) Next() chunkenc.ValueType { if it.i+1 >= it.stopAfter { - return false + return chunkenc.ValNone } it.i++ return it.Iterator.Next() diff --git a/tsdb/head_test.go b/tsdb/head_test.go index ac1a8725e4..25b7c51aef 100644 --- a/tsdb/head_test.go +++ b/tsdb/head_test.go @@ -324,18 +324,18 @@ func TestHead_ReadWAL(t *testing.T) { require.Equal(t, labels.FromStrings("a", "3"), s100.lset) expandChunk := func(c chunkenc.Iterator) (x []sample) { - for c.Next() { + for c.Next() == chunkenc.ValFloat { t, v := c.At() x = append(x, sample{t: t, v: v}) } require.NoError(t, c.Err()) return x } - require.Equal(t, []sample{{100, 2, nil}, {101, 5, nil}}, expandChunk(s10.iterator(0, nil, head.chunkDiskMapper, nil))) - require.Equal(t, []sample{{101, 6, nil}}, expandChunk(s50.iterator(0, nil, head.chunkDiskMapper, nil))) + require.Equal(t, []sample{{100, 2, nil, nil}, {101, 5, nil, nil}}, expandChunk(s10.iterator(0, nil, head.chunkDiskMapper, nil))) + require.Equal(t, []sample{{101, 6, nil, nil}}, expandChunk(s50.iterator(0, nil, head.chunkDiskMapper, nil))) // The samples before the new series record should be discarded since a duplicate record // is only possible when old samples were compacted. - require.Equal(t, []sample{{101, 7, nil}}, expandChunk(s100.iterator(0, nil, head.chunkDiskMapper, nil))) + require.Equal(t, []sample{{101, 7, nil, nil}}, expandChunk(s100.iterator(0, nil, head.chunkDiskMapper, nil))) q, err := head.ExemplarQuerier(context.Background()) require.NoError(t, err) @@ -401,8 +401,8 @@ func TestHead_WALMultiRef(t *testing.T) { // The samples before the new ref should be discarded since Head truncation // happens only after compacting the Head. 
require.Equal(t, map[string][]tsdbutil.Sample{`{foo="bar"}`: { - sample{1700, 3, nil}, - sample{2000, 4, nil}, + sample{1700, 3, nil, nil}, + sample{2000, 4, nil, nil}, }}, series) } @@ -779,7 +779,7 @@ func TestDeleteUntilCurMax(t *testing.T) { require.True(t, res.Next(), "series is not present") s := res.At() it := s.Iterator() - require.False(t, it.Next(), "expected no samples") + require.Equal(t, chunkenc.ValNone, it.Next(), "expected no samples") for res.Next() { } require.NoError(t, res.Err()) @@ -798,7 +798,7 @@ func TestDeleteUntilCurMax(t *testing.T) { it = exps.Iterator() resSamples, err := storage.ExpandSamples(it, newSample) require.NoError(t, err) - require.Equal(t, []tsdbutil.Sample{sample{11, 1, nil}}, resSamples) + require.Equal(t, []tsdbutil.Sample{sample{11, 1, nil, nil}}, resSamples) for res.Next() { } require.NoError(t, res.Err()) @@ -912,7 +912,7 @@ func TestDelete_e2e(t *testing.T) { v := rand.Float64() _, err := app.Append(0, ls, ts, v) require.NoError(t, err) - series = append(series, sample{ts, v, nil}) + series = append(series, sample{ts, v, nil, nil}) ts += rand.Int63n(timeInterval) + 1 } seriesMap[labels.New(l...).String()] = series @@ -979,7 +979,7 @@ func TestDelete_e2e(t *testing.T) { eok, rok := expSs.Next(), ss.Next() // Skip a series if iterator is empty. if rok { - for !ss.At().Iterator().Next() { + for ss.At().Iterator().Next() == chunkenc.ValNone { rok = ss.Next() if !rok { break @@ -2269,47 +2269,40 @@ func TestMemSafeIteratorSeekIntoBuffer(t *testing.T) { require.True(t, ok) // First point. - ok = it.Seek(0) - require.True(t, ok) + require.Equal(t, chunkenc.ValFloat, it.Seek(0)) ts, val := it.At() require.Equal(t, int64(0), ts) require.Equal(t, float64(0), val) // Advance one point. - ok = it.Next() - require.True(t, ok) + require.Equal(t, chunkenc.ValFloat, it.Next()) ts, val = it.At() require.Equal(t, int64(1), ts) require.Equal(t, float64(1), val) // Seeking an older timestamp shouldn't cause the iterator to go backwards. 
- ok = it.Seek(0) - require.True(t, ok) + require.Equal(t, chunkenc.ValFloat, it.Seek(0)) ts, val = it.At() require.Equal(t, int64(1), ts) require.Equal(t, float64(1), val) // Seek into the buffer. - ok = it.Seek(3) - require.True(t, ok) + require.Equal(t, chunkenc.ValFloat, it.Seek(3)) ts, val = it.At() require.Equal(t, int64(3), ts) require.Equal(t, float64(3), val) // Iterate through the rest of the buffer. for i := 4; i < 7; i++ { - ok = it.Next() - require.True(t, ok) + require.Equal(t, chunkenc.ValFloat, it.Next()) ts, val = it.At() require.Equal(t, int64(i), ts) require.Equal(t, float64(i), val) } // Run out of elements in the iterator. - ok = it.Next() - require.False(t, ok) - ok = it.Seek(7) - require.False(t, ok) + require.Equal(t, chunkenc.ValNone, it.Next()) + require.Equal(t, chunkenc.ValNone, it.Seek(7)) } // Tests https://github.com/prometheus/prometheus/issues/8221. @@ -2358,7 +2351,7 @@ func TestChunkNotFoundHeadGCRace(t *testing.T) { // Now consume after compaction when it's gone. it := s.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValFloat { _, _ = it.At() } // It should error here without any fix for the mentioned issue. 
@@ -2366,7 +2359,7 @@ func TestChunkNotFoundHeadGCRace(t *testing.T) { for ss.Next() { s = ss.At() it := s.Iterator() - for it.Next() { + for it.Next() == chunkenc.ValFloat { _, _ = it.At() } require.NoError(t, it.Err()) @@ -2397,7 +2390,7 @@ func TestDataMissingOnQueryDuringCompaction(t *testing.T) { ref, err = app.Append(ref, labels.FromStrings("a", "b"), ts, float64(i)) require.NoError(t, err) maxt = ts - expSamples = append(expSamples, sample{ts, float64(i), nil}) + expSamples = append(expSamples, sample{ts, float64(i), nil, nil}) } require.NoError(t, app.Commit()) @@ -2565,9 +2558,9 @@ func TestAppendHistogram(t *testing.T) { it := s.Iterator() actHistograms := make([]timedHistogram, 0, len(expHistograms)) - for it.Next() { + for it.Next() == chunkenc.ValHistogram { t, h := it.AtHistogram() - actHistograms = append(actHistograms, timedHistogram{t, h.Copy()}) + actHistograms = append(actHistograms, timedHistogram{t, h}) } require.Equal(t, expHistograms, actHistograms) @@ -2622,9 +2615,9 @@ func TestHistogramInWAL(t *testing.T) { it := s.Iterator() actHistograms := make([]timedHistogram, 0, len(expHistograms)) - for it.Next() { + for it.Next() == chunkenc.ValHistogram { t, h := it.AtHistogram() - actHistograms = append(actHistograms, timedHistogram{t, h.Copy()}) + actHistograms = append(actHistograms, timedHistogram{t, h}) } require.Equal(t, expHistograms, actHistograms) @@ -2728,7 +2721,7 @@ func TestChunkSnapshot(t *testing.T) { // 240 samples should m-map at least 1 chunk. for ts := int64(1); ts <= 240; ts++ { val := rand.Float64() - expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil}) + expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil}) ref, err := app.Append(0, lbls, ts, val) require.NoError(t, err) @@ -2788,7 +2781,7 @@ func TestChunkSnapshot(t *testing.T) { // 240 samples should m-map at least 1 chunk. 
for ts := int64(241); ts <= 480; ts++ { val := rand.Float64() - expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil}) + expSeries[lblStr] = append(expSeries[lblStr], sample{ts, val, nil, nil}) ref, err := app.Append(0, lbls, ts, val) require.NoError(t, err) @@ -2951,7 +2944,6 @@ func TestHistogramMetrics(t *testing.T) { } } - require.Equal(t, float64(expHSeries), prom_testutil.ToFloat64(head.metrics.histogramSeries)) require.Equal(t, float64(expHSamples), prom_testutil.ToFloat64(head.metrics.histogramSamplesTotal)) require.NoError(t, head.Close()) @@ -2961,7 +2953,6 @@ func TestHistogramMetrics(t *testing.T) { require.NoError(t, err) require.NoError(t, head.Init(0)) - require.Equal(t, float64(expHSeries), prom_testutil.ToFloat64(head.metrics.histogramSeries)) require.Equal(t, float64(0), prom_testutil.ToFloat64(head.metrics.histogramSamplesTotal)) // Counter reset. } @@ -2995,9 +2986,9 @@ func TestHistogramStaleSample(t *testing.T) { it := s.Iterator() actHistograms := make([]timedHistogram, 0, len(expHistograms)) - for it.Next() { + for it.Next() == chunkenc.ValHistogram { t, h := it.AtHistogram() - actHistograms = append(actHistograms, timedHistogram{t, h.Copy()}) + actHistograms = append(actHistograms, timedHistogram{t, h}) } // We cannot compare StaleNAN with require.Equal, hence checking each histogram manually. 
@@ -3173,10 +3164,10 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) { lbls := labels.Labels{{Name: "a", Value: "b"}} type result struct { - t int64 - v float64 - h *histogram.Histogram - enc chunkenc.Encoding + t int64 + v float64 + h *histogram.Histogram + vt chunkenc.ValueType } expResult := []result{} ref := storage.SeriesRef(0) @@ -3184,18 +3175,18 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) { ref, err = app.Append(ref, lbls, ts, v) require.NoError(t, err) expResult = append(expResult, result{ - t: ts, - v: v, - enc: chunkenc.EncXOR, + t: ts, + v: v, + vt: chunkenc.ValFloat, }) } addHistogramSample := func(app storage.Appender, ts int64, h *histogram.Histogram) { ref, err = app.AppendHistogram(ref, lbls, ts, h) require.NoError(t, err) expResult = append(expResult, result{ - t: ts, - h: h, - enc: chunkenc.EncHistogram, + t: ts, + h: h, + vt: chunkenc.ValHistogram, }) } checkExpChunks := func(count int) { @@ -3269,17 +3260,25 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) { s := ss.At() it := s.Iterator() expIdx := 0 - for it.Next() { - require.Equal(t, expResult[expIdx].enc, it.ChunkEncoding()) - if it.ChunkEncoding() == chunkenc.EncHistogram { - ts, h := it.AtHistogram() - require.Equal(t, expResult[expIdx].t, ts) - require.Equal(t, expResult[expIdx].h, h) - } else { +loop: + for { + vt := it.Next() + switch vt { + case chunkenc.ValNone: + require.Equal(t, len(expResult), expIdx) + break loop + case chunkenc.ValFloat: ts, v := it.At() require.Equal(t, expResult[expIdx].t, ts) require.Equal(t, expResult[expIdx].v, v) + case chunkenc.ValHistogram: + ts, h := it.AtHistogram() + require.Equal(t, expResult[expIdx].t, ts) + require.Equal(t, expResult[expIdx].h, h) + default: + require.Error(t, fmt.Errorf("unexpected ValueType %v", vt)) } + require.Equal(t, expResult[expIdx].vt, vt) expIdx++ } require.NoError(t, it.Err()) diff --git a/tsdb/querier.go b/tsdb/querier.go index c3574d61d5..ad4733cc9c 100644 --- 
a/tsdb/querier.go +++ b/tsdb/querier.go @@ -14,6 +14,7 @@ package tsdb import ( + "fmt" "math" "sort" "strings" @@ -627,9 +628,11 @@ type populateWithDelSeriesIterator struct { curr chunkenc.Iterator } -func (p *populateWithDelSeriesIterator) Next() bool { - if p.curr != nil && p.curr.Next() { - return true +func (p *populateWithDelSeriesIterator) Next() chunkenc.ValueType { + if p.curr != nil { + if valueType := p.curr.Next(); valueType != chunkenc.ValNone { + return valueType + } } for p.next() { @@ -638,33 +641,41 @@ func (p *populateWithDelSeriesIterator) Next() bool { } else { p.curr = p.currChkMeta.Chunk.Iterator(nil) } - if p.curr.Next() { - return true + if valueType := p.curr.Next(); valueType != chunkenc.ValNone { + return valueType } } - return false + return chunkenc.ValNone } -func (p *populateWithDelSeriesIterator) Seek(t int64) bool { - if p.curr != nil && p.curr.Seek(t) { - return true - } - for p.Next() { - if p.curr.Seek(t) { - return true +func (p *populateWithDelSeriesIterator) Seek(t int64) chunkenc.ValueType { + if p.curr != nil { + if valueType := p.curr.Seek(t); valueType != chunkenc.ValNone { + return valueType } } - return false + for p.Next() != chunkenc.ValNone { + if valueType := p.curr.Seek(t); valueType != chunkenc.ValNone { + return valueType + } + } + return chunkenc.ValNone } -func (p *populateWithDelSeriesIterator) At() (int64, float64) { return p.curr.At() } +func (p *populateWithDelSeriesIterator) At() (int64, float64) { + return p.curr.At() +} func (p *populateWithDelSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { return p.curr.AtHistogram() } -func (p *populateWithDelSeriesIterator) ChunkEncoding() chunkenc.Encoding { - return p.curr.ChunkEncoding() +func (p *populateWithDelSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + return p.curr.AtFloatHistogram() +} + +func (p *populateWithDelSeriesIterator) AtT() int64 { + return p.curr.AtT() } func (p *populateWithDelSeriesIterator) Err() error 
{ @@ -693,61 +704,67 @@ func (p *populateWithDelChunkSeriesIterator) Next() bool { return true } - // Re-encode the chunk if iterator is provider. This means that it has some samples to be deleted or chunk is opened. - var ( - newChunk chunkenc.Chunk - app chunkenc.Appender - err error - ) - if p.currDelIter.ChunkEncoding() == chunkenc.EncHistogram { - newChunk = chunkenc.NewHistogramChunk() - app, err = newChunk.Appender() - } else { - newChunk = chunkenc.NewXORChunk() - app, err = newChunk.Appender() - } - if err != nil { - p.err = err - return false - } - - if !p.currDelIter.Next() { + valueType := p.currDelIter.Next() + if valueType == chunkenc.ValNone { if err := p.currDelIter.Err(); err != nil { p.err = errors.Wrap(err, "iterate chunk while re-encoding") return false } // Empty chunk, this should not happen, as we assume full deletions being filtered before this iterator. - p.err = errors.Wrap(err, "populateWithDelChunkSeriesIterator: unexpected empty chunk found while rewriting chunk") + p.err = errors.New("populateWithDelChunkSeriesIterator: unexpected empty chunk found while rewriting chunk") return false } + // Re-encode the chunk if iterator is provider. This means that it has some samples to be deleted or chunk is opened. 
var ( - t int64 - v float64 - h *histogram.Histogram + newChunk chunkenc.Chunk + app chunkenc.Appender + t int64 + err error ) - if p.currDelIter.ChunkEncoding() == chunkenc.EncHistogram { + switch valueType { + case chunkenc.ValHistogram: + newChunk = chunkenc.NewHistogramChunk() + if app, err = newChunk.Appender(); err != nil { + break + } if hc, ok := p.currChkMeta.Chunk.(*chunkenc.HistogramChunk); ok { newChunk.(*chunkenc.HistogramChunk).SetCounterResetHeader(hc.GetCounterResetHeader()) } + var h *histogram.Histogram t, h = p.currDelIter.AtHistogram() p.curr.MinTime = t - app.AppendHistogram(t, h.Copy()) - for p.currDelIter.Next() { + app.AppendHistogram(t, h) + for p.currDelIter.Next() == chunkenc.ValHistogram { + // TODO(beorn7): Is it possible that the value type changes during iteration? t, h = p.currDelIter.AtHistogram() - app.AppendHistogram(t, h.Copy()) + app.AppendHistogram(t, h) } - } else { + case chunkenc.ValFloat: + newChunk = chunkenc.NewXORChunk() + if app, err = newChunk.Appender(); err != nil { + break + } + var v float64 t, v = p.currDelIter.At() p.curr.MinTime = t app.Append(t, v) - for p.currDelIter.Next() { + for p.currDelIter.Next() == chunkenc.ValFloat { + // TODO(beorn7): Is it possible that the value type changes during iteration? t, v = p.currDelIter.At() app.Append(t, v) } + + default: + // TODO(beorn7): Need FloatHistogram eventually. 
+ err = fmt.Errorf("populateWithDelChunkSeriesIterator: value type %v unsupported", valueType) } + if err != nil { + p.err = errors.Wrap(err, "iterate chunk while re-encoding") + return false + } if err := p.currDelIter.Err(); err != nil { p.err = errors.Wrap(err, "iterate chunk while re-encoding") return false @@ -888,28 +905,29 @@ func (it *DeletedIterator) AtHistogram() (int64, *histogram.Histogram) { return t, h } -func (it *DeletedIterator) ChunkEncoding() chunkenc.Encoding { - return it.Iter.ChunkEncoding() +func (it *DeletedIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + t, h := it.Iter.AtFloatHistogram() + return t, h } -func (it *DeletedIterator) Seek(t int64) bool { +func (it *DeletedIterator) AtT() int64 { + return it.Iter.AtT() +} + +func (it *DeletedIterator) Seek(t int64) chunkenc.ValueType { if it.Iter.Err() != nil { - return false + return chunkenc.ValNone } - if ok := it.Iter.Seek(t); !ok { - return false + valueType := it.Iter.Seek(t) + if valueType == chunkenc.ValNone { + return chunkenc.ValNone } // Now double check if the entry falls into a deleted interval. - var ts int64 - if it.ChunkEncoding() == chunkenc.EncHistogram { - ts, _ = it.AtHistogram() - } else { - ts, _ = it.At() - } + ts := it.AtT() for _, itv := range it.Intervals { if ts < itv.Mint { - return true + return valueType } if ts > itv.Maxt { @@ -922,32 +940,26 @@ func (it *DeletedIterator) Seek(t int64) bool { } // The timestamp is greater than all the deleted intervals. 
- return true + return valueType } -func (it *DeletedIterator) Next() bool { +func (it *DeletedIterator) Next() chunkenc.ValueType { Outer: - for it.Iter.Next() { - var ts int64 - if it.ChunkEncoding() == chunkenc.EncHistogram { - ts, _ = it.AtHistogram() - } else { - ts, _ = it.At() - } - + for valueType := it.Iter.Next(); valueType != chunkenc.ValNone; valueType = it.Iter.Next() { + ts := it.AtT() for _, tr := range it.Intervals { if tr.InBounds(ts) { continue Outer } if ts <= tr.Maxt { - return true + return valueType } it.Intervals = it.Intervals[1:] } - return true + return valueType } - return false + return chunkenc.ValNone } func (it *DeletedIterator) Err() error { return it.Iter.Err() } diff --git a/tsdb/querier_test.go b/tsdb/querier_test.go index 555b029a7a..b930a0b39b 100644 --- a/tsdb/querier_test.go +++ b/tsdb/querier_test.go @@ -278,24 +278,24 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, + []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, ), storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}}, + []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, 
nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}}, []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, + []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, []tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}}, []tsdbutil.Sample{sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}}, + []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}}, []tsdbutil.Sample{sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, ), }), }, @@ -305,18 +305,18 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}}, + []tsdbutil.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, ), storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 
3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}}, []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}}, + []tsdbutil.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}}, []tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, []tsdbutil.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), }), }, @@ -329,20 +329,20 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, + []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, ), storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, 
nil}, sample{3, 4, nil}}, - []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, + []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}}, + []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, - []tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, + []tsdbutil.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), }), }, @@ -355,18 +355,18 @@ func TestBlockQuerier(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, + []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, ), storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, + []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), }), }, @@ -414,24 +414,24 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) { ms: 
[]*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, + []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, ), storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}}, + []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, + []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), 
storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}, sample{6, 7, nil}, sample{7, 2, nil}}, + []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}, sample{6, 7, nil, nil}, sample{7, 2, nil, nil}}, ), }), }, @@ -441,18 +441,18 @@ func TestBlockQuerier_AgainstHeadWithOpenChunks(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}}, + []tsdbutil.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, ), storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{2, 3, nil}, sample{3, 4, nil}, sample{5, 2, nil}, sample{6, 3, nil}}, + []tsdbutil.Sample{sample{2, 3, nil, nil}, sample{3, 4, nil, nil}, sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{2, 2, nil}, sample{3, 3, nil}, sample{5, 3, nil}, sample{6, 6, nil}}, + []tsdbutil.Sample{sample{2, 2, nil, nil}, sample{3, 3, nil, nil}, sample{5, 3, nil, nil}, sample{6, 6, nil, nil}}, ), }), }, @@ -493,22 +493,22 @@ var testData = []seriesSamples{ { lset: map[string]string{"a": "a"}, chunks: [][]sample{ - {{1, 2, nil}, {2, 3, nil}, {3, 4, 
nil}}, - {{5, 2, nil}, {6, 3, nil}, {7, 4, nil}}, + {{1, 2, nil, nil}, {2, 3, nil, nil}, {3, 4, nil, nil}}, + {{5, 2, nil, nil}, {6, 3, nil, nil}, {7, 4, nil, nil}}, }, }, { lset: map[string]string{"a": "a", "b": "b"}, chunks: [][]sample{ - {{1, 1, nil}, {2, 2, nil}, {3, 3, nil}}, - {{5, 3, nil}, {6, 6, nil}}, + {{1, 1, nil, nil}, {2, 2, nil, nil}, {3, 3, nil, nil}}, + {{5, 3, nil, nil}, {6, 6, nil, nil}}, }, }, { lset: map[string]string{"b": "b"}, chunks: [][]sample{ - {{1, 3, nil}, {2, 2, nil}, {3, 6, nil}}, - {{5, 1, nil}, {6, 7, nil}, {7, 2, nil}}, + {{1, 3, nil, nil}, {2, 2, nil, nil}, {3, 6, nil, nil}}, + {{5, 1, nil, nil}, {6, 7, nil, nil}, {7, 2, nil, nil}}, }, }, } @@ -555,24 +555,24 @@ func TestBlockQuerierDelete(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchRegexp, "a", ".*")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, + []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, ), storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{5, 3, nil}}, + []tsdbutil.Sample{sample{5, 3, nil, nil}}, ), storage.NewListSeries(labels.Labels{{Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}, sample{5, 1, nil}}, + []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}, sample{5, 1, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}, sample{7, 4, nil}}, + []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}, sample{7, 4, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{5, 
3, nil}}, + []tsdbutil.Sample{sample{5, 3, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{1, 3, nil}, sample{2, 2, nil}, sample{3, 6, nil}}, []tsdbutil.Sample{sample{5, 1, nil}}, + []tsdbutil.Sample{sample{1, 3, nil, nil}, sample{2, 2, nil, nil}, sample{3, 6, nil, nil}}, []tsdbutil.Sample{sample{5, 1, nil, nil}}, ), }), }, @@ -582,18 +582,18 @@ func TestBlockQuerierDelete(t *testing.T) { ms: []*labels.Matcher{labels.MustNewMatcher(labels.MatchEqual, "a", "a")}, exp: newMockSeriesSet([]storage.Series{ storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}}, + []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, ), storage.NewListSeries(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{5, 3, nil}}, + []tsdbutil.Sample{sample{5, 3, nil, nil}}, ), }), expChks: newMockChunkSeriesSet([]storage.ChunkSeries{ storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}}, - []tsdbutil.Sample{sample{5, 2, nil}, sample{6, 3, nil}}, + []tsdbutil.Sample{sample{5, 2, nil, nil}, sample{6, 3, nil, nil}}, ), storage.NewListChunkSeriesFromSamples(labels.Labels{{Name: "a", Value: "a"}, {Name: "b", Value: "b"}}, - []tsdbutil.Sample{sample{5, 3, nil}}, + []tsdbutil.Sample{sample{5, 3, nil, nil}}, ), }), }, @@ -676,57 +676,57 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "one chunk", chks: [][]tsdbutil.Sample{ - {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, + {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, }, expected: []tsdbutil.Sample{ - sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, + sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, }, expectedChks: []chunks.Meta{ 
tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, + sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, }), }, }, { name: "two full chunks", chks: [][]tsdbutil.Sample{ - {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, - {sample{7, 89, nil}, sample{9, 8, nil}}, + {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, }, expected: []tsdbutil.Sample{ - sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil}, + sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, }, expectedChks: []chunks.Meta{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, + sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, }), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{7, 89, nil}, sample{9, 8, nil}, + sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, }), }, }, { name: "three full chunks", chks: [][]tsdbutil.Sample{ - {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, - {sample{7, 89, nil}, sample{9, 8, nil}}, - {sample{10, 22, nil}, sample{203, 3493, nil}}, + {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, + {sample{10, 22, nil, nil}, sample{203, 3493, nil, nil}}, }, expected: []tsdbutil.Sample{ - sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil}, sample{10, 22, nil}, sample{203, 3493, nil}, + sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, 
sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, sample{10, 22, nil, nil}, sample{203, 3493, nil, nil}, }, expectedChks: []chunks.Meta{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}, + sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, }), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{7, 89, nil}, sample{9, 8, nil}, + sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, }), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{10, 22, nil}, sample{203, 3493, nil}, + sample{10, 22, nil, nil}, sample{203, 3493, nil, nil}, }), }, }, @@ -741,8 +741,8 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks and seek beyond chunks", chks: [][]tsdbutil.Sample{ - {sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, - {sample{7, 89, nil}, sample{9, 8, nil}}, + {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, }, seek: 10, @@ -751,27 +751,27 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks and seek on middle of first chunk", chks: [][]tsdbutil.Sample{ - {sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, - {sample{7, 89, nil}, sample{9, 8, nil}}, + {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, }, seek: 2, seekSuccess: true, expected: []tsdbutil.Sample{ - sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil}, + sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, }, }, { name: "two chunks and seek before first chunk", chks: [][]tsdbutil.Sample{ - {sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, - {sample{7, 89, nil}, sample{9, 8, nil}}, + {sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{7, 89, nil, nil}, sample{9, 8, nil, 
nil}}, }, seek: -32, seekSuccess: true, expected: []tsdbutil.Sample{ - sample{1, 2, nil}, sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil}, + sample{1, 2, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, }, }, // Deletion / Trim cases. @@ -783,60 +783,60 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with trimmed first and last samples from edge chunks", chks: [][]tsdbutil.Sample{ - {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, - {sample{7, 89, nil}, sample{9, 8, nil}}, + {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}), expected: []tsdbutil.Sample{ - sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, + sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, }, expectedChks: []chunks.Meta{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{3, 5, nil}, sample{6, 1, nil}, + sample{3, 5, nil, nil}, sample{6, 1, nil, nil}, }), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{7, 89, nil}, + sample{7, 89, nil, nil}, }), }, }, { name: "two chunks with trimmed middle sample of first chunk", chks: [][]tsdbutil.Sample{ - {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, - {sample{7, 89, nil}, sample{9, 8, nil}}, + {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: 2, Maxt: 3}}, expected: []tsdbutil.Sample{ - sample{1, 2, nil}, sample{6, 1, nil}, sample{7, 89, nil}, sample{9, 8, nil}, + sample{1, 2, nil, nil}, sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, }, expectedChks: 
[]chunks.Meta{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{1, 2, nil}, sample{6, 1, nil}, + sample{1, 2, nil, nil}, sample{6, 1, nil, nil}, }), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{7, 89, nil}, sample{9, 8, nil}, + sample{7, 89, nil, nil}, sample{9, 8, nil, nil}, }), }, }, { name: "two chunks with deletion across two chunks", chks: [][]tsdbutil.Sample{ - {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, - {sample{7, 89, nil}, sample{9, 8, nil}}, + {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: 6, Maxt: 7}}, expected: []tsdbutil.Sample{ - sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{9, 8, nil}, + sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{9, 8, nil, nil}, }, expectedChks: []chunks.Meta{ tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, + sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, }), tsdbutil.ChunkFromSamples([]tsdbutil.Sample{ - sample{9, 8, nil}, + sample{9, 8, nil, nil}, }), }, }, @@ -844,15 +844,15 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { { name: "two chunks with trimmed first and last samples from edge chunks, seek from middle of first chunk", chks: [][]tsdbutil.Sample{ - {sample{1, 2, nil}, sample{2, 3, nil}, sample{3, 5, nil}, sample{6, 1, nil}}, - {sample{7, 89, nil}, sample{9, 8, nil}}, + {sample{1, 2, nil, nil}, sample{2, 3, nil, nil}, sample{3, 5, nil, nil}, sample{6, 1, nil, nil}}, + {sample{7, 89, nil, nil}, sample{9, 8, nil, nil}}, }, intervals: tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 9, Maxt: math.MaxInt64}), seek: 3, seekSuccess: true, expected: []tsdbutil.Sample{ - sample{3, 5, nil}, sample{6, 1, nil}, sample{7, 89, nil}, + sample{3, 5, nil, nil}, 
sample{6, 1, nil, nil}, sample{7, 89, nil, nil}, }, }, } @@ -864,8 +864,8 @@ func TestPopulateWithTombSeriesIterators(t *testing.T) { var r []tsdbutil.Sample if tc.seek != 0 { - require.Equal(t, tc.seekSuccess, it.Seek(tc.seek)) - require.Equal(t, tc.seekSuccess, it.Seek(tc.seek)) // Next one should be noop. + require.Equal(t, tc.seekSuccess, it.Seek(tc.seek) == chunkenc.ValFloat) + require.Equal(t, tc.seekSuccess, it.Seek(tc.seek) == chunkenc.ValFloat) // Next one should be noop. if tc.seekSuccess { // After successful seek iterator is ready. Grab the value. @@ -908,14 +908,14 @@ func rmChunkRefs(chks []chunks.Meta) { func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) { f, chkMetas := createFakeReaderAndNotPopulatedChunks( []tsdbutil.Sample{}, - []tsdbutil.Sample{sample{1, 1, nil}, sample{2, 2, nil}, sample{3, 3, nil}}, - []tsdbutil.Sample{sample{4, 4, nil}, sample{5, 5, nil}}, + []tsdbutil.Sample{sample{1, 1, nil, nil}, sample{2, 2, nil, nil}, sample{3, 3, nil, nil}}, + []tsdbutil.Sample{sample{4, 4, nil, nil}, sample{5, 5, nil, nil}}, ) it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator() - require.True(t, it.Seek(1)) - require.True(t, it.Seek(2)) - require.True(t, it.Seek(2)) + require.Equal(t, chunkenc.ValFloat, it.Seek(1)) + require.Equal(t, chunkenc.ValFloat, it.Seek(2)) + require.Equal(t, chunkenc.ValFloat, it.Seek(2)) ts, v := it.At() require.Equal(t, int64(2), ts) require.Equal(t, float64(2), v) @@ -926,17 +926,17 @@ func TestPopulateWithDelSeriesIterator_DoubleSeek(t *testing.T) { func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) { f, chkMetas := createFakeReaderAndNotPopulatedChunks( []tsdbutil.Sample{}, - []tsdbutil.Sample{sample{1, 2, nil}, sample{3, 4, nil}, sample{5, 6, nil}, sample{7, 8, nil}}, + []tsdbutil.Sample{sample{1, 2, nil, nil}, sample{3, 4, nil, nil}, sample{5, 6, nil, nil}, sample{7, 8, nil, nil}}, []tsdbutil.Sample{}, ) it := newPopulateWithDelGenericSeriesIterator(f, 
chkMetas, nil).toSeriesIterator() - require.True(t, it.Next()) + require.Equal(t, chunkenc.ValFloat, it.Next()) ts, v := it.At() require.Equal(t, int64(1), ts) require.Equal(t, float64(2), v) - require.True(t, it.Seek(4)) + require.Equal(t, chunkenc.ValFloat, it.Seek(4)) ts, v = it.At() require.Equal(t, int64(5), ts) require.Equal(t, float64(6), v) @@ -944,25 +944,25 @@ func TestPopulateWithDelSeriesIterator_SeekInCurrentChunk(t *testing.T) { func TestPopulateWithDelSeriesIterator_SeekWithMinTime(t *testing.T) { f, chkMetas := createFakeReaderAndNotPopulatedChunks( - []tsdbutil.Sample{sample{1, 6, nil}, sample{5, 6, nil}, sample{6, 8, nil}}, + []tsdbutil.Sample{sample{1, 6, nil, nil}, sample{5, 6, nil, nil}, sample{6, 8, nil, nil}}, ) it := newPopulateWithDelGenericSeriesIterator(f, chkMetas, nil).toSeriesIterator() - require.Equal(t, false, it.Seek(7)) - require.Equal(t, true, it.Seek(3)) + require.Equal(t, chunkenc.ValNone, it.Seek(7)) + require.Equal(t, chunkenc.ValFloat, it.Seek(3)) } // Regression when calling Next() with a time bounded to fit within two samples. // Seek gets called and advances beyond the max time, which was just accepted as a valid sample. func TestPopulateWithDelSeriesIterator_NextWithMinTime(t *testing.T) { f, chkMetas := createFakeReaderAndNotPopulatedChunks( - []tsdbutil.Sample{sample{1, 6, nil}, sample{5, 6, nil}, sample{7, 8, nil}}, + []tsdbutil.Sample{sample{1, 6, nil, nil}, sample{5, 6, nil, nil}, sample{7, 8, nil, nil}}, ) it := newPopulateWithDelGenericSeriesIterator( f, chkMetas, tombstones.Intervals{{Mint: math.MinInt64, Maxt: 2}}.Add(tombstones.Interval{Mint: 4, Maxt: math.MaxInt64}), ).toSeriesIterator() - require.Equal(t, false, it.Next()) + require.Equal(t, chunkenc.ValNone, it.Next()) } // Test the cost of merging series sets for different number of merged sets and their size. 
@@ -1062,7 +1062,7 @@ func TestDeletedIterator(t *testing.T) { i := int64(-1) it := &DeletedIterator{Iter: chk.Iterator(nil), Intervals: c.r[:]} ranges := c.r[:] - for it.Next() { + for it.Next() == chunkenc.ValFloat { i++ for _, tr := range ranges { if tr.InBounds(i) { @@ -1124,9 +1124,9 @@ func TestDeletedIterator_WithSeek(t *testing.T) { for _, c := range cases { it := &DeletedIterator{Iter: chk.Iterator(nil), Intervals: c.r[:]} - require.Equal(t, c.ok, it.Seek(c.seek)) + require.Equal(t, c.ok, it.Seek(c.seek) == chunkenc.ValFloat) if c.ok { - ts, _ := it.At() + ts := it.AtT() require.Equal(t, c.seekedTs, ts) } } @@ -2057,7 +2057,7 @@ func benchQuery(b *testing.B, expExpansions int, q storage.Querier, selectors la s := ss.At() s.Labels() it := s.Iterator() - for it.Next() { + for it.Next() != chunkenc.ValNone { } actualExpansions++ } diff --git a/tsdb/record/record.go b/tsdb/record/record.go index 40c9ab2588..88eeb324e1 100644 --- a/tsdb/record/record.go +++ b/tsdb/record/record.go @@ -57,6 +57,7 @@ type RefSeries struct { } // RefSample is a timestamp/value pair associated with a reference to a series. +// TODO(beorn7): Perhaps make this "polymorphic", including histogram and float-histogram pointers? Then get rid of RefHistogram. type RefSample struct { Ref chunks.HeadSeriesRef T int64 diff --git a/tsdb/tsdbblockutil.go b/tsdb/tsdbblockutil.go index 7324463247..d55fa0c013 100644 --- a/tsdb/tsdbblockutil.go +++ b/tsdb/tsdbblockutil.go @@ -21,6 +21,7 @@ import ( "github.com/go-kit/log" "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/tsdb/chunkenc" ) var ErrInvalidTimes = fmt.Errorf("max time is lesser than min time") @@ -51,7 +52,8 @@ func CreateBlock(series []storage.Series, dir string, chunkRange int64, logger l ref := storage.SeriesRef(0) it := s.Iterator() lset := s.Labels() - for it.Next() { + for it.Next() == chunkenc.ValFloat { + // TODO(beorn7): Add histogram support. 
t, v := it.At() ref, err = app.Append(ref, lset, t, v) if err != nil { diff --git a/tsdb/tsdbutil/buffer.go b/tsdb/tsdbutil/buffer.go index 9433be77a3..5139ca0333 100644 --- a/tsdb/tsdbutil/buffer.go +++ b/tsdb/tsdbutil/buffer.go @@ -14,13 +14,20 @@ package tsdbutil import ( + "fmt" "math" + "github.com/pkg/errors" + "github.com/prometheus/prometheus/model/histogram" "github.com/prometheus/prometheus/tsdb/chunkenc" ) // BufferedSeriesIterator wraps an iterator with a look-back buffer. +// +// TODO(beorn7): BufferedSeriesIterator does not support Histograms or +// FloatHistograms. Either add support or remove BufferedSeriesIterator +// altogether (it seems unused). type BufferedSeriesIterator struct { it chunkenc.Iterator buf *sampleRing @@ -50,7 +57,7 @@ func (b *BufferedSeriesIterator) Buffer() chunkenc.Iterator { } // Seek advances the iterator to the element at time t or greater. -func (b *BufferedSeriesIterator) Seek(t int64) bool { +func (b *BufferedSeriesIterator) Seek(t int64) chunkenc.ValueType { t0 := t - b.buf.delta // If the delta would cause us to seek backwards, preserve the buffer @@ -58,35 +65,43 @@ func (b *BufferedSeriesIterator) Seek(t int64) bool { if t0 > b.lastTime { b.buf.reset() - ok := b.it.Seek(t0) - if !ok { - return false + if b.it.Seek(t0) == chunkenc.ValNone { + return chunkenc.ValNone } - b.lastTime, _ = b.At() + b.lastTime = b.AtT() } if b.lastTime >= t { - return true + return chunkenc.ValFloat } - for b.Next() { + for { + valueType := b.Next() + switch valueType { + case chunkenc.ValNone: + return chunkenc.ValNone + case chunkenc.ValFloat: + if b.lastTime >= t { + return valueType + } + default: + panic(fmt.Errorf("BufferedSeriesIterator: unsupported value type %v", valueType)) + } if b.lastTime >= t { - return true + return valueType } } - - return false } // Next advances the iterator to the next element. 
-func (b *BufferedSeriesIterator) Next() bool { +func (b *BufferedSeriesIterator) Next() chunkenc.ValueType { // Add current element to buffer before advancing. b.buf.add(b.it.At()) - ok := b.it.Next() - if ok { - b.lastTime, _ = b.At() + valueType := b.it.Next() + if valueType != chunkenc.ValNone { + b.lastTime = b.AtT() } - return ok + return valueType } // At returns the current element of the iterator. @@ -94,15 +109,31 @@ func (b *BufferedSeriesIterator) At() (int64, float64) { return b.it.At() } +// AtHistogram is unsupported. +func (b *BufferedSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { + panic(errors.New("BufferedSeriesIterator: AtHistogram not implemented")) +} + +// AtFloatHistogram is unsupported. +func (b *BufferedSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + panic(errors.New("BufferedSeriesIterator: AtFloatHistogram not implemented")) +} + +// At returns the timestamp of the current element of the iterator. +func (b *BufferedSeriesIterator) AtT() int64 { + return b.it.AtT() +} + // Err returns the last encountered error. 
func (b *BufferedSeriesIterator) Err() error { return b.it.Err() } type sample struct { - t int64 - v float64 - h *histogram.Histogram + t int64 + v float64 + h *histogram.Histogram + fh *histogram.FloatHistogram } func (s sample) T() int64 { @@ -117,6 +148,21 @@ func (s sample) H() *histogram.Histogram { return s.h } +func (s sample) FH() *histogram.FloatHistogram { + return s.fh +} + +func (s sample) Type() chunkenc.ValueType { + switch { + case s.h != nil: + return chunkenc.ValHistogram + case s.fh != nil: + return chunkenc.ValFloatHistogram + default: + return chunkenc.ValFloat + } +} + type sampleRing struct { delta int64 @@ -148,13 +194,16 @@ type sampleRingIterator struct { i int } -func (it *sampleRingIterator) Next() bool { +func (it *sampleRingIterator) Next() chunkenc.ValueType { it.i++ - return it.i < it.r.l + if it.i < it.r.l { + return chunkenc.ValFloat + } + return chunkenc.ValNone } -func (it *sampleRingIterator) Seek(int64) bool { - return false +func (it *sampleRingIterator) Seek(int64) chunkenc.ValueType { + return chunkenc.ValNone } func (it *sampleRingIterator) Err() error { @@ -166,12 +215,16 @@ func (it *sampleRingIterator) At() (int64, float64) { } func (it *sampleRingIterator) AtHistogram() (int64, *histogram.Histogram) { - // TODO(beorn7): Add proper histogram support. 
- return 0, nil + panic(errors.New("sampleRingIterator: AtHistogram not implemented")) } -func (it *sampleRingIterator) ChunkEncoding() chunkenc.Encoding { - return chunkenc.EncXOR +func (it *sampleRingIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + panic(errors.New("sampleRingIterator: AtFloatHistogram not implemented")) +} + +func (it *sampleRingIterator) AtT() int64 { + t, _ := it.r.at(it.i) + return t } func (r *sampleRing) at(i int) (int64, float64) { diff --git a/tsdb/tsdbutil/buffer_test.go b/tsdb/tsdbutil/buffer_test.go index 6423871cae..d88b8b6f7b 100644 --- a/tsdb/tsdbutil/buffer_test.go +++ b/tsdb/tsdbutil/buffer_test.go @@ -91,7 +91,7 @@ func TestBufferedSeriesIterator(t *testing.T) { bufferEq := func(exp []sample) { var b []sample bit := it.Buffer() - for bit.Next() { + for bit.Next() == chunkenc.ValFloat { t, v := bit.At() b = append(b, sample{t: t, v: v}) } @@ -114,29 +114,29 @@ func TestBufferedSeriesIterator(t *testing.T) { {t: 101, v: 10}, }), 2) - require.True(t, it.Seek(-123), "seek failed") + require.Equal(t, chunkenc.ValFloat, it.Seek(-123), "seek failed") sampleEq(1, 2) bufferEq(nil) - require.True(t, it.Next(), "next failed") + require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") sampleEq(2, 3) bufferEq([]sample{{t: 1, v: 2}}) - require.True(t, it.Next(), "next failed") - require.True(t, it.Next(), "next failed") - require.True(t, it.Next(), "next failed") + require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") + require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") + require.Equal(t, chunkenc.ValFloat, it.Next(), "next failed") sampleEq(5, 6) bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}}) - require.True(t, it.Seek(5), "seek failed") + require.Equal(t, chunkenc.ValFloat, it.Seek(5), "seek failed") sampleEq(5, 6) bufferEq([]sample{{t: 2, v: 3}, {t: 3, v: 4}, {t: 4, v: 5}}) - require.True(t, it.Seek(101), "seek failed") + require.Equal(t, chunkenc.ValFloat, it.Seek(101), "seek failed") 
sampleEq(101, 10) bufferEq([]sample{{t: 99, v: 8}, {t: 100, v: 9}}) - require.False(t, it.Next(), "next succeeded unexpectedly") + require.Equal(t, chunkenc.ValNone, it.Next(), "next succeeded unexpectedly") } type listSeriesIterator struct { @@ -158,26 +158,42 @@ func (it *listSeriesIterator) AtHistogram() (int64, *histogram.Histogram) { return s.t, s.h } -func (it *listSeriesIterator) ChunkEncoding() chunkenc.Encoding { - return chunkenc.EncXOR +func (it *listSeriesIterator) AtFloatHistogram() (int64, *histogram.FloatHistogram) { + s := it.list[it.idx] + return s.t, s.fh } -func (it *listSeriesIterator) Next() bool { +func (it *listSeriesIterator) AtT() int64 { + s := it.list[it.idx] + return s.t +} + +func (it *listSeriesIterator) Next() chunkenc.ValueType { it.idx++ - return it.idx < len(it.list) + if it.idx >= len(it.list) { + return chunkenc.ValNone + } + return it.list[it.idx].Type() } -func (it *listSeriesIterator) Seek(t int64) bool { +func (it *listSeriesIterator) Seek(t int64) chunkenc.ValueType { if it.idx == -1 { it.idx = 0 } + // No-op check. + if s := it.list[it.idx]; s.t >= t { + return s.Type() + } // Do binary search between current position and end. 
- it.idx = sort.Search(len(it.list)-it.idx, func(i int) bool { + it.idx += sort.Search(len(it.list)-it.idx, func(i int) bool { s := it.list[i+it.idx] return s.t >= t }) - return it.idx < len(it.list) + if it.idx >= len(it.list) { + return chunkenc.ValNone + } + return it.list[it.idx].Type() } func (it *listSeriesIterator) Err() error { diff --git a/tsdb/tsdbutil/chunks.go b/tsdb/tsdbutil/chunks.go index 93af3acfd2..af0f80772b 100644 --- a/tsdb/tsdbutil/chunks.go +++ b/tsdb/tsdbutil/chunks.go @@ -28,6 +28,8 @@ type Sample interface { T() int64 V() float64 H() *histogram.Histogram + FH() *histogram.FloatHistogram + Type() chunkenc.ValueType } type SampleSlice []Sample diff --git a/web/federate.go b/web/federate.go index 393a96bb66..137f7f08c8 100644 --- a/web/federate.go +++ b/web/federate.go @@ -33,6 +33,7 @@ import ( "github.com/prometheus/prometheus/promql/parser" "github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/tsdb" + "github.com/prometheus/prometheus/tsdb/chunkenc" ) var ( @@ -110,9 +111,10 @@ func (h *Handler) federation(w http.ResponseWriter, req *http.Request) { var t int64 var v float64 + var ok bool - ok := it.Seek(maxt) - if ok { + valueType := it.Seek(maxt) + if valueType == chunkenc.ValFloat { t, v = it.Values() } else { // TODO(beorn7): Handle histograms.