From c9d396f47679eaeeb4a8aa22e7c12667402f7b76 Mon Sep 17 00:00:00 2001 From: Fabian Reinartz Date: Sat, 22 Aug 2015 13:32:13 +0200 Subject: [PATCH 1/2] Replace metric.LabelPair with model.LabelPair --- storage/local/codable/codable.go | 6 +-- storage/local/index/index.go | 7 ++- storage/local/persistence.go | 5 +- storage/local/persistence_test.go | 38 +++++++------- storage/local/storage.go | 8 +-- storage/local/storage_test.go | 20 ++++---- storage/metric/labelpair.go | 64 ------------------------ storage/metric/labelpair_test.go | 83 ------------------------------- 8 files changed, 40 insertions(+), 191 deletions(-) delete mode 100644 storage/metric/labelpair.go delete mode 100644 storage/metric/labelpair_test.go diff --git a/storage/local/codable/codable.go b/storage/local/codable/codable.go index f04d9ca544..37c273d1fd 100644 --- a/storage/local/codable/codable.go +++ b/storage/local/codable/codable.go @@ -38,8 +38,6 @@ import ( "sync" "github.com/prometheus/common/model" - - "github.com/prometheus/prometheus/storage/metric" ) // A byteReader is an io.ByteReader that also implements the vanilla io.Reader @@ -282,9 +280,9 @@ func (fps *Fingerprints) UnmarshalBinary(buf []byte) error { return nil } -// LabelPair is a metric.LabelPair that implements +// LabelPair is a model.LabelPair that implements // encoding.BinaryMarshaler and encoding.BinaryUnmarshaler. -type LabelPair metric.LabelPair +type LabelPair model.LabelPair // MarshalBinary implements encoding.BinaryMarshaler. func (lp LabelPair) MarshalBinary() ([]byte, error) { diff --git a/storage/local/index/index.go b/storage/local/index/index.go index b6b1e87c04..8f1985f041 100644 --- a/storage/local/index/index.go +++ b/storage/local/index/index.go @@ -23,7 +23,6 @@ import ( "github.com/prometheus/common/model" "github.com/prometheus/prometheus/storage/local/codable" - "github.com/prometheus/prometheus/storage/metric" ) const ( @@ -183,7 +182,7 @@ func DeleteLabelNameLabelValuesIndex(basePath string) error { // LabelPairFingerprintsMapping is an in-memory map of label pairs to // fingerprints. -type LabelPairFingerprintsMapping map[metric.LabelPair]codable.FingerprintSet +type LabelPairFingerprintsMapping map[model.LabelPair]codable.FingerprintSet // LabelPairFingerprintIndex is a KeyValueStore that maps existing label pairs // to the fingerprints of all metrics containing those label pairs. @@ -216,7 +215,7 @@ func (i *LabelPairFingerprintIndex) IndexBatch(m LabelPairFingerprintsMapping) e // returned. // // This method is goroutine-safe. -func (i *LabelPairFingerprintIndex) Lookup(p metric.LabelPair) (fps model.Fingerprints, ok bool, err error) { +func (i *LabelPairFingerprintIndex) Lookup(p model.LabelPair) (fps model.Fingerprints, ok bool, err error) { ok, err = i.Get((codable.LabelPair)(p), (*codable.Fingerprints)(&fps)) return } @@ -226,7 +225,7 @@ func (i *LabelPairFingerprintIndex) Lookup(p metric.LabelPair) (fps model.Finger // returned. // // This method is goroutine-safe. 
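The re-keying above works because model.LabelPair, like the metric.LabelPair it replaces, is a plain struct of two string-typed fields and is therefore comparable, so LabelPairFingerprintsMapping can keep using it directly as a map key. A minimal standalone sketch (not part of the patch), assuming only the github.com/prometheus/common/model package already imported in these files:

package main

import (
    "fmt"

    "github.com/prometheus/common/model"
)

func main() {
    // A toy stand-in for LabelPairFingerprintsMapping: label pair -> fingerprints.
    mapping := map[model.LabelPair][]model.Fingerprint{}

    lp := model.LabelPair{Name: "job", Value: "api"}
    mapping[lp] = append(mapping[lp], model.Fingerprint(42))

    // Lookup works by plain struct equality on (Name, Value).
    fmt.Println(len(mapping[model.LabelPair{Name: "job", Value: "api"}])) // 1
}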
-func (i *LabelPairFingerprintIndex) LookupSet(p metric.LabelPair) (fps map[model.Fingerprint]struct{}, ok bool, err error) { +func (i *LabelPairFingerprintIndex) LookupSet(p model.LabelPair) (fps map[model.Fingerprint]struct{}, ok bool, err error) { ok, err = i.Get((codable.LabelPair)(p), (*codable.FingerprintSet)(&fps)) if fps == nil { fps = map[model.Fingerprint]struct{}{} diff --git a/storage/local/persistence.go b/storage/local/persistence.go index 9e191c0d84..8b3358e62b 100644 --- a/storage/local/persistence.go +++ b/storage/local/persistence.go @@ -34,7 +34,6 @@ import ( "github.com/prometheus/prometheus/storage/local/codable" "github.com/prometheus/prometheus/storage/local/index" - "github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/util/flock" ) @@ -334,7 +333,7 @@ func (p *persistence) setDirty(dirty bool) { // pair. This method is goroutine-safe but take into account that metrics queued // for indexing with IndexMetric might not have made it into the index // yet. (Same applies correspondingly to UnindexMetric.) -func (p *persistence) fingerprintsForLabelPair(lp metric.LabelPair) (model.Fingerprints, error) { +func (p *persistence) fingerprintsForLabelPair(lp model.LabelPair) (model.Fingerprints, error) { fps, _, err := p.labelPairToFingerprints.Lookup(lp) if err != nil { return nil, err @@ -1353,7 +1352,7 @@ loop: batchSize++ for ln, lv := range op.metric { - lp := metric.LabelPair{Name: ln, Value: lv} + lp := model.LabelPair{Name: ln, Value: lv} baseFPs, ok := pairToFPs[lp] if !ok { var err error diff --git a/storage/local/persistence_test.go b/storage/local/persistence_test.go index 0ef90dbb38..011c17d281 100644 --- a/storage/local/persistence_test.go +++ b/storage/local/persistence_test.go @@ -603,7 +603,7 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) { p.indexMetric(2, m2) p.waitForIndexing() - outFPs, err := p.fingerprintsForLabelPair(metric.LabelPair{Name: "n1", Value: "v1"}) + outFPs, err := p.fingerprintsForLabelPair(model.LabelPair{Name: "n1", Value: "v1"}) if err != nil { t.Fatal(err) } @@ -611,7 +611,7 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) { if !reflect.DeepEqual(outFPs, want) { t.Errorf("want %#v, got %#v", want, outFPs) } - outFPs, err = p.fingerprintsForLabelPair(metric.LabelPair{Name: "n2", Value: "v2"}) + outFPs, err = p.fingerprintsForLabelPair(model.LabelPair{Name: "n2", Value: "v2"}) if err != nil { t.Fatal(err) } @@ -635,7 +635,7 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) { } p.waitForIndexing() - outFPs, err = p.fingerprintsForLabelPair(metric.LabelPair{Name: "n1", Value: "v1"}) + outFPs, err = p.fingerprintsForLabelPair(model.LabelPair{Name: "n1", Value: "v1"}) if err != nil { t.Fatal(err) } @@ -643,7 +643,7 @@ func testDropArchivedMetric(t *testing.T, encoding chunkEncoding) { if !reflect.DeepEqual(outFPs, want) { t.Errorf("want %#v, got %#v", want, outFPs) } - outFPs, err = p.fingerprintsForLabelPair(metric.LabelPair{Name: "n2", Value: "v2"}) + outFPs, err = p.fingerprintsForLabelPair(model.LabelPair{Name: "n2", Value: "v2"}) if err != nil { t.Fatal(err) } @@ -708,27 +708,27 @@ func testIndexing(t *testing.T, encoding chunkEncoding) { }, }, expectedLpToFps: index.LabelPairFingerprintsMapping{ - metric.LabelPair{ + model.LabelPair{ Name: model.MetricNameLabel, Value: "metric_0", }: codable.FingerprintSet{0: struct{}{}, 1: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: model.MetricNameLabel, Value: "metric_1", }: 
codable.FingerprintSet{2: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: "label_1", Value: "value_1", }: codable.FingerprintSet{0: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: "label_1", Value: "value_2", }: codable.FingerprintSet{2: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: "label_2", Value: "value_2", }: codable.FingerprintSet{1: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: "label_3", Value: "value_3", }: codable.FingerprintSet{1: struct{}{}}, @@ -769,39 +769,39 @@ func testIndexing(t *testing.T, encoding chunkEncoding) { }, }, expectedLpToFps: index.LabelPairFingerprintsMapping{ - metric.LabelPair{ + model.LabelPair{ Name: model.MetricNameLabel, Value: "metric_0", }: codable.FingerprintSet{0: struct{}{}, 1: struct{}{}, 3: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: model.MetricNameLabel, Value: "metric_1", }: codable.FingerprintSet{2: struct{}{}, 5: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: model.MetricNameLabel, Value: "metric_2", }: codable.FingerprintSet{4: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: "label_1", Value: "value_1", }: codable.FingerprintSet{0: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: "label_1", Value: "value_2", }: codable.FingerprintSet{2: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: "label_1", Value: "value_3", }: codable.FingerprintSet{3: struct{}{}, 5: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: "label_2", Value: "value_2", }: codable.FingerprintSet{1: struct{}{}, 4: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: "label_3", Value: "value_1", }: codable.FingerprintSet{4: struct{}{}}, - metric.LabelPair{ + model.LabelPair{ Name: "label_3", Value: "value_3", }: codable.FingerprintSet{1: struct{}{}}, diff --git a/storage/local/storage.go b/storage/local/storage.go index 014d5cee7d..fb3f6423d2 100644 --- a/storage/local/storage.go +++ b/storage/local/storage.go @@ -385,7 +385,7 @@ func (s *memorySeriesStorage) NewPreloader() Preloader { // fingerprintsForLabelPairs returns the set of fingerprints that have the given labels. // This does not work with empty label values. -func (s *memorySeriesStorage) fingerprintsForLabelPairs(pairs ...metric.LabelPair) map[model.Fingerprint]struct{} { +func (s *memorySeriesStorage) fingerprintsForLabelPairs(pairs ...model.LabelPair) map[model.Fingerprint]struct{} { var result map[model.Fingerprint]struct{} for _, pair := range pairs { intersection := map[model.Fingerprint]struct{}{} @@ -412,12 +412,12 @@ func (s *memorySeriesStorage) fingerprintsForLabelPairs(pairs ...metric.LabelPai // MetricsForLabelMatchers implements Storage. 
func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelMatcher) map[model.Fingerprint]model.COWMetric { var ( - equals []metric.LabelPair + equals []model.LabelPair filters []*metric.LabelMatcher ) for _, lm := range matchers { if lm.Type == metric.Equal && lm.Value != "" { - equals = append(equals, metric.LabelPair{ + equals = append(equals, model.LabelPair{ Name: lm.Name, Value: lm.Value, }) @@ -446,7 +446,7 @@ func (s *memorySeriesStorage) MetricsForLabelMatchers(matchers ...*metric.LabelM return nil } for _, v := range matches { - fps := s.fingerprintsForLabelPairs(metric.LabelPair{ + fps := s.fingerprintsForLabelPairs(model.LabelPair{ Name: matcher.Name, Value: v, }) diff --git a/storage/local/storage_test.go b/storage/local/storage_test.go index f4d4e0852e..8e600cccf1 100644 --- a/storage/local/storage_test.go +++ b/storage/local/storage_test.go @@ -222,33 +222,33 @@ func TestFingerprintsForLabels(t *testing.T) { storage.WaitForIndexing() var matcherTests = []struct { - pairs []metric.LabelPair + pairs []model.LabelPair expected model.Fingerprints }{ { - pairs: []metric.LabelPair{{"label1", "x"}}, + pairs: []model.LabelPair{{"label1", "x"}}, expected: fingerprints[:0], }, { - pairs: []metric.LabelPair{{"label1", "test_0"}}, + pairs: []model.LabelPair{{"label1", "test_0"}}, expected: fingerprints[:10], }, { - pairs: []metric.LabelPair{ + pairs: []model.LabelPair{ {"label1", "test_0"}, {"label1", "test_1"}, }, expected: fingerprints[:0], }, { - pairs: []metric.LabelPair{ + pairs: []model.LabelPair{ {"label1", "test_0"}, {"label2", "test_1"}, }, expected: fingerprints[5:10], }, { - pairs: []metric.LabelPair{ + pairs: []model.LabelPair{ {"label1", "test_1"}, {"label2", "test_2"}, }, @@ -391,7 +391,7 @@ func TestRetentionCutoff(t *testing.T) { s.WaitForIndexing() var fp model.Fingerprint - for f := range s.fingerprintsForLabelPairs(metric.LabelPair{Name: "job", Value: "test"}) { + for f := range s.fingerprintsForLabelPairs(model.LabelPair{Name: "job", Value: "test"}) { fp = f break } @@ -455,7 +455,7 @@ func TestDropMetrics(t *testing.T) { } s.WaitForIndexing() - fps := s.fingerprintsForLabelPairs(metric.LabelPair{Name: model.MetricNameLabel, Value: "test"}) + fps := s.fingerprintsForLabelPairs(model.LabelPair{Name: model.MetricNameLabel, Value: "test"}) if len(fps) != 2 { t.Fatalf("unexpected number of fingerprints: %d", len(fps)) } @@ -472,7 +472,7 @@ func TestDropMetrics(t *testing.T) { s.DropMetricsForFingerprints(fpList[0]) s.WaitForIndexing() - fps2 := s.fingerprintsForLabelPairs(metric.LabelPair{ + fps2 := s.fingerprintsForLabelPairs(model.LabelPair{ Name: model.MetricNameLabel, Value: "test", }) if len(fps2) != 1 { @@ -491,7 +491,7 @@ func TestDropMetrics(t *testing.T) { s.DropMetricsForFingerprints(fpList...) s.WaitForIndexing() - fps3 := s.fingerprintsForLabelPairs(metric.LabelPair{ + fps3 := s.fingerprintsForLabelPairs(model.LabelPair{ Name: model.MetricNameLabel, Value: "test", }) if len(fps3) != 0 { diff --git a/storage/metric/labelpair.go b/storage/metric/labelpair.go deleted file mode 100644 index 4babe6f4e3..0000000000 --- a/storage/metric/labelpair.go +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package metric - -import ( - "github.com/prometheus/common/model" -) - -// LabelPair pairs a name with a value. -type LabelPair struct { - Name model.LabelName - Value model.LabelValue -} - -// Equal returns true iff both the Name and the Value of this LabelPair and o -// are equal. -func (l *LabelPair) Equal(o *LabelPair) bool { - switch { - case l.Name != o.Name: - return false - case l.Value != o.Value: - return false - default: - return true - } -} - -// LabelPairs is a sortable slice of LabelPair pointers. It implements -// sort.Interface. -type LabelPairs []*LabelPair - -func (l LabelPairs) Len() int { - return len(l) -} - -func (l LabelPairs) Less(i, j int) bool { - switch { - case l[i].Name > l[j].Name: - return false - case l[i].Name < l[j].Name: - return true - case l[i].Value > l[j].Value: - return false - case l[i].Value < l[j].Value: - return true - default: - return false - } -} - -func (l LabelPairs) Swap(i, j int) { - l[i], l[j] = l[j], l[i] -} diff --git a/storage/metric/labelpair_test.go b/storage/metric/labelpair_test.go deleted file mode 100644 index 12b47235e7..0000000000 --- a/storage/metric/labelpair_test.go +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2013 The Prometheus Authors -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
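The deleted storage/metric LabelPair carried an Equal method and LabelPairs a name-then-value sort order. A standalone sketch (not part of the patch) of the same ordering written against common/model; that model.LabelPairs exists there with the same ordering is an assumption, since the patch itself only shows the deletion:

package main

import (
    "fmt"
    "sort"

    "github.com/prometheus/common/model"
)

func main() {
    // Assumed: model.LabelPairs is a sortable []*model.LabelPair ordered by Name, then Value.
    pairs := model.LabelPairs{
        {Name: "aaa", Value: "aaa"},
        {Name: "ZZZ", Value: "aaa"},
    }
    sort.Sort(pairs)
    for _, p := range pairs {
        fmt.Println(p.Name, p.Value) // "ZZZ" sorts before "aaa" (bytewise label name order)
    }
}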
- -package metric - -import ( - "sort" - "testing" -) - -func testLabelPairs(t testing.TB) { - var scenarios = []struct { - in LabelPairs - out LabelPairs - }{ - { - in: LabelPairs{ - { - Name: "AAA", - Value: "aaa", - }, - }, - out: LabelPairs{ - { - Name: "AAA", - Value: "aaa", - }, - }, - }, - { - in: LabelPairs{ - { - Name: "aaa", - Value: "aaa", - }, - { - Name: "ZZZ", - Value: "aaa", - }, - }, - out: LabelPairs{ - { - Name: "ZZZ", - Value: "aaa", - }, - { - Name: "aaa", - Value: "aaa", - }, - }, - }, - } - - for i, scenario := range scenarios { - sort.Sort(scenario.in) - - for j, expected := range scenario.out { - if !expected.Equal(scenario.in[j]) { - t.Errorf("%d.%d expected %s, got %s", i, j, expected, scenario.in[j]) - } - } - } -} - -func TestLabelPairs(t *testing.T) { - testLabelPairs(t) -} - -func BenchmarkLabelPairs(b *testing.B) { - for i := 0; i < b.N; i++ { - testLabelPairs(b) - } -} From 1535ef1457562f047442a55a9a7a49dc491f40c3 Mon Sep 17 00:00:00 2001 From: Fabian Reinartz Date: Sat, 22 Aug 2015 14:52:35 +0200 Subject: [PATCH 2/2] Replace metric.SamplePair with model.SamplePair --- promql/engine.go | 28 +++++++++++------------ promql/functions.go | 12 +++++----- promql/test.go | 9 ++++---- storage/local/chunk.go | 16 ++++++------- storage/local/delta.go | 28 +++++++++++------------ storage/local/doubledelta.go | 32 +++++++++++++------------- storage/local/interface.go | 8 +++---- storage/local/persistence_test.go | 11 ++++----- storage/local/series.go | 38 +++++++++++++++---------------- storage/local/storage.go | 16 ++++++------- storage/local/storage_test.go | 4 ++-- storage/metric/sample.go | 35 +--------------------------- web/api/v1/api_test.go | 3 +-- 13 files changed, 102 insertions(+), 138 deletions(-) diff --git a/promql/engine.go b/promql/engine.go index 3ba6501871..e4406a08d7 100644 --- a/promql/engine.go +++ b/promql/engine.go @@ -33,8 +33,8 @@ import ( // SampleStream is a stream of Values belonging to an attached COWMetric. type SampleStream struct { - Metric model.COWMetric `json:"metric"` - Values metric.Values `json:"values"` + Metric model.COWMetric `json:"metric"` + Values []model.SamplePair `json:"values"` } // Sample is a single sample belonging to a COWMetric. @@ -47,11 +47,11 @@ type Sample struct { // MarshalJSON implements json.Marshaler. func (s *Sample) MarshalJSON() ([]byte, error) { v := struct { - Metric model.COWMetric `json:"metric"` - Value metric.SamplePair `json:"value"` + Metric model.COWMetric `json:"metric"` + Value model.SamplePair `json:"value"` }{ Metric: s.Metric, - Value: metric.SamplePair{ + Value: model.SamplePair{ Timestamp: s.Timestamp, Value: s.Value, }, @@ -480,10 +480,10 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) ( // as the fingerprint for scalar expressions. 
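Sample.MarshalJSON above now embeds a model.SamplePair. A standalone sketch (not part of the patch) of constructing one and marshalling it; that model.SamplePair emits the same [timestamp, "value"] JSON pair as the deleted metric.SamplePair, and the model.TimeFromUnix helper, are assumptions not shown in the patch:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/prometheus/common/model"
)

func main() {
    sp := model.SamplePair{
        Timestamp: model.TimeFromUnix(1440244333), // seconds since the Unix epoch
        Value:     3.14,
    }
    b, err := json.Marshal(sp)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(b)) // e.g. [1440244333,"3.14"]
}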
ss := sampleStreams[0] if ss == nil { - ss = &SampleStream{Values: make(metric.Values, 0, numSteps)} + ss = &SampleStream{Values: make([]model.SamplePair, 0, numSteps)} sampleStreams[0] = ss } - ss.Values = append(ss.Values, metric.SamplePair{ + ss.Values = append(ss.Values, model.SamplePair{ Value: v.Value, Timestamp: v.Timestamp, }) @@ -494,11 +494,11 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) ( if ss == nil { ss = &SampleStream{ Metric: sample.Metric, - Values: make(metric.Values, 0, numSteps), + Values: make([]model.SamplePair, 0, numSteps), } sampleStreams[fp] = ss } - ss.Values = append(ss.Values, metric.SamplePair{ + ss.Values = append(ss.Values, model.SamplePair{ Value: sample.Value, Timestamp: sample.Timestamp, }) @@ -1177,9 +1177,9 @@ var StalenessDelta = 5 * time.Minute // surrounding a given target time. If samples are found both before and after // the target time, the sample value is interpolated between these. Otherwise, // the single closest sample is returned verbatim. -func chooseClosestSample(samples metric.Values, timestamp model.Time) *metric.SamplePair { - var closestBefore *metric.SamplePair - var closestAfter *metric.SamplePair +func chooseClosestSample(samples []model.SamplePair, timestamp model.Time) *model.SamplePair { + var closestBefore *model.SamplePair + var closestAfter *model.SamplePair for _, candidate := range samples { delta := candidate.Timestamp.Sub(timestamp) // Samples before target time. @@ -1223,14 +1223,14 @@ func chooseClosestSample(samples metric.Values, timestamp model.Time) *metric.Sa // interpolateSamples interpolates a value at a target time between two // provided sample pairs. -func interpolateSamples(first, second *metric.SamplePair, timestamp model.Time) *metric.SamplePair { +func interpolateSamples(first, second *model.SamplePair, timestamp model.Time) *model.SamplePair { dv := second.Value - first.Value dt := second.Timestamp.Sub(first.Timestamp) dDt := dv / model.SampleValue(dt) offset := model.SampleValue(timestamp.Sub(first.Timestamp)) - return &metric.SamplePair{ + return &model.SamplePair{ Value: first.Value + (offset * dDt), Timestamp: timestamp, } diff --git a/promql/functions.go b/promql/functions.go index 570eedb599..a74538c994 100644 --- a/promql/functions.go +++ b/promql/functions.go @@ -258,7 +258,7 @@ func funcCountScalar(ev *evaluator, args Expressions) Value { } } -func aggrOverTime(ev *evaluator, args Expressions, aggrFn func(metric.Values) model.SampleValue) Value { +func aggrOverTime(ev *evaluator, args Expressions, aggrFn func([]model.SamplePair) model.SampleValue) Value { matrix := ev.evalMatrix(args[0]) resultVector := Vector{} @@ -279,7 +279,7 @@ func aggrOverTime(ev *evaluator, args Expressions, aggrFn func(metric.Values) mo // === avg_over_time(matrix ExprMatrix) Vector === func funcAvgOverTime(ev *evaluator, args Expressions) Value { - return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue { + return aggrOverTime(ev, args, func(values []model.SamplePair) model.SampleValue { var sum model.SampleValue for _, v := range values { sum += v.Value @@ -290,7 +290,7 @@ func funcAvgOverTime(ev *evaluator, args Expressions) Value { // === count_over_time(matrix ExprMatrix) Vector === func funcCountOverTime(ev *evaluator, args Expressions) Value { - return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue { + return aggrOverTime(ev, args, func(values []model.SamplePair) model.SampleValue { return model.SampleValue(len(values)) }) } @@ -307,7 
+307,7 @@ func funcFloor(ev *evaluator, args Expressions) Value { // === max_over_time(matrix ExprMatrix) Vector === func funcMaxOverTime(ev *evaluator, args Expressions) Value { - return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue { + return aggrOverTime(ev, args, func(values []model.SamplePair) model.SampleValue { max := math.Inf(-1) for _, v := range values { max = math.Max(max, float64(v.Value)) @@ -318,7 +318,7 @@ func funcMaxOverTime(ev *evaluator, args Expressions) Value { // === min_over_time(matrix ExprMatrix) Vector === func funcMinOverTime(ev *evaluator, args Expressions) Value { - return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue { + return aggrOverTime(ev, args, func(values []model.SamplePair) model.SampleValue { min := math.Inf(1) for _, v := range values { min = math.Min(min, float64(v.Value)) @@ -329,7 +329,7 @@ func funcMinOverTime(ev *evaluator, args Expressions) Value { // === sum_over_time(matrix ExprMatrix) Vector === func funcSumOverTime(ev *evaluator, args Expressions) Value { - return aggrOverTime(ev, args, func(values metric.Values) model.SampleValue { + return aggrOverTime(ev, args, func(values []model.SamplePair) model.SampleValue { var sum model.SampleValue for _, v := range values { sum += v.Value diff --git a/promql/test.go b/promql/test.go index 13ecec7411..fecc89caf0 100644 --- a/promql/test.go +++ b/promql/test.go @@ -26,7 +26,6 @@ import ( "github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage/local" - "github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/util/strutil" "github.com/prometheus/prometheus/util/testutil" ) @@ -239,14 +238,14 @@ func (*evalCmd) testCmd() {} type loadCmd struct { gap time.Duration metrics map[model.Fingerprint]model.Metric - defs map[model.Fingerprint]metric.Values + defs map[model.Fingerprint][]model.SamplePair } func newLoadCmd(gap time.Duration) *loadCmd { return &loadCmd{ gap: gap, metrics: map[model.Fingerprint]model.Metric{}, - defs: map[model.Fingerprint]metric.Values{}, + defs: map[model.Fingerprint][]model.SamplePair{}, } } @@ -258,11 +257,11 @@ func (cmd loadCmd) String() string { func (cmd *loadCmd) set(m model.Metric, vals ...sequenceValue) { fp := m.Fingerprint() - samples := make(metric.Values, 0, len(vals)) + samples := make([]model.SamplePair, 0, len(vals)) ts := testStartTime for _, v := range vals { if !v.omitted { - samples = append(samples, metric.SamplePair{ + samples = append(samples, model.SamplePair{ Timestamp: ts, Value: v.value, }) diff --git a/storage/local/chunk.go b/storage/local/chunk.go index c6463ad3a5..831b7e2a20 100644 --- a/storage/local/chunk.go +++ b/storage/local/chunk.go @@ -78,7 +78,7 @@ func newChunkDesc(c chunk) *chunkDesc { return &chunkDesc{c: c, rCnt: 1} } -func (cd *chunkDesc) add(s *metric.SamplePair) []chunk { +func (cd *chunkDesc) add(s *model.SamplePair) []chunk { cd.Lock() defer cd.Unlock() @@ -143,7 +143,7 @@ func (cd *chunkDesc) lastTime() model.Time { return cd.c.newIterator().lastTimestamp() } -func (cd *chunkDesc) lastSamplePair() *metric.SamplePair { +func (cd *chunkDesc) lastSamplePair() *model.SamplePair { cd.Lock() defer cd.Unlock() @@ -151,7 +151,7 @@ func (cd *chunkDesc) lastSamplePair() *metric.SamplePair { return nil } it := cd.c.newIterator() - return &metric.SamplePair{ + return &model.SamplePair{ Timestamp: it.lastTimestamp(), Value: it.lastSampleValue(), } @@ -215,7 +215,7 @@ type chunk interface { // any. 
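The aggrOverTime callbacks above now receive a plain []model.SamplePair instead of metric.Values. A standalone sketch (not part of the patch) of the avg_over_time aggregation written against that slice type; avgOverPairs is a hypothetical helper used only for illustration:

package main

import (
    "fmt"

    "github.com/prometheus/common/model"
)

// avgOverPairs mirrors the avg_over_time callback: sum the sample values and
// divide by the number of samples.
func avgOverPairs(values []model.SamplePair) model.SampleValue {
    var sum model.SampleValue
    for _, v := range values {
        sum += v.Value
    }
    return sum / model.SampleValue(len(values))
}

func main() {
    fmt.Println(avgOverPairs([]model.SamplePair{
        {Timestamp: 0, Value: 1},
        {Timestamp: 1000, Value: 2},
        {Timestamp: 2000, Value: 6},
    })) // 3
}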
The first chunk returned might be the same as the original one // or a newly allocated version. In any case, take the returned chunk as // the relevant one and discard the orginal chunk. - add(sample *metric.SamplePair) []chunk + add(sample *model.SamplePair) []chunk clone() chunk firstTime() model.Time newIterator() chunkIterator @@ -244,9 +244,9 @@ type chunkIterator interface { // value is returned. Only the first or last value is returned (as a // single value), if the given time is before or after the first or last // value, respectively. - valueAtTime(model.Time) metric.Values + valueAtTime(model.Time) []model.SamplePair // Gets all values contained within a given interval. - rangeValues(metric.Interval) metric.Values + rangeValues(metric.Interval) []model.SamplePair // Whether a given timestamp is contained between first and last value // in the chunk. contains(model.Time) bool @@ -254,10 +254,10 @@ type chunkIterator interface { // can be received in order. The channel is closed after the last // one. It is generally not safe to mutate the chunk while the channel // is still open. - values() <-chan *metric.SamplePair + values() <-chan *model.SamplePair } -func transcodeAndAdd(dst chunk, src chunk, s *metric.SamplePair) []chunk { +func transcodeAndAdd(dst chunk, src chunk, s *model.SamplePair) []chunk { chunkOps.WithLabelValues(transcode).Inc() head := dst diff --git a/storage/local/delta.go b/storage/local/delta.go index dd5e29920d..5db3df59a2 100644 --- a/storage/local/delta.go +++ b/storage/local/delta.go @@ -76,7 +76,7 @@ func newDeltaEncodedChunk(tb, vb deltaBytes, isInt bool, length int) *deltaEncod } // add implements chunk. -func (c deltaEncodedChunk) add(s *metric.SamplePair) []chunk { +func (c deltaEncodedChunk) add(s *model.SamplePair) []chunk { if c.len() == 0 { c = c[:deltaHeaderBytes] binary.LittleEndian.PutUint64(c[deltaHeaderBaseTimeOffset:], uint64(s.Timestamp)) @@ -288,36 +288,36 @@ type deltaEncodedChunkIterator struct { func (it *deltaEncodedChunkIterator) length() int { return it.len } // valueAtTime implements chunkIterator. -func (it *deltaEncodedChunkIterator) valueAtTime(t model.Time) metric.Values { +func (it *deltaEncodedChunkIterator) valueAtTime(t model.Time) []model.SamplePair { i := sort.Search(it.len, func(i int) bool { return !it.timestampAtIndex(i).Before(t) }) switch i { case 0: - return metric.Values{metric.SamplePair{ + return []model.SamplePair{{ Timestamp: it.timestampAtIndex(0), Value: it.sampleValueAtIndex(0), }} case it.len: - return metric.Values{metric.SamplePair{ + return []model.SamplePair{{ Timestamp: it.timestampAtIndex(it.len - 1), Value: it.sampleValueAtIndex(it.len - 1), }} default: ts := it.timestampAtIndex(i) if ts.Equal(t) { - return metric.Values{metric.SamplePair{ + return []model.SamplePair{{ Timestamp: ts, Value: it.sampleValueAtIndex(i), }} } - return metric.Values{ - metric.SamplePair{ + return []model.SamplePair{ + { Timestamp: it.timestampAtIndex(i - 1), Value: it.sampleValueAtIndex(i - 1), }, - metric.SamplePair{ + { Timestamp: ts, Value: it.sampleValueAtIndex(i), }, @@ -326,7 +326,7 @@ func (it *deltaEncodedChunkIterator) valueAtTime(t model.Time) metric.Values { } // rangeValues implements chunkIterator. 
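valueAtTime above locates samples with sort.Search over the chunk's timestamps. A standalone sketch (not part of the patch) of the same lookup pattern applied to a plain []model.SamplePair, using only the model.Time.Before method already used in the diff:

package main

import (
    "fmt"
    "sort"

    "github.com/prometheus/common/model"
)

func main() {
    samples := []model.SamplePair{
        {Timestamp: 1000, Value: 1},
        {Timestamp: 2000, Value: 2},
        {Timestamp: 3000, Value: 3},
    }
    t := model.Time(1500)
    // Index of the first sample at or after t; i-1 (when valid) is the sample before it.
    i := sort.Search(len(samples), func(j int) bool {
        return !samples[j].Timestamp.Before(t)
    })
    fmt.Println(i, samples[i-1].Timestamp, samples[i].Timestamp)
}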
-func (it *deltaEncodedChunkIterator) rangeValues(in metric.Interval) metric.Values { +func (it *deltaEncodedChunkIterator) rangeValues(in metric.Interval) []model.SamplePair { oldest := sort.Search(it.len, func(i int) bool { return !it.timestampAtIndex(i).Before(in.OldestInclusive) }) @@ -339,9 +339,9 @@ func (it *deltaEncodedChunkIterator) rangeValues(in metric.Interval) metric.Valu return nil } - result := make(metric.Values, 0, newest-oldest) + result := make([]model.SamplePair, 0, newest-oldest) for i := oldest; i < newest; i++ { - result = append(result, metric.SamplePair{ + result = append(result, model.SamplePair{ Timestamp: it.timestampAtIndex(i), Value: it.sampleValueAtIndex(i), }) @@ -355,11 +355,11 @@ func (it *deltaEncodedChunkIterator) contains(t model.Time) bool { } // values implements chunkIterator. -func (it *deltaEncodedChunkIterator) values() <-chan *metric.SamplePair { - valuesChan := make(chan *metric.SamplePair) +func (it *deltaEncodedChunkIterator) values() <-chan *model.SamplePair { + valuesChan := make(chan *model.SamplePair) go func() { for i := 0; i < it.len; i++ { - valuesChan <- &metric.SamplePair{ + valuesChan <- &model.SamplePair{ Timestamp: it.timestampAtIndex(i), Value: it.sampleValueAtIndex(i), } diff --git a/storage/local/doubledelta.go b/storage/local/doubledelta.go index 521a93ae14..d2a983689e 100644 --- a/storage/local/doubledelta.go +++ b/storage/local/doubledelta.go @@ -83,7 +83,7 @@ func newDoubleDeltaEncodedChunk(tb, vb deltaBytes, isInt bool, length int) *doub } // add implements chunk. -func (c doubleDeltaEncodedChunk) add(s *metric.SamplePair) []chunk { +func (c doubleDeltaEncodedChunk) add(s *model.SamplePair) []chunk { if c.len() == 0 { return c.addFirstSample(s) } @@ -321,7 +321,7 @@ func (c doubleDeltaEncodedChunk) isInt() bool { // addFirstSample is a helper method only used by c.add(). It adds timestamp and // value as base time and value. -func (c doubleDeltaEncodedChunk) addFirstSample(s *metric.SamplePair) []chunk { +func (c doubleDeltaEncodedChunk) addFirstSample(s *model.SamplePair) []chunk { c = c[:doubleDeltaHeaderBaseValueOffset+8] binary.LittleEndian.PutUint64( c[doubleDeltaHeaderBaseTimeOffset:], @@ -336,7 +336,7 @@ func (c doubleDeltaEncodedChunk) addFirstSample(s *metric.SamplePair) []chunk { // addSecondSample is a helper method only used by c.add(). It calculates the // base delta from the provided sample and adds it to the chunk. -func (c doubleDeltaEncodedChunk) addSecondSample(s *metric.SamplePair, tb, vb deltaBytes) []chunk { +func (c doubleDeltaEncodedChunk) addSecondSample(s *model.SamplePair, tb, vb deltaBytes) []chunk { baseTimeDelta := s.Timestamp - c.baseTime() if baseTimeDelta < 0 { panic("base time delta is less than zero") @@ -394,36 +394,36 @@ type doubleDeltaEncodedChunkIterator struct { func (it *doubleDeltaEncodedChunkIterator) length() int { return it.len } // valueAtTime implements chunkIterator. 
-func (it *doubleDeltaEncodedChunkIterator) valueAtTime(t model.Time) metric.Values { +func (it *doubleDeltaEncodedChunkIterator) valueAtTime(t model.Time) []model.SamplePair { i := sort.Search(it.len, func(i int) bool { return !it.timestampAtIndex(i).Before(t) }) switch i { case 0: - return metric.Values{metric.SamplePair{ + return []model.SamplePair{{ Timestamp: it.timestampAtIndex(0), Value: it.sampleValueAtIndex(0), }} case it.len: - return metric.Values{metric.SamplePair{ + return []model.SamplePair{{ Timestamp: it.timestampAtIndex(it.len - 1), Value: it.sampleValueAtIndex(it.len - 1), }} default: ts := it.timestampAtIndex(i) if ts.Equal(t) { - return metric.Values{metric.SamplePair{ + return []model.SamplePair{{ Timestamp: ts, Value: it.sampleValueAtIndex(i), }} } - return metric.Values{ - metric.SamplePair{ + return []model.SamplePair{ + { Timestamp: it.timestampAtIndex(i - 1), Value: it.sampleValueAtIndex(i - 1), }, - metric.SamplePair{ + { Timestamp: ts, Value: it.sampleValueAtIndex(i), }, @@ -432,7 +432,7 @@ func (it *doubleDeltaEncodedChunkIterator) valueAtTime(t model.Time) metric.Valu } // rangeValues implements chunkIterator. -func (it *doubleDeltaEncodedChunkIterator) rangeValues(in metric.Interval) metric.Values { +func (it *doubleDeltaEncodedChunkIterator) rangeValues(in metric.Interval) []model.SamplePair { oldest := sort.Search(it.len, func(i int) bool { return !it.timestampAtIndex(i).Before(in.OldestInclusive) }) @@ -445,9 +445,9 @@ func (it *doubleDeltaEncodedChunkIterator) rangeValues(in metric.Interval) metri return nil } - result := make(metric.Values, 0, newest-oldest) + result := make([]model.SamplePair, 0, newest-oldest) for i := oldest; i < newest; i++ { - result = append(result, metric.SamplePair{ + result = append(result, model.SamplePair{ Timestamp: it.timestampAtIndex(i), Value: it.sampleValueAtIndex(i), }) @@ -461,11 +461,11 @@ func (it *doubleDeltaEncodedChunkIterator) contains(t model.Time) bool { } // values implements chunkIterator. -func (it *doubleDeltaEncodedChunkIterator) values() <-chan *metric.SamplePair { - valuesChan := make(chan *metric.SamplePair) +func (it *doubleDeltaEncodedChunkIterator) values() <-chan *model.SamplePair { + valuesChan := make(chan *model.SamplePair) go func() { for i := 0; i < it.len; i++ { - valuesChan <- &metric.SamplePair{ + valuesChan <- &model.SamplePair{ Timestamp: it.timestampAtIndex(i), Value: it.sampleValueAtIndex(i), } diff --git a/storage/local/interface.go b/storage/local/interface.go index 6016590564..bf0b356d90 100644 --- a/storage/local/interface.go +++ b/storage/local/interface.go @@ -44,7 +44,7 @@ type Storage interface { // LastSamplePairForFingerprint returns the last sample pair for the // provided fingerprint. If the respective time series does not exist or // has an evicted head chunk, nil is returned. - LastSamplePairForFingerprint(model.Fingerprint) *metric.SamplePair + LastSamplePairForFingerprint(model.Fingerprint) *model.SamplePair // Get all of the label values that are associated with a given label name. LabelValuesForLabelName(model.LabelName) model.LabelValues // Get the metric associated with the provided fingerprint. @@ -80,12 +80,12 @@ type SeriesIterator interface { // value is returned. Only the first or last value is returned (as a // single value), if the given time is before or after the first or last // value, respectively. 
- ValueAtTime(model.Time) metric.Values + ValueAtTime(model.Time) []model.SamplePair // Gets the boundary values of an interval: the first and last value // within a given interval. - BoundaryValues(metric.Interval) metric.Values + BoundaryValues(metric.Interval) []model.SamplePair // Gets all values contained within a given interval. - RangeValues(metric.Interval) metric.Values + RangeValues(metric.Interval) []model.SamplePair } // A Preloader preloads series data necessary for a query into memory and pins diff --git a/storage/local/persistence_test.go b/storage/local/persistence_test.go index 011c17d281..ea6ce25e6e 100644 --- a/storage/local/persistence_test.go +++ b/storage/local/persistence_test.go @@ -23,7 +23,6 @@ import ( "github.com/prometheus/prometheus/storage/local/codable" "github.com/prometheus/prometheus/storage/local/index" - "github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/util/testutil" ) @@ -61,7 +60,7 @@ func buildTestChunks(encoding chunkEncoding) map[model.Fingerprint][]chunk { for _, fp := range fps { fpToChunks[fp] = make([]chunk, 0, 10) for i := 0; i < 10; i++ { - fpToChunks[fp] = append(fpToChunks[fp], newChunkForEncoding(encoding).add(&metric.SamplePair{ + fpToChunks[fp] = append(fpToChunks[fp], newChunkForEncoding(encoding).add(&model.SamplePair{ Timestamp: model.Time(i), Value: model.SampleValue(fp), })[0]) @@ -360,16 +359,16 @@ func testCheckpointAndLoadSeriesMapAndHeads(t *testing.T, encoding chunkEncoding s3 := newMemorySeries(m3, nil, time.Time{}) s4 := newMemorySeries(m4, nil, time.Time{}) s5 := newMemorySeries(m5, nil, time.Time{}) - s1.add(&metric.SamplePair{Timestamp: 1, Value: 3.14}) - s3.add(&metric.SamplePair{Timestamp: 2, Value: 2.7}) + s1.add(&model.SamplePair{Timestamp: 1, Value: 3.14}) + s3.add(&model.SamplePair{Timestamp: 2, Value: 2.7}) s3.headChunkClosed = true s3.persistWatermark = 1 for i := 0; i < 10000; i++ { - s4.add(&metric.SamplePair{ + s4.add(&model.SamplePair{ Timestamp: model.Time(i), Value: model.SampleValue(i) / 2, }) - s5.add(&metric.SamplePair{ + s5.add(&model.SamplePair{ Timestamp: model.Time(i), Value: model.SampleValue(i * i), }) diff --git a/storage/local/series.go b/storage/local/series.go index ef158aac6d..5de11fcdae 100644 --- a/storage/local/series.go +++ b/storage/local/series.go @@ -207,7 +207,7 @@ func newMemorySeries(m model.Metric, chunkDescs []*chunkDesc, modTime time.Time) // completed chunks (which are now eligible for persistence). // // The caller must have locked the fingerprint of the series. -func (s *memorySeries) add(v *metric.SamplePair) int { +func (s *memorySeries) add(v *model.SamplePair) int { if len(s.chunkDescs) == 0 || s.headChunkClosed { newHead := newChunkDesc(newChunk()) s.chunkDescs = append(s.chunkDescs, newHead) @@ -482,7 +482,7 @@ type memorySeriesIterator struct { } // ValueAtTime implements SeriesIterator. -func (it *memorySeriesIterator) ValueAtTime(t model.Time) metric.Values { +func (it *memorySeriesIterator) ValueAtTime(t model.Time) []model.SamplePair { // The most common case. We are iterating through a chunk. 
if it.chunkIt != nil && it.chunkIt.contains(t) { return it.chunkIt.valueAtTime(t) @@ -497,7 +497,7 @@ func (it *memorySeriesIterator) ValueAtTime(t model.Time) metric.Values { ts := it.chunkIt.timestampAtIndex(0) if !t.After(ts) { // return first value of first chunk - return metric.Values{metric.SamplePair{ + return []model.SamplePair{{ Timestamp: ts, Value: it.chunkIt.sampleValueAtIndex(0), }} @@ -508,7 +508,7 @@ func (it *memorySeriesIterator) ValueAtTime(t model.Time) metric.Values { ts = it.chunkIt.lastTimestamp() if !t.Before(ts) { // return last value of last chunk - return metric.Values{metric.SamplePair{ + return []model.SamplePair{{ Timestamp: ts, Value: it.chunkIt.sampleValueAtIndex(it.chunkIt.length() - 1), }} @@ -526,14 +526,14 @@ func (it *memorySeriesIterator) ValueAtTime(t model.Time) metric.Values { ts = it.chunkIt.lastTimestamp() if t.After(ts) { // We ended up between two chunks. - sp1 := metric.SamplePair{ + sp1 := model.SamplePair{ Timestamp: ts, Value: it.chunkIt.sampleValueAtIndex(it.chunkIt.length() - 1), } it.chunkIt = it.chunkIterator(l - i + 1) - return metric.Values{ + return []model.SamplePair{ sp1, - metric.SamplePair{ + model.SamplePair{ Timestamp: it.chunkIt.timestampAtIndex(0), Value: it.chunkIt.sampleValueAtIndex(0), }, @@ -543,7 +543,7 @@ func (it *memorySeriesIterator) ValueAtTime(t model.Time) metric.Values { } // BoundaryValues implements SeriesIterator. -func (it *memorySeriesIterator) BoundaryValues(in metric.Interval) metric.Values { +func (it *memorySeriesIterator) BoundaryValues(in metric.Interval) []model.SamplePair { // Find the first chunk for which the first sample is within the interval. i := sort.Search(len(it.chunks), func(i int) bool { return !it.chunks[i].firstTime().Before(in.OldestInclusive) @@ -554,7 +554,7 @@ func (it *memorySeriesIterator) BoundaryValues(in metric.Interval) metric.Values i-- } - values := make(metric.Values, 0, 2) + values := make([]model.SamplePair, 0, 2) for j, c := range it.chunks[i:] { if c.firstTime().After(in.NewestInclusive) { if len(values) == 1 { @@ -563,7 +563,7 @@ func (it *memorySeriesIterator) BoundaryValues(in metric.Interval) metric.Values // want must be the last value of the previous // chunk. So backtrack... chunkIt := it.chunkIterator(i + j - 1) - values = append(values, metric.SamplePair{ + values = append(values, model.SamplePair{ Timestamp: chunkIt.lastTimestamp(), Value: chunkIt.lastSampleValue(), }) @@ -590,7 +590,7 @@ func (it *memorySeriesIterator) BoundaryValues(in metric.Interval) metric.Values if len(values) == 1 { // We found exactly one value. In that case, add the most recent we know. chunkIt := it.chunkIterator(len(it.chunks) - 1) - values = append(values, metric.SamplePair{ + values = append(values, model.SamplePair{ Timestamp: chunkIt.lastTimestamp(), Value: chunkIt.lastSampleValue(), }) @@ -602,7 +602,7 @@ func (it *memorySeriesIterator) BoundaryValues(in metric.Interval) metric.Values } // RangeValues implements SeriesIterator. -func (it *memorySeriesIterator) RangeValues(in metric.Interval) metric.Values { +func (it *memorySeriesIterator) RangeValues(in metric.Interval) []model.SamplePair { // Find the first chunk for which the first sample is within the interval. 
i := sort.Search(len(it.chunks), func(i int) bool { return !it.chunks[i].firstTime().Before(in.OldestInclusive) @@ -613,7 +613,7 @@ func (it *memorySeriesIterator) RangeValues(in metric.Interval) metric.Values { i-- } - values := metric.Values{} + values := []model.SamplePair{} for j, c := range it.chunks[i:] { if c.firstTime().After(in.NewestInclusive) { break @@ -638,16 +638,16 @@ func (it *memorySeriesIterator) chunkIterator(i int) chunkIterator { type nopSeriesIterator struct{} // ValueAtTime implements SeriesIterator. -func (_ nopSeriesIterator) ValueAtTime(t model.Time) metric.Values { - return metric.Values{} +func (_ nopSeriesIterator) ValueAtTime(t model.Time) []model.SamplePair { + return []model.SamplePair{} } // BoundaryValues implements SeriesIterator. -func (_ nopSeriesIterator) BoundaryValues(in metric.Interval) metric.Values { - return metric.Values{} +func (_ nopSeriesIterator) BoundaryValues(in metric.Interval) []model.SamplePair { + return []model.SamplePair{} } // RangeValues implements SeriesIterator. -func (_ nopSeriesIterator) RangeValues(in metric.Interval) metric.Values { - return metric.Values{} +func (_ nopSeriesIterator) RangeValues(in metric.Interval) []model.SamplePair { + return []model.SamplePair{} } diff --git a/storage/local/storage.go b/storage/local/storage.go index fb3f6423d2..7883641df0 100644 --- a/storage/local/storage.go +++ b/storage/local/storage.go @@ -328,7 +328,7 @@ func (s *memorySeriesStorage) NewIterator(fp model.Fingerprint) SeriesIterator { } // LastSampleForFingerprint implements Storage. -func (s *memorySeriesStorage) LastSamplePairForFingerprint(fp model.Fingerprint) *metric.SamplePair { +func (s *memorySeriesStorage) LastSamplePairForFingerprint(fp model.Fingerprint) *model.SamplePair { s.fpLocker.Lock(fp) defer s.fpLocker.Unlock(fp) @@ -347,17 +347,17 @@ type boundedIterator struct { } // ValueAtTime implements the SeriesIterator interface. -func (bit *boundedIterator) ValueAtTime(ts model.Time) metric.Values { +func (bit *boundedIterator) ValueAtTime(ts model.Time) []model.SamplePair { if ts < bit.start { - return metric.Values{} + return []model.SamplePair{} } return bit.it.ValueAtTime(ts) } // BoundaryValues implements the SeriesIterator interface. -func (bit *boundedIterator) BoundaryValues(interval metric.Interval) metric.Values { +func (bit *boundedIterator) BoundaryValues(interval metric.Interval) []model.SamplePair { if interval.NewestInclusive < bit.start { - return metric.Values{} + return []model.SamplePair{} } if interval.OldestInclusive < bit.start { interval.OldestInclusive = bit.start @@ -366,9 +366,9 @@ func (bit *boundedIterator) BoundaryValues(interval metric.Interval) metric.Valu } // RangeValues implements the SeriesIterator interface. 
-func (bit *boundedIterator) RangeValues(interval metric.Interval) metric.Values { +func (bit *boundedIterator) RangeValues(interval metric.Interval) []model.SamplePair { if interval.NewestInclusive < bit.start { - return metric.Values{} + return []model.SamplePair{} } if interval.OldestInclusive < bit.start { interval.OldestInclusive = bit.start @@ -570,7 +570,7 @@ func (s *memorySeriesStorage) Append(sample *model.Sample) { s.fpLocker.Unlock(fp) return } - completedChunksCount := series.add(&metric.SamplePair{ + completedChunksCount := series.add(&model.SamplePair{ Value: sample.Value, Timestamp: sample.Timestamp, }) diff --git a/storage/local/storage_test.go b/storage/local/storage_test.go index 8e600cccf1..f7ed9a9351 100644 --- a/storage/local/storage_test.go +++ b/storage/local/storage_test.go @@ -571,7 +571,7 @@ func testChunk(t *testing.T, encoding chunkEncoding) { for m := range s.fpToSeries.iter() { s.fpLocker.Lock(m.fp) - var values metric.Values + var values []model.SamplePair for _, cd := range m.series.chunkDescs { if cd.isEvicted() { continue @@ -1527,7 +1527,7 @@ func TestAppendOutOfOrder(t *testing.T) { it := s.NewIterator(fp) - want := metric.Values{ + want := []model.SamplePair{ { Timestamp: 0, Value: 0, diff --git a/storage/metric/sample.go b/storage/metric/sample.go index 588edfd174..a30c2b4564 100644 --- a/storage/metric/sample.go +++ b/storage/metric/sample.go @@ -13,40 +13,7 @@ package metric -import ( - "fmt" - "strconv" - - "github.com/prometheus/common/model" -) - -// MarshalJSON implements json.Marshaler. -func (s SamplePair) MarshalJSON() ([]byte, error) { - return []byte(fmt.Sprintf("[%s, \"%s\"]", s.Timestamp.String(), strconv.FormatFloat(float64(s.Value), 'f', -1, 64))), nil -} - -// SamplePair pairs a SampleValue with a Timestamp. -type SamplePair struct { - Timestamp model.Time - Value model.SampleValue -} - -// Equal returns true if this SamplePair and o have equal Values and equal -// Timestamps. -func (s *SamplePair) Equal(o *SamplePair) bool { - if s == o { - return true - } - - return s.Value.Equal(o.Value) && s.Timestamp.Equal(o.Timestamp) -} - -func (s *SamplePair) String() string { - return fmt.Sprintf("SamplePair at %s of %s", s.Timestamp, s.Value) -} - -// Values is a slice of SamplePairs. -type Values []SamplePair +import "github.com/prometheus/common/model" // Interval describes the inclusive interval between two Timestamps. type Interval struct { diff --git a/web/api/v1/api_test.go b/web/api/v1/api_test.go index 64fe3a81d4..f5fcac28ec 100644 --- a/web/api/v1/api_test.go +++ b/web/api/v1/api_test.go @@ -16,7 +16,6 @@ import ( "golang.org/x/net/context" "github.com/prometheus/prometheus/promql" - "github.com/prometheus/prometheus/storage/metric" "github.com/prometheus/prometheus/util/route" ) @@ -103,7 +102,7 @@ func TestEndpoints(t *testing.T) { ResultType: promql.ExprMatrix, Result: promql.Matrix{ &promql.SampleStream{ - Values: metric.Values{ + Values: []model.SamplePair{ {Value: 0, Timestamp: start}, {Value: 1, Timestamp: start.Add(1 * time.Second)}, {Value: 2, Timestamp: start.Add(2 * time.Second)},