Mirror of https://github.com/prometheus/prometheus.git (synced 2025-12-07 10:31:03 +01:00)
feat: rename CreatedTimestamp to StartTimestamp (#17523)
Partially fixes https://github.com/prometheus/prometheus/issues/17416 by renaming all CT* names to ST* across the whole codebase, except RW2 (done in a separate [PR](https://github.com/prometheus/prometheus/pull/17411)) and the PrometheusProto exposition proto.

```
CreatedTimestamp  -> StartTimestamp
CreatedTimeStamp  -> StartTimestamp
created_timestamp -> start_timestamp
CT -> ST
ct -> st
```

Signed-off-by: bwplotka <bwplotka@gmail.com>
This commit is contained in: parent 675bafe2fb, commit f50ff0a40a
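To make the scope of the rename concrete, here is a minimal sketch of how a caller reads the start timestamp from the text parser after this commit. The sample payload and the `main` wrapper are illustrative only; the `textparse` identifiers used (`NewOpenMetricsParser`, `WithOMParserSTSeriesSkipped`, `EntrySeries`, `StartTimestamp`) are the post-rename names shown in the diff below, previously `WithOMParserCTSeriesSkipped` and `CreatedTimestamp`.

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/model/textparse"
)

func main() {
	// Illustrative OpenMetrics payload; the _created line carries the start ("created") timestamp.
	input := []byte(`# TYPE foo counter
foo_total 17.0
foo_created 1520872607.123
# EOF
`)

	// WithOMParserSTSeriesSkipped (formerly WithOMParserCTSeriesSkipped) hides the _created
	// series itself; it is then only used to answer StartTimestamp() for the owning series.
	p := textparse.NewOpenMetricsParser(input, labels.NewSymbolTable(), textparse.WithOMParserSTSeriesSkipped())

	for {
		entry, err := p.Next()
		if err != nil { // io.EOF once the input is exhausted
			return
		}
		if entry == textparse.EntrySeries {
			// Formerly p.CreatedTimestamp(); 0 still means "unknown / not set".
			fmt.Println("start timestamp (ms):", p.StartTimestamp())
		}
	}
}
```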
@@ -263,8 +263,8 @@ func (c *flagConfig) setFeatureListOptions(logger *slog.Logger) error {
case "ooo-native-histograms":
logger.Warn("This option for --enable-feature is now permanently enabled and therefore a no-op.", "option", o)
case "created-timestamp-zero-ingestion":
c.scrape.EnableCreatedTimestampZeroIngestion = true
c.web.CTZeroIngestionEnabled = true
c.scrape.EnableStartTimestampZeroIngestion = true
c.web.STZeroIngestionEnabled = true
// Change relevant global variables. Hacky, but it's hard to pass a new option or default to unmarshallers.
config.DefaultConfig.GlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
config.DefaultGlobalConfig.ScrapeProtocols = config.DefaultProtoFirstScrapeProtocols
@@ -1729,7 +1729,7 @@ func (notReadyAppender) AppendHistogram(storage.SeriesRef, labels.Labels, int64,
return 0, tsdb.ErrNotReady
}

func (notReadyAppender) AppendHistogramCTZeroSample(storage.SeriesRef, labels.Labels, int64, int64, *histogram.Histogram, *histogram.FloatHistogram) (storage.SeriesRef, error) {
func (notReadyAppender) AppendHistogramSTZeroSample(storage.SeriesRef, labels.Labels, int64, int64, *histogram.Histogram, *histogram.FloatHistogram) (storage.SeriesRef, error) {
return 0, tsdb.ErrNotReady
}

@@ -1737,7 +1737,7 @@ func (notReadyAppender) UpdateMetadata(storage.SeriesRef, labels.Labels, metadat
return 0, tsdb.ErrNotReady
}

func (notReadyAppender) AppendCTZeroSample(storage.SeriesRef, labels.Labels, int64, int64) (storage.SeriesRef, error) {
func (notReadyAppender) AppendSTZeroSample(storage.SeriesRef, labels.Labels, int64, int64) (storage.SeriesRef, error) {
return 0, tsdb.ErrNotReady
}

@@ -67,20 +67,27 @@ Enables PromQL functions that are considered experimental. These functions
might change their name, syntax, or semantics. They might also get removed
entirely.

## Created Timestamps Zero Injection
## Start (Created) Timestamps Zero Injection

`--enable-feature=created-timestamp-zero-ingestion`

Enables ingestion of created timestamp. Created timestamps are injected as 0 valued samples when appropriate. See [PromCon talk](https://youtu.be/nWf0BfQ5EEA) for details.
> NOTE: CreatedTimestamp feature was renamed to StartTimestamp for consistency. The above flag uses old name for stability.

Currently Prometheus supports created timestamps only on the traditional
Prometheus Protobuf protocol (WIP for other protocols). Therefore, enabling
this feature pre-sets the global `scrape_protocols` configuration option to
`[ PrometheusProto, OpenMetricsText1.0.0, OpenMetricsText0.0.1, PrometheusText0.0.4 ]`,
resulting in negotiating the Prometheus Protobuf protocol with first priority
(unless the `scrape_protocols` option is set to a different value explicitly).
Enables ingestion of start timestamp. Start timestamps are injected as 0 valued samples when appropriate. See [PromCon talk](https://youtu.be/nWf0BfQ5EEA) for details.

Besides enabling this feature in Prometheus, created timestamps need to be exposed by the application being scraped.
Currently, Prometheus supports start timestamps on the

* `PrometheusProto`
* `OpenMetrics1.0.0`

From the above, Prometheus recommends `PrometheusProto`. This is because OpenMetrics 1.0 Start Timestamp information is shared as a `<metric>_created` metric and parsing those
are prone to errors and expensive (thus, adding an overhead). You also need to be careful to not pollute your Prometheus with extra `_created` metrics.

Therefore, when `created-timestamp-zero-ingestion` is enabled Prometheus changes the global `scrape_protocols` default configuration option to
`[ PrometheusProto, OpenMetricsText1.0.0, OpenMetricsText0.0.1, PrometheusText0.0.4 ]`, resulting in negotiating the Prometheus Protobuf protocol first (unless the `scrape_protocols` option is set to a different value explicitly).

Besides enabling this feature in Prometheus, start timestamps need to be exposed by the application being scraped.

## Concurrent evaluation of independent rules

@@ -36,7 +36,7 @@ import (
// and allows comparison with expfmt decoders if applicable.
//
// NOTE(bwplotka): Previous iterations of this benchmark had different cases for isolated
// Series, Series+Metrics with and without reuse, Series+CT. Those cases are sometimes
// Series, Series+Metrics with and without reuse, Series+ST. Those cases are sometimes
// good to know if you are working on a certain optimization, but it does not
// make sense to persist such cases for everybody (e.g. for CI one day).
// For local iteration, feel free to adjust cases/comment out code etc.
@@ -153,7 +153,7 @@ func benchParse(b *testing.B, data []byte, parser string) {
}
case "omtext":
newParserFn = func(b []byte, st *labels.SymbolTable) Parser {
return NewOpenMetricsParser(b, st, WithOMParserCTSeriesSkipped())
return NewOpenMetricsParser(b, st, WithOMParserSTSeriesSkipped())
}
case "omtext_with_nhcb":
newParserFn = func(buf []byte, st *labels.SymbolTable) Parser {
@@ -206,7 +206,7 @@ func benchParse(b *testing.B, data []byte, parser string) {
}

p.Labels(&res)
_ = p.CreatedTimestamp()
_ = p.StartTimestamp()
for hasExemplar := p.Exemplar(&e); hasExemplar; hasExemplar = p.Exemplar(&e) {
}
}
@@ -266,11 +266,11 @@ func readTestdataFile(tb testing.TB, file string) []byte {

/*
export bench=v1 && go test ./model/textparse/... \
-run '^$' -bench '^BenchmarkCreatedTimestampPromProto' \
-run '^$' -bench '^BenchmarkStartTimestampPromProto' \
-benchtime 2s -count 6 -cpu 2 -benchmem -timeout 999m \
| tee ${bench}.txt
*/
func BenchmarkCreatedTimestampPromProto(b *testing.B) {
func BenchmarkStartTimestampPromProto(b *testing.B) {
data := createTestProtoBuf(b).Bytes()

st := labels.NewSymbolTable()
@@ -301,7 +301,7 @@ Inner:
b.ReportAllocs()
b.ResetTimer()
for b.Loop() {
if p.CreatedTimestamp() != 0 {
if p.StartTimestamp() != 0 {
b.Fatal("should be nil")
}
}
@@ -331,7 +331,7 @@ Inner2:
b.ReportAllocs()
b.ResetTimer()
for b.Loop() {
if p.CreatedTimestamp() == 0 {
if p.StartTimestamp() == 0 {
b.Fatal("should be not nil")
}
}

@@ -29,7 +29,7 @@ import (
type Parser interface {
// Series returns the bytes of a series with a simple float64 as a
// value, the timestamp if set, and the value of the current sample.
// TODO(bwplotka): Similar to CreatedTimestamp, have ts == 0 meaning no timestamp provided.
// TODO(bwplotka): Similar to StartTimestamp, have ts == 0 meaning no timestamp provided.
// We already accepted in many places (PRW, proto parsing histograms) that 0 timestamp is not a
// a valid timestamp. If needed it can be represented as 0+1ms.
Series() ([]byte, *int64, float64)
@@ -38,7 +38,7 @@ type Parser interface {
// value, the timestamp if set, and the histogram in the current sample.
// Depending on the parsed input, the function returns an (integer) Histogram
// or a FloatHistogram, with the respective other return value being nil.
// TODO(bwplotka): Similar to CreatedTimestamp, have ts == 0 meaning no timestamp provided.
// TODO(bwplotka): Similar to StartTimestamp, have ts == 0 meaning no timestamp provided.
// We already accepted in many places (PRW, proto parsing histograms) that 0 timestamp is not a
// a valid timestamp. If needed it can be represented as 0+1ms.
Histogram() ([]byte, *int64, *histogram.Histogram, *histogram.FloatHistogram)
@@ -76,10 +76,10 @@ type Parser interface {
// retrieved (including the case where no exemplars exist at all).
Exemplar(l *exemplar.Exemplar) bool

// CreatedTimestamp returns the created timestamp (in milliseconds) for the
// StartTimestamp returns the created timestamp (in milliseconds) for the
// current sample. It returns 0 if it is unknown e.g. if it wasn't set or
// if the scrape protocol or metric type does not support created timestamps.
CreatedTimestamp() int64
StartTimestamp() int64

// Next advances the parser to the next sample.
// It returns (EntryInvalid, io.EOF) if no samples were read.
@@ -146,9 +146,9 @@ type ParserOptions struct {
// that is also present as a native histogram. (Proto parsing only).
KeepClassicOnClassicAndNativeHistograms bool

// OpenMetricsSkipCTSeries determines whether to skip `_created` timestamp series
// OpenMetricsSkipSTSeries determines whether to skip `_created` timestamp series
// during (OpenMetrics parsing only).
OpenMetricsSkipCTSeries bool
OpenMetricsSkipSTSeries bool

// FallbackContentType specifies the fallback content type to use when the provided
// Content-Type header cannot be parsed or is not supported.
@@ -175,7 +175,7 @@ func New(b []byte, contentType string, st *labels.SymbolTable, opts ParserOption
switch mediaType {
case "application/openmetrics-text":
baseParser = NewOpenMetricsParser(b, st, func(o *openMetricsParserOptions) {
o.skipCTSeries = opts.OpenMetricsSkipCTSeries
o.skipSTSeries = opts.OpenMetricsSkipSTSeries
o.enableTypeAndUnitLabels = opts.EnableTypeAndUnitLabels
})
case "application/vnd.google.protobuf":

@@ -195,7 +195,7 @@ type parsedEntry struct {
lset labels.Labels
t *int64
es []exemplar.Exemplar
ct int64
st int64

// In EntryType.
typ model.MetricType
@@ -255,7 +255,7 @@ func testParse(t *testing.T, p Parser) (ret []parsedEntry) {
}
got.m = string(m)
p.Labels(&got.lset)
got.ct = p.CreatedTimestamp()
got.st = p.StartTimestamp()

for e := (exemplar.Exemplar{}); p.Exemplar(&e); {
got.es = append(got.es, e)

@@ -83,7 +83,7 @@ type NHCBParser struct {
fhNHCB *histogram.FloatHistogram
lsetNHCB labels.Labels
exemplars []exemplar.Exemplar
ctNHCB int64
stNHCB int64
metricStringNHCB string

// Collates values from the classic histogram series to build
@@ -92,7 +92,7 @@ type NHCBParser struct {
tempNHCB convertnhcb.TempHistogram
tempExemplars []exemplar.Exemplar
tempExemplarCount int
tempCT int64
tempST int64

// Remembers the last base histogram metric name (assuming it's
// a classic histogram) so we can tell if the next float series
@@ -159,16 +159,16 @@ func (p *NHCBParser) Exemplar(ex *exemplar.Exemplar) bool {
return p.parser.Exemplar(ex)
}

func (p *NHCBParser) CreatedTimestamp() int64 {
func (p *NHCBParser) StartTimestamp() int64 {
switch p.state {
case stateStart, stateInhibiting:
if p.entry == EntrySeries || p.entry == EntryHistogram {
return p.parser.CreatedTimestamp()
return p.parser.StartTimestamp()
}
case stateCollecting:
return p.tempCT
return p.tempST
case stateEmitting:
return p.ctNHCB
return p.stNHCB
}
return 0
}
@@ -318,7 +318,7 @@ func (p *NHCBParser) handleClassicHistogramSeries(lset labels.Labels) bool {
func (p *NHCBParser) processClassicHistogramSeries(lset labels.Labels, name string, updateHist func(*convertnhcb.TempHistogram)) {
if p.state != stateCollecting {
p.storeClassicLabels(name)
p.tempCT = p.parser.CreatedTimestamp()
p.tempST = p.parser.StartTimestamp()
p.state = stateCollecting
p.tempLsetNHCB = convertnhcb.GetHistogramMetricBase(lset, name)
}
@@ -385,13 +385,13 @@ func (p *NHCBParser) processNHCB() bool {
p.bytesNHCB = []byte(p.metricStringNHCB)
p.lsetNHCB = p.tempLsetNHCB
p.swapExemplars()
p.ctNHCB = p.tempCT
p.stNHCB = p.tempST
p.state = stateEmitting
} else {
p.state = stateStart
}
p.tempNHCB.Reset()
p.tempExemplarCount = 0
p.tempCT = 0
p.tempST = 0
return err == nil
}

@@ -67,13 +67,13 @@ ss{A="a"} 0
_metric_starting_with_underscore 1
testmetric{_label_starting_with_underscore="foo"} 1
testmetric{label="\"bar\""} 1
# HELP foo Counter with and without labels to certify CT is parsed for both cases
# HELP foo Counter with and without labels to certify ST is parsed for both cases
# TYPE foo counter
foo_total 17.0 1520879607.789 # {id="counter-test"} 5
foo_created 1520872607.123
foo_total{a="b"} 17.0 1520879607.789 # {id="counter-test"} 5
foo_created{a="b"} 1520872607.123
# HELP bar Summary with CT at the end, making sure we find CT even if it's multiple lines a far
# HELP bar Summary with ST at the end, making sure we find ST even if it's multiple lines a far
# TYPE bar summary
bar_count 17.0
bar_sum 324789.3
@@ -87,7 +87,7 @@ baz_bucket{le="+Inf"} 17
baz_count 17
baz_sum 324789.3
baz_created 1520872609.125
# HELP fizz_created Gauge which shouldn't be parsed as CT
# HELP fizz_created Gauge which shouldn't be parsed as ST
# TYPE fizz_created gauge
fizz_created 17.0
# HELP something Histogram with _created between buckets and summary
@ -279,7 +279,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
lset: labels.FromStrings("__name__", "testmetric", "label", `"bar"`),
|
||||
}, {
|
||||
m: "foo",
|
||||
help: "Counter with and without labels to certify CT is parsed for both cases",
|
||||
help: "Counter with and without labels to certify ST is parsed for both cases",
|
||||
}, {
|
||||
m: "foo",
|
||||
typ: model.MetricTypeCounter,
|
||||
@ -289,17 +289,17 @@ foobar{quantile="0.99"} 150.1`
|
||||
lset: labels.FromStrings("__name__", "foo_total"),
|
||||
t: int64p(1520879607789),
|
||||
es: []exemplar.Exemplar{{Labels: labels.FromStrings("id", "counter-test"), Value: 5}},
|
||||
ct: 1520872607123,
|
||||
st: 1520872607123,
|
||||
}, {
|
||||
m: `foo_total{a="b"}`,
|
||||
v: 17.0,
|
||||
lset: labels.FromStrings("__name__", "foo_total", "a", "b"),
|
||||
t: int64p(1520879607789),
|
||||
es: []exemplar.Exemplar{{Labels: labels.FromStrings("id", "counter-test"), Value: 5}},
|
||||
ct: 1520872607123,
|
||||
st: 1520872607123,
|
||||
}, {
|
||||
m: "bar",
|
||||
help: "Summary with CT at the end, making sure we find CT even if it's multiple lines a far",
|
||||
help: "Summary with ST at the end, making sure we find ST even if it's multiple lines a far",
|
||||
}, {
|
||||
m: "bar",
|
||||
typ: model.MetricTypeSummary,
|
||||
@ -307,22 +307,22 @@ foobar{quantile="0.99"} 150.1`
|
||||
m: "bar_count",
|
||||
v: 17.0,
|
||||
lset: labels.FromStrings("__name__", "bar_count"),
|
||||
ct: 1520872608124,
|
||||
st: 1520872608124,
|
||||
}, {
|
||||
m: "bar_sum",
|
||||
v: 324789.3,
|
||||
lset: labels.FromStrings("__name__", "bar_sum"),
|
||||
ct: 1520872608124,
|
||||
st: 1520872608124,
|
||||
}, {
|
||||
m: `bar{quantile="0.95"}`,
|
||||
v: 123.7,
|
||||
lset: labels.FromStrings("__name__", "bar", "quantile", "0.95"),
|
||||
ct: 1520872608124,
|
||||
st: 1520872608124,
|
||||
}, {
|
||||
m: `bar{quantile="0.99"}`,
|
||||
v: 150.0,
|
||||
lset: labels.FromStrings("__name__", "bar", "quantile", "0.99"),
|
||||
ct: 1520872608124,
|
||||
st: 1520872608124,
|
||||
}, {
|
||||
m: "baz",
|
||||
help: "Histogram with the same objective as above's summary",
|
||||
@ -340,10 +340,10 @@ foobar{quantile="0.99"} 150.1`
|
||||
CustomValues: []float64{0.0}, // We do not store the +Inf boundary.
|
||||
},
|
||||
lset: labels.FromStrings("__name__", "baz"),
|
||||
ct: 1520872609125,
|
||||
st: 1520872609125,
|
||||
}, {
|
||||
m: "fizz_created",
|
||||
help: "Gauge which shouldn't be parsed as CT",
|
||||
help: "Gauge which shouldn't be parsed as ST",
|
||||
}, {
|
||||
m: "fizz_created",
|
||||
typ: model.MetricTypeGauge,
|
||||
@ -368,7 +368,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
CustomValues: []float64{0.0}, // We do not store the +Inf boundary.
|
||||
},
|
||||
lset: labels.FromStrings("__name__", "something"),
|
||||
ct: 1520430001000,
|
||||
st: 1520430001000,
|
||||
}, {
|
||||
m: `something{a="b"}`,
|
||||
shs: &histogram.Histogram{
|
||||
@ -380,7 +380,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
CustomValues: []float64{0.0}, // We do not store the +Inf boundary.
|
||||
},
|
||||
lset: labels.FromStrings("__name__", "something", "a", "b"),
|
||||
ct: 1520430002000,
|
||||
st: 1520430002000,
|
||||
}, {
|
||||
m: "yum",
|
||||
help: "Summary with _created between sum and quantiles",
|
||||
@ -391,22 +391,22 @@ foobar{quantile="0.99"} 150.1`
|
||||
m: `yum_count`,
|
||||
v: 20,
|
||||
lset: labels.FromStrings("__name__", "yum_count"),
|
||||
ct: 1520430003000,
|
||||
st: 1520430003000,
|
||||
}, {
|
||||
m: `yum_sum`,
|
||||
v: 324789.5,
|
||||
lset: labels.FromStrings("__name__", "yum_sum"),
|
||||
ct: 1520430003000,
|
||||
st: 1520430003000,
|
||||
}, {
|
||||
m: `yum{quantile="0.95"}`,
|
||||
v: 123.7,
|
||||
lset: labels.FromStrings("__name__", "yum", "quantile", "0.95"),
|
||||
ct: 1520430003000,
|
||||
st: 1520430003000,
|
||||
}, {
|
||||
m: `yum{quantile="0.99"}`,
|
||||
v: 150.0,
|
||||
lset: labels.FromStrings("__name__", "yum", "quantile", "0.99"),
|
||||
ct: 1520430003000,
|
||||
st: 1520430003000,
|
||||
}, {
|
||||
m: "foobar",
|
||||
help: "Summary with _created as the first line",
|
||||
@ -417,22 +417,22 @@ foobar{quantile="0.99"} 150.1`
|
||||
m: `foobar_count`,
|
||||
v: 21,
|
||||
lset: labels.FromStrings("__name__", "foobar_count"),
|
||||
ct: 1520430004000,
|
||||
st: 1520430004000,
|
||||
}, {
|
||||
m: `foobar_sum`,
|
||||
v: 324789.6,
|
||||
lset: labels.FromStrings("__name__", "foobar_sum"),
|
||||
ct: 1520430004000,
|
||||
st: 1520430004000,
|
||||
}, {
|
||||
m: `foobar{quantile="0.95"}`,
|
||||
v: 123.8,
|
||||
lset: labels.FromStrings("__name__", "foobar", "quantile", "0.95"),
|
||||
ct: 1520430004000,
|
||||
st: 1520430004000,
|
||||
}, {
|
||||
m: `foobar{quantile="0.99"}`,
|
||||
v: 150.1,
|
||||
lset: labels.FromStrings("__name__", "foobar", "quantile", "0.99"),
|
||||
ct: 1520430004000,
|
||||
st: 1520430004000,
|
||||
}, {
|
||||
m: "metric",
|
||||
help: "foo\x00bar",
|
||||
@ -587,8 +587,8 @@ func TestNHCBParser_NoNHCBWhenExponential(t *testing.T) {
|
||||
}
|
||||
|
||||
type parserOptions struct {
|
||||
useUTF8sep bool
|
||||
hasCreatedTimeStamp bool
|
||||
useUTF8sep bool
|
||||
hasStartTimestamp bool
|
||||
}
|
||||
// Defines the parser name, the Parser factory and the test cases
|
||||
// supported by the parser and parser options.
|
||||
@ -598,14 +598,14 @@ func TestNHCBParser_NoNHCBWhenExponential(t *testing.T) {
|
||||
inputBuf := createTestProtoBufHistogram(t)
|
||||
return New(inputBuf.Bytes(), "application/vnd.google.protobuf", labels.NewSymbolTable(), ParserOptions{KeepClassicOnClassicAndNativeHistograms: keepClassic, ConvertClassicHistogramsToNHCB: nhcb})
|
||||
}
|
||||
return "ProtoBuf", factory, []int{1, 2, 3}, parserOptions{useUTF8sep: true, hasCreatedTimeStamp: true}
|
||||
return "ProtoBuf", factory, []int{1, 2, 3}, parserOptions{useUTF8sep: true, hasStartTimestamp: true}
|
||||
},
|
||||
func() (string, parserFactory, []int, parserOptions) {
|
||||
factory := func(keepClassic, nhcb bool) (Parser, error) {
|
||||
input := createTestOpenMetricsHistogram()
|
||||
return New([]byte(input), "application/openmetrics-text", labels.NewSymbolTable(), ParserOptions{KeepClassicOnClassicAndNativeHistograms: keepClassic, ConvertClassicHistogramsToNHCB: nhcb})
|
||||
}
|
||||
return "OpenMetrics", factory, []int{1}, parserOptions{hasCreatedTimeStamp: true}
|
||||
return "OpenMetrics", factory, []int{1}, parserOptions{hasStartTimestamp: true}
|
||||
},
|
||||
func() (string, parserFactory, []int, parserOptions) {
|
||||
factory := func(keepClassic, nhcb bool) (Parser, error) {
|
||||
@ -643,9 +643,9 @@ func TestNHCBParser_NoNHCBWhenExponential(t *testing.T) {
|
||||
typ: model.MetricTypeHistogram,
|
||||
})
|
||||
|
||||
var ct int64
|
||||
if options.hasCreatedTimeStamp {
|
||||
ct = 1000
|
||||
var st int64
|
||||
if options.hasStartTimestamp {
|
||||
st = 1000
|
||||
}
|
||||
|
||||
var bucketForMetric func(string) string
|
||||
@ -677,7 +677,7 @@ func TestNHCBParser_NoNHCBWhenExponential(t *testing.T) {
|
||||
},
|
||||
lset: labels.FromStrings("__name__", metric),
|
||||
t: int64p(1234568),
|
||||
ct: ct,
|
||||
st: st,
|
||||
},
|
||||
}
|
||||
tc.exp = append(tc.exp, exponentialSeries...)
|
||||
@ -690,42 +690,42 @@ func TestNHCBParser_NoNHCBWhenExponential(t *testing.T) {
|
||||
v: 175,
|
||||
lset: labels.FromStrings("__name__", metric+"_count"),
|
||||
t: int64p(1234568),
|
||||
ct: ct,
|
||||
st: st,
|
||||
},
|
||||
{
|
||||
m: metric + "_sum",
|
||||
v: 0.0008280461746287094,
|
||||
lset: labels.FromStrings("__name__", metric+"_sum"),
|
||||
t: int64p(1234568),
|
||||
ct: ct,
|
||||
st: st,
|
||||
},
|
||||
{
|
||||
m: metric + bucketForMetric("-0.0004899999999999998"),
|
||||
v: 2,
|
||||
lset: labels.FromStrings("__name__", metric+"_bucket", "le", "-0.0004899999999999998"),
|
||||
t: int64p(1234568),
|
||||
ct: ct,
|
||||
st: st,
|
||||
},
|
||||
{
|
||||
m: metric + bucketForMetric("-0.0003899999999999998"),
|
||||
v: 4,
|
||||
lset: labels.FromStrings("__name__", metric+"_bucket", "le", "-0.0003899999999999998"),
|
||||
t: int64p(1234568),
|
||||
ct: ct,
|
||||
st: st,
|
||||
},
|
||||
{
|
||||
m: metric + bucketForMetric("-0.0002899999999999998"),
|
||||
v: 16,
|
||||
lset: labels.FromStrings("__name__", metric+"_bucket", "le", "-0.0002899999999999998"),
|
||||
t: int64p(1234568),
|
||||
ct: ct,
|
||||
st: st,
|
||||
},
|
||||
{
|
||||
m: metric + bucketForMetric("+Inf"),
|
||||
v: 175,
|
||||
lset: labels.FromStrings("__name__", metric+"_bucket", "le", "+Inf"),
|
||||
t: int64p(1234568),
|
||||
ct: ct,
|
||||
st: st,
|
||||
},
|
||||
}
|
||||
tc.exp = append(tc.exp, classicSeries...)
|
||||
@ -745,7 +745,7 @@ func TestNHCBParser_NoNHCBWhenExponential(t *testing.T) {
|
||||
},
|
||||
lset: labels.FromStrings("__name__", metric),
|
||||
t: int64p(1234568),
|
||||
ct: ct,
|
||||
st: st,
|
||||
},
|
||||
}
|
||||
tc.exp = append(tc.exp, nhcbSeries...)
|
||||
@ -952,7 +952,7 @@ something_bucket{a="b",le="+Inf"} 9
|
||||
CustomValues: []float64{0.0}, // We do not store the +Inf boundary.
|
||||
},
|
||||
lset: labels.FromStrings("__name__", "something", "a", "b"),
|
||||
ct: 1520430002000,
|
||||
st: 1520430002000,
|
||||
},
|
||||
}
|
||||
|
||||
@ -1061,7 +1061,7 @@ metric: <
|
||||
},
|
||||
lset: labels.FromStrings("__name__", "test_histogram1"),
|
||||
t: int64p(1234568),
|
||||
ct: 1000,
|
||||
st: 1000,
|
||||
},
|
||||
{
|
||||
m: "test_histogram2",
|
||||
@ -1083,7 +1083,7 @@ metric: <
|
||||
},
|
||||
lset: labels.FromStrings("__name__", "test_histogram2"),
|
||||
t: int64p(1234568),
|
||||
ct: 1000,
|
||||
st: 1000,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@ -103,34 +103,34 @@ type OpenMetricsParser struct {
|
||||
hasExemplarTs bool
|
||||
|
||||
// Created timestamp parsing state.
|
||||
ct int64
|
||||
ctHashSet uint64
|
||||
st int64
|
||||
stHashSet uint64
|
||||
// ignoreExemplar instructs the parser to not overwrite exemplars (to keep them while peeking ahead).
|
||||
ignoreExemplar bool
|
||||
// visitedMFName is the metric family name of the last visited metric when peeking ahead
|
||||
// for _created series during the execution of the CreatedTimestamp method.
|
||||
// for _created series during the execution of the StartTimestamp method.
|
||||
visitedMFName []byte
|
||||
skipCTSeries bool
|
||||
skipSTSeries bool
|
||||
enableTypeAndUnitLabels bool
|
||||
}
|
||||
|
||||
type openMetricsParserOptions struct {
|
||||
skipCTSeries bool
|
||||
skipSTSeries bool
|
||||
enableTypeAndUnitLabels bool
|
||||
}
|
||||
|
||||
type OpenMetricsOption func(*openMetricsParserOptions)
|
||||
|
||||
// WithOMParserCTSeriesSkipped turns off exposing _created lines
|
||||
// WithOMParserSTSeriesSkipped turns off exposing _created lines
|
||||
// as series, which makes those only used for parsing created timestamp
|
||||
// for `CreatedTimestamp` method purposes.
|
||||
// for `StartTimestamp` method purposes.
|
||||
//
|
||||
// It's recommended to use this option to avoid using _created lines for other
|
||||
// purposes than created timestamp, but leave false by default for the
|
||||
// best-effort compatibility.
|
||||
func WithOMParserCTSeriesSkipped() OpenMetricsOption {
|
||||
func WithOMParserSTSeriesSkipped() OpenMetricsOption {
|
||||
return func(o *openMetricsParserOptions) {
|
||||
o.skipCTSeries = true
|
||||
o.skipSTSeries = true
|
||||
}
|
||||
}
|
||||
|
||||
@ -142,7 +142,7 @@ func WithOMParserTypeAndUnitLabels() OpenMetricsOption {
|
||||
}
|
||||
}
|
||||
|
||||
// NewOpenMetricsParser returns a new parser for the byte slice with option to skip CT series parsing.
|
||||
// NewOpenMetricsParser returns a new parser for the byte slice with option to skip ST series parsing.
|
||||
func NewOpenMetricsParser(b []byte, st *labels.SymbolTable, opts ...OpenMetricsOption) Parser {
|
||||
options := &openMetricsParserOptions{}
|
||||
|
||||
@ -153,7 +153,7 @@ func NewOpenMetricsParser(b []byte, st *labels.SymbolTable, opts ...OpenMetricsO
|
||||
parser := &OpenMetricsParser{
|
||||
l: &openMetricsLexer{b: b},
|
||||
builder: labels.NewScratchBuilderWithSymbolTable(st, 16),
|
||||
skipCTSeries: options.skipCTSeries,
|
||||
skipSTSeries: options.skipSTSeries,
|
||||
enableTypeAndUnitLabels: options.enableTypeAndUnitLabels,
|
||||
}
|
||||
|
||||
@ -285,12 +285,12 @@ func (p *OpenMetricsParser) Exemplar(e *exemplar.Exemplar) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// CreatedTimestamp returns the created timestamp for a current Metric if exists or nil.
|
||||
// StartTimestamp returns the created timestamp for a current Metric if exists or nil.
|
||||
// NOTE(Maniktherana): Might use additional CPU/mem resources due to deep copy of parser required for peeking given 1.0 OM specification on _created series.
|
||||
func (p *OpenMetricsParser) CreatedTimestamp() int64 {
|
||||
if !typeRequiresCT(p.mtype) {
|
||||
// Not a CT supported metric type, fast path.
|
||||
p.ctHashSet = 0 // Use ctHashSet as a single way of telling "empty cache"
|
||||
func (p *OpenMetricsParser) StartTimestamp() int64 {
|
||||
if !typeRequiresST(p.mtype) {
|
||||
// Not a ST supported metric type, fast path.
|
||||
p.stHashSet = 0 // Use stHashSet as a single way of telling "empty cache"
|
||||
return 0
|
||||
}
|
||||
|
||||
@ -307,8 +307,8 @@ func (p *OpenMetricsParser) CreatedTimestamp() int64 {
|
||||
|
||||
currHash := p.seriesHash(&buf, currName)
|
||||
// Check cache, perhaps we fetched something already.
|
||||
if currHash == p.ctHashSet && p.ct > 0 {
|
||||
return p.ct
|
||||
if currHash == p.stHashSet && p.st > 0 {
|
||||
return p.st
|
||||
}
|
||||
|
||||
// Create a new lexer and other core state details to reset the parser once this function is done executing.
|
||||
@ -322,7 +322,7 @@ func (p *OpenMetricsParser) CreatedTimestamp() int64 {
|
||||
resetStart := p.start
|
||||
resetMType := p.mtype
|
||||
|
||||
p.skipCTSeries = false
|
||||
p.skipSTSeries = false
|
||||
p.ignoreExemplar = true
|
||||
defer func() {
|
||||
p.l = resetLexer
|
||||
@ -334,38 +334,38 @@ func (p *OpenMetricsParser) CreatedTimestamp() int64 {
|
||||
for {
|
||||
eType, err := p.Next()
|
||||
if err != nil {
|
||||
// This means p.Next() will give error too later on, so def no CT line found.
|
||||
// This might result in partial scrape with wrong/missing CT, but only
|
||||
// This means p.Next() will give error too later on, so def no ST line found.
|
||||
// This might result in partial scrape with wrong/missing ST, but only
|
||||
// spec improvement would help.
|
||||
// TODO: Make sure OM 1.1/2.0 pass CT via metadata or exemplar-like to avoid this.
|
||||
p.resetCTParseValues()
|
||||
// TODO: Make sure OM 1.1/2.0 pass ST via metadata or exemplar-like to avoid this.
|
||||
p.resetSTParseValues()
|
||||
return 0
|
||||
}
|
||||
if eType != EntrySeries {
|
||||
// Assume we hit different family, no CT line found.
|
||||
p.resetCTParseValues()
|
||||
// Assume we hit different family, no ST line found.
|
||||
p.resetSTParseValues()
|
||||
return 0
|
||||
}
|
||||
|
||||
peekedName := p.series[p.offsets[0]-p.start : p.offsets[1]-p.start]
|
||||
if len(peekedName) < 8 || string(peekedName[len(peekedName)-8:]) != "_created" {
|
||||
// Not a CT line, search more.
|
||||
// Not a ST line, search more.
|
||||
continue
|
||||
}
|
||||
|
||||
// Remove _created suffix.
|
||||
peekedHash := p.seriesHash(&buf, peekedName[:len(peekedName)-8])
|
||||
if peekedHash != currHash {
|
||||
// Found CT line for a different series, for our series no CT.
|
||||
p.resetCTParseValues()
|
||||
// Found ST line for a different series, for our series no ST.
|
||||
p.resetSTParseValues()
|
||||
return 0
|
||||
}
|
||||
|
||||
// All timestamps in OpenMetrics are Unix Epoch in seconds. Convert to milliseconds.
|
||||
// https://github.com/prometheus/OpenMetrics/blob/v1.0.0/specification/OpenMetrics.md#timestamps
|
||||
ct := int64(p.val * 1000.0)
|
||||
p.setCTParseValues(ct, currHash, currName, true)
|
||||
return ct
|
||||
st := int64(p.val * 1000.0)
|
||||
p.setSTParseValues(st, currHash, currName, true)
|
||||
return st
|
||||
}
|
||||
}
|
||||
|
||||
@ -404,23 +404,23 @@ func (p *OpenMetricsParser) seriesHash(offsetsArr *[]byte, metricFamilyName []by
|
||||
return hashedOffsets
|
||||
}
|
||||
|
||||
// setCTParseValues sets the parser to the state after CreatedTimestamp method was called and CT was found.
|
||||
// This is useful to prevent re-parsing the same series again and early return the CT value.
|
||||
func (p *OpenMetricsParser) setCTParseValues(ct int64, ctHashSet uint64, mfName []byte, skipCTSeries bool) {
|
||||
p.ct = ct
|
||||
p.ctHashSet = ctHashSet
|
||||
// setSTParseValues sets the parser to the state after StartTimestamp method was called and ST was found.
|
||||
// This is useful to prevent re-parsing the same series again and early return the ST value.
|
||||
func (p *OpenMetricsParser) setSTParseValues(st int64, stHashSet uint64, mfName []byte, skipSTSeries bool) {
|
||||
p.st = st
|
||||
p.stHashSet = stHashSet
|
||||
p.visitedMFName = mfName
|
||||
p.skipCTSeries = skipCTSeries // Do we need to set it?
|
||||
p.skipSTSeries = skipSTSeries // Do we need to set it?
|
||||
}
|
||||
|
||||
// resetCTParseValues resets the parser to the state before CreatedTimestamp method was called.
|
||||
func (p *OpenMetricsParser) resetCTParseValues() {
|
||||
p.ctHashSet = 0
|
||||
p.skipCTSeries = true
|
||||
// resetSTParseValues resets the parser to the state before StartTimestamp method was called.
|
||||
func (p *OpenMetricsParser) resetSTParseValues() {
|
||||
p.stHashSet = 0
|
||||
p.skipSTSeries = true
|
||||
}
|
||||
|
||||
// typeRequiresCT returns true if the metric type requires a _created timestamp.
|
||||
func typeRequiresCT(t model.MetricType) bool {
|
||||
// typeRequiresST returns true if the metric type requires a _created timestamp.
|
||||
func typeRequiresST(t model.MetricType) bool {
|
||||
switch t {
|
||||
case model.MetricTypeCounter, model.MetricTypeSummary, model.MetricTypeHistogram:
|
||||
return true
|
||||
@ -544,7 +544,7 @@ func (p *OpenMetricsParser) Next() (Entry, error) {
|
||||
if err := p.parseSeriesEndOfLine(p.nextToken()); err != nil {
|
||||
return EntryInvalid, err
|
||||
}
|
||||
if p.skipCTSeries && p.isCreatedSeries() {
|
||||
if p.skipSTSeries && p.isCreatedSeries() {
|
||||
return p.Next()
|
||||
}
|
||||
return EntrySeries, nil
|
||||
@ -565,7 +565,7 @@ func (p *OpenMetricsParser) Next() (Entry, error) {
|
||||
if err := p.parseSeriesEndOfLine(t2); err != nil {
|
||||
return EntryInvalid, err
|
||||
}
|
||||
if p.skipCTSeries && p.isCreatedSeries() {
|
||||
if p.skipSTSeries && p.isCreatedSeries() {
|
||||
return p.Next()
|
||||
}
|
||||
return EntrySeries, nil
|
||||
@ -697,7 +697,7 @@ func (p *OpenMetricsParser) parseLVals(offsets []int, isExemplar bool) ([]int, e
|
||||
func (p *OpenMetricsParser) isCreatedSeries() bool {
|
||||
metricName := p.series[p.offsets[0]-p.start : p.offsets[1]-p.start]
|
||||
// check length so the metric is longer than len("_created")
|
||||
if typeRequiresCT(p.mtype) && len(metricName) >= 8 && string(metricName[len(metricName)-8:]) == "_created" {
|
||||
if typeRequiresST(p.mtype) && len(metricName) >= 8 && string(metricName[len(metricName)-8:]) == "_created" {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
|
||||
@ -66,7 +66,7 @@ ss{A="a"} 0
|
||||
_metric_starting_with_underscore 1
|
||||
testmetric{_label_starting_with_underscore="foo"} 1
|
||||
testmetric{label="\"bar\""} 1
|
||||
# HELP foo Counter with and without labels to certify CT is parsed for both cases
|
||||
# HELP foo Counter with and without labels to certify ST is parsed for both cases
|
||||
# TYPE foo counter
|
||||
foo_total 17.0 1520879607.789 # {id="counter-test"} 5
|
||||
foo_created 1520872607.123
|
||||
@ -75,7 +75,7 @@ foo_created{a="b"} 1520872607.123
|
||||
foo_total{le="c"} 21.0
|
||||
foo_created{le="c"} 1520872621.123
|
||||
foo_total{le="1"} 10.0
|
||||
# HELP bar Summary with CT at the end, making sure we find CT even if it's multiple lines a far
|
||||
# HELP bar Summary with ST at the end, making sure we find ST even if it's multiple lines a far
|
||||
# TYPE bar summary
|
||||
bar_count 17.0
|
||||
bar_sum 324789.3
|
||||
@ -89,7 +89,7 @@ baz_bucket{le="+Inf"} 17
|
||||
baz_count 17
|
||||
baz_sum 324789.3
|
||||
baz_created 1520872609.125
|
||||
# HELP fizz_created Gauge which shouldn't be parsed as CT
|
||||
# HELP fizz_created Gauge which shouldn't be parsed as ST
|
||||
# TYPE fizz_created gauge
|
||||
fizz_created 17.0
|
||||
# HELP something Histogram with _created between buckets and summary
|
||||
@ -351,7 +351,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
lset: labels.FromStrings("__name__", "testmetric", "label", `"bar"`),
|
||||
}, {
|
||||
m: "foo",
|
||||
help: "Counter with and without labels to certify CT is parsed for both cases",
|
||||
help: "Counter with and without labels to certify ST is parsed for both cases",
|
||||
}, {
|
||||
m: "foo",
|
||||
typ: model.MetricTypeCounter,
|
||||
@ -367,7 +367,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
es: []exemplar.Exemplar{
|
||||
{Labels: labels.FromStrings("id", "counter-test"), Value: 5},
|
||||
},
|
||||
ct: 1520872607123,
|
||||
st: 1520872607123,
|
||||
}, {
|
||||
m: `foo_total{a="b"}`,
|
||||
v: 17.0,
|
||||
@ -380,7 +380,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
es: []exemplar.Exemplar{
|
||||
{Labels: labels.FromStrings("id", "counter-test"), Value: 5},
|
||||
},
|
||||
ct: 1520872607123,
|
||||
st: 1520872607123,
|
||||
}, {
|
||||
m: `foo_total{le="c"}`,
|
||||
v: 21.0,
|
||||
@ -389,7 +389,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "foo_total", "__type__", string(model.MetricTypeCounter), "le", "c"),
|
||||
labels.FromStrings("__name__", "foo_total", "le", "c"),
|
||||
),
|
||||
ct: 1520872621123,
|
||||
st: 1520872621123,
|
||||
}, {
|
||||
m: `foo_total{le="1"}`,
|
||||
v: 10.0,
|
||||
@ -400,7 +400,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
),
|
||||
}, {
|
||||
m: "bar",
|
||||
help: "Summary with CT at the end, making sure we find CT even if it's multiple lines a far",
|
||||
help: "Summary with ST at the end, making sure we find ST even if it's multiple lines a far",
|
||||
}, {
|
||||
m: "bar",
|
||||
typ: model.MetricTypeSummary,
|
||||
@ -412,7 +412,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "bar_count", "__type__", string(model.MetricTypeSummary)),
|
||||
labels.FromStrings("__name__", "bar_count"),
|
||||
),
|
||||
ct: 1520872608124,
|
||||
st: 1520872608124,
|
||||
}, {
|
||||
m: "bar_sum",
|
||||
v: 324789.3,
|
||||
@ -421,7 +421,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "bar_sum", "__type__", string(model.MetricTypeSummary)),
|
||||
labels.FromStrings("__name__", "bar_sum"),
|
||||
),
|
||||
ct: 1520872608124,
|
||||
st: 1520872608124,
|
||||
}, {
|
||||
m: `bar{quantile="0.95"}`,
|
||||
v: 123.7,
|
||||
@ -430,7 +430,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "bar", "__type__", string(model.MetricTypeSummary), "quantile", "0.95"),
|
||||
labels.FromStrings("__name__", "bar", "quantile", "0.95"),
|
||||
),
|
||||
ct: 1520872608124,
|
||||
st: 1520872608124,
|
||||
}, {
|
||||
m: `bar{quantile="0.99"}`,
|
||||
v: 150.0,
|
||||
@ -439,7 +439,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "bar", "__type__", string(model.MetricTypeSummary), "quantile", "0.99"),
|
||||
labels.FromStrings("__name__", "bar", "quantile", "0.99"),
|
||||
),
|
||||
ct: 1520872608124,
|
||||
st: 1520872608124,
|
||||
}, {
|
||||
m: "baz",
|
||||
help: "Histogram with the same objective as above's summary",
|
||||
@ -454,7 +454,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "baz_bucket", "__type__", string(model.MetricTypeHistogram), "le", "0.0"),
|
||||
labels.FromStrings("__name__", "baz_bucket", "le", "0.0"),
|
||||
),
|
||||
ct: 1520872609125,
|
||||
st: 1520872609125,
|
||||
}, {
|
||||
m: `baz_bucket{le="+Inf"}`,
|
||||
v: 17,
|
||||
@ -463,7 +463,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "baz_bucket", "__type__", string(model.MetricTypeHistogram), "le", "+Inf"),
|
||||
labels.FromStrings("__name__", "baz_bucket", "le", "+Inf"),
|
||||
),
|
||||
ct: 1520872609125,
|
||||
st: 1520872609125,
|
||||
}, {
|
||||
m: `baz_count`,
|
||||
v: 17,
|
||||
@ -472,7 +472,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "baz_count", "__type__", string(model.MetricTypeHistogram)),
|
||||
labels.FromStrings("__name__", "baz_count"),
|
||||
),
|
||||
ct: 1520872609125,
|
||||
st: 1520872609125,
|
||||
}, {
|
||||
m: `baz_sum`,
|
||||
v: 324789.3,
|
||||
@ -481,10 +481,10 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "baz_sum", "__type__", string(model.MetricTypeHistogram)),
|
||||
labels.FromStrings("__name__", "baz_sum"),
|
||||
),
|
||||
ct: 1520872609125,
|
||||
st: 1520872609125,
|
||||
}, {
|
||||
m: "fizz_created",
|
||||
help: "Gauge which shouldn't be parsed as CT",
|
||||
help: "Gauge which shouldn't be parsed as ST",
|
||||
}, {
|
||||
m: "fizz_created",
|
||||
typ: model.MetricTypeGauge,
|
||||
@ -510,7 +510,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "something_count", "__type__", string(model.MetricTypeHistogram)),
|
||||
labels.FromStrings("__name__", "something_count"),
|
||||
),
|
||||
ct: 1520430001000,
|
||||
st: 1520430001000,
|
||||
}, {
|
||||
m: `something_sum`,
|
||||
v: 324789.4,
|
||||
@ -519,7 +519,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "something_sum", "__type__", string(model.MetricTypeHistogram)),
|
||||
labels.FromStrings("__name__", "something_sum"),
|
||||
),
|
||||
ct: 1520430001000,
|
||||
st: 1520430001000,
|
||||
}, {
|
||||
m: `something_bucket{le="0.0"}`,
|
||||
v: 1,
|
||||
@ -528,7 +528,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "something_bucket", "__type__", string(model.MetricTypeHistogram), "le", "0.0"),
|
||||
labels.FromStrings("__name__", "something_bucket", "le", "0.0"),
|
||||
),
|
||||
ct: 1520430001000,
|
||||
st: 1520430001000,
|
||||
}, {
|
||||
m: `something_bucket{le="1"}`,
|
||||
v: 2,
|
||||
@ -537,7 +537,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "something_bucket", "__type__", string(model.MetricTypeHistogram), "le", "1.0"),
|
||||
labels.FromStrings("__name__", "something_bucket", "le", "1.0"),
|
||||
),
|
||||
ct: 1520430001000,
|
||||
st: 1520430001000,
|
||||
}, {
|
||||
m: `something_bucket{le="+Inf"}`,
|
||||
v: 18,
|
||||
@ -546,7 +546,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "something_bucket", "__type__", string(model.MetricTypeHistogram), "le", "+Inf"),
|
||||
labels.FromStrings("__name__", "something_bucket", "le", "+Inf"),
|
||||
),
|
||||
ct: 1520430001000,
|
||||
st: 1520430001000,
|
||||
}, {
|
||||
m: "yum",
|
||||
help: "Summary with _created between sum and quantiles",
|
||||
@ -561,7 +561,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "yum_count", "__type__", string(model.MetricTypeSummary)),
|
||||
labels.FromStrings("__name__", "yum_count"),
|
||||
),
|
||||
ct: 1520430003000,
|
||||
st: 1520430003000,
|
||||
}, {
|
||||
m: `yum_sum`,
|
||||
v: 324789.5,
|
||||
@ -570,7 +570,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "yum_sum", "__type__", string(model.MetricTypeSummary)),
|
||||
labels.FromStrings("__name__", "yum_sum"),
|
||||
),
|
||||
ct: 1520430003000,
|
||||
st: 1520430003000,
|
||||
}, {
|
||||
m: `yum{quantile="0.95"}`,
|
||||
v: 123.7,
|
||||
@ -579,7 +579,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "yum", "__type__", string(model.MetricTypeSummary), "quantile", "0.95"),
|
||||
labels.FromStrings("__name__", "yum", "quantile", "0.95"),
|
||||
),
|
||||
ct: 1520430003000,
|
||||
st: 1520430003000,
|
||||
}, {
|
||||
m: `yum{quantile="0.99"}`,
|
||||
v: 150.0,
|
||||
@ -588,7 +588,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "yum", "__type__", string(model.MetricTypeSummary), "quantile", "0.99"),
|
||||
labels.FromStrings("__name__", "yum", "quantile", "0.99"),
|
||||
),
|
||||
ct: 1520430003000,
|
||||
st: 1520430003000,
|
||||
}, {
|
||||
m: "foobar",
|
||||
help: "Summary with _created as the first line",
|
||||
@ -603,7 +603,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "foobar_count", "__type__", string(model.MetricTypeSummary)),
|
||||
labels.FromStrings("__name__", "foobar_count"),
|
||||
),
|
||||
ct: 1520430004000,
|
||||
st: 1520430004000,
|
||||
}, {
|
||||
m: `foobar_sum`,
|
||||
v: 324789.6,
|
||||
@ -612,7 +612,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "foobar_sum", "__type__", string(model.MetricTypeSummary)),
|
||||
labels.FromStrings("__name__", "foobar_sum"),
|
||||
),
|
||||
ct: 1520430004000,
|
||||
st: 1520430004000,
|
||||
}, {
|
||||
m: `foobar{quantile="0.95"}`,
|
||||
v: 123.8,
|
||||
@ -621,7 +621,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "foobar", "__type__", string(model.MetricTypeSummary), "quantile", "0.95"),
|
||||
labels.FromStrings("__name__", "foobar", "quantile", "0.95"),
|
||||
),
|
||||
ct: 1520430004000,
|
||||
st: 1520430004000,
|
||||
}, {
|
||||
m: `foobar{quantile="0.99"}`,
|
||||
v: 150.1,
|
||||
@ -630,7 +630,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
labels.FromStrings("__name__", "foobar", "__type__", string(model.MetricTypeSummary), "quantile", "0.99"),
|
||||
labels.FromStrings("__name__", "foobar", "quantile", "0.99"),
|
||||
),
|
||||
ct: 1520430004000,
|
||||
st: 1520430004000,
|
||||
}, {
|
||||
m: "metric",
|
||||
help: "foo\x00bar",
|
||||
@ -640,7 +640,7 @@ foobar{quantile="0.99"} 150.1`
|
||||
lset: todoDetectFamilySwitch(typeAndUnitEnabled, labels.FromStrings("__name__", "null_byte_metric", "a", "abc\x00"), model.MetricTypeSummary),
|
||||
},
|
||||
}
|
||||
opts := []OpenMetricsOption{WithOMParserCTSeriesSkipped()}
|
||||
opts := []OpenMetricsOption{WithOMParserSTSeriesSkipped()}
|
||||
if typeAndUnitEnabled {
|
||||
opts = append(opts, WithOMParserTypeAndUnitLabels())
|
||||
}
|
||||
@ -684,12 +684,12 @@ quotedexemplar2_count 1 # {"id.thing"="histogram-count-test",other="hello"} 4
|
||||
m: `{"go.gc_duration_seconds",quantile="0"}`,
|
||||
v: 4.9351e-05,
|
||||
lset: labels.FromStrings("__name__", "go.gc_duration_seconds", "quantile", "0.0"),
|
||||
ct: 1520872607123,
|
||||
st: 1520872607123,
|
||||
}, {
|
||||
m: `{"go.gc_duration_seconds",quantile="0.25"}`,
|
||||
v: 7.424100000000001e-05,
|
||||
lset: labels.FromStrings("__name__", "go.gc_duration_seconds", "quantile", "0.25"),
|
||||
ct: 1520872607123,
|
||||
st: 1520872607123,
|
||||
}, {
|
||||
m: `{"go.gc_duration_seconds",quantile="0.5",a="b"}`,
|
||||
v: 8.3835e-05,
|
||||
@ -732,7 +732,7 @@ choices}`, "strange©™\n'quoted' \"name\"", "6"),
|
||||
},
|
||||
}
|
||||
|
||||
p := NewOpenMetricsParser([]byte(input), labels.NewSymbolTable(), WithOMParserCTSeriesSkipped())
|
||||
p := NewOpenMetricsParser([]byte(input), labels.NewSymbolTable(), WithOMParserSTSeriesSkipped())
|
||||
got := testParse(t, p)
|
||||
requireEntries(t, exp, got)
|
||||
}
|
||||
@ -1028,7 +1028,7 @@ func TestOpenMetricsParseErrors(t *testing.T) {
|
||||
}
|
||||
|
||||
for i, c := range cases {
|
||||
p := NewOpenMetricsParser([]byte(c.input), labels.NewSymbolTable(), WithOMParserCTSeriesSkipped())
|
||||
p := NewOpenMetricsParser([]byte(c.input), labels.NewSymbolTable(), WithOMParserSTSeriesSkipped())
|
||||
var err error
|
||||
for err == nil {
|
||||
_, err = p.Next()
|
||||
@ -1093,7 +1093,7 @@ func TestOMNullByteHandling(t *testing.T) {
|
||||
}
|
||||
|
||||
for i, c := range cases {
|
||||
p := NewOpenMetricsParser([]byte(c.input), labels.NewSymbolTable(), WithOMParserCTSeriesSkipped())
|
||||
p := NewOpenMetricsParser([]byte(c.input), labels.NewSymbolTable(), WithOMParserSTSeriesSkipped())
|
||||
var err error
|
||||
for err == nil {
|
||||
_, err = p.Next()
|
||||
@ -1108,10 +1108,10 @@ func TestOMNullByteHandling(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// TestCTParseFailures tests known failure edge cases, we know does not work due
|
||||
// TestSTParseFailures tests known failure edge cases, we know does not work due
|
||||
// current OM spec limitations or clients with broken OM format.
|
||||
// TODO(maniktherana): Make sure OM 1.1/2.0 pass CT via metadata or exemplar-like to avoid this.
|
||||
func TestCTParseFailures(t *testing.T) {
|
||||
// TODO(maniktherana): Make sure OM 1.1/2.0 pass ST via metadata or exemplar-like to avoid this.
|
||||
func TestSTParseFailures(t *testing.T) {
|
||||
for _, tcase := range []struct {
|
||||
name string
|
||||
input string
|
||||
@ -1143,19 +1143,19 @@ thing_c_total 14123.232
|
||||
},
|
||||
{
|
||||
m: `thing_count`,
|
||||
ct: 0, // Should be int64p(1520872607123).
|
||||
st: 0, // Should be int64p(1520872607123).
|
||||
},
|
||||
{
|
||||
m: `thing_sum`,
|
||||
ct: 0, // Should be int64p(1520872607123).
|
||||
st: 0, // Should be int64p(1520872607123).
|
||||
},
|
||||
{
|
||||
m: `thing_bucket{le="0.0"}`,
|
||||
ct: 0, // Should be int64p(1520872607123).
|
||||
st: 0, // Should be int64p(1520872607123).
|
||||
},
|
||||
{
|
||||
m: `thing_bucket{le="+Inf"}`,
|
||||
ct: 0, // Should be int64p(1520872607123),
|
||||
st: 0, // Should be int64p(1520872607123),
|
||||
},
|
||||
{
|
||||
m: "thing_c",
|
||||
@ -1167,7 +1167,7 @@ thing_c_total 14123.232
|
||||
},
|
||||
{
|
||||
m: `thing_c_total`,
|
||||
ct: 0, // Should be int64p(1520872607123).
|
||||
st: 0, // Should be int64p(1520872607123).
|
||||
},
|
||||
},
|
||||
},
|
||||
@ -1197,9 +1197,9 @@ foo_created{a="b"} 1520872608.123
|
||||
},
|
||||
} {
|
||||
t.Run(fmt.Sprintf("case=%v", tcase.name), func(t *testing.T) {
|
||||
p := NewOpenMetricsParser([]byte(tcase.input), labels.NewSymbolTable(), WithOMParserCTSeriesSkipped())
|
||||
p := NewOpenMetricsParser([]byte(tcase.input), labels.NewSymbolTable(), WithOMParserSTSeriesSkipped())
|
||||
got := testParse(t, p)
|
||||
resetValAndLset(got) // Keep this test focused on metric, basic entries and CT only.
|
||||
resetValAndLset(got) // Keep this test focused on metric, basic entries and ST only.
|
||||
requireEntries(t, tcase.expected, got)
|
||||
})
|
||||
}
|
||||
|
||||
@ -274,9 +274,9 @@ func (*PromParser) Exemplar(*exemplar.Exemplar) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// CreatedTimestamp returns 0 as it's not implemented yet.
|
||||
// StartTimestamp returns 0 as it's not implemented yet.
|
||||
// TODO(bwplotka): https://github.com/prometheus/prometheus/issues/12980
|
||||
func (*PromParser) CreatedTimestamp() int64 {
|
||||
func (*PromParser) StartTimestamp() int64 {
|
||||
return 0
|
||||
}
|
||||
|
||||
|
||||
@ -400,24 +400,24 @@ func (p *ProtobufParser) Exemplar(ex *exemplar.Exemplar) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
// CreatedTimestamp returns CT or 0 if CT is not present on counters, summaries or histograms.
|
||||
func (p *ProtobufParser) CreatedTimestamp() int64 {
|
||||
var ct *types.Timestamp
|
||||
// StartTimestamp returns ST or 0 if ST is not present on counters, summaries or histograms.
|
||||
func (p *ProtobufParser) StartTimestamp() int64 {
|
||||
var st *types.Timestamp
|
||||
switch p.dec.GetType() {
|
||||
case dto.MetricType_COUNTER:
|
||||
ct = p.dec.GetCounter().GetCreatedTimestamp()
|
||||
st = p.dec.GetCounter().GetCreatedTimestamp()
|
||||
case dto.MetricType_SUMMARY:
|
||||
ct = p.dec.GetSummary().GetCreatedTimestamp()
|
||||
st = p.dec.GetSummary().GetCreatedTimestamp()
|
||||
case dto.MetricType_HISTOGRAM, dto.MetricType_GAUGE_HISTOGRAM:
|
||||
ct = p.dec.GetHistogram().GetCreatedTimestamp()
|
||||
st = p.dec.GetHistogram().GetCreatedTimestamp()
|
||||
default:
|
||||
}
|
||||
if ct == nil {
|
||||
if st == nil {
|
||||
return 0
|
||||
}
|
||||
// Same as the gogo proto types.TimestampFromProto but straight to integer.
|
||||
// and without validation.
|
||||
return ct.GetSeconds()*1e3 + int64(ct.GetNanos())/1e6
|
||||
return st.GetSeconds()*1e3 + int64(st.GetNanos())/1e6
|
||||
}
|
||||
|
||||
// Next advances the parser to the next "sample" (emulating the behavior of a
|
||||
|
||||
@ -1334,7 +1334,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_counter_with_createdtimestamp",
|
||||
v: 42,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_counter_with_createdtimestamp",
|
||||
),
|
||||
@ -1350,7 +1350,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_summary_with_createdtimestamp_count",
|
||||
v: 42,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_summary_with_createdtimestamp_count",
|
||||
),
|
||||
@ -1358,7 +1358,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_summary_with_createdtimestamp_sum",
|
||||
v: 1.234,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_summary_with_createdtimestamp_sum",
|
||||
),
|
||||
@ -1373,7 +1373,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
},
|
||||
{
|
||||
m: "test_histogram_with_createdtimestamp",
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
shs: &histogram.Histogram{
|
||||
CounterResetHint: histogram.UnknownCounterReset,
|
||||
PositiveSpans: []histogram.Span{},
|
||||
@ -1393,7 +1393,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
},
|
||||
{
|
||||
m: "test_gaugehistogram_with_createdtimestamp",
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
shs: &histogram.Histogram{
|
||||
CounterResetHint: histogram.GaugeType,
|
||||
PositiveSpans: []histogram.Span{},
|
||||
@ -1999,7 +1999,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_counter_with_createdtimestamp\xff__type__\xffcounter",
|
||||
v: 42,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_counter_with_createdtimestamp",
|
||||
"__type__", string(model.MetricTypeCounter),
|
||||
@ -2016,7 +2016,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_summary_with_createdtimestamp_count\xff__type__\xffsummary",
|
||||
v: 42,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_summary_with_createdtimestamp_count",
|
||||
"__type__", string(model.MetricTypeSummary),
|
||||
@ -2025,7 +2025,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_summary_with_createdtimestamp_sum\xff__type__\xffsummary",
|
||||
v: 1.234,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_summary_with_createdtimestamp_sum",
|
||||
"__type__", string(model.MetricTypeSummary),
|
||||
@ -2041,7 +2041,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
},
|
||||
{
|
||||
m: "test_histogram_with_createdtimestamp\xff__type__\xffhistogram",
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
shs: &histogram.Histogram{
|
||||
CounterResetHint: histogram.UnknownCounterReset,
|
||||
PositiveSpans: []histogram.Span{},
|
||||
@ -2062,7 +2062,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
},
|
||||
{
|
||||
m: "test_gaugehistogram_with_createdtimestamp\xff__type__\xffgaugehistogram",
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
shs: &histogram.Histogram{
|
||||
CounterResetHint: histogram.GaugeType,
|
||||
PositiveSpans: []histogram.Span{},
|
||||
@ -2959,7 +2959,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_counter_with_createdtimestamp",
|
||||
v: 42,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_counter_with_createdtimestamp",
|
||||
),
|
||||
@ -2975,7 +2975,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_summary_with_createdtimestamp_count",
|
||||
v: 42,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_summary_with_createdtimestamp_count",
|
||||
),
|
||||
@ -2983,7 +2983,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_summary_with_createdtimestamp_sum",
|
||||
v: 1.234,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_summary_with_createdtimestamp_sum",
|
||||
),
|
||||
@ -2998,7 +2998,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
},
|
||||
{
|
||||
m: "test_histogram_with_createdtimestamp",
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
shs: &histogram.Histogram{
|
||||
CounterResetHint: histogram.UnknownCounterReset,
|
||||
PositiveSpans: []histogram.Span{},
|
||||
@ -3018,7 +3018,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
},
|
||||
{
|
||||
m: "test_gaugehistogram_with_createdtimestamp",
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
shs: &histogram.Histogram{
|
||||
CounterResetHint: histogram.GaugeType,
|
||||
PositiveSpans: []histogram.Span{},
|
||||
@ -3893,7 +3893,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_counter_with_createdtimestamp",
|
||||
v: 42,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_counter_with_createdtimestamp",
|
||||
),
|
||||
@ -3909,7 +3909,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_summary_with_createdtimestamp_count",
|
||||
v: 42,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_summary_with_createdtimestamp_count",
|
||||
),
|
||||
@ -3917,7 +3917,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_summary_with_createdtimestamp_sum",
|
||||
v: 1.234,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_summary_with_createdtimestamp_sum",
|
||||
),
|
||||
@ -3933,7 +3933,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_histogram_with_createdtimestamp_count",
|
||||
v: 0,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_histogram_with_createdtimestamp_count",
|
||||
),
|
||||
@ -3941,7 +3941,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_histogram_with_createdtimestamp_sum",
|
||||
v: 0,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_histogram_with_createdtimestamp_sum",
|
||||
),
|
||||
@ -3949,7 +3949,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_histogram_with_createdtimestamp_bucket\xffle\xff+Inf",
|
||||
v: 0,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_histogram_with_createdtimestamp_bucket",
|
||||
"le", "+Inf",
|
||||
@ -3966,7 +3966,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_gaugehistogram_with_createdtimestamp_count",
|
||||
v: 0,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_gaugehistogram_with_createdtimestamp_count",
|
||||
),
|
||||
@ -3974,7 +3974,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_gaugehistogram_with_createdtimestamp_sum",
|
||||
v: 0,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_gaugehistogram_with_createdtimestamp_sum",
|
||||
),
|
||||
@ -3982,7 +3982,7 @@ func TestProtobufParse(t *testing.T) {
|
||||
{
|
||||
m: "test_gaugehistogram_with_createdtimestamp_bucket\xffle\xff+Inf",
|
||||
v: 0,
|
||||
ct: 1625851153146,
|
||||
st: 1625851153146,
|
||||
lset: labels.FromStrings(
|
||||
"__name__", "test_gaugehistogram_with_createdtimestamp_bucket",
|
||||
"le", "+Inf",
|
||||
|
||||
@ -57,7 +57,7 @@ func (nopAppender) AppendHistogram(storage.SeriesRef, labels.Labels, int64, *his
return 3, nil
}

func (nopAppender) AppendHistogramCTZeroSample(storage.SeriesRef, labels.Labels, int64, int64, *histogram.Histogram, *histogram.FloatHistogram) (storage.SeriesRef, error) {
func (nopAppender) AppendHistogramSTZeroSample(storage.SeriesRef, labels.Labels, int64, int64, *histogram.Histogram, *histogram.FloatHistogram) (storage.SeriesRef, error) {
return 0, nil
}

@ -65,7 +65,7 @@ func (nopAppender) UpdateMetadata(storage.SeriesRef, labels.Labels, metadata.Met
return 4, nil
}

func (nopAppender) AppendCTZeroSample(storage.SeriesRef, labels.Labels, int64, int64) (storage.SeriesRef, error) {
func (nopAppender) AppendSTZeroSample(storage.SeriesRef, labels.Labels, int64, int64) (storage.SeriesRef, error) {
return 5, nil
}

@ -184,11 +184,11 @@ func (a *collectResultAppender) AppendHistogram(ref storage.SeriesRef, l labels.
return a.next.AppendHistogram(ref, l, t, h, fh)
}

func (a *collectResultAppender) AppendHistogramCTZeroSample(ref storage.SeriesRef, l labels.Labels, _, ct int64, h *histogram.Histogram, _ *histogram.FloatHistogram) (storage.SeriesRef, error) {
func (a *collectResultAppender) AppendHistogramSTZeroSample(ref storage.SeriesRef, l labels.Labels, _, st int64, h *histogram.Histogram, _ *histogram.FloatHistogram) (storage.SeriesRef, error) {
if h != nil {
return a.AppendHistogram(ref, l, ct, &histogram.Histogram{}, nil)
return a.AppendHistogram(ref, l, st, &histogram.Histogram{}, nil)
}
return a.AppendHistogram(ref, l, ct, nil, &histogram.FloatHistogram{})
return a.AppendHistogram(ref, l, st, nil, &histogram.FloatHistogram{})
}

func (a *collectResultAppender) UpdateMetadata(ref storage.SeriesRef, l labels.Labels, m metadata.Metadata) (storage.SeriesRef, error) {
@ -205,8 +205,8 @@ func (a *collectResultAppender) UpdateMetadata(ref storage.SeriesRef, l labels.L
return a.next.UpdateMetadata(ref, l, m)
}

func (a *collectResultAppender) AppendCTZeroSample(ref storage.SeriesRef, l labels.Labels, _, ct int64) (storage.SeriesRef, error) {
return a.Append(ref, l, ct, 0.0)
func (a *collectResultAppender) AppendSTZeroSample(ref storage.SeriesRef, l labels.Labels, _, st int64) (storage.SeriesRef, error) {
return a.Append(ref, l, st, 0.0)
}

func (a *collectResultAppender) Commit() error {

@ -85,7 +85,7 @@ type Options struct {
DiscoveryReloadInterval model.Duration
// Option to enable the ingestion of the created timestamp as a synthetic zero sample.
// See: https://github.com/prometheus/proposals/blob/main/proposals/2023-06-13_created-timestamp.md
EnableCreatedTimestampZeroIngestion bool
EnableStartTimestampZeroIngestion bool

// EnableTypeAndUnitLabels
EnableTypeAndUnitLabels bool
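For readers wiring this option up outside of cmd/prometheus, a minimal sketch of the renamed field in use (not part of this commit; the function name is illustrative and all other Options fields keep their zero values):

```go
package example

import "github.com/prometheus/prometheus/scrape"

// stZeroIngestionOptions enables injection of the synthetic zero sample at the
// start timestamp, mirroring what the created-timestamp-zero-ingestion feature
// flag does in cmd/prometheus.
func stZeroIngestionOptions() *scrape.Options {
	return &scrape.Options{
		EnableStartTimestampZeroIngestion: true,
	}
}
```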

@ -749,8 +749,8 @@ func setupTestServer(t *testing.T, typ string, toWrite []byte) *httptest.Server
return server
}

// TestManagerCTZeroIngestion tests scrape manager for various CT cases.
func TestManagerCTZeroIngestion(t *testing.T) {
// TestManagerSTZeroIngestion tests scrape manager for various ST cases.
func TestManagerSTZeroIngestion(t *testing.T) {
t.Parallel()
const (
// _total suffix is required, otherwise expfmt with OMText will mark metric as "unknown"
@ -761,26 +761,26 @@ func TestManagerCTZeroIngestion(t *testing.T) {

for _, testFormat := range []config.ScrapeProtocol{config.PrometheusProto, config.OpenMetricsText1_0_0} {
t.Run(fmt.Sprintf("format=%s", testFormat), func(t *testing.T) {
for _, testWithCT := range []bool{false, true} {
t.Run(fmt.Sprintf("withCT=%v", testWithCT), func(t *testing.T) {
for _, testCTZeroIngest := range []bool{false, true} {
t.Run(fmt.Sprintf("ctZeroIngest=%v", testCTZeroIngest), func(t *testing.T) {
for _, testWithST := range []bool{false, true} {
t.Run(fmt.Sprintf("withST=%v", testWithST), func(t *testing.T) {
for _, testSTZeroIngest := range []bool{false, true} {
t.Run(fmt.Sprintf("stZeroIngest=%v", testSTZeroIngest), func(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
defer cancel()

sampleTs := time.Now()
ctTs := time.Time{}
if testWithCT {
ctTs = sampleTs.Add(-2 * time.Minute)
stTs := time.Time{}
if testWithST {
stTs = sampleTs.Add(-2 * time.Minute)
}

// TODO(bwplotka): Add more types than just counter?
encoded := prepareTestEncodedCounter(t, testFormat, expectedMetricName, expectedSampleValue, sampleTs, ctTs)
encoded := prepareTestEncodedCounter(t, testFormat, expectedMetricName, expectedSampleValue, sampleTs, stTs)

app := &collectResultAppender{}
discoveryManager, scrapeManager := runManagers(t, ctx, &Options{
EnableCreatedTimestampZeroIngestion: testCTZeroIngest,
skipOffsetting: true,
EnableStartTimestampZeroIngestion: testSTZeroIngest,
skipOffsetting: true,
}, &collectResultAppendable{app})
defer scrapeManager.Stop()

@ -817,12 +817,12 @@ scrape_configs:
}), "after 1 minute")

// Verify results.
// Verify what we got vs expectations around CT injection.
// Verify what we got vs expectations around ST injection.
samples := findSamplesForMetric(app.resultFloats, expectedMetricName)
if testWithCT && testCTZeroIngest {
if testWithST && testSTZeroIngest {
require.Len(t, samples, 2)
require.Equal(t, 0.0, samples[0].f)
require.Equal(t, timestamp.FromTime(ctTs), samples[0].t)
require.Equal(t, timestamp.FromTime(stTs), samples[0].t)
require.Equal(t, expectedSampleValue, samples[1].f)
require.Equal(t, timestamp.FromTime(sampleTs), samples[1].t)
} else {
@ -832,16 +832,16 @@ scrape_configs:
}

// Verify what we got vs expectations around additional _created series for OM text.
// enableCTZeroInjection also kills that _created line.
// enableSTZeroInjection also kills that _created line.
createdSeriesSamples := findSamplesForMetric(app.resultFloats, expectedCreatedMetricName)
if testFormat == config.OpenMetricsText1_0_0 && testWithCT && !testCTZeroIngest {
// For OM Text, when counter has CT, and feature flag disabled we should see _created lines.
if testFormat == config.OpenMetricsText1_0_0 && testWithST && !testSTZeroIngest {
// For OM Text, when counter has ST, and feature flag disabled we should see _created lines.
require.Len(t, createdSeriesSamples, 1)
// Conversion taken from common/expfmt.writeOpenMetricsFloat.
// We don't check the ct timestamp as explicit ts was not implemented in expfmt.Encoder,
// We don't check the st timestamp as explicit ts was not implemented in expfmt.Encoder,
// but exists in OM https://github.com/prometheus/OpenMetrics/blob/v1.0.0/specification/OpenMetrics.md#:~:text=An%20example%20with%20a%20Metric%20with%20no%20labels%2C%20and%20a%20MetricPoint%20with%20a%20timestamp%20and%20a%20created
// We can implement this, but we want to potentially get rid of OM 1.0 CT lines
require.Equal(t, float64(timestamppb.New(ctTs).AsTime().UnixNano())/1e9, createdSeriesSamples[0].f)
// We can implement this, but we want to potentially get rid of OM 1.0 ST lines
require.Equal(t, float64(timestamppb.New(stTs).AsTime().UnixNano())/1e9, createdSeriesSamples[0].f)
} else {
require.Empty(t, createdSeriesSamples)
}
@ -853,12 +853,12 @@ scrape_configs:
}
}

func prepareTestEncodedCounter(t *testing.T, format config.ScrapeProtocol, mName string, v float64, ts, ct time.Time) (encoded []byte) {
func prepareTestEncodedCounter(t *testing.T, format config.ScrapeProtocol, mName string, v float64, ts, st time.Time) (encoded []byte) {
t.Helper()

counter := &dto.Counter{Value: proto.Float64(v)}
if !ct.IsZero() {
counter.CreatedTimestamp = timestamppb.New(ct)
if !st.IsZero() {
counter.CreatedTimestamp = timestamppb.New(st)
}
ctrType := dto.MetricType_COUNTER
inputMetric := &dto.MetricFamily{
@ -923,40 +923,40 @@ func generateTestHistogram(i int) *dto.Histogram {
return h
}

func TestManagerCTZeroIngestionHistogram(t *testing.T) {
func TestManagerSTZeroIngestionHistogram(t *testing.T) {
t.Parallel()
const mName = "expected_histogram"

for _, tc := range []struct {
name string
inputHistSample *dto.Histogram
enableCTZeroIngestion bool
enableSTZeroIngestion bool
}{
{
name: "disabled with CT on histogram",
name: "disabled with ST on histogram",
inputHistSample: func() *dto.Histogram {
h := generateTestHistogram(0)
h.CreatedTimestamp = timestamppb.Now()
return h
}(),
enableCTZeroIngestion: false,
enableSTZeroIngestion: false,
},
{
name: "enabled with CT on histogram",
name: "enabled with ST on histogram",
inputHistSample: func() *dto.Histogram {
h := generateTestHistogram(0)
h.CreatedTimestamp = timestamppb.Now()
return h
}(),
enableCTZeroIngestion: true,
enableSTZeroIngestion: true,
},
{
name: "enabled without CT on histogram",
name: "enabled without ST on histogram",
inputHistSample: func() *dto.Histogram {
h := generateTestHistogram(0)
return h
}(),
enableCTZeroIngestion: true,
enableSTZeroIngestion: true,
},
} {
t.Run(tc.name, func(t *testing.T) {
@ -966,8 +966,8 @@ func TestManagerCTZeroIngestionHistogram(t *testing.T) {

app := &collectResultAppender{}
discoveryManager, scrapeManager := runManagers(t, ctx, &Options{
EnableCreatedTimestampZeroIngestion: tc.enableCTZeroIngestion,
skipOffsetting: true,
EnableStartTimestampZeroIngestion: tc.enableSTZeroIngestion,
skipOffsetting: true,
}, &collectResultAppendable{app})
defer scrapeManager.Stop()

@ -1035,8 +1035,8 @@ scrape_configs:
}), "after 1 minute")

// Check for zero samples, assuming we only injected always one histogram sample.
// Did it contain CT to inject? If yes, was CT zero enabled?
if tc.inputHistSample.CreatedTimestamp.IsValid() && tc.enableCTZeroIngestion {
// Did it contain ST to inject? If yes, was ST zero enabled?
if tc.inputHistSample.CreatedTimestamp.IsValid() && tc.enableSTZeroIngestion {
require.Len(t, got, 2)
// Zero sample.
require.Equal(t, histogram.Histogram{}, *got[0].h)
@ -1066,12 +1066,12 @@ func TestUnregisterMetrics(t *testing.T) {
}
}

// TestNHCBAndCTZeroIngestion verifies that both ConvertClassicHistogramsToNHCBEnabled
// and EnableCreatedTimestampZeroIngestion can be used simultaneously without errors.
// TestNHCBAndSTZeroIngestion verifies that both ConvertClassicHistogramsToNHCBEnabled
// and EnableStartTimestampZeroIngestion can be used simultaneously without errors.
// This test addresses issue #17216 by ensuring the previously blocking check has been removed.
// The test verifies that the presence of exemplars in the input does not cause errors,
// although exemplars are not preserved during NHCB conversion (as documented below).
func TestNHCBAndCTZeroIngestion(t *testing.T) {
func TestNHCBAndSTZeroIngestion(t *testing.T) {
t.Parallel()

const (
@ -1085,8 +1085,8 @@ func TestNHCBAndCTZeroIngestion(t *testing.T) {

app := &collectResultAppender{}
discoveryManager, scrapeManager := runManagers(t, ctx, &Options{
EnableCreatedTimestampZeroIngestion: true,
skipOffsetting: true,
EnableStartTimestampZeroIngestion: true,
skipOffsetting: true,
}, &collectResultAppendable{app})
defer scrapeManager.Stop()

@ -1122,7 +1122,7 @@ test_histogram_created 1520430001
serverURL, err := url.Parse(server.URL)
require.NoError(t, err)

// Configuration with both convert_classic_histograms_to_nhcb enabled and CT zero ingestion enabled.
// Configuration with both convert_classic_histograms_to_nhcb enabled and ST zero ingestion enabled.
testConfig := fmt.Sprintf(`
global:
# Use a very long scrape_interval to prevent automatic scraping during the test.
@ -1167,7 +1167,7 @@ scrape_configs:
// Verify that samples were ingested (proving both features work together).
got := getMatchingHistograms()

// With CT zero ingestion enabled and a created timestamp present, we expect 2 samples:
// With ST zero ingestion enabled and a created timestamp present, we expect 2 samples:
// one zero sample and one actual sample.
require.Len(t, got, 2, "expected 2 histogram samples (zero sample + actual sample)")
require.Equal(t, histogram.Histogram{}, *got[0].h, "first sample should be zero sample")
@ -215,7 +215,7 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
opts.alwaysScrapeClassicHist,
opts.convertClassicHistToNHCB,
cfg.ScrapeNativeHistogramsEnabled(),
options.EnableCreatedTimestampZeroIngestion,
options.EnableStartTimestampZeroIngestion,
options.EnableTypeAndUnitLabels,
options.ExtraMetrics,
options.AppendMetadata,
@ -951,7 +951,7 @@ type scrapeLoop struct {

alwaysScrapeClassicHist bool
convertClassicHistToNHCB bool
enableCTZeroIngestion bool
enableSTZeroIngestion bool
enableTypeAndUnitLabels bool
fallbackScrapeProtocol string

@ -1264,7 +1264,7 @@ func newScrapeLoop(ctx context.Context,
alwaysScrapeClassicHist bool,
convertClassicHistToNHCB bool,
enableNativeHistogramScraping bool,
enableCTZeroIngestion bool,
enableSTZeroIngestion bool,
enableTypeAndUnitLabels bool,
reportExtraMetrics bool,
appendMetadataToWAL bool,
@ -1321,7 +1321,7 @@ func newScrapeLoop(ctx context.Context,
timeout: timeout,
alwaysScrapeClassicHist: alwaysScrapeClassicHist,
convertClassicHistToNHCB: convertClassicHistToNHCB,
enableCTZeroIngestion: enableCTZeroIngestion,
enableSTZeroIngestion: enableSTZeroIngestion,
enableTypeAndUnitLabels: enableTypeAndUnitLabels,
fallbackScrapeProtocol: fallbackScrapeProtocol,
enableNativeHistogramScraping: enableNativeHistogramScraping,
@ -1660,7 +1660,7 @@ func (sl *scrapeLoop) append(app storage.Appender, b []byte, contentType string,
IgnoreNativeHistograms: !sl.enableNativeHistogramScraping,
ConvertClassicHistogramsToNHCB: sl.convertClassicHistToNHCB,
KeepClassicOnClassicAndNativeHistograms: sl.alwaysScrapeClassicHist,
OpenMetricsSkipCTSeries: sl.enableCTZeroIngestion,
OpenMetricsSkipSTSeries: sl.enableSTZeroIngestion,
FallbackContentType: sl.fallbackScrapeProtocol,
})
if p == nil {
@ -1801,21 +1801,21 @@ loop:
if seriesAlreadyScraped && parsedTimestamp == nil {
err = storage.ErrDuplicateSampleForTimestamp
} else {
if sl.enableCTZeroIngestion {
if ctMs := p.CreatedTimestamp(); ctMs != 0 {
if sl.enableSTZeroIngestion {
if stMs := p.StartTimestamp(); stMs != 0 {
if isHistogram {
if h != nil {
ref, err = app.AppendHistogramCTZeroSample(ref, lset, t, ctMs, h, nil)
ref, err = app.AppendHistogramSTZeroSample(ref, lset, t, stMs, h, nil)
} else {
ref, err = app.AppendHistogramCTZeroSample(ref, lset, t, ctMs, nil, fh)
ref, err = app.AppendHistogramSTZeroSample(ref, lset, t, stMs, nil, fh)
}
} else {
ref, err = app.AppendCTZeroSample(ref, lset, t, ctMs)
ref, err = app.AppendSTZeroSample(ref, lset, t, stMs)
}
if err != nil && !errors.Is(err, storage.ErrOutOfOrderCT) { // OOO is a common case, ignoring completely for now.
// CT is an experimental feature. For now, we don't need to fail the
if err != nil && !errors.Is(err, storage.ErrOutOfOrderST) { // OOO is a common case, ignoring completely for now.
// ST is an experimental feature. For now, we don't need to fail the
// scrape on errors updating the created timestamp, log debug.
sl.l.Debug("Error when appending CT in scrape loop", "series", string(met), "ct", ctMs, "t", t, "err", err)
sl.l.Debug("Error when appending ST in scrape loop", "series", string(met), "st", stMs, "t", t, "err", err)
}
}
}
@ -1913,7 +1913,7 @@ loop:
if !seriesCached || lastMeta.lastIterChange == sl.cache.iter {
// In majority cases we can trust that the current series/histogram is matching the lastMeta and lastMFName.
// However, optional TYPE etc metadata and broken OM text can break this, detect those cases here.
// TODO(bwplotka): Consider moving this to parser as many parser users end up doing this (e.g. CT and NHCB parsing).
// TODO(bwplotka): Consider moving this to parser as many parser users end up doing this (e.g. ST and NHCB parsing).
if isSeriesPartOfFamily(lset.Get(labels.MetricName), lastMFName, lastMeta.Type) {
if _, merr := app.UpdateMetadata(ref, lset, lastMeta.Metadata); merr != nil {
// No need to fail the scrape on errors appending metadata.

@ -3281,8 +3281,8 @@ func TestTargetScraperScrapeOK(t *testing.T) {
}

contentTypes := strings.SplitSeq(accept, ",")
for ct := range contentTypes {
match := qValuePattern.FindStringSubmatch(ct)
for st := range contentTypes {
match := qValuePattern.FindStringSubmatch(st)
require.Len(t, match, 3)
qValue, err := strconv.ParseFloat(match[1], 64)
require.NoError(t, err, "Error parsing q value")

@ -199,14 +199,14 @@ func (f *fanoutAppender) AppendHistogram(ref SeriesRef, l labels.Labels, t int64
return ref, nil
}

func (f *fanoutAppender) AppendHistogramCTZeroSample(ref SeriesRef, l labels.Labels, t, ct int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (SeriesRef, error) {
ref, err := f.primary.AppendHistogramCTZeroSample(ref, l, t, ct, h, fh)
func (f *fanoutAppender) AppendHistogramSTZeroSample(ref SeriesRef, l labels.Labels, t, st int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (SeriesRef, error) {
ref, err := f.primary.AppendHistogramSTZeroSample(ref, l, t, st, h, fh)
if err != nil {
return ref, err
}

for _, appender := range f.secondaries {
if _, err := appender.AppendHistogramCTZeroSample(ref, l, t, ct, h, fh); err != nil {
if _, err := appender.AppendHistogramSTZeroSample(ref, l, t, st, h, fh); err != nil {
return 0, err
}
}
@ -227,14 +227,14 @@ func (f *fanoutAppender) UpdateMetadata(ref SeriesRef, l labels.Labels, m metada
return ref, nil
}

func (f *fanoutAppender) AppendCTZeroSample(ref SeriesRef, l labels.Labels, t, ct int64) (SeriesRef, error) {
ref, err := f.primary.AppendCTZeroSample(ref, l, t, ct)
func (f *fanoutAppender) AppendSTZeroSample(ref SeriesRef, l labels.Labels, t, st int64) (SeriesRef, error) {
ref, err := f.primary.AppendSTZeroSample(ref, l, t, st)
if err != nil {
return ref, err
}

for _, appender := range f.secondaries {
if _, err := appender.AppendCTZeroSample(ref, l, t, ct); err != nil {
if _, err := appender.AppendSTZeroSample(ref, l, t, st); err != nil {
return 0, err
}
}

@ -44,13 +44,13 @@ var (
ErrExemplarsDisabled = errors.New("exemplar storage is disabled or max exemplars is less than or equal to 0")
ErrNativeHistogramsDisabled = errors.New("native histograms are disabled")

// ErrOutOfOrderCT indicates failed append of CT to the storage
// due to CT being older the then newer sample.
// ErrOutOfOrderST indicates failed append of ST to the storage
// due to ST being older the then newer sample.
// NOTE(bwplotka): This can be both an instrumentation failure or commonly expected
// behaviour, and we currently don't have a way to determine this. As a result
// it's recommended to ignore this error for now.
ErrOutOfOrderCT = errors.New("created timestamp out of order, ignoring")
ErrCTNewerThanSample = errors.New("CT is newer or the same as sample's timestamp, ignoring")
ErrOutOfOrderST = errors.New("created timestamp out of order, ignoring")
ErrSTNewerThanSample = errors.New("ST is newer or the same as sample's timestamp, ignoring")
)
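The NOTE above recommends treating ErrOutOfOrderST as benign. A hedged sketch of how a caller outside this diff might apply that advice when injecting the zero sample (the helper name is illustrative, not part of the commit):

```go
package example

import (
	"errors"

	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/storage"
)

// appendSTZeroIgnoringOOO injects the synthetic zero sample at st and swallows
// the commonly expected out-of-order error, keeping the caller's reference.
func appendSTZeroIgnoringOOO(app storage.Appender, ref storage.SeriesRef, ls labels.Labels, t, st int64) (storage.SeriesRef, error) {
	newRef, err := app.AppendSTZeroSample(ref, ls, t, st)
	if err != nil {
		if errors.Is(err, storage.ErrOutOfOrderST) {
			return ref, nil // expected case per the NOTE above; keep the old ref
		}
		return ref, err
	}
	return newRef, nil
}
```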

// SeriesRef is a generic series reference. In prometheus it is either a
@ -294,7 +294,7 @@ type Appender interface {
ExemplarAppender
HistogramAppender
MetadataUpdater
CreatedTimestampAppender
StartTimestampAppender
}

// GetRef is an extra interface on Appenders used by downstream projects
@ -338,20 +338,20 @@ type HistogramAppender interface {
// pointer. AppendHistogram won't mutate the histogram, but in turn
// depends on the caller to not mutate it either.
AppendHistogram(ref SeriesRef, l labels.Labels, t int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (SeriesRef, error)
// AppendHistogramCTZeroSample adds synthetic zero sample for the given ct timestamp,
// AppendHistogramSTZeroSample adds synthetic zero sample for the given st timestamp,
// which will be associated with given series, labels and the incoming
// sample's t (timestamp). AppendHistogramCTZeroSample returns error if zero sample can't be
// appended, for example when ct is too old, or when it would collide with
// sample's t (timestamp). AppendHistogramSTZeroSample returns error if zero sample can't be
// appended, for example when st is too old, or when it would collide with
// incoming sample (sample has priority).
//
// AppendHistogramCTZeroSample has to be called before the corresponding histogram AppendHistogram.
// AppendHistogramSTZeroSample has to be called before the corresponding histogram AppendHistogram.
// A series reference number is returned which can be used to modify the
// CT for the given series in the same or later transactions.
// ST for the given series in the same or later transactions.
// Returned reference numbers are ephemeral and may be rejected in calls
// to AppendHistogramCTZeroSample() at any point.
// to AppendHistogramSTZeroSample() at any point.
//
// If the reference is 0 it must not be used for caching.
AppendHistogramCTZeroSample(ref SeriesRef, l labels.Labels, t, ct int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (SeriesRef, error)
AppendHistogramSTZeroSample(ref SeriesRef, l labels.Labels, t, st int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (SeriesRef, error)
}
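A minimal usage sketch of the renamed histogram method, showing the required ordering (zero sample first, then the real histogram) and the ErrOutOfOrderST handling used elsewhere in this diff; the helper name is illustrative only:

```go
package example

import (
	"errors"

	"github.com/prometheus/prometheus/model/histogram"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/storage"
)

// appendHistogramWithST appends the ST zero sample before the actual
// histogram sample, ignoring the commonly expected out-of-order ST error.
func appendHistogramWithST(app storage.Appender, ls labels.Labels, t, st int64, h *histogram.Histogram) error {
	ref, err := app.AppendHistogramSTZeroSample(0, ls, t, st, h, nil)
	if err != nil && !errors.Is(err, storage.ErrOutOfOrderST) {
		return err
	}
	if _, err := app.AppendHistogram(ref, ls, t, h, nil); err != nil {
		return err
	}
	return nil
}
```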

// MetadataUpdater provides an interface for associating metadata to stored series.
@ -366,22 +366,22 @@ type MetadataUpdater interface {
UpdateMetadata(ref SeriesRef, l labels.Labels, m metadata.Metadata) (SeriesRef, error)
}

// CreatedTimestampAppender provides an interface for appending CT to storage.
type CreatedTimestampAppender interface {
// AppendCTZeroSample adds synthetic zero sample for the given ct timestamp,
// StartTimestampAppender provides an interface for appending ST to storage.
type StartTimestampAppender interface {
// AppendSTZeroSample adds synthetic zero sample for the given st timestamp,
// which will be associated with given series, labels and the incoming
// sample's t (timestamp). AppendCTZeroSample returns error if zero sample can't be
// appended, for example when ct is too old, or when it would collide with
// sample's t (timestamp). AppendSTZeroSample returns error if zero sample can't be
// appended, for example when st is too old, or when it would collide with
// incoming sample (sample has priority).
//
// AppendCTZeroSample has to be called before the corresponding sample Append.
// AppendSTZeroSample has to be called before the corresponding sample Append.
// A series reference number is returned which can be used to modify the
// CT for the given series in the same or later transactions.
// ST for the given series in the same or later transactions.
// Returned reference numbers are ephemeral and may be rejected in calls
// to AppendCTZeroSample() at any point.
// to AppendSTZeroSample() at any point.
//
// If the reference is 0 it must not be used for caching.
AppendCTZeroSample(ref SeriesRef, l labels.Labels, t, ct int64) (SeriesRef, error)
AppendSTZeroSample(ref SeriesRef, l labels.Labels, t, st int64) (SeriesRef, error)
}

// SeriesSet contains a set of series.

@ -122,7 +122,7 @@ var (
writev2.FromIntHistogram(30, &testHistogramCustomBuckets),
writev2.FromFloatHistogram(40, testHistogramCustomBuckets.ToFloat(nil)),
},
CreatedTimestamp: 1, // CT needs to be lower than the sample's timestamp.
CreatedTimestamp: 1, // ST needs to be lower than the sample's timestamp.
},
{
LabelsRefs: []uint32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, // Same series as first.

@ -49,10 +49,10 @@ type Metadata struct {
type CombinedAppender interface {
// AppendSample appends a sample and related exemplars, metadata, and
// created timestamp to the storage.
AppendSample(ls labels.Labels, meta Metadata, ct, t int64, v float64, es []exemplar.Exemplar) error
AppendSample(ls labels.Labels, meta Metadata, st, t int64, v float64, es []exemplar.Exemplar) error
// AppendHistogram appends a histogram and related exemplars, metadata, and
// created timestamp to the storage.
AppendHistogram(ls labels.Labels, meta Metadata, ct, t int64, h *histogram.Histogram, es []exemplar.Exemplar) error
AppendHistogram(ls labels.Labels, meta Metadata, st, t int64, h *histogram.Histogram, es []exemplar.Exemplar) error
}
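A sketch of how an OTLP caller feeds this interface (written as if placed in the same package as the interface; the function and parameter names are illustrative — the real converters obtain st and t from pt.StartTimestamp() and pt.Timestamp() as shown further down in this diff):

```go
package prometheusremotewrite // assumed: same package as CombinedAppender above

import (
	"github.com/prometheus/prometheus/model/exemplar"
	"github.com/prometheus/prometheus/model/labels"
)

// forwardCounterPoint forwards one cumulative OTLP point: otlpStartMs becomes
// the st argument and otlpTsMs the sample timestamp, both in milliseconds.
func forwardCounterPoint(app CombinedAppender, ls labels.Labels, meta Metadata, otlpStartMs, otlpTsMs int64, v float64, es []exemplar.Exemplar) error {
	return app.AppendSample(ls, meta, otlpStartMs, otlpTsMs, v, es)
}
```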

// CombinedAppenderMetrics is for the metrics observed by the
@ -82,11 +82,11 @@ func NewCombinedAppenderMetrics(reg prometheus.Registerer) CombinedAppenderMetri
// NewCombinedAppender creates a combined appender that sets start times and
// updates metadata for each series only once, and appends samples and
// exemplars for each call.
func NewCombinedAppender(app storage.Appender, logger *slog.Logger, ingestCTZeroSample, appendMetadata bool, metrics CombinedAppenderMetrics) CombinedAppender {
func NewCombinedAppender(app storage.Appender, logger *slog.Logger, ingestSTZeroSample, appendMetadata bool, metrics CombinedAppenderMetrics) CombinedAppender {
return &combinedAppender{
app: app,
logger: logger,
ingestCTZeroSample: ingestCTZeroSample,
ingestSTZeroSample: ingestSTZeroSample,
appendMetadata: appendMetadata,
refs: make(map[uint64]seriesRef),
samplesAppendedWithoutMetadata: metrics.samplesAppendedWithoutMetadata,
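A wiring sketch for the renamed constructor argument (again written as if in the same package; the appender, logger and registry come from the caller and the helper name is hypothetical):

```go
package prometheusremotewrite // assumed: same package as NewCombinedAppender above

import (
	"log/slog"

	"github.com/prometheus/client_golang/prometheus"
	"github.com/prometheus/prometheus/storage"
)

// newSTIngestingAppender builds a CombinedAppender that injects ST zero
// samples but skips per-series metadata updates.
func newSTIngestingAppender(app storage.Appender, logger *slog.Logger, reg prometheus.Registerer) CombinedAppender {
	metrics := NewCombinedAppenderMetrics(reg)
	return NewCombinedAppender(app, logger, true /* ingestSTZeroSample */, false /* appendMetadata */, metrics)
}
```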
@ -96,7 +96,7 @@ func NewCombinedAppender(app storage.Appender, logger *slog.Logger, ingestCTZero

type seriesRef struct {
ref storage.SeriesRef
ct int64
st int64
ls labels.Labels
meta metadata.Metadata
}
@ -106,7 +106,7 @@ type combinedAppender struct {
logger *slog.Logger
samplesAppendedWithoutMetadata prometheus.Counter
outOfOrderExemplars prometheus.Counter
ingestCTZeroSample bool
ingestSTZeroSample bool
appendMetadata bool
// Used to ensure we only update metadata and created timestamps once, and to share storage.SeriesRefs.
// To detect hash collision it also stores the labels.
@ -114,20 +114,20 @@ type combinedAppender struct {
refs map[uint64]seriesRef
}

func (b *combinedAppender) AppendSample(ls labels.Labels, meta Metadata, ct, t int64, v float64, es []exemplar.Exemplar) (err error) {
return b.appendFloatOrHistogram(ls, meta.Metadata, ct, t, v, nil, es)
func (b *combinedAppender) AppendSample(ls labels.Labels, meta Metadata, st, t int64, v float64, es []exemplar.Exemplar) (err error) {
return b.appendFloatOrHistogram(ls, meta.Metadata, st, t, v, nil, es)
}

func (b *combinedAppender) AppendHistogram(ls labels.Labels, meta Metadata, ct, t int64, h *histogram.Histogram, es []exemplar.Exemplar) (err error) {
func (b *combinedAppender) AppendHistogram(ls labels.Labels, meta Metadata, st, t int64, h *histogram.Histogram, es []exemplar.Exemplar) (err error) {
if h == nil {
// Sanity check, we should never get here with a nil histogram.
b.logger.Error("Received nil histogram in CombinedAppender.AppendHistogram", "series", ls.String())
return errors.New("internal error, attempted to append nil histogram")
}
return b.appendFloatOrHistogram(ls, meta.Metadata, ct, t, 0, h, es)
return b.appendFloatOrHistogram(ls, meta.Metadata, st, t, 0, h, es)
}

func (b *combinedAppender) appendFloatOrHistogram(ls labels.Labels, meta metadata.Metadata, ct, t int64, v float64, h *histogram.Histogram, es []exemplar.Exemplar) (err error) {
func (b *combinedAppender) appendFloatOrHistogram(ls labels.Labels, meta metadata.Metadata, st, t int64, v float64, h *histogram.Histogram, es []exemplar.Exemplar) (err error) {
hash := ls.Hash()
series, exists := b.refs[hash]
ref := series.ref
@ -140,28 +140,28 @@ func (b *combinedAppender) appendFloatOrHistogram(ls labels.Labels, meta metadat
exists = false
ref = 0
}
updateRefs := !exists || series.ct != ct
if updateRefs && ct != 0 && ct < t && b.ingestCTZeroSample {
updateRefs := !exists || series.st != st
if updateRefs && st != 0 && st < t && b.ingestSTZeroSample {
var newRef storage.SeriesRef
if h != nil {
newRef, err = b.app.AppendHistogramCTZeroSample(ref, ls, t, ct, h, nil)
newRef, err = b.app.AppendHistogramSTZeroSample(ref, ls, t, st, h, nil)
} else {
newRef, err = b.app.AppendCTZeroSample(ref, ls, t, ct)
newRef, err = b.app.AppendSTZeroSample(ref, ls, t, st)
}
if err != nil {
if !errors.Is(err, storage.ErrOutOfOrderCT) && !errors.Is(err, storage.ErrDuplicateSampleForTimestamp) {
if !errors.Is(err, storage.ErrOutOfOrderST) && !errors.Is(err, storage.ErrDuplicateSampleForTimestamp) {
// Even for the first sample OOO is a common scenario because
// we can't tell if a CT was already ingested in a previous request.
// we can't tell if a ST was already ingested in a previous request.
// We ignore the error.
// ErrDuplicateSampleForTimestamp is also a common scenario because
// unknown start times in Opentelemetry are indicated by setting
// the start time to the same as the first sample time.
// https://opentelemetry.io/docs/specs/otel/metrics/data-model/#cumulative-streams-handling-unknown-start-time
b.logger.Warn("Error when appending CT from OTLP", "err", err, "series", ls.String(), "created_timestamp", ct, "timestamp", t, "sample_type", sampleType(h))
b.logger.Warn("Error when appending ST from OTLP", "err", err, "series", ls.String(), "start_timestamp", st, "timestamp", t, "sample_type", sampleType(h))
}
} else {
// We only use the returned reference on success as otherwise an
// error of CT append could invalidate the series reference.
// error of ST append could invalidate the series reference.
ref = newRef
}
}
@ -197,7 +197,7 @@ func (b *combinedAppender) appendFloatOrHistogram(ls labels.Labels, meta metadat
if updateRefs || metadataChanged {
b.refs[hash] = seriesRef{
ref: ref,
ct: ct,
st: st,
ls: ls,
meta: meta,
}

@ -52,7 +52,7 @@ type combinedSample struct {
ls labels.Labels
meta metadata.Metadata
t int64
ct int64
st int64
v float64
es []exemplar.Exemplar
}
@ -62,31 +62,31 @@ type combinedHistogram struct {
ls labels.Labels
meta metadata.Metadata
t int64
ct int64
st int64
h *histogram.Histogram
es []exemplar.Exemplar
}

func (m *mockCombinedAppender) AppendSample(ls labels.Labels, meta Metadata, ct, t int64, v float64, es []exemplar.Exemplar) error {
func (m *mockCombinedAppender) AppendSample(ls labels.Labels, meta Metadata, st, t int64, v float64, es []exemplar.Exemplar) error {
m.pendingSamples = append(m.pendingSamples, combinedSample{
metricFamilyName: meta.MetricFamilyName,
ls: ls,
meta: meta.Metadata,
t: t,
ct: ct,
st: st,
v: v,
es: es,
})
return nil
}

func (m *mockCombinedAppender) AppendHistogram(ls labels.Labels, meta Metadata, ct, t int64, h *histogram.Histogram, es []exemplar.Exemplar) error {
func (m *mockCombinedAppender) AppendHistogram(ls labels.Labels, meta Metadata, st, t int64, h *histogram.Histogram, es []exemplar.Exemplar) error {
m.pendingHistograms = append(m.pendingHistograms, combinedHistogram{
metricFamilyName: meta.MetricFamilyName,
ls: ls,
meta: meta.Metadata,
t: t,
ct: ct,
st: st,
h: h,
es: es,
})
@ -108,12 +108,12 @@ func requireEqual(t testing.TB, expected, actual any, msgAndArgs ...any) {
// TestCombinedAppenderOnTSDB runs some basic tests on a real TSDB to check
// that the combinedAppender works on a real TSDB.
func TestCombinedAppenderOnTSDB(t *testing.T) {
t.Run("ingestCTZeroSample=false", func(t *testing.T) { testCombinedAppenderOnTSDB(t, false) })
t.Run("ingestSTZeroSample=false", func(t *testing.T) { testCombinedAppenderOnTSDB(t, false) })

t.Run("ingestCTZeroSample=true", func(t *testing.T) { testCombinedAppenderOnTSDB(t, true) })
t.Run("ingestSTZeroSample=true", func(t *testing.T) { testCombinedAppenderOnTSDB(t, true) })
}

func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
func testCombinedAppenderOnTSDB(t *testing.T, ingestSTZeroSample bool) {
t.Helper()

now := time.Now()
@ -165,9 +165,9 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
extraAppendFunc func(*testing.T, CombinedAppender)
expectedSamples []sample
expectedExemplars []exemplar.QueryResult
expectedLogsForCT []string
expectedLogsForST []string
}{
"single float sample, zero CT": {
"single float sample, zero ST": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendSample(seriesLabels.Copy(), floatMetadata, 0, now.UnixMilli(), 42.0, testExemplars))
},
@ -179,7 +179,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
},
expectedExemplars: expectedExemplars,
},
"single float sample, very old CT": {
"single float sample, very old ST": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendSample(seriesLabels.Copy(), floatMetadata, 1, now.UnixMilli(), 42.0, nil))
},
@ -189,18 +189,18 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
f: 42.0,
},
},
expectedLogsForCT: []string{
"Error when appending CT from OTLP",
expectedLogsForST: []string{
"Error when appending ST from OTLP",
"out of bound",
},
},
"single float sample, normal CT": {
"single float sample, normal ST": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendSample(seriesLabels.Copy(), floatMetadata, now.Add(-2*time.Minute).UnixMilli(), now.UnixMilli(), 42.0, nil))
},
expectedSamples: []sample{
{
ctZero: true,
stZero: true,
t: now.Add(-2 * time.Minute).UnixMilli(),
},
{
@ -209,7 +209,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
},
},
},
"single float sample, CT same time as sample": {
"single float sample, ST same time as sample": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendSample(seriesLabels.Copy(), floatMetadata, now.UnixMilli(), now.UnixMilli(), 42.0, nil))
},
@ -220,7 +220,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
},
},
},
"two float samples in different messages, CT same time as first sample": {
"two float samples in different messages, ST same time as first sample": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendSample(seriesLabels.Copy(), floatMetadata, now.UnixMilli(), now.UnixMilli(), 42.0, nil))
},
@ -238,7 +238,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
},
},
},
"single float sample, CT in the future of the sample": {
"single float sample, ST in the future of the sample": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendSample(seriesLabels.Copy(), floatMetadata, now.Add(time.Minute).UnixMilli(), now.UnixMilli(), 42.0, nil))
},
@ -249,7 +249,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
},
},
},
"single histogram sample, zero CT": {
"single histogram sample, zero ST": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendHistogram(seriesLabels.Copy(), histogramMetadata, 0, now.UnixMilli(), tsdbutil.GenerateTestHistogram(42), testExemplars))
},
@ -261,7 +261,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
},
expectedExemplars: expectedExemplars,
},
"single histogram sample, very old CT": {
"single histogram sample, very old ST": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendHistogram(seriesLabels.Copy(), histogramMetadata, 1, now.UnixMilli(), tsdbutil.GenerateTestHistogram(42), nil))
},
@ -271,18 +271,18 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
h: tsdbutil.GenerateTestHistogram(42),
},
},
expectedLogsForCT: []string{
"Error when appending CT from OTLP",
expectedLogsForST: []string{
"Error when appending ST from OTLP",
"out of bound",
},
},
"single histogram sample, normal CT": {
"single histogram sample, normal ST": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendHistogram(seriesLabels.Copy(), histogramMetadata, now.Add(-2*time.Minute).UnixMilli(), now.UnixMilli(), tsdbutil.GenerateTestHistogram(42), nil))
},
expectedSamples: []sample{
{
ctZero: true,
stZero: true,
t: now.Add(-2 * time.Minute).UnixMilli(),
h: &histogram.Histogram{},
},
@ -292,7 +292,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
},
},
},
"single histogram sample, CT same time as sample": {
"single histogram sample, ST same time as sample": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendHistogram(seriesLabels.Copy(), histogramMetadata, now.UnixMilli(), now.UnixMilli(), tsdbutil.GenerateTestHistogram(42), nil))
},
@ -303,7 +303,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
},
},
},
"two histogram samples in different messages, CT same time as first sample": {
"two histogram samples in different messages, ST same time as first sample": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendHistogram(seriesLabels.Copy(), floatMetadata, now.UnixMilli(), now.UnixMilli(), tsdbutil.GenerateTestHistogram(42), nil))
},
@ -321,7 +321,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
},
},
},
"single histogram sample, CT in the future of the sample": {
"single histogram sample, ST in the future of the sample": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendHistogram(seriesLabels.Copy(), histogramMetadata, now.Add(time.Minute).UnixMilli(), now.UnixMilli(), tsdbutil.GenerateTestHistogram(42), nil))
},
@ -364,14 +364,14 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
},
},
},
"float samples with CT changing": {
"float samples with ST changing": {
appendFunc: func(t *testing.T, app CombinedAppender) {
require.NoError(t, app.AppendSample(seriesLabels.Copy(), floatMetadata, now.Add(-4*time.Second).UnixMilli(), now.Add(-3*time.Second).UnixMilli(), 42.0, nil))
require.NoError(t, app.AppendSample(seriesLabels.Copy(), floatMetadata, now.Add(-1*time.Second).UnixMilli(), now.UnixMilli(), 62.0, nil))
},
expectedSamples: []sample{
{
ctZero: true,
stZero: true,
t: now.Add(-4 * time.Second).UnixMilli(),
},
{
@ -379,7 +379,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
f: 42.0,
},
{
ctZero: true,
stZero: true,
t: now.Add(-1 * time.Second).UnixMilli(),
},
{
@ -393,8 +393,8 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
var expectedLogs []string
if ingestCTZeroSample {
expectedLogs = append(expectedLogs, tc.expectedLogsForCT...)
if ingestSTZeroSample {
expectedLogs = append(expectedLogs, tc.expectedLogsForST...)
}

dir := t.TempDir()
@ -413,13 +413,13 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
reg := prometheus.NewRegistry()
cappMetrics := NewCombinedAppenderMetrics(reg)
app := db.Appender(ctx)
capp := NewCombinedAppender(app, logger, ingestCTZeroSample, false, cappMetrics)
capp := NewCombinedAppender(app, logger, ingestSTZeroSample, false, cappMetrics)
tc.appendFunc(t, capp)
require.NoError(t, app.Commit())

if tc.extraAppendFunc != nil {
app = db.Appender(ctx)
capp = NewCombinedAppender(app, logger, ingestCTZeroSample, false, cappMetrics)
capp = NewCombinedAppender(app, logger, ingestSTZeroSample, false, cappMetrics)
tc.extraAppendFunc(t, capp)
require.NoError(t, app.Commit())
}
@ -446,7 +446,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
series := ss.At()
it := series.Iterator(nil)
for i, sample := range tc.expectedSamples {
if !ingestCTZeroSample && sample.ctZero {
if !ingestSTZeroSample && sample.stZero {
continue
}
if sample.h == nil {
@ -476,7 +476,7 @@ func testCombinedAppenderOnTSDB(t *testing.T, ingestCTZeroSample bool) {
}

type sample struct {
ctZero bool
stZero bool

t int64
f float64
@ -500,7 +500,7 @@ func TestCombinedAppenderSeriesRefs(t *testing.T) {
MetricFamilyName: "test_bytes_total",
}

t.Run("happy case with CT zero, reference is passed and reused", func(t *testing.T) {
t.Run("happy case with ST zero, reference is passed and reused", func(t *testing.T) {
app := &appenderRecorder{}
capp := NewCombinedAppender(app, promslog.NewNopLogger(), true, false, NewCombinedAppenderMetrics(prometheus.NewRegistry()))

@ -514,31 +514,31 @@ func TestCombinedAppenderSeriesRefs(t *testing.T) {
}))

require.Len(t, app.records, 5)
requireEqualOpAndRef(t, "AppendCTZeroSample", 0, app.records[0])
requireEqualOpAndRef(t, "AppendSTZeroSample", 0, app.records[0])
ref := app.records[0].outRef
require.NotZero(t, ref)
requireEqualOpAndRef(t, "Append", ref, app.records[1])
requireEqualOpAndRef(t, "AppendCTZeroSample", ref, app.records[2])
requireEqualOpAndRef(t, "AppendSTZeroSample", ref, app.records[2])
requireEqualOpAndRef(t, "Append", ref, app.records[3])
requireEqualOpAndRef(t, "AppendExemplar", ref, app.records[4])
})

t.Run("error on second CT ingest doesn't update the reference", func(t *testing.T) {
t.Run("error on second ST ingest doesn't update the reference", func(t *testing.T) {
app := &appenderRecorder{}
capp := NewCombinedAppender(app, promslog.NewNopLogger(), true, false, NewCombinedAppenderMetrics(prometheus.NewRegistry()))

require.NoError(t, capp.AppendSample(seriesLabels.Copy(), floatMetadata, 1, 2, 42.0, nil))

app.appendCTZeroSampleError = errors.New("test error")
app.appendSTZeroSampleError = errors.New("test error")
require.NoError(t, capp.AppendSample(seriesLabels.Copy(), floatMetadata, 3, 4, 62.0, nil))

require.Len(t, app.records, 4)
requireEqualOpAndRef(t, "AppendCTZeroSample", 0, app.records[0])
requireEqualOpAndRef(t, "AppendSTZeroSample", 0, app.records[0])
ref := app.records[0].outRef
require.NotZero(t, ref)
requireEqualOpAndRef(t, "Append", ref, app.records[1])
requireEqualOpAndRef(t, "AppendCTZeroSample", ref, app.records[2])
require.Zero(t, app.records[2].outRef, "the second AppendCTZeroSample returned 0")
requireEqualOpAndRef(t, "AppendSTZeroSample", ref, app.records[2])
require.Zero(t, app.records[2].outRef, "the second AppendSTZeroSample returned 0")
requireEqualOpAndRef(t, "Append", ref, app.records[3])
})

@ -577,12 +577,12 @@ func TestCombinedAppenderSeriesRefs(t *testing.T) {
}))

require.Len(t, app.records, 7)
requireEqualOpAndRef(t, "AppendCTZeroSample", 0, app.records[0])
requireEqualOpAndRef(t, "AppendSTZeroSample", 0, app.records[0])
ref := app.records[0].outRef
require.NotZero(t, ref)
requireEqualOpAndRef(t, "Append", ref, app.records[1])
requireEqualOpAndRef(t, "UpdateMetadata", ref, app.records[2])
requireEqualOpAndRef(t, "AppendCTZeroSample", ref, app.records[3])
requireEqualOpAndRef(t, "AppendSTZeroSample", ref, app.records[3])
requireEqualOpAndRef(t, "Append", ref, app.records[4])
require.Zero(t, app.records[4].outRef, "the second Append returned 0")
requireEqualOpAndRef(t, "UpdateMetadata", ref, app.records[5])
@ -619,13 +619,13 @@ func TestCombinedAppenderSeriesRefs(t *testing.T) {
}))

require.Len(t, app.records, 5)
requireEqualOpAndRef(t, "AppendCTZeroSample", 0, app.records[0])
requireEqualOpAndRef(t, "AppendSTZeroSample", 0, app.records[0])
ref := app.records[0].outRef
require.NotZero(t, ref)
requireEqualOpAndRef(t, "Append", ref, app.records[1])
requireEqualOpAndRef(t, "AppendCTZeroSample", 0, app.records[2])
requireEqualOpAndRef(t, "AppendSTZeroSample", 0, app.records[2])
newRef := app.records[2].outRef
require.NotEqual(t, ref, newRef, "the second AppendCTZeroSample returned a different reference")
require.NotEqual(t, ref, newRef, "the second AppendSTZeroSample returned a different reference")
requireEqualOpAndRef(t, "Append", newRef, app.records[3])
requireEqualOpAndRef(t, "AppendExemplar", newRef, app.records[4])
})
@ -651,12 +651,12 @@ func TestCombinedAppenderSeriesRefs(t *testing.T) {

if appendMetadata {
require.Len(t, app.records, 3)
requireEqualOp(t, "AppendCTZeroSample", app.records[0])
requireEqualOp(t, "AppendSTZeroSample", app.records[0])
requireEqualOp(t, "Append", app.records[1])
requireEqualOp(t, "UpdateMetadata", app.records[2])
} else {
require.Len(t, app.records, 2)
requireEqualOp(t, "AppendCTZeroSample", app.records[0])
requireEqualOp(t, "AppendSTZeroSample", app.records[0])
requireEqualOp(t, "Append", app.records[1])
}
})
@ -720,12 +720,12 @@ func TestCombinedAppenderMetadataChanges(t *testing.T) {

// Verify expected operations.
require.Len(t, app.records, 7)
requireEqualOpAndRef(t, "AppendCTZeroSample", 0, app.records[0])
requireEqualOpAndRef(t, "AppendSTZeroSample", 0, app.records[0])
ref := app.records[0].outRef
require.NotZero(t, ref)
requireEqualOpAndRef(t, "Append", ref, app.records[1])
requireEqualOpAndRef(t, "UpdateMetadata", ref, app.records[2])
requireEqualOpAndRef(t, "AppendCTZeroSample", ref, app.records[3])
requireEqualOpAndRef(t, "AppendSTZeroSample", ref, app.records[3])
requireEqualOpAndRef(t, "Append", ref, app.records[4])
requireEqualOpAndRef(t, "UpdateMetadata", ref, app.records[5])
requireEqualOpAndRef(t, "Append", ref, app.records[6])
@ -756,9 +756,9 @@ type appenderRecorder struct {
records []appenderRecord

appendError error
appendCTZeroSampleError error
appendSTZeroSampleError error
appendHistogramError error
appendHistogramCTZeroSampleError error
appendHistogramSTZeroSampleError error
updateMetadataError error
appendExemplarError error
}
@ -789,10 +789,10 @@ func (a *appenderRecorder) Append(ref storage.SeriesRef, ls labels.Labels, _ int
return ref, nil
}

func (a *appenderRecorder) AppendCTZeroSample(ref storage.SeriesRef, ls labels.Labels, _, _ int64) (storage.SeriesRef, error) {
a.records = append(a.records, appenderRecord{op: "AppendCTZeroSample", ref: ref, ls: ls})
if a.appendCTZeroSampleError != nil {
return 0, a.appendCTZeroSampleError
func (a *appenderRecorder) AppendSTZeroSample(ref storage.SeriesRef, ls labels.Labels, _, _ int64) (storage.SeriesRef, error) {
a.records = append(a.records, appenderRecord{op: "AppendSTZeroSample", ref: ref, ls: ls})
if a.appendSTZeroSampleError != nil {
return 0, a.appendSTZeroSampleError
}
if ref == 0 {
ref = a.newRef()
@ -813,10 +813,10 @@ func (a *appenderRecorder) AppendHistogram(ref storage.SeriesRef, ls labels.Labe
return ref, nil
}

func (a *appenderRecorder) AppendHistogramCTZeroSample(ref storage.SeriesRef, ls labels.Labels, _, _ int64, _ *histogram.Histogram, _ *histogram.FloatHistogram) (storage.SeriesRef, error) {
a.records = append(a.records, appenderRecord{op: "AppendHistogramCTZeroSample", ref: ref, ls: ls})
if a.appendHistogramCTZeroSampleError != nil {
return 0, a.appendHistogramCTZeroSampleError
func (a *appenderRecorder) AppendHistogramSTZeroSample(ref storage.SeriesRef, ls labels.Labels, _, _ int64, _ *histogram.Histogram, _ *histogram.FloatHistogram) (storage.SeriesRef, error) {
a.records = append(a.records, appenderRecord{op: "AppendHistogramSTZeroSample", ref: ref, ls: ls})
if a.appendHistogramSTZeroSampleError != nil {
return 0, a.appendHistogramSTZeroSampleError
}
if ref == 0 {
ref = a.newRef()
@ -482,7 +482,7 @@ func TestPrometheusConverter_AddSummaryDataPoints(t *testing.T) {
|
||||
model.MetricNameLabel, "test_summary"+sumStr,
|
||||
),
|
||||
t: convertTimeStamp(ts),
|
||||
ct: convertTimeStamp(ts),
|
||||
st: convertTimeStamp(ts),
|
||||
v: 0,
|
||||
},
|
||||
{
|
||||
@ -491,7 +491,7 @@ func TestPrometheusConverter_AddSummaryDataPoints(t *testing.T) {
|
||||
model.MetricNameLabel, "test_summary"+countStr,
|
||||
),
|
||||
t: convertTimeStamp(ts),
|
||||
ct: convertTimeStamp(ts),
|
||||
st: convertTimeStamp(ts),
|
||||
v: 0,
|
||||
},
|
||||
}
|
||||
@ -526,7 +526,7 @@ func TestPrometheusConverter_AddSummaryDataPoints(t *testing.T) {
|
||||
ls: labels.FromStrings(append(scopeLabels,
|
||||
model.MetricNameLabel, "test_summary"+sumStr)...),
|
||||
t: convertTimeStamp(ts),
|
||||
ct: convertTimeStamp(ts),
|
||||
st: convertTimeStamp(ts),
|
||||
v: 0,
|
||||
},
|
||||
{
|
||||
@ -534,7 +534,7 @@ func TestPrometheusConverter_AddSummaryDataPoints(t *testing.T) {
|
||||
ls: labels.FromStrings(append(scopeLabels,
|
||||
model.MetricNameLabel, "test_summary"+countStr)...),
|
||||
t: convertTimeStamp(ts),
|
||||
ct: convertTimeStamp(ts),
|
||||
st: convertTimeStamp(ts),
|
||||
v: 0,
|
||||
},
|
||||
}
|
||||
@ -706,7 +706,7 @@ func TestPrometheusConverter_AddHistogramDataPoints(t *testing.T) {
|
||||
model.MetricNameLabel, "test_hist"+countStr,
|
||||
),
|
||||
t: convertTimeStamp(ts),
|
||||
ct: convertTimeStamp(ts),
|
||||
st: convertTimeStamp(ts),
|
||||
v: 0,
|
||||
},
|
||||
{
|
||||
@ -716,7 +716,7 @@ func TestPrometheusConverter_AddHistogramDataPoints(t *testing.T) {
|
||||
model.BucketLabel, "+Inf",
|
||||
),
|
||||
t: convertTimeStamp(ts),
|
||||
ct: convertTimeStamp(ts),
|
||||
st: convertTimeStamp(ts),
|
||||
v: 0,
|
||||
},
|
||||
}
|
||||
@ -751,7 +751,7 @@ func TestPrometheusConverter_AddHistogramDataPoints(t *testing.T) {
|
||||
ls: labels.FromStrings(append(scopeLabels,
|
||||
model.MetricNameLabel, "test_hist"+countStr)...),
|
||||
t: convertTimeStamp(ts),
|
||||
ct: convertTimeStamp(ts),
|
||||
st: convertTimeStamp(ts),
|
||||
v: 0,
|
||||
},
|
||||
{
|
||||
@ -760,7 +760,7 @@ func TestPrometheusConverter_AddHistogramDataPoints(t *testing.T) {
|
||||
model.MetricNameLabel, "test_hist_bucket",
|
||||
model.BucketLabel, "+Inf")...),
|
||||
t: convertTimeStamp(ts),
|
||||
ct: convertTimeStamp(ts),
|
||||
st: convertTimeStamp(ts),
|
||||
v: 0,
|
||||
},
|
||||
}
|
||||
|
||||
@ -67,13 +67,13 @@ func (c *PrometheusConverter) addExponentialHistogramDataPoints(ctx context.Cont
|
||||
return annots, err
|
||||
}
|
||||
ts := convertTimeStamp(pt.Timestamp())
|
||||
ct := convertTimeStamp(pt.StartTimestamp())
|
||||
st := convertTimeStamp(pt.StartTimestamp())
|
||||
exemplars, err := c.getPromExemplars(ctx, pt.Exemplars())
|
||||
if err != nil {
|
||||
return annots, err
|
||||
}
|
||||
// OTel exponential histograms are always Int Histograms.
|
||||
if err = c.appender.AppendHistogram(lbls, meta, ct, ts, hp, exemplars); err != nil {
|
||||
if err = c.appender.AppendHistogram(lbls, meta, st, ts, hp, exemplars); err != nil {
|
||||
return annots, err
|
||||
}
|
||||
}
|
||||
@ -286,12 +286,12 @@ func (c *PrometheusConverter) addCustomBucketsHistogramDataPoints(ctx context.Co
|
||||
return annots, err
|
||||
}
|
||||
ts := convertTimeStamp(pt.Timestamp())
|
||||
ct := convertTimeStamp(pt.StartTimestamp())
|
||||
st := convertTimeStamp(pt.StartTimestamp())
|
||||
exemplars, err := c.getPromExemplars(ctx, pt.Exemplars())
|
||||
if err != nil {
|
||||
return annots, err
|
||||
}
|
||||
if err = c.appender.AppendHistogram(lbls, meta, ct, ts, hp, exemplars); err != nil {
|
||||
if err = c.appender.AppendHistogram(lbls, meta, st, ts, hp, exemplars); err != nil {
|
||||
return annots, err
|
||||
}
|
||||
}
|
||||
|
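In the two hunks above, the OTLP converter derives the start timestamp with the same helper it already uses for the sample timestamp and passes it to the appender ahead of the sample time (`AppendHistogram(lbls, meta, st, ts, ...)`). A minimal sketch of that nanosecond-to-millisecond plumbing, assuming a `convertTimeStamp`-like helper; the exact rounding and helper body are illustrative, not the repository's verbatim implementation:

```go
package main

import (
	"fmt"
	"time"
)

// convertTimeStamp stands in for the translator helper referenced above:
// OTLP timestamps are uint64 nanoseconds since the epoch, while Prometheus
// appenders take int64 milliseconds.
func convertTimeStamp(ns uint64) int64 {
	return int64(ns) / int64(time.Millisecond)
}

func main() {
	start := uint64(time.Date(2024, 1, 1, 0, 0, 0, 0, time.UTC).UnixNano())
	sample := uint64(time.Date(2024, 1, 1, 0, 5, 0, 0, time.UTC).UnixNano())

	st := convertTimeStamp(start)  // start timestamp (ms), passed before...
	ts := convertTimeStamp(sample) // ...the sample timestamp (ms)
	fmt.Println(st, ts, st < ts)   // the start time always precedes the sample
}
```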
||||
@ -673,7 +673,7 @@ func TestPrometheusConverter_addExponentialHistogramDataPoints(t *testing.T) {
|
||||
ls: lbls,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 7,
|
||||
Schema: 1,
|
||||
@ -689,7 +689,7 @@ func TestPrometheusConverter_addExponentialHistogramDataPoints(t *testing.T) {
|
||||
ls: lbls,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 4,
|
||||
Schema: 1,
|
||||
@ -746,7 +746,7 @@ func TestPrometheusConverter_addExponentialHistogramDataPoints(t *testing.T) {
|
||||
ls: lbls,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 7,
|
||||
Schema: 1,
|
||||
@ -762,7 +762,7 @@ func TestPrometheusConverter_addExponentialHistogramDataPoints(t *testing.T) {
|
||||
ls: lbls,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 4,
|
||||
Schema: 1,
|
||||
@ -819,7 +819,7 @@ func TestPrometheusConverter_addExponentialHistogramDataPoints(t *testing.T) {
|
||||
ls: lbls,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 7,
|
||||
Schema: 1,
|
||||
@ -835,7 +835,7 @@ func TestPrometheusConverter_addExponentialHistogramDataPoints(t *testing.T) {
|
||||
ls: labelsAnother,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 4,
|
||||
Schema: 1,
|
||||
@ -1146,7 +1146,7 @@ func TestPrometheusConverter_addCustomBucketsHistogramDataPoints(t *testing.T) {
|
||||
ls: lbls,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 3,
|
||||
Sum: 3,
|
||||
@ -1162,7 +1162,7 @@ func TestPrometheusConverter_addCustomBucketsHistogramDataPoints(t *testing.T) {
|
||||
ls: lbls,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 11,
|
||||
Sum: 5,
|
||||
@ -1219,7 +1219,7 @@ func TestPrometheusConverter_addCustomBucketsHistogramDataPoints(t *testing.T) {
|
||||
ls: lbls,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 3,
|
||||
Sum: 3,
|
||||
@ -1235,7 +1235,7 @@ func TestPrometheusConverter_addCustomBucketsHistogramDataPoints(t *testing.T) {
|
||||
ls: lbls,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 11,
|
||||
Sum: 5,
|
||||
@ -1292,7 +1292,7 @@ func TestPrometheusConverter_addCustomBucketsHistogramDataPoints(t *testing.T) {
|
||||
ls: lbls,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 6,
|
||||
Sum: 3,
|
||||
@ -1308,7 +1308,7 @@ func TestPrometheusConverter_addCustomBucketsHistogramDataPoints(t *testing.T) {
|
||||
ls: labelsAnother,
|
||||
meta: metadata.Metadata{},
|
||||
t: 0,
|
||||
ct: 0,
|
||||
st: 0,
|
||||
h: &histogram.Histogram{
|
||||
Count: 11,
|
||||
Sum: 5,
|
||||
|
||||
@ -1105,7 +1105,7 @@ func (a *noOpAppender) Append(_ storage.SeriesRef, _ labels.Labels, _ int64, _ f
|
||||
return 1, nil
|
||||
}
|
||||
|
||||
func (*noOpAppender) AppendCTZeroSample(_ storage.SeriesRef, _ labels.Labels, _, _ int64) (storage.SeriesRef, error) {
|
||||
func (*noOpAppender) AppendSTZeroSample(_ storage.SeriesRef, _ labels.Labels, _, _ int64) (storage.SeriesRef, error) {
|
||||
return 1, nil
|
||||
}
|
||||
|
||||
@ -1114,7 +1114,7 @@ func (a *noOpAppender) AppendHistogram(_ storage.SeriesRef, _ labels.Labels, _ i
|
||||
return 1, nil
|
||||
}
|
||||
|
||||
func (*noOpAppender) AppendHistogramCTZeroSample(_ storage.SeriesRef, _ labels.Labels, _, _ int64, _ *histogram.Histogram, _ *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
func (*noOpAppender) AppendHistogramSTZeroSample(_ storage.SeriesRef, _ labels.Labels, _, _ int64, _ *histogram.Histogram, _ *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
return 1, nil
|
||||
}
|
||||
|
||||
|
||||
@ -61,8 +61,8 @@ func (c *PrometheusConverter) addGaugeNumberDataPoints(ctx context.Context, data
|
||||
val = math.Float64frombits(value.StaleNaN)
|
||||
}
|
||||
ts := convertTimeStamp(pt.Timestamp())
|
||||
ct := convertTimeStamp(pt.StartTimestamp())
|
||||
if err := c.appender.AppendSample(labels, meta, ct, ts, val, nil); err != nil {
|
||||
st := convertTimeStamp(pt.StartTimestamp())
|
||||
if err := c.appender.AppendSample(labels, meta, st, ts, val, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
@ -104,12 +104,12 @@ func (c *PrometheusConverter) addSumNumberDataPoints(ctx context.Context, dataPo
|
||||
val = math.Float64frombits(value.StaleNaN)
|
||||
}
|
||||
ts := convertTimeStamp(pt.Timestamp())
|
||||
ct := convertTimeStamp(pt.StartTimestamp())
|
||||
st := convertTimeStamp(pt.StartTimestamp())
|
||||
exemplars, err := c.getPromExemplars(ctx, pt.Exemplars())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if err := c.appender.AppendSample(lbls, meta, ct, ts, val, exemplars); err != nil {
|
||||
if err := c.appender.AppendSample(lbls, meta, st, ts, val, exemplars); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
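The gauge and sum hunks above also show the other special value on this path: OTLP "no recorded value" points become Prometheus staleness markers via `math.Float64frombits(value.StaleNaN)`. A self-contained sketch of that encoding, with the bit pattern defined locally as an illustrative stand-in for the `model/value` constant, showing why such markers must be compared by bit pattern rather than with `==`:

```go
package main

import (
	"fmt"
	"math"
)

// staleNaN plays the role of value.StaleNaN in the hunk above: a specific NaN
// bit pattern reserved as a staleness marker. The literal here is illustrative.
const staleNaN = 0x7ff0000000000002

// isStaleNaN recognises the marker by its exact bits; an ordinary NaN check
// (v != v) would match every NaN, not just the staleness marker.
func isStaleNaN(v float64) bool {
	return math.Float64bits(v) == staleNaN
}

func main() {
	marker := math.Float64frombits(staleNaN)
	fmt.Println(math.IsNaN(marker), isStaleNaN(marker), isStaleNaN(math.NaN()))
	// true true false
}
```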
||||
@ -272,7 +272,7 @@ func TestPrometheusConverter_addSumNumberDataPoints(t *testing.T) {
|
||||
ls: lbls,
|
||||
meta: metadata.Metadata{},
|
||||
t: convertTimeStamp(ts),
|
||||
ct: convertTimeStamp(ts),
|
||||
st: convertTimeStamp(ts),
|
||||
v: 1,
|
||||
},
|
||||
}
|
||||
|
||||
@ -318,22 +318,22 @@ func (t *timestampTracker) AppendHistogram(_ storage.SeriesRef, _ labels.Labels,
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func (t *timestampTracker) AppendCTZeroSample(_ storage.SeriesRef, _ labels.Labels, _, ct int64) (storage.SeriesRef, error) {
|
||||
func (t *timestampTracker) AppendSTZeroSample(_ storage.SeriesRef, _ labels.Labels, _, st int64) (storage.SeriesRef, error) {
|
||||
t.samples++
|
||||
if ct > t.highestTimestamp {
|
||||
// Theoretically, we should never see a CT zero sample with a timestamp higher than the highest timestamp we've seen so far.
|
||||
if st > t.highestTimestamp {
|
||||
// Theoretically, we should never see a ST zero sample with a timestamp higher than the highest timestamp we've seen so far.
|
||||
// However, we're not going to enforce that here, as it is not the responsibility of the tracker to enforce this.
|
||||
t.highestTimestamp = ct
|
||||
t.highestTimestamp = st
|
||||
}
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func (t *timestampTracker) AppendHistogramCTZeroSample(_ storage.SeriesRef, _ labels.Labels, _, ct int64, _ *histogram.Histogram, _ *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
func (t *timestampTracker) AppendHistogramSTZeroSample(_ storage.SeriesRef, _ labels.Labels, _, st int64, _ *histogram.Histogram, _ *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
t.histograms++
|
||||
if ct > t.highestTimestamp {
|
||||
// Theoretically, we should never see a CT zero sample with a timestamp higher than the highest timestamp we've seen so far.
|
||||
if st > t.highestTimestamp {
|
||||
// Theoretically, we should never see a ST zero sample with a timestamp higher than the highest timestamp we've seen so far.
|
||||
// However, we're not going to enforce that here, as it is not the responsibility of the tracker to enforce this.
|
||||
t.highestTimestamp = ct
|
||||
t.highestTimestamp = st
|
||||
}
|
||||
return 0, nil
|
||||
}
|
||||
|
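The tracker above only counts appends and remembers the highest timestamp it has seen; as its comment says, an ST zero sample should normally not advance that high-water mark, and enforcing the invariant is left to the storage layer. A simplified stand-in for that contract:

```go
package main

import "fmt"

// timestampTracker is a toy version of the remote-storage tracker in the hunk
// above: it records appends and the highest timestamp without judging whether
// a start-timestamp zero sample is sane.
type timestampTracker struct {
	samples          int64
	highestTimestamp int64
}

func (t *timestampTracker) appendSTZeroSample(st int64) {
	t.samples++
	if st > t.highestTimestamp {
		// Normally st lies in the past relative to real samples, so this
		// branch should not fire; the tracker records it anyway and leaves
		// enforcement to the appender that actually writes the data.
		t.highestTimestamp = st
	}
}

func main() {
	tr := &timestampTracker{highestTimestamp: 1_000}
	tr.appendSTZeroSample(500)                   // typical case: ST is in the past
	fmt.Println(tr.samples, tr.highestTimestamp) // 1 1000
}
```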
||||
@ -53,7 +53,7 @@ type writeHandler struct {
|
||||
samplesWithInvalidLabelsTotal prometheus.Counter
|
||||
samplesAppendedWithoutMetadata prometheus.Counter
|
||||
|
||||
ingestCTZeroSample bool
|
||||
ingestSTZeroSample bool
|
||||
enableTypeAndUnitLabels bool
|
||||
appendMetadata bool
|
||||
}
|
||||
@ -65,7 +65,7 @@ const maxAheadTime = 10 * time.Minute
|
||||
//
|
||||
// NOTE(bwplotka): When accepting v2 proto and spec, partial writes are possible
|
||||
// as per https://prometheus.io/docs/specs/remote_write_spec_2_0/#partial-write.
|
||||
func NewWriteHandler(logger *slog.Logger, reg prometheus.Registerer, appendable storage.Appendable, acceptedMsgs remoteapi.MessageTypes, ingestCTZeroSample, enableTypeAndUnitLabels, appendMetadata bool) http.Handler {
|
||||
func NewWriteHandler(logger *slog.Logger, reg prometheus.Registerer, appendable storage.Appendable, acceptedMsgs remoteapi.MessageTypes, ingestSTZeroSample, enableTypeAndUnitLabels, appendMetadata bool) http.Handler {
|
||||
h := &writeHandler{
|
||||
logger: logger,
|
||||
appendable: appendable,
|
||||
@ -82,7 +82,7 @@ func NewWriteHandler(logger *slog.Logger, reg prometheus.Registerer, appendable
|
||||
Help: "The total number of received remote write samples (and histogram samples) which were ingested without corresponding metadata.",
|
||||
}),
|
||||
|
||||
ingestCTZeroSample: ingestCTZeroSample,
|
||||
ingestSTZeroSample: ingestSTZeroSample,
|
||||
enableTypeAndUnitLabels: enableTypeAndUnitLabels,
|
||||
appendMetadata: appendMetadata,
|
||||
}
|
||||
@ -355,15 +355,15 @@ func (h *writeHandler) appendV2(app storage.Appender, req *writev2.Request, rs *
|
||||
var ref storage.SeriesRef
|
||||
|
||||
// Samples.
|
||||
if h.ingestCTZeroSample && len(ts.Samples) > 0 && ts.Samples[0].Timestamp != 0 && ts.CreatedTimestamp != 0 {
|
||||
// CT only needs to be ingested for the first sample, it will be considered
|
||||
if h.ingestSTZeroSample && len(ts.Samples) > 0 && ts.Samples[0].Timestamp != 0 && ts.CreatedTimestamp != 0 {
|
||||
// ST only needs to be ingested for the first sample, it will be considered
|
||||
// out of order for the rest.
|
||||
ref, err = app.AppendCTZeroSample(ref, ls, ts.Samples[0].Timestamp, ts.CreatedTimestamp)
|
||||
if err != nil && !errors.Is(err, storage.ErrOutOfOrderCT) {
|
||||
ref, err = app.AppendSTZeroSample(ref, ls, ts.Samples[0].Timestamp, ts.CreatedTimestamp)
|
||||
if err != nil && !errors.Is(err, storage.ErrOutOfOrderST) {
|
||||
// Even for the first sample OOO is a common scenario because
|
||||
// we can't tell if a CT was already ingested in a previous request.
|
||||
// we can't tell if a ST was already ingested in a previous request.
|
||||
// We ignore the error.
|
||||
h.logger.Debug("Error when appending CT in remote write request", "err", err, "series", ls.String(), "created_timestamp", ts.CreatedTimestamp, "timestamp", ts.Samples[0].Timestamp)
|
||||
h.logger.Debug("Error when appending ST in remote write request", "err", err, "series", ls.String(), "start_timestamp", ts.CreatedTimestamp, "timestamp", ts.Samples[0].Timestamp)
|
||||
}
|
||||
}
|
||||
for _, s := range ts.Samples {
|
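The hunk above injects a single zero-valued sample at the series' start timestamp before the first real sample of a write request, and deliberately swallows `storage.ErrOutOfOrderST`, since the same ST has usually been ingested by an earlier request. A self-contained sketch of that caller pattern; the appender and error are local stand-ins, not the storage package's types:

```go
package main

import (
	"errors"
	"fmt"
)

var (
	errOutOfOrderST      = errors.New("out-of-order start timestamp")
	errSTNewerThanSample = errors.New("start timestamp newer than sample")
)

type sample struct {
	t int64
	v float64
}

type toyAppender struct {
	lastTS  int64
	written []sample
}

// AppendSTZeroSample writes a synthetic 0 at the start timestamp st, refusing
// anything newer than the sample it belongs to or at/behind the latest sample
// already written for the series.
func (a *toyAppender) AppendSTZeroSample(t, st int64) error {
	if st >= t {
		return errSTNewerThanSample
	}
	if st <= a.lastTS {
		return errOutOfOrderST
	}
	a.written = append(a.written, sample{t: st})
	a.lastTS = st
	return nil
}

func (a *toyAppender) Append(t int64, v float64) {
	a.written = append(a.written, sample{t: t, v: v})
	a.lastTS = t
}

func main() {
	app := &toyAppender{}
	req := []sample{{t: 100, v: 7}, {t: 200, v: 9}}
	const startTS = 40

	// The ST zero sample is injected once, before the first sample of the
	// request; an out-of-order ST (already ingested earlier) is ignored.
	if err := app.AppendSTZeroSample(req[0].t, startTS); err != nil && !errors.Is(err, errOutOfOrderST) {
		panic(err)
	}
	for _, s := range req {
		app.Append(s.t, s.v)
	}
	fmt.Println(app.written) // [{40 0} {100 7} {200 9}]
}
```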
||||
@ -387,15 +387,15 @@ func (h *writeHandler) appendV2(app storage.Appender, req *writev2.Request, rs *
|
||||
|
||||
// Native Histograms.
|
||||
for _, hp := range ts.Histograms {
|
||||
if h.ingestCTZeroSample && hp.Timestamp != 0 && ts.CreatedTimestamp != 0 {
|
||||
// Differently from samples, we need to handle CT for each histogram instead of just the first one.
|
||||
if h.ingestSTZeroSample && hp.Timestamp != 0 && ts.CreatedTimestamp != 0 {
|
||||
// Differently from samples, we need to handle ST for each histogram instead of just the first one.
|
||||
// This is because histograms and float histograms are stored separately, even if they have the same labels.
|
||||
ref, err = h.handleHistogramZeroSample(app, ref, ls, hp, ts.CreatedTimestamp)
|
||||
if err != nil && !errors.Is(err, storage.ErrOutOfOrderCT) {
|
||||
if err != nil && !errors.Is(err, storage.ErrOutOfOrderST) {
|
||||
// Even for the first sample OOO is a common scenario because
|
||||
// we can't tell if a CT was already ingested in a previous request.
|
||||
// we can't tell if a ST was already ingested in a previous request.
|
||||
// We ignore the error.
|
||||
h.logger.Debug("Error when appending CT in remote write request", "err", err, "series", ls.String(), "created_timestamp", ts.CreatedTimestamp, "timestamp", hp.Timestamp)
|
||||
h.logger.Debug("Error when appending ST in remote write request", "err", err, "series", ls.String(), "start_timestamp", ts.CreatedTimestamp, "timestamp", hp.Timestamp)
|
||||
}
|
||||
}
|
||||
if hp.IsFloatHistogram() {
|
||||
@ -474,14 +474,14 @@ func (h *writeHandler) appendV2(app storage.Appender, req *writev2.Request, rs *
|
||||
return samplesWithoutMetadata, http.StatusBadRequest, errors.Join(badRequestErrs...)
|
||||
}
|
||||
|
||||
// handleHistogramZeroSample appends CT as a zero-value sample with CT value as the sample timestamp.
|
||||
// It doesn't return errors in case of out of order CT.
|
||||
func (*writeHandler) handleHistogramZeroSample(app storage.Appender, ref storage.SeriesRef, l labels.Labels, hist writev2.Histogram, ct int64) (storage.SeriesRef, error) {
|
||||
// handleHistogramZeroSample appends ST as a zero-value sample with ST value as the sample timestamp.
|
||||
// It doesn't return errors in case of out of order ST.
|
||||
func (*writeHandler) handleHistogramZeroSample(app storage.Appender, ref storage.SeriesRef, l labels.Labels, hist writev2.Histogram, st int64) (storage.SeriesRef, error) {
|
||||
var err error
|
||||
if hist.IsFloatHistogram() {
|
||||
ref, err = app.AppendHistogramCTZeroSample(ref, l, hist.Timestamp, ct, nil, hist.ToFloatHistogram())
|
||||
ref, err = app.AppendHistogramSTZeroSample(ref, l, hist.Timestamp, st, nil, hist.ToFloatHistogram())
|
||||
} else {
|
||||
ref, err = app.AppendHistogramCTZeroSample(ref, l, hist.Timestamp, ct, hist.ToIntHistogram(), nil)
|
||||
ref, err = app.AppendHistogramSTZeroSample(ref, l, hist.Timestamp, st, hist.ToIntHistogram(), nil)
|
||||
}
|
||||
return ref, err
|
||||
}
|
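The preceding hunks also note that, unlike float samples, histograms get an ST zero attempt per sample, because integer and float histograms are stored separately even when they share labels. A toy illustration of that per-kind bookkeeping, under the simplifying assumption that "already covered" means the start timestamp is not newer than the last timestamp seen for that kind:

```go
package main

import "fmt"

func main() {
	type key struct {
		series string
		float  bool
	}
	lastTS := map[key]int64{}
	const startTS = 40

	inject := func(series string, isFloat bool) bool {
		k := key{series, isFloat}
		if startTS <= lastTS[k] { // already covered: would be out of order
			return false
		}
		lastTS[k] = startTS
		return true
	}

	fmt.Println(inject("http_request_duration_seconds", false)) // true: int histogram gets a zero
	fmt.Println(inject("http_request_duration_seconds", true))  // true: float histogram tracked separately
	fmt.Println(inject("http_request_duration_seconds", false)) // false: ST already ingested for this kind
}
```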
||||
@ -498,9 +498,9 @@ type OTLPOptions struct {
|
||||
LookbackDelta time.Duration
|
||||
// Add type and unit labels to the metrics.
|
||||
EnableTypeAndUnitLabels bool
|
||||
// IngestCTZeroSample enables writing zero samples based on the start time
|
||||
// IngestSTZeroSample enables writing zero samples based on the start time
|
||||
// of metrics.
|
||||
IngestCTZeroSample bool
|
||||
IngestSTZeroSample bool
|
||||
// AppendMetadata enables writing metadata to WAL when metadata-wal-records feature is enabled.
|
||||
AppendMetadata bool
|
||||
}
|
||||
@ -519,7 +519,7 @@ func NewOTLPWriteHandler(logger *slog.Logger, reg prometheus.Registerer, appenda
|
||||
config: configFunc,
|
||||
allowDeltaTemporality: opts.NativeDelta,
|
||||
lookbackDelta: opts.LookbackDelta,
|
||||
ingestCTZeroSample: opts.IngestCTZeroSample,
|
||||
ingestSTZeroSample: opts.IngestSTZeroSample,
|
||||
enableTypeAndUnitLabels: opts.EnableTypeAndUnitLabels,
|
||||
appendMetadata: opts.AppendMetadata,
|
||||
// Register metrics.
|
||||
@ -562,7 +562,7 @@ type rwExporter struct {
|
||||
config func() config.Config
|
||||
allowDeltaTemporality bool
|
||||
lookbackDelta time.Duration
|
||||
ingestCTZeroSample bool
|
||||
ingestSTZeroSample bool
|
||||
enableTypeAndUnitLabels bool
|
||||
appendMetadata bool
|
||||
|
||||
@ -576,7 +576,7 @@ func (rw *rwExporter) ConsumeMetrics(ctx context.Context, md pmetric.Metrics) er
|
||||
Appender: rw.appendable.Appender(ctx),
|
||||
maxTime: timestamp.FromTime(time.Now().Add(maxAheadTime)),
|
||||
}
|
||||
combinedAppender := otlptranslator.NewCombinedAppender(app, rw.logger, rw.ingestCTZeroSample, rw.appendMetadata, rw.metrics)
|
||||
combinedAppender := otlptranslator.NewCombinedAppender(app, rw.logger, rw.ingestSTZeroSample, rw.appendMetadata, rw.metrics)
|
||||
converter := otlptranslator.NewPrometheusConverter(combinedAppender)
|
||||
annots, err := converter.FromMetrics(ctx, md, otlptranslator.Settings{
|
||||
AddMetricSuffixes: otlpCfg.TranslationStrategy.ShouldAddSuffixes(),
|
||||
|
||||
@ -358,12 +358,12 @@ func TestRemoteWriteHandler_V2Message(t *testing.T) {
|
||||
|
||||
commitErr error
|
||||
appendSampleErr error
|
||||
appendCTZeroSampleErr error
|
||||
appendSTZeroSampleErr error
|
||||
appendHistogramErr error
|
||||
appendExemplarErr error
|
||||
updateMetadataErr error
|
||||
|
||||
ingestCTZeroSample bool
|
||||
ingestSTZeroSample bool
|
||||
enableTypeAndUnitLabels bool
|
||||
appendMetadata bool
|
||||
expectedLabels labels.Labels // For verifying type/unit labels
|
||||
@ -372,7 +372,7 @@ func TestRemoteWriteHandler_V2Message(t *testing.T) {
|
||||
desc: "All timeseries accepted/ct_enabled",
|
||||
input: writeV2RequestFixture.Timeseries,
|
||||
expectedCode: http.StatusNoContent,
|
||||
ingestCTZeroSample: true,
|
||||
ingestSTZeroSample: true,
|
||||
},
|
||||
{
|
||||
desc: "All timeseries accepted/ct_disabled",
|
||||
@ -701,12 +701,12 @@ func TestRemoteWriteHandler_V2Message(t *testing.T) {
|
||||
appendable := &mockAppendable{
|
||||
commitErr: tc.commitErr,
|
||||
appendSampleErr: tc.appendSampleErr,
|
||||
appendCTZeroSampleErr: tc.appendCTZeroSampleErr,
|
||||
appendSTZeroSampleErr: tc.appendSTZeroSampleErr,
|
||||
appendHistogramErr: tc.appendHistogramErr,
|
||||
appendExemplarErr: tc.appendExemplarErr,
|
||||
updateMetadataErr: tc.updateMetadataErr,
|
||||
}
|
||||
handler := NewWriteHandler(promslog.NewNopLogger(), nil, appendable, []remoteapi.WriteMessageType{remoteapi.WriteV2MessageType}, tc.ingestCTZeroSample, tc.enableTypeAndUnitLabels, tc.appendMetadata)
|
||||
handler := NewWriteHandler(promslog.NewNopLogger(), nil, appendable, []remoteapi.WriteMessageType{remoteapi.WriteV2MessageType}, tc.ingestSTZeroSample, tc.enableTypeAndUnitLabels, tc.appendMetadata)
|
||||
|
||||
recorder := httptest.NewRecorder()
|
||||
handler.ServeHTTP(recorder, req)
|
||||
@ -758,7 +758,7 @@ func TestRemoteWriteHandler_V2Message(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
for _, s := range ts.Samples {
|
||||
if ts.CreatedTimestamp != 0 && tc.ingestCTZeroSample {
|
||||
if ts.CreatedTimestamp != 0 && tc.ingestSTZeroSample {
|
||||
requireEqual(t, mockSample{ls, ts.CreatedTimestamp, 0}, appendable.samples[i])
|
||||
i++
|
||||
}
|
||||
@ -768,7 +768,7 @@ func TestRemoteWriteHandler_V2Message(t *testing.T) {
|
||||
for _, hp := range ts.Histograms {
|
||||
if hp.IsFloatHistogram() {
|
||||
fh := hp.ToFloatHistogram()
|
||||
if !zeroFloatHistogramIngested && ts.CreatedTimestamp != 0 && tc.ingestCTZeroSample {
|
||||
if !zeroFloatHistogramIngested && ts.CreatedTimestamp != 0 && tc.ingestSTZeroSample {
|
||||
requireEqual(t, mockHistogram{ls, ts.CreatedTimestamp, nil, &histogram.FloatHistogram{}}, appendable.histograms[k])
|
||||
k++
|
||||
zeroFloatHistogramIngested = true
|
||||
@ -776,7 +776,7 @@ func TestRemoteWriteHandler_V2Message(t *testing.T) {
|
||||
requireEqual(t, mockHistogram{ls, hp.Timestamp, nil, fh}, appendable.histograms[k])
|
||||
} else {
|
||||
h := hp.ToIntHistogram()
|
||||
if !zeroHistogramIngested && ts.CreatedTimestamp != 0 && tc.ingestCTZeroSample {
|
||||
if !zeroHistogramIngested && ts.CreatedTimestamp != 0 && tc.ingestSTZeroSample {
|
||||
requireEqual(t, mockHistogram{ls, ts.CreatedTimestamp, &histogram.Histogram{}, nil}, appendable.histograms[k])
|
||||
k++
|
||||
zeroHistogramIngested = true
|
||||
@ -785,7 +785,7 @@ func TestRemoteWriteHandler_V2Message(t *testing.T) {
|
||||
}
|
||||
k++
|
||||
}
|
||||
if ts.CreatedTimestamp != 0 && tc.ingestCTZeroSample {
|
||||
if ts.CreatedTimestamp != 0 && tc.ingestSTZeroSample {
|
||||
require.True(t, zeroHistogramIngested)
|
||||
require.True(t, zeroFloatHistogramIngested)
|
||||
}
|
||||
@ -1190,7 +1190,7 @@ type mockAppendable struct {
|
||||
// optional errors to inject.
|
||||
commitErr error
|
||||
appendSampleErr error
|
||||
appendCTZeroSampleErr error
|
||||
appendSTZeroSampleErr error
|
||||
appendHistogramErr error
|
||||
appendExemplarErr error
|
||||
updateMetadataErr error
|
||||
@ -1342,13 +1342,13 @@ func (m *mockAppendable) AppendHistogram(_ storage.SeriesRef, l labels.Labels, t
|
||||
return storage.SeriesRef(hash), nil
|
||||
}
|
||||
|
||||
func (m *mockAppendable) AppendHistogramCTZeroSample(_ storage.SeriesRef, l labels.Labels, t, ct int64, h *histogram.Histogram, _ *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
if m.appendCTZeroSampleErr != nil {
|
||||
return 0, m.appendCTZeroSampleErr
|
||||
func (m *mockAppendable) AppendHistogramSTZeroSample(_ storage.SeriesRef, l labels.Labels, t, st int64, h *histogram.Histogram, _ *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
if m.appendSTZeroSampleErr != nil {
|
||||
return 0, m.appendSTZeroSampleErr
|
||||
}
|
||||
|
||||
// Created Timestamp can't be higher than the original sample's timestamp.
|
||||
if ct > t {
|
||||
if st > t {
|
||||
return 0, storage.ErrOutOfOrderSample
|
||||
}
|
||||
hash := l.Hash()
|
||||
@ -1358,10 +1358,10 @@ func (m *mockAppendable) AppendHistogramCTZeroSample(_ storage.SeriesRef, l labe
|
||||
} else {
|
||||
latestTs = m.latestFloatHist[hash]
|
||||
}
|
||||
if ct < latestTs {
|
||||
if st < latestTs {
|
||||
return 0, storage.ErrOutOfOrderSample
|
||||
}
|
||||
if ct == latestTs {
|
||||
if st == latestTs {
|
||||
return 0, storage.ErrDuplicateSampleForTimestamp
|
||||
}
|
||||
|
||||
@ -1374,11 +1374,11 @@ func (m *mockAppendable) AppendHistogramCTZeroSample(_ storage.SeriesRef, l labe
|
||||
}
|
||||
|
||||
if h != nil {
|
||||
m.latestHistogram[hash] = ct
|
||||
m.histograms = append(m.histograms, mockHistogram{l, ct, &histogram.Histogram{}, nil})
|
||||
m.latestHistogram[hash] = st
|
||||
m.histograms = append(m.histograms, mockHistogram{l, st, &histogram.Histogram{}, nil})
|
||||
} else {
|
||||
m.latestFloatHist[hash] = ct
|
||||
m.histograms = append(m.histograms, mockHistogram{l, ct, nil, &histogram.FloatHistogram{}})
|
||||
m.latestFloatHist[hash] = st
|
||||
m.histograms = append(m.histograms, mockHistogram{l, st, nil, &histogram.FloatHistogram{}})
|
||||
}
|
||||
return storage.SeriesRef(hash), nil
|
||||
}
|
||||
@ -1392,21 +1392,21 @@ func (m *mockAppendable) UpdateMetadata(ref storage.SeriesRef, l labels.Labels,
|
||||
return ref, nil
|
||||
}
|
||||
|
||||
func (m *mockAppendable) AppendCTZeroSample(_ storage.SeriesRef, l labels.Labels, t, ct int64) (storage.SeriesRef, error) {
|
||||
if m.appendCTZeroSampleErr != nil {
|
||||
return 0, m.appendCTZeroSampleErr
|
||||
func (m *mockAppendable) AppendSTZeroSample(_ storage.SeriesRef, l labels.Labels, t, st int64) (storage.SeriesRef, error) {
|
||||
if m.appendSTZeroSampleErr != nil {
|
||||
return 0, m.appendSTZeroSampleErr
|
||||
}
|
||||
|
||||
// Created Timestamp can't be higher than the original sample's timestamp.
|
||||
if ct > t {
|
||||
if st > t {
|
||||
return 0, storage.ErrOutOfOrderSample
|
||||
}
|
||||
hash := l.Hash()
|
||||
latestTs := m.latestSample[hash]
|
||||
if ct < latestTs {
|
||||
if st < latestTs {
|
||||
return 0, storage.ErrOutOfOrderSample
|
||||
}
|
||||
if ct == latestTs {
|
||||
if st == latestTs {
|
||||
return 0, storage.ErrDuplicateSampleForTimestamp
|
||||
}
|
||||
|
||||
@ -1417,8 +1417,8 @@ func (m *mockAppendable) AppendCTZeroSample(_ storage.SeriesRef, l labels.Labels
|
||||
return 0, tsdb.ErrInvalidSample
|
||||
}
|
||||
|
||||
m.latestSample[hash] = ct
|
||||
m.samples = append(m.samples, mockSample{l, ct, 0})
|
||||
m.latestSample[hash] = st
|
||||
m.samples = append(m.samples, mockSample{l, st, 0})
|
||||
return storage.SeriesRef(hash), nil
|
||||
}
|
||||
|
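The mock appender above encodes the ordering contract for `AppendSTZeroSample`: the start timestamp may not exceed the sample's own timestamp, may not fall behind the latest sample already stored, and may not collide with it exactly. A compact restatement of those three checks, with local errors standing in for the storage package's sentinels:

```go
package main

import (
	"errors"
	"fmt"
)

var (
	errOutOfOrder = errors.New("out of order sample")
	errDuplicate  = errors.New("duplicate sample for timestamp")
)

// validateSTZero reproduces the three checks from the mock above: st must
// precede the sample timestamp t and must be strictly newer than the latest
// timestamp already stored for the series.
func validateSTZero(t, st, latest int64) error {
	switch {
	case st > t:
		return errOutOfOrder // ST can't be newer than the sample it belongs to
	case st < latest:
		return errOutOfOrder // zero sample would land behind existing data
	case st == latest:
		return errDuplicate // zero sample would collide with existing data
	}
	return nil
}

func main() {
	fmt.Println(validateSTZero(100, 40, 0))  // <nil>
	fmt.Println(validateSTZero(100, 120, 0)) // out of order sample
	fmt.Println(validateSTZero(100, 40, 60)) // out of order sample
	fmt.Println(validateSTZero(100, 60, 60)) // duplicate sample for timestamp
}
```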
||||
|
||||
@ -938,7 +938,7 @@ func TestOTLPWriteHandler(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
// Check that start time is ingested if ingestCTZeroSample is enabled
|
||||
// Check that start time is ingested if ingestSTZeroSample is enabled
|
||||
// and the start time is actually set (non-zero).
|
||||
func TestOTLPWriteHandler_StartTime(t *testing.T) {
|
||||
timestamp := time.Now()
|
||||
@ -1023,72 +1023,72 @@ func TestOTLPWriteHandler_StartTime(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
expectedSamplesWithCTZero := make([]mockSample, 0, len(expectedSamples)*2-1) // All samples will get CT zero, except target_info.
|
||||
expectedSamplesWithSTZero := make([]mockSample, 0, len(expectedSamples)*2-1) // All samples will get ST zero, except target_info.
|
||||
for _, s := range expectedSamples {
|
||||
if s.l.Get(model.MetricNameLabel) != "target_info" {
|
||||
expectedSamplesWithCTZero = append(expectedSamplesWithCTZero, mockSample{
|
||||
expectedSamplesWithSTZero = append(expectedSamplesWithSTZero, mockSample{
|
||||
l: s.l.Copy(),
|
||||
t: startTime.UnixMilli(),
|
||||
v: 0,
|
||||
})
|
||||
}
|
||||
expectedSamplesWithCTZero = append(expectedSamplesWithCTZero, s)
|
||||
expectedSamplesWithSTZero = append(expectedSamplesWithSTZero, s)
|
||||
}
|
||||
expectedHistogramsWithCTZero := make([]mockHistogram, 0, len(expectedHistograms)*2)
|
||||
expectedHistogramsWithSTZero := make([]mockHistogram, 0, len(expectedHistograms)*2)
|
||||
for _, s := range expectedHistograms {
|
||||
if s.l.Get(model.MetricNameLabel) != "target_info" {
|
||||
expectedHistogramsWithCTZero = append(expectedHistogramsWithCTZero, mockHistogram{
|
||||
expectedHistogramsWithSTZero = append(expectedHistogramsWithSTZero, mockHistogram{
|
||||
l: s.l.Copy(),
|
||||
t: startTime.UnixMilli(),
|
||||
h: &histogram.Histogram{},
|
||||
})
|
||||
}
|
||||
expectedHistogramsWithCTZero = append(expectedHistogramsWithCTZero, s)
|
||||
expectedHistogramsWithSTZero = append(expectedHistogramsWithSTZero, s)
|
||||
}
|
||||
|
||||
for _, testCase := range []struct {
|
||||
name string
|
||||
otlpOpts OTLPOptions
|
||||
startTime time.Time
|
||||
expectCTZero bool
|
||||
expectSTZero bool
|
||||
expectedSamples []mockSample
|
||||
expectedHistograms []mockHistogram
|
||||
}{
|
||||
{
|
||||
name: "IngestCTZero=false/startTime=0",
|
||||
name: "IngestSTZero=false/startTime=0",
|
||||
otlpOpts: OTLPOptions{
|
||||
IngestCTZeroSample: false,
|
||||
IngestSTZeroSample: false,
|
||||
},
|
||||
startTime: zeroTime,
|
||||
expectedSamples: expectedSamples,
|
||||
expectedHistograms: expectedHistograms,
|
||||
},
|
||||
{
|
||||
name: "IngestCTZero=true/startTime=0",
|
||||
name: "IngestSTZero=true/startTime=0",
|
||||
otlpOpts: OTLPOptions{
|
||||
IngestCTZeroSample: true,
|
||||
IngestSTZeroSample: true,
|
||||
},
|
||||
startTime: zeroTime,
|
||||
expectedSamples: expectedSamples,
|
||||
expectedHistograms: expectedHistograms,
|
||||
},
|
||||
{
|
||||
name: "IngestCTZero=false/startTime=ts-1ms",
|
||||
name: "IngestSTZero=false/startTime=ts-1ms",
|
||||
otlpOpts: OTLPOptions{
|
||||
IngestCTZeroSample: false,
|
||||
IngestSTZeroSample: false,
|
||||
},
|
||||
startTime: startTime,
|
||||
expectedSamples: expectedSamples,
|
||||
expectedHistograms: expectedHistograms,
|
||||
},
|
||||
{
|
||||
name: "IngestCTZero=true/startTime=ts-1ms",
|
||||
name: "IngestSTZero=true/startTime=ts-1ms",
|
||||
otlpOpts: OTLPOptions{
|
||||
IngestCTZeroSample: true,
|
||||
IngestSTZeroSample: true,
|
||||
},
|
||||
startTime: startTime,
|
||||
expectedSamples: expectedSamplesWithCTZero,
|
||||
expectedHistograms: expectedHistogramsWithCTZero,
|
||||
expectedSamples: expectedSamplesWithSTZero,
|
||||
expectedHistograms: expectedHistogramsWithSTZero,
|
||||
},
|
||||
} {
|
||||
t.Run(testCase.name, func(t *testing.T) {
|
||||
|
||||
@ -997,7 +997,7 @@ func (*appender) UpdateMetadata(storage.SeriesRef, labels.Labels, metadata.Metad
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func (a *appender) AppendHistogramCTZeroSample(ref storage.SeriesRef, l labels.Labels, t, ct int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
func (a *appender) AppendHistogramSTZeroSample(ref storage.SeriesRef, l labels.Labels, t, st int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
if h != nil {
|
||||
if err := h.Validate(); err != nil {
|
||||
return 0, err
|
||||
@ -1008,8 +1008,8 @@ func (a *appender) AppendHistogramCTZeroSample(ref storage.SeriesRef, l labels.L
|
||||
return 0, err
|
||||
}
|
||||
}
|
||||
if ct >= t {
|
||||
return 0, storage.ErrCTNewerThanSample
|
||||
if st >= t {
|
||||
return 0, storage.ErrSTNewerThanSample
|
||||
}
|
||||
|
||||
series := a.series.GetByID(chunks.HeadSeriesRef(ref))
|
||||
@ -1038,29 +1038,29 @@ func (a *appender) AppendHistogramCTZeroSample(ref storage.SeriesRef, l labels.L
|
||||
series.Lock()
|
||||
defer series.Unlock()
|
||||
|
||||
if ct <= a.minValidTime(series.lastTs) {
|
||||
return 0, storage.ErrOutOfOrderCT
|
||||
if st <= a.minValidTime(series.lastTs) {
|
||||
return 0, storage.ErrOutOfOrderST
|
||||
}
|
||||
|
||||
if ct <= series.lastTs {
|
||||
if st <= series.lastTs {
|
||||
// discard the sample if it's out of order.
|
||||
return 0, storage.ErrOutOfOrderCT
|
||||
return 0, storage.ErrOutOfOrderST
|
||||
}
|
||||
series.lastTs = ct
|
||||
series.lastTs = st
|
||||
|
||||
switch {
|
||||
case h != nil:
|
||||
zeroHistogram := &histogram.Histogram{}
|
||||
a.pendingHistograms = append(a.pendingHistograms, record.RefHistogramSample{
|
||||
Ref: series.ref,
|
||||
T: ct,
|
||||
T: st,
|
||||
H: zeroHistogram,
|
||||
})
|
||||
a.histogramSeries = append(a.histogramSeries, series)
|
||||
case fh != nil:
|
||||
a.pendingFloatHistograms = append(a.pendingFloatHistograms, record.RefFloatHistogramSample{
|
||||
Ref: series.ref,
|
||||
T: ct,
|
||||
T: st,
|
||||
FH: &histogram.FloatHistogram{},
|
||||
})
|
||||
a.floatHistogramSeries = append(a.floatHistogramSeries, series)
|
||||
@ -1070,9 +1070,9 @@ func (a *appender) AppendHistogramCTZeroSample(ref storage.SeriesRef, l labels.L
|
||||
return storage.SeriesRef(series.ref), nil
|
||||
}
|
||||
|
||||
func (a *appender) AppendCTZeroSample(ref storage.SeriesRef, l labels.Labels, t, ct int64) (storage.SeriesRef, error) {
|
||||
if ct >= t {
|
||||
return 0, storage.ErrCTNewerThanSample
|
||||
func (a *appender) AppendSTZeroSample(ref storage.SeriesRef, l labels.Labels, t, st int64) (storage.SeriesRef, error) {
|
||||
if st >= t {
|
||||
return 0, storage.ErrSTNewerThanSample
|
||||
}
|
||||
|
||||
series := a.series.GetByID(chunks.HeadSeriesRef(ref))
|
||||
@ -1106,16 +1106,16 @@ func (a *appender) AppendCTZeroSample(ref storage.SeriesRef, l labels.Labels, t,
|
||||
return 0, storage.ErrOutOfOrderSample
|
||||
}
|
||||
|
||||
if ct <= series.lastTs {
|
||||
if st <= series.lastTs {
|
||||
// discard the sample if it's out of order.
|
||||
return 0, storage.ErrOutOfOrderCT
|
||||
return 0, storage.ErrOutOfOrderST
|
||||
}
|
||||
series.lastTs = ct
|
||||
series.lastTs = st
|
||||
|
||||
// NOTE: always modify pendingSamples and sampleSeries together.
|
||||
a.pendingSamples = append(a.pendingSamples, record.RefSample{
|
||||
Ref: series.ref,
|
||||
T: ct,
|
||||
T: st,
|
||||
V: 0,
|
||||
})
|
||||
a.sampleSeries = append(a.sampleSeries, series)
|
||||
|
||||
@ -1142,12 +1142,12 @@ type walSample struct {
|
||||
ref storage.SeriesRef
|
||||
}
|
||||
|
||||
func TestDBCreatedTimestampSamplesIngestion(t *testing.T) {
|
||||
func TestDBStartTimestampSamplesIngestion(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
type appendableSample struct {
|
||||
t int64
|
||||
ct int64
|
||||
st int64
|
||||
v float64
|
||||
lbls labels.Labels
|
||||
h *histogram.Histogram
|
||||
@ -1169,8 +1169,8 @@ func TestDBCreatedTimestampSamplesIngestion(t *testing.T) {
|
||||
{
|
||||
name: "in order ct+normal sample/floatSamples",
|
||||
inputSamples: []appendableSample{
|
||||
{t: 100, ct: 1, v: 10, lbls: defLbls},
|
||||
{t: 101, ct: 1, v: 10, lbls: defLbls},
|
||||
{t: 100, st: 1, v: 10, lbls: defLbls},
|
||||
{t: 101, st: 1, v: 10, lbls: defLbls},
|
||||
},
|
||||
expectedSamples: []*walSample{
|
||||
{t: 1, f: 0, lbls: defLbls},
|
||||
@ -1179,17 +1179,17 @@ func TestDBCreatedTimestampSamplesIngestion(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CT+float && CT+histogram samples",
|
||||
name: "ST+float && ST+histogram samples",
|
||||
inputSamples: []appendableSample{
|
||||
{
|
||||
t: 100,
|
||||
ct: 30,
|
||||
st: 30,
|
||||
v: 20,
|
||||
lbls: defLbls,
|
||||
},
|
||||
{
|
||||
t: 300,
|
||||
ct: 230,
|
||||
st: 230,
|
||||
h: testHistogram,
|
||||
lbls: defLbls,
|
||||
},
|
||||
@ -1203,20 +1203,20 @@ func TestDBCreatedTimestampSamplesIngestion(t *testing.T) {
|
||||
expectedSeriesCount: 1,
|
||||
},
|
||||
{
|
||||
name: "CT+float && CT+histogram samples with error",
|
||||
name: "ST+float && ST+histogram samples with error",
|
||||
inputSamples: []appendableSample{
|
||||
{
|
||||
// invalid CT
|
||||
// invalid ST
|
||||
t: 100,
|
||||
ct: 100,
|
||||
st: 100,
|
||||
v: 10,
|
||||
lbls: defLbls,
|
||||
expectsError: true,
|
||||
},
|
||||
{
|
||||
// invalid CT histogram
|
||||
// invalid ST histogram
|
||||
t: 300,
|
||||
ct: 300,
|
||||
st: 300,
|
||||
h: testHistogram,
|
||||
lbls: defLbls,
|
||||
expectsError: true,
|
||||
@ -1231,8 +1231,8 @@ func TestDBCreatedTimestampSamplesIngestion(t *testing.T) {
|
||||
{
|
||||
name: "In order ct+normal sample/histogram",
|
||||
inputSamples: []appendableSample{
|
||||
{t: 100, h: testHistogram, ct: 1, lbls: defLbls},
|
||||
{t: 101, h: testHistogram, ct: 1, lbls: defLbls},
|
||||
{t: 100, h: testHistogram, st: 1, lbls: defLbls},
|
||||
{t: 101, h: testHistogram, st: 1, lbls: defLbls},
|
||||
},
|
||||
expectedSamples: []*walSample{
|
||||
{t: 1, h: &histogram.Histogram{}},
|
||||
@ -1243,10 +1243,10 @@ func TestDBCreatedTimestampSamplesIngestion(t *testing.T) {
|
||||
{
|
||||
name: "ct+normal then OOO sample/float",
|
||||
inputSamples: []appendableSample{
|
||||
{t: 60_000, ct: 40_000, v: 10, lbls: defLbls},
|
||||
{t: 120_000, ct: 40_000, v: 10, lbls: defLbls},
|
||||
{t: 180_000, ct: 40_000, v: 10, lbls: defLbls},
|
||||
{t: 50_000, ct: 40_000, v: 10, lbls: defLbls},
|
||||
{t: 60_000, st: 40_000, v: 10, lbls: defLbls},
|
||||
{t: 120_000, st: 40_000, v: 10, lbls: defLbls},
|
||||
{t: 180_000, st: 40_000, v: 10, lbls: defLbls},
|
||||
{t: 50_000, st: 40_000, v: 10, lbls: defLbls},
|
||||
},
|
||||
expectedSamples: []*walSample{
|
||||
{t: 40_000, f: 0, lbls: defLbls},
|
||||
@ -1271,8 +1271,8 @@ func TestDBCreatedTimestampSamplesIngestion(t *testing.T) {
|
||||
for _, sample := range tc.inputSamples {
|
||||
// We supposed to write a Histogram to the WAL
|
||||
if sample.h != nil {
|
||||
_, err := app.AppendHistogramCTZeroSample(0, sample.lbls, sample.t, sample.ct, zeroHistogram, nil)
|
||||
if !errors.Is(err, storage.ErrOutOfOrderCT) {
|
||||
_, err := app.AppendHistogramSTZeroSample(0, sample.lbls, sample.t, sample.st, zeroHistogram, nil)
|
||||
if !errors.Is(err, storage.ErrOutOfOrderST) {
|
||||
require.Equal(t, sample.expectsError, err != nil, "expected error: %v, got: %v", sample.expectsError, err)
|
||||
}
|
||||
|
||||
@ -1280,8 +1280,8 @@ func TestDBCreatedTimestampSamplesIngestion(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
} else {
|
||||
// We supposed to write a float sample to the WAL
|
||||
_, err := app.AppendCTZeroSample(0, sample.lbls, sample.t, sample.ct)
|
||||
if !errors.Is(err, storage.ErrOutOfOrderCT) {
|
||||
_, err := app.AppendSTZeroSample(0, sample.lbls, sample.t, sample.st)
|
||||
if !errors.Is(err, storage.ErrOutOfOrderST) {
|
||||
require.Equal(t, sample.expectsError, err != nil, "expected error: %v, got: %v", sample.expectsError, err)
|
||||
}
|
||||
|
||||
|
||||
@ -83,14 +83,14 @@ func (a *initAppender) AppendHistogram(ref storage.SeriesRef, l labels.Labels, t
|
||||
return a.app.AppendHistogram(ref, l, t, h, fh)
|
||||
}
|
||||
|
||||
func (a *initAppender) AppendHistogramCTZeroSample(ref storage.SeriesRef, l labels.Labels, t, ct int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
func (a *initAppender) AppendHistogramSTZeroSample(ref storage.SeriesRef, l labels.Labels, t, st int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
if a.app != nil {
|
||||
return a.app.AppendHistogramCTZeroSample(ref, l, t, ct, h, fh)
|
||||
return a.app.AppendHistogramSTZeroSample(ref, l, t, st, h, fh)
|
||||
}
|
||||
a.head.initTime(t)
|
||||
a.app = a.head.appender()
|
||||
|
||||
return a.app.AppendHistogramCTZeroSample(ref, l, t, ct, h, fh)
|
||||
return a.app.AppendHistogramSTZeroSample(ref, l, t, st, h, fh)
|
||||
}
|
||||
|
||||
func (a *initAppender) UpdateMetadata(ref storage.SeriesRef, l labels.Labels, m metadata.Metadata) (storage.SeriesRef, error) {
|
||||
@ -102,15 +102,15 @@ func (a *initAppender) UpdateMetadata(ref storage.SeriesRef, l labels.Labels, m
|
||||
return a.app.UpdateMetadata(ref, l, m)
|
||||
}
|
||||
|
||||
func (a *initAppender) AppendCTZeroSample(ref storage.SeriesRef, lset labels.Labels, t, ct int64) (storage.SeriesRef, error) {
|
||||
func (a *initAppender) AppendSTZeroSample(ref storage.SeriesRef, lset labels.Labels, t, st int64) (storage.SeriesRef, error) {
|
||||
if a.app != nil {
|
||||
return a.app.AppendCTZeroSample(ref, lset, t, ct)
|
||||
return a.app.AppendSTZeroSample(ref, lset, t, st)
|
||||
}
|
||||
|
||||
a.head.initTime(t)
|
||||
a.app = a.head.appender()
|
||||
|
||||
return a.app.AppendCTZeroSample(ref, lset, t, ct)
|
||||
return a.app.AppendSTZeroSample(ref, lset, t, st)
|
||||
}
|
||||
|
||||
// initTime initializes a head with the first timestamp. This only needs to be called
|
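The initAppender hunks above follow a lazy-initialization pattern: the real head appender is only created once the first call supplies a timestamp, and the head is initialized from the sample's timestamp `t`, not from the start timestamp `st`. A small sketch of that shape with toy types:

```go
package main

import "fmt"

type realAppender struct{ minTime int64 }

func (r *realAppender) AppendSTZeroSample(t, st int64) {
	fmt.Printf("append zero at %d (head initialised at %d)\n", st, r.minTime)
}

// lazyAppender mirrors the pattern above: the wrapped appender is created on
// first use, seeded with the sample's timestamp rather than the start time.
type lazyAppender struct {
	app     *realAppender
	initNow func(t int64) *realAppender
}

func (a *lazyAppender) AppendSTZeroSample(t, st int64) {
	if a.app == nil {
		a.app = a.initNow(t)
	}
	a.app.AppendSTZeroSample(t, st)
}

func main() {
	a := &lazyAppender{initNow: func(t int64) *realAppender { return &realAppender{minTime: t} }}
	a.AppendSTZeroSample(100, 40)
	a.AppendSTZeroSample(160, 40)
}
```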
||||
@ -483,12 +483,12 @@ func (a *headAppender) Append(ref storage.SeriesRef, lset labels.Labels, t int64
|
||||
return storage.SeriesRef(s.ref), nil
|
||||
}
|
||||
|
||||
// AppendCTZeroSample appends synthetic zero sample for ct timestamp. It returns
|
||||
// AppendSTZeroSample appends synthetic zero sample for st timestamp. It returns
|
||||
// error when sample can't be appended. See
|
||||
// storage.CreatedTimestampAppender.AppendCTZeroSample for further documentation.
|
||||
func (a *headAppender) AppendCTZeroSample(ref storage.SeriesRef, lset labels.Labels, t, ct int64) (storage.SeriesRef, error) {
|
||||
if ct >= t {
|
||||
return 0, storage.ErrCTNewerThanSample
|
||||
// storage.StartTimestampAppender.AppendSTZeroSample for further documentation.
|
||||
func (a *headAppender) AppendSTZeroSample(ref storage.SeriesRef, lset labels.Labels, t, st int64) (storage.SeriesRef, error) {
|
||||
if st >= t {
|
||||
return 0, storage.ErrSTNewerThanSample
|
||||
}
|
||||
|
||||
s := a.head.series.getByID(chunks.HeadSeriesRef(ref))
|
||||
@ -500,11 +500,11 @@ func (a *headAppender) AppendCTZeroSample(ref storage.SeriesRef, lset labels.Lab
|
||||
}
|
||||
}
|
||||
|
||||
// Check if CT wouldn't be OOO vs samples we already might have for this series.
|
||||
// Check if ST wouldn't be OOO vs samples we already might have for this series.
|
||||
// NOTE(bwplotka): This will be often hit as it's expected for long living
|
||||
// counters to share the same CT.
|
||||
// counters to share the same ST.
|
||||
s.Lock()
|
||||
isOOO, _, err := s.appendable(ct, 0, a.headMaxt, a.minValidTime, a.oooTimeWindow)
|
||||
isOOO, _, err := s.appendable(st, 0, a.headMaxt, a.minValidTime, a.oooTimeWindow)
|
||||
if err == nil {
|
||||
s.pendingCommit = true
|
||||
}
|
||||
@ -513,14 +513,14 @@ func (a *headAppender) AppendCTZeroSample(ref storage.SeriesRef, lset labels.Lab
|
||||
return 0, err
|
||||
}
|
||||
if isOOO {
|
||||
return storage.SeriesRef(s.ref), storage.ErrOutOfOrderCT
|
||||
return storage.SeriesRef(s.ref), storage.ErrOutOfOrderST
|
||||
}
|
||||
|
||||
if ct > a.maxt {
|
||||
a.maxt = ct
|
||||
if st > a.maxt {
|
||||
a.maxt = st
|
||||
}
|
||||
b := a.getCurrentBatch(stFloat, s.ref)
|
||||
b.floats = append(b.floats, record.RefSample{Ref: s.ref, T: ct, V: 0.0})
|
||||
b.floats = append(b.floats, record.RefSample{Ref: s.ref, T: st, V: 0.0})
|
||||
b.floatSeries = append(b.floatSeries, s)
|
||||
return storage.SeriesRef(s.ref), nil
|
||||
}
|
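The zero sample injected by the hunk above gives range queries a genuine starting point for a counter that began inside the query window. A simplified numeric illustration of why that matters, with plain subtraction standing in for the rate/increase calculation:

```go
package main

import "fmt"

type sample struct {
	t int64 // ms
	v float64
}

// increase is the naive difference between the first and last sample, the way
// a range calculation would see these points.
func increase(s []sample) float64 {
	return s[len(s)-1].v - s[0].v
}

func main() {
	// A counter created at t=40ms and first scraped at t=100ms with value 3.
	withoutZero := []sample{{100, 3}, {160, 5}}
	withZero := []sample{{40, 0}, {100, 3}, {160, 5}} // ST zero sample injected

	fmt.Println(increase(withoutZero)) // 2: the first 3 increments are invisible
	fmt.Println(increase(withZero))    // 5: the counter's start is captured
}
```

Note that, as the comment in the hunk says, long-lived counters report the same ST on every scrape, so only the first injection succeeds and later attempts return the out-of-order error that callers ignore.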
||||
@ -902,9 +902,9 @@ func (a *headAppender) AppendHistogram(ref storage.SeriesRef, lset labels.Labels
|
||||
return storage.SeriesRef(s.ref), nil
|
||||
}
|
||||
|
||||
func (a *headAppender) AppendHistogramCTZeroSample(ref storage.SeriesRef, lset labels.Labels, t, ct int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
if ct >= t {
|
||||
return 0, storage.ErrCTNewerThanSample
|
||||
func (a *headAppender) AppendHistogramSTZeroSample(ref storage.SeriesRef, lset labels.Labels, t, st int64, h *histogram.Histogram, fh *histogram.FloatHistogram) (storage.SeriesRef, error) {
|
||||
if st >= t {
|
||||
return 0, storage.ErrSTNewerThanSample
|
||||
}
|
||||
|
||||
s := a.head.series.getByID(chunks.HeadSeriesRef(ref))
|
||||
@ -919,7 +919,7 @@ func (a *headAppender) AppendHistogramCTZeroSample(ref storage.SeriesRef, lset l
|
||||
switch {
|
||||
case h != nil:
|
||||
zeroHistogram := &histogram.Histogram{
|
||||
// The CTZeroSample represents a counter reset by definition.
|
||||
// The STZeroSample represents a counter reset by definition.
|
||||
CounterResetHint: histogram.CounterReset,
|
||||
// Replicate other fields to avoid needless chunk creation.
|
||||
Schema: h.Schema,
|
||||
@ -927,41 +927,41 @@ func (a *headAppender) AppendHistogramCTZeroSample(ref storage.SeriesRef, lset l
|
||||
CustomValues: h.CustomValues,
|
||||
}
|
||||
s.Lock()
|
||||
// For CTZeroSamples OOO is not allowed.
|
||||
// For STZeroSamples OOO is not allowed.
|
||||
// We set it to true to make this implementation as close as possible to the float implementation.
|
||||
isOOO, _, err := s.appendableHistogram(ct, zeroHistogram, a.headMaxt, a.minValidTime, a.oooTimeWindow)
|
||||
isOOO, _, err := s.appendableHistogram(st, zeroHistogram, a.headMaxt, a.minValidTime, a.oooTimeWindow)
|
||||
if err != nil {
|
||||
s.Unlock()
|
||||
if errors.Is(err, storage.ErrOutOfOrderSample) {
|
||||
return 0, storage.ErrOutOfOrderCT
|
||||
return 0, storage.ErrOutOfOrderST
|
||||
}
|
||||
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// OOO is not allowed because after the first scrape, CT will be the same for most (if not all) future samples.
|
||||
// OOO is not allowed because after the first scrape, ST will be the same for most (if not all) future samples.
|
||||
// This is to prevent the injected zero from being marked as OOO forever.
|
||||
if isOOO {
|
||||
s.Unlock()
|
||||
return 0, storage.ErrOutOfOrderCT
|
||||
return 0, storage.ErrOutOfOrderST
|
||||
}
|
||||
|
||||
s.pendingCommit = true
|
||||
s.Unlock()
|
||||
st := stHistogram
|
||||
sTyp := stHistogram
|
||||
if h.UsesCustomBuckets() {
|
||||
st = stCustomBucketHistogram
|
||||
sTyp = stCustomBucketHistogram
|
||||
}
|
||||
b := a.getCurrentBatch(st, s.ref)
|
||||
b := a.getCurrentBatch(sTyp, s.ref)
|
||||
b.histograms = append(b.histograms, record.RefHistogramSample{
|
||||
Ref: s.ref,
|
||||
T: ct,
|
||||
T: st,
|
||||
H: zeroHistogram,
|
||||
})
|
||||
b.histogramSeries = append(b.histogramSeries, s)
|
||||
case fh != nil:
|
||||
zeroFloatHistogram := &histogram.FloatHistogram{
|
||||
// The CTZeroSample represents a counter reset by definition.
|
||||
// The STZeroSample represents a counter reset by definition.
|
||||
CounterResetHint: histogram.CounterReset,
|
||||
// Replicate other fields to avoid needless chunk creation.
|
||||
Schema: fh.Schema,
|
||||
@ -970,40 +970,40 @@ func (a *headAppender) AppendHistogramCTZeroSample(ref storage.SeriesRef, lset l
|
||||
}
|
||||
s.Lock()
|
||||
// We set it to true to make this implementation as close as possible to the float implementation.
|
||||
isOOO, _, err := s.appendableFloatHistogram(ct, zeroFloatHistogram, a.headMaxt, a.minValidTime, a.oooTimeWindow) // OOO is not allowed for CTZeroSamples.
|
||||
isOOO, _, err := s.appendableFloatHistogram(st, zeroFloatHistogram, a.headMaxt, a.minValidTime, a.oooTimeWindow) // OOO is not allowed for STZeroSamples.
|
||||
if err != nil {
|
||||
s.Unlock()
|
||||
if errors.Is(err, storage.ErrOutOfOrderSample) {
|
||||
return 0, storage.ErrOutOfOrderCT
|
||||
return 0, storage.ErrOutOfOrderST
|
||||
}
|
||||
|
||||
return 0, err
|
||||
}
|
||||
|
||||
// OOO is not allowed because after the first scrape, CT will be the same for most (if not all) future samples.
|
||||
// OOO is not allowed because after the first scrape, ST will be the same for most (if not all) future samples.
|
||||
// This is to prevent the injected zero from being marked as OOO forever.
|
||||
if isOOO {
|
||||
s.Unlock()
|
||||
return 0, storage.ErrOutOfOrderCT
|
||||
return 0, storage.ErrOutOfOrderST
|
||||
}
|
||||
|
||||
s.pendingCommit = true
|
||||
s.Unlock()
|
||||
st := stFloatHistogram
|
||||
sTyp := stFloatHistogram
|
||||
if fh.UsesCustomBuckets() {
|
||||
st = stCustomBucketFloatHistogram
|
||||
sTyp = stCustomBucketFloatHistogram
|
||||
}
|
||||
b := a.getCurrentBatch(st, s.ref)
|
||||
b := a.getCurrentBatch(sTyp, s.ref)
|
||||
b.floatHistograms = append(b.floatHistograms, record.RefFloatHistogramSample{
|
||||
Ref: s.ref,
|
||||
T: ct,
|
||||
T: st,
|
||||
FH: zeroFloatHistogram,
|
||||
})
|
||||
b.floatHistogramSeries = append(b.floatHistogramSeries, s)
|
||||
}
|
||||
|
||||
if ct > a.maxt {
|
||||
a.maxt = ct
|
||||
if st > a.maxt {
|
||||
a.maxt = st
|
||||
}
|
||||
|
||||
return storage.SeriesRef(s.ref), nil
|
||||
|
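The histogram variant above builds the injected zero as an empty histogram that is explicitly marked as a counter reset and copies the incoming histogram's layout so the head does not have to cut a new chunk for a schema change. A hedged sketch of that construction using the fields named in the hunk; the exact set of replicated fields in the repository is abbreviated here:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/histogram"
)

// zeroHistogramFor builds the ST zero sample for an incoming histogram: all
// counts are zero, the counter-reset hint is set (the zero sample is a reset
// by definition), and layout fields are copied from the original.
func zeroHistogramFor(h *histogram.Histogram) *histogram.Histogram {
	return &histogram.Histogram{
		CounterResetHint: histogram.CounterReset,
		Schema:           h.Schema,
		ZeroThreshold:    h.ZeroThreshold,
		CustomValues:     h.CustomValues,
	}
}

func main() {
	in := &histogram.Histogram{Schema: 1, ZeroThreshold: 1e-128, Count: 7, Sum: 12.5}
	z := zeroHistogramFor(in)
	fmt.Println(z.Count, z.Sum, z.Schema, z.CounterResetHint == histogram.CounterReset)
	// 0 0 1 true
}
```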
||||
@ -6715,7 +6715,7 @@ func TestHeadAppender_AppendFloatWithSameTimestampAsPreviousHistogram(t *testing
|
||||
require.ErrorIs(t, err, storage.NewDuplicateHistogramToFloatErr(2_000, 10.0))
|
||||
}
|
||||
|
||||
func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
func TestHeadAppender_AppendST(t *testing.T) {
|
||||
testHistogram := tsdbutil.GenerateTestHistogram(1)
|
||||
testHistogram.CounterResetHint = histogram.NotCounterReset
|
||||
testFloatHistogram := tsdbutil.GenerateTestFloatHistogram(1)
|
||||
@ -6743,7 +6743,7 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
fSample float64
|
||||
h *histogram.Histogram
|
||||
fh *histogram.FloatHistogram
|
||||
ct int64
|
||||
st int64
|
||||
}
|
||||
for _, tc := range []struct {
|
||||
name string
|
||||
@ -6753,8 +6753,8 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
{
|
||||
name: "In order ct+normal sample/floatSample",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, fSample: 10, ct: 1},
|
||||
{ts: 101, fSample: 10, ct: 1},
|
||||
{ts: 100, fSample: 10, st: 1},
|
||||
{ts: 101, fSample: 10, st: 1},
|
||||
},
|
||||
expectedSamples: []chunks.Sample{
|
||||
sample{t: 1, f: 0},
|
||||
@ -6765,8 +6765,8 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
{
|
||||
name: "In order ct+normal sample/histogram",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, h: testHistogram, ct: 1},
|
||||
{ts: 101, h: testHistogram, ct: 1},
|
||||
{ts: 100, h: testHistogram, st: 1},
|
||||
{ts: 101, h: testHistogram, st: 1},
|
||||
},
|
||||
expectedSamples: func() []chunks.Sample {
|
||||
return []chunks.Sample{
|
||||
@ -6779,8 +6779,8 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
{
|
||||
name: "In order ct+normal sample/floathistogram",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, fh: testFloatHistogram, ct: 1},
|
||||
{ts: 101, fh: testFloatHistogram, ct: 1},
|
||||
{ts: 100, fh: testFloatHistogram, st: 1},
|
||||
{ts: 101, fh: testFloatHistogram, st: 1},
|
||||
},
|
||||
expectedSamples: func() []chunks.Sample {
|
||||
return []chunks.Sample{
|
||||
@ -6791,10 +6791,10 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "Consecutive appends with same ct ignore ct/floatSample",
|
||||
name: "Consecutive appends with same st ignore st/floatSample",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, fSample: 10, ct: 1},
|
||||
{ts: 101, fSample: 10, ct: 1},
|
||||
{ts: 100, fSample: 10, st: 1},
|
||||
{ts: 101, fSample: 10, st: 1},
|
||||
},
|
||||
expectedSamples: []chunks.Sample{
|
||||
sample{t: 1, f: 0},
|
||||
@ -6803,10 +6803,10 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Consecutive appends with same ct ignore ct/histogram",
|
||||
name: "Consecutive appends with same st ignore st/histogram",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, h: testHistogram, ct: 1},
|
||||
{ts: 101, h: testHistogram, ct: 1},
|
||||
{ts: 100, h: testHistogram, st: 1},
|
||||
{ts: 101, h: testHistogram, st: 1},
|
||||
},
|
||||
expectedSamples: func() []chunks.Sample {
|
||||
return []chunks.Sample{
|
||||
@ -6817,10 +6817,10 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "Consecutive appends with same ct ignore ct/floathistogram",
|
||||
name: "Consecutive appends with same st ignore st/floathistogram",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, fh: testFloatHistogram, ct: 1},
|
||||
{ts: 101, fh: testFloatHistogram, ct: 1},
|
||||
{ts: 100, fh: testFloatHistogram, st: 1},
|
||||
{ts: 101, fh: testFloatHistogram, st: 1},
|
||||
},
|
||||
expectedSamples: func() []chunks.Sample {
|
||||
return []chunks.Sample{
|
||||
@ -6831,10 +6831,10 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "Consecutive appends with newer ct do not ignore ct/floatSample",
|
||||
name: "Consecutive appends with newer st do not ignore st/floatSample",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, fSample: 10, ct: 1},
|
||||
{ts: 102, fSample: 10, ct: 101},
|
||||
{ts: 100, fSample: 10, st: 1},
|
||||
{ts: 102, fSample: 10, st: 101},
|
||||
},
|
||||
expectedSamples: []chunks.Sample{
|
||||
sample{t: 1, f: 0},
|
||||
@ -6844,10 +6844,10 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Consecutive appends with newer ct do not ignore ct/histogram",
|
||||
name: "Consecutive appends with newer st do not ignore st/histogram",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, h: testHistogram, ct: 1},
|
||||
{ts: 102, h: testHistogram, ct: 101},
|
||||
{ts: 100, h: testHistogram, st: 1},
|
||||
{ts: 102, h: testHistogram, st: 101},
|
||||
},
|
||||
expectedSamples: []chunks.Sample{
|
||||
sample{t: 1, h: testZeroHistogram},
|
||||
@ -6857,10 +6857,10 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Consecutive appends with newer ct do not ignore ct/floathistogram",
|
||||
name: "Consecutive appends with newer st do not ignore st/floathistogram",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, fh: testFloatHistogram, ct: 1},
|
||||
{ts: 102, fh: testFloatHistogram, ct: 101},
|
||||
{ts: 100, fh: testFloatHistogram, st: 1},
|
||||
{ts: 102, fh: testFloatHistogram, st: 101},
|
||||
},
|
||||
expectedSamples: []chunks.Sample{
|
||||
sample{t: 1, fh: testZeroFloatHistogram},
|
||||
@ -6870,10 +6870,10 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CT equals to previous sample timestamp is ignored/floatSample",
|
||||
name: "ST equals to previous sample timestamp is ignored/floatSample",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, fSample: 10, ct: 1},
|
||||
{ts: 101, fSample: 10, ct: 100},
|
||||
{ts: 100, fSample: 10, st: 1},
|
||||
{ts: 101, fSample: 10, st: 100},
|
||||
},
|
||||
expectedSamples: []chunks.Sample{
|
||||
sample{t: 1, f: 0},
|
||||
@ -6882,10 +6882,10 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "CT equals to previous sample timestamp is ignored/histogram",
|
||||
name: "ST equals to previous sample timestamp is ignored/histogram",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, h: testHistogram, ct: 1},
|
||||
{ts: 101, h: testHistogram, ct: 100},
|
||||
{ts: 100, h: testHistogram, st: 1},
|
||||
{ts: 101, h: testHistogram, st: 100},
|
||||
},
|
||||
expectedSamples: func() []chunks.Sample {
|
||||
return []chunks.Sample{
|
||||
@ -6896,10 +6896,10 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
}(),
|
||||
},
|
||||
{
|
||||
name: "CT equals to previous sample timestamp is ignored/floathistogram",
|
||||
name: "ST equals to previous sample timestamp is ignored/floathistogram",
|
||||
appendableSamples: []appendableSamples{
|
||||
{ts: 100, fh: testFloatHistogram, ct: 1},
|
||||
{ts: 101, fh: testFloatHistogram, ct: 100},
|
||||
{ts: 100, fh: testFloatHistogram, st: 1},
|
||||
{ts: 101, fh: testFloatHistogram, st: 100},
|
||||
},
|
||||
expectedSamples: func() []chunks.Sample {
|
||||
return []chunks.Sample{
|
||||
@ -6920,7 +6920,7 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
for _, sample := range tc.appendableSamples {
|
||||
// Append float if it's a float test case
|
||||
if sample.fSample != 0 {
|
||||
_, err := a.AppendCTZeroSample(0, lbls, sample.ts, sample.ct)
|
||||
_, err := a.AppendSTZeroSample(0, lbls, sample.ts, sample.st)
|
||||
require.NoError(t, err)
|
||||
_, err = a.Append(0, lbls, sample.ts, sample.fSample)
|
||||
require.NoError(t, err)
|
||||
@ -6928,7 +6928,7 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
|
||||
// Append histograms if it's a histogram test case
|
||||
if sample.h != nil || sample.fh != nil {
|
||||
ref, err := a.AppendHistogramCTZeroSample(0, lbls, sample.ts, sample.ct, sample.h, sample.fh)
|
||||
ref, err := a.AppendHistogramSTZeroSample(0, lbls, sample.ts, sample.st, sample.h, sample.fh)
|
||||
require.NoError(t, err)
|
||||
_, err = a.AppendHistogram(ref, lbls, sample.ts, sample.h, sample.fh)
|
||||
require.NoError(t, err)
|
||||
@ -6944,12 +6944,12 @@ func TestHeadAppender_AppendCT(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestHeadAppender_AppendHistogramCTZeroSample(t *testing.T) {
func TestHeadAppender_AppendHistogramSTZeroSample(t *testing.T) {
type appendableSamples struct {
ts int64
h *histogram.Histogram
fh *histogram.FloatHistogram
ct int64 // 0 if no created timestamp.
st int64 // 0 if no created timestamp.
}
for _, tc := range []struct {
name string
@ -6957,32 +6957,32 @@ func TestHeadAppender_AppendHistogramCTZeroSample(t *testing.T) {
expectedError error
}{
{
name: "integer histogram CT lower than minValidTime initiates ErrOutOfBounds",
name: "integer histogram ST lower than minValidTime initiates ErrOutOfBounds",
appendableSamples: []appendableSamples{
{ts: 100, h: tsdbutil.GenerateTestHistogram(1), ct: -1},
{ts: 100, h: tsdbutil.GenerateTestHistogram(1), st: -1},
},
expectedError: storage.ErrOutOfBounds,
},
{
name: "float histograms CT lower than minValidTime initiates ErrOutOfBounds",
name: "float histograms ST lower than minValidTime initiates ErrOutOfBounds",
appendableSamples: []appendableSamples{
{ts: 100, fh: tsdbutil.GenerateTestFloatHistogram(1), ct: -1},
{ts: 100, fh: tsdbutil.GenerateTestFloatHistogram(1), st: -1},
},
expectedError: storage.ErrOutOfBounds,
},
{
name: "integer histogram CT duplicates an existing sample",
name: "integer histogram ST duplicates an existing sample",
appendableSamples: []appendableSamples{
{ts: 100, h: tsdbutil.GenerateTestHistogram(1)},
{ts: 200, h: tsdbutil.GenerateTestHistogram(1), ct: 100},
{ts: 200, h: tsdbutil.GenerateTestHistogram(1), st: 100},
},
expectedError: storage.ErrDuplicateSampleForTimestamp,
},
{
name: "float histogram CT duplicates an existing sample",
name: "float histogram ST duplicates an existing sample",
appendableSamples: []appendableSamples{
{ts: 100, fh: tsdbutil.GenerateTestFloatHistogram(1)},
{ts: 200, fh: tsdbutil.GenerateTestFloatHistogram(1), ct: 100},
{ts: 200, fh: tsdbutil.GenerateTestFloatHistogram(1), st: 100},
},
expectedError: storage.ErrDuplicateSampleForTimestamp,
},
@ -7000,8 +7000,8 @@ func TestHeadAppender_AppendHistogramCTZeroSample(t *testing.T) {
for _, sample := range tc.appendableSamples {
a := h.Appender(context.Background())
var err error
if sample.ct != 0 {
ref, err = a.AppendHistogramCTZeroSample(ref, lbls, sample.ts, sample.ct, sample.h, sample.fh)
if sample.st != 0 {
ref, err = a.AppendHistogramSTZeroSample(ref, lbls, sample.ts, sample.st, sample.h, sample.fh)
require.ErrorIs(t, err, tc.expectedError)
}
}
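For context, here is a minimal sketch (not part of this diff) of the call pattern the renamed appender methods are exercised with in the tests above: a zero sample is appended at the start timestamp `st` before the real sample at `ts`, for both float and histogram series. It assumes `storage.Appender` exposes the renamed methods with the signatures used in the test; the helper names and error handling are illustrative only.

```go
// Illustrative sketch only; helper names are hypothetical, signatures follow the test above.
package example

import (
	"github.com/prometheus/prometheus/model/histogram"
	"github.com/prometheus/prometheus/model/labels"
	"github.com/prometheus/prometheus/storage"
)

// ingestFloatWithST appends a synthetic zero sample at the start timestamp st,
// then the real float sample at ts, mirroring the test's call order.
func ingestFloatWithST(app storage.Appender, lset labels.Labels, ts, st int64, v float64) error {
	if _, err := app.AppendSTZeroSample(0, lset, ts, st); err != nil {
		return err
	}
	_, err := app.Append(0, lset, ts, v)
	return err
}

// ingestHistogramWithST does the same for integer or float histogram samples.
func ingestHistogramWithST(app storage.Appender, lset labels.Labels, ts, st int64, h *histogram.Histogram, fh *histogram.FloatHistogram) error {
	ref, err := app.AppendHistogramSTZeroSample(0, lset, ts, st, h, fh)
	if err != nil {
		return err
	}
	_, err = app.AppendHistogram(ref, lset, ts, h, fh)
	return err
}
```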
@ -290,7 +290,7 @@ func NewAPI(
rwEnabled bool,
acceptRemoteWriteProtoMsgs remoteapi.MessageTypes,
otlpEnabled, otlpDeltaToCumulative, otlpNativeDeltaIngestion bool,
ctZeroIngestionEnabled bool,
stZeroIngestionEnabled bool,
lookbackDelta time.Duration,
enableTypeAndUnitLabels bool,
appendMetadata bool,
@ -339,14 +339,14 @@ func NewAPI(
}

if rwEnabled {
a.remoteWriteHandler = remote.NewWriteHandler(logger, registerer, ap, acceptRemoteWriteProtoMsgs, ctZeroIngestionEnabled, enableTypeAndUnitLabels, appendMetadata)
a.remoteWriteHandler = remote.NewWriteHandler(logger, registerer, ap, acceptRemoteWriteProtoMsgs, stZeroIngestionEnabled, enableTypeAndUnitLabels, appendMetadata)
}
if otlpEnabled {
a.otlpWriteHandler = remote.NewOTLPWriteHandler(logger, registerer, ap, configFunc, remote.OTLPOptions{
ConvertDelta: otlpDeltaToCumulative,
NativeDelta: otlpNativeDeltaIngestion,
LookbackDelta: lookbackDelta,
IngestCTZeroSample: ctZeroIngestionEnabled,
IngestSTZeroSample: stZeroIngestionEnabled,
EnableTypeAndUnitLabels: enableTypeAndUnitLabels,
AppendMetadata: appendMetadata,
})
@ -293,7 +293,7 @@ type Options struct {
ConvertOTLPDelta bool
NativeOTLPDeltaIngestion bool
IsAgent bool
CTZeroIngestionEnabled bool
STZeroIngestionEnabled bool
EnableTypeAndUnitLabels bool
AppendMetadata bool
AppName string
@ -394,7 +394,7 @@ func New(logger *slog.Logger, o *Options) *Handler {
o.EnableOTLPWriteReceiver,
o.ConvertOTLPDelta,
o.NativeOTLPDeltaIngestion,
o.CTZeroIngestionEnabled,
o.STZeroIngestionEnabled,
o.LookbackDelta,
o.EnableTypeAndUnitLabels,
o.AppendMetadata,
o.AppendMetadata,
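A purely illustrative sketch of what the rename means for code that embeds the web handler: only the `Options` field name changes, as in the hunk above. The constructor function and its argument are hypothetical stand-ins, not code from this commit.

```go
// Hypothetical embedder code; only the renamed field comes from the diff above.
package example

import "github.com/prometheus/prometheus/web"

func webOptions(stZeroIngestion bool) *web.Options {
	return &web.Options{
		// Previously: CTZeroIngestionEnabled: stZeroIngestion,
		STZeroIngestionEnabled: stZeroIngestion,
	}
}
```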