fix(nhcb): flaky test TestConvertClassicHistogramsToNHCB (#17112)

* fix(nhcb): flaky test TestConvertClassicHistogramsToNHCB

The test was end-to-end: it actually scraped an HTTP endpoint and ran the scrape loop, which led to timing issues. I've simplified it to call the scrape loop's append directly. This isn't nice, as that is a private interface, but it gets rid of the flakiness, and there are already a bunch of tests doing this.

Signed-off-by: György Krajcsovits <gyorgy.krajcsovits@grafana.com>
commit d09db02854 (parent 70bf09cb2b)
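The fix replaces the end-to-end setup with a direct call into the scrape loop. A rough sketch of the resulting pattern, using the unexported test helpers that appear in the diff below (newBasicScrapeLoop, sl.append, teststorage.New) and assuming the test file's existing imports; this is illustrative, not the exact test code:

	// Build a scrape loop that appends straight into a test storage,
	// instead of standing up an HTTP test server and waiting for scrapes.
	simpleStorage := teststorage.New(t)
	defer simpleStorage.Close()
	sl := newBasicScrapeLoop(t, context.Background(), nil,
		func(ctx context.Context) storage.Appender { return simpleStorage.Appender(ctx) }, 0)

	// Feed the exposition bytes through the same parsing path a real scrape
	// would take, but synchronously, so there is no scrape timing to get wrong.
	app := simpleStorage.Appender(context.Background())
	sl.append(app, content, "text/plain", time.Now())
	require.NoError(t, app.Commit())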
@@ -4564,7 +4564,6 @@ metric: <
   %s
   %s
   >
-  timestamp_ms: 1234568
 >
 `, name, classic, expo)
 }
@@ -4780,31 +4779,29 @@ metric: <
 		require.Equal(t, expectedCount, count, "number of histogram series not as expected")
 	}
 
-	tru := true
-	fals := false
 	for metricsTextName, metricsText := range metricsTexts {
 		for name, tc := range map[string]struct {
-			alwaysScrapeClassicHistograms *bool
-			convertClassicHistToNHCB      *bool
+			alwaysScrapeClassicHistograms bool
+			convertClassicHistToNHCB      bool
 		}{
 			"convert with scrape": {
-				alwaysScrapeClassicHistograms: &tru,
-				convertClassicHistToNHCB:      &tru,
+				alwaysScrapeClassicHistograms: true,
+				convertClassicHistToNHCB:      true,
 			},
 			"convert without scrape": {
-				alwaysScrapeClassicHistograms: &fals,
-				convertClassicHistToNHCB:      &tru,
+				alwaysScrapeClassicHistograms: false,
+				convertClassicHistToNHCB:      true,
 			},
 			"scrape without convert": {
-				alwaysScrapeClassicHistograms: &tru,
-				convertClassicHistToNHCB:      &fals,
+				alwaysScrapeClassicHistograms: true,
+				convertClassicHistToNHCB:      false,
 			},
 			"scrape with nil convert": {
-				alwaysScrapeClassicHistograms: &tru,
+				alwaysScrapeClassicHistograms: true,
 			},
 			"neither scrape nor convert": {
-				alwaysScrapeClassicHistograms: &fals,
-				convertClassicHistToNHCB:      &fals,
+				alwaysScrapeClassicHistograms: false,
+				convertClassicHistToNHCB:      false,
 			},
 		} {
 			var expectedClassicHistCount, expectedNativeHistCount int
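Note that the hunk above also drops the *bool indirection: the scrape loop fields set later in the diff are plain bools, so an omitted case field now defaults to false and the address-taking tru/fals helpers become unnecessary. A minimal, hypothetical illustration of the difference:

	// With optional tri-state flags, table tests need pointer plumbing:
	type withPointers struct{ convert *bool } // nil means "not set"
	tru := true
	_ = withPointers{convert: &tru} // a literal true cannot be addressed directly

	// With plain bools, the zero value already expresses "off":
	type withBools struct{ convert bool } // false covers "off" and "not set"
	_ = withBools{convert: true}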
@@ -4813,19 +4810,19 @@ metric: <
 				expectedNativeHistCount = 1
 				expectCustomBuckets = false
 				expectedClassicHistCount = 0
-				if metricsText.hasClassic && tc.alwaysScrapeClassicHistograms != nil && *tc.alwaysScrapeClassicHistograms {
+				if metricsText.hasClassic && tc.alwaysScrapeClassicHistograms {
 					expectedClassicHistCount = 1
 				}
 			} else if metricsText.hasClassic {
 				switch {
-				case tc.convertClassicHistToNHCB == nil || !*tc.convertClassicHistToNHCB:
+				case !tc.convertClassicHistToNHCB:
 					expectedClassicHistCount = 1
 					expectedNativeHistCount = 0
-				case tc.alwaysScrapeClassicHistograms != nil && *tc.alwaysScrapeClassicHistograms && *tc.convertClassicHistToNHCB:
+				case tc.alwaysScrapeClassicHistograms && tc.convertClassicHistToNHCB:
 					expectedClassicHistCount = 1
 					expectedNativeHistCount = 1
 					expectCustomBuckets = true
-				case (tc.alwaysScrapeClassicHistograms == nil || !*tc.alwaysScrapeClassicHistograms) && *tc.convertClassicHistToNHCB:
+				case !tc.alwaysScrapeClassicHistograms && tc.convertClassicHistToNHCB:
 					expectedClassicHistCount = 0
 					expectedNativeHistCount = 1
 					expectCustomBuckets = true
@@ -4837,70 +4834,36 @@ metric: <
 			simpleStorage := teststorage.New(t)
 			defer simpleStorage.Close()
 
-			config := &config.ScrapeConfig{
-				JobName:                        "test",
-				SampleLimit:                    100,
-				Scheme:                         "http",
-				ScrapeInterval:                 model.Duration(50 * time.Millisecond),
-				ScrapeTimeout:                  model.Duration(49 * time.Millisecond),
-				AlwaysScrapeClassicHistograms:  tc.alwaysScrapeClassicHistograms,
-				ConvertClassicHistogramsToNHCB: tc.convertClassicHistToNHCB,
-				MetricNameValidationScheme:     model.UTF8Validation,
-				MetricNameEscapingScheme:       model.AllowUTF8,
-			}
+			sl := newBasicScrapeLoop(t, context.Background(), nil, func(ctx context.Context) storage.Appender { return simpleStorage.Appender(ctx) }, 0)
+			sl.alwaysScrapeClassicHist = tc.alwaysScrapeClassicHistograms
+			sl.convertClassicHistToNHCB = tc.convertClassicHistToNHCB
+			sl.enableNativeHistogramIngestion = true
+			app := simpleStorage.Appender(context.Background())
 
-			scrapeCount := 0
-			scraped := make(chan bool)
-
-			ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
-				if metricsText.contentType != "" {
-					w.Header().Set("Content-Type", `application/vnd.google.protobuf; proto=io.prometheus.client.MetricFamily; encoding=delimited`)
-					for _, text := range metricsText.text {
+			var content []byte
+			contentType := metricsText.contentType
+			switch contentType {
+			case "application/vnd.google.protobuf":
+				buf := &bytes.Buffer{}
+				for _, text := range metricsText.text {
 					// In case of protobuf, we have to create the binary representation.
 					pb := &dto.MetricFamily{}
 					// From text to proto message.
 					require.NoError(t, proto.UnmarshalText(text, pb))
-					// From proto message to binary protobuf.
-					protoBuf, err := proto.Marshal(pb)
-					require.NoError(t, err)
-
-					// Write first length, then binary protobuf.
-					varintBuf := binary.AppendUvarint(nil, uint64(len(protoBuf)))
-					buf.Write(varintBuf)
-					buf.Write(protoBuf)
-					w.Write(buf.Bytes())
+					buf.Write(protoMarshalDelimited(t, pb))
 				}
-				} else {
-					for _, text := range metricsText.text {
-						fmt.Fprint(w, text)
-					}
-				}
-				scrapeCount++
-				if scrapeCount > 2 {
-					close(scraped)
-				}
-			}))
-			defer ts.Close()
-
-			sp, err := newScrapePool(config, simpleStorage, 0, nil, nil, &Options{DiscoveryReloadInterval: model.Duration(10 * time.Millisecond), EnableNativeHistogramsIngestion: true}, newTestScrapeMetrics(t))
-			require.NoError(t, err)
-			defer sp.stop()
-
-			testURL, err := url.Parse(ts.URL)
-			require.NoError(t, err)
-			sp.Sync([]*targetgroup.Group{
-				{
-					Targets: []model.LabelSet{{model.AddressLabel: model.LabelValue(testURL.Host)}},
-				},
-			})
-			require.Eventually(t, func() bool { return len(sp.ActiveTargets()) == 1 }, 5*time.Second, 50*time.Millisecond)
-
-			select {
-			case <-time.After(5 * time.Second):
-				t.Fatalf("target was not scraped")
-			case <-scraped:
+				content = buf.Bytes()
+			case "text/plain", "":
+				// The input text fragments already have a newline at the
+				// end, so we just concatenate them without separator.
+				content = []byte(strings.Join(metricsText.text, ""))
+				contentType = "text/plain"
+			default:
+				t.Error("unexpected content type")
 			}
+			sl.append(app, content, contentType, time.Now())
+			require.NoError(t, app.Commit())
 
 			ctx, cancel := context.WithCancel(context.Background())
 			defer cancel()
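The inline varint framing in the old handler is now a single call to a protoMarshalDelimited helper. Its body is not part of this commit's hunks, so the following is an assumption reconstructed from the removed lines (and it assumes the test file's encoding/binary, gogo proto, dto, and testify imports): length-delimited protobuf framing, as used by the Prometheus protobuf exposition format.

	// Presumed shape of protoMarshalDelimited, based on the removed code:
	// marshal the MetricFamily, then prefix it with its length as a uvarint.
	func protoMarshalDelimited(t *testing.T, pb *dto.MetricFamily) []byte {
		t.Helper()
		protoBuf, err := proto.Marshal(pb)
		require.NoError(t, err)
		varintBuf := binary.AppendUvarint(nil, uint64(len(protoBuf)))
		return append(varintBuf, protoBuf...)
	}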