scraper: fix UTF-8 scraping header always sent with PrometheusText1.0.0
The `Accept` header should not include `escaping=allow-utf-8` unless UTF-8 names are explicitly requested. Conveniently, there was already a test covering this header's value; it just needed updating so that it also asserts the parameter is absent in the cases where we don't expect it to be set. I also converted those tests into table tests to make failures clearer.

Issue: https://github.com/prometheus/prometheus/issues/15857

Signed-off-by: Matt Hughes <mhughes@uw.co.uk>
commit 5868e36d91 (parent 86bb04783c)
@@ -523,7 +523,7 @@ var (
 	ScrapeProtocolsHeaders = map[ScrapeProtocol]string{
 		PrometheusProto:      "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited",
 		PrometheusText0_0_4:  "text/plain;version=0.0.4",
-		PrometheusText1_0_0:  "text/plain;version=1.0.0;escaping=allow-utf-8",
+		PrometheusText1_0_0:  "text/plain;version=1.0.0",
 		OpenMetricsText0_0_1: "application/openmetrics-text;version=0.0.1",
 		OpenMetricsText1_0_0: "application/openmetrics-text;version=1.0.0",
 	}
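For context, the change above removes the unconditional escaping parameter from the per-protocol header values; the idea is that the scraper appends escaping=allow-utf-8 at content-negotiation time only when UTF-8 metric names are explicitly requested. Below is a minimal, self-contained sketch of that idea. The names buildAcceptHeader and protocolHeaders, and details such as the q-value weighting, are illustrative assumptions, not the actual body of the acceptHeader helper used in the test diff further down.

package main

import (
	"fmt"
	"strings"
)

// protocolHeaders mirrors the idea of ScrapeProtocolsHeaders above: the base
// content type for each protocol, with no escaping parameter baked in.
// (Hypothetical stand-in, not the Prometheus map itself.)
var protocolHeaders = map[string]string{
	"PrometheusText1.0.0":  "text/plain;version=1.0.0",
	"PrometheusText0.0.4":  "text/plain;version=0.0.4",
	"OpenMetricsText1.0.0": "application/openmetrics-text;version=1.0.0",
}

// buildAcceptHeader joins the preferred protocols with descending q-values and
// appends escaping=allow-utf-8 to each entry only when allowUTF8 is true.
func buildAcceptHeader(protocols []string, allowUTF8 bool) string {
	vals := make([]string, 0, len(protocols)+1)
	weight := len(protocols) + 1
	for _, p := range protocols {
		v := protocolHeaders[p]
		if allowUTF8 {
			v += ";escaping=allow-utf-8"
		}
		vals = append(vals, fmt.Sprintf("%s;q=0.%d", v, weight))
		weight--
	}
	// Fall back to anything the target can serve.
	vals = append(vals, fmt.Sprintf("*/*;q=0.%d", weight))
	return strings.Join(vals, ",")
}

func main() {
	prefs := []string{"PrometheusText1.0.0", "PrometheusText0.0.4"}
	fmt.Println(buildAcceptHeader(prefs, false)) // no escaping parameter sent
	fmt.Println(buildAcceptHeader(prefs, true))  // escaping=allow-utf-8 on each entry
}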
@@ -2889,6 +2889,8 @@ func TestTargetScraperScrapeOK(t *testing.T) {
 			accept := r.Header.Get("Accept")
 			if allowUTF8 {
 				require.Containsf(t, accept, "escaping=allow-utf-8", "Expected Accept header to allow utf8, got %q", accept)
+			} else {
+				require.NotContainsf(t, accept, "escaping=allow-utf-8", "Expected Accept header to not allow utf8, got %q", accept)
 			}
 			if protobufParsing {
 				require.True(t, strings.HasPrefix(accept, "application/vnd.google.protobuf;"),
@@ -2924,7 +2926,7 @@ func TestTargetScraperScrapeOK(t *testing.T) {
 		panic(err)
 	}
 
-	runTest := func(acceptHeader string) {
+	runTest := func(t *testing.T, acceptHeader string) {
 		ts := &targetScraper{
 			Target: &Target{
 				labels: labels.FromStrings(
@@ -2951,14 +2953,43 @@
 		require.Equal(t, "metric_a 1\nmetric_b 2\n", buf.String())
 	}
 
-	runTest(acceptHeader(config.DefaultScrapeProtocols, model.LegacyValidation))
-	protobufParsing = true
-	runTest(acceptHeader(config.DefaultProtoFirstScrapeProtocols, model.LegacyValidation))
-	protobufParsing = false
-	allowUTF8 = true
-	runTest(acceptHeader(config.DefaultScrapeProtocols, model.UTF8Validation))
-	protobufParsing = true
-	runTest(acceptHeader(config.DefaultProtoFirstScrapeProtocols, model.UTF8Validation))
+	for _, tc := range []struct {
+		scrapeProtocols []config.ScrapeProtocol
+		scheme          model.ValidationScheme
+		protobufParsing bool
+		allowUTF8       bool
+	}{
+		{
+			scrapeProtocols: config.DefaultScrapeProtocols,
+			scheme:          model.LegacyValidation,
+			protobufParsing: false,
+			allowUTF8:       false,
+		},
+		{
+			scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
+			scheme:          model.LegacyValidation,
+			protobufParsing: true,
+			allowUTF8:       false,
+		},
+		{
+			scrapeProtocols: config.DefaultScrapeProtocols,
+			scheme:          model.UTF8Validation,
+			protobufParsing: false,
+			allowUTF8:       true,
+		},
+		{
+			scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
+			scheme:          model.UTF8Validation,
+			protobufParsing: true,
+			allowUTF8:       true,
+		},
+	} {
+		t.Run(fmt.Sprintf("%+v", tc), func(t *testing.T) {
+			protobufParsing = tc.protobufParsing
+			allowUTF8 = tc.allowUTF8
+			runTest(t, acceptHeader(tc.scrapeProtocols, tc.scheme))
+		})
+	}
 }
 
 func TestTargetScrapeScrapeCancel(t *testing.T) {
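Beyond the header fix, the hunk above shows the table-test conversion mentioned in the commit message: runTest now receives the subtest's *testing.T, and each protocol/validation combination runs under its own t.Run name, so a failure points at the exact case that produced it. A generic, self-contained sketch of the same pattern follows; buildAccept and checkAccept are hypothetical stand-ins, not Prometheus code.

package scrapeheader

import (
	"strings"
	"testing"
)

// buildAccept is a stand-in for the real acceptHeader helper: it appends the
// escaping parameter only when UTF-8 metric names are allowed.
func buildAccept(allowUTF8 bool) string {
	h := "text/plain;version=1.0.0"
	if allowUTF8 {
		h += ";escaping=allow-utf-8"
	}
	return h
}

// checkAccept plays the role of runTest: it takes the subtest's *testing.T so
// a failure is attributed to the named case that produced it.
func checkAccept(t *testing.T, accept string, wantUTF8 bool) {
	t.Helper()
	if got := strings.Contains(accept, "escaping=allow-utf-8"); got != wantUTF8 {
		t.Fatalf("Accept header %q: escaping=allow-utf-8 present=%v, want %v", accept, got, wantUTF8)
	}
}

func TestAcceptHeaderEscaping(t *testing.T) {
	for _, tc := range []struct {
		name      string
		allowUTF8 bool
	}{
		{name: "legacy validation", allowUTF8: false},
		{name: "utf-8 validation", allowUTF8: true},
	} {
		t.Run(tc.name, func(t *testing.T) {
			checkAccept(t, buildAccept(tc.allowUTF8), tc.allowUTF8)
		})
	}
}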