scrape: Add config option for escaping scheme request. (#16066)

The new metric_name_escaping_scheme config option works in parallel with metric_name_validation_scheme and controls which escaping scheme is requested when scraping. When not specified, underscores are requested if the validation scheme is set to legacy, and allow-utf-8 if it is set to utf8. This setting lets users permit UTF-8 names while still explicitly requesting an escaping scheme other than UTF-8.
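A minimal sketch of the intended use (not part of this change; it assumes only the config and model identifiers that appear in the diff below): a ScrapeConfig created programmatically must set both schemes explicitly, while YAML configs may leave them blank and let Validate fill them in.

package main

import (
	"fmt"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/config"
)

func main() {
	// Prometheus sets this process-wide at startup; Validate requires it.
	model.NameValidationScheme = model.UTF8Validation

	cfg := &config.ScrapeConfig{
		JobName: "example",
		// Accept UTF-8 metric names locally...
		MetricNameValidationScheme: config.UTF8ValidationConfig,
		// ...but still ask scraped targets to escape names to underscores.
		MetricNameEscapingScheme: model.EscapeUnderscores,
	}
	if err := cfg.Validate(config.DefaultGlobalConfig); err != nil {
		fmt.Println("invalid scrape config:", err)
	}
}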

Fixes https://github.com/prometheus/prometheus/issues/16034

Built on https://github.com/prometheus/prometheus/pull/16080

Signed-off-by: Owen Williams <owen.williams@grafana.com>
Owen Williams 2025-03-26 18:27:28 -04:00 committed by GitHub
parent de399eb09c
commit 6566c5a2b3
9 changed files with 900 additions and 558 deletions


@@ -479,8 +479,13 @@ type GlobalConfig struct {
// Keep no more than this many dropped targets per job.
// 0 means no limit.
KeepDroppedTargets uint `yaml:"keep_dropped_targets,omitempty"`
// Allow UTF8 Metric and Label Names.
// Allow UTF8 Metric and Label Names. Can be blank in config files but must
// have a value if a ScrapeConfig is created programmatically.
MetricNameValidationScheme string `yaml:"metric_name_validation_scheme,omitempty"`
// Metric name escaping mode to request through content negotiation. Can be
// blank in config files but must have a value if a ScrapeConfig is created
// programmatically.
MetricNameEscapingScheme string `yaml:"metric_name_escaping_scheme,omitempty"`
}
// ScrapeProtocol represents a supported protocol for scraping metrics.
@@ -719,8 +724,13 @@ type ScrapeConfig struct {
// Keep no more than this many dropped targets per job.
// 0 means no limit.
KeepDroppedTargets uint `yaml:"keep_dropped_targets,omitempty"`
// Allow UTF8 Metric and Label Names.
// Allow UTF8 Metric and Label Names. Can be blank in config files but must
// have a value if a ScrapeConfig is created programmatically.
MetricNameValidationScheme string `yaml:"metric_name_validation_scheme,omitempty"`
// Metric name escaping mode to request through content negotiation. Can be
// blank in config files but must have a value if a ScrapeConfig is created
// programmatically.
MetricNameEscapingScheme string `yaml:"metric_name_escaping_scheme,omitempty"`
// We cannot do proper Go type embedding below as the parser will then parse
// values arbitrarily into the overflow maps of further-down types.
@@ -841,13 +851,48 @@ func (c *ScrapeConfig) Validate(globalConfig GlobalConfig) error {
if model.NameValidationScheme != model.UTF8Validation {
return errors.New("model.NameValidationScheme must be set to UTF8")
}
switch globalConfig.MetricNameValidationScheme {
case "", LegacyValidationConfig, UTF8ValidationConfig:
case "":
globalConfig.MetricNameValidationScheme = UTF8ValidationConfig
case LegacyValidationConfig, UTF8ValidationConfig:
default:
return fmt.Errorf("unknown name validation method specified, must be either 'legacy' or 'utf8', got %s", globalConfig.MetricNameValidationScheme)
return fmt.Errorf("unknown global name validation method specified, must be either 'legacy' or 'utf8', got %s", globalConfig.MetricNameValidationScheme)
}
if c.MetricNameValidationScheme == "" {
// ScrapeConfig validation scheme matches global if left blank.
switch c.MetricNameValidationScheme {
case "":
c.MetricNameValidationScheme = globalConfig.MetricNameValidationScheme
case LegacyValidationConfig, UTF8ValidationConfig:
default:
return fmt.Errorf("unknown scrape config name validation method specified, must be either 'legacy' or 'utf8', got %s", c.MetricNameValidationScheme)
}
// Escaping scheme is based on the validation scheme if left blank.
switch globalConfig.MetricNameEscapingScheme {
case "":
if globalConfig.MetricNameValidationScheme == LegacyValidationConfig {
globalConfig.MetricNameEscapingScheme = model.EscapeUnderscores
} else {
globalConfig.MetricNameEscapingScheme = model.AllowUTF8
}
case model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues:
default:
return fmt.Errorf("unknown global name escaping method specified, must be one of '%s', '%s', '%s', or '%s', got %s", model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues, globalConfig.MetricNameValidationScheme)
}
if c.MetricNameEscapingScheme == "" {
c.MetricNameEscapingScheme = globalConfig.MetricNameEscapingScheme
}
switch c.MetricNameEscapingScheme {
case model.AllowUTF8:
if c.MetricNameValidationScheme != UTF8ValidationConfig {
return errors.New("utf8 metric names requested but validation scheme is not set to UTF8")
}
case model.EscapeUnderscores, model.EscapeDots, model.EscapeValues:
default:
return fmt.Errorf("unknown scrape config name escaping method specified, must be one of '%s', '%s', '%s', or '%s', got %s", model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues, c.MetricNameValidationScheme)
}
return nil
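For illustration, a sketch reusing the imports from the example under the commit message: the one combination Validate rejects outright is requesting raw UTF-8 names while validating against the legacy character set.

// Fails: "utf8 metric names requested but validation scheme is not set to UTF8".
cfg := &config.ScrapeConfig{
	JobName:                    "example",
	MetricNameValidationScheme: config.LegacyValidationConfig,
	MetricNameEscapingScheme:   model.AllowUTF8,
}
fmt.Println(cfg.Validate(config.DefaultGlobalConfig))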
@@ -858,6 +903,20 @@ func (c *ScrapeConfig) MarshalYAML() (interface{}, error) {
return discovery.MarshalYAMLWithInlineConfigs(c)
}
// ToValidationScheme returns the validation scheme for the given string config value.
func ToValidationScheme(s string) (validationScheme model.ValidationScheme, err error) {
switch s {
case UTF8ValidationConfig:
validationScheme = model.UTF8Validation
case LegacyValidationConfig:
validationScheme = model.LegacyValidation
default:
return model.UTF8Validation, fmt.Errorf("invalid metric name validation scheme: %s", s)
}
return validationScheme, nil
}
// StorageConfig configures runtime reloadable configuration options.
type StorageConfig struct {
TSDBConfig *TSDBConfig `yaml:"tsdb,omitempty"`
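A usage sketch for the new ToValidationScheme helper (same imports as above); it maps the config-file strings onto the model package's scheme values:

scheme, err := config.ToValidationScheme(config.LegacyValidationConfig)
if err != nil {
	panic(err) // only values other than "legacy" and "utf8" error out
}
fmt.Println(scheme == model.LegacyValidation) // true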

File diff suppressed because it is too large.


@@ -1,4 +1,5 @@
global:
metric_name_validation_scheme: legacy
metric_name_escaping_scheme: dots
scrape_configs:
- job_name: prometheus


@@ -1,5 +1,7 @@
global:
metric_name_validation_scheme: legacy
metric_name_escaping_scheme: values
scrape_configs:
- job_name: prometheus
metric_name_validation_scheme: utf8
metric_name_escaping_scheme: dots


@@ -1,3 +1,4 @@
scrape_configs:
- job_name: prometheus
metric_name_validation_scheme: legacy
metric_name_escaping_scheme: values


@@ -0,0 +1,5 @@
global:
metric_name_validation_scheme: legacy
metric_name_escaping_scheme: allow-utf-8
scrape_configs:
- job_name: prometheus


@@ -469,6 +469,22 @@ metric_relabel_configs:
# underscores.
[ metric_name_validation_scheme <string> | default "utf8" ]
# Specifies the character escaping scheme that will be requested when scraping
# for metric and label names that do not conform to the legacy Prometheus
# character set. Available options are:
# * `allow-utf-8`: Full UTF-8 support, no escaping needed.
# * `underscores`: Escape all legacy-invalid characters to underscores.
# * `dots`: Escape dots to `_dot_`, underscores to `__`, and all other
# legacy-invalid characters to underscores.
# * `values`: Prepend the name with `U__` and replace all invalid
# characters with their Unicode value, surrounded by underscores. Single
# underscores are replaced with double underscores.
# e.g. "U__my_2e_dotted_2e_name".
# If this value is left blank, Prometheus will default to `allow-utf-8` if the
# validation scheme for the current scrape config is set to `utf8`, or
# `underscores` if the validation scheme is set to `legacy`.
[ metric_name_escaping_scheme <string> | default "allow-utf-8" ]
# Limit on total number of positive and negative buckets allowed in a single
# native histogram. The resolution of a histogram with more buckets will be
# reduced until the number of buckets is within the limit. If the limit cannot
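To make the four schemes concrete, a sketch of how a legacy-invalid name is rewritten under each one; model.ToEscapingScheme appears in this diff, while model.EscapeName is assumed from prometheus/common:

name := "my.dotted.name"
for _, s := range []string{"allow-utf-8", "underscores", "dots", "values"} {
	scheme, err := model.ToEscapingScheme(s)
	if err != nil {
		panic(err)
	}
	fmt.Printf("%-12s %s\n", s, model.EscapeName(name, scheme))
}
// allow-utf-8  my.dotted.name
// underscores  my_dotted_name
// dots         my_dot_dotted_dot_name
// values       U__my_2e_dotted_2e_name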


@@ -102,6 +102,9 @@ type scrapePool struct {
scrapeFailureLogger FailureLogger
scrapeFailureLoggerMtx sync.RWMutex
validationScheme model.ValidationScheme
escapingScheme model.EscapingScheme
}
type labelLimits struct {
@@ -124,7 +127,6 @@ type scrapeLoopOptions struct {
timeout time.Duration
alwaysScrapeClassicHist bool
convertClassicHistToNHCB bool
validationScheme model.ValidationScheme
fallbackScrapeProtocol string
mrc []*relabel.Config
@@ -147,6 +149,16 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
return nil, fmt.Errorf("error creating HTTP client: %w", err)
}
validationScheme, err := config.ToValidationScheme(cfg.MetricNameValidationScheme)
if err != nil {
return nil, fmt.Errorf("invalid metric name validation scheme: %w", err)
}
escapingScheme, err := model.ToEscapingScheme(cfg.MetricNameEscapingScheme)
if err != nil {
return nil, fmt.Errorf("invalid metric name escaping scheme: %w", err)
}
ctx, cancel := context.WithCancel(context.Background())
sp := &scrapePool{
cancel: cancel,
@@ -160,6 +172,8 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
logger: logger,
metrics: metrics,
httpOpts: options.HTTPClientOptions,
validationScheme: validationScheme,
escapingScheme: escapingScheme,
}
sp.newLoop = func(opts scrapeLoopOptions) loop {
// Update the targets retrieval function for metadata to a new scrape cache.
@@ -201,7 +215,8 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
options.PassMetadataInContext,
metrics,
options.skipOffsetting,
opts.validationScheme,
sp.validationScheme,
sp.escapingScheme,
opts.fallbackScrapeProtocol,
)
}
@@ -309,6 +324,17 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
sp.config = cfg
oldClient := sp.client
sp.client = client
validationScheme, err := config.ToValidationScheme(cfg.MetricNameValidationScheme)
if err != nil {
return fmt.Errorf("invalid metric name validation scheme: %w", err)
}
sp.validationScheme = validationScheme
escapingScheme, err := model.ToEscapingScheme(cfg.MetricNameEscapingScheme)
if err != nil {
return fmt.Errorf("invalid metric name escaping scheme: %w", err)
}
sp.escapingScheme = escapingScheme
sp.metrics.targetScrapePoolTargetLimit.WithLabelValues(sp.config.JobName).Set(float64(sp.config.TargetLimit))
@@ -344,11 +370,6 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
convertClassicHistToNHCB = sp.config.ConvertClassicHistogramsToNHCB
)
validationScheme := model.UTF8Validation
if sp.config.MetricNameValidationScheme == config.LegacyValidationConfig {
validationScheme = model.LegacyValidation
}
sp.targetMtx.Lock()
forcedErr := sp.refreshTargetLimitErr()
@@ -369,7 +390,7 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
client: sp.client,
timeout: targetTimeout,
bodySizeLimit: bodySizeLimit,
acceptHeader: acceptHeader(sp.config.ScrapeProtocols, validationScheme),
acceptHeader: acceptHeader(sp.config.ScrapeProtocols, sp.escapingScheme),
acceptEncodingHeader: acceptEncodingHeader(enableCompression),
metrics: sp.metrics,
}
@@ -388,7 +409,6 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
cache: cache,
interval: targetInterval,
timeout: targetTimeout,
validationScheme: validationScheme,
fallbackScrapeProtocol: fallbackScrapeProtocol,
alwaysScrapeClassicHist: alwaysScrapeClassicHist,
convertClassicHistToNHCB: convertClassicHistToNHCB,
@@ -506,11 +526,6 @@ func (sp *scrapePool) sync(targets []*Target) {
convertClassicHistToNHCB = sp.config.ConvertClassicHistogramsToNHCB
)
validationScheme := model.UTF8Validation
if sp.config.MetricNameValidationScheme == config.LegacyValidationConfig {
validationScheme = model.LegacyValidation
}
sp.targetMtx.Lock()
for _, t := range targets {
hash := t.hash()
@@ -526,7 +541,7 @@ func (sp *scrapePool) sync(targets []*Target) {
client: sp.client,
timeout: timeout,
bodySizeLimit: bodySizeLimit,
acceptHeader: acceptHeader(sp.config.ScrapeProtocols, validationScheme),
acceptHeader: acceptHeader(sp.config.ScrapeProtocols, sp.escapingScheme),
acceptEncodingHeader: acceptEncodingHeader(enableCompression),
metrics: sp.metrics,
}
@@ -546,7 +561,6 @@ func (sp *scrapePool) sync(targets []*Target) {
timeout: timeout,
alwaysScrapeClassicHist: alwaysScrapeClassicHist,
convertClassicHistToNHCB: convertClassicHistToNHCB,
validationScheme: validationScheme,
fallbackScrapeProtocol: fallbackScrapeProtocol,
})
if err != nil {
@@ -777,13 +791,14 @@ var errBodySizeLimit = errors.New("body size limit exceeded")
// acceptHeader transforms preference from the options into specific header values as
// https://www.rfc-editor.org/rfc/rfc9110.html#name-accept defines.
// No validation is done here; we expect scrape protocols to be validated already.
func acceptHeader(sps []config.ScrapeProtocol, scheme model.ValidationScheme) string {
func acceptHeader(sps []config.ScrapeProtocol, scheme model.EscapingScheme) string {
var vals []string
weight := len(config.ScrapeProtocolsHeaders) + 1
for _, sp := range sps {
val := config.ScrapeProtocolsHeaders[sp]
if scheme == model.UTF8Validation {
val += ";" + config.UTF8NamesHeader
// Escaping header is only valid for newer versions of the text formats.
if sp == config.PrometheusText1_0_0 || sp == config.OpenMetricsText1_0_0 {
val += ";" + model.EscapingKey + "=" + scheme.String()
}
val += fmt.Sprintf(";q=0.%d", weight)
vals = append(vals, val)
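A sketch of the effect (within package scrape; the exact strings are pinned down by the new TestAcceptHeader below): only the version=1.0.0 text formats carry the escaping parameter, while older formats are listed without it.

h := acceptHeader(config.DefaultScrapeProtocols, model.UnderscoreEscaping)
// h == "application/openmetrics-text;version=1.0.0;escaping=underscores;q=0.6," +
//	"application/openmetrics-text;version=0.0.1;q=0.5," +
//	"text/plain;version=1.0.0;escaping=underscores;q=0.4," +
//	"text/plain;version=0.0.4;q=0.3,*/*;q=0.2"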
@@ -911,6 +926,7 @@ type scrapeLoop struct {
alwaysScrapeClassicHist bool
convertClassicHistToNHCB bool
validationScheme model.ValidationScheme
escapingScheme model.EscapingScheme
fallbackScrapeProtocol string
// Feature flagged options.
@@ -1230,6 +1246,7 @@ func newScrapeLoop(ctx context.Context,
metrics *scrapeMetrics,
skipOffsetting bool,
validationScheme model.ValidationScheme,
escapingScheme model.EscapingScheme,
fallbackScrapeProtocol string,
) *scrapeLoop {
if l == nil {
@@ -1284,6 +1301,7 @@ func newScrapeLoop(ctx context.Context,
metrics: metrics,
skipOffsetting: skipOffsetting,
validationScheme: validationScheme,
escapingScheme: escapingScheme,
fallbackScrapeProtocol: fallbackScrapeProtocol,
}
sl.ctx, sl.cancel = context.WithCancel(ctx)


@@ -82,10 +82,14 @@ func newTestScrapeMetrics(t testing.TB) *scrapeMetrics {
func TestNewScrapePool(t *testing.T) {
var (
app = &nopAppendable{}
cfg = &config.ScrapeConfig{}
sp, _ = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
app = &nopAppendable{}
cfg = &config.ScrapeConfig{
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
sp, err = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
)
require.NoError(t, err)
a, ok := sp.appendable.(*nopAppendable)
require.True(t, ok, "Failure to append.")
@@ -321,8 +325,10 @@ func TestDroppedTargetsList(t *testing.T) {
var (
app = &nopAppendable{}
cfg = &config.ScrapeConfig{
JobName: "dropMe",
ScrapeInterval: model.Duration(1),
JobName: "dropMe",
ScrapeInterval: model.Duration(1),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
RelabelConfigs: []*relabel.Config{
{
Action: relabel.Drop,
@@ -366,8 +372,10 @@ func TestDiscoveredLabelsUpdate(t *testing.T) {
// These are used when syncing, so they need to be set to avoid a panic.
sp.config = &config.ScrapeConfig{
ScrapeInterval: model.Duration(1),
ScrapeTimeout: model.Duration(1),
ScrapeInterval: model.Duration(1),
ScrapeTimeout: model.Duration(1),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
sp.activeTargets = make(map[uint64]*Target)
t1 := &Target{
@@ -496,8 +504,10 @@ func TestScrapePoolReload(t *testing.T) {
stopped := map[uint64]bool{}
reloadCfg := &config.ScrapeConfig{
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
// On starting to run, new loops created on reload check whether their preceding
// equivalents have been stopped.
@@ -588,8 +598,10 @@ func TestScrapePoolReload(t *testing.T) {
func TestScrapePoolReloadPreserveRelabeledIntervalTimeout(t *testing.T) {
reloadCfg := &config.ScrapeConfig{
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
newLoop := func(opts scrapeLoopOptions) loop {
l := &testLoop{interval: opts.interval, timeout: opts.timeout}
@@ -687,9 +699,11 @@ func TestScrapePoolTargetLimit(t *testing.T) {
reloadWithLimit := func(l uint) {
limit = l
require.NoError(t, sp.reload(&config.ScrapeConfig{
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
TargetLimit: l,
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
TargetLimit: l,
}))
}
@@ -776,7 +790,10 @@ func TestScrapePoolTargetLimit(t *testing.T) {
}
func TestScrapePoolAppender(t *testing.T) {
cfg := &config.ScrapeConfig{}
cfg := &config.ScrapeConfig{
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
app := &nopAppendable{}
sp, _ := newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
@@ -849,7 +866,12 @@ func TestScrapePoolRaces(t *testing.T) {
interval, _ := model.ParseDuration("1s")
timeout, _ := model.ParseDuration("500ms")
newConfig := func() *config.ScrapeConfig {
return &config.ScrapeConfig{ScrapeInterval: interval, ScrapeTimeout: timeout}
return &config.ScrapeConfig{
ScrapeInterval: interval,
ScrapeTimeout: timeout,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
}
sp, _ := newScrapePool(newConfig(), &nopAppendable{}, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
tgts := []*targetgroup.Group{
@@ -919,8 +941,10 @@ func TestScrapePoolScrapeLoopsStarted(t *testing.T) {
}
require.NoError(t, sp.reload(&config.ScrapeConfig{
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}))
sp.Sync(tgs)
@@ -963,7 +987,8 @@ func newBasicScrapeLoopWithFallback(t testing.TB, ctx context.Context, scraper s
false,
newTestScrapeMetrics(t),
false,
model.LegacyValidation,
model.UTF8Validation,
model.NoEscaping,
fallback,
)
}
@@ -1109,7 +1134,8 @@ func TestScrapeLoopRun(t *testing.T) {
false,
scrapeMetrics,
false,
model.LegacyValidation,
model.UTF8Validation,
model.NoEscaping,
"",
)
@@ -1256,7 +1282,8 @@ func TestScrapeLoopMetadata(t *testing.T) {
false,
scrapeMetrics,
false,
model.LegacyValidation,
model.UTF8Validation,
model.NoEscaping,
"",
)
defer cancel()
@@ -3041,6 +3068,47 @@ func TestScrapeLoopOutOfBoundsTimeError(t *testing.T) {
require.Equal(t, 0, seriesAdded)
}
func TestAcceptHeader(t *testing.T) {
tests := []struct {
name string
scrapeProtocols []config.ScrapeProtocol
scheme model.EscapingScheme
expectedHeader string
}{
{
name: "default scrape protocols with underscore escaping",
scrapeProtocols: config.DefaultScrapeProtocols,
scheme: model.UnderscoreEscaping,
expectedHeader: "application/openmetrics-text;version=1.0.0;escaping=underscores;q=0.6,application/openmetrics-text;version=0.0.1;q=0.5,text/plain;version=1.0.0;escaping=underscores;q=0.4,text/plain;version=0.0.4;q=0.3,*/*;q=0.2",
},
{
name: "default proto first scrape protocols with underscore escaping",
scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
scheme: model.DotsEscaping,
expectedHeader: "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.6,application/openmetrics-text;version=1.0.0;escaping=dots;q=0.5,application/openmetrics-text;version=0.0.1;q=0.4,text/plain;version=1.0.0;escaping=dots;q=0.3,text/plain;version=0.0.4;q=0.2,*/*;q=0.1",
},
{
name: "default scrape protocols with no escaping",
scrapeProtocols: config.DefaultScrapeProtocols,
scheme: model.NoEscaping,
expectedHeader: "application/openmetrics-text;version=1.0.0;escaping=allow-utf-8;q=0.6,application/openmetrics-text;version=0.0.1;q=0.5,text/plain;version=1.0.0;escaping=allow-utf-8;q=0.4,text/plain;version=0.0.4;q=0.3,*/*;q=0.2",
},
{
name: "default proto first scrape protocols with no escaping",
scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
scheme: model.NoEscaping,
expectedHeader: "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.6,application/openmetrics-text;version=1.0.0;escaping=allow-utf-8;q=0.5,application/openmetrics-text;version=0.0.1;q=0.4,text/plain;version=1.0.0;escaping=allow-utf-8;q=0.3,text/plain;version=0.0.4;q=0.2,*/*;q=0.1",
},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
header := acceptHeader(tc.scrapeProtocols, tc.scheme)
require.Equal(t, tc.expectedHeader, header)
})
}
}
func TestTargetScraperScrapeOK(t *testing.T) {
const (
configTimeout = 1500 * time.Millisecond
@@ -3124,31 +3192,31 @@ func TestTargetScraperScrapeOK(t *testing.T) {
for _, tc := range []struct {
scrapeProtocols []config.ScrapeProtocol
scheme model.ValidationScheme
scheme model.EscapingScheme
protobufParsing bool
allowUTF8 bool
}{
{
scrapeProtocols: config.DefaultScrapeProtocols,
scheme: model.LegacyValidation,
scheme: model.UnderscoreEscaping,
protobufParsing: false,
allowUTF8: false,
},
{
scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
scheme: model.LegacyValidation,
scheme: model.UnderscoreEscaping,
protobufParsing: true,
allowUTF8: false,
},
{
scrapeProtocols: config.DefaultScrapeProtocols,
scheme: model.UTF8Validation,
scheme: model.NoEscaping,
protobufParsing: false,
allowUTF8: true,
},
{
scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
scheme: model.UTF8Validation,
scheme: model.NoEscaping,
protobufParsing: true,
allowUTF8: true,
},
@@ -3185,7 +3253,7 @@ func TestTargetScrapeScrapeCancel(t *testing.T) {
scrapeConfig: &config.ScrapeConfig{},
},
client: http.DefaultClient,
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping),
}
ctx, cancel := context.WithCancel(context.Background())
@@ -3241,7 +3309,7 @@ func TestTargetScrapeScrapeNotFound(t *testing.T) {
scrapeConfig: &config.ScrapeConfig{},
},
client: http.DefaultClient,
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping),
}
resp, err := ts.scrape(context.Background())
@@ -3286,7 +3354,7 @@ func TestTargetScraperBodySizeLimit(t *testing.T) {
},
client: http.DefaultClient,
bodySizeLimit: bodySizeLimit,
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping),
metrics: newTestScrapeMetrics(t),
}
var buf bytes.Buffer
@@ -3546,15 +3614,20 @@ func TestReuseScrapeCache(t *testing.T) {
var (
app = &nopAppendable{}
cfg = &config.ScrapeConfig{
JobName: "Prometheus",
ScrapeTimeout: model.Duration(5 * time.Second),
ScrapeInterval: model.Duration(5 * time.Second),
MetricsPath: "/metrics",
JobName: "Prometheus",
ScrapeTimeout: model.Duration(5 * time.Second),
ScrapeInterval: model.Duration(5 * time.Second),
MetricsPath: "/metrics",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
sp, _ = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
t1 = &Target{
labels: labels.FromStrings("labelNew", "nameNew", "labelNew1", "nameNew1", "labelNew2", "nameNew2"),
scrapeConfig: &config.ScrapeConfig{},
labels: labels.FromStrings("labelNew", "nameNew", "labelNew1", "nameNew1", "labelNew2", "nameNew2"),
scrapeConfig: &config.ScrapeConfig{
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
}
proxyURL, _ = url.Parse("http://localhost:2128")
)
@@ -3568,40 +3641,48 @@ func TestReuseScrapeCache(t *testing.T) {
{
keep: true,
newConfig: &config.ScrapeConfig{
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(5 * time.Second),
MetricsPath: "/metrics",
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(5 * time.Second),
MetricsPath: "/metrics",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
keep: false,
newConfig: &config.ScrapeConfig{
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
keep: true,
newConfig: &config.ScrapeConfig{
JobName: "Prometheus",
SampleLimit: 400,
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
JobName: "Prometheus",
SampleLimit: 400,
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
keep: false,
newConfig: &config.ScrapeConfig{
JobName: "Prometheus",
HonorTimestamps: true,
SampleLimit: 400,
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
JobName: "Prometheus",
HonorTimestamps: true,
SampleLimit: 400,
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
@@ -3613,64 +3694,76 @@ func TestReuseScrapeCache(t *testing.T) {
HTTPClientConfig: config_util.HTTPClientConfig{
ProxyConfig: config_util.ProxyConfig{ProxyURL: config_util.URL{URL: proxyURL}},
},
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
keep: false,
newConfig: &config.ScrapeConfig{
JobName: "Prometheus",
HonorTimestamps: true,
HonorLabels: true,
SampleLimit: 400,
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
JobName: "Prometheus",
HonorTimestamps: true,
HonorLabels: true,
SampleLimit: 400,
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
keep: false,
newConfig: &config.ScrapeConfig{
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics",
LabelLimit: 1,
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics",
LabelLimit: 1,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
keep: false,
newConfig: &config.ScrapeConfig{
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics",
LabelLimit: 15,
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics",
LabelLimit: 15,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
keep: false,
newConfig: &config.ScrapeConfig{
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics",
LabelLimit: 15,
LabelNameLengthLimit: 5,
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics",
LabelLimit: 15,
LabelNameLengthLimit: 5,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
keep: false,
newConfig: &config.ScrapeConfig{
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics",
LabelLimit: 15,
LabelNameLengthLimit: 5,
LabelValueLengthLimit: 7,
JobName: "Prometheus",
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics",
LabelLimit: 15,
LabelNameLengthLimit: 5,
LabelValueLengthLimit: 7,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
}
@@ -3730,10 +3823,12 @@ func TestReuseCacheRace(t *testing.T) {
var (
app = &nopAppendable{}
cfg = &config.ScrapeConfig{
JobName: "Prometheus",
ScrapeTimeout: model.Duration(5 * time.Second),
ScrapeInterval: model.Duration(5 * time.Second),
MetricsPath: "/metrics",
JobName: "Prometheus",
ScrapeTimeout: model.Duration(5 * time.Second),
ScrapeInterval: model.Duration(5 * time.Second),
MetricsPath: "/metrics",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
buffers = pool.New(1e3, 100e6, 3, func(sz int) interface{} { return make([]byte, 0, sz) })
sp, _ = newScrapePool(cfg, app, 0, nil, buffers, &Options{}, newTestScrapeMetrics(t))
@@ -3751,11 +3846,13 @@ func TestReuseCacheRace(t *testing.T) {
break
}
sp.reload(&config.ScrapeConfig{
JobName: "Prometheus",
ScrapeTimeout: model.Duration(1 * time.Millisecond),
ScrapeInterval: model.Duration(1 * time.Millisecond),
MetricsPath: "/metrics",
SampleLimit: i,
JobName: "Prometheus",
ScrapeTimeout: model.Duration(1 * time.Millisecond),
ScrapeInterval: model.Duration(1 * time.Millisecond),
MetricsPath: "/metrics",
SampleLimit: i,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
})
}
}
@@ -3827,11 +3924,13 @@ func TestScrapeReportLimit(t *testing.T) {
defer s.Close()
cfg := &config.ScrapeConfig{
JobName: "test",
SampleLimit: 5,
Scheme: "http",
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
JobName: "test",
SampleLimit: 5,
Scheme: "http",
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
ts, scrapedTwice := newScrapableServer("metric_a 44\nmetric_b 44\nmetric_c 44\nmetric_d 44\n")
@@ -3887,6 +3986,7 @@ func TestScrapeUTF8(t *testing.T) {
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
ts, scrapedTwice := newScrapableServer("{\"with.dots\"} 42\n")
defer ts.Close()
@@ -4019,8 +4119,10 @@ func TestTargetScrapeIntervalAndTimeoutRelabel(t *testing.T) {
interval, _ := model.ParseDuration("2s")
timeout, _ := model.ParseDuration("500ms")
config := &config.ScrapeConfig{
ScrapeInterval: interval,
ScrapeTimeout: timeout,
ScrapeInterval: interval,
ScrapeTimeout: timeout,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
RelabelConfigs: []*relabel.Config{
{
SourceLabels: model.LabelNames{model.ScrapeIntervalLabel},
@@ -4077,10 +4179,12 @@ func TestLeQuantileReLabel(t *testing.T) {
Action: relabel.Replace,
},
},
SampleLimit: 100,
Scheme: "http",
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
SampleLimit: 100,
Scheme: "http",
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
metricsText := `
@@ -4586,6 +4690,8 @@ metric: <
ScrapeTimeout: model.Duration(25 * time.Millisecond),
AlwaysScrapeClassicHistograms: tc.alwaysScrapeClassicHistograms,
ConvertClassicHistogramsToNHCB: tc.convertClassicHistToNHCB,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
scrapeCount := 0
@@ -4762,12 +4868,14 @@ func TestScrapeLoopCompression(t *testing.T) {
defer ts.Close()
config := &config.ScrapeConfig{
JobName: "test",
SampleLimit: 100,
Scheme: "http",
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
EnableCompression: tc.enableCompression,
JobName: "test",
SampleLimit: 100,
Scheme: "http",
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
EnableCompression: tc.enableCompression,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
sp, err := newScrapePool(config, simpleStorage, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
@@ -4922,7 +5030,9 @@ func BenchmarkTargetScraperGzip(b *testing.B) {
model.AddressLabel, serverURL.Host,
),
scrapeConfig: &config.ScrapeConfig{
Params: url.Values{"count": []string{strconv.Itoa(scenario.metricsCount)}},
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
Params: url.Values{"count": []string{strconv.Itoa(scenario.metricsCount)}},
},
},
client: client,
@@ -5170,12 +5280,14 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) {
{
name: "Everything in scrape config",
cfg: &config.ScrapeConfig{
ScrapeInterval: model.Duration(2 * time.Second),
ScrapeTimeout: model.Duration(configTimeout),
Params: url.Values{"param": []string{expectedParam}},
JobName: jobName,
Scheme: httpScheme,
MetricsPath: expectedPath,
ScrapeInterval: model.Duration(2 * time.Second),
ScrapeTimeout: model.Duration(configTimeout),
Params: url.Values{"param": []string{expectedParam}},
JobName: jobName,
Scheme: httpScheme,
MetricsPath: expectedPath,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
targets: []*targetgroup.Group{
{
@@ -5188,12 +5300,14 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) {
{
name: "Overridden in target",
cfg: &config.ScrapeConfig{
ScrapeInterval: model.Duration(2 * time.Second),
ScrapeTimeout: model.Duration(secondTimeout),
JobName: jobName,
Scheme: httpScheme,
MetricsPath: secondPath,
Params: url.Values{"param": []string{secondParam}},
ScrapeInterval: model.Duration(2 * time.Second),
ScrapeTimeout: model.Duration(secondTimeout),
JobName: jobName,
Scheme: httpScheme,
MetricsPath: secondPath,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
Params: url.Values{"param": []string{secondParam}},
},
targets: []*targetgroup.Group{
{
@@ -5211,12 +5325,14 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) {
{
name: "Overridden in relabel_config",
cfg: &config.ScrapeConfig{
ScrapeInterval: model.Duration(2 * time.Second),
ScrapeTimeout: model.Duration(secondTimeout),
JobName: jobName,
Scheme: httpScheme,
MetricsPath: secondPath,
Params: url.Values{"param": []string{secondParam}},
ScrapeInterval: model.Duration(2 * time.Second),
ScrapeTimeout: model.Duration(secondTimeout),
JobName: jobName,
Scheme: httpScheme,
MetricsPath: secondPath,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
Params: url.Values{"param": []string{secondParam}},
RelabelConfigs: []*relabel.Config{
{
Action: relabel.DefaultRelabelConfig.Action,
@@ -5290,12 +5406,14 @@ func TestScrapePoolScrapeAfterReload(t *testing.T) {
t.Cleanup(h.Close)
cfg := &config.ScrapeConfig{
BodySizeLimit: 1,
JobName: "test",
Scheme: "http",
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
EnableCompression: false,
BodySizeLimit: 1,
JobName: "test",
Scheme: "http",
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
EnableCompression: false,
ServiceDiscoveryConfigs: discovery.Configs{
&discovery.StaticConfig{
{