scrape: Add config option for escaping scheme request. (#16066)
The new metric_name_escaping_scheme config option works in parallel with metric_name_validation_scheme and controls which escaping scheme is requested when scraping. When not specified, it defaults to underscores if the validation scheme is set to legacy, and to allow-utf-8 if the validation scheme is set to utf8. This lets users permit UTF-8 names while still explicitly requesting an escaping scheme other than UTF-8.

Fixes https://github.com/prometheus/prometheus/issues/16034
Built on https://github.com/prometheus/prometheus/pull/16080

Signed-off-by: Owen Williams <owen.williams@grafana.com>
parent de399eb09c
commit 6566c5a2b3
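Before the diff, a minimal sketch of how the new option composes with the existing one. The option names and values come from this commit's config code and testdata; the job name and target address are illustrative only:

    global:
      # Enforce the legacy character set for metric names...
      metric_name_validation_scheme: legacy
      # ...and ask targets to escape non-legacy characters to underscores.
      # If omitted, "underscores" is also the default under a legacy
      # validation scheme.
      metric_name_escaping_scheme: underscores

    scrape_configs:
      - job_name: prometheus                # illustrative job
        # Per-job override: accept UTF-8 names as-is for this job only.
        metric_name_validation_scheme: utf8
        metric_name_escaping_scheme: allow-utf-8
        static_configs:
          - targets: ["localhost:9090"]     # illustrative target

With the global settings above, jobs that set neither option inherit legacy validation with underscore escaping; the per-job override shows the only combination in which allow-utf-8 is accepted, since Validate (below) rejects allow-utf-8 escaping under a legacy validation scheme.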
config/config.go
@@ -479,8 +479,13 @@ type GlobalConfig struct {
 	// Keep no more than this many dropped targets per job.
 	// 0 means no limit.
 	KeepDroppedTargets uint `yaml:"keep_dropped_targets,omitempty"`
-	// Allow UTF8 Metric and Label Names.
+	// Allow UTF8 Metric and Label Names. Can be blank in config files but must
+	// have a value if a ScrapeConfig is created programmatically.
 	MetricNameValidationScheme string `yaml:"metric_name_validation_scheme,omitempty"`
+	// Metric name escaping mode to request through content negotiation. Can be
+	// blank in config files but must have a value if a ScrapeConfig is created
+	// programmatically.
+	MetricNameEscapingScheme string `yaml:"metric_name_escaping_scheme,omitempty"`
 }

 // ScrapeProtocol represents supported protocol for scraping metrics.
@@ -719,8 +724,13 @@ type ScrapeConfig struct {
 	// Keep no more than this many dropped targets per job.
 	// 0 means no limit.
 	KeepDroppedTargets uint `yaml:"keep_dropped_targets,omitempty"`
-	// Allow UTF8 Metric and Label Names.
+	// Allow UTF8 Metric and Label Names. Can be blank in config files but must
+	// have a value if a ScrapeConfig is created programmatically.
 	MetricNameValidationScheme string `yaml:"metric_name_validation_scheme,omitempty"`
+	// Metric name escaping mode to request through content negotiation. Can be
+	// blank in config files but must have a value if a ScrapeConfig is created
+	// programmatically.
+	MetricNameEscapingScheme string `yaml:"metric_name_escaping_scheme,omitempty"`

 	// We cannot do proper Go type embedding below as the parser will then parse
 	// values arbitrarily into the overflow maps of further-down types.
@@ -841,13 +851,48 @@ func (c *ScrapeConfig) Validate(globalConfig GlobalConfig) error {
 	if model.NameValidationScheme != model.UTF8Validation {
 		return errors.New("model.NameValidationScheme must be set to UTF8")
 	}

 	switch globalConfig.MetricNameValidationScheme {
-	case "", LegacyValidationConfig, UTF8ValidationConfig:
+	case "":
+		globalConfig.MetricNameValidationScheme = UTF8ValidationConfig
+	case LegacyValidationConfig, UTF8ValidationConfig:
 	default:
-		return fmt.Errorf("unknown name validation method specified, must be either 'legacy' or 'utf8', got %s", globalConfig.MetricNameValidationScheme)
+		return fmt.Errorf("unknown global name validation method specified, must be either 'legacy' or 'utf8', got %s", globalConfig.MetricNameValidationScheme)
 	}
-	if c.MetricNameValidationScheme == "" {
+	// ScrapeConfig validation scheme matches global if left blank.
+	switch c.MetricNameValidationScheme {
+	case "":
 		c.MetricNameValidationScheme = globalConfig.MetricNameValidationScheme
+	case LegacyValidationConfig, UTF8ValidationConfig:
+	default:
+		return fmt.Errorf("unknown scrape config name validation method specified, must be either 'legacy' or 'utf8', got %s", c.MetricNameValidationScheme)
+	}
+
+	// Escaping scheme is based on the validation scheme if left blank.
+	switch globalConfig.MetricNameEscapingScheme {
+	case "":
+		if globalConfig.MetricNameValidationScheme == LegacyValidationConfig {
+			globalConfig.MetricNameEscapingScheme = model.EscapeUnderscores
+		} else {
+			globalConfig.MetricNameEscapingScheme = model.AllowUTF8
+		}
+	case model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues:
+	default:
+		return fmt.Errorf("unknown global name escaping method specified, must be one of '%s', '%s', '%s', or '%s', got %s", model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues, globalConfig.MetricNameEscapingScheme)
+	}
+
+	if c.MetricNameEscapingScheme == "" {
+		c.MetricNameEscapingScheme = globalConfig.MetricNameEscapingScheme
+	}
+
+	switch c.MetricNameEscapingScheme {
+	case model.AllowUTF8:
+		if c.MetricNameValidationScheme != UTF8ValidationConfig {
+			return errors.New("utf8 metric names requested but validation scheme is not set to UTF8")
+		}
+	case model.EscapeUnderscores, model.EscapeDots, model.EscapeValues:
+	default:
+		return fmt.Errorf("unknown scrape config name escaping method specified, must be one of '%s', '%s', '%s', or '%s', got %s", model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues, c.MetricNameEscapingScheme)
 	}

 	return nil
@@ -858,6 +903,20 @@ func (c *ScrapeConfig) MarshalYAML() (interface{}, error) {
 	return discovery.MarshalYAMLWithInlineConfigs(c)
 }

+// ToValidationScheme returns the validation scheme for the given string config value.
+func ToValidationScheme(s string) (validationScheme model.ValidationScheme, err error) {
+	switch s {
+	case UTF8ValidationConfig:
+		validationScheme = model.UTF8Validation
+	case LegacyValidationConfig:
+		validationScheme = model.LegacyValidation
+	default:
+		return model.UTF8Validation, fmt.Errorf("invalid metric name validation scheme, %s", s)
+	}
+
+	return validationScheme, nil
+}
+
 // StorageConfig configures runtime reloadable configuration options.
 type StorageConfig struct {
 	TSDBConfig *TSDBConfig `yaml:"tsdb,omitempty"`
File diff suppressed because it is too large.
config/testdata fixtures:
@@ -1,4 +1,5 @@
 global:
   metric_name_validation_scheme: legacy
+  metric_name_escaping_scheme: dots
 scrape_configs:
   - job_name: prometheus

@@ -1,5 +1,7 @@
 global:
   metric_name_validation_scheme: legacy
+  metric_name_escaping_scheme: values
 scrape_configs:
   - job_name: prometheus
     metric_name_validation_scheme: utf8
+    metric_name_escaping_scheme: dots

@@ -1,3 +1,4 @@
 scrape_configs:
   - job_name: prometheus
     metric_name_validation_scheme: legacy
+    metric_name_escaping_scheme: values

config/testdata/scrape_config_utf8_conflicting.bad.yml (new file)
@@ -0,0 +1,5 @@
+global:
+  metric_name_validation_scheme: legacy
+  metric_name_escaping_scheme: allow-utf-8
+scrape_configs:
+  - job_name: prometheus
docs/configuration/configuration.md
@@ -469,6 +469,22 @@ metric_relabel_configs:
 # underscores.
 [ metric_name_validation_scheme <string> | default "utf8" ]

+# Specifies the character escaping scheme that will be requested when scraping
+# for metric and label names that do not conform to the legacy Prometheus
+# character set. Available options are:
+# * `allow-utf-8`: Full UTF-8 support, no escaping needed.
+# * `underscores`: Escape all legacy-invalid characters to underscores.
+# * `dots`: Escapes dots to `_dot_`, underscores to `__`, and all other
+#   legacy-invalid characters to underscores.
+# * `values`: Prepend the name with `U__` and replace all invalid
+#   characters with their unicode value, surrounded by underscores. Single
+#   underscores are replaced with double underscores.
+#   e.g. "U__my_2e_dotted_2e_name".
+# If this value is left blank, Prometheus will default to `allow-utf-8` if the
+# validation scheme for the current scrape config is set to utf8, or
+# `underscores` if the validation scheme is set to `legacy`.
+[ metric_name_escaping_scheme <string> | default "allow-utf-8" ]
+
 # Limit on total number of positive and negative buckets allowed in a single
 # native histogram. The resolution of a histogram with more buckets will be
 # reduced until the number of buckets is within the limit. If the limit cannot
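To make the escaping schemes documented above concrete, here is a small sketch using the helpers in github.com/prometheus/common/model. The NoEscaping, UnderscoreEscaping, and DotsEscaping constants and ToEscapingScheme appear in this diff; the EscapeName helper and the ValueEncodingEscaping constant are assumed from that library, so treat this as illustrative rather than exact API:

    package main

    import (
    	"fmt"

    	"github.com/prometheus/common/model"
    )

    func main() {
    	// A name that is invalid under the legacy [a-zA-Z0-9_:] character set.
    	const name = "my.dotted.name"
    	for _, scheme := range []model.EscapingScheme{
    		model.UnderscoreEscaping,    // expected: my_dotted_name
    		model.DotsEscaping,          // expected: my_dot_dotted_dot_name
    		model.ValueEncodingEscaping, // expected: U__my_2e_dotted_2e_name (assumed constant)
    	} {
    		// model.EscapeName is assumed from prometheus/common; it applies
    		// the named scheme to a single metric or label name.
    		fmt.Printf("%s: %s\n", scheme.String(), model.EscapeName(name, scheme))
    	}
    }

The expected outputs mirror the doc text above, including the "U__my_2e_dotted_2e_name" example for the values scheme.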
scrape/scrape.go
@@ -102,6 +102,9 @@ type scrapePool struct {

 	scrapeFailureLogger    FailureLogger
 	scrapeFailureLoggerMtx sync.RWMutex
+
+	validationScheme model.ValidationScheme
+	escapingScheme   model.EscapingScheme
 }

 type labelLimits struct {
@@ -124,7 +127,6 @@ type scrapeLoopOptions struct {
 	timeout                  time.Duration
 	alwaysScrapeClassicHist  bool
 	convertClassicHistToNHCB bool
-	validationScheme         model.ValidationScheme
 	fallbackScrapeProtocol   string

 	mrc []*relabel.Config
@@ -147,6 +149,16 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
 		return nil, fmt.Errorf("error creating HTTP client: %w", err)
 	}

+	validationScheme, err := config.ToValidationScheme(cfg.MetricNameValidationScheme)
+	if err != nil {
+		return nil, fmt.Errorf("invalid metric name validation scheme: %w", err)
+	}
+	var escapingScheme model.EscapingScheme
+	escapingScheme, err = model.ToEscapingScheme(cfg.MetricNameEscapingScheme)
+	if err != nil {
+		return nil, fmt.Errorf("invalid metric name escaping scheme, %w", err)
+	}
+
 	ctx, cancel := context.WithCancel(context.Background())
 	sp := &scrapePool{
 		cancel: cancel,
@@ -160,6 +172,8 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
 		logger:   logger,
 		metrics:  metrics,
 		httpOpts: options.HTTPClientOptions,
+		validationScheme: validationScheme,
+		escapingScheme:   escapingScheme,
 	}
 	sp.newLoop = func(opts scrapeLoopOptions) loop {
 		// Update the targets retrieval function for metadata to a new scrape cache.
@@ -201,7 +215,8 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
 			options.PassMetadataInContext,
 			metrics,
 			options.skipOffsetting,
-			opts.validationScheme,
+			sp.validationScheme,
+			sp.escapingScheme,
 			opts.fallbackScrapeProtocol,
 		)
 	}
@@ -309,6 +324,17 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
 	sp.config = cfg
 	oldClient := sp.client
 	sp.client = client
+	validationScheme, err := config.ToValidationScheme(cfg.MetricNameValidationScheme)
+	if err != nil {
+		return fmt.Errorf("invalid metric name validation scheme: %w", err)
+	}
+	sp.validationScheme = validationScheme
+	var escapingScheme model.EscapingScheme
+	escapingScheme, err = model.ToEscapingScheme(cfg.MetricNameEscapingScheme)
+	if err != nil {
+		return fmt.Errorf("invalid metric name escaping scheme, %w", err)
+	}
+	sp.escapingScheme = escapingScheme

 	sp.metrics.targetScrapePoolTargetLimit.WithLabelValues(sp.config.JobName).Set(float64(sp.config.TargetLimit))

@@ -344,11 +370,6 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
 		convertClassicHistToNHCB = sp.config.ConvertClassicHistogramsToNHCB
 	)

-	validationScheme := model.UTF8Validation
-	if sp.config.MetricNameValidationScheme == config.LegacyValidationConfig {
-		validationScheme = model.LegacyValidation
-	}
-
 	sp.targetMtx.Lock()

 	forcedErr := sp.refreshTargetLimitErr()
@@ -369,7 +390,7 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
 			client:               sp.client,
 			timeout:              targetTimeout,
 			bodySizeLimit:        bodySizeLimit,
-			acceptHeader:         acceptHeader(sp.config.ScrapeProtocols, validationScheme),
+			acceptHeader:         acceptHeader(sp.config.ScrapeProtocols, sp.escapingScheme),
 			acceptEncodingHeader: acceptEncodingHeader(enableCompression),
 			metrics:              sp.metrics,
 		}
@@ -388,7 +409,6 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
 			cache:                    cache,
 			interval:                 targetInterval,
 			timeout:                  targetTimeout,
-			validationScheme:         validationScheme,
 			fallbackScrapeProtocol:   fallbackScrapeProtocol,
 			alwaysScrapeClassicHist:  alwaysScrapeClassicHist,
 			convertClassicHistToNHCB: convertClassicHistToNHCB,
@@ -506,11 +526,6 @@ func (sp *scrapePool) sync(targets []*Target) {
 		convertClassicHistToNHCB = sp.config.ConvertClassicHistogramsToNHCB
 	)

-	validationScheme := model.UTF8Validation
-	if sp.config.MetricNameValidationScheme == config.LegacyValidationConfig {
-		validationScheme = model.LegacyValidation
-	}
-
 	sp.targetMtx.Lock()
 	for _, t := range targets {
 		hash := t.hash()
@@ -526,7 +541,7 @@ func (sp *scrapePool) sync(targets []*Target) {
 				client:               sp.client,
 				timeout:              timeout,
 				bodySizeLimit:        bodySizeLimit,
-				acceptHeader:         acceptHeader(sp.config.ScrapeProtocols, validationScheme),
+				acceptHeader:         acceptHeader(sp.config.ScrapeProtocols, sp.escapingScheme),
 				acceptEncodingHeader: acceptEncodingHeader(enableCompression),
 				metrics:              sp.metrics,
 			}
@@ -546,7 +561,6 @@ func (sp *scrapePool) sync(targets []*Target) {
 				timeout:                  timeout,
 				alwaysScrapeClassicHist:  alwaysScrapeClassicHist,
 				convertClassicHistToNHCB: convertClassicHistToNHCB,
-				validationScheme:         validationScheme,
 				fallbackScrapeProtocol:   fallbackScrapeProtocol,
 			})
 			if err != nil {
@@ -777,13 +791,14 @@ var errBodySizeLimit = errors.New("body size limit exceeded")
 // acceptHeader transforms preference from the options into specific header values as
 // https://www.rfc-editor.org/rfc/rfc9110.html#name-accept defines.
 // No validation is here, we expect scrape protocols to be validated already.
-func acceptHeader(sps []config.ScrapeProtocol, scheme model.ValidationScheme) string {
+func acceptHeader(sps []config.ScrapeProtocol, scheme model.EscapingScheme) string {
 	var vals []string
 	weight := len(config.ScrapeProtocolsHeaders) + 1
 	for _, sp := range sps {
 		val := config.ScrapeProtocolsHeaders[sp]
-		if scheme == model.UTF8Validation {
-			val += ";" + config.UTF8NamesHeader
+		// Escaping header is only valid for newer versions of the text formats.
+		if sp == config.PrometheusText1_0_0 || sp == config.OpenMetricsText1_0_0 {
+			val += ";" + model.EscapingKey + "=" + scheme.String()
 		}
 		val += fmt.Sprintf(";q=0.%d", weight)
 		vals = append(vals, val)
@@ -911,6 +926,7 @@ type scrapeLoop struct {
 	alwaysScrapeClassicHist  bool
 	convertClassicHistToNHCB bool
 	validationScheme         model.ValidationScheme
+	escapingScheme           model.EscapingScheme
 	fallbackScrapeProtocol   string

 	// Feature flagged options.
@@ -1230,6 +1246,7 @@ func newScrapeLoop(ctx context.Context,
 	metrics *scrapeMetrics,
 	skipOffsetting bool,
 	validationScheme model.ValidationScheme,
+	escapingScheme model.EscapingScheme,
 	fallbackScrapeProtocol string,
 ) *scrapeLoop {
 	if l == nil {
@@ -1284,6 +1301,7 @@ func newScrapeLoop(ctx context.Context,
 		metrics:                metrics,
 		skipOffsetting:         skipOffsetting,
 		validationScheme:       validationScheme,
+		escapingScheme:         escapingScheme,
 		fallbackScrapeProtocol: fallbackScrapeProtocol,
 	}
 	sl.ctx, sl.cancel = context.WithCancel(ctx)
scrape/scrape_test.go
@@ -82,10 +82,14 @@ func newTestScrapeMetrics(t testing.TB) *scrapeMetrics {

 func TestNewScrapePool(t *testing.T) {
 	var (
 		app = &nopAppendable{}
-		cfg = &config.ScrapeConfig{}
-		sp, _ = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
+		cfg = &config.ScrapeConfig{
+			MetricNameValidationScheme: config.UTF8ValidationConfig,
+			MetricNameEscapingScheme:   model.AllowUTF8,
+		}
+		sp, err = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
 	)
+	require.NoError(t, err)

 	a, ok := sp.appendable.(*nopAppendable)
 	require.True(t, ok, "Failure to append.")
@@ -321,8 +325,10 @@ func TestDroppedTargetsList(t *testing.T) {
 	var (
 		app = &nopAppendable{}
 		cfg = &config.ScrapeConfig{
 			JobName:        "dropMe",
 			ScrapeInterval: model.Duration(1),
+			MetricNameValidationScheme: config.UTF8ValidationConfig,
+			MetricNameEscapingScheme:   model.AllowUTF8,
 			RelabelConfigs: []*relabel.Config{
 				{
 					Action: relabel.Drop,
@@ -366,8 +372,10 @@ func TestDiscoveredLabelsUpdate(t *testing.T) {

 	// These are used when syncing so need this to avoid a panic.
 	sp.config = &config.ScrapeConfig{
 		ScrapeInterval: model.Duration(1),
 		ScrapeTimeout:  model.Duration(1),
+		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
 	}
 	sp.activeTargets = make(map[uint64]*Target)
 	t1 := &Target{
@@ -496,8 +504,10 @@ func TestScrapePoolReload(t *testing.T) {
 	stopped := map[uint64]bool{}

 	reloadCfg := &config.ScrapeConfig{
 		ScrapeInterval: model.Duration(3 * time.Second),
 		ScrapeTimeout:  model.Duration(2 * time.Second),
+		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
 	}
 	// On starting to run, new loops created on reload check whether their preceding
 	// equivalents have been stopped.
@@ -588,8 +598,10 @@ func TestScrapePoolReload(t *testing.T) {

 func TestScrapePoolReloadPreserveRelabeledIntervalTimeout(t *testing.T) {
 	reloadCfg := &config.ScrapeConfig{
 		ScrapeInterval: model.Duration(3 * time.Second),
 		ScrapeTimeout:  model.Duration(2 * time.Second),
+		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
 	}
 	newLoop := func(opts scrapeLoopOptions) loop {
 		l := &testLoop{interval: opts.interval, timeout: opts.timeout}
@@ -687,9 +699,11 @@ func TestScrapePoolTargetLimit(t *testing.T) {
 	reloadWithLimit := func(l uint) {
 		limit = l
 		require.NoError(t, sp.reload(&config.ScrapeConfig{
 			ScrapeInterval: model.Duration(3 * time.Second),
 			ScrapeTimeout:  model.Duration(2 * time.Second),
-			TargetLimit:    l,
+			MetricNameValidationScheme: config.UTF8ValidationConfig,
+			MetricNameEscapingScheme:   model.AllowUTF8,
+			TargetLimit:                l,
 		}))
 	}

@@ -776,7 +790,10 @@ func TestScrapePoolTargetLimit(t *testing.T) {
 }

 func TestScrapePoolAppender(t *testing.T) {
-	cfg := &config.ScrapeConfig{}
+	cfg := &config.ScrapeConfig{
+		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
+	}
 	app := &nopAppendable{}
 	sp, _ := newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))

@@ -849,7 +866,12 @@ func TestScrapePoolRaces(t *testing.T) {
 	interval, _ := model.ParseDuration("1s")
 	timeout, _ := model.ParseDuration("500ms")
 	newConfig := func() *config.ScrapeConfig {
-		return &config.ScrapeConfig{ScrapeInterval: interval, ScrapeTimeout: timeout}
+		return &config.ScrapeConfig{
+			ScrapeInterval:             interval,
+			ScrapeTimeout:              timeout,
+			MetricNameValidationScheme: config.UTF8ValidationConfig,
+			MetricNameEscapingScheme:   model.AllowUTF8,
+		}
 	}
 	sp, _ := newScrapePool(newConfig(), &nopAppendable{}, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
 	tgts := []*targetgroup.Group{
@@ -919,8 +941,10 @@ func TestScrapePoolScrapeLoopsStarted(t *testing.T) {
 	}

 	require.NoError(t, sp.reload(&config.ScrapeConfig{
 		ScrapeInterval: model.Duration(3 * time.Second),
 		ScrapeTimeout:  model.Duration(2 * time.Second),
+		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
 	}))
 	sp.Sync(tgs)

@@ -963,7 +987,8 @@ func newBasicScrapeLoopWithFallback(t testing.TB, ctx context.Context, scraper s
 		false,
 		newTestScrapeMetrics(t),
 		false,
-		model.LegacyValidation,
+		model.UTF8Validation,
+		model.NoEscaping,
 		fallback,
 	)
 }
@@ -1109,7 +1134,8 @@ func TestScrapeLoopRun(t *testing.T) {
 		false,
 		scrapeMetrics,
 		false,
-		model.LegacyValidation,
+		model.UTF8Validation,
+		model.NoEscaping,
 		"",
 	)

@@ -1256,7 +1282,8 @@ func TestScrapeLoopMetadata(t *testing.T) {
 		false,
 		scrapeMetrics,
 		false,
-		model.LegacyValidation,
+		model.UTF8Validation,
+		model.NoEscaping,
 		"",
 	)
 	defer cancel()
@@ -3041,6 +3068,47 @@ func TestScrapeLoopOutOfBoundsTimeError(t *testing.T) {
 	require.Equal(t, 0, seriesAdded)
 }

+func TestAcceptHeader(t *testing.T) {
+	tests := []struct {
+		name            string
+		scrapeProtocols []config.ScrapeProtocol
+		scheme          model.EscapingScheme
+		expectedHeader  string
+	}{
+		{
+			name:            "default scrape protocols with underscore escaping",
+			scrapeProtocols: config.DefaultScrapeProtocols,
+			scheme:          model.UnderscoreEscaping,
+			expectedHeader:  "application/openmetrics-text;version=1.0.0;escaping=underscores;q=0.6,application/openmetrics-text;version=0.0.1;q=0.5,text/plain;version=1.0.0;escaping=underscores;q=0.4,text/plain;version=0.0.4;q=0.3,*/*;q=0.2",
+		},
+		{
+			name:            "default proto first scrape protocols with dots escaping",
+			scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
+			scheme:          model.DotsEscaping,
+			expectedHeader:  "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.6,application/openmetrics-text;version=1.0.0;escaping=dots;q=0.5,application/openmetrics-text;version=0.0.1;q=0.4,text/plain;version=1.0.0;escaping=dots;q=0.3,text/plain;version=0.0.4;q=0.2,*/*;q=0.1",
+		},
+		{
+			name:            "default scrape protocols with no escaping",
+			scrapeProtocols: config.DefaultScrapeProtocols,
+			scheme:          model.NoEscaping,
+			expectedHeader:  "application/openmetrics-text;version=1.0.0;escaping=allow-utf-8;q=0.6,application/openmetrics-text;version=0.0.1;q=0.5,text/plain;version=1.0.0;escaping=allow-utf-8;q=0.4,text/plain;version=0.0.4;q=0.3,*/*;q=0.2",
+		},
+		{
+			name:            "default proto first scrape protocols with no escaping",
+			scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
+			scheme:          model.NoEscaping,
+			expectedHeader:  "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.6,application/openmetrics-text;version=1.0.0;escaping=allow-utf-8;q=0.5,application/openmetrics-text;version=0.0.1;q=0.4,text/plain;version=1.0.0;escaping=allow-utf-8;q=0.3,text/plain;version=0.0.4;q=0.2,*/*;q=0.1",
+		},
+	}
+
+	for _, tc := range tests {
+		t.Run(tc.name, func(t *testing.T) {
+			header := acceptHeader(tc.scrapeProtocols, tc.scheme)
+			require.Equal(t, tc.expectedHeader, header)
+		})
+	}
+}
+
 func TestTargetScraperScrapeOK(t *testing.T) {
 	const (
 		configTimeout = 1500 * time.Millisecond
@@ -3124,31 +3192,31 @@ func TestTargetScraperScrapeOK(t *testing.T) {

 	for _, tc := range []struct {
 		scrapeProtocols []config.ScrapeProtocol
-		scheme          model.ValidationScheme
+		scheme          model.EscapingScheme
 		protobufParsing bool
 		allowUTF8       bool
 	}{
 		{
 			scrapeProtocols: config.DefaultScrapeProtocols,
-			scheme:          model.LegacyValidation,
+			scheme:          model.UnderscoreEscaping,
 			protobufParsing: false,
 			allowUTF8:       false,
 		},
 		{
 			scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
-			scheme:          model.LegacyValidation,
+			scheme:          model.UnderscoreEscaping,
 			protobufParsing: true,
 			allowUTF8:       false,
 		},
 		{
 			scrapeProtocols: config.DefaultScrapeProtocols,
-			scheme:          model.UTF8Validation,
+			scheme:          model.NoEscaping,
 			protobufParsing: false,
 			allowUTF8:       true,
 		},
 		{
 			scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
-			scheme:          model.UTF8Validation,
+			scheme:          model.NoEscaping,
 			protobufParsing: true,
 			allowUTF8:       true,
 		},
@@ -3185,7 +3253,7 @@ func TestTargetScrapeScrapeCancel(t *testing.T) {
 			scrapeConfig: &config.ScrapeConfig{},
 		},
 		client: http.DefaultClient,
-		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
+		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping),
 	}
 	ctx, cancel := context.WithCancel(context.Background())

@@ -3241,7 +3309,7 @@ func TestTargetScrapeScrapeNotFound(t *testing.T) {
 			scrapeConfig: &config.ScrapeConfig{},
 		},
 		client: http.DefaultClient,
-		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
+		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping),
 	}

 	resp, err := ts.scrape(context.Background())
@@ -3286,7 +3354,7 @@ func TestTargetScraperBodySizeLimit(t *testing.T) {
 		},
 		client: http.DefaultClient,
 		bodySizeLimit: bodySizeLimit,
-		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
+		acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping),
 		metrics: newTestScrapeMetrics(t),
 	}
 	var buf bytes.Buffer
@@ -3546,15 +3614,20 @@ func TestReuseScrapeCache(t *testing.T) {
 	var (
 		app = &nopAppendable{}
 		cfg = &config.ScrapeConfig{
 			JobName:        "Prometheus",
 			ScrapeTimeout:  model.Duration(5 * time.Second),
 			ScrapeInterval: model.Duration(5 * time.Second),
 			MetricsPath:    "/metrics",
+			MetricNameValidationScheme: config.UTF8ValidationConfig,
+			MetricNameEscapingScheme:   model.AllowUTF8,
 		}
 		sp, _ = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
 		t1    = &Target{
 			labels: labels.FromStrings("labelNew", "nameNew", "labelNew1", "nameNew1", "labelNew2", "nameNew2"),
-			scrapeConfig: &config.ScrapeConfig{},
+			scrapeConfig: &config.ScrapeConfig{
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
+			},
 		}
 		proxyURL, _ = url.Parse("http://localhost:2128")
 	)
@@ -3568,40 +3641,48 @@ func TestReuseScrapeCache(t *testing.T) {
 		{
 			keep: true,
 			newConfig: &config.ScrapeConfig{
 				JobName:        "Prometheus",
 				ScrapeInterval: model.Duration(5 * time.Second),
 				ScrapeTimeout:  model.Duration(5 * time.Second),
 				MetricsPath:    "/metrics",
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
 			},
 		},
 		{
 			keep: false,
 			newConfig: &config.ScrapeConfig{
 				JobName:        "Prometheus",
 				ScrapeInterval: model.Duration(5 * time.Second),
 				ScrapeTimeout:  model.Duration(15 * time.Second),
 				MetricsPath:    "/metrics2",
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
 			},
 		},
 		{
 			keep: true,
 			newConfig: &config.ScrapeConfig{
 				JobName:        "Prometheus",
 				SampleLimit:    400,
 				ScrapeInterval: model.Duration(5 * time.Second),
 				ScrapeTimeout:  model.Duration(15 * time.Second),
 				MetricsPath:    "/metrics2",
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
 			},
 		},
 		{
 			keep: false,
 			newConfig: &config.ScrapeConfig{
 				JobName:         "Prometheus",
 				HonorTimestamps: true,
 				SampleLimit:     400,
 				ScrapeInterval:  model.Duration(5 * time.Second),
 				ScrapeTimeout:   model.Duration(15 * time.Second),
 				MetricsPath:     "/metrics2",
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
 			},
 		},
 		{
@@ -3613,64 +3694,76 @@ func TestReuseScrapeCache(t *testing.T) {
 				HTTPClientConfig: config_util.HTTPClientConfig{
 					ProxyConfig: config_util.ProxyConfig{ProxyURL: config_util.URL{URL: proxyURL}},
 				},
 				ScrapeInterval: model.Duration(5 * time.Second),
 				ScrapeTimeout:  model.Duration(15 * time.Second),
 				MetricsPath:    "/metrics2",
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
 			},
 		},
 		{
 			keep: false,
 			newConfig: &config.ScrapeConfig{
 				JobName:         "Prometheus",
 				HonorTimestamps: true,
 				HonorLabels:     true,
 				SampleLimit:     400,
 				ScrapeInterval:  model.Duration(5 * time.Second),
 				ScrapeTimeout:   model.Duration(15 * time.Second),
 				MetricsPath:     "/metrics2",
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
 			},
 		},
 		{
 			keep: false,
 			newConfig: &config.ScrapeConfig{
 				JobName:        "Prometheus",
 				ScrapeInterval: model.Duration(5 * time.Second),
 				ScrapeTimeout:  model.Duration(15 * time.Second),
 				MetricsPath:    "/metrics",
 				LabelLimit:     1,
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
 			},
 		},
 		{
 			keep: false,
 			newConfig: &config.ScrapeConfig{
 				JobName:        "Prometheus",
 				ScrapeInterval: model.Duration(5 * time.Second),
 				ScrapeTimeout:  model.Duration(15 * time.Second),
 				MetricsPath:    "/metrics",
 				LabelLimit:     15,
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
 			},
 		},
 		{
 			keep: false,
 			newConfig: &config.ScrapeConfig{
 				JobName:              "Prometheus",
 				ScrapeInterval:       model.Duration(5 * time.Second),
 				ScrapeTimeout:        model.Duration(15 * time.Second),
 				MetricsPath:          "/metrics",
 				LabelLimit:           15,
 				LabelNameLengthLimit: 5,
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
 			},
 		},
 		{
 			keep: false,
 			newConfig: &config.ScrapeConfig{
 				JobName:               "Prometheus",
 				ScrapeInterval:        model.Duration(5 * time.Second),
 				ScrapeTimeout:         model.Duration(15 * time.Second),
 				MetricsPath:           "/metrics",
 				LabelLimit:            15,
 				LabelNameLengthLimit:  5,
 				LabelValueLengthLimit: 7,
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
 			},
 		},
 	}
@@ -3730,10 +3823,12 @@ func TestReuseCacheRace(t *testing.T) {
 	var (
 		app = &nopAppendable{}
 		cfg = &config.ScrapeConfig{
 			JobName:        "Prometheus",
 			ScrapeTimeout:  model.Duration(5 * time.Second),
 			ScrapeInterval: model.Duration(5 * time.Second),
 			MetricsPath:    "/metrics",
+			MetricNameValidationScheme: config.UTF8ValidationConfig,
+			MetricNameEscapingScheme:   model.AllowUTF8,
 		}
 		buffers = pool.New(1e3, 100e6, 3, func(sz int) interface{} { return make([]byte, 0, sz) })
 		sp, _   = newScrapePool(cfg, app, 0, nil, buffers, &Options{}, newTestScrapeMetrics(t))
@@ -3751,11 +3846,13 @@ func TestReuseCacheRace(t *testing.T) {
 			break
 		}
 		sp.reload(&config.ScrapeConfig{
 			JobName:        "Prometheus",
 			ScrapeTimeout:  model.Duration(1 * time.Millisecond),
 			ScrapeInterval: model.Duration(1 * time.Millisecond),
 			MetricsPath:    "/metrics",
 			SampleLimit:    i,
+			MetricNameValidationScheme: config.UTF8ValidationConfig,
+			MetricNameEscapingScheme:   model.AllowUTF8,
 		})
 	}
 }
@@ -3827,11 +3924,13 @@ func TestScrapeReportLimit(t *testing.T) {
 	defer s.Close()

 	cfg := &config.ScrapeConfig{
 		JobName:        "test",
 		SampleLimit:    5,
 		Scheme:         "http",
 		ScrapeInterval: model.Duration(100 * time.Millisecond),
 		ScrapeTimeout:  model.Duration(100 * time.Millisecond),
+		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
 	}

 	ts, scrapedTwice := newScrapableServer("metric_a 44\nmetric_b 44\nmetric_c 44\nmetric_d 44\n")
@@ -3887,6 +3986,7 @@ func TestScrapeUTF8(t *testing.T) {
 		ScrapeInterval: model.Duration(100 * time.Millisecond),
 		ScrapeTimeout:  model.Duration(100 * time.Millisecond),
 		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
 	}
 	ts, scrapedTwice := newScrapableServer("{\"with.dots\"} 42\n")
 	defer ts.Close()
@@ -4019,8 +4119,10 @@ func TestTargetScrapeIntervalAndTimeoutRelabel(t *testing.T) {
 	interval, _ := model.ParseDuration("2s")
 	timeout, _ := model.ParseDuration("500ms")
 	config := &config.ScrapeConfig{
 		ScrapeInterval: interval,
 		ScrapeTimeout:  timeout,
+		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
 		RelabelConfigs: []*relabel.Config{
 			{
 				SourceLabels: model.LabelNames{model.ScrapeIntervalLabel},
@@ -4077,10 +4179,12 @@ func TestLeQuantileReLabel(t *testing.T) {
 				Action: relabel.Replace,
 			},
 		},
 		SampleLimit:    100,
 		Scheme:         "http",
 		ScrapeInterval: model.Duration(100 * time.Millisecond),
 		ScrapeTimeout:  model.Duration(100 * time.Millisecond),
+		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
 	}

 	metricsText := `
@@ -4586,6 +4690,8 @@ metric: <
 		ScrapeTimeout: model.Duration(25 * time.Millisecond),
 		AlwaysScrapeClassicHistograms: tc.alwaysScrapeClassicHistograms,
 		ConvertClassicHistogramsToNHCB: tc.convertClassicHistToNHCB,
+		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
 	}

 	scrapeCount := 0
@@ -4762,12 +4868,14 @@ func TestScrapeLoopCompression(t *testing.T) {
 	defer ts.Close()

 	config := &config.ScrapeConfig{
 		JobName:           "test",
 		SampleLimit:       100,
 		Scheme:            "http",
 		ScrapeInterval:    model.Duration(100 * time.Millisecond),
 		ScrapeTimeout:     model.Duration(100 * time.Millisecond),
 		EnableCompression: tc.enableCompression,
+		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
 	}

 	sp, err := newScrapePool(config, simpleStorage, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
@@ -4922,7 +5030,9 @@ func BenchmarkTargetScraperGzip(b *testing.B) {
 			model.AddressLabel, serverURL.Host,
 		),
 		scrapeConfig: &config.ScrapeConfig{
-			Params: url.Values{"count": []string{strconv.Itoa(scenario.metricsCount)}},
+			MetricNameValidationScheme: config.UTF8ValidationConfig,
+			MetricNameEscapingScheme:   model.AllowUTF8,
+			Params: url.Values{"count": []string{strconv.Itoa(scenario.metricsCount)}},
 		},
 	},
 	client: client,
@@ -5170,12 +5280,14 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) {
 		{
 			name: "Everything in scrape config",
 			cfg: &config.ScrapeConfig{
 				ScrapeInterval: model.Duration(2 * time.Second),
 				ScrapeTimeout:  model.Duration(configTimeout),
 				Params:         url.Values{"param": []string{expectedParam}},
 				JobName:        jobName,
 				Scheme:         httpScheme,
 				MetricsPath:    expectedPath,
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
 			},
 			targets: []*targetgroup.Group{
 				{
@@ -5188,12 +5300,14 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) {
 		{
 			name: "Overridden in target",
 			cfg: &config.ScrapeConfig{
 				ScrapeInterval: model.Duration(2 * time.Second),
 				ScrapeTimeout:  model.Duration(secondTimeout),
 				JobName:        jobName,
 				Scheme:         httpScheme,
 				MetricsPath:    secondPath,
-				Params:         url.Values{"param": []string{secondParam}},
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
+				Params:                     url.Values{"param": []string{secondParam}},
 			},
 			targets: []*targetgroup.Group{
 				{
@@ -5211,12 +5325,14 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) {
 		{
 			name: "Overridden in relabel_config",
 			cfg: &config.ScrapeConfig{
 				ScrapeInterval: model.Duration(2 * time.Second),
 				ScrapeTimeout:  model.Duration(secondTimeout),
 				JobName:        jobName,
 				Scheme:         httpScheme,
 				MetricsPath:    secondPath,
-				Params:         url.Values{"param": []string{secondParam}},
+				MetricNameValidationScheme: config.UTF8ValidationConfig,
+				MetricNameEscapingScheme:   model.AllowUTF8,
+				Params:                     url.Values{"param": []string{secondParam}},
 				RelabelConfigs: []*relabel.Config{
 					{
 						Action: relabel.DefaultRelabelConfig.Action,
@@ -5290,12 +5406,14 @@ func TestScrapePoolScrapeAfterReload(t *testing.T) {
 	t.Cleanup(h.Close)

 	cfg := &config.ScrapeConfig{
 		BodySizeLimit:  1,
 		JobName:        "test",
 		Scheme:         "http",
 		ScrapeInterval: model.Duration(100 * time.Millisecond),
 		ScrapeTimeout:  model.Duration(100 * time.Millisecond),
-		EnableCompression: false,
+		MetricNameValidationScheme: config.UTF8ValidationConfig,
+		MetricNameEscapingScheme:   model.AllowUTF8,
+		EnableCompression:          false,
 		ServiceDiscoveryConfigs: discovery.Configs{
 			&discovery.StaticConfig{
 				{