scrape: Add config option for escaping scheme request. (#16066)

The new metric_name_escaping_scheme config option works in parallel with metric_name_validation_scheme and controls which escaping scheme is requested when scraping. When not specified, the scheme will request underscores if the validation scheme is set to legacy, and will request allow-utf-8 when the validation scheme is set to utf8. This setting lets users permit UTF-8 names while still explicitly requesting an escaping scheme rather than allow-utf-8.

Fixes https://github.com/prometheus/prometheus/issues/16034

Built on https://github.com/prometheus/prometheus/pull/16080

Signed-off-by: Owen Williams <owen.williams@grafana.com>
This commit is contained in:
Owen Williams 2025-03-26 18:27:28 -04:00 committed by GitHub
parent de399eb09c
commit 6566c5a2b3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
9 changed files with 900 additions and 558 deletions

View File

@ -479,8 +479,13 @@ type GlobalConfig struct {
// Keep no more than this many dropped targets per job.
// 0 means no limit.
KeepDroppedTargets uint `yaml:"keep_dropped_targets,omitempty"`
// Allow UTF8 Metric and Label Names.
// Allow UTF8 Metric and Label Names. Can be blank in config files but must
// have a value if a ScrapeConfig is created programmatically.
MetricNameValidationScheme string `yaml:"metric_name_validation_scheme,omitempty"`
// Metric name escaping mode to request through content negotiation. Can be
// blank in config files but must have a value if a ScrapeConfig is created
// programmatically.
MetricNameEscapingScheme string `yaml:"metric_name_escaping_scheme,omitempty"`
}
// ScrapeProtocol represents supported protocol for scraping metrics.
@ -719,8 +724,13 @@ type ScrapeConfig struct {
// Keep no more than this many dropped targets per job.
// 0 means no limit.
KeepDroppedTargets uint `yaml:"keep_dropped_targets,omitempty"`
// Allow UTF8 Metric and Label Names.
// Allow UTF8 Metric and Label Names. Can be blank in config files but must
// have a value if a ScrapeConfig is created programmatically.
MetricNameValidationScheme string `yaml:"metric_name_validation_scheme,omitempty"`
// Metric name escaping mode to request through content negotiation. Can be
// blank in config files but must have a value if a ScrapeConfig is created
// programmatically.
MetricNameEscapingScheme string `yaml:"metric_name_escaping_scheme,omitempty"`
// We cannot do proper Go type embedding below as the parser will then parse
// values arbitrarily into the overflow maps of further-down types.
@ -841,13 +851,48 @@ func (c *ScrapeConfig) Validate(globalConfig GlobalConfig) error {
if model.NameValidationScheme != model.UTF8Validation {
return errors.New("model.NameValidationScheme must be set to UTF8")
}
switch globalConfig.MetricNameValidationScheme {
case "", LegacyValidationConfig, UTF8ValidationConfig:
case "":
globalConfig.MetricNameValidationScheme = UTF8ValidationConfig
case LegacyValidationConfig, UTF8ValidationConfig:
default:
return fmt.Errorf("unknown name validation method specified, must be either 'legacy' or 'utf8', got %s", globalConfig.MetricNameValidationScheme)
return fmt.Errorf("unknown global name validation method specified, must be either 'legacy' or 'utf8', got %s", globalConfig.MetricNameValidationScheme)
}
if c.MetricNameValidationScheme == "" {
// Scrapeconfig validation scheme matches global if left blank.
switch c.MetricNameValidationScheme {
case "":
c.MetricNameValidationScheme = globalConfig.MetricNameValidationScheme
case LegacyValidationConfig, UTF8ValidationConfig:
default:
return fmt.Errorf("unknown scrape config name validation method specified, must be either 'legacy' or 'utf8', got %s", c.MetricNameValidationScheme)
}
// Escaping scheme is based on the validation scheme if left blank.
switch globalConfig.MetricNameEscapingScheme {
case "":
if globalConfig.MetricNameValidationScheme == LegacyValidationConfig {
globalConfig.MetricNameEscapingScheme = model.EscapeUnderscores
} else {
globalConfig.MetricNameEscapingScheme = model.AllowUTF8
}
case model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues:
default:
return fmt.Errorf("unknown global name escaping method specified, must be one of '%s', '%s', '%s', or '%s', got %s", model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues, globalConfig.MetricNameEscapingScheme)
}
if c.MetricNameEscapingScheme == "" {
c.MetricNameEscapingScheme = globalConfig.MetricNameEscapingScheme
}
switch c.MetricNameEscapingScheme {
case model.AllowUTF8:
if c.MetricNameValidationScheme != UTF8ValidationConfig {
return errors.New("utf8 metric names requested but validation scheme is not set to UTF8")
}
case model.EscapeUnderscores, model.EscapeDots, model.EscapeValues:
default:
return fmt.Errorf("unknown scrape config name escaping method specified, must be one of '%s', '%s', '%s', or '%s', got %s", model.AllowUTF8, model.EscapeUnderscores, model.EscapeDots, model.EscapeValues, c.MetricNameEscapingScheme)
}
return nil
@ -858,6 +903,20 @@ func (c *ScrapeConfig) MarshalYAML() (interface{}, error) {
return discovery.MarshalYAMLWithInlineConfigs(c)
}
// ToValidationScheme returns the validation scheme for the given string config value.
func ToValidationScheme(s string) (validationScheme model.ValidationScheme, err error) {
	// Map the two accepted config strings onto their model equivalents;
	// anything else is rejected.
	switch s {
	case UTF8ValidationConfig:
		return model.UTF8Validation, nil
	case LegacyValidationConfig:
		return model.LegacyValidation, nil
	default:
		return model.UTF8Validation, fmt.Errorf("invalid metric name validation scheme, %s", s)
	}
}
// StorageConfig configures runtime reloadable configuration options.
type StorageConfig struct {
TSDBConfig *TSDBConfig `yaml:"tsdb,omitempty"`

View File

@ -216,6 +216,8 @@ var expectedConf = &Config{
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFallbackProtocol: PrometheusText0_0_4,
ScrapeFailureLogFile: "testdata/fail_prom.log",
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -329,6 +331,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: 210,
ScrapeProtocols: []ScrapeProtocol{PrometheusText0_0_4},
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
HTTPClientConfig: config.HTTPClientConfig{
BasicAuth: &config.BasicAuth{
@ -427,6 +431,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -483,6 +489,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: "/metrics",
Scheme: "http",
@ -517,6 +525,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -557,6 +567,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -597,6 +609,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -627,6 +641,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -665,6 +681,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -700,6 +718,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -742,6 +762,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -774,6 +796,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -809,6 +833,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -837,6 +863,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -868,6 +896,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: "/federate",
Scheme: DefaultScrapeConfig.Scheme,
@ -899,6 +929,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -930,6 +962,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -958,6 +992,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -994,6 +1030,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -1029,6 +1067,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -1061,6 +1101,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -1092,6 +1134,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -1127,6 +1171,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -1165,6 +1211,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -1222,6 +1270,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -1250,6 +1300,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
HTTPClientConfig: config.DefaultHTTPClientConfig,
MetricsPath: DefaultScrapeConfig.MetricsPath,
@ -1289,6 +1341,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
HTTPClientConfig: config.DefaultHTTPClientConfig,
MetricsPath: DefaultScrapeConfig.MetricsPath,
@ -1334,6 +1388,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -1370,6 +1426,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
HTTPClientConfig: config.DefaultHTTPClientConfig,
MetricsPath: DefaultScrapeConfig.MetricsPath,
@ -1400,6 +1458,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -1433,6 +1493,8 @@ var expectedConf = &Config{
LabelValueLengthLimit: globLabelValueLengthLimit,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
ScrapeFailureLogFile: globScrapeFailureLogFile,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -2174,6 +2236,10 @@ var expectedErrors = []struct {
filename: "scrape_config_files_fallback_scrape_protocol2.bad.yml",
errMsg: `unmarshal errors`,
},
{
filename: "scrape_config_utf8_conflicting.bad.yml",
errMsg: `utf8 metric names requested but validation scheme is not set to UTF8`,
},
}
func TestBadConfigs(t *testing.T) {
@ -2258,6 +2324,8 @@ func TestGetScrapeConfigs(t *testing.T) {
ScrapeInterval: scrapeInterval,
ScrapeTimeout: scrapeTimeout,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: "/metrics",
Scheme: "http",
@ -2290,12 +2358,12 @@ func TestGetScrapeConfigs(t *testing.T) {
expectedResult: []*ScrapeConfig{sc("prometheus", model.Duration(60*time.Second), model.Duration(10*time.Second))},
},
{
name: "An global config that only include a scrape config file.",
name: "A global config that only include a scrape config file.",
configFile: "testdata/scrape_config_files_only.good.yml",
expectedResult: []*ScrapeConfig{sc("prometheus", model.Duration(60*time.Second), model.Duration(10*time.Second))},
},
{
name: "An global config that combine scrape config files and scrape configs.",
name: "A global config that combine scrape config files and scrape configs.",
configFile: "testdata/scrape_config_files_combined.good.yml",
expectedResult: []*ScrapeConfig{
sc("node", model.Duration(60*time.Second), model.Duration(10*time.Second)),
@ -2304,7 +2372,7 @@ func TestGetScrapeConfigs(t *testing.T) {
},
},
{
name: "An global config that includes a scrape config file with globs",
name: "A global config that includes a scrape config file with globs",
configFile: "testdata/scrape_config_files_glob.good.yml",
expectedResult: []*ScrapeConfig{
{
@ -2314,6 +2382,8 @@ func TestGetScrapeConfigs(t *testing.T) {
ScrapeInterval: model.Duration(60 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
MetricsPath: DefaultScrapeConfig.MetricsPath,
Scheme: DefaultScrapeConfig.Scheme,
@ -2347,6 +2417,8 @@ func TestGetScrapeConfigs(t *testing.T) {
ScrapeInterval: model.Duration(15 * time.Second),
ScrapeTimeout: DefaultGlobalConfig.ScrapeTimeout,
ScrapeProtocols: DefaultGlobalConfig.ScrapeProtocols,
MetricNameValidationScheme: UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
HTTPClientConfig: config.HTTPClientConfig{
TLSConfig: config.TLSConfig{
@ -2380,17 +2452,17 @@ func TestGetScrapeConfigs(t *testing.T) {
},
},
{
name: "An global config that includes twice the same scrape configs.",
name: "A global config that includes twice the same scrape configs.",
configFile: "testdata/scrape_config_files_double_import.bad.yml",
expectedError: `found multiple scrape configs with job name "prometheus"`,
},
{
name: "An global config that includes a scrape config identical to a scrape config in the main file.",
name: "A global config that includes a scrape config identical to a scrape config in the main file.",
configFile: "testdata/scrape_config_files_duplicate.bad.yml",
expectedError: `found multiple scrape configs with job name "prometheus"`,
},
{
name: "An global config that includes a scrape config file with errors.",
name: "A global config that includes a scrape config file with errors.",
configFile: "testdata/scrape_config_files_global.bad.yml",
expectedError: `scrape timeout greater than scrape interval for scrape config with job name "prometheus"`,
},
@ -2437,7 +2509,7 @@ func TestScrapeConfigNameValidationSettings(t *testing.T) {
{
name: "blank config implies default",
inputFile: "scrape_config_default_validation_mode",
expectScheme: "",
expectScheme: "utf8",
},
{
name: "global setting implies local settings",
@ -2472,6 +2544,56 @@ func TestScrapeConfigNameValidationSettings(t *testing.T) {
}
}
func TestScrapeConfigNameEscapingSettings(t *testing.T) {
tests := []struct {
name string
inputFile string
expectValidationScheme string
expectEscapingScheme string
}{
{
name: "blank config implies default",
inputFile: "scrape_config_default_validation_mode",
expectValidationScheme: "utf8",
expectEscapingScheme: "allow-utf-8",
},
{
name: "global setting implies local settings",
inputFile: "scrape_config_global_validation_mode",
expectValidationScheme: "legacy",
expectEscapingScheme: "dots",
},
{
name: "local setting",
inputFile: "scrape_config_local_validation_mode",
expectValidationScheme: "legacy",
expectEscapingScheme: "values",
},
{
name: "local setting overrides global setting",
inputFile: "scrape_config_local_global_validation_mode",
expectValidationScheme: "utf8",
expectEscapingScheme: "dots",
},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
want, err := LoadFile(fmt.Sprintf("testdata/%s.yml", tc.inputFile), false, promslog.NewNopLogger())
require.NoError(t, err)
out, err := yaml.Marshal(want)
require.NoError(t, err)
got := &Config{}
require.NoError(t, yaml.UnmarshalStrict(out, got))
require.Equal(t, tc.expectValidationScheme, got.ScrapeConfigs[0].MetricNameValidationScheme)
require.Equal(t, tc.expectEscapingScheme, got.ScrapeConfigs[0].MetricNameEscapingScheme)
})
}
}
func TestScrapeProtocolHeader(t *testing.T) {
tests := []struct {
name string

View File

@ -1,4 +1,5 @@
global:
metric_name_validation_scheme: legacy
metric_name_escaping_scheme: dots
scrape_configs:
- job_name: prometheus

View File

@ -1,5 +1,7 @@
global:
metric_name_validation_scheme: legacy
metric_name_escaping_scheme: values
scrape_configs:
- job_name: prometheus
metric_name_validation_scheme: utf8
metric_name_escaping_scheme: dots

View File

@ -1,3 +1,4 @@
scrape_configs:
- job_name: prometheus
metric_name_validation_scheme: legacy
metric_name_escaping_scheme: values

View File

@ -0,0 +1,5 @@
global:
metric_name_validation_scheme: legacy
metric_name_escaping_scheme: allow-utf-8
scrape_configs:
- job_name: prometheus

View File

@ -469,6 +469,22 @@ metric_relabel_configs:
# underscores.
[ metric_name_validation_scheme <string> | default "utf8" ]
# Specifies the character escaping scheme that will be requested when scraping
# for metric and label names that do not conform to the legacy Prometheus
# character set. Available options are:
# * `allow-utf-8`: Full UTF-8 support, no escaping needed.
# * `underscores`: Escape all legacy-invalid characters to underscores.
# * `dots`: Escapes dots to `_dot_`, underscores to `__`, and all other
# legacy-invalid characters to underscores.
# * `values`: Prepend the name with `U__` and replace all invalid
# characters with their unicode value, surrounded by underscores. Single
# underscores are replaced with double underscores.
# e.g. "U__my_2e_dotted_2e_name".
# If this value is left blank, Prometheus will default to `allow-utf-8` if the
# validation scheme for the current scrape config is set to utf8, or
# `underscores` if the validation scheme is set to `legacy`.
[ metric_name_escaping_scheme <string> | default "allow-utf-8" ]
# Limit on total number of positive and negative buckets allowed in a single
# native histogram. The resolution of a histogram with more buckets will be
# reduced until the number of buckets is within the limit. If the limit cannot

View File

@ -102,6 +102,9 @@ type scrapePool struct {
scrapeFailureLogger FailureLogger
scrapeFailureLoggerMtx sync.RWMutex
validationScheme model.ValidationScheme
escapingScheme model.EscapingScheme
}
type labelLimits struct {
@ -124,7 +127,6 @@ type scrapeLoopOptions struct {
timeout time.Duration
alwaysScrapeClassicHist bool
convertClassicHistToNHCB bool
validationScheme model.ValidationScheme
fallbackScrapeProtocol string
mrc []*relabel.Config
@ -147,6 +149,16 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
return nil, fmt.Errorf("error creating HTTP client: %w", err)
}
validationScheme, err := config.ToValidationScheme(cfg.MetricNameValidationScheme)
if err != nil {
return nil, fmt.Errorf("invalid metric name validation scheme: %w", err)
}
var escapingScheme model.EscapingScheme
escapingScheme, err = model.ToEscapingScheme(cfg.MetricNameEscapingScheme)
if err != nil {
return nil, fmt.Errorf("invalid metric name escaping scheme, %w", err)
}
ctx, cancel := context.WithCancel(context.Background())
sp := &scrapePool{
cancel: cancel,
@ -160,6 +172,8 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
logger: logger,
metrics: metrics,
httpOpts: options.HTTPClientOptions,
validationScheme: validationScheme,
escapingScheme: escapingScheme,
}
sp.newLoop = func(opts scrapeLoopOptions) loop {
// Update the targets retrieval function for metadata to a new scrape cache.
@ -201,7 +215,8 @@ func newScrapePool(cfg *config.ScrapeConfig, app storage.Appendable, offsetSeed
options.PassMetadataInContext,
metrics,
options.skipOffsetting,
opts.validationScheme,
sp.validationScheme,
sp.escapingScheme,
opts.fallbackScrapeProtocol,
)
}
@ -309,6 +324,17 @@ func (sp *scrapePool) reload(cfg *config.ScrapeConfig) error {
sp.config = cfg
oldClient := sp.client
sp.client = client
validationScheme, err := config.ToValidationScheme(cfg.MetricNameValidationScheme)
if err != nil {
return fmt.Errorf("invalid metric name validation scheme: %w", err)
}
sp.validationScheme = validationScheme
var escapingScheme model.EscapingScheme
escapingScheme, err = model.ToEscapingScheme(cfg.MetricNameEscapingScheme)
if err != nil {
return fmt.Errorf("invalid metric name escaping scheme, %w", err)
}
sp.escapingScheme = escapingScheme
sp.metrics.targetScrapePoolTargetLimit.WithLabelValues(sp.config.JobName).Set(float64(sp.config.TargetLimit))
@ -344,11 +370,6 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
convertClassicHistToNHCB = sp.config.ConvertClassicHistogramsToNHCB
)
validationScheme := model.UTF8Validation
if sp.config.MetricNameValidationScheme == config.LegacyValidationConfig {
validationScheme = model.LegacyValidation
}
sp.targetMtx.Lock()
forcedErr := sp.refreshTargetLimitErr()
@ -369,7 +390,7 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
client: sp.client,
timeout: targetTimeout,
bodySizeLimit: bodySizeLimit,
acceptHeader: acceptHeader(sp.config.ScrapeProtocols, validationScheme),
acceptHeader: acceptHeader(sp.config.ScrapeProtocols, sp.escapingScheme),
acceptEncodingHeader: acceptEncodingHeader(enableCompression),
metrics: sp.metrics,
}
@ -388,7 +409,6 @@ func (sp *scrapePool) restartLoops(reuseCache bool) {
cache: cache,
interval: targetInterval,
timeout: targetTimeout,
validationScheme: validationScheme,
fallbackScrapeProtocol: fallbackScrapeProtocol,
alwaysScrapeClassicHist: alwaysScrapeClassicHist,
convertClassicHistToNHCB: convertClassicHistToNHCB,
@ -506,11 +526,6 @@ func (sp *scrapePool) sync(targets []*Target) {
convertClassicHistToNHCB = sp.config.ConvertClassicHistogramsToNHCB
)
validationScheme := model.UTF8Validation
if sp.config.MetricNameValidationScheme == config.LegacyValidationConfig {
validationScheme = model.LegacyValidation
}
sp.targetMtx.Lock()
for _, t := range targets {
hash := t.hash()
@ -526,7 +541,7 @@ func (sp *scrapePool) sync(targets []*Target) {
client: sp.client,
timeout: timeout,
bodySizeLimit: bodySizeLimit,
acceptHeader: acceptHeader(sp.config.ScrapeProtocols, validationScheme),
acceptHeader: acceptHeader(sp.config.ScrapeProtocols, sp.escapingScheme),
acceptEncodingHeader: acceptEncodingHeader(enableCompression),
metrics: sp.metrics,
}
@ -546,7 +561,6 @@ func (sp *scrapePool) sync(targets []*Target) {
timeout: timeout,
alwaysScrapeClassicHist: alwaysScrapeClassicHist,
convertClassicHistToNHCB: convertClassicHistToNHCB,
validationScheme: validationScheme,
fallbackScrapeProtocol: fallbackScrapeProtocol,
})
if err != nil {
@ -777,13 +791,14 @@ var errBodySizeLimit = errors.New("body size limit exceeded")
// acceptHeader transforms preference from the options into specific header values as
// https://www.rfc-editor.org/rfc/rfc9110.html#name-accept defines.
// No validation is here, we expect scrape protocols to be validated already.
func acceptHeader(sps []config.ScrapeProtocol, scheme model.ValidationScheme) string {
func acceptHeader(sps []config.ScrapeProtocol, scheme model.EscapingScheme) string {
var vals []string
weight := len(config.ScrapeProtocolsHeaders) + 1
for _, sp := range sps {
val := config.ScrapeProtocolsHeaders[sp]
if scheme == model.UTF8Validation {
val += ";" + config.UTF8NamesHeader
// Escaping header is only valid for newer versions of the text formats.
if sp == config.PrometheusText1_0_0 || sp == config.OpenMetricsText1_0_0 {
val += ";" + model.EscapingKey + "=" + scheme.String()
}
val += fmt.Sprintf(";q=0.%d", weight)
vals = append(vals, val)
@ -911,6 +926,7 @@ type scrapeLoop struct {
alwaysScrapeClassicHist bool
convertClassicHistToNHCB bool
validationScheme model.ValidationScheme
escapingScheme model.EscapingScheme
fallbackScrapeProtocol string
// Feature flagged options.
@ -1230,6 +1246,7 @@ func newScrapeLoop(ctx context.Context,
metrics *scrapeMetrics,
skipOffsetting bool,
validationScheme model.ValidationScheme,
escapingScheme model.EscapingScheme,
fallbackScrapeProtocol string,
) *scrapeLoop {
if l == nil {
@ -1284,6 +1301,7 @@ func newScrapeLoop(ctx context.Context,
metrics: metrics,
skipOffsetting: skipOffsetting,
validationScheme: validationScheme,
escapingScheme: escapingScheme,
fallbackScrapeProtocol: fallbackScrapeProtocol,
}
sl.ctx, sl.cancel = context.WithCancel(ctx)

View File

@ -83,9 +83,13 @@ func newTestScrapeMetrics(t testing.TB) *scrapeMetrics {
func TestNewScrapePool(t *testing.T) {
var (
app = &nopAppendable{}
cfg = &config.ScrapeConfig{}
sp, _ = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
cfg = &config.ScrapeConfig{
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
sp, err = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
)
require.NoError(t, err)
a, ok := sp.appendable.(*nopAppendable)
require.True(t, ok, "Failure to append.")
@ -323,6 +327,8 @@ func TestDroppedTargetsList(t *testing.T) {
cfg = &config.ScrapeConfig{
JobName: "dropMe",
ScrapeInterval: model.Duration(1),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
RelabelConfigs: []*relabel.Config{
{
Action: relabel.Drop,
@ -368,6 +374,8 @@ func TestDiscoveredLabelsUpdate(t *testing.T) {
sp.config = &config.ScrapeConfig{
ScrapeInterval: model.Duration(1),
ScrapeTimeout: model.Duration(1),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
sp.activeTargets = make(map[uint64]*Target)
t1 := &Target{
@ -498,6 +506,8 @@ func TestScrapePoolReload(t *testing.T) {
reloadCfg := &config.ScrapeConfig{
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
// On starting to run, new loops created on reload check whether their preceding
// equivalents have been stopped.
@ -590,6 +600,8 @@ func TestScrapePoolReloadPreserveRelabeledIntervalTimeout(t *testing.T) {
reloadCfg := &config.ScrapeConfig{
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
newLoop := func(opts scrapeLoopOptions) loop {
l := &testLoop{interval: opts.interval, timeout: opts.timeout}
@ -689,6 +701,8 @@ func TestScrapePoolTargetLimit(t *testing.T) {
require.NoError(t, sp.reload(&config.ScrapeConfig{
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
TargetLimit: l,
}))
}
@ -776,7 +790,10 @@ func TestScrapePoolTargetLimit(t *testing.T) {
}
func TestScrapePoolAppender(t *testing.T) {
cfg := &config.ScrapeConfig{}
cfg := &config.ScrapeConfig{
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
app := &nopAppendable{}
sp, _ := newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
@ -849,7 +866,12 @@ func TestScrapePoolRaces(t *testing.T) {
interval, _ := model.ParseDuration("1s")
timeout, _ := model.ParseDuration("500ms")
newConfig := func() *config.ScrapeConfig {
return &config.ScrapeConfig{ScrapeInterval: interval, ScrapeTimeout: timeout}
return &config.ScrapeConfig{
ScrapeInterval: interval,
ScrapeTimeout: timeout,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
}
sp, _ := newScrapePool(newConfig(), &nopAppendable{}, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
tgts := []*targetgroup.Group{
@ -921,6 +943,8 @@ func TestScrapePoolScrapeLoopsStarted(t *testing.T) {
require.NoError(t, sp.reload(&config.ScrapeConfig{
ScrapeInterval: model.Duration(3 * time.Second),
ScrapeTimeout: model.Duration(2 * time.Second),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}))
sp.Sync(tgs)
@ -963,7 +987,8 @@ func newBasicScrapeLoopWithFallback(t testing.TB, ctx context.Context, scraper s
false,
newTestScrapeMetrics(t),
false,
model.LegacyValidation,
model.UTF8Validation,
model.NoEscaping,
fallback,
)
}
@ -1109,7 +1134,8 @@ func TestScrapeLoopRun(t *testing.T) {
false,
scrapeMetrics,
false,
model.LegacyValidation,
model.UTF8Validation,
model.NoEscaping,
"",
)
@ -1256,7 +1282,8 @@ func TestScrapeLoopMetadata(t *testing.T) {
false,
scrapeMetrics,
false,
model.LegacyValidation,
model.UTF8Validation,
model.NoEscaping,
"",
)
defer cancel()
@ -3041,6 +3068,47 @@ func TestScrapeLoopOutOfBoundsTimeError(t *testing.T) {
require.Equal(t, 0, seriesAdded)
}
func TestAcceptHeader(t *testing.T) {
tests := []struct {
name string
scrapeProtocols []config.ScrapeProtocol
scheme model.EscapingScheme
expectedHeader string
}{
{
name: "default scrape protocols with underscore escaping",
scrapeProtocols: config.DefaultScrapeProtocols,
scheme: model.UnderscoreEscaping,
expectedHeader: "application/openmetrics-text;version=1.0.0;escaping=underscores;q=0.6,application/openmetrics-text;version=0.0.1;q=0.5,text/plain;version=1.0.0;escaping=underscores;q=0.4,text/plain;version=0.0.4;q=0.3,*/*;q=0.2",
},
{
name: "default proto first scrape protocols with underscore escaping",
scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
scheme: model.DotsEscaping,
expectedHeader: "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.6,application/openmetrics-text;version=1.0.0;escaping=dots;q=0.5,application/openmetrics-text;version=0.0.1;q=0.4,text/plain;version=1.0.0;escaping=dots;q=0.3,text/plain;version=0.0.4;q=0.2,*/*;q=0.1",
},
{
name: "default scrape protocols with no escaping",
scrapeProtocols: config.DefaultScrapeProtocols,
scheme: model.NoEscaping,
expectedHeader: "application/openmetrics-text;version=1.0.0;escaping=allow-utf-8;q=0.6,application/openmetrics-text;version=0.0.1;q=0.5,text/plain;version=1.0.0;escaping=allow-utf-8;q=0.4,text/plain;version=0.0.4;q=0.3,*/*;q=0.2",
},
{
name: "default proto first scrape protocols with no escaping",
scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
scheme: model.NoEscaping,
expectedHeader: "application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.6,application/openmetrics-text;version=1.0.0;escaping=allow-utf-8;q=0.5,application/openmetrics-text;version=0.0.1;q=0.4,text/plain;version=1.0.0;escaping=allow-utf-8;q=0.3,text/plain;version=0.0.4;q=0.2,*/*;q=0.1",
},
}
for _, tc := range tests {
t.Run(tc.name, func(t *testing.T) {
header := acceptHeader(tc.scrapeProtocols, tc.scheme)
require.Equal(t, tc.expectedHeader, header)
})
}
}
func TestTargetScraperScrapeOK(t *testing.T) {
const (
configTimeout = 1500 * time.Millisecond
@ -3124,31 +3192,31 @@ func TestTargetScraperScrapeOK(t *testing.T) {
for _, tc := range []struct {
scrapeProtocols []config.ScrapeProtocol
scheme model.ValidationScheme
scheme model.EscapingScheme
protobufParsing bool
allowUTF8 bool
}{
{
scrapeProtocols: config.DefaultScrapeProtocols,
scheme: model.LegacyValidation,
scheme: model.UnderscoreEscaping,
protobufParsing: false,
allowUTF8: false,
},
{
scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
scheme: model.LegacyValidation,
scheme: model.UnderscoreEscaping,
protobufParsing: true,
allowUTF8: false,
},
{
scrapeProtocols: config.DefaultScrapeProtocols,
scheme: model.UTF8Validation,
scheme: model.NoEscaping,
protobufParsing: false,
allowUTF8: true,
},
{
scrapeProtocols: config.DefaultProtoFirstScrapeProtocols,
scheme: model.UTF8Validation,
scheme: model.NoEscaping,
protobufParsing: true,
allowUTF8: true,
},
@ -3185,7 +3253,7 @@ func TestTargetScrapeScrapeCancel(t *testing.T) {
scrapeConfig: &config.ScrapeConfig{},
},
client: http.DefaultClient,
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping),
}
ctx, cancel := context.WithCancel(context.Background())
@ -3241,7 +3309,7 @@ func TestTargetScrapeScrapeNotFound(t *testing.T) {
scrapeConfig: &config.ScrapeConfig{},
},
client: http.DefaultClient,
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping),
}
resp, err := ts.scrape(context.Background())
@ -3286,7 +3354,7 @@ func TestTargetScraperBodySizeLimit(t *testing.T) {
},
client: http.DefaultClient,
bodySizeLimit: bodySizeLimit,
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.LegacyValidation),
acceptHeader: acceptHeader(config.DefaultGlobalConfig.ScrapeProtocols, model.UnderscoreEscaping),
metrics: newTestScrapeMetrics(t),
}
var buf bytes.Buffer
@ -3550,11 +3618,16 @@ func TestReuseScrapeCache(t *testing.T) {
ScrapeTimeout: model.Duration(5 * time.Second),
ScrapeInterval: model.Duration(5 * time.Second),
MetricsPath: "/metrics",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
sp, _ = newScrapePool(cfg, app, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
t1 = &Target{
labels: labels.FromStrings("labelNew", "nameNew", "labelNew1", "nameNew1", "labelNew2", "nameNew2"),
scrapeConfig: &config.ScrapeConfig{},
scrapeConfig: &config.ScrapeConfig{
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
}
proxyURL, _ = url.Parse("http://localhost:2128")
)
@ -3572,6 +3645,8 @@ func TestReuseScrapeCache(t *testing.T) {
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(5 * time.Second),
MetricsPath: "/metrics",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
@ -3581,6 +3656,8 @@ func TestReuseScrapeCache(t *testing.T) {
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
@ -3591,6 +3668,8 @@ func TestReuseScrapeCache(t *testing.T) {
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
@ -3602,6 +3681,8 @@ func TestReuseScrapeCache(t *testing.T) {
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
@ -3616,6 +3697,8 @@ func TestReuseScrapeCache(t *testing.T) {
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
@ -3628,6 +3711,8 @@ func TestReuseScrapeCache(t *testing.T) {
ScrapeInterval: model.Duration(5 * time.Second),
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics2",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
@ -3638,6 +3723,8 @@ func TestReuseScrapeCache(t *testing.T) {
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics",
LabelLimit: 1,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
@ -3648,6 +3735,8 @@ func TestReuseScrapeCache(t *testing.T) {
ScrapeTimeout: model.Duration(15 * time.Second),
MetricsPath: "/metrics",
LabelLimit: 15,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
@ -3659,6 +3748,8 @@ func TestReuseScrapeCache(t *testing.T) {
MetricsPath: "/metrics",
LabelLimit: 15,
LabelNameLengthLimit: 5,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
{
@ -3671,6 +3762,8 @@ func TestReuseScrapeCache(t *testing.T) {
LabelLimit: 15,
LabelNameLengthLimit: 5,
LabelValueLengthLimit: 7,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
},
}
@ -3734,6 +3827,8 @@ func TestReuseCacheRace(t *testing.T) {
ScrapeTimeout: model.Duration(5 * time.Second),
ScrapeInterval: model.Duration(5 * time.Second),
MetricsPath: "/metrics",
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
buffers = pool.New(1e3, 100e6, 3, func(sz int) interface{} { return make([]byte, 0, sz) })
sp, _ = newScrapePool(cfg, app, 0, nil, buffers, &Options{}, newTestScrapeMetrics(t))
@ -3756,6 +3851,8 @@ func TestReuseCacheRace(t *testing.T) {
ScrapeInterval: model.Duration(1 * time.Millisecond),
MetricsPath: "/metrics",
SampleLimit: i,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
})
}
}
@ -3832,6 +3929,8 @@ func TestScrapeReportLimit(t *testing.T) {
Scheme: "http",
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
ts, scrapedTwice := newScrapableServer("metric_a 44\nmetric_b 44\nmetric_c 44\nmetric_d 44\n")
@ -3887,6 +3986,7 @@ func TestScrapeUTF8(t *testing.T) {
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
ts, scrapedTwice := newScrapableServer("{\"with.dots\"} 42\n")
defer ts.Close()
@ -4021,6 +4121,8 @@ func TestTargetScrapeIntervalAndTimeoutRelabel(t *testing.T) {
config := &config.ScrapeConfig{
ScrapeInterval: interval,
ScrapeTimeout: timeout,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
RelabelConfigs: []*relabel.Config{
{
SourceLabels: model.LabelNames{model.ScrapeIntervalLabel},
@ -4081,6 +4183,8 @@ func TestLeQuantileReLabel(t *testing.T) {
Scheme: "http",
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
metricsText := `
@ -4586,6 +4690,8 @@ metric: <
ScrapeTimeout: model.Duration(25 * time.Millisecond),
AlwaysScrapeClassicHistograms: tc.alwaysScrapeClassicHistograms,
ConvertClassicHistogramsToNHCB: tc.convertClassicHistToNHCB,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
scrapeCount := 0
@ -4768,6 +4874,8 @@ func TestScrapeLoopCompression(t *testing.T) {
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
EnableCompression: tc.enableCompression,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
}
sp, err := newScrapePool(config, simpleStorage, 0, nil, nil, &Options{}, newTestScrapeMetrics(t))
@ -4922,6 +5030,8 @@ func BenchmarkTargetScraperGzip(b *testing.B) {
model.AddressLabel, serverURL.Host,
),
scrapeConfig: &config.ScrapeConfig{
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
Params: url.Values{"count": []string{strconv.Itoa(scenario.metricsCount)}},
},
},
@ -5176,6 +5286,8 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) {
JobName: jobName,
Scheme: httpScheme,
MetricsPath: expectedPath,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
},
targets: []*targetgroup.Group{
{
@ -5193,6 +5305,8 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) {
JobName: jobName,
Scheme: httpScheme,
MetricsPath: secondPath,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
Params: url.Values{"param": []string{secondParam}},
},
targets: []*targetgroup.Group{
@ -5216,6 +5330,8 @@ func TestTargetScrapeConfigWithLabels(t *testing.T) {
JobName: jobName,
Scheme: httpScheme,
MetricsPath: secondPath,
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
Params: url.Values{"param": []string{secondParam}},
RelabelConfigs: []*relabel.Config{
{
@ -5295,6 +5411,8 @@ func TestScrapePoolScrapeAfterReload(t *testing.T) {
Scheme: "http",
ScrapeInterval: model.Duration(100 * time.Millisecond),
ScrapeTimeout: model.Duration(100 * time.Millisecond),
MetricNameValidationScheme: config.UTF8ValidationConfig,
MetricNameEscapingScheme: model.AllowUTF8,
EnableCompression: false,
ServiceDiscoveryConfigs: discovery.Configs{
&discovery.StaticConfig{