diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f280e9ad78..cbbef8b699 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,7 +11,7 @@ jobs: container: # Whenever the Go version is updated here, .promu.yml # should also be updated. - image: quay.io/prometheus/golang-builder:1.23-base + image: quay.io/prometheus/golang-builder:1.24-base steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - uses: prometheus/promci@443c7fc2397e946bc9f5029e313a9c3441b9b86d # v0.4.7 @@ -27,7 +27,7 @@ jobs: name: More Go tests runs-on: ubuntu-latest container: - image: quay.io/prometheus/golang-builder:1.23-base + image: quay.io/prometheus/golang-builder:1.24-base steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - uses: prometheus/promci@443c7fc2397e946bc9f5029e313a9c3441b9b86d # v0.4.7 @@ -59,7 +59,7 @@ jobs: # Whenever the Go version is updated here, .promu.yml # should also be updated. container: - image: quay.io/prometheus/golang-builder:1.23-base + image: quay.io/prometheus/golang-builder:1.24-base steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 @@ -94,7 +94,7 @@ jobs: # Whenever the Go version is updated here, .promu.yml # should also be updated. container: - image: quay.io/prometheus/golang-builder:1.23-base + image: quay.io/prometheus/golang-builder:1.24-base steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - run: go install ./cmd/promtool/. diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index e8a5c1bab4..a67dea0320 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -27,12 +27,12 @@ jobs: uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - name: Initialize CodeQL - uses: github/codeql-action/init@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 + uses: github/codeql-action/init@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3.28.13 with: languages: ${{ matrix.language }} - name: Autobuild - uses: github/codeql-action/autobuild@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 + uses: github/codeql-action/autobuild@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3.28.13 - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@5f8171a638ada777af81d42b55959a643bb29017 # v3.28.12 + uses: github/codeql-action/analyze@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # v3.28.13 diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml index 5c8e32772f..75db65efe0 100644 --- a/.github/workflows/scorecards.yml +++ b/.github/workflows/scorecards.yml @@ -45,6 +45,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@5f8171a638ada777af81d42b55959a643bb29017 # tag=v3.28.12 + uses: github/codeql-action/upload-sarif@1b549b9259bda1cb5ddde3b41741a82a2d15a841 # tag=v3.28.13 with: sarif_file: results.sarif diff --git a/CHANGELOG.md b/CHANGELOG.md index a9072422a2..7354cc3956 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,9 +4,44 @@ * [CHANGE] Make setting out-of-order native histograms feature (`--enable-feature=ooo-native-histograms`) a no-op. Out-of-order native histograms are now always enabled when `out_of_order_time_window` is greater than zero and `--enable-feature=native-histograms` is set. 
#16207 * [FEATURE] OTLP translate: Add feature flag for optionally translating OTel explicit bucket histograms into native histograms with custom buckets. #15850 +* [FEATURE] OTLP translate: Add option to receive OTLP metrics without translating names or attributes. #16441 * [ENHANCEMENT] TSDB: add `prometheus_tsdb_wal_replay_unknown_refs_total` and `prometheus_tsdb_wbl_replay_unknown_refs_total` metrics to track unknown series references during WAL/WBL replay. #16166 * [BUGFIX] TSDB: fix unknown series errors and possible lost data during WAL replay when series are removed from the head due to inactivity and reappear before the next WAL checkpoint. #16060 +## 3.3.0 / 2025-04-15 + +* [FEATURE] PromQL: Implement `idelta()` and `irate()` for native histograms. #15853 +* [ENHANCEMENT] Scaleway SD: Add `__meta_scaleway_instance_public_ipv4_addresses` and `__meta_scaleway_instance_public_ipv6_addresses` labels. #14228 +* [ENHANCEMENT] TSDB: Reduce locking while reloading blocks. #12920 +* [ENHANCEMENT] PromQL: Allow UTF-8 labels in `label_replace()`. #15974 +* [ENHANCEMENT] Promtool: `tsdb create-blocks-from openmetrics` can now read from a Pipe. #16011 +* [ENHANCEMENT] Rules: Add support for anchors and aliases in rule files. #14957 +* [ENHANCEMENT] Dockerfile: Make `/prometheus` writable. #16073 +* [ENHANCEMENT] API: Include scrape pool name for dropped targets in `/api/v1/targets`. #16085 +* [ENHANCEMENT] UI: Improve time formatting and copying of selectors. #15999 #16165 +* [ENHANCEMENT] UI: Bring back vertical grid lines and graph legend series toggling instructions. #16163 #16164 +* [ENHANCEMENT] Mixin: The `cluster` label can be customized using `clusterLabel`. #15826 +* [PERF] TSDB: Optimize some operations on head chunks by taking shortcuts. #12659 +* [PERF] TSDB & Agent: Reduce memory footprint during WL replay. #15778 +* [PERF] Remote-Write: Reduce memory footprint during WAL replay. #16197 +* [PERF] API: Reduce memory footprint during header parsing. #16001 +* [PERF] Rules: Improve dependency evaluation, enabling better concurrency. #16039 +* [PERF] Scraping: Improve scraping performance for native histograms. #15731 +* [PERF] Scraping: Improve parsing of created timestamps. #16072 +* [BUGFIX] Scraping: Bump cache iteration after error to avoid false duplicate detections. #16174 +* [BUGFIX] Scraping: Skip native histograms series when ingestion is disabled. #16218 +* [BUGFIX] PromQL: Fix counter reset detection for native histograms. #15902 #15987 +* [BUGFIX] PromQL: Fix inconsistent behavior with an empty range. #15970 +* [BUGFIX] PromQL: Fix inconsistent annotation in `quantile_over_time()`. #16018 +* [BUGFIX] PromQL: Prevent `label_join()` from producing duplicates. #15975 +* [BUGFIX] PromQL: Ignore native histograms in `scalar()`, `sort()` and `sort_desc()`. #15964 +* [BUGFIX] PromQL: Fix annotations for binary operations between incompatible native histograms. #15895 +* [BUGFIX] Alerting: Consider alert relabeling when deciding whether alerts are dropped. #15979 +* [BUGFIX] Config: Set `GoGC` to the default value in case of an empty configuration. #16052 +* [BUGFIX] TSDB: Fix unknown series errors and potential data loss during WAL replay when inactive series are removed from the head and reappear before the next WAL checkpoint. #16060 +* [BUGFIX] Scaleway SD: The public IP will no longer be set to `__meta_meta_scaleway_instance_public_ipv4` if it is an IPv6 address. #14228 +* [BUGFIX] UI: Display the correct value of Alerting rules' `keep_firing_for`. 
#16211 + ## 3.2.1 / 2025-02-25 * [BUGFIX] Don't send Accept` header `escape=allow-utf-8` when `metric_name_validation_scheme: legacy` is configured. #16061 diff --git a/MAINTAINERS.md b/MAINTAINERS.md index de3f3c73b7..e3312b3129 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -9,6 +9,7 @@ Maintainers for specific parts of the codebase: * `cmd` * `promtool`: David Leadbeater ( / @dgl) * `discovery` + * `azure`: Jan-Otto Kröpke ( / @jkroepke) * `k8s`: Frederic Branczyk ( / @brancz) * `documentation` * `prometheus-mixin`: Matthias Loibl ( / @metalmatze) diff --git a/RELEASE.md b/RELEASE.md index fa4c193a52..34890c0d29 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -7,13 +7,15 @@ This page describes the release process and the currently planned schedule for u Release cadence of first pre-releases being cut is 6 weeks. Please see [the v2.55 RELEASE.md](https://github.com/prometheus/prometheus/blob/release-2.55/RELEASE.md) for the v2 release series schedule. -| release series | date of first pre-release (year-month-day) | release shepherd | -|----------------|--------------------------------------------|-----------------------------------| -| v3.0 | 2024-11-14 | Jan Fajerski (GitHub: @jan--f) | -| v3.1 | 2024-12-17 | Bryan Boreham (GitHub: @bboreham) | -| v3.2 | 2025-01-28 | Jan Fajerski (GitHub: @jan--f) | -| v3.3 | 2025-03-11 | Ayoub Mrini (Github: @machine424) | -| v3.4 | 2025-04-22 | **volunteer welcome** | +| release series | date of first pre-release (year-month-day) | release shepherd | +|----------------|--------------------------------------------|------------------------------------| +| v3.0 | 2024-11-14 | Jan Fajerski (GitHub: @jan--f) | +| v3.1 | 2024-12-17 | Bryan Boreham (GitHub: @bboreham) | +| v3.2 | 2025-01-28 | Jan Fajerski (GitHub: @jan--f) | +| v3.3 | 2025-03-11 | Ayoub Mrini (Github: @machine424) | +| v3.4 | 2025-04-22 | Jan-Otto Kröpke (Github: @jkroepke)| +| v3.5 LTS | 2025-06-03 | Bryan Boreham (GitHub: @bboreham) | +| v3.6 | 2025-07-15 | **volunteer welcome** | If you are interested in volunteering please create a pull request against the [prometheus/prometheus](https://github.com/prometheus/prometheus) repository and propose yourself for the release series of your choice. diff --git a/VERSION b/VERSION index e4604e3afd..15a2799817 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -3.2.1 +3.3.0 diff --git a/cmd/prometheus/main.go b/cmd/prometheus/main.go index e513926da6..0e547deaf9 100644 --- a/cmd/prometheus/main.go +++ b/cmd/prometheus/main.go @@ -249,6 +249,9 @@ func (c *flagConfig) setFeatureListOptions(logger *slog.Logger) error { case "promql-experimental-functions": parser.EnableExperimentalFunctions = true logger.Info("Experimental PromQL functions enabled.") + case "promql-duration-expr": + parser.ExperimentalDurationExpr = true + logger.Info("Experimental duration expression parsing enabled.") case "native-histograms": c.tsdb.EnableNativeHistograms = true c.scrape.EnableNativeHistogramsIngestion = true @@ -279,12 +282,23 @@ func (c *flagConfig) setFeatureListOptions(logger *slog.Logger) error { case "otlp-deltatocumulative": c.web.ConvertOTLPDelta = true logger.Info("Converting delta OTLP metrics to cumulative") + case "otlp-native-delta-ingestion": + // Experimental OTLP native delta ingestion. + // This currently just stores the raw delta value as-is with unknown metric type. Better typing and + // type-aware functions may come later. 
+ // See proposal: https://github.com/prometheus/proposals/pull/48 + c.web.NativeOTLPDeltaIngestion = true + logger.Info("Enabling native ingestion of delta OTLP metrics, storing the raw sample values without conversion. WARNING: Delta support is in an early stage of development. The ingestion and querying process is likely to change over time.") default: logger.Warn("Unknown option for --enable-feature", "option", o) } } } + if c.web.ConvertOTLPDelta && c.web.NativeOTLPDeltaIngestion { + return errors.New("cannot enable otlp-deltatocumulative and otlp-native-delta-ingestion features at the same time") + } + return nil } @@ -539,7 +553,7 @@ func main() { a.Flag("scrape.discovery-reload-interval", "Interval used by scrape manager to throttle target groups updates."). Hidden().Default("5s").SetValue(&cfg.scrape.DiscoveryReloadInterval) - a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-per-step-stats, promql-experimental-functions, extra-scrape-metrics, auto-gomaxprocs, native-histograms, created-timestamp-zero-ingestion, concurrent-rule-eval, delayed-compaction, old-ui, otlp-deltatocumulative. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details."). + a.Flag("enable-feature", "Comma separated feature names to enable. Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-per-step-stats, promql-experimental-functions, extra-scrape-metrics, auto-gomaxprocs, native-histograms, created-timestamp-zero-ingestion, concurrent-rule-eval, delayed-compaction, old-ui, otlp-deltatocumulative, promql-duration-expr. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details."). Default("").StringsVar(&cfg.featureList) a.Flag("agent", "Run Prometheus in 'Agent mode'.").BoolVar(&agentMode) @@ -637,6 +651,32 @@ func main() { cfg.tsdb.OutOfOrderTimeWindow = cfgFile.StorageConfig.TSDBConfig.OutOfOrderTimeWindow } + // Set Go runtime parameters before we get too far into initialization. + updateGoGC(cfgFile, logger) + if cfg.maxprocsEnable { + l := func(format string, a ...interface{}) { + logger.Info(fmt.Sprintf(strings.TrimPrefix(format, "maxprocs: "), a...), "component", "automaxprocs") + } + if _, err := maxprocs.Set(maxprocs.Logger(l)); err != nil { + logger.Warn("Failed to set GOMAXPROCS automatically", "component", "automaxprocs", "err", err) + } + } + + if cfg.memlimitEnable { + if _, err := memlimit.SetGoMemLimitWithOpts( + memlimit.WithRatio(cfg.memlimitRatio), + memlimit.WithProvider( + memlimit.ApplyFallback( + memlimit.FromCgroup, + memlimit.FromSystem, + ), + ), + memlimit.WithLogger(logger.With("component", "automemlimit")), + ); err != nil { + logger.Warn("automemlimit", "msg", "Failed to set GOMEMLIMIT automatically", "err", err) + } + } + // Now that the validity of the config is established, set the config // success metrics accordingly, although the config isn't really loaded // yet. 
This will happen later (including setting these metrics again), @@ -787,29 +827,6 @@ func main() { ruleManager *rules.Manager ) - if cfg.maxprocsEnable { - l := func(format string, a ...interface{}) { - logger.Info(fmt.Sprintf(strings.TrimPrefix(format, "maxprocs: "), a...), "component", "automaxprocs") - } - if _, err := maxprocs.Set(maxprocs.Logger(l)); err != nil { - logger.Warn("Failed to set GOMAXPROCS automatically", "component", "automaxprocs", "err", err) - } - } - - if cfg.memlimitEnable { - if _, err := memlimit.SetGoMemLimitWithOpts( - memlimit.WithRatio(cfg.memlimitRatio), - memlimit.WithProvider( - memlimit.ApplyFallback( - memlimit.FromCgroup, - memlimit.FromSystem, - ), - ), - ); err != nil { - logger.Warn("automemlimit", "msg", "Failed to set GOMEMLIMIT automatically", "err", err) - } - } - if !agentMode { opts := promql.EngineOpts{ Logger: logger.With("component", "query engine"), @@ -1495,6 +1512,14 @@ func reloadConfig(filename string, enableExemplarStorage bool, logger *slog.Logg return fmt.Errorf("one or more errors occurred while applying the new configuration (--config.file=%q)", filename) } + updateGoGC(conf, logger) + + noStepSuqueryInterval.Set(conf.GlobalConfig.EvaluationInterval) + timingsLogger.Info("Completed loading of configuration file", "filename", filename, "totalDuration", time.Since(start)) + return nil +} + +func updateGoGC(conf *config.Config, logger *slog.Logger) { oldGoGC := debug.SetGCPercent(conf.Runtime.GoGC) if oldGoGC != conf.Runtime.GoGC { logger.Info("updated GOGC", "old", oldGoGC, "new", conf.Runtime.GoGC) @@ -1505,10 +1530,6 @@ func reloadConfig(filename string, enableExemplarStorage bool, logger *slog.Logg } else { os.Setenv("GOGC", "off") } - - noStepSuqueryInterval.Set(conf.GlobalConfig.EvaluationInterval) - timingsLogger.Info("Completed loading of configuration file", "filename", filename, "totalDuration", time.Since(start)) - return nil } func startsOrEndsWithQuote(s string) bool { diff --git a/cmd/promtool/main.go b/cmd/promtool/main.go index 5fa4de44ed..c6a5801d28 100644 --- a/cmd/promtool/main.go +++ b/cmd/promtool/main.go @@ -61,6 +61,8 @@ import ( "github.com/prometheus/prometheus/util/documentcli" ) +var promqlEnableDelayedNameRemoval = false + func init() { // This can be removed when the legacy global mode is fully deprecated. //nolint:staticcheck @@ -255,15 +257,15 @@ func main() { tsdbDumpCmd := tsdbCmd.Command("dump", "Dump samples from a TSDB.") dumpPath := tsdbDumpCmd.Arg("db path", "Database path (default is "+defaultDBPath+").").Default(defaultDBPath).String() dumpSandboxDirRoot := tsdbDumpCmd.Flag("sandbox-dir-root", "Root directory where a sandbox directory will be created, this sandbox is used in case WAL replay generates chunks (default is the database path). The sandbox is cleaned up at the end.").String() - dumpMinTime := tsdbDumpCmd.Flag("min-time", "Minimum timestamp to dump.").Default(strconv.FormatInt(math.MinInt64, 10)).Int64() - dumpMaxTime := tsdbDumpCmd.Flag("max-time", "Maximum timestamp to dump.").Default(strconv.FormatInt(math.MaxInt64, 10)).Int64() + dumpMinTime := tsdbDumpCmd.Flag("min-time", "Minimum timestamp to dump, in milliseconds since the Unix epoch.").Default(strconv.FormatInt(math.MinInt64, 10)).Int64() + dumpMaxTime := tsdbDumpCmd.Flag("max-time", "Maximum timestamp to dump, in milliseconds since the Unix epoch.").Default(strconv.FormatInt(math.MaxInt64, 10)).Int64() dumpMatch := tsdbDumpCmd.Flag("match", "Series selector. 
Can be specified multiple times.").Default("{__name__=~'(?s:.*)'}").Strings() tsdbDumpOpenMetricsCmd := tsdbCmd.Command("dump-openmetrics", "[Experimental] Dump samples from a TSDB into OpenMetrics text format, excluding native histograms and staleness markers, which are not representable in OpenMetrics.") dumpOpenMetricsPath := tsdbDumpOpenMetricsCmd.Arg("db path", "Database path (default is "+defaultDBPath+").").Default(defaultDBPath).String() dumpOpenMetricsSandboxDirRoot := tsdbDumpOpenMetricsCmd.Flag("sandbox-dir-root", "Root directory where a sandbox directory will be created, this sandbox is used in case WAL replay generates chunks (default is the database path). The sandbox is cleaned up at the end.").String() - dumpOpenMetricsMinTime := tsdbDumpOpenMetricsCmd.Flag("min-time", "Minimum timestamp to dump.").Default(strconv.FormatInt(math.MinInt64, 10)).Int64() - dumpOpenMetricsMaxTime := tsdbDumpOpenMetricsCmd.Flag("max-time", "Maximum timestamp to dump.").Default(strconv.FormatInt(math.MaxInt64, 10)).Int64() + dumpOpenMetricsMinTime := tsdbDumpOpenMetricsCmd.Flag("min-time", "Minimum timestamp to dump, in milliseconds since the Unix epoch.").Default(strconv.FormatInt(math.MinInt64, 10)).Int64() + dumpOpenMetricsMaxTime := tsdbDumpOpenMetricsCmd.Flag("max-time", "Maximum timestamp to dump, in milliseconds since the Unix epoch.").Default(strconv.FormatInt(math.MaxInt64, 10)).Int64() dumpOpenMetricsMatch := tsdbDumpOpenMetricsCmd.Flag("match", "Series selector. Can be specified multiple times.").Default("{__name__=~'(?s:.*)'}").Strings() importCmd := tsdbCmd.Command("create-blocks-from", "[Experimental] Import samples from input and produce TSDB blocks. Please refer to the storage docs for more details.") @@ -304,7 +306,7 @@ func main() { promQLLabelsDeleteQuery := promQLLabelsDeleteCmd.Arg("query", "PromQL query.").Required().String() promQLLabelsDeleteName := promQLLabelsDeleteCmd.Arg("name", "Name of the label to delete.").Required().String() - featureList := app.Flag("enable-feature", "Comma separated feature names to enable. Currently unused.").Default("").Strings() + featureList := app.Flag("enable-feature", "Comma separated feature names to enable. Valid options: promql-experimental-functions, promql-delayed-name-removal. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details").Default("").Strings() documentationCmd := app.Command("write-documentation", "Generate command line documentation. 
Internal use.").Hidden() @@ -338,10 +340,14 @@ func main() { opts := strings.Split(f, ",") for _, o := range opts { switch o { + case "promql-experimental-functions": + parser.EnableExperimentalFunctions = true + case "promql-delayed-name-removal": + promqlEnableDelayedNameRemoval = true case "": continue default: - fmt.Printf(" WARNING: --enable-feature is currently a no-op") + fmt.Printf(" WARNING: Unknown feature passed to --enable-feature: %s", o) } } } @@ -399,8 +405,9 @@ func main() { } os.Exit(RulesUnitTestResult(results, promqltest.LazyLoaderOpts{ - EnableAtModifier: true, - EnableNegativeOffset: true, + EnableAtModifier: true, + EnableNegativeOffset: true, + EnableDelayedNameRemoval: promqlEnableDelayedNameRemoval, }, *testRulesRun, *testRulesDiff, diff --git a/cmd/promtool/main_test.go b/cmd/promtool/main_test.go index d3804da5e2..d1390f0d67 100644 --- a/cmd/promtool/main_test.go +++ b/cmd/promtool/main_test.go @@ -558,6 +558,16 @@ func TestCheckRules(t *testing.T) { }) } +func TestCheckRulesWithFeatureFlag(t *testing.T) { + // As opposed to TestCheckRules calling CheckRules directly we run promtool + // so the feature flag parsing can be tested. + + args := []string{"-test.main", "--enable-feature=promql-experimental-functions", "check", "rules", "testdata/features.yml"} + tool := exec.Command(promtoolPath, args...) + err := tool.Run() + require.NoError(t, err) +} + func TestCheckRulesWithRuleFiles(t *testing.T) { t.Run("rules-good", func(t *testing.T) { t.Parallel() diff --git a/cmd/promtool/testdata/features.yml b/cmd/promtool/testdata/features.yml new file mode 100644 index 0000000000..769f8362bf --- /dev/null +++ b/cmd/promtool/testdata/features.yml @@ -0,0 +1,6 @@ +groups: + - name: features + rules: + - record: x + # We don't expect anything from this, just want to check the function parses. + expr: sort_by_label(up, "instance") diff --git a/cmd/promtool/testdata/rules_run_fuzzy.yml b/cmd/promtool/testdata/rules_run_fuzzy.yml new file mode 100644 index 0000000000..3bf4e47a45 --- /dev/null +++ b/cmd/promtool/testdata/rules_run_fuzzy.yml @@ -0,0 +1,43 @@ +# Minimal test case to see that fuzzy compare is working as expected. +# It should allow slight floating point differences through. Larger +# floating point differences should still fail. + +evaluation_interval: 1m +fuzzy_compare: true + +tests: + - name: correct fuzzy match + input_series: + - series: test_low + values: 2.9999999999999996 + - series: test_high + values: 3.0000000000000004 + promql_expr_test: + - expr: test_low + eval_time: 0 + exp_samples: + - labels: test_low + value: 3 + - expr: test_high + eval_time: 0 + exp_samples: + - labels: test_high + value: 3 + + - name: wrong fuzzy match + input_series: + - series: test_low + values: 2.9999999999999987 + - series: test_high + values: 3.0000000000000013 + promql_expr_test: + - expr: test_low + eval_time: 0 + exp_samples: + - labels: test_low + value: 3 + - expr: test_high + eval_time: 0 + exp_samples: + - labels: test_high + value: 3 diff --git a/cmd/promtool/testdata/rules_run_no_fuzzy.yml b/cmd/promtool/testdata/rules_run_no_fuzzy.yml new file mode 100644 index 0000000000..eba201a28c --- /dev/null +++ b/cmd/promtool/testdata/rules_run_no_fuzzy.yml @@ -0,0 +1,24 @@ +# Minimal test case to see that fuzzy compare can be turned off, +# and slight floating point differences fail matching. 
+ +evaluation_interval: 1m +fuzzy_compare: false + +tests: + - name: correct fuzzy match + input_series: + - series: test_low + values: 2.9999999999999996 + - series: test_high + values: 3.0000000000000004 + promql_expr_test: + - expr: test_low + eval_time: 0 + exp_samples: + - labels: test_low + value: 3 + - expr: test_high + eval_time: 0 + exp_samples: + - labels: test_high + value: 3 diff --git a/cmd/promtool/unittest.go b/cmd/promtool/unittest.go index 7a97a466a6..9bc1af1f61 100644 --- a/cmd/promtool/unittest.go +++ b/cmd/promtool/unittest.go @@ -19,6 +19,7 @@ import ( "errors" "fmt" "io" + "math" "os" "path/filepath" "sort" @@ -130,7 +131,7 @@ func ruleUnitTest(filename string, queryOpts promqltest.LazyLoaderOpts, run *reg if t.Interval == 0 { t.Interval = unitTestInp.EvaluationInterval } - ers := t.test(testname, evalInterval, groupOrderMap, queryOpts, diffFlag, debug, ignoreUnknownFields, unitTestInp.RuleFiles...) + ers := t.test(testname, evalInterval, groupOrderMap, queryOpts, diffFlag, debug, ignoreUnknownFields, unitTestInp.FuzzyCompare, unitTestInp.RuleFiles...) if ers != nil { for _, e := range ers { tc.Fail(e.Error()) @@ -159,6 +160,7 @@ type unitTestFile struct { EvaluationInterval model.Duration `yaml:"evaluation_interval,omitempty"` GroupEvalOrder []string `yaml:"group_eval_order"` Tests []testGroup `yaml:"tests"` + FuzzyCompare bool `yaml:"fuzzy_compare,omitempty"` } // resolveAndGlobFilepaths joins all relative paths in a configuration @@ -197,7 +199,7 @@ type testGroup struct { } // test performs the unit tests. -func (tg *testGroup) test(testname string, evalInterval time.Duration, groupOrderMap map[string]int, queryOpts promqltest.LazyLoaderOpts, diffFlag, debug, ignoreUnknownFields bool, ruleFiles ...string) (outErr []error) { +func (tg *testGroup) test(testname string, evalInterval time.Duration, groupOrderMap map[string]int, queryOpts promqltest.LazyLoaderOpts, diffFlag, debug, ignoreUnknownFields, fuzzyCompare bool, ruleFiles ...string) (outErr []error) { if debug { testStart := time.Now() fmt.Printf("DEBUG: Starting test %s\n", testname) @@ -237,6 +239,14 @@ func (tg *testGroup) test(testname string, evalInterval time.Duration, groupOrde mint := time.Unix(0, 0).UTC() maxt := mint.Add(tg.maxEvalTime()) + // Optional floating point compare fuzzing. + var compareFloat64 cmp.Option = cmp.Options{} + if fuzzyCompare { + compareFloat64 = cmp.Comparer(func(x, y float64) bool { + return x == y || math.Nextafter(x, math.Inf(-1)) == y || math.Nextafter(x, math.Inf(1)) == y + }) + } + // Pre-processing some data for testing alerts. // All this preparation is so that we can test alerts as we evaluate the rules. // This avoids storing them in memory, as the number of evals might be high. 
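
For illustration only (not part of this patch): a self-contained sketch of the tolerance the comparer above provides, using the same one-ULP rule based on `math.Nextafter`. The `almostEqual` helper is invented for this sketch, and the sample values are taken from the `rules_run_fuzzy.yml` test data above.

```go
package main

import (
	"fmt"
	"math"
)

// almostEqual mirrors the cmp.Comparer added above: two floats match if they
// are identical or differ by at most one ULP (unit in the last place).
func almostEqual(x, y float64) bool {
	return x == y || math.Nextafter(x, math.Inf(-1)) == y || math.Nextafter(x, math.Inf(1)) == y
}

func main() {
	fmt.Println(almostEqual(2.9999999999999996, 3)) // true: one ULP below 3
	fmt.Println(almostEqual(2.9999999999999987, 3)) // false: several ULPs below 3
}
```
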
@@ -374,7 +384,7 @@ func (tg *testGroup) test(testname string, evalInterval time.Duration, groupOrde sort.Sort(gotAlerts) sort.Sort(expAlerts) - if !cmp.Equal(expAlerts, gotAlerts, cmp.Comparer(labels.Equal)) { + if !cmp.Equal(expAlerts, gotAlerts, cmp.Comparer(labels.Equal), compareFloat64) { var testName string if tg.TestGroupName != "" { testName = fmt.Sprintf(" name: %s,\n", tg.TestGroupName) @@ -482,7 +492,7 @@ Outer: sort.Slice(gotSamples, func(i, j int) bool { return labels.Compare(gotSamples[i].Labels, gotSamples[j].Labels) <= 0 }) - if !cmp.Equal(expSamples, gotSamples, cmp.Comparer(labels.Equal)) { + if !cmp.Equal(expSamples, gotSamples, cmp.Comparer(labels.Equal), compareFloat64) { errs = append(errs, fmt.Errorf(" expr: %q, time: %s,\n exp: %v\n got: %v", testCase.Expr, testCase.EvalTime.String(), parsedSamplesString(expSamples), parsedSamplesString(gotSamples))) } diff --git a/cmd/promtool/unittest_test.go b/cmd/promtool/unittest_test.go index 7466b222ca..566e0acbc6 100644 --- a/cmd/promtool/unittest_test.go +++ b/cmd/promtool/unittest_test.go @@ -240,6 +240,29 @@ func TestRulesUnitTestRun(t *testing.T) { ignoreUnknownFields: true, want: 0, }, + { + name: "Test precise floating point comparison expected failure", + args: args{ + files: []string{"./testdata/rules_run_no_fuzzy.yml"}, + }, + want: 1, + }, + { + name: "Test fuzzy floating point comparison correct match", + args: args{ + run: []string{"correct"}, + files: []string{"./testdata/rules_run_fuzzy.yml"}, + }, + want: 0, + }, + { + name: "Test fuzzy floating point comparison wrong match", + args: args{ + run: []string{"wrong"}, + files: []string{"./testdata/rules_run_fuzzy.yml"}, + }, + want: 1, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { diff --git a/config/config.go b/config/config.go index 09c79b3501..f140044baa 100644 --- a/config/config.go +++ b/config/config.go @@ -110,9 +110,9 @@ func Load(s string, logger *slog.Logger) (*Config, error) { switch cfg.OTLPConfig.TranslationStrategy { case UnderscoreEscapingWithSuffixes: case "": - case NoUTF8EscapingWithSuffixes: + case NoTranslation, NoUTF8EscapingWithSuffixes: if cfg.GlobalConfig.MetricNameValidationScheme == LegacyValidationConfig { - return nil, errors.New("OTLP translation strategy NoUTF8EscapingWithSuffixes is not allowed when UTF8 is disabled") + return nil, fmt.Errorf("OTLP translation strategy %q is not allowed when UTF8 is disabled", cfg.OTLPConfig.TranslationStrategy) } default: return nil, fmt.Errorf("unsupported OTLP translation strategy %q", cfg.OTLPConfig.TranslationStrategy) @@ -1509,6 +1509,21 @@ var ( // and label name characters that are not alphanumerics/underscores to underscores. // Unit and type suffixes may be appended to metric names, according to certain rules. UnderscoreEscapingWithSuffixes translationStrategyOption = "UnderscoreEscapingWithSuffixes" + // NoTranslation (EXPERIMENTAL): disables all translation of incoming metric + // and label names. This offers a way for the OTLP users to use native metric names, reducing confusion. + // + // WARNING: This setting has significant known risks and limitations (see + // https://prometheus.io/docs/practices/naming/ for details): + // * Impaired UX when using PromQL in plain YAML (e.g. alerts, rules, dashboard, autoscaling configuration). + // * Series collisions which in the best case may result in OOO errors, in the worst case a silently malformed + // time series. 
For instance, you may end up in situation of ingesting `foo.bar` series with unit + // `seconds` and a separate series `foo.bar` with unit `milliseconds`. + // + // As a result, this setting is experimental and currently, should not be used in + // production systems. + // + // TODO(ArthurSens): Mention `type-and-unit-labels` feature (https://github.com/prometheus/proposals/pull/39) once released, as potential mitigation of the above risks. + NoTranslation translationStrategyOption = "NoTranslation" ) // OTLPConfig is the configuration for writing to the OTLP endpoint. diff --git a/config/config_test.go b/config/config_test.go index 6d59c7220d..236b062898 100644 --- a/config/config_test.go +++ b/config/config_test.go @@ -1677,7 +1677,7 @@ func TestOTLPConvertHistogramsToNHCB(t *testing.T) { } func TestOTLPAllowUTF8(t *testing.T) { - t.Run("good config", func(t *testing.T) { + t.Run("good config - NoUTF8EscapingWithSuffixes", func(t *testing.T) { fpath := filepath.Join("testdata", "otlp_allow_utf8.good.yml") verify := func(t *testing.T, conf *Config, err error) { t.Helper() @@ -1697,11 +1697,51 @@ func TestOTLPAllowUTF8(t *testing.T) { }) }) - t.Run("incompatible config", func(t *testing.T) { + t.Run("incompatible config - NoUTF8EscapingWithSuffixes", func(t *testing.T) { fpath := filepath.Join("testdata", "otlp_allow_utf8.incompatible.yml") verify := func(t *testing.T, err error) { t.Helper() - require.ErrorContains(t, err, `OTLP translation strategy NoUTF8EscapingWithSuffixes is not allowed when UTF8 is disabled`) + require.ErrorContains(t, err, `OTLP translation strategy "NoUTF8EscapingWithSuffixes" is not allowed when UTF8 is disabled`) + } + + t.Run("LoadFile", func(t *testing.T) { + _, err := LoadFile(fpath, false, promslog.NewNopLogger()) + verify(t, err) + }) + t.Run("Load", func(t *testing.T) { + content, err := os.ReadFile(fpath) + require.NoError(t, err) + _, err = Load(string(content), promslog.NewNopLogger()) + t.Log("err", err) + verify(t, err) + }) + }) + + t.Run("good config - NoTranslation", func(t *testing.T) { + fpath := filepath.Join("testdata", "otlp_no_translation.good.yml") + verify := func(t *testing.T, conf *Config, err error) { + t.Helper() + require.NoError(t, err) + require.Equal(t, NoTranslation, conf.OTLPConfig.TranslationStrategy) + } + + t.Run("LoadFile", func(t *testing.T) { + conf, err := LoadFile(fpath, false, promslog.NewNopLogger()) + verify(t, conf, err) + }) + t.Run("Load", func(t *testing.T) { + content, err := os.ReadFile(fpath) + require.NoError(t, err) + conf, err := Load(string(content), promslog.NewNopLogger()) + verify(t, conf, err) + }) + }) + + t.Run("incompatible config - NoTranslation", func(t *testing.T) { + fpath := filepath.Join("testdata", "otlp_no_translation.incompatible.yml") + verify := func(t *testing.T, err error) { + t.Helper() + require.ErrorContains(t, err, `OTLP translation strategy "NoTranslation" is not allowed when UTF8 is disabled`) } t.Run("LoadFile", func(t *testing.T) { diff --git a/config/testdata/otlp_no_translation.good.yml b/config/testdata/otlp_no_translation.good.yml new file mode 100644 index 0000000000..e5c4460842 --- /dev/null +++ b/config/testdata/otlp_no_translation.good.yml @@ -0,0 +1,2 @@ +otlp: + translation_strategy: NoTranslation diff --git a/config/testdata/otlp_no_translation.incompatible.yml b/config/testdata/otlp_no_translation.incompatible.yml new file mode 100644 index 0000000000..33c5a756f5 --- /dev/null +++ b/config/testdata/otlp_no_translation.incompatible.yml @@ -0,0 +1,4 @@ +global: + 
metric_name_validation_scheme: legacy +otlp: + translation_strategy: NoTranslation diff --git a/discovery/hetzner/hcloud.go b/discovery/hetzner/hcloud.go index ba64250c0f..88fe09bd3e 100644 --- a/discovery/hetzner/hcloud.go +++ b/discovery/hetzner/hcloud.go @@ -53,14 +53,16 @@ const ( // the Discoverer interface. type hcloudDiscovery struct { *refresh.Discovery - client *hcloud.Client - port int + client *hcloud.Client + port int + labelSelector string } // newHcloudDiscovery returns a new hcloudDiscovery which periodically refreshes its targets. func newHcloudDiscovery(conf *SDConfig, _ *slog.Logger) (*hcloudDiscovery, error) { d := &hcloudDiscovery{ - port: conf.Port, + port: conf.Port, + labelSelector: conf.LabelSelector, } rt, err := config.NewRoundTripperFromConfig(conf.HTTPClientConfig, "hetzner_sd") @@ -79,7 +81,10 @@ func newHcloudDiscovery(conf *SDConfig, _ *slog.Logger) (*hcloudDiscovery, error } func (d *hcloudDiscovery) refresh(ctx context.Context) ([]*targetgroup.Group, error) { - servers, err := d.client.Server.All(ctx) + servers, err := d.client.Server.AllWithOpts(ctx, hcloud.ServerListOpts{ListOpts: hcloud.ListOpts{ + PerPage: 50, + LabelSelector: d.labelSelector, + }}) if err != nil { return nil, err } diff --git a/discovery/hetzner/hetzner.go b/discovery/hetzner/hetzner.go index 97d48f6d70..9245d933cc 100644 --- a/discovery/hetzner/hetzner.go +++ b/discovery/hetzner/hetzner.go @@ -59,8 +59,11 @@ type SDConfig struct { RefreshInterval model.Duration `yaml:"refresh_interval"` Port int `yaml:"port"` Role Role `yaml:"role"` - hcloudEndpoint string // For tests only. - robotEndpoint string // For tests only. + + LabelSelector string `yaml:"label_selector,omitempty"` + + hcloudEndpoint string // For tests only. + robotEndpoint string // For tests only. } // NewDiscovererMetrics implements discovery.Config. diff --git a/discovery/kubernetes/endpointslice.go b/discovery/kubernetes/endpointslice.go index 7331605f9b..1002025128 100644 --- a/discovery/kubernetes/endpointslice.go +++ b/discovery/kubernetes/endpointslice.go @@ -110,7 +110,8 @@ func NewEndpointSlice(l *slog.Logger, eps cache.SharedIndexInformer, svc, pod, n e.logger.Error("converting to EndpointSlice object failed", "err", err) continue } - if lv, exists := es.Labels[v1.LabelServiceName]; exists && lv == svc.Name { + // Only consider the underlying EndpointSlices in the same namespace. + if svcName, exists := es.Labels[v1.LabelServiceName]; exists && svcName == svc.Name && es.Namespace == svc.Namespace { e.enqueue(es) } } diff --git a/docs/command-line/prometheus.md b/docs/command-line/prometheus.md index 5124255316..ebd6007f7b 100644 --- a/docs/command-line/prometheus.md +++ b/docs/command-line/prometheus.md @@ -61,7 +61,7 @@ The Prometheus monitoring server | --query.timeout | Maximum time a query may take before being aborted. Use with server mode only. | `2m` | | --query.max-concurrency | Maximum number of queries executed concurrently. Use with server mode only. | `20` | | --query.max-samples | Maximum number of samples a single query can load into memory. Note that queries will fail if they try to load more samples than this into memory, so this also limits the number of samples a query can return. Use with server mode only. | `50000000` | -| --enable-feature ... | Comma separated feature names to enable. 
Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-per-step-stats, promql-experimental-functions, extra-scrape-metrics, auto-gomaxprocs, native-histograms, created-timestamp-zero-ingestion, concurrent-rule-eval, delayed-compaction, old-ui, otlp-deltatocumulative. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details. | | +| --enable-feature ... | Comma separated feature names to enable. Valid options: exemplar-storage, expand-external-labels, memory-snapshot-on-shutdown, promql-per-step-stats, promql-experimental-functions, extra-scrape-metrics, auto-gomaxprocs, native-histograms, created-timestamp-zero-ingestion, concurrent-rule-eval, delayed-compaction, old-ui, otlp-deltatocumulative, promql-duration-expr. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details. | | | --agent | Run Prometheus in 'Agent mode'. | | | --log.level | Only log messages with the given severity or above. One of: [debug, info, warn, error] | `info` | | --log.format | Output format of log messages. One of: [logfmt, json] | `logfmt` | diff --git a/docs/command-line/promtool.md b/docs/command-line/promtool.md index ab675e6345..3b1ca84f6b 100644 --- a/docs/command-line/promtool.md +++ b/docs/command-line/promtool.md @@ -15,7 +15,7 @@ Tooling for the Prometheus monitoring system. | -h, --help | Show context-sensitive help (also try --help-long and --help-man). | | --version | Show application version. | | --experimental | Enable experimental commands. | -| --enable-feature ... | Comma separated feature names to enable. Currently unused. | +| --enable-feature ... | Comma separated feature names to enable. Valid options: promql-experimental-functions, promql-delayed-name-removal. See https://prometheus.io/docs/prometheus/latest/feature_flags/ for more details | @@ -581,8 +581,8 @@ Dump samples from a TSDB. | Flag | Description | Default | | --- | --- | --- | | --sandbox-dir-root | Root directory where a sandbox directory will be created, this sandbox is used in case WAL replay generates chunks (default is the database path). The sandbox is cleaned up at the end. | | -| --min-time | Minimum timestamp to dump. | `-9223372036854775808` | -| --max-time | Maximum timestamp to dump. | `9223372036854775807` | +| --min-time | Minimum timestamp to dump, in milliseconds since the Unix epoch. | `-9223372036854775808` | +| --max-time | Maximum timestamp to dump, in milliseconds since the Unix epoch. | `9223372036854775807` | | --match ... | Series selector. Can be specified multiple times. | `{__name__=~'(?s:.*)'}` | @@ -608,8 +608,8 @@ Dump samples from a TSDB. | Flag | Description | Default | | --- | --- | --- | | --sandbox-dir-root | Root directory where a sandbox directory will be created, this sandbox is used in case WAL replay generates chunks (default is the database path). The sandbox is cleaned up at the end. | | -| --min-time | Minimum timestamp to dump. | `-9223372036854775808` | -| --max-time | Maximum timestamp to dump. | `9223372036854775807` | +| --min-time | Minimum timestamp to dump, in milliseconds since the Unix epoch. | `-9223372036854775808` | +| --max-time | Maximum timestamp to dump, in milliseconds since the Unix epoch. | `9223372036854775807` | | --match ... | Series selector. Can be specified multiple times. 
| `{__name__=~'(?s:.*)'}` | diff --git a/docs/configuration/configuration.md index 9c5c37b6d3..4cd4a677ab 100644 --- a/docs/configuration/configuration.md +++ b/docs/configuration/configuration.md @@ -186,6 +186,16 @@ otlp: # - "NoUTF8EscapingWithSuffixes" is a mode that relies on UTF-8 support in Prometheus. # It preserves all special characters like dots, but still adds required metric name suffixes # for units and _total, as UnderscoreEscapingWithSuffixes does. + # - (EXPERIMENTAL) "NoTranslation" is a mode that relies on UTF-8 support in Prometheus. + # It preserves all special characters like dots and won't append special suffixes for metric + # unit and type. + # + # WARNING: The "NoTranslation" setting has significant known risks and limitations (see https://prometheus.io/docs/practices/naming/ + # for details): + # * Impaired UX when using PromQL in plain YAML (e.g. alerts, rules, dashboard, autoscaling configuration). + # * Series collisions which in the best case may result in OOO errors, in the worst case a silently malformed + # time series. For instance, you may end up in a situation of ingesting `foo.bar` series with unit + # `seconds` and a separate series `foo.bar` with unit `milliseconds`. [ translation_strategy: <translation_strategy> | default = "UnderscoreEscapingWithSuffixes" ] # Enables adding "service.name", "service.namespace" and "service.instance.id" # resource attributes to the "target_info" metric, on top of converting @@ -649,7 +659,7 @@ A `tls_config` allows configuring TLS connections. ### `<oauth2>` -OAuth 2.0 authentication using the client credentials grant type. +OAuth 2.0 authentication using the client credentials or password grant type. Prometheus fetches an access token from the specified endpoint with the given client access and secret keys. @@ -669,6 +679,11 @@ scopes: token_url: <string> # Optional parameters to append to the token URL. +# To set 'password' grant type, add it to params: +# endpoint_params: +# grant_type: 'password' +# username: 'username@example.com' +# password: 'strongpassword' endpoint_params: [ <string>: <string> ... ] @@ -1623,6 +1638,10 @@ role: # The time after which the servers are refreshed. [ refresh_interval: <duration> | default = 60s ] +# Label selector used to filter the servers when fetching them from the API. See https://docs.hetzner.cloud/#label-selector for more details. +# Only used when role is hcloud. +[ label_selector: <string> ] + # HTTP client settings, including authentication methods (such as basic auth and # authorization), proxy configurations, TLS options, custom HTTP headers, etc. [ <http_config> ] diff --git a/docs/configuration/recording_rules.md index 9a8e7a70c9..45a263292b 100644 --- a/docs/configuration/recording_rules.md +++ b/docs/configuration/recording_rules.md @@ -17,10 +17,6 @@ Rule files use YAML. The rule files can be reloaded at runtime by sending `SIGHUP` to the Prometheus process. The changes are only applied if all rule files are well-formatted. -_Note about native histograms (experimental feature): Native histogram are always -recorded as gauge histograms (for now). Most cases will create gauge histograms -naturally, e.g.
after `rate()`._ ## Syntax-checking rules To quickly check whether a rule file is syntactically correct without starting diff --git a/docs/configuration/unit_testing_rules.md index 7fc676a251..ccf1961f48 100644 --- a/docs/configuration/unit_testing_rules.md +++ b/docs/configuration/unit_testing_rules.md @@ -24,6 +24,10 @@ rule_files: [ evaluation_interval: <duration> | default = 1m ] + +# Setting fuzzy_compare true will very slightly weaken floating point comparisons. +# This will (effectively) ignore differences in the last bit of the mantissa. +[ fuzzy_compare: <boolean> | default = false ] + # The order in which group names are listed below will be the order of evaluation of # rule groups (at a given evaluation time). The order is guaranteed only for the groups mentioned below. # All the groups need not be mentioned below. @@ -95,20 +99,20 @@ series: # {{schema:1 sum:-0.3 count:3.1 z_bucket:7.1 z_bucket_w:0.05 buckets:[5.1 10 7] offset:-3 n_buckets:[4.1 5] n_offset:-5 counter_reset_hint:gauge}} # Native histograms support the same expanding notation as floating point numbers, i.e. 'axn', 'a+bxn' and 'a-bxn'. # All properties are optional and default to 0. The order is not important. The following properties are supported: -# - schema (int): +# - schema (int): # Currently valid schema numbers are -4 <= n <= 8. They are all for # base-2 bucket schemas, where 1 is a bucket boundary in each case, and # then each power of two is divided into 2^n logarithmic buckets. Or # in other words, each bucket boundary is the previous boundary times # 2^(2^-n). -# - sum (float): +# - sum (float): # The sum of all observations, including the zero bucket. -# - count (non-negative float): +# - count (non-negative float): # The number of observations, including those that are NaN and including the zero bucket. -# - z_bucket (non-negative float): +# - z_bucket (non-negative float): # The sum of all observations in the zero bucket. -# - z_bucket_w (non-negative float): -# The width of the zero bucket. +# - z_bucket_w (non-negative float): +# The width of the zero bucket. # If z_bucket_w > 0, the zero bucket contains all observations -z_bucket_w <= x <= z_bucket_w. # Otherwise, the zero bucket only contains observations that are exactly 0. # - buckets (list of non-negative floats): diff --git a/docs/feature_flags.md index 6973d6d73b..174184072e 100644 --- a/docs/feature_flags.md +++ b/docs/feature_flags.md @@ -168,7 +168,7 @@ recommended to update these files atomically. `--enable-feature=otlp-deltatocumulative` When enabled, Prometheus will convert OTLP metrics from delta temporality to their -cumulative equivalent, instead of dropping them. +cumulative equivalent, instead of dropping them. This cannot be enabled in conjunction with `otlp-native-delta-ingestion`. This uses [deltatocumulative][d2c] @@ -183,4 +183,67 @@ This state is periodically ([`max_stale`][d2c]) cleared of inactive series. Enabling this _can_ have negative impact on performance, because the in-memory state is mutex guarded. Cumulative-only OTLP requests are not affected. +### PromQL arithmetic expressions in time durations + +`--enable-feature=promql-duration-expr` + +With this flag, arithmetic expressions can be used in time durations in range queries and offset durations.
For example: + +In range queries: + rate(http_requests_total[5m * 2]) # 10 minute range + rate(http_requests_total[(5+2) * 1m]) # 7 minute range + +In offset durations: + http_requests_total offset (1h / 2) # 30 minute offset + http_requests_total offset ((2 ^ 3) * 1m) # 8 minute offset + +Note: Duration expressions are not supported in the @ timestamp operator. + +The following operators are supported: + +* `+` - addition +* `-` - subtraction +* `*` - multiplication +* `/` - division +* `%` - modulo +* `^` - exponentiation + +Examples of equivalent durations: + +* `5m * 2` is equivalent to `10m` or `600s` +* `10m - 1m` is equivalent to `9m` or `540s` +* `(5+2) * 1m` is equivalent to `7m` or `420s` +* `1h / 2` is equivalent to `30m` or `1800s` +* `4h % 3h` is equivalent to `1h` or `3600s` +* `(2 ^ 3) * 1m` is equivalent to `8m` or `480s` + [d2c]: https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/deltatocumulativeprocessor + +## OTLP Native Delta Support + +`--enable-feature=otlp-native-delta-ingestion` + +When enabled, allows for the native ingestion of delta OTLP metrics, storing the raw sample values without conversion. This cannot be enabled in conjunction with `otlp-deltatocumulative`. + +Currently, the StartTimeUnixNano field is ignored, and deltas are given the unknown metric metadata type. + +Delta support is in a very early stage of development and the ingestion and querying process may change over time. For the open proposal see [prometheus/proposals#48](https://github.com/prometheus/proposals/pull/48). + +### Querying + +We encourage users to experiment with deltas and existing PromQL functions; we will collect feedback and likely build features to improve the experience around querying deltas. + +Note that standard PromQL counter functions like `rate()` and `increase()` are designed for cumulative metrics and will produce incorrect results when used with delta metrics. This may change in the future, but for now, to get similar results for delta metrics, you need `sum_over_time()`: + +* `sum_over_time(delta_metric[<range>])`: Calculates the sum of delta values over the specified time range. +* `sum_over_time(delta_metric[<range>]) / <range in seconds>`: Calculates the per-second rate of the delta metric. + +These may not work well if the `<range>` is not a multiple of the collection interval of the metric. For example, if you run a `sum_over_time(delta_metric[1m]) / 1m` range query (with a 1m step), but the collection interval of a metric is 10m, the graph will show a single point every 10 minutes with a high rate value, rather than 10 points with a lower, constant value. + +### Current gotchas + +* If delta metrics are exposed via [federation](https://prometheus.io/docs/prometheus/latest/federation/), data can be incorrectly collected if the ingestion interval is not the same as the scrape interval for the federated endpoint. + +* It is difficult to figure out whether a metric has delta or cumulative temporality, since there's no indication of temporality in metric names or labels. For now, if you are ingesting a mix of delta and cumulative metrics, we advise you to explicitly add your own labels to distinguish them. In the future, we plan to introduce type labels to consistently distinguish metric types and potentially make PromQL functions type-aware (e.g. providing warnings when cumulative-only functions are used with delta metrics).
+ +* If there are multiple samples being ingested at the same timestamp, only one of the points is kept - the samples are **not** summed together (this is how Prometheus works in general - duplicate timestamp samples are rejected). Any aggregation will have to be done before sending samples to Prometheus. \ No newline at end of file diff --git a/docs/querying/functions.md index 6615c2aebc..40cc697055 100644 --- a/docs/querying/functions.md +++ b/docs/querying/functions.md @@ -253,10 +253,23 @@ histogram samples: ## `histogram_fraction()` -`histogram_fraction(lower scalar, upper scalar, v instant-vector)` returns the +`histogram_fraction(lower scalar, upper scalar, b instant-vector)` returns the estimated fraction of observations between the provided lower and upper values -for each histogram sample in `v`. Float samples are ignored and do not show up -in the returned vector. +for each classic or native histogram contained in `b`. Float samples in `b` are +considered the counts of observations in each bucket of one or more classic +histograms, while native histogram samples in `b` are each treated individually +as a separate histogram. This works in the same way as for `histogram_quantile()`. +(See there for more details.) + +If the provided lower and upper values do not coincide with bucket boundaries, +the calculated fraction is an estimate, using the same interpolation method as for +`histogram_quantile()`. (See there for more details.) Especially with classic +histograms, it is easy to accidentally pick lower or upper values that are very +far away from any bucket boundary, leading to large margins of error. Rather than +using `histogram_fraction()` with classic histograms, it is often a more robust approach +to directly act on the bucket series when calculating fractions. See the +[calculation of the Apdex score](https://prometheus.io/docs/practices/histograms/#apdex-score) +as a typical example. For example, the following expression calculates the fraction of HTTP requests over the last hour that took 200ms or less: @@ -280,8 +293,8 @@ feature inclusive upper boundaries and exclusive lower boundaries for positive values, and vice versa for negative values.) Without a precise alignment of boundaries, the function uses interpolation to estimate the fraction. With the resulting uncertainty, it becomes irrelevant if the boundaries are inclusive or -exclusive. The interpolation method is the same as the one used for -`histogram_quantile()`. See there for more details. +exclusive. + ## `histogram_quantile()` @@ -415,9 +428,11 @@ annotation, you should find and remove the source of the invalid data. ## `histogram_stddev()` and `histogram_stdvar()` `histogram_stddev(v instant-vector)` returns the estimated standard deviation -of observations for each histogram sample in `v`, based on the geometric mean -of the buckets where the observations lie. Float samples are ignored and do not -show up in the returned vector. +of observations for each histogram sample in `v`. For this estimation, all observations +in a bucket are assumed to have the value of the mean of the bucket boundaries. For +the zero bucket and for buckets with custom boundaries, the arithmetic mean is used. +For the usual exponential buckets, the geometric mean is used. Float samples are ignored +and do not show up in the returned vector. Similarly, `histogram_stdvar(v instant-vector)` returns the estimated standard variance of observations for each histogram sample in `v`.
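
As an aside (not part of this patch), the bucket-mean assumption described in the `histogram_stddev()` text above can be illustrated with a small standalone sketch. This is not Prometheus's actual implementation; the `bucket` type and the `bucketMean` and `estimateStddev` helpers are invented for the example.

```go
package main

import (
	"fmt"
	"math"
)

// bucket is a simplified histogram bucket: its boundaries, the number of
// observations it holds, and whether it is one of the usual exponential buckets.
type bucket struct {
	lower, upper, count float64
	exponential         bool
}

// bucketMean is the value assumed for every observation in the bucket: the
// geometric mean of the boundaries for (positive) exponential buckets, the
// arithmetic mean for the zero bucket and buckets with custom boundaries.
func bucketMean(b bucket) float64 {
	if b.exponential && b.lower > 0 {
		return math.Sqrt(b.lower * b.upper)
	}
	return (b.lower + b.upper) / 2
}

// estimateStddev estimates the standard deviation under that assumption.
func estimateStddev(buckets []bucket) float64 {
	var count, sum float64
	for _, b := range buckets {
		count += b.count
		sum += b.count * bucketMean(b)
	}
	mean := sum / count
	var variance float64
	for _, b := range buckets {
		d := bucketMean(b) - mean
		variance += b.count * d * d
	}
	return math.Sqrt(variance / count)
}

func main() {
	fmt.Println(estimateStddev([]bucket{
		{lower: 1, upper: 2, count: 3, exponential: true},
		{lower: 2, upper: 4, count: 1, exponential: true},
	}))
}
```
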
diff --git a/documentation/examples/remote_storage/go.mod b/documentation/examples/remote_storage/go.mod index c4e4d5e5f3..af0afd84de 100644 --- a/documentation/examples/remote_storage/go.mod +++ b/documentation/examples/remote_storage/go.mod @@ -7,7 +7,7 @@ require ( github.com/gogo/protobuf v1.3.2 github.com/golang/snappy v1.0.0 github.com/influxdata/influxdb-client-go/v2 v2.14.0 - github.com/prometheus/client_golang v1.21.1 + github.com/prometheus/client_golang v1.22.0 github.com/prometheus/common v0.63.0 github.com/prometheus/prometheus v1.99.0 github.com/stretchr/testify v1.10.0 @@ -37,7 +37,7 @@ require ( github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/jpillora/backoff v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect - github.com/klauspost/compress v1.17.11 // indirect + github.com/klauspost/compress v1.18.0 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect github.com/modern-go/reflect2 v1.0.2 // indirect @@ -51,8 +51,8 @@ require ( github.com/prometheus/procfs v0.15.1 // indirect github.com/xhit/go-str2duration/v2 v2.1.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/collector/pdata v1.28.1 // indirect - go.opentelemetry.io/collector/semconv v0.122.1 // indirect + go.opentelemetry.io/collector/pdata v1.30.0 // indirect + go.opentelemetry.io/collector/semconv v0.124.0 // indirect go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 // indirect go.opentelemetry.io/otel v1.35.0 // indirect go.opentelemetry.io/otel/metric v1.35.0 // indirect @@ -60,14 +60,14 @@ require ( go.uber.org/atomic v1.11.0 // indirect go.uber.org/multierr v1.11.0 // indirect golang.org/x/crypto v0.36.0 // indirect - golang.org/x/net v0.37.0 // indirect + golang.org/x/net v0.38.0 // indirect golang.org/x/oauth2 v0.25.0 // indirect golang.org/x/sys v0.31.0 // indirect golang.org/x/text v0.23.0 // indirect golang.org/x/time v0.7.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f // indirect - google.golang.org/grpc v1.71.0 // indirect - google.golang.org/protobuf v1.36.5 // indirect + google.golang.org/grpc v1.71.1 // indirect + google.golang.org/protobuf v1.36.6 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect k8s.io/apimachinery v0.32.3 // indirect diff --git a/documentation/examples/remote_storage/go.sum b/documentation/examples/remote_storage/go.sum index f52427a186..1a7d8a6c83 100644 --- a/documentation/examples/remote_storage/go.sum +++ b/documentation/examples/remote_storage/go.sum @@ -195,8 +195,8 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= -github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= +github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= +github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/kolo/xmlrpc v0.0.0-20220921171641-a4b6fa1dd06b h1:udzkj9S/zlT5X367kqJis0QP7YMxobob6zhzq6Yre00= github.com/kolo/xmlrpc 
v0.0.0-20220921171641-a4b6fa1dd06b/go.mod h1:pcaDhQK0/NJZEvtCO0qQPPropqV0sJOJ6YW7X+9kRwM= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= @@ -265,8 +265,8 @@ github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXP github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= -github.com/prometheus/client_golang v1.21.1 h1:DOvXXTqVzvkIewV/CDPFdejpMCGeMcbGCQ8YOmu+Ibk= -github.com/prometheus/client_golang v1.21.1/go.mod h1:U9NM32ykUErtVBxdvD3zfi+EuFkkaBvMb09mIfe0Zgg= +github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q= +github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= @@ -318,10 +318,10 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/collector/pdata v1.28.1 h1:ORl5WLpQJvjzBVpHu12lqKMdcf/qDBwRXMcUubhybiQ= -go.opentelemetry.io/collector/pdata v1.28.1/go.mod h1:asKE8MD/4SOKz1mCrGdAz4VO2U2HUNg8A6094uK7pq0= -go.opentelemetry.io/collector/semconv v0.122.1 h1:WLzDi3QC4/+LpNMLY90zn5aMDJKyqg/ujW2O4T4sxHg= -go.opentelemetry.io/collector/semconv v0.122.1/go.mod h1:te6VQ4zZJO5Lp8dM2XIhDxDiL45mwX0YAQQWRQ0Qr9U= +go.opentelemetry.io/collector/pdata v1.30.0 h1:j3jyq9um436r6WzWySzexP2nLnFdmL5uVBYAlyr9nDM= +go.opentelemetry.io/collector/pdata v1.30.0/go.mod h1:0Bxu1ktuj4wE7PIASNSvd0SdBscQ1PLtYasymJ13/Cs= +go.opentelemetry.io/collector/semconv v0.124.0 h1:YTdo3UFwNyDQCh9DiSm2rbzAgBuwn/9dNZ0rv454goA= +go.opentelemetry.io/collector/semconv v0.124.0/go.mod h1:te6VQ4zZJO5Lp8dM2XIhDxDiL45mwX0YAQQWRQ0Qr9U= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ= go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ= @@ -365,8 +365,8 @@ golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81R golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= -golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= +golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= 
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70= @@ -423,8 +423,8 @@ google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422 h1: google.golang.org/genproto/googleapis/api v0.0.0-20250106144421-5f5ef82da422/go.mod h1:b6h1vNKhxaSoEI+5jc3PJUCustfli/mRab7295pY7rw= google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f h1:OxYkA3wjPsZyBylwymxSHa7ViiW1Sml4ToBrncvFehI= google.golang.org/genproto/googleapis/rpc v0.0.0-20250115164207-1a7da9e5054f/go.mod h1:+2Yz8+CLJbIfL9z73EW45avw8Lmge3xVElCP9zEKi50= -google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg= -google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec= +google.golang.org/grpc v1.71.1 h1:ffsFWr7ygTUscGPI0KKK6TLrGz0476KUvvsbqWK0rPI= +google.golang.org/grpc v1.71.1/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -432,8 +432,8 @@ google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miE google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM= -google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/go.mod b/go.mod index 408ca72bcb..ea4052c9ff 100644 --- a/go.mod +++ b/go.mod @@ -3,20 +3,20 @@ module github.com/prometheus/prometheus go 1.23.0 require ( - github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.1 - github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 + github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 + github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.9.0 github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute/v5 v5.7.0 github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/network/armnetwork/v4 v4.3.0 github.com/Code-Hex/go-generics-cache v1.5.1 github.com/KimMachineGun/automemlimit v0.7.1 github.com/alecthomas/kingpin/v2 v2.4.0 github.com/alecthomas/units v0.0.0-20240927000941-0f3dac36c52b - github.com/aws/aws-sdk-go v1.55.6 + github.com/aws/aws-sdk-go v1.55.7 github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3 github.com/cespare/xxhash/v2 v2.3.0 github.com/dennwc/varint v1.0.0 - github.com/digitalocean/godo v1.141.0 - github.com/docker/docker v28.0.2+incompatible + 
github.com/digitalocean/godo v1.144.0 + github.com/docker/docker v28.1.1+incompatible github.com/edsrzf/mmap-go v1.2.0 github.com/envoyproxy/go-control-plane/envoy v1.32.4 github.com/envoyproxy/protoc-gen-validate v1.2.1 @@ -29,40 +29,40 @@ require ( github.com/google/go-cmp v0.7.0 github.com/google/pprof v0.0.0-20241210010833-40e02aabc2ad github.com/google/uuid v1.6.0 - github.com/gophercloud/gophercloud/v2 v2.6.0 + github.com/gophercloud/gophercloud/v2 v2.7.0 github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc - github.com/hashicorp/consul/api v1.31.2 + github.com/hashicorp/consul/api v1.32.0 github.com/hashicorp/nomad/api v0.0.0-20241218080744-e3ac00f30eec - github.com/hetznercloud/hcloud-go/v2 v2.20.1 - github.com/ionos-cloud/sdk-go/v6 v6.3.2 + github.com/hetznercloud/hcloud-go/v2 v2.21.0 + github.com/ionos-cloud/sdk-go/v6 v6.3.3 github.com/json-iterator/go v1.1.12 github.com/klauspost/compress v1.18.0 github.com/kolo/xmlrpc v0.0.0-20220921171641-a4b6fa1dd06b - github.com/linode/linodego v1.48.1 - github.com/miekg/dns v1.1.64 + github.com/linode/linodego v1.49.0 + github.com/miekg/dns v1.1.65 github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f github.com/nsf/jsondiff v0.0.0-20230430225905-43f6cf3098c1 github.com/oklog/run v1.1.0 github.com/oklog/ulid/v2 v2.1.0 - github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.122.0 + github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.124.1 github.com/ovh/go-ovh v1.7.0 github.com/prometheus/alertmanager v0.28.1 - github.com/prometheus/client_golang v1.21.1 - github.com/prometheus/client_model v0.6.1 + github.com/prometheus/client_golang v1.22.0 + github.com/prometheus/client_model v0.6.2 github.com/prometheus/common v0.63.0 github.com/prometheus/common/assets v0.2.0 github.com/prometheus/exporter-toolkit v0.14.0 github.com/prometheus/sigv4 v0.1.2 - github.com/scaleway/scaleway-sdk-go v1.0.0-beta.32 + github.com/scaleway/scaleway-sdk-go v1.0.0-beta.33 github.com/shurcooL/httpfs v0.0.0-20230704072500-f1e31cf0ba5c github.com/stretchr/testify v1.10.0 github.com/vultr/govultr/v2 v2.17.2 - go.opentelemetry.io/collector/component v1.28.1 - go.opentelemetry.io/collector/consumer v1.28.1 - go.opentelemetry.io/collector/pdata v1.28.1 - go.opentelemetry.io/collector/processor v0.122.1 - go.opentelemetry.io/collector/semconv v0.122.1 + go.opentelemetry.io/collector/component v1.30.0 + go.opentelemetry.io/collector/consumer v1.30.0 + go.opentelemetry.io/collector/pdata v1.30.0 + go.opentelemetry.io/collector/processor v1.30.0 + go.opentelemetry.io/collector/semconv v0.124.0 go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0 go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 go.opentelemetry.io/otel v1.35.0 @@ -76,14 +76,14 @@ require ( go.uber.org/automaxprocs v1.6.0 go.uber.org/goleak v1.3.0 go.uber.org/multierr v1.11.0 - golang.org/x/oauth2 v0.28.0 - golang.org/x/sync v0.12.0 - golang.org/x/sys v0.31.0 - golang.org/x/text v0.23.0 - google.golang.org/api v0.227.0 + golang.org/x/oauth2 v0.29.0 + golang.org/x/sync v0.13.0 + golang.org/x/sys v0.32.0 + golang.org/x/text v0.24.0 + google.golang.org/api v0.230.0 google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb - google.golang.org/grpc v1.71.0 - google.golang.org/protobuf v1.36.5 + google.golang.org/grpc v1.72.0 + google.golang.org/protobuf v1.36.6 
gopkg.in/yaml.v2 v2.4.0 gopkg.in/yaml.v3 v3.0.1 k8s.io/api v0.32.3 @@ -95,22 +95,26 @@ require ( require ( github.com/hashicorp/go-version v1.7.0 // indirect + github.com/moby/sys/atomicwriter v0.1.0 // indirect github.com/puzpuzpuz/xsync/v3 v3.5.1 // indirect - go.opentelemetry.io/collector/featuregate v1.28.0 // indirect + go.opentelemetry.io/collector/featuregate v1.30.0 // indirect + go.opentelemetry.io/collector/internal/telemetry v0.124.0 // indirect + go.opentelemetry.io/contrib/bridges/otelzap v0.10.0 // indirect + go.opentelemetry.io/otel/log v0.11.0 // indirect ) require ( - cloud.google.com/go/auth v0.15.0 // indirect - cloud.google.com/go/auth/oauth2adapt v0.2.7 // indirect + cloud.google.com/go/auth v0.16.0 // indirect + cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect cloud.google.com/go/compute/metadata v0.6.0 // indirect - github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 // indirect - github.com/AzureAD/microsoft-authentication-library-for-go v1.3.3 // indirect + github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect + github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 // indirect github.com/Microsoft/go-winio v0.6.1 // indirect github.com/armon/go-metrics v0.4.1 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect - github.com/cncf/xds/go v0.0.0-20241223141626-cff3c89139a3 // indirect + github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 // indirect github.com/containerd/log v0.1.0 // indirect github.com/coreos/go-systemd/v22 v22.5.0 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect @@ -157,7 +161,7 @@ require ( github.com/josharian/intern v1.0.0 // indirect github.com/jpillora/backoff v1.0.0 // indirect github.com/julienschmidt/httprouter v1.3.0 // indirect - github.com/knadh/koanf/maps v0.1.1 // indirect + github.com/knadh/koanf/maps v0.1.2 // indirect github.com/knadh/koanf/providers/confmap v0.1.0 // indirect github.com/knadh/koanf/v2 v2.1.2 // indirect github.com/kylelemons/godebug v1.1.0 // indirect @@ -176,8 +180,8 @@ require ( github.com/modern-go/reflect2 v1.0.2 // indirect github.com/morikuni/aec v1.0.0 // indirect github.com/oklog/ulid v1.3.1 // indirect - github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.122.0 // indirect - github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.122.0 // indirect + github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.124.1 // indirect + github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.124.1 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.0.2 // indirect github.com/pbnjay/memory v0.0.0-20210728143218-7b4eea64cf58 // indirect @@ -193,19 +197,19 @@ require ( github.com/xhit/go-str2duration/v2 v2.1.0 // indirect go.mongodb.org/mongo-driver v1.14.0 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect - go.opentelemetry.io/collector/confmap v1.28.0 // indirect - go.opentelemetry.io/collector/confmap/xconfmap v0.122.0 // indirect - go.opentelemetry.io/collector/pipeline v0.122.1 // indirect + go.opentelemetry.io/collector/confmap v1.30.0 // indirect + go.opentelemetry.io/collector/confmap/xconfmap v0.124.0 // indirect + go.opentelemetry.io/collector/pipeline v0.124.0 // indirect go.opentelemetry.io/proto/otlp v1.5.0 // indirect go.uber.org/zap v1.27.0 
// indirect - golang.org/x/crypto v0.36.0 // indirect + golang.org/x/crypto v0.37.0 // indirect golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 // indirect golang.org/x/mod v0.24.0 // indirect - golang.org/x/net v0.38.0 // indirect - golang.org/x/term v0.30.0 // indirect + golang.org/x/net v0.39.0 // indirect + golang.org/x/term v0.31.0 // indirect golang.org/x/time v0.11.0 // indirect - golang.org/x/tools v0.31.0 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 // indirect + golang.org/x/tools v0.32.0 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e // indirect gopkg.in/evanphx/json-patch.v4 v4.12.0 // indirect gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect diff --git a/go.sum b/go.sum index 33f9924723..b9ed9a97c2 100644 --- a/go.sum +++ b/go.sum @@ -1,17 +1,17 @@ -cloud.google.com/go/auth v0.15.0 h1:Ly0u4aA5vG/fsSsxu98qCQBemXtAtJf+95z9HK+cxps= -cloud.google.com/go/auth v0.15.0/go.mod h1:WJDGqZ1o9E9wKIL+IwStfyn/+s59zl4Bi+1KQNVXLZ8= -cloud.google.com/go/auth/oauth2adapt v0.2.7 h1:/Lc7xODdqcEw8IrZ9SvwnlLX6j9FHQM74z6cBk9Rw6M= -cloud.google.com/go/auth/oauth2adapt v0.2.7/go.mod h1:NTbTTzfvPl1Y3V1nPpOgl2w6d/FjO7NNUQaWSox6ZMc= +cloud.google.com/go/auth v0.16.0 h1:Pd8P1s9WkcrBE2n/PhAwKsdrR35V3Sg2II9B+ndM3CU= +cloud.google.com/go/auth v0.16.0/go.mod h1:1howDHJ5IETh/LwYs3ZxvlkXF48aSqqJUM+5o02dNOI= +cloud.google.com/go/auth/oauth2adapt v0.2.8 h1:keo8NaayQZ6wimpNSmW5OPc283g65QNIiLpZnkHRbnc= +cloud.google.com/go/auth/oauth2adapt v0.2.8/go.mod h1:XQ9y31RkqZCcwJWNSx2Xvric3RrU88hAYYbjDWYDL+c= cloud.google.com/go/compute/metadata v0.6.0 h1:A6hENjEsCDtC1k8byVsgwvVcioamEHvZ4j01OwKxG9I= cloud.google.com/go/compute/metadata v0.6.0/go.mod h1:FjyFAW1MW0C203CEOMDTu3Dk1FlqW3Rga40jzHL4hfg= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.1 h1:DSDNVxqkoXJiko6x8a90zidoYqnYYa6c1MTzDKzKkTo= -github.com/Azure/azure-sdk-for-go/sdk/azcore v1.17.1/go.mod h1:zGqV2R4Cr/k8Uye5w+dgQ06WJtEcbQG/8J7BB6hnCr4= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2 h1:F0gBpfdPLGsw+nsgk6aqqkZS1jiixa5WwFe3fk/T3Ys= -github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.2/go.mod h1:SqINnQ9lVVdRlyC8cd1lCI0SdX4n2paeABd2K8ggfnE= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U= +github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.9.0 h1:OVoM452qUFBrX+URdH3VpR299ma4kfom0yB0URYky9g= +github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.9.0/go.mod h1:kUjrAo8bgEwLeZ/CmHqNl3Z/kPm7y6FKfxxK0izYUg4= github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2 h1:yz1bePFlP5Vws5+8ez6T3HWXPmwOK7Yvq8QxDBD3SKY= github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache v0.3.2/go.mod h1:Pa9ZNPuoNu/GztvBSKk9J1cDJW6vk/n0zLtV4mgd8N8= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY= -github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 h1:FPKJS1T+clwv+OLGt13a8UjqeRuh0O4SJ3lUriThc+4= +github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1/go.mod h1:j2chePtV91HrC22tGoRX3sGY42uF13WzmmV80/OdVAA= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute/v5 v5.7.0 h1:LkHbJbgF3YyvC53aqYGR+wWQDn2Rdp9AQdGndf9QvY4= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/compute/armcompute/v5 v5.7.0/go.mod 
h1:QyiQdW4f4/BIfB8ZutZ2s+28RAgfa/pT+zS++ZHyM1I= github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/internal/v2 v2.0.0 h1:PTFGRSlMKCQelWwxUyYVEUqseBJVemLyqWJjvMyt0do= @@ -24,8 +24,8 @@ github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOEl github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1 h1:WJTmL004Abzc5wDB5VtZG2PJk5ndYDgVacGqfirKxjM= github.com/AzureAD/microsoft-authentication-extensions-for-go/cache v0.1.1/go.mod h1:tCcJZ0uHAmvjsVYzEFivsRTN00oz5BEsRgQHu5JZ9WE= -github.com/AzureAD/microsoft-authentication-library-for-go v1.3.3 h1:H5xDQaE3XowWfhZRUpnfC+rGZMEVoSiji+b+/HFAPU4= -github.com/AzureAD/microsoft-authentication-library-for-go v1.3.3/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= +github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2 h1:oygO0locgZJe7PpYPXT5A29ZkwJaPqcva7BVeemZOZs= +github.com/AzureAD/microsoft-authentication-library-for-go v1.4.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/Code-Hex/go-generics-cache v1.5.1 h1:6vhZGc5M7Y/YD8cIUcY8kcuQLB4cHR7U+0KMqAA0KcU= github.com/Code-Hex/go-generics-cache v1.5.1/go.mod h1:qxcC9kRVrct9rHeiYpFWSoW1vxyillCVzX13KZG8dl4= github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ= @@ -49,8 +49,8 @@ github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= -github.com/aws/aws-sdk-go v1.55.6 h1:cSg4pvZ3m8dgYcgqB97MrcdjUmZ1BeMYKUxMMB89IPk= -github.com/aws/aws-sdk-go v1.55.6/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= +github.com/aws/aws-sdk-go v1.55.7 h1:UJrkFq7es5CShfBwlWAC8DA077vp8PyVbQd3lqLiztE= +github.com/aws/aws-sdk-go v1.55.7/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3 h1:6df1vn4bBlDDo4tARvBm7l6KA9iVMnE3NWizDeWSrps= github.com/bboreham/go-loser v0.0.0-20230920113527-fcc2c21820a3/go.mod h1:CIWtjkly68+yqLPbvwwR/fjNJA/idrtULjZWh2v1ys0= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -65,8 +65,8 @@ github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UF github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag= github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I= -github.com/cncf/xds/go v0.0.0-20241223141626-cff3c89139a3 h1:boJj011Hh+874zpIySeApCX4GeOjPl9qhRF3QuIZq+Q= -github.com/cncf/xds/go v0.0.0-20241223141626-cff3c89139a3/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= +github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42 h1:Om6kYQYDUk5wWbT0t0q6pvyM49i9XZAv9dDrkDA7gjk= +github.com/cncf/xds/go v0.0.0-20250121191232-2f005788dc42/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8= github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I= github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo= 
github.com/coreos/go-systemd/v22 v22.5.0 h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs= @@ -80,14 +80,14 @@ github.com/dennwc/varint v1.0.0 h1:kGNFFSSw8ToIy3obO/kKr8U9GZYUAxQEVuix4zfDWzE= github.com/dennwc/varint v1.0.0/go.mod h1:hnItb35rvZvJrbTALZtY/iQfDs48JKRG1RPpgziApxA= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= -github.com/digitalocean/godo v1.141.0 h1:g4vKl9VZvgzE0EEFv8Y3SYiKqdteKS18BrVPatKFSVE= -github.com/digitalocean/godo v1.141.0/go.mod h1:PU8JB6I1XYkQIdHFop8lLAY9ojp6M0XcU0TWaQSxbrc= +github.com/digitalocean/godo v1.144.0 h1:rDCsmpwcDe5egFQ3Ae45HTde685/GzX037mWRMPufW0= +github.com/digitalocean/godo v1.144.0/go.mod h1:tYeiWY5ZXVpU48YaFv0M5irUFHXGorZpDNm7zzdWMzM= github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0= github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E= github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI= github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ= -github.com/docker/docker v28.0.2+incompatible h1:9BILleFwug5FSSqWBgVevgL3ewDJfWWWyZVqlDMttE8= -github.com/docker/docker v28.0.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v28.1.1+incompatible h1:49M11BFLsVO1gxY9UX9p/zwkE/rswggs8AdFmXQw51I= +github.com/docker/docker v28.1.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= @@ -190,16 +190,16 @@ github.com/googleapis/enterprise-certificate-proxy v0.3.6 h1:GW/XbdyBFQ8Qe+YAmFU github.com/googleapis/enterprise-certificate-proxy v0.3.6/go.mod h1:MkHOF77EYAE7qfSuSS9PU6g4Nt4e11cnsDUowfwewLA= github.com/googleapis/gax-go/v2 v2.14.1 h1:hb0FFeiPaQskmvakKu5EbCbpntQn48jyHuvrkurSS/Q= github.com/googleapis/gax-go/v2 v2.14.1/go.mod h1:Hb/NubMaVM88SrNkvl8X/o8XWwDJEPqouaLeN2IUxoA= -github.com/gophercloud/gophercloud/v2 v2.6.0 h1:XJKQ0in3iHOZHVAFMXq/OhjCuvvG+BKR0unOqRfG1EI= -github.com/gophercloud/gophercloud/v2 v2.6.0/go.mod h1:Ki/ILhYZr/5EPebrPL9Ej+tUg4lqx71/YH2JWVeU+Qk= +github.com/gophercloud/gophercloud/v2 v2.7.0 h1:o0m4kgVcPgHlcXiWAjoVxGd8QCmvM5VU+YM71pFbn0E= +github.com/gophercloud/gophercloud/v2 v2.7.0/go.mod h1:Ki/ILhYZr/5EPebrPL9Ej+tUg4lqx71/YH2JWVeU+Qk= github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc h1:GN2Lv3MGO7AS6PrRoT6yV5+wkrOpcszoIsO4+4ds248= github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc/go.mod h1:+JKpmjMGhpgPL+rXZ5nsZieVzvarn86asRlBg4uNGnk= github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 h1:5ZPtiqj0JL5oKWmcsq4VMaAW5ukBEgSGXEN89zeH1Jo= github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3/go.mod h1:ndYquD05frm2vACXE1nsccT4oJzjhw2arTS2cpUD1PI= -github.com/hashicorp/consul/api v1.31.2 h1:NicObVJHcCmyOIl7Z9iHPvvFrocgTYo9cITSGg0/7pw= -github.com/hashicorp/consul/api v1.31.2/go.mod h1:Z8YgY0eVPukT/17ejW+l+C7zJmKwgPHtjU1q16v/Y40= +github.com/hashicorp/consul/api v1.32.0 h1:5wp5u780Gri7c4OedGEPzmlUEzi0g2KyiPphSr6zjVg= 
+github.com/hashicorp/consul/api v1.32.0/go.mod h1:Z8YgY0eVPukT/17ejW+l+C7zJmKwgPHtjU1q16v/Y40= github.com/hashicorp/consul/sdk v0.16.1 h1:V8TxTnImoPD5cj0U9Spl0TUxcytjcbbJeADFF07KdHg= github.com/hashicorp/consul/sdk v0.16.1/go.mod h1:fSXvwxB2hmh1FMZCNl6PwX0Q/1wdWtHJcZ7Ea5tns0s= github.com/hashicorp/cronexpr v1.1.2 h1:wG/ZYIKT+RT3QkOdgYc+xsKWVRgnxJ1OJtjjy84fJ9A= @@ -251,10 +251,10 @@ github.com/hashicorp/nomad/api v0.0.0-20241218080744-e3ac00f30eec h1:+YBzb977Vrm github.com/hashicorp/nomad/api v0.0.0-20241218080744-e3ac00f30eec/go.mod h1:svtxn6QnrQ69P23VvIWMR34tg3vmwLz4UdUzm1dSCgE= github.com/hashicorp/serf v0.10.1 h1:Z1H2J60yRKvfDYAOZLd2MU0ND4AH/WDz7xYHDWQsIPY= github.com/hashicorp/serf v0.10.1/go.mod h1:yL2t6BqATOLGc5HF7qbFkTfXoPIY0WZdWHfEvMqbG+4= -github.com/hetznercloud/hcloud-go/v2 v2.20.1 h1:1wnKY6keRb6ip1kGPAtnU/ugjMxNkQ4tkulS6uc+vfk= -github.com/hetznercloud/hcloud-go/v2 v2.20.1/go.mod h1:WSM7w+9tT86sJTNcF8a/oHljC3HUmQfcLxYsgx6PpSc= -github.com/ionos-cloud/sdk-go/v6 v6.3.2 h1:2mUmrZZz6cPyT9IRX0T8fBLc/7XU/eTxP2Y5tS7/09k= -github.com/ionos-cloud/sdk-go/v6 v6.3.2/go.mod h1:SXrO9OGyWjd2rZhAhEpdYN6VUAODzzqRdqA9BCviQtI= +github.com/hetznercloud/hcloud-go/v2 v2.21.0 h1:wUpQT+fgAxIcdMtFvuCJ78ziqc/VARubpOQPQyj4Q84= +github.com/hetznercloud/hcloud-go/v2 v2.21.0/go.mod h1:WSM7w+9tT86sJTNcF8a/oHljC3HUmQfcLxYsgx6PpSc= +github.com/ionos-cloud/sdk-go/v6 v6.3.3 h1:q33Sw1ZqsvqDkFaKG53dGk7BCOvPCPbGZpYqsF6tdjw= +github.com/ionos-cloud/sdk-go/v6 v6.3.3/go.mod h1:wCVwNJ/21W29FWFUv+fNawOTMlFoP1dS3L+ZuztFW48= github.com/jarcoal/httpmock v1.3.1 h1:iUx3whfZWVf3jT01hQTO/Eo5sAYtB2/rqaUuOtpInww= github.com/jarcoal/httpmock v1.3.1/go.mod h1:3yb8rc4BI7TCBhFY8ng0gjuLKJNquuDNiPaZjnENuYg= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= @@ -272,14 +272,14 @@ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHm github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0 h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= -github.com/keybase/go-keychain v0.0.0-20231219164618-57a3676c3af6 h1:IsMZxCuZqKuao2vNdfD82fjjgPLfyHLpR41Z88viRWs= -github.com/keybase/go-keychain v0.0.0-20231219164618-57a3676c3af6/go.mod h1:3VeWNIJaW+O5xpRQbPp0Ybqu1vJd/pm7s2F473HRrkw= +github.com/keybase/go-keychain v0.0.1 h1:way+bWYa6lDppZoZcgMbYsvC7GxljxrskdNInRtuthU= +github.com/keybase/go-keychain v0.0.1/go.mod h1:PdEILRW3i9D8JcdM+FmY6RwkHGnhHxXwkPPMeUgOK1k= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= -github.com/knadh/koanf/maps v0.1.1 h1:G5TjmUh2D7G2YWf5SQQqSiHRJEjaicvU0KpypqB3NIs= -github.com/knadh/koanf/maps v0.1.1/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI= +github.com/knadh/koanf/maps v0.1.2 h1:RBfmAW5CnZT+PJ1CVc1QSJKf4Xu9kxfQgYVQSu8hpbo= +github.com/knadh/koanf/maps v0.1.2/go.mod h1:npD/QZY3V6ghQDdcQzl1W4ICNVTkohC8E73eI2xW4yI= github.com/knadh/koanf/providers/confmap v0.1.0 h1:gOkxhHkemwG4LezxxN8DMOFopOPghxRVp7JbIvdvqzU= github.com/knadh/koanf/providers/confmap v0.1.0/go.mod h1:2uLhxQzJnyHKfxG927awZC7+fyHFdQkd697K4MdLnIU= github.com/knadh/koanf/v2 v2.1.2 
h1:I2rtLRqXRy1p01m/utEtpZSSA6dcJbgGVuE27kW2PzQ= @@ -297,8 +297,8 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= -github.com/linode/linodego v1.48.1 h1:Ojw1S+K5jJr1dggO8/H6r4FINxXnJbOU5GkbpaTfmhU= -github.com/linode/linodego v1.48.1/go.mod h1:fc3t60If8X+yZTFAebhCnNDFrhwQhq9HDU92WnBousQ= +github.com/linode/linodego v1.49.0 h1:MNd3qwvQzbXB5mCpvdCqlUIu1RPA9oC+50LyB9kK+GQ= +github.com/linode/linodego v1.49.0/go.mod h1:B+HAM3//4w1wOS0BwdaQBKwBxlfe6kYJ7bSC6jJ/xtc= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= @@ -325,8 +325,8 @@ github.com/mdlayher/vsock v1.2.1 h1:pC1mTJTvjo1r9n9fbm7S1j04rCgCzhCOS5DY0zqHlnQ= github.com/mdlayher/vsock v1.2.1/go.mod h1:NRfCibel++DgeMD8z/hP+PPTjlNJsdPOmxcnENvE+SE= github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso= github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI= -github.com/miekg/dns v1.1.64 h1:wuZgD9wwCE6XMT05UU/mlSko71eRSXEAm2EbjQXLKnQ= -github.com/miekg/dns v1.1.64/go.mod h1:Dzw9769uoKVaLuODMDZz9M6ynFU6Em65csPuoi8G0ck= +github.com/miekg/dns v1.1.65 h1:0+tIPHzUW0GCge7IiK3guGP57VAw7hoPDfApjkMD1Fc= +github.com/miekg/dns v1.1.65/go.mod h1:Dzw9769uoKVaLuODMDZz9M6ynFU6Em65csPuoi8G0ck= github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI= github.com/mitchellh/copystructure v1.2.0 h1:vpKXTN4ewci03Vljg/q9QvCGUDttBOGBIa15WveJJGw= github.com/mitchellh/copystructure v1.2.0/go.mod h1:qLl+cE2AmVv+CoeAwDPye/v+N2HKCj9FbZEVFJRxO9s= @@ -341,6 +341,10 @@ github.com/mitchellh/reflectwalk v1.0.2 h1:G2LzWKi524PWgd3mLHV8Y5k7s6XUvT0Gef6zx github.com/mitchellh/reflectwalk v1.0.2/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0= github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo= +github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw= +github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs= +github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU= +github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko= github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 h1:dcztxKSvZ4Id8iPpHERQBbIJfabdt4wUm5qy3wOL2Zc= github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6/go.mod h1:E2VnQOmVuvZB6UYnnDB0qG5Nq/1tD9acaOpo6xmt0Kw= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -369,14 +373,14 @@ github.com/onsi/ginkgo/v2 v2.21.0 h1:7rg/4f3rB88pb5obDgNZrNHrQ4e6WpjonchcpuBRnZM github.com/onsi/ginkgo/v2 v2.21.0/go.mod h1:7Du3c42kxCUegi0IImZ1wUQzMBVecgIHjR1C+NkhLQo= github.com/onsi/gomega v1.35.1 h1:Cwbd75ZBPxFSuZ6T+rN/WCb/gOc6YgFBXLlZLhC7Ds4= github.com/onsi/gomega v1.35.1/go.mod h1:PvZbdDc8J6XJEpDK4HCuRBm8a6Fzp9/DmhC9C7yFlog= -github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.122.0 h1:zHlrYHCN/uGsdfWnAqFb6iksIQv1Aq9lsSTMe/kDsZ0= 
-github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.122.0/go.mod h1:lG9v3A48Y/jox3y8TdhCuakVTZfslTs+u2lkdhc6LIk= -github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.122.0 h1:P6lq+OWqsSdO+o+uTrqu/lko96/MnS+Zc4SqMo3bdvs= -github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.122.0/go.mod h1:45Di232vetvGjROIPxlBlyBMBAgA95szYP8du09shDE= -github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.122.0 h1:Jsn9I74nG85Iw7wWET6g0eQ9tbwVndgNHbzHqdlZVqI= -github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.122.0/go.mod h1:BpcyQo7MedcfxlBmIgRB5DxdLlEa0wHRJ/Nhe8jjnW4= -github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.122.0 h1:VoVWWmasrx6boiis/OV+HmkEXtVm73LXeZMYHJwEgwE= -github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.122.0/go.mod h1:DEk8LYKrIZS01fhJXohi4tRR89iEcF3zt0oHDTB2TT0= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.124.1 h1:jOG1ceAx+IATloKXHsE2Cy88XTgqPB/hiXicOrxENx8= +github.com/open-telemetry/opentelemetry-collector-contrib/internal/exp/metrics v0.124.1/go.mod h1:mtNCoy09iO1f2zy5bEqkyRfRPaNKea57yK63cfHixts= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.124.1 h1:G2daAIXiQhAwQSz9RK71QsBH9rmH/m/vdkFuGIEPfS4= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatatest v0.124.1/go.mod h1:/WAA1PKvHNz7E5SrtGg2KfAWl/PrmS0FVYOanoGxk0I= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.124.1 h1:mMVzpkpy6rKL1Q/xXNogZVtWebIlxTRzhsgp3b9ioCM= +github.com/open-telemetry/opentelemetry-collector-contrib/pkg/pdatautil v0.124.1/go.mod h1:jM8Gsd0fIiwRzWrzd7Gm6PZYi5AgHPRkz0625Rtqyxo= +github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.124.1 h1:gmmzhgewk2fU0Md0vmaDEFgfRycfCfjgPvMA4SEdKiU= +github.com/open-telemetry/opentelemetry-collector-contrib/processor/deltatocumulativeprocessor v0.124.1/go.mod h1:AsQJBuUUY1/yqK2c87hv4deeteaKwktwLIfQCN2OGk4= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.0.2 h1:9yCKha/T5XdGtO0q9Q9a6T5NUCsTn/DrBg0D7ufOcFM= @@ -409,13 +413,13 @@ github.com/prometheus/alertmanager v0.28.1/go.mod h1:0StpPUDDHi1VXeM7p2yYfeZgLVi github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU= -github.com/prometheus/client_golang v1.21.1 h1:DOvXXTqVzvkIewV/CDPFdejpMCGeMcbGCQ8YOmu+Ibk= -github.com/prometheus/client_golang v1.21.1/go.mod h1:U9NM32ykUErtVBxdvD3zfi+EuFkkaBvMb09mIfe0Zgg= +github.com/prometheus/client_golang v1.22.0 h1:rb93p9lokFEsctTys46VnV1kLCDpVZ0a/Y92Vm0Zc6Q= +github.com/prometheus/client_golang v1.22.0/go.mod h1:R7ljNsLXhuQXYZYtw6GAE9AZg8Y7vEW5scdCXrWRXC0= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= 
-github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= -github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= +github.com/prometheus/client_model v0.6.2 h1:oBsgwpGs7iVziMvrGhE53c/GrLUsZdHnqNwqPLxwZyk= +github.com/prometheus/client_model v0.6.2/go.mod h1:y3m2F6Gdpfy6Ut/GBsUqTWZqCUvMVzSfMLjcu6wAwpE= github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4= github.com/prometheus/common v0.63.0 h1:YR/EIY1o3mEFP/kZCD7iDMnLPlGyuU2Gb3HIcXnA98k= @@ -435,13 +439,13 @@ github.com/prometheus/sigv4 v0.1.2 h1:R7570f8AoM5YnTUPFm3mjZH5q2k4D+I/phCWvZ4PXG github.com/prometheus/sigv4 v0.1.2/go.mod h1:GF9fwrvLgkQwDdQ5BXeV9XUSCH/IPNqzvAoaohfjqMU= github.com/puzpuzpuz/xsync/v3 v3.5.1 h1:GJYJZwO6IdxN/IKbneznS6yPkVC+c3zyY/j19c++5Fg= github.com/puzpuzpuz/xsync/v3 v3.5.1/go.mod h1:VjzYrABPabuM4KyBh1Ftq6u8nhwY5tBPKP9jpmh0nnA= -github.com/redis/go-redis/v9 v9.7.0 h1:HhLSs+B6O021gwzl+locl0zEDnyNkxMtf/Z3NNBMa9E= -github.com/redis/go-redis/v9 v9.7.0/go.mod h1:f6zhXITC7JUJIlPEiBOTXxJgPLdZcA93GewI7inzyWw= +github.com/redis/go-redis/v9 v9.7.3 h1:YpPyAayJV+XErNsatSElgRZZVCwXX9QzkKYNvO7x0wM= +github.com/redis/go-redis/v9 v9.7.3/go.mod h1:bGUrSggJ9X9GUmZpZNEOQKaANxSGgOEBRltRTZHSvrA= github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/scaleway/scaleway-sdk-go v1.0.0-beta.32 h1:4+LP7qmsLSGbmc66m1s5dKRMBwztRppfxFKlYqYte/c= -github.com/scaleway/scaleway-sdk-go v1.0.0-beta.32/go.mod h1:kzh+BSAvpoyHHdHBCDhmSWtBc1NbLMZ2lWHqnBoxFks= +github.com/scaleway/scaleway-sdk-go v1.0.0-beta.33 h1:KhF0WejiUTDbL5X55nXowP7zNopwpowa6qaMAWyIE+0= +github.com/scaleway/scaleway-sdk-go v1.0.0-beta.33/go.mod h1:792k1RTU+5JeMXm35/e2Wgp71qPH/DmDoZrRc+EFZDk= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/shoenig/test v1.7.1 h1:UJcjSAI3aUKx52kfcfhblgyhZceouhvvs3OYdWgn+PY= @@ -484,40 +488,44 @@ go.mongodb.org/mongo-driver v1.14.0 h1:P98w8egYRjYe3XDjxhYJagTokP/H6HzlsnojRgZRd go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= -go.opentelemetry.io/collector/component v1.28.1 h1:JjwfvLR0UdadRDAANAdM4mOSwGmfGO3va2X+fdk4YdA= -go.opentelemetry.io/collector/component v1.28.1/go.mod h1:jwZRDML3tXo1whueZdRf+y6z3DeEYTLPBmb/O1ujB40= -go.opentelemetry.io/collector/component/componentstatus v0.122.1 h1:zMQC0y8ZBITa87GOwEANdOoAox5I4UgaIHxY79nwCbk= -go.opentelemetry.io/collector/component/componentstatus v0.122.1/go.mod h1:ZYwOgoXyPu4gGqfQ5DeaEpStpUCD/Clctz4rMd9qQYw= -go.opentelemetry.io/collector/component/componenttest v0.122.1 h1:HE4oeLub2FWVTUzCQG6SWwfnJfcK1FMknXhGQ2gOxnY= -go.opentelemetry.io/collector/component/componenttest v0.122.1/go.mod h1:o3Xq6z3C0aVhrd/fD56aKxShrILVnHnbgQVP5NoFuic= -go.opentelemetry.io/collector/confmap v1.28.0 h1:pUQh4eOW0YQ1GFWTDP5pw/ZMQuppkz6oSoDDloAH/Sc= -go.opentelemetry.io/collector/confmap 
v1.28.0/go.mod h1:k/3fo+2RE6m+OKlJzx78Q8hstABYwYgvXO3u9zyTeHI= -go.opentelemetry.io/collector/confmap/xconfmap v0.122.0 h1:uRwR2/DEhLCwsdQyD5rTG/cAPUm5ixZb96y3rUaUo/g= -go.opentelemetry.io/collector/confmap/xconfmap v0.122.0/go.mod h1:76K9ypccfRyorlYYit8O82mX4hseQP8VJ/TYqCKI4fA= -go.opentelemetry.io/collector/consumer v1.28.1 h1:3lHW2e0i7kEkbDqK1vErA8illqPpwDxMzgc5OUDsJ0Y= -go.opentelemetry.io/collector/consumer v1.28.1/go.mod h1:g0T16JPMYFN6T2noh+1YBxJSt5i5Zp+Y0Y6pvkMqsDQ= -go.opentelemetry.io/collector/consumer/consumertest v0.122.1 h1:LKkLMdWwJCuOYyCMVzwc0OG9vncIqpl8Tp9+H8RikNg= -go.opentelemetry.io/collector/consumer/consumertest v0.122.1/go.mod h1:pYqWgx62ou3uUn8nlt2ohRyKod+7xLTf/uA3YfRwVkA= -go.opentelemetry.io/collector/consumer/xconsumer v0.122.1 h1:iK1hGbho/XICdBfGb4MnKwF9lnhLmv09yQ4YlVm+LGo= -go.opentelemetry.io/collector/consumer/xconsumer v0.122.1/go.mod h1:xYbRPP1oWcYUUDQJTlv78M/rlYb+qE4weiv++ObZRSU= -go.opentelemetry.io/collector/featuregate v1.28.0 h1:nkaMw0HyOSxojLwlezF2O/xJ9T/Jo1a0iEetesT9lr0= -go.opentelemetry.io/collector/featuregate v1.28.0/go.mod h1:Y/KsHbvREENKvvN9RlpiWk/IGBK+CATBYzIIpU7nccc= -go.opentelemetry.io/collector/pdata v1.28.1 h1:ORl5WLpQJvjzBVpHu12lqKMdcf/qDBwRXMcUubhybiQ= -go.opentelemetry.io/collector/pdata v1.28.1/go.mod h1:asKE8MD/4SOKz1mCrGdAz4VO2U2HUNg8A6094uK7pq0= -go.opentelemetry.io/collector/pdata/pprofile v0.122.1 h1:25Fs0eL/J/M2ZEaVplesbI1H7pYx462zUUVxVOszpOg= -go.opentelemetry.io/collector/pdata/pprofile v0.122.1/go.mod h1:+jSjgb4zRnNmr1R/zgVLVyTVSm9irfGrvGTrk3lDxSE= -go.opentelemetry.io/collector/pdata/testdata v0.122.1 h1:9DO8nUUnPAGYMKmrep6wLAfOHprvKY4w/7LpE4jldPQ= -go.opentelemetry.io/collector/pdata/testdata v0.122.1/go.mod h1:hYdNrn8KxFwq1nf44YYRgNhDjJTBzoyEr/Qa26pN0t4= -go.opentelemetry.io/collector/pipeline v0.122.1 h1:f0uuiDmanVyKwfYo6cWveJsGbLXidV7i+Z7u8QJwWxI= -go.opentelemetry.io/collector/pipeline v0.122.1/go.mod h1:TO02zju/K6E+oFIOdi372Wk0MXd+Szy72zcTsFQwXl4= -go.opentelemetry.io/collector/processor v0.122.1 h1:AvZvEujq8+FYdJsm9lmAMwuuae5Y2/vKIkOJwsoxsxQ= -go.opentelemetry.io/collector/processor v0.122.1/go.mod h1:nYKctftba7SbdLml6LxgIrnYRXCShDe2bnNWjTIpF7g= -go.opentelemetry.io/collector/processor/processortest v0.122.1 h1:n4UOx1mq+kLaRiHGsu7vBLq+EGXfzWhSxyFweMjMl54= -go.opentelemetry.io/collector/processor/processortest v0.122.1/go.mod h1:8/NRWx18tNJMBwCQ8/YPWr4qsFUrwk27qE7/dXoJb1M= -go.opentelemetry.io/collector/processor/xprocessor v0.122.1 h1:Wfv4/7n4YK1HunAVTMS6yf0xmDjCkftJ6EECNcSwzfs= -go.opentelemetry.io/collector/processor/xprocessor v0.122.1/go.mod h1:9zMW3NQ9+DzcJ1cUq5BhZg3ajoUEMGhNY0ZdYjpX+VI= -go.opentelemetry.io/collector/semconv v0.122.1 h1:WLzDi3QC4/+LpNMLY90zn5aMDJKyqg/ujW2O4T4sxHg= -go.opentelemetry.io/collector/semconv v0.122.1/go.mod h1:te6VQ4zZJO5Lp8dM2XIhDxDiL45mwX0YAQQWRQ0Qr9U= +go.opentelemetry.io/collector/component v1.30.0 h1:HXjqBHaQ47/EEuWdnkjr4Y3kRWvmyWIDvqa1Q262Fls= +go.opentelemetry.io/collector/component v1.30.0/go.mod h1:vfM9kN+BM6oHBXWibquiprz8CVawxd4/aYy3nbhme3E= +go.opentelemetry.io/collector/component/componentstatus v0.124.0 h1:0WHaANNktxLIk+lN+CtgPBESI1MJBrfVW/LvNCbnMQ4= +go.opentelemetry.io/collector/component/componentstatus v0.124.0/go.mod h1:a/wa8nxJGWOGuLwCN8gHCzFHCaUVZ+VyUYuKz9Yaq38= +go.opentelemetry.io/collector/component/componenttest v0.124.0 h1:Wsc+DmDrWTFs/aEyjDA3slNwV+h/0NOyIR5Aywvr6Zw= +go.opentelemetry.io/collector/component/componenttest v0.124.0/go.mod h1:NQ4ATOzMFc7QA06B993tq8o27DR0cu/JR/zK7slGJ3E= +go.opentelemetry.io/collector/confmap v1.30.0 
h1:Y0MXhjQCdMyJN9xZMWWdNPWs6ncMVf7YVnyAEN2dAcM= +go.opentelemetry.io/collector/confmap v1.30.0/go.mod h1:9DdThVDIC3VsdtTb7DgT+HwusWOocoqDkd/TErEtQgA= +go.opentelemetry.io/collector/confmap/xconfmap v0.124.0 h1:PK+CaSgjLvzHaafBieJ3AjiUTAPuf40C+/Fn38LvmW8= +go.opentelemetry.io/collector/confmap/xconfmap v0.124.0/go.mod h1:DZmFSgWiqXQrzld9uU+73YAVI5JRIgd8RkK5HcaXGU0= +go.opentelemetry.io/collector/consumer v1.30.0 h1:Nn6kFTH+EJbv13E0W+sNvWrTgbiFCRv8f6DaA2F1DQs= +go.opentelemetry.io/collector/consumer v1.30.0/go.mod h1:edRyfk61ugdhCQ93PBLRZfYMVWjdMPpKP8z5QLyESf0= +go.opentelemetry.io/collector/consumer/consumertest v0.124.0 h1:2arChG4RPrHW3lfVWlK/KDF7Y7qkUm/YAiBXh8oTue0= +go.opentelemetry.io/collector/consumer/consumertest v0.124.0/go.mod h1:Hlu+EXbINHxVAyIT1baKO2d0j5odR3fLlLAiaP+JqQg= +go.opentelemetry.io/collector/consumer/xconsumer v0.124.0 h1:/cut96EWVNoz6lIeGI9+EzS6UClMtnZkx5YIpkD0Xe0= +go.opentelemetry.io/collector/consumer/xconsumer v0.124.0/go.mod h1:fHH/MpzFCRNk/4foiYE6BoXQCAMf5sJTO35uvzVrrd4= +go.opentelemetry.io/collector/featuregate v1.30.0 h1:mx7+iP/FQnY7KO8qw/xE3Qd1MQkWcU8VgcqLNrJ8EU8= +go.opentelemetry.io/collector/featuregate v1.30.0/go.mod h1:Y/KsHbvREENKvvN9RlpiWk/IGBK+CATBYzIIpU7nccc= +go.opentelemetry.io/collector/internal/telemetry v0.124.0 h1:kzd1/ZYhLj4bt2pDB529mL4rIRrRacemXodFNxfhdWk= +go.opentelemetry.io/collector/internal/telemetry v0.124.0/go.mod h1:ZjXjqV0dJ+6D4XGhTOxg/WHjnhdmXsmwmUSgALea66Y= +go.opentelemetry.io/collector/pdata v1.30.0 h1:j3jyq9um436r6WzWySzexP2nLnFdmL5uVBYAlyr9nDM= +go.opentelemetry.io/collector/pdata v1.30.0/go.mod h1:0Bxu1ktuj4wE7PIASNSvd0SdBscQ1PLtYasymJ13/Cs= +go.opentelemetry.io/collector/pdata/pprofile v0.124.0 h1:ZjL9wKqzP4BHj0/F1jfGxs1Va8B7xmYayipZeNVoWJE= +go.opentelemetry.io/collector/pdata/pprofile v0.124.0/go.mod h1:1EN3Gw5LSI4fSVma/Yfv/6nqeuYgRTm1/kmG5nE5Oyo= +go.opentelemetry.io/collector/pdata/testdata v0.124.0 h1:vY+pWG7CQfzzGSB5+zGYHQOltRQr59Ek9QiPe+rI+NY= +go.opentelemetry.io/collector/pdata/testdata v0.124.0/go.mod h1:lNH48lGhGv4CYk27fJecpsR1zYHmZjKgNrAprwjym0o= +go.opentelemetry.io/collector/pipeline v0.124.0 h1:hKvhDyH2GPnNO8LGL34ugf36sY7EOXPjBvlrvBhsOdw= +go.opentelemetry.io/collector/pipeline v0.124.0/go.mod h1:TO02zju/K6E+oFIOdi372Wk0MXd+Szy72zcTsFQwXl4= +go.opentelemetry.io/collector/processor v1.30.0 h1:dxmu+sO6MzQydyrf2CON5Hm1KU7yV4ofH1stmreUtPk= +go.opentelemetry.io/collector/processor v1.30.0/go.mod h1:DjXAgelT8rfIWCTJP5kiPpxPqz4JLE1mJwsE2kJMTk8= +go.opentelemetry.io/collector/processor/processortest v0.124.0 h1:qcyo0dSWmgpNFxjObsKk3Rd/wWV8CkMevd+jApkTQWE= +go.opentelemetry.io/collector/processor/processortest v0.124.0/go.mod h1:1YDTxd4c/uVU3Ui1+AzvYW94mo5DbhNmB1xSof6zvD0= +go.opentelemetry.io/collector/processor/xprocessor v0.124.0 h1:KAe8gIje8TcB8varZ4PDy0HV5xX5rNdaQ7q46BE915w= +go.opentelemetry.io/collector/processor/xprocessor v0.124.0/go.mod h1:ItJBBlR6/141vg1v4iRrcsBrGjPCgmXAztxS2x2YkdI= +go.opentelemetry.io/collector/semconv v0.124.0 h1:YTdo3UFwNyDQCh9DiSm2rbzAgBuwn/9dNZ0rv454goA= +go.opentelemetry.io/collector/semconv v0.124.0/go.mod h1:te6VQ4zZJO5Lp8dM2XIhDxDiL45mwX0YAQQWRQ0Qr9U= +go.opentelemetry.io/contrib/bridges/otelzap v0.10.0 h1:ojdSRDvjrnm30beHOmwsSvLpoRF40MlwNCA+Oo93kXU= +go.opentelemetry.io/contrib/bridges/otelzap v0.10.0/go.mod h1:oTTm4g7NEtHSV2i/0FeVdPaPgUIZPfQkFbq0vbzqnv0= go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0 h1:0tY123n7CdWMem7MOVdKOt0YfshufLCwfE5Bob+hQuM= go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0/go.mod 
h1:CosX/aS4eHnG9D7nESYpV753l4j9q5j3SL/PUYd2lR8= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0 h1:sbiXRNDSWJOTobXh5HyQKjq6wUC5tNybqjIqDpAY4CU= @@ -530,6 +538,8 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.35.0 h1:m639+ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.35.0/go.mod h1:LjReUci/F4BUyv+y4dwnq3h/26iNOeC3wAIqgvTIZVo= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.35.0 h1:xJ2qHD0C1BeYVTLLR9sX12+Qb95kfeD/byKj6Ky1pXg= go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.35.0/go.mod h1:u5BF1xyjstDowA1R5QAO9JHzqK+ublenEW/dyqTjBVk= +go.opentelemetry.io/otel/log v0.11.0 h1:c24Hrlk5WJ8JWcwbQxdBqxZdOK7PcP/LFtOtwpDTe3Y= +go.opentelemetry.io/otel/log v0.11.0/go.mod h1:U/sxQ83FPmT29trrifhQg+Zj2lo1/IPN1PF6RTFqdwc= go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M= go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE= go.opentelemetry.io/otel/sdk v1.35.0 h1:iPctf8iprVySXSKJffSS79eOjl9pvxV9ZqOWT0QejKY= @@ -555,8 +565,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= -golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE= +golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc= golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8 h1:yqrTHse8TCMW1M1ZCP+VAR/l0kKxwaAIqN/il7x4voA= golang.org/x/exp v0.0.0-20250106191152-7588d65b2ba8/go.mod h1:tujkw807nyEEAamNbDrEGzRav+ilXA7PCRAd6xsmwiU= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= @@ -573,18 +583,18 @@ golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8= -golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= -golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= -golang.org/x/oauth2 v0.28.0 h1:CrgCKl8PPAVtLnU3c+EDw6x11699EWlsDeWNWKdIOkc= -golang.org/x/oauth2 v0.28.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= +golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY= +golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E= +golang.org/x/oauth2 v0.29.0 h1:WdYw2tdTK1S8olAzWHdgeqfy+Mtm9XNhv/xJsY65d98= +golang.org/x/oauth2 v0.29.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync 
v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= -golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= +golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -610,17 +620,17 @@ golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= -golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20= +golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y= -golang.org/x/term v0.30.0/go.mod h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g= +golang.org/x/term v0.31.0 h1:erwDkOK1Msy6offm1mOgvspSkslFnIGsFnxOKoufg3o= +golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= -golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= +golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0= +golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU= golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -629,22 +639,22 @@ golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.31.0 h1:0EedkvKDbh+qistFTd0Bcwe/YLh4vHwWEkiI0toFIBU= -golang.org/x/tools v0.31.0/go.mod h1:naFTU+Cev749tSJRXJlna0T3WxKvb1kWEx15xA4SdmQ= +golang.org/x/tools 
v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU= +golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.227.0 h1:QvIHF9IuyG6d6ReE+BNd11kIB8hZvjN8Z5xY5t21zYc= -google.golang.org/api v0.227.0/go.mod h1:EIpaG6MbTgQarWF5xJvX0eOJPK9n/5D4Bynb9j2HXvQ= +google.golang.org/api v0.230.0 h1:2u1hni3E+UXAXrONrrkfWpi/V6cyKVAbfGVeGtC3OxM= +google.golang.org/api v0.230.0/go.mod h1:aqvtoMk7YkiXx+6U12arQFExiRV9D/ekvMCwCd/TksQ= google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb h1:p31xT4yrYrSM/G4Sn2+TNUkVhFCbG9y8itM2S6Th950= google.golang.org/genproto/googleapis/api v0.0.0-20250303144028-a0af3efb3deb/go.mod h1:jbe3Bkdp+Dh2IrslsFCklNhweNTBgSYanP1UXhJDhKg= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4 h1:iK2jbkWL86DXjEx0qiHcRE9dE4/Ahua5k6V8OWFb//c= -google.golang.org/genproto/googleapis/rpc v0.0.0-20250313205543-e70fdf4c4cb4/go.mod h1:LuRYeWDFV6WOn90g357N17oMCaxpgCnbi/44qJvDn2I= -google.golang.org/grpc v1.71.0 h1:kF77BGdPTQ4/JZWMlb9VpJ5pa25aqvVqogsxNHHdeBg= -google.golang.org/grpc v1.71.0/go.mod h1:H0GRtasmQOh9LkFoCPDu3ZrwUtD1YGE+b2vYBYd/8Ec= -google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM= -google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e h1:ztQaXfzEXTmCBvbtWYRhJxW+0iJcz2qXfd38/e9l7bA= +google.golang.org/genproto/googleapis/rpc v0.0.0-20250414145226-207652e42e2e/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A= +google.golang.org/grpc v1.72.0 h1:S7UkcVa60b5AAQTaO6ZKamFp1zMZSU0fGDK2WZLbBnM= +google.golang.org/grpc v1.72.0/go.mod h1:wH5Aktxcg25y1I3w7H69nHfXdOG3UiadoBtjh3izSDM= +google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= +google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/internal/tools/go.mod b/internal/tools/go.mod index 2cad9b4b71..046a272340 100644 --- a/internal/tools/go.mod +++ b/internal/tools/go.mod @@ -3,37 +3,37 @@ module github.com/prometheus/prometheus/internal/tools go 1.23.0 require ( - github.com/bufbuild/buf v1.50.1 + github.com/bufbuild/buf v1.51.0 github.com/daixiang0/gci v0.13.6 github.com/gogo/protobuf v1.3.2 github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 ) require ( - buf.build/gen/go/bufbuild/bufplugin/protocolbuffers/go v1.36.4-20250121211742-6d880cc6cc8d.1 // indirect - buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.4-20241127180247-a33202765966.1 // indirect + buf.build/gen/go/bufbuild/bufplugin/protocolbuffers/go v1.36.5-20250121211742-6d880cc6cc8d.1 // indirect + buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.5-20250307204501-0409229c3780.1 // 
indirect buf.build/gen/go/bufbuild/registry/connectrpc/go v1.18.1-20250116203702-1c024d64352b.1 // indirect - buf.build/gen/go/bufbuild/registry/protocolbuffers/go v1.36.4-20250116203702-1c024d64352b.1 // indirect + buf.build/gen/go/bufbuild/registry/protocolbuffers/go v1.36.5-20250116203702-1c024d64352b.1 // indirect buf.build/gen/go/pluginrpc/pluginrpc/protocolbuffers/go v1.36.4-20241007202033-cf42259fcbfc.1 // indirect - buf.build/go/bufplugin v0.7.0 // indirect + buf.build/go/bufplugin v0.8.0 // indirect buf.build/go/protoyaml v0.3.1 // indirect buf.build/go/spdx v0.2.0 // indirect - cel.dev/expr v0.19.2 // indirect + cel.dev/expr v0.21.2 // indirect connectrpc.com/connect v1.18.1 // indirect - connectrpc.com/otelconnect v0.7.1 // indirect + connectrpc.com/otelconnect v0.7.2 // indirect github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect github.com/Microsoft/go-winio v0.6.2 // indirect github.com/antlr4-go/antlr/v4 v4.13.1 // indirect github.com/bufbuild/protocompile v0.14.1 // indirect github.com/bufbuild/protoplugin v0.0.0-20250106231243-3a819552c9d9 // indirect - github.com/bufbuild/protovalidate-go v0.8.2 // indirect + github.com/bufbuild/protovalidate-go v0.9.3-0.20250317160558-38a17488914d // indirect github.com/containerd/log v0.1.0 // indirect github.com/containerd/stargz-snapshotter/estargz v0.16.3 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.6 // indirect github.com/distribution/reference v0.6.0 // indirect github.com/docker/cli v27.5.1+incompatible // indirect github.com/docker/distribution v2.8.3+incompatible // indirect - github.com/docker/docker v28.0.0+incompatible // indirect + github.com/docker/docker v28.0.2+incompatible // indirect github.com/docker/docker-credential-helpers v0.8.2 // indirect github.com/docker/go-connections v0.5.0 // indirect github.com/docker/go-units v0.5.0 // indirect @@ -73,7 +73,7 @@ require ( github.com/pkg/errors v0.9.1 // indirect github.com/pkg/profile v1.7.0 // indirect github.com/quic-go/qpack v0.5.1 // indirect - github.com/quic-go/quic-go v0.50.0 // indirect + github.com/quic-go/quic-go v0.50.1 // indirect github.com/rs/cors v1.11.1 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/segmentio/asm v1.2.0 // indirect @@ -100,7 +100,7 @@ require ( golang.org/x/crypto v0.36.0 // indirect golang.org/x/exp v0.0.0-20250228200357-dead58393ab7 // indirect golang.org/x/mod v0.24.0 // indirect - golang.org/x/net v0.37.0 // indirect + golang.org/x/net v0.38.0 // indirect golang.org/x/sync v0.12.0 // indirect golang.org/x/sys v0.31.0 // indirect golang.org/x/term v0.30.0 // indirect diff --git a/internal/tools/go.sum b/internal/tools/go.sum index 8bbbd0a32f..54a200b5a6 100644 --- a/internal/tools/go.sum +++ b/internal/tools/go.sum @@ -1,25 +1,25 @@ -buf.build/gen/go/bufbuild/bufplugin/protocolbuffers/go v1.36.4-20250121211742-6d880cc6cc8d.1 h1:p5SFT60M93aMQhOz81VH3kPg8t1pp/Litae/1eSxie4= -buf.build/gen/go/bufbuild/bufplugin/protocolbuffers/go v1.36.4-20250121211742-6d880cc6cc8d.1/go.mod h1:umI0o7WWHv8lCbLjYUMzfjHKjyaIt2D89sIj1D9fqy0= -buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.4-20241127180247-a33202765966.1 h1:yeaeyw0RQUe009ebxBQ3TsqBPptiNEGsiS10t+8Htuo= -buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.4-20241127180247-a33202765966.1/go.mod h1:novQBstnxcGpfKf8qGRATqn1anQKwMJIbH5Q581jibU= +buf.build/gen/go/bufbuild/bufplugin/protocolbuffers/go v1.36.5-20250121211742-6d880cc6cc8d.1 h1:z/NYWpgoeKkKL3+LYF+8QK58Rjz3qkMAshpdzJTaJ7o= 
+buf.build/gen/go/bufbuild/bufplugin/protocolbuffers/go v1.36.5-20250121211742-6d880cc6cc8d.1/go.mod h1:LpnZWZGTs6IBCnY9WHAkR9X4/NbpL5nwOXivQdXILTs= +buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.5-20250307204501-0409229c3780.1 h1:j+l4+E1EEo83GVIxuqinfFOTyImSQUH90WfufE86xaI= +buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.5-20250307204501-0409229c3780.1/go.mod h1:eOqrCVUfhh7SLo00urDe/XhJHljj0dWMZirS0aX7cmc= buf.build/gen/go/bufbuild/registry/connectrpc/go v1.18.1-20250116203702-1c024d64352b.1 h1:1SDs5tEGoWWv2vmKLx2B0Bp+yfhlxiU4DaZUII8+Pvs= buf.build/gen/go/bufbuild/registry/connectrpc/go v1.18.1-20250116203702-1c024d64352b.1/go.mod h1:o2AgVM1j3MczvxnMqfZTpiqGwK1VD4JbEagseY0QcjE= -buf.build/gen/go/bufbuild/registry/protocolbuffers/go v1.36.4-20250116203702-1c024d64352b.1 h1:uKJgSNHvwQUZ6+0dSnx9MtkZ+h/ORbkKym0rlzIjUSI= -buf.build/gen/go/bufbuild/registry/protocolbuffers/go v1.36.4-20250116203702-1c024d64352b.1/go.mod h1:Ua59W2s7uwPS5sGNgW08QewjBaPnUxOdpkWsuDvJ36Q= +buf.build/gen/go/bufbuild/registry/protocolbuffers/go v1.36.5-20250116203702-1c024d64352b.1 h1:MTNYELBBDEj2ddEwWb/vuAm5PRLyWtZe7CLnc6WZ5qQ= +buf.build/gen/go/bufbuild/registry/protocolbuffers/go v1.36.5-20250116203702-1c024d64352b.1/go.mod h1:E8bt4dG1/NfuocvVmlDNWIfKoLK0B4AgGq4ubwEGBvo= buf.build/gen/go/pluginrpc/pluginrpc/protocolbuffers/go v1.36.4-20241007202033-cf42259fcbfc.1 h1:XmYgi9W/9oST2ZrfT3ucGWkzD9+Vd0ls9yhyZ8ae0KQ= buf.build/gen/go/pluginrpc/pluginrpc/protocolbuffers/go v1.36.4-20241007202033-cf42259fcbfc.1/go.mod h1:cxFpqWIC80Wm8YNo1038ocBmrF84uQ0IfL0uVdAu9ZY= -buf.build/go/bufplugin v0.7.0 h1:Tq8FXBVfpMxhl3QR6P/gMQHROg1Ss7WhpyD4QVV61ds= -buf.build/go/bufplugin v0.7.0/go.mod h1:LuQzv36Ezu2zQIQUtwg4WJJFe58tXn1anL1IosAh6ik= +buf.build/go/bufplugin v0.8.0 h1:YgR1+CNGmzR69jt85oRWTa5FioZoX/tOrHV+JxfNnnk= +buf.build/go/bufplugin v0.8.0/go.mod h1:rcm0Esd3P/GM2rtYTvz3+9Gf8w9zdo7rG8dKSxYHHIE= buf.build/go/protoyaml v0.3.1 h1:ucyzE7DRnjX+mQ6AH4JzN0Kg50ByHHu+yrSKbgQn2D4= buf.build/go/protoyaml v0.3.1/go.mod h1:0TzNpFQDXhwbkXb/ajLvxIijqbve+vMQvWY/b3/Dzxg= buf.build/go/spdx v0.2.0 h1:IItqM0/cMxvFJJumcBuP8NrsIzMs/UYjp/6WSpq8LTw= buf.build/go/spdx v0.2.0/go.mod h1:bXdwQFem9Si3nsbNy8aJKGPoaPi5DKwdeEp5/ArZ6w8= -cel.dev/expr v0.19.2 h1:V354PbqIXr9IQdwy4SYA4xa0HXaWq1BUPAGzugBY5V4= -cel.dev/expr v0.19.2/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= +cel.dev/expr v0.21.2 h1:o+Wj235dy4gFYlYin3JsMpp3EEfMrPm/6tdoyjT98S0= +cel.dev/expr v0.21.2/go.mod h1:MrpN08Q+lEBs+bGYdLxxHkZoUSsCp0nSKTs0nTymJgw= connectrpc.com/connect v1.18.1 h1:PAg7CjSAGvscaf6YZKUefjoih5Z/qYkyaTrBW8xvYPw= connectrpc.com/connect v1.18.1/go.mod h1:0292hj1rnx8oFrStN7cB4jjVBeqs+Yx5yDIC2prWDO8= -connectrpc.com/otelconnect v0.7.1 h1:scO5pOb0i4yUE66CnNrHeK1x51yq0bE0ehPg6WvzXJY= -connectrpc.com/otelconnect v0.7.1/go.mod h1:dh3bFgHBTb2bkqGCeVVOtHJreSns7uu9wwL2Tbz17ms= +connectrpc.com/otelconnect v0.7.2 h1:WlnwFzaW64dN06JXU+hREPUGeEzpz3Acz2ACOmN8cMI= +connectrpc.com/otelconnect v0.7.2/go.mod h1:JS7XUKfuJs2adhCnXhNHPHLz6oAaZniCJdSF00OZSew= github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 h1:bvDV9vkmnHYOMsOr4WLk+Vo07yKIzd94sVoIqshQ4bU= github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8= github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c h1:udKWzYgxTojEKWjV8V+WSxDXJ4NFATAsZjh8iIbsQIg= @@ -28,14 +28,14 @@ github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERo github.com/Microsoft/go-winio v0.6.2/go.mod 
h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ= github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= -github.com/bufbuild/buf v1.50.1 h1:3sEaWLw6g7bSIJ+yKo6ERF3qpkaLNGd8SzImFpA5gUI= -github.com/bufbuild/buf v1.50.1/go.mod h1:LqTlfsFs4RD3L+VoBudEWJzWi12Pa0+Q2vDQnY0YQv0= +github.com/bufbuild/buf v1.51.0 h1:k2we7gmuSDeIqxkv16F/8s5Kk0l2ZfvMHpvC1n6o5Rk= +github.com/bufbuild/buf v1.51.0/go.mod h1:TbX4Df3BfE0Lugd3Y3sFr7QTxqmCfPkuiEexe29KZeE= github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw= github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c= github.com/bufbuild/protoplugin v0.0.0-20250106231243-3a819552c9d9 h1:kAWER21DzhzU7ys8LL1WkSfbGkwXv+tM30hyEsYrW2k= github.com/bufbuild/protoplugin v0.0.0-20250106231243-3a819552c9d9/go.mod h1:c5D8gWRIZ2HLWO3gXYTtUfw/hbJyD8xikv2ooPxnklQ= -github.com/bufbuild/protovalidate-go v0.8.2 h1:sgzXHkHYP6HnAsL2Rd3I1JxkYUyEQUv9awU1PduMxbM= -github.com/bufbuild/protovalidate-go v0.8.2/go.mod h1:K6w8iPNAXBoIivVueSELbUeUl+MmeTQfCDSug85pn3M= +github.com/bufbuild/protovalidate-go v0.9.3-0.20250317160558-38a17488914d h1:Y6Yp/LwSaRG8gw9GyyQD7jensL9NXqPlkbuulaAvCEE= +github.com/bufbuild/protovalidate-go v0.9.3-0.20250317160558-38a17488914d/go.mod h1:SZN6Qr3lPWuKMoQtIhKdhESkb+3m2vk0lqN9WMuZDDU= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/chromedp/cdproto v0.0.0-20230802225258-3cf4e6d46a89/go.mod h1:GKljq0VrfU4D5yc+2qA6OVr8pmO/MBbPEWqWQ/oqGEs= @@ -68,16 +68,16 @@ github.com/docker/cli v27.5.1+incompatible h1:JB9cieUT9YNiMITtIsguaN55PLOHhBSz3L github.com/docker/cli v27.5.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= -github.com/docker/docker v28.0.0+incompatible h1:Olh0KS820sJ7nPsBKChVhk5pzqcwDR15fumfAd/p9hM= -github.com/docker/docker v28.0.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/docker v28.0.2+incompatible h1:9BILleFwug5FSSqWBgVevgL3ewDJfWWWyZVqlDMttE8= +github.com/docker/docker v28.0.2+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo= github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M= github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c= github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/envoyproxy/protoc-gen-validate v1.1.0 h1:tntQDh69XqOCOZsDz0lVJQez/2L6Uu2PdjCQwWCJ3bM= -github.com/envoyproxy/protoc-gen-validate v1.1.0/go.mod h1:sXRDRVmzEbkM7CVcM06s9shE/m23dg3wzjl0UWqJ2q4= +github.com/envoyproxy/protoc-gen-validate v1.2.1 h1:DEo3O99U8j4hBFwbJfrz9VtgcDfUKS7KJ7spH3d86P8= +github.com/envoyproxy/protoc-gen-validate v1.2.1/go.mod h1:d/C80l/jxXLdfEIhX1W2TmLfsJ31lvEjwamM4DxlWXU= github.com/felixge/fgprof v0.9.3/go.mod 
h1:RdbpDgzqYVh/T9fPELJyV7EYJuHB55UTEULNun8eiPw= github.com/felixge/fgprof v0.9.5 h1:8+vR6yu2vvSKn08urWyEuxx75NWPEvybbkBirEpsbVY= github.com/felixge/fgprof v0.9.5/go.mod h1:yKl+ERSa++RYOs32d8K6WEXCB4uXdLls4ZaZPpayhMM= @@ -184,8 +184,8 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/quic-go/qpack v0.5.1 h1:giqksBPnT/HDtZ6VhtFKgoLOWmlyo9Ei6u9PqzIMbhI= github.com/quic-go/qpack v0.5.1/go.mod h1:+PC4XFrEskIVkcLzpEkbLqq1uCoxPhQuvK5rH1ZgaEg= -github.com/quic-go/quic-go v0.50.0 h1:3H/ld1pa3CYhkcc20TPIyG1bNsdhn9qZBGN3b9/UyUo= -github.com/quic-go/quic-go v0.50.0/go.mod h1:Vim6OmUvlYdwBhXP9ZVrtGmCMWa3wEqhq3NgYrI8b4E= +github.com/quic-go/quic-go v0.50.1 h1:unsgjFIUqW8a2oopkY7YNONpV1gYND6Nt9hnt1PN94Q= +github.com/quic-go/quic-go v0.50.1/go.mod h1:Vim6OmUvlYdwBhXP9ZVrtGmCMWa3wEqhq3NgYrI8b4E= github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/rs/cors v1.11.1 h1:eU3gRzXLRK57F5rKMGMZURNdIG4EoAmX8k94r9wXWHA= @@ -272,8 +272,8 @@ golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.37.0 h1:1zLorHbz+LYj7MQlSf1+2tPIIgibq2eL5xkrGk6f+2c= -golang.org/x/net v0.37.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= +golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= diff --git a/model/labels/labels.go b/model/labels/labels.go index 5ebdf6a3fe..5dc25ba85d 100644 --- a/model/labels/labels.go +++ b/model/labels/labels.go @@ -32,8 +32,8 @@ func (ls Labels) Len() int { return len(ls) } func (ls Labels) Swap(i, j int) { ls[i], ls[j] = ls[j], ls[i] } func (ls Labels) Less(i, j int) bool { return ls[i].Name < ls[j].Name } -// Bytes returns ls as a byte slice. -// It uses an byte invalid character as a separator and so should not be used for printing. +// Bytes returns an opaque, not-human-readable, encoding of ls, usable as a map key. +// Encoding may change over time or between runs of Prometheus. func (ls Labels) Bytes(buf []byte) []byte { b := bytes.NewBuffer(buf[:0]) b.WriteByte(labelSep) diff --git a/model/labels/labels_dedupelabels.go b/model/labels/labels_dedupelabels.go index a0d83e0044..38cf91301c 100644 --- a/model/labels/labels_dedupelabels.go +++ b/model/labels/labels_dedupelabels.go @@ -140,8 +140,8 @@ func decodeString(t *nameTable, data string, index int) (string, int) { return t.ToName(num), index } -// Bytes returns ls as a byte slice. -// It uses non-printing characters and so should not be used for printing. +// Bytes returns an opaque, not-human-readable, encoding of ls, usable as a map key. 
+// Encoding may change over time or between runs of Prometheus. func (ls Labels) Bytes(buf []byte) []byte { b := bytes.NewBuffer(buf[:0]) for i := 0; i < len(ls.data); { diff --git a/model/labels/labels_stringlabels.go b/model/labels/labels_stringlabels.go index fa0bd7bc27..17f01d02c0 100644 --- a/model/labels/labels_stringlabels.go +++ b/model/labels/labels_stringlabels.go @@ -24,31 +24,25 @@ import ( ) // Labels is implemented by a single flat string holding name/value pairs. -// Each name and value is preceded by its length in varint encoding. +// Each name and value is preceded by its length, encoded as a single byte +// for size 0-254, or the following 3 bytes little-endian, if the first byte is 255. +// Maximum length allowed is 2^24 or 16MB. // Names are in order. type Labels struct { data string } func decodeSize(data string, index int) (int, int) { - // Fast-path for common case of a single byte, value 0..127. b := data[index] index++ - if b < 0x80 { - return int(b), index - } - size := int(b & 0x7F) - for shift := uint(7); ; shift += 7 { + if b == 255 { + // Larger numbers are encoded as 3 bytes little-endian. // Just panic if we go of the end of data, since all Labels strings are constructed internally and // malformed data indicates a bug, or memory corruption. - b := data[index] - index++ - size |= int(b&0x7F) << shift - if b < 0x80 { - break - } + return int(data[index]) + (int(data[index+1]) << 8) + (int(data[index+2]) << 16), index + 3 } - return size, index + // More common case of a single byte, value 0..254. + return int(b), index } func decodeString(data string, index int) (string, int) { @@ -57,8 +51,8 @@ func decodeString(data string, index int) (string, int) { return data[index : index+size], index + size } -// Bytes returns ls as a byte slice. -// It uses non-printing characters and so should not be used for printing. +// Bytes returns an opaque, not-human-readable, encoding of ls, usable as a map key. +// Encoding may change over time or between runs of Prometheus. func (ls Labels) Bytes(buf []byte) []byte { if cap(buf) < len(ls.data) { buf = make([]byte, len(ls.data)) @@ -528,48 +522,27 @@ func marshalLabelToSizedBuffer(m *Label, data []byte) int { return len(data) - i } -func sizeVarint(x uint64) (n int) { - // Most common case first - if x < 1<<7 { +func sizeWhenEncoded(x uint64) (n int) { + if x < 255 { return 1 + } else if x <= 1<<24 { + return 4 } - if x >= 1<<56 { - return 9 - } - if x >= 1<<28 { - x >>= 28 - n = 4 - } - if x >= 1<<14 { - x >>= 14 - n += 2 - } - if x >= 1<<7 { - n++ - } - return n + 1 + panic("String too long to encode as label.") } -func encodeVarint(data []byte, offset int, v uint64) int { - offset -= sizeVarint(v) - base := offset - for v >= 1<<7 { - data[offset] = uint8(v&0x7f | 0x80) - v >>= 7 - offset++ - } - data[offset] = uint8(v) - return base -} - -// Special code for the common case that a size is less than 128. func encodeSize(data []byte, offset, v int) int { - if v < 1<<7 { + if v < 255 { offset-- data[offset] = uint8(v) return offset } - return encodeVarint(data, offset, uint64(v)) + offset -= 4 + data[offset] = 255 + data[offset+1] = byte(v) + data[offset+2] = byte((v >> 8)) + data[offset+3] = byte((v >> 16)) + return offset } func labelsSize(lbls []Label) (n int) { @@ -583,9 +556,9 @@ func labelsSize(lbls []Label) (n int) { func labelSize(m *Label) (n int) { // strings are encoded as length followed by contents. 
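// Illustrative sketch only: a standalone round-trip of the length prefix that
// the stringlabels encoding above uses (a single byte for sizes 0-254, or the
// escape byte 255 followed by three little-endian bytes, capping lengths at
// 2^24). The helper names putSize/getSize are invented for this example and
// are not part of the patch; the real implementation is encodeSize/decodeSize above.
package main

import "fmt"

// putSize appends the length prefix for v; v is assumed to fit in 24 bits.
func putSize(dst []byte, v int) []byte {
	if v < 255 {
		return append(dst, byte(v))
	}
	return append(dst, 255, byte(v), byte(v>>8), byte(v>>16))
}

// getSize decodes a length prefix, returning the size and the prefix width.
func getSize(src []byte) (size, width int) {
	if src[0] != 255 {
		return int(src[0]), 1
	}
	return int(src[1]) | int(src[2])<<8 | int(src[3])<<16, 4
}

func main() {
	for _, v := range []int{0, 42, 254, 255, 300, 1 << 20} {
		buf := putSize(nil, v)
		size, width := getSize(buf)
		fmt.Printf("value %7d -> prefix of %d byte(s), decoded %7d\n", v, width, size)
	}
}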
l := len(m.Name) - n += l + sizeVarint(uint64(l)) + n += l + sizeWhenEncoded(uint64(l)) l = len(m.Value) - n += l + sizeVarint(uint64(l)) + n += l + sizeWhenEncoded(uint64(l)) return n } diff --git a/model/labels/labels_test.go b/model/labels/labels_test.go index a2a7734326..b7ba71b553 100644 --- a/model/labels/labels_test.go +++ b/model/labels/labels_test.go @@ -27,6 +27,8 @@ import ( ) func TestLabels_String(t *testing.T) { + s254 := strings.Repeat("x", 254) // Edge cases for stringlabels encoding. + s255 := strings.Repeat("x", 255) cases := []struct { labels Labels expected string @@ -43,6 +45,14 @@ func TestLabels_String(t *testing.T) { labels: FromStrings("service.name", "t1", "whatever\\whatever", "t2"), expected: `{"service.name"="t1", "whatever\\whatever"="t2"}`, }, + { + labels: FromStrings("aaa", "111", "xx", s254), + expected: `{aaa="111", xx="` + s254 + `"}`, + }, + { + labels: FromStrings("aaa", "111", "xx", s255), + expected: `{aaa="111", xx="` + s255 + `"}`, + }, } for _, c := range cases { str := c.labels.String() diff --git a/promql/durations.go b/promql/durations.go new file mode 100644 index 0000000000..8431fa5bd4 --- /dev/null +++ b/promql/durations.go @@ -0,0 +1,136 @@ +// Copyright 2025 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package promql + +import ( + "fmt" + "math" + "time" + + "github.com/prometheus/prometheus/promql/parser" +) + +// durationVisitor is a visitor that visits a duration expression and calculates the duration. +type durationVisitor struct{} + +func (v *durationVisitor) Visit(node parser.Node, _ []parser.Node) (parser.Visitor, error) { + switch n := node.(type) { + case *parser.VectorSelector: + if n.OriginalOffsetExpr != nil { + duration, err := calculateDuration(n.OriginalOffsetExpr, true) + if err != nil { + return nil, err + } + n.OriginalOffset = duration + } + case *parser.MatrixSelector: + if n.RangeExpr != nil { + duration, err := calculateDuration(n.RangeExpr, false) + if err != nil { + return nil, err + } + n.Range = duration + } + case *parser.SubqueryExpr: + if n.OriginalOffsetExpr != nil { + duration, err := calculateDuration(n.OriginalOffsetExpr, true) + if err != nil { + return nil, err + } + n.OriginalOffset = duration + } + if n.StepExpr != nil { + duration, err := calculateDuration(n.StepExpr, false) + if err != nil { + return nil, err + } + n.Step = duration + } + if n.RangeExpr != nil { + duration, err := calculateDuration(n.RangeExpr, false) + if err != nil { + return nil, err + } + n.Range = duration + } + } + return v, nil +} + +// calculateDuration computes the duration from a duration expression. 
+func calculateDuration(expr parser.Expr, allowedNegative bool) (time.Duration, error) { + duration, err := evaluateDurationExpr(expr) + if err != nil { + return 0, err + } + if duration <= 0 && !allowedNegative { + return 0, fmt.Errorf("%d:%d: duration must be greater than 0", expr.PositionRange().Start, expr.PositionRange().End) + } + if duration > 1<<63-1 || duration < -1<<63 { + return 0, fmt.Errorf("%d:%d: duration is out of range", expr.PositionRange().Start, expr.PositionRange().End) + } + return time.Duration(duration*1000) * time.Millisecond, nil +} + +// evaluateDurationExpr recursively evaluates a duration expression to a float64 value. +func evaluateDurationExpr(expr parser.Expr) (float64, error) { + switch n := expr.(type) { + case *parser.NumberLiteral: + return n.Val, nil + case *parser.DurationExpr: + var lhs, rhs float64 + var err error + + if n.LHS != nil { + lhs, err = evaluateDurationExpr(n.LHS) + if err != nil { + return 0, err + } + } + + rhs, err = evaluateDurationExpr(n.RHS) + if err != nil { + return 0, err + } + + switch n.Op { + case parser.ADD: + return lhs + rhs, nil + case parser.SUB: + if n.LHS == nil { + // Unary negative duration expression. + return -rhs, nil + } + return lhs - rhs, nil + case parser.MUL: + return lhs * rhs, nil + case parser.DIV: + if rhs == 0 { + return 0, fmt.Errorf("%d:%d: division by zero", expr.PositionRange().Start, expr.PositionRange().End) + } + return lhs / rhs, nil + case parser.MOD: + if rhs == 0 { + return 0, fmt.Errorf("%d:%d: modulo by zero", expr.PositionRange().Start, expr.PositionRange().End) + } + return math.Mod(lhs, rhs), nil + case parser.POW: + return math.Pow(lhs, rhs), nil + default: + return 0, fmt.Errorf("unexpected duration expression operator %q", n.Op) + } + default: + return 0, fmt.Errorf("unexpected duration expression type %T", n) + } +} diff --git a/promql/durations_test.go b/promql/durations_test.go new file mode 100644 index 0000000000..0cdfb7597a --- /dev/null +++ b/promql/durations_test.go @@ -0,0 +1,238 @@ +// Copyright 2025 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package promql + +import ( + "testing" + "time" + + "github.com/stretchr/testify/require" + + "github.com/prometheus/prometheus/promql/parser" +) + +func TestDurationVisitor(t *testing.T) { + // Enable experimental duration expression parsing. + parser.ExperimentalDurationExpr = true + t.Cleanup(func() { + parser.ExperimentalDurationExpr = false + }) + complexExpr := `sum_over_time( + rate(metric[5m] offset 1h)[10m:30s] offset 2h + ) + + avg_over_time( + metric[1h + 30m] offset -1h + ) * + count_over_time( + metric[2h * 0.5] + )` + + expr, err := parser.ParseExpr(complexExpr) + require.NoError(t, err) + + err = parser.Walk(&durationVisitor{}, expr, nil) + require.NoError(t, err) + + // Verify different parts of the expression have correct durations. + // This is a binary expression at the top level. 
+ binExpr, ok := expr.(*parser.BinaryExpr) + require.True(t, ok, "Expected binary expression at top level") + + // Left side should be sum_over_time with subquery. + leftCall, ok := binExpr.LHS.(*parser.Call) + require.True(t, ok, "Expected call expression on left side") + require.Equal(t, "sum_over_time", leftCall.Func.Name) + + // Extract the subquery from sum_over_time. + sumSubquery, ok := leftCall.Args[0].(*parser.SubqueryExpr) + require.True(t, ok, "Expected subquery in sum_over_time") + require.Equal(t, 10*time.Minute, sumSubquery.Range) + require.Equal(t, 30*time.Second, sumSubquery.Step) + require.Equal(t, 2*time.Hour, sumSubquery.OriginalOffset) + + // Extract the rate call inside the subquery. + rateCall, ok := sumSubquery.Expr.(*parser.Call) + require.True(t, ok, "Expected rate call in subquery") + require.Equal(t, "rate", rateCall.Func.Name) + + // Extract the matrix selector from rate. + rateMatrix, ok := rateCall.Args[0].(*parser.MatrixSelector) + require.True(t, ok, "Expected matrix selector in rate") + require.Equal(t, 5*time.Minute, rateMatrix.Range) + require.Equal(t, 1*time.Hour, rateMatrix.VectorSelector.(*parser.VectorSelector).OriginalOffset) + + // Right side should be another binary expression (multiplication). + rightBinExpr, ok := binExpr.RHS.(*parser.BinaryExpr) + require.True(t, ok, "Expected binary expression on right side") + + // Left side of multiplication should be avg_over_time. + avgCall, ok := rightBinExpr.LHS.(*parser.Call) + require.True(t, ok, "Expected call expression on left side of multiplication") + require.Equal(t, "avg_over_time", avgCall.Func.Name) + + // Extract the matrix selector from avg_over_time. + avgMatrix, ok := avgCall.Args[0].(*parser.MatrixSelector) + require.True(t, ok, "Expected matrix selector in avg_over_time") + require.Equal(t, 90*time.Minute, avgMatrix.Range) // 1h + 30m + require.Equal(t, -1*time.Hour, avgMatrix.VectorSelector.(*parser.VectorSelector).OriginalOffset) + + // Right side of multiplication should be count_over_time. + countCall, ok := rightBinExpr.RHS.(*parser.Call) + require.True(t, ok, "Expected call expression on right side of multiplication") + require.Equal(t, "count_over_time", countCall.Func.Name) + + // Extract the matrix selector from count_over_time. 
+ countMatrix, ok := countCall.Args[0].(*parser.MatrixSelector) + require.True(t, ok, "Expected matrix selector in count_over_time") + require.Equal(t, 1*time.Hour, countMatrix.Range) // 2h * 0.5 +} + +func TestCalculateDuration(t *testing.T) { + tests := []struct { + name string + expr parser.Expr + expected time.Duration + errorMessage string + allowedNegative bool + }{ + { + name: "addition", + expr: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 5}, + RHS: &parser.NumberLiteral{Val: 10}, + Op: parser.ADD, + }, + expected: 15 * time.Second, + }, + { + name: "subtraction", + expr: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 15}, + RHS: &parser.NumberLiteral{Val: 5}, + Op: parser.SUB, + }, + expected: 10 * time.Second, + }, + { + name: "subtraction with negative", + expr: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 5}, + RHS: &parser.NumberLiteral{Val: 10}, + Op: parser.SUB, + }, + errorMessage: "duration must be greater than 0", + }, + { + name: "multiplication", + expr: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 5}, + RHS: &parser.NumberLiteral{Val: 3}, + Op: parser.MUL, + }, + expected: 15 * time.Second, + }, + { + name: "division", + expr: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 15}, + RHS: &parser.NumberLiteral{Val: 3}, + Op: parser.DIV, + }, + expected: 5 * time.Second, + }, + { + name: "modulo with numbers", + expr: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 17}, + RHS: &parser.NumberLiteral{Val: 5}, + Op: parser.MOD, + }, + expected: 2 * time.Second, + }, + { + name: "power", + expr: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 2}, + RHS: &parser.NumberLiteral{Val: 3}, + Op: parser.POW, + }, + expected: 8 * time.Second, + }, + { + name: "complex expression", + expr: &parser.DurationExpr{ + LHS: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 2}, + RHS: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 3}, + RHS: &parser.NumberLiteral{Val: 4}, + Op: parser.ADD, + }, + Op: parser.MUL, + }, + RHS: &parser.NumberLiteral{Val: 1}, + Op: parser.SUB, + }, + expected: 13 * time.Second, + }, + { + name: "unary negative", + expr: &parser.DurationExpr{ + RHS: &parser.NumberLiteral{Val: 5}, + Op: parser.SUB, + }, + expected: -5 * time.Second, + allowedNegative: true, + }, + { + name: "division by zero", + expr: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 5}, + RHS: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 5}, + RHS: &parser.NumberLiteral{Val: 5}, + Op: parser.SUB, + }, + Op: parser.DIV, + }, + errorMessage: "division by zero", + }, + { + name: "modulo by zero", + expr: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 5}, + RHS: &parser.DurationExpr{ + LHS: &parser.NumberLiteral{Val: 5}, + RHS: &parser.NumberLiteral{Val: 5}, + Op: parser.SUB, + }, + Op: parser.MOD, + }, + errorMessage: "modulo by zero", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := calculateDuration(tt.expr, tt.allowedNegative) + if tt.errorMessage != "" { + require.Error(t, err) + require.Contains(t, err.Error(), tt.errorMessage) + return + } + require.NoError(t, err) + require.Equal(t, tt.expected, result) + }) + } +} diff --git a/promql/engine.go b/promql/engine.go index 8c37f12e42..f1829efdd8 100644 --- a/promql/engine.go +++ b/promql/engine.go @@ -489,9 +489,9 @@ func (ng *Engine) NewInstantQuery(ctx context.Context, q storage.Queryable, opts if err := ng.validateOpts(expr); err != nil { return nil, err } - *pExpr = PreprocessExpr(expr, ts, 
ts) + *pExpr, err = PreprocessExpr(expr, ts, ts) - return qry, nil + return qry, err } // NewRangeQuery returns an evaluation query for the given time range and with @@ -513,9 +513,9 @@ func (ng *Engine) NewRangeQuery(ctx context.Context, q storage.Queryable, opts Q if expr.Type() != parser.ValueTypeVector && expr.Type() != parser.ValueTypeScalar { return nil, fmt.Errorf("invalid expression type %q for range query, must be Scalar or instant Vector", parser.DocumentedType(expr.Type())) } - *pExpr = PreprocessExpr(expr, start, end) + *pExpr, err = PreprocessExpr(expr, start, end) - return qry, nil + return qry, err } func (ng *Engine) newQuery(q storage.Queryable, qs string, opts QueryOpts, start, end time.Time, interval time.Duration) (*parser.Expr, *query) { @@ -1137,8 +1137,9 @@ type EvalNodeHelper struct { Out Vector // Caches. - // funcHistogramQuantile for classic histograms. + // funcHistogramQuantile and funcHistogramFraction for classic histograms. signatureToMetricWithBuckets map[string]*metricWithBuckets + nativeHistogramSamples []Sample lb *labels.Builder lblBuf []byte @@ -1161,6 +1162,62 @@ func (enh *EvalNodeHelper) resetBuilder(lbls labels.Labels) { } } +// resetHistograms prepares the histogram caches by splitting the given vector into native and classic histograms. +func (enh *EvalNodeHelper) resetHistograms(inVec Vector, arg parser.Expr) annotations.Annotations { + var annos annotations.Annotations + + if enh.signatureToMetricWithBuckets == nil { + enh.signatureToMetricWithBuckets = map[string]*metricWithBuckets{} + } else { + for _, v := range enh.signatureToMetricWithBuckets { + v.buckets = v.buckets[:0] + } + } + enh.nativeHistogramSamples = enh.nativeHistogramSamples[:0] + + for _, sample := range inVec { + // We are only looking for classic buckets here. Remember + // the histograms for later treatment. + if sample.H != nil { + enh.nativeHistogramSamples = append(enh.nativeHistogramSamples, sample) + continue + } + + upperBound, err := strconv.ParseFloat( + sample.Metric.Get(model.BucketLabel), 64, + ) + if err != nil { + annos.Add(annotations.NewBadBucketLabelWarning(sample.Metric.Get(labels.MetricName), sample.Metric.Get(model.BucketLabel), arg.PositionRange())) + continue + } + enh.lblBuf = sample.Metric.BytesWithoutLabels(enh.lblBuf, labels.BucketLabel) + mb, ok := enh.signatureToMetricWithBuckets[string(enh.lblBuf)] + if !ok { + sample.Metric = labels.NewBuilder(sample.Metric). + Del(excludedLabels...). + Labels() + mb = &metricWithBuckets{sample.Metric, nil} + enh.signatureToMetricWithBuckets[string(enh.lblBuf)] = mb + } + mb.buckets = append(mb.buckets, Bucket{upperBound, sample.F}) + } + + for _, sample := range enh.nativeHistogramSamples { + // We have to reconstruct the exact same signature as above for + // a classic histogram, just ignoring any le label. + enh.lblBuf = sample.Metric.Bytes(enh.lblBuf) + if mb, ok := enh.signatureToMetricWithBuckets[string(enh.lblBuf)]; ok && len(mb.buckets) > 0 { + // At this data point, we have classic histogram + // buckets and a native histogram with the same name and + // labels. Do not evaluate anything. + annos.Add(annotations.NewMixedClassicNativeHistogramsWarning(sample.Metric.Get(labels.MetricName), arg.PositionRange())) + delete(enh.signatureToMetricWithBuckets, string(enh.lblBuf)) + continue + } + } + return annos +} + // rangeEval evaluates the given expressions, and then for each step calls // the given funcCall with the values computed for each expression at that // step. 
The return value is the combination into time series of all the @@ -1582,6 +1639,11 @@ func (ev *evaluator) eval(ctx context.Context, expr parser.Expr) (parser.Value, if err := contextDone(ctx, "expression evaluation"); err != nil { ev.error(err) } + + if ev.endTimestamp < ev.startTimestamp { + return Matrix{}, nil + } + numSteps := int((ev.endTimestamp-ev.startTimestamp)/ev.interval) + 1 // Create a new span to help investigate inner evaluation performances. @@ -3591,15 +3653,20 @@ func unwrapStepInvariantExpr(e parser.Expr) parser.Expr { } // PreprocessExpr wraps all possible step invariant parts of the given expression with -// StepInvariantExpr. It also resolves the preprocessors. -func PreprocessExpr(expr parser.Expr, start, end time.Time) parser.Expr { +// StepInvariantExpr. It also resolves the preprocessors and evaluates duration expressions +// into their numeric values. +func PreprocessExpr(expr parser.Expr, start, end time.Time) (parser.Expr, error) { detectHistogramStatsDecoding(expr) + if err := parser.Walk(&durationVisitor{}, expr, nil); err != nil { + return nil, err + } + isStepInvariant := preprocessExprHelper(expr, start, end) if isStepInvariant { - return newStepInvariantExpr(expr) + return newStepInvariantExpr(expr), nil } - return expr + return expr, nil } // preprocessExprHelper wraps the child nodes of the expression diff --git a/promql/engine_test.go b/promql/engine_test.go index 5dddebe5df..0181d01534 100644 --- a/promql/engine_test.go +++ b/promql/engine_test.go @@ -1900,15 +1900,6 @@ func TestSubquerySelector(t *testing.T) { }, Start: time.Unix(35, 0), }, - { - Query: "metric[0:10s]", - Result: promql.Result{ - nil, - promql.Matrix{}, - nil, - }, - Start: time.Unix(10, 0), - }, }, }, { @@ -3096,7 +3087,8 @@ func TestPreprocessAndWrapWithStepInvariantExpr(t *testing.T) { t.Run(test.input, func(t *testing.T) { expr, err := parser.ParseExpr(test.input) require.NoError(t, err) - expr = promql.PreprocessExpr(expr, startTime, endTime) + expr, err = promql.PreprocessExpr(expr, startTime, endTime) + require.NoError(t, err) if test.outputTest { require.Equal(t, test.input, expr.String(), "error on input '%s'", test.input) } @@ -3268,11 +3260,6 @@ func TestInstantQueryWithRangeVectorSelector(t *testing.T) { }, }, }, - "matches series but range is 0": { - expr: "some_metric[0]", - ts: baseT.Add(2 * time.Minute), - expected: promql.Matrix{}, - }, } for name, testCase := range testCases { diff --git a/promql/functions.go b/promql/functions.go index 3c79684b0f..0662c8d451 100644 --- a/promql/functions.go +++ b/promql/functions.go @@ -20,7 +20,6 @@ import ( "math" "slices" "sort" - "strconv" "strings" "time" @@ -932,8 +931,7 @@ func funcQuantileOverTime(vals []parser.Value, args parser.Expressions, enh *Eva return append(enh.Out, Sample{F: quantile(q, values)}), annos } -// === stddev_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === -func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { +func varianceOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper, varianceToResult func(float64) float64) (Vector, annotations.Annotations) { samples := vals[0].(Matrix)[0] var annos annotations.Annotations if len(samples.Floats) == 0 { @@ -953,33 +951,22 @@ func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalN mean, cMean = kahanSumInc(delta/count, mean, cMean) aux, cAux = kahanSumInc(delta*(f.F-(mean+cMean)), aux, cAux) } - return math.Sqrt((aux + cAux) 
/ count) + variance := (aux + cAux) / count + if varianceToResult == nil { + return variance + } + return varianceToResult(variance) }), annos } +// === stddev_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcStddevOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return varianceOverTime(vals, args, enh, math.Sqrt) +} + // === stdvar_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === func funcStdvarOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - samples := vals[0].(Matrix)[0] - var annos annotations.Annotations - if len(samples.Floats) == 0 { - return enh.Out, nil - } - if len(samples.Histograms) > 0 { - metricName := samples.Metric.Get(labels.MetricName) - annos.Add(annotations.NewHistogramIgnoredInMixedRangeInfo(metricName, args[0].PositionRange())) - } - return aggrOverTime(vals, enh, func(s Series) float64 { - var count float64 - var mean, cMean float64 - var aux, cAux float64 - for _, f := range s.Floats { - count++ - delta := f.F - (mean + cMean) - mean, cMean = kahanSumInc(delta/count, mean, cMean) - aux, cAux = kahanSumInc(delta*(f.F-(mean+cMean)), aux, cAux) - } - return (aux + cAux) / count - }), annos + return varianceOverTime(vals, args, enh, nil) } // === absent(Vector parser.ValueTypeVector) (Vector, Annotations) === @@ -1347,11 +1334,9 @@ func funcHistogramAvg(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHe return enh.Out, nil } -// === histogram_stddev(Vector parser.ValueTypeVector) (Vector, Annotations) === -func funcHistogramStdDev(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - inVec := vals[0].(Vector) - - for _, sample := range inVec { +func histogramVariance(vals []parser.Value, enh *EvalNodeHelper, varianceToResult func(float64) float64) (Vector, annotations.Annotations) { + vec := vals[0].(Vector) + for _, sample := range vec { // Skip non-histogram samples. if sample.H == nil { continue @@ -1365,9 +1350,15 @@ func funcHistogramStdDev(vals []parser.Value, _ parser.Expressions, enh *EvalNod continue } var val float64 - if bucket.Lower <= 0 && 0 <= bucket.Upper { + switch { + case sample.H.UsesCustomBuckets(): + // Use arithmetic mean in case of custom buckets. + val = (bucket.Upper + bucket.Lower) / 2.0 + case bucket.Lower <= 0 && bucket.Upper >= 0: + // Use zero (effectively the arithmetic mean) in the zero bucket of a standard exponential histogram. val = 0 - } else { + default: + // Use geometric mean in case of standard exponential buckets. val = math.Sqrt(bucket.Upper * bucket.Lower) if bucket.Upper < 0 { val = -val @@ -1381,48 +1372,8 @@ func funcHistogramStdDev(vals []parser.Value, _ parser.Expressions, enh *EvalNod if !enh.enableDelayedNameRemoval { sample.Metric = sample.Metric.DropMetricName() } - enh.Out = append(enh.Out, Sample{ - Metric: sample.Metric, - F: math.Sqrt(variance), - DropName: true, - }) - } - return enh.Out, nil -} - -// === histogram_stdvar(Vector parser.ValueTypeVector) (Vector, Annotations) === -func funcHistogramStdVar(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - inVec := vals[0].(Vector) - - for _, sample := range inVec { - // Skip non-histogram samples. 
- if sample.H == nil { - continue - } - mean := sample.H.Sum / sample.H.Count - var variance, cVariance float64 - it := sample.H.AllBucketIterator() - for it.Next() { - bucket := it.At() - if bucket.Count == 0 { - continue - } - var val float64 - if bucket.Lower <= 0 && 0 <= bucket.Upper { - val = 0 - } else { - val = math.Sqrt(bucket.Upper * bucket.Lower) - if bucket.Upper < 0 { - val = -val - } - } - delta := val - mean - variance, cVariance = kahanSumInc(bucket.Count*delta*delta, variance, cVariance) - } - variance += cVariance - variance /= sample.H.Count - if !enh.enableDelayedNameRemoval { - sample.Metric = sample.Metric.DropMetricName() + if varianceToResult != nil { + variance = varianceToResult(variance) } enh.Out = append(enh.Out, Sample{ Metric: sample.Metric, @@ -1433,17 +1384,26 @@ func funcHistogramStdVar(vals []parser.Value, _ parser.Expressions, enh *EvalNod return enh.Out, nil } +// === histogram_stddev(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcHistogramStdDev(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return histogramVariance(vals, enh, math.Sqrt) +} + +// === histogram_stdvar(Vector parser.ValueTypeVector) (Vector, Annotations) === +func funcHistogramStdVar(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return histogramVariance(vals, enh, nil) +} + // === histogram_fraction(lower, upper parser.ValueTypeScalar, Vector parser.ValueTypeVector) (Vector, Annotations) === -func funcHistogramFraction(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { +func funcHistogramFraction(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { lower := vals[0].(Vector)[0].F upper := vals[1].(Vector)[0].F inVec := vals[2].(Vector) - for _, sample := range inVec { - // Skip non-histogram samples. - if sample.H == nil { - continue - } + annos := enh.resetHistograms(inVec, args[2]) + + // Deal with the native histograms. + for _, sample := range enh.nativeHistogramSamples { if !enh.enableDelayedNameRemoval { sample.Metric = sample.Metric.DropMetricName() } @@ -1453,7 +1413,24 @@ func funcHistogramFraction(vals []parser.Value, _ parser.Expressions, enh *EvalN DropName: true, }) } - return enh.Out, nil + + // Deal with classic histograms that have already been filtered for conflicting native histograms. + for _, mb := range enh.signatureToMetricWithBuckets { + if len(mb.buckets) == 0 { + continue + } + if !enh.enableDelayedNameRemoval { + mb.metric = mb.metric.DropMetricName() + } + + enh.Out = append(enh.Out, Sample{ + Metric: mb.metric, + F: BucketFraction(lower, upper, mb.buckets), + DropName: true, + }) + } + + return enh.Out, annos } // === histogram_quantile(k parser.ValueTypeScalar, Vector parser.ValueTypeVector) (Vector, Annotations) === @@ -1465,58 +1442,10 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev if math.IsNaN(q) || q < 0 || q > 1 { annos.Add(annotations.NewInvalidQuantileWarning(q, args[0].PositionRange())) } + annos.Merge(enh.resetHistograms(inVec, args[1])) - if enh.signatureToMetricWithBuckets == nil { - enh.signatureToMetricWithBuckets = map[string]*metricWithBuckets{} - } else { - for _, v := range enh.signatureToMetricWithBuckets { - v.buckets = v.buckets[:0] - } - } - - var histogramSamples []Sample - - for _, sample := range inVec { - // We are only looking for classic buckets here. 
Remember - // the histograms for later treatment. - if sample.H != nil { - histogramSamples = append(histogramSamples, sample) - continue - } - - upperBound, err := strconv.ParseFloat( - sample.Metric.Get(model.BucketLabel), 64, - ) - if err != nil { - annos.Add(annotations.NewBadBucketLabelWarning(sample.Metric.Get(labels.MetricName), sample.Metric.Get(model.BucketLabel), args[1].PositionRange())) - continue - } - enh.lblBuf = sample.Metric.BytesWithoutLabels(enh.lblBuf, labels.BucketLabel) - mb, ok := enh.signatureToMetricWithBuckets[string(enh.lblBuf)] - if !ok { - sample.Metric = labels.NewBuilder(sample.Metric). - Del(excludedLabels...). - Labels() - mb = &metricWithBuckets{sample.Metric, nil} - enh.signatureToMetricWithBuckets[string(enh.lblBuf)] = mb - } - mb.buckets = append(mb.buckets, Bucket{upperBound, sample.F}) - } - - // Now deal with the native histograms. - for _, sample := range histogramSamples { - // We have to reconstruct the exact same signature as above for - // a classic histogram, just ignoring any le label. - enh.lblBuf = sample.Metric.Bytes(enh.lblBuf) - if mb, ok := enh.signatureToMetricWithBuckets[string(enh.lblBuf)]; ok && len(mb.buckets) > 0 { - // At this data point, we have classic histogram - // buckets and a native histogram with the same name and - // labels. Do not evaluate anything. - annos.Add(annotations.NewMixedClassicNativeHistogramsWarning(sample.Metric.Get(labels.MetricName), args[1].PositionRange())) - delete(enh.signatureToMetricWithBuckets, string(enh.lblBuf)) - continue - } - + // Deal with the native histograms. + for _, sample := range enh.nativeHistogramSamples { if !enh.enableDelayedNameRemoval { sample.Metric = sample.Metric.DropMetricName() } @@ -1527,7 +1456,7 @@ func funcHistogramQuantile(vals []parser.Value, args parser.Expressions, enh *Ev }) } - // Now do classic histograms that have already been filtered for conflicting native histograms. + // Deal with classic histograms that have already been filtered for conflicting native histograms. for _, mb := range enh.signatureToMetricWithBuckets { if len(mb.buckets) > 0 { res, forcedMonotonicity, _ := BucketQuantile(q, mb.buckets) diff --git a/promql/parser/ast.go b/promql/parser/ast.go index a9f41978e0..9eebaed9ab 100644 --- a/promql/parser/ast.go +++ b/promql/parser/ast.go @@ -110,6 +110,15 @@ type BinaryExpr struct { ReturnBool bool } +// DurationExpr represents a binary expression between two duration expressions. +type DurationExpr struct { + Op ItemType // The operation of the expression. + LHS, RHS Expr // The operands on the respective sides of the operator. + Wrapped bool // Set when the duration is wrapped in parentheses. + + StartPos posrange.Pos // For unary operations, the position of the operator. +} + // Call represents a function call. type Call struct { Func *Function // The function that was called. @@ -124,24 +133,27 @@ type MatrixSelector struct { // if the parser hasn't returned an error. VectorSelector Expr Range time.Duration - - EndPos posrange.Pos + RangeExpr *DurationExpr + EndPos posrange.Pos } // SubqueryExpr represents a subquery. type SubqueryExpr struct { - Expr Expr - Range time.Duration + Expr Expr + Range time.Duration + RangeExpr *DurationExpr // OriginalOffset is the actual offset that was set in the query. - // This never changes. OriginalOffset time.Duration + // OriginalOffsetExpr is the actual offset expression that was set in the query. 
+ OriginalOffsetExpr *DurationExpr // Offset is the offset used during the query execution - // which is calculated using the original offset, at modifier time, + // which is calculated using the original offset, offset expression, at modifier time, // eval time, and subquery offsets in the AST tree. Offset time.Duration Timestamp *int64 StartOrEnd ItemType // Set when @ is used with start() or end() Step time.Duration + StepExpr *DurationExpr EndPos posrange.Pos } @@ -150,6 +162,7 @@ type SubqueryExpr struct { type NumberLiteral struct { Val float64 + Duration bool // Used to format the number as a duration. PosRange posrange.PositionRange } @@ -191,9 +204,10 @@ func (e *StepInvariantExpr) PositionRange() posrange.PositionRange { // VectorSelector represents a Vector selection. type VectorSelector struct { Name string - // OriginalOffset is the actual offset that was set in the query. - // This never changes. + // OriginalOffset is the actual offset calculated from OriginalOffsetExpr. OriginalOffset time.Duration + // OriginalOffsetExpr is the actual offset that was set in the query. + OriginalOffsetExpr *DurationExpr // Offset is the offset used during the query execution // which is calculated using the original offset, at modifier time, // eval time, and subquery offsets in the AST tree. @@ -244,6 +258,7 @@ func (e *BinaryExpr) Type() ValueType { return ValueTypeVector } func (e *StepInvariantExpr) Type() ValueType { return e.Expr.Type() } +func (e *DurationExpr) Type() ValueType { return ValueTypeScalar } func (*AggregateExpr) PromQLExpr() {} func (*BinaryExpr) PromQLExpr() {} @@ -256,6 +271,7 @@ func (*StringLiteral) PromQLExpr() {} func (*UnaryExpr) PromQLExpr() {} func (*VectorSelector) PromQLExpr() {} func (*StepInvariantExpr) PromQLExpr() {} +func (*DurationExpr) PromQLExpr() {} // VectorMatchCardinality describes the cardinality relationship // of two Vectors in a binary operation. 
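// A minimal sketch of how the duration expressions added in this patch could
// be exercised end to end, assuming the experimental flag is enabled the same
// way the tests earlier in this patch enable it. The query string and the
// printed values are illustrative; the identifiers used here
// (ExperimentalDurationExpr, ParseExpr, PreprocessExpr, MatrixSelector.Range,
// VectorSelector.OriginalOffset) are the ones introduced or changed above.
package main

import (
	"fmt"
	"time"

	"github.com/prometheus/prometheus/promql"
	"github.com/prometheus/prometheus/promql/parser"
)

func main() {
	// Duration arithmetic in range/offset/step positions is gated behind an
	// experimental parser flag.
	parser.ExperimentalDurationExpr = true

	expr, err := parser.ParseExpr(`metric[1h + 30m] offset -1h`)
	if err != nil {
		panic(err)
	}

	// PreprocessExpr now also resolves RangeExpr/OriginalOffsetExpr/StepExpr
	// into concrete durations and reports errors such as division by zero.
	start := time.Now()
	if expr, err = promql.PreprocessExpr(expr, start, start.Add(5*time.Minute)); err != nil {
		panic(err)
	}

	ms := expr.(*parser.MatrixSelector)
	vs := ms.VectorSelector.(*parser.VectorSelector)
	fmt.Println(ms.Range, vs.OriginalOffset) // expected: 1h30m0s -1h0m0s
}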
@@ -438,6 +454,16 @@ func (e *BinaryExpr) PositionRange() posrange.PositionRange { return mergeRanges(e.LHS, e.RHS) } +func (e *DurationExpr) PositionRange() posrange.PositionRange { + if e.LHS == nil { + return posrange.PositionRange{ + Start: e.StartPos, + End: e.RHS.PositionRange().End, + } + } + return mergeRanges(e.LHS, e.RHS) +} + func (e *Call) PositionRange() posrange.PositionRange { return e.PosRange } diff --git a/promql/parser/generated_parser.y b/promql/parser/generated_parser.y index cdb4532d3b..de9234589c 100644 --- a/promql/parser/generated_parser.y +++ b/promql/parser/generated_parser.y @@ -186,7 +186,7 @@ START_METRIC_SELECTOR %type int %type uint %type number series_value signed_number signed_or_unsigned_number -%type step_invariant_expr aggregate_expr aggregate_modifier bin_modifier binary_expr bool_modifier expr function_call function_call_args function_call_body group_modifiers label_matchers matrix_selector number_duration_literal offset_expr on_or_ignoring paren_expr string_literal subquery_expr unary_expr vector_selector +%type step_invariant_expr aggregate_expr aggregate_modifier bin_modifier binary_expr bool_modifier expr function_call function_call_args function_call_body group_modifiers label_matchers matrix_selector number_duration_literal offset_expr on_or_ignoring paren_expr string_literal subquery_expr unary_expr vector_selector duration_expr paren_duration_expr positive_duration_expr %start start @@ -235,6 +235,7 @@ expr : | unary_expr | vector_selector | step_invariant_expr + | duration_expr ; /* @@ -433,23 +434,35 @@ paren_expr : LEFT_PAREN expr RIGHT_PAREN * Offset modifiers. */ -offset_expr: expr OFFSET number_duration_literal +positive_duration_expr : duration_expr { - numLit, _ := $3.(*NumberLiteral) - dur := time.Duration(numLit.Val * 1000) * time.Millisecond - yylex.(*parser).addOffset($1, dur) + if numLit, ok := $1.(*NumberLiteral); ok { + if numLit.Val <= 0 { + yylex.(*parser).addParseErrf(numLit.PositionRange(), "duration must be greater than 0") + $$ = &NumberLiteral{Val: 0} // Return 0 on error. + break + } + $$ = $1 + break + } $$ = $1 } - | expr OFFSET SUB number_duration_literal + ; + +offset_expr: expr OFFSET duration_expr { - numLit, _ := $4.(*NumberLiteral) - dur := time.Duration(numLit.Val * 1000) * time.Millisecond - yylex.(*parser).addOffset($1, -dur) + if numLit, ok := $3.(*NumberLiteral); ok { + yylex.(*parser).addOffset($1, time.Duration(numLit.Val*1000)*time.Millisecond) $$ = $1 + break + } + yylex.(*parser).addOffsetExpr($1, $3.(*DurationExpr)) + $$ = $1 } | expr OFFSET error { yylex.(*parser).unexpected("offset", "number or duration"); $$ = $1 } ; + /* * @ modifiers. */ @@ -474,7 +487,7 @@ at_modifier_preprocessors: START | END; * Subquery and range selectors. 
*/ -matrix_selector : expr LEFT_BRACKET number_duration_literal RIGHT_BRACKET +matrix_selector : expr LEFT_BRACKET positive_duration_expr RIGHT_BRACKET { var errMsg string vs, ok := $1.(*VectorSelector) @@ -491,41 +504,60 @@ matrix_selector : expr LEFT_BRACKET number_duration_literal RIGHT_BRACKET yylex.(*parser).addParseErrf(errRange, "%s", errMsg) } - numLit, _ := $3.(*NumberLiteral) + var rangeNl time.Duration + if numLit, ok := $3.(*NumberLiteral); ok { + rangeNl = time.Duration(numLit.Val*1000)*time.Millisecond + } + rangeExpr, _ := $3.(*DurationExpr) $$ = &MatrixSelector{ VectorSelector: $1.(Expr), - Range: time.Duration(numLit.Val * 1000) * time.Millisecond, + Range: rangeNl, + RangeExpr: rangeExpr, EndPos: yylex.(*parser).lastClosing, } } ; -subquery_expr : expr LEFT_BRACKET number_duration_literal COLON number_duration_literal RIGHT_BRACKET +subquery_expr : expr LEFT_BRACKET positive_duration_expr COLON positive_duration_expr RIGHT_BRACKET { - numLitRange, _ := $3.(*NumberLiteral) - numLitStep, _ := $5.(*NumberLiteral) + var rangeNl time.Duration + var stepNl time.Duration + if numLit, ok := $3.(*NumberLiteral); ok { + rangeNl = time.Duration(numLit.Val*1000)*time.Millisecond + } + rangeExpr, _ := $3.(*DurationExpr) + if numLit, ok := $5.(*NumberLiteral); ok { + stepNl = time.Duration(numLit.Val*1000)*time.Millisecond + } + stepExpr, _ := $5.(*DurationExpr) $$ = &SubqueryExpr{ Expr: $1.(Expr), - Range: time.Duration(numLitRange.Val * 1000) * time.Millisecond, - Step: time.Duration(numLitStep.Val * 1000) * time.Millisecond, + Range: rangeNl, + RangeExpr: rangeExpr, + Step: stepNl, + StepExpr: stepExpr, EndPos: $6.Pos + 1, } } - | expr LEFT_BRACKET number_duration_literal COLON RIGHT_BRACKET - { - numLitRange, _ := $3.(*NumberLiteral) - $$ = &SubqueryExpr{ - Expr: $1.(Expr), - Range: time.Duration(numLitRange.Val * 1000) * time.Millisecond, - Step: 0, - EndPos: $5.Pos + 1, - } - } - | expr LEFT_BRACKET number_duration_literal COLON number_duration_literal error + | expr LEFT_BRACKET positive_duration_expr COLON RIGHT_BRACKET + { + var rangeNl time.Duration + if numLit, ok := $3.(*NumberLiteral); ok { + rangeNl = time.Duration(numLit.Val*1000)*time.Millisecond + } + rangeExpr, _ := $3.(*DurationExpr) + $$ = &SubqueryExpr{ + Expr: $1.(Expr), + Range: rangeNl, + RangeExpr: rangeExpr, + EndPos: $5.Pos + 1, + } + } + | expr LEFT_BRACKET positive_duration_expr COLON positive_duration_expr error { yylex.(*parser).unexpected("subquery selector", "\"]\""); $$ = $1 } - | expr LEFT_BRACKET number_duration_literal COLON error + | expr LEFT_BRACKET positive_duration_expr COLON error { yylex.(*parser).unexpected("subquery selector", "number or duration or \"]\""); $$ = $1 } - | expr LEFT_BRACKET number_duration_literal error + | expr LEFT_BRACKET positive_duration_expr error { yylex.(*parser).unexpected("subquery or range", "\":\" or \"]\""); $$ = $1 } | expr LEFT_BRACKET error { yylex.(*parser).unexpected("subquery selector", "number or duration"); $$ = $1 } @@ -930,6 +962,7 @@ number_duration_literal : NUMBER $$ = &NumberLiteral{ Val: dur.Seconds(), PosRange: $1.PositionRange(), + Duration: true, } } ; @@ -997,4 +1030,105 @@ maybe_grouping_labels: /* empty */ { $$ = nil } | grouping_labels ; +/* + * Duration expressions. 
+ */ + +duration_expr : number_duration_literal + { + nl := $1.(*NumberLiteral) + if nl.Val > 1<<63/1e9 || nl.Val < -(1<<63)/1e9 { + yylex.(*parser).addParseErrf(nl.PosRange, "duration out of range") + $$ = &NumberLiteral{Val: 0} + break + } + $$ = nl + } + | unary_op duration_expr %prec MUL + { + switch expr := $2.(type) { + case *NumberLiteral: + if $1.Typ == SUB { + expr.Val *= -1 + } + if expr.Val > 1<<63/1e9 || expr.Val < -(1<<63)/1e9 { + yylex.(*parser).addParseErrf($1.PositionRange(), "duration out of range") + $$ = &NumberLiteral{Val: 0} + break + } + expr.PosRange.Start = $1.Pos + $$ = expr + break + case *DurationExpr: + if $1.Typ == SUB { + $$ = &DurationExpr{ + Op: SUB, + RHS: expr, + StartPos: $1.Pos, + } + break + } + $$ = expr + break + default: + yylex.(*parser).addParseErrf($1.PositionRange(), "expected number literal or duration expression") + $$ = &NumberLiteral{Val: 0} + break + } + } + | duration_expr ADD duration_expr + { + yylex.(*parser).experimentalDurationExpr($1.(Expr)) + $$ = &DurationExpr{Op: ADD, LHS: $1.(Expr), RHS: $3.(Expr)} + } + | duration_expr SUB duration_expr + { + yylex.(*parser).experimentalDurationExpr($1.(Expr)) + $$ = &DurationExpr{Op: SUB, LHS: $1.(Expr), RHS: $3.(Expr)} + } + | duration_expr MUL duration_expr + { + yylex.(*parser).experimentalDurationExpr($1.(Expr)) + $$ = &DurationExpr{Op: MUL, LHS: $1.(Expr), RHS: $3.(Expr)} + } + | duration_expr DIV duration_expr + { + yylex.(*parser).experimentalDurationExpr($1.(Expr)) + if nl, ok := $3.(*NumberLiteral); ok && nl.Val == 0 { + yylex.(*parser).addParseErrf($2.PositionRange(), "division by zero") + $$ = &NumberLiteral{Val: 0} + break + } + $$ = &DurationExpr{Op: DIV, LHS: $1.(Expr), RHS: $3.(Expr)} + } + | duration_expr MOD duration_expr + { + yylex.(*parser).experimentalDurationExpr($1.(Expr)) + if nl, ok := $3.(*NumberLiteral); ok && nl.Val == 0 { + yylex.(*parser).addParseErrf($2.PositionRange(), "modulo by zero") + $$ = &NumberLiteral{Val: 0} + break + } + $$ = &DurationExpr{Op: MOD, LHS: $1.(Expr), RHS: $3.(Expr)} + } + | duration_expr POW duration_expr + { + yylex.(*parser).experimentalDurationExpr($1.(Expr)) + $$ = &DurationExpr{Op: POW, LHS: $1.(Expr), RHS: $3.(Expr)} + } + | paren_duration_expr + ; + +paren_duration_expr : LEFT_PAREN duration_expr RIGHT_PAREN + { + yylex.(*parser).experimentalDurationExpr($2.(Expr)) + if durationExpr, ok := $2.(*DurationExpr); ok { + durationExpr.Wrapped = true + $$ = durationExpr + break + } + $$ = $2 + } + ; + %% diff --git a/promql/parser/generated_parser.y.go b/promql/parser/generated_parser.y.go index 78d5e15245..8c84b42f14 100644 --- a/promql/parser/generated_parser.y.go +++ b/promql/parser/generated_parser.y.go @@ -251,431 +251,455 @@ var yyExca = [...]int16{ 1, -1, -2, 0, -1, 37, - 1, 141, - 10, 141, - 24, 141, + 1, 142, + 10, 142, + 24, 142, -2, 0, - -1, 61, - 2, 184, - 15, 184, - 79, 184, - 85, 184, - -2, 102, - -1, 62, + -1, 63, 2, 185, 15, 185, 79, 185, 85, 185, -2, 103, - -1, 63, + -1, 64, 2, 186, 15, 186, 79, 186, 85, 186, - -2, 105, - -1, 64, + -2, 104, + -1, 65, 2, 187, 15, 187, 79, 187, 85, 187, -2, 106, - -1, 65, + -1, 66, 2, 188, 15, 188, 79, 188, 85, 188, -2, 107, - -1, 66, + -1, 67, 2, 189, 15, 189, 79, 189, 85, 189, - -2, 112, - -1, 67, + -2, 108, + -1, 68, 2, 190, 15, 190, 79, 190, 85, 190, - -2, 114, - -1, 68, + -2, 113, + -1, 69, 2, 191, 15, 191, 79, 191, 85, 191, - -2, 116, - -1, 69, + -2, 115, + -1, 70, 2, 192, 15, 192, 79, 192, 85, 192, -2, 117, - -1, 70, + -1, 71, 2, 193, 15, 193, 79, 193, 85, 193, -2, 118, - -1, 71, + -1, 
72, 2, 194, 15, 194, 79, 194, 85, 194, -2, 119, - -1, 72, + -1, 73, 2, 195, 15, 195, 79, 195, 85, 195, -2, 120, - -1, 73, + -1, 74, 2, 196, 15, 196, 79, 196, 85, 196, - -2, 124, - -1, 74, + -2, 121, + -1, 75, 2, 197, 15, 197, 79, 197, 85, 197, -2, 125, - -1, 204, - 9, 246, - 12, 246, - 13, 246, - 18, 246, - 19, 246, - 25, 246, - 41, 246, - 47, 246, - 48, 246, - 51, 246, - 57, 246, - 62, 246, - 63, 246, - 64, 246, - 65, 246, - 66, 246, - 67, 246, - 68, 246, - 69, 246, - 70, 246, - 71, 246, - 72, 246, - 73, 246, - 74, 246, - 75, 246, - 79, 246, - 83, 246, - 85, 246, - 88, 246, - 89, 246, + -1, 76, + 2, 198, + 15, 198, + 79, 198, + 85, 198, + -2, 126, + -1, 126, + 41, 250, + 42, 250, + 52, 250, + 53, 250, + 57, 250, + -2, 20, + -1, 224, + 9, 247, + 12, 247, + 13, 247, + 18, 247, + 19, 247, + 25, 247, + 41, 247, + 47, 247, + 48, 247, + 51, 247, + 57, 247, + 62, 247, + 63, 247, + 64, 247, + 65, 247, + 66, 247, + 67, 247, + 68, 247, + 69, 247, + 70, 247, + 71, 247, + 72, 247, + 73, 247, + 74, 247, + 75, 247, + 79, 247, + 83, 247, + 85, 247, + 88, 247, + 89, 247, -2, 0, - -1, 205, - 9, 246, - 12, 246, - 13, 246, - 18, 246, - 19, 246, - 25, 246, - 41, 246, - 47, 246, - 48, 246, - 51, 246, - 57, 246, - 62, 246, - 63, 246, - 64, 246, - 65, 246, - 66, 246, - 67, 246, - 68, 246, - 69, 246, - 70, 246, - 71, 246, - 72, 246, - 73, 246, - 74, 246, - 75, 246, - 79, 246, - 83, 246, - 85, 246, - 88, 246, - 89, 246, + -1, 225, + 9, 247, + 12, 247, + 13, 247, + 18, 247, + 19, 247, + 25, 247, + 41, 247, + 47, 247, + 48, 247, + 51, 247, + 57, 247, + 62, 247, + 63, 247, + 64, 247, + 65, 247, + 66, 247, + 67, 247, + 68, 247, + 69, 247, + 70, 247, + 71, 247, + 72, 247, + 73, 247, + 74, 247, + 75, 247, + 79, 247, + 83, 247, + 85, 247, + 88, 247, + 89, 247, -2, 0, } const yyPrivate = 57344 -const yyLast = 803 +const yyLast = 892 var yyAct = [...]int16{ - 154, 338, 336, 157, 343, 230, 39, 196, 280, 44, - 295, 294, 84, 120, 82, 233, 180, 109, 108, 350, - 351, 352, 353, 110, 111, 243, 202, 158, 203, 135, - 112, 249, 361, 6, 333, 329, 113, 332, 232, 204, - 205, 308, 271, 60, 130, 270, 297, 268, 162, 315, - 156, 360, 153, 306, 359, 344, 200, 162, 161, 55, - 245, 246, 222, 115, 247, 116, 107, 161, 269, 54, - 267, 114, 260, 306, 182, 234, 236, 238, 239, 240, - 248, 250, 253, 254, 255, 256, 257, 261, 262, 163, - 122, 235, 237, 241, 242, 244, 251, 252, 192, 328, - 111, 258, 259, 117, 190, 164, 112, 152, 103, 55, - 106, 337, 77, 113, 184, 151, 35, 165, 327, 54, - 175, 191, 169, 172, 183, 185, 167, 189, 168, 2, - 3, 4, 5, 107, 198, 105, 159, 160, 201, 186, - 188, 7, 326, 206, 207, 208, 209, 210, 211, 212, - 213, 214, 215, 216, 217, 218, 219, 220, 199, 194, - 89, 91, 221, 162, 264, 325, 197, 223, 224, 171, - 200, 100, 101, 161, 162, 103, 104, 106, 90, 263, - 233, 324, 170, 162, 161, 323, 362, 322, 321, 274, - 243, 122, 266, 161, 131, 163, 249, 272, 123, 320, - 229, 319, 105, 232, 275, 318, 163, 317, 121, 85, - 316, 164, 163, 292, 293, 163, 265, 296, 129, 83, - 276, 86, 164, 273, 10, 245, 246, 187, 164, 247, - 88, 164, 86, 50, 79, 36, 298, 260, 1, 78, - 234, 236, 238, 239, 240, 248, 250, 253, 254, 255, - 256, 257, 261, 262, 123, 49, 235, 237, 241, 242, - 244, 251, 252, 181, 121, 182, 258, 259, 128, 48, - 127, 304, 119, 305, 307, 59, 309, 86, 9, 9, - 47, 46, 134, 310, 311, 136, 137, 138, 139, 140, - 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, - 45, 43, 132, 173, 179, 184, 166, 85, 330, 178, - 331, 42, 133, 55, 41, 183, 185, 83, 339, 340, - 341, 335, 177, 54, 342, 81, 346, 345, 348, 347, - 86, 303, 40, 314, 354, 355, 302, 55, 
51, 356, - 53, 77, 300, 56, 195, 358, 22, 54, 313, 55, - 174, 301, 227, 57, 8, 312, 226, 357, 37, 54, - 363, 299, 126, 277, 87, 193, 228, 125, 80, 75, - 349, 225, 155, 58, 231, 18, 19, 52, 118, 20, - 124, 0, 0, 0, 0, 76, 0, 0, 0, 0, - 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, - 71, 72, 73, 74, 0, 0, 0, 13, 0, 0, - 0, 24, 0, 30, 0, 0, 31, 32, 55, 38, - 107, 53, 77, 0, 56, 279, 0, 22, 54, 0, - 0, 0, 278, 0, 57, 0, 282, 283, 281, 288, - 290, 287, 289, 284, 285, 286, 291, 0, 91, 0, - 75, 0, 0, 0, 0, 0, 18, 19, 100, 101, - 20, 0, 103, 0, 106, 90, 76, 0, 0, 0, - 0, 61, 62, 63, 64, 65, 66, 67, 68, 69, - 70, 71, 72, 73, 74, 0, 0, 0, 13, 105, - 0, 0, 24, 0, 30, 0, 55, 31, 32, 53, - 77, 0, 56, 334, 0, 22, 54, 0, 0, 0, - 0, 0, 57, 0, 282, 283, 281, 288, 290, 287, - 289, 284, 285, 286, 291, 0, 0, 0, 75, 0, - 0, 0, 0, 0, 18, 19, 0, 0, 20, 0, - 0, 0, 17, 77, 76, 0, 0, 0, 22, 61, - 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, - 72, 73, 74, 0, 0, 0, 13, 0, 0, 0, - 24, 0, 30, 0, 0, 31, 32, 18, 19, 0, - 0, 20, 0, 0, 0, 17, 35, 0, 0, 0, - 0, 22, 11, 12, 14, 15, 16, 21, 23, 25, - 26, 27, 28, 29, 33, 34, 0, 0, 0, 13, + 166, 359, 357, 169, 364, 251, 39, 216, 301, 52, + 177, 315, 86, 130, 84, 6, 316, 109, 200, 44, + 145, 117, 116, 61, 118, 195, 109, 295, 119, 170, + 371, 372, 373, 374, 120, 109, 222, 121, 223, 224, + 225, 115, 296, 327, 91, 93, 94, 140, 95, 96, + 97, 98, 99, 100, 101, 102, 103, 104, 297, 105, + 106, 108, 92, 93, 123, 202, 125, 124, 105, 126, + 108, 354, 293, 102, 103, 353, 122, 105, 59, 108, + 92, 358, 110, 113, 127, 329, 107, 175, 292, 349, + 318, 285, 132, 114, 112, 107, 327, 115, 111, 174, + 336, 119, 141, 176, 107, 204, 284, 120, 348, 173, + 220, 2, 3, 4, 5, 203, 205, 161, 291, 179, + 180, 181, 182, 183, 184, 185, 190, 163, 194, 163, + 163, 163, 163, 163, 163, 163, 188, 191, 186, 189, + 187, 290, 243, 121, 218, 365, 289, 79, 221, 382, + 208, 206, 35, 226, 227, 228, 229, 230, 231, 232, + 233, 234, 235, 236, 237, 238, 239, 240, 381, 288, + 219, 380, 7, 10, 241, 242, 347, 346, 345, 244, + 245, 344, 343, 81, 163, 163, 164, 109, 164, 164, + 164, 164, 164, 164, 164, 146, 147, 148, 149, 150, + 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, + 342, 132, 287, 212, 91, 93, 94, 139, 95, 96, + 97, 98, 99, 100, 101, 102, 103, 104, 210, 105, + 106, 108, 92, 109, 341, 168, 211, 317, 340, 313, + 314, 339, 174, 164, 164, 294, 178, 338, 337, 50, + 8, 209, 173, 56, 37, 80, 107, 179, 319, 165, + 91, 93, 94, 55, 95, 96, 97, 163, 99, 100, + 101, 102, 103, 104, 175, 105, 106, 108, 92, 248, + 138, 335, 137, 247, 60, 77, 90, 9, 9, 196, + 176, 201, 325, 202, 326, 328, 334, 330, 246, 321, + 113, 78, 107, 333, 331, 332, 56, 36, 110, 113, + 114, 112, 165, 1, 115, 62, 55, 49, 320, 114, + 112, 171, 172, 115, 111, 174, 164, 48, 47, 351, + 46, 352, 144, 204, 45, 173, 43, 383, 77, 360, + 361, 362, 356, 203, 205, 363, 162, 367, 366, 369, + 368, 254, 142, 56, 78, 375, 376, 175, 214, 165, + 377, 264, 174, 55, 192, 217, 379, 270, 133, 220, + 87, 350, 173, 176, 253, 42, 143, 199, 131, 324, + 85, 384, 198, 41, 323, 77, 286, 136, 207, 40, + 51, 88, 135, 88, 175, 197, 266, 267, 215, 322, + 268, 78, 378, 298, 89, 134, 213, 249, 281, 82, + 176, 255, 257, 259, 260, 261, 269, 271, 274, 275, + 276, 277, 278, 282, 283, 254, 370, 256, 258, 262, + 263, 265, 272, 273, 133, 264, 56, 279, 280, 167, + 87, 270, 165, 252, 131, 250, 55, 53, 253, 128, + 85, 174, 129, 0, 0, 0, 0, 88, 83, 0, + 0, 173, 0, 88, 0, 0, 0, 0, 77, 0, + 266, 267, 0, 0, 268, 0, 0, 0, 0, 0, + 0, 0, 281, 175, 78, 255, 257, 259, 260, 261, + 269, 271, 274, 275, 276, 277, 278, 282, 
283, 176, + 0, 256, 258, 262, 263, 265, 272, 273, 0, 0, + 56, 279, 280, 54, 79, 0, 57, 300, 0, 22, + 55, 0, 0, 193, 299, 0, 58, 0, 303, 304, + 302, 309, 311, 308, 310, 305, 306, 307, 312, 0, + 0, 0, 77, 0, 0, 0, 0, 0, 18, 19, + 0, 0, 20, 0, 0, 0, 0, 0, 78, 0, + 0, 0, 0, 63, 64, 65, 66, 67, 68, 69, + 70, 71, 72, 73, 74, 75, 76, 0, 0, 0, + 13, 0, 0, 0, 24, 0, 30, 0, 0, 31, + 32, 56, 38, 109, 54, 79, 0, 57, 355, 0, + 22, 55, 0, 0, 0, 0, 0, 58, 0, 303, + 304, 302, 309, 311, 308, 310, 305, 306, 307, 312, + 91, 93, 0, 77, 0, 0, 0, 0, 0, 18, + 19, 102, 103, 20, 0, 105, 106, 108, 92, 78, + 0, 0, 0, 0, 63, 64, 65, 66, 67, 68, + 69, 70, 71, 72, 73, 74, 75, 76, 0, 0, + 0, 13, 107, 0, 0, 24, 0, 30, 0, 56, + 31, 32, 54, 79, 0, 57, 0, 0, 22, 55, + 0, 0, 0, 0, 0, 58, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 77, 0, 0, 0, 0, 0, 18, 19, 0, + 0, 20, 0, 0, 0, 17, 79, 78, 0, 0, + 0, 22, 63, 64, 65, 66, 67, 68, 69, 70, + 71, 72, 73, 74, 75, 76, 0, 0, 0, 13, 0, 0, 0, 24, 0, 30, 0, 0, 31, 32, - 18, 19, 0, 0, 20, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 11, 12, 14, 15, 16, - 21, 23, 25, 26, 27, 28, 29, 33, 34, 107, - 0, 0, 13, 0, 0, 0, 24, 176, 30, 0, - 0, 31, 32, 0, 0, 0, 0, 0, 107, 0, - 0, 0, 0, 0, 0, 0, 89, 91, 92, 0, - 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, - 0, 103, 104, 106, 90, 89, 91, 92, 0, 93, - 94, 95, 96, 97, 98, 99, 100, 101, 102, 0, - 103, 104, 106, 90, 107, 0, 0, 0, 105, 0, + 18, 19, 0, 0, 20, 0, 0, 0, 17, 35, + 0, 0, 0, 0, 22, 11, 12, 14, 15, 16, + 21, 23, 25, 26, 27, 28, 29, 33, 34, 0, + 0, 0, 13, 0, 0, 0, 24, 0, 30, 0, + 0, 31, 32, 18, 19, 0, 0, 20, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 11, 12, + 14, 15, 16, 21, 23, 25, 26, 27, 28, 29, + 33, 34, 109, 0, 0, 13, 0, 0, 0, 24, + 0, 30, 0, 0, 31, 32, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, + 93, 94, 0, 95, 96, 0, 0, 99, 100, 0, + 102, 103, 104, 0, 105, 106, 108, 92, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 107, 0, 0, 0, 105, 0, 0, - 0, 89, 91, 92, 0, 93, 94, 95, 0, 97, - 98, 99, 100, 101, 102, 0, 103, 104, 106, 90, - 89, 91, 92, 0, 93, 94, 0, 0, 97, 98, - 0, 100, 101, 102, 0, 103, 104, 106, 90, 0, - 0, 0, 0, 105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 105, + 0, 107, } var yyPact = [...]int16{ - 31, 131, 573, 573, 409, 530, -1000, -1000, -1000, 103, + 13, 162, 746, 746, 582, 703, -1000, -1000, -1000, 139, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, 305, -1000, 228, -1000, 654, + -1000, -1000, -1000, -1000, -1000, 438, -1000, 284, -1000, 173, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, 21, 98, -1000, -1000, 487, -1000, 487, 99, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, 252, -1000, -1000, - 360, -1000, -1000, 266, 214, -1000, -1000, 20, -1000, -49, - -49, -49, -49, -49, -49, -49, -49, -49, -49, -49, - -49, -49, -49, -49, -49, 50, 48, 304, 98, -55, - -1000, 167, 167, 328, -1000, 635, 52, -1000, 302, -1000, - -1000, 261, 70, -1000, -1000, 207, -1000, 102, -1000, 96, - 154, 487, -1000, -56, -41, -1000, 487, 487, 487, 487, - 487, 487, 487, 487, 487, 487, 487, 487, 487, 487, - 487, -1000, 100, -1000, -1000, 47, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, 39, 39, 350, -1000, -1000, -1000, -1000, - 178, -1000, -1000, 157, -1000, 654, -1000, -1000, 196, -1000, - 45, -1000, -1000, -1000, -1000, -1000, 43, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, 16, 171, 163, -1000, 
-1000, -1000, - 408, 406, 167, 167, 167, 167, 52, 52, 119, 119, - 119, 719, 700, 119, 119, 719, 52, 52, 119, 52, - 406, -1000, 24, -1000, -1000, -1000, 340, -1000, 329, -1000, + -1000, -1000, 41, 22, 128, -1000, -1000, 660, -1000, 660, + 134, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 432, + -1000, -1000, 385, -1000, -1000, 278, 213, -1000, -1000, 23, + -1000, -58, -58, -58, -58, -58, -58, -58, -58, -58, + -58, -58, -58, -58, -58, -58, -58, 344, 233, 244, + 427, 427, 427, 427, 427, 427, 128, -51, -1000, 124, + 124, 501, -1000, 3, 267, 12, -15, -1000, 375, -1000, + -1000, 289, 61, -1000, -1000, 368, -1000, 226, -1000, 211, + 353, 660, -1000, -46, -41, -1000, 660, 660, 660, 660, + 660, 660, 660, 660, 660, 660, 660, 660, 660, 660, + 660, -1000, -1000, -1000, 427, 427, -1000, 127, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, 90, 90, 277, -1000, 41, + 258, 258, -15, -15, -15, -15, -1000, -1000, -1000, 423, + -1000, -1000, 84, -1000, 173, -1000, -1000, -1000, 366, -1000, + 144, -1000, -1000, -1000, -1000, -1000, 116, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, 62, 46, 1, -1000, -1000, -1000, + 500, 21, 124, 124, 124, 124, 12, 12, 579, 579, + 579, 808, 219, 579, 579, 808, 12, 12, 579, 12, + 21, -15, 267, 68, -1000, -1000, -1000, 297, -1000, 377, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, 487, -1000, -1000, -1000, -1000, -1000, - -1000, 34, 34, 15, 34, 40, 40, 331, 32, -1000, - -1000, 204, 201, 199, 195, 193, 182, 181, 179, 175, - 159, 136, -1000, -1000, -1000, -1000, -1000, -1000, 97, -1000, - -1000, -1000, 13, -1000, 654, -1000, -1000, -1000, 34, -1000, - 11, 8, 486, -1000, -1000, -1000, 54, 174, 174, 174, - 39, 41, 41, 54, 41, 54, -73, -1000, -1000, -1000, - -1000, -1000, 34, 34, -1000, -1000, -1000, 34, -1000, -1000, - -1000, -1000, -1000, -1000, 174, -1000, -1000, -1000, -1000, -1000, - -1000, -1000, -1000, -1000, -1000, -1000, -1000, 30, -1000, 165, - -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, 660, -1000, -1000, -1000, -1000, + -1000, -1000, 77, 77, 59, 77, 94, 94, 279, 83, + -1000, -1000, 242, 241, 235, 232, 228, 204, 176, 175, + 172, 171, 170, -1000, -1000, -1000, -1000, -1000, -1000, 87, + -1000, -1000, -1000, 349, -1000, 173, -1000, -1000, -1000, 77, + -1000, 49, 45, 581, -1000, -1000, -1000, 24, 442, 442, + 442, 90, 131, 131, 24, 131, 24, -62, -1000, -1000, + -1000, -1000, -1000, 77, 77, -1000, -1000, -1000, 77, -1000, + -1000, -1000, -1000, -1000, -1000, 442, -1000, -1000, -1000, -1000, + -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, 147, -1000, + 316, -1000, -1000, -1000, -1000, } var yyPgo = [...]int16{ - 0, 378, 13, 377, 5, 16, 374, 275, 373, 372, - 12, 370, 224, 354, 368, 14, 366, 10, 11, 365, - 364, 7, 363, 8, 4, 357, 2, 1, 3, 344, - 27, 0, 338, 332, 18, 194, 314, 312, 6, 311, - 303, 17, 302, 43, 301, 9, 300, 282, 281, 280, - 269, 255, 233, 238, 235, + 0, 449, 13, 447, 5, 18, 443, 284, 78, 439, + 12, 426, 173, 250, 409, 14, 407, 16, 11, 406, + 404, 7, 403, 8, 4, 402, 2, 1, 3, 398, + 29, 0, 390, 389, 22, 102, 383, 376, 6, 375, + 364, 21, 352, 23, 336, 19, 334, 332, 330, 328, + 327, 317, 249, 9, 315, 10, 313, 307, } var yyR1 = [...]int8{ - 0, 53, 53, 53, 53, 53, 53, 53, 38, 38, + 0, 56, 56, 56, 56, 56, 56, 56, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, - 
33, 33, 33, 33, 34, 34, 36, 36, 36, 36, + 38, 33, 33, 33, 33, 34, 34, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, - 36, 36, 35, 37, 37, 47, 47, 42, 42, 42, - 42, 17, 17, 17, 17, 16, 16, 16, 4, 4, - 4, 39, 41, 41, 40, 40, 40, 48, 46, 46, - 46, 32, 32, 32, 9, 9, 44, 50, 50, 50, - 50, 50, 50, 51, 52, 52, 52, 43, 43, 43, - 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, - 13, 13, 7, 7, 7, 7, 7, 7, 7, 7, + 36, 36, 36, 35, 37, 37, 47, 47, 42, 42, + 42, 42, 17, 17, 17, 17, 16, 16, 16, 4, + 4, 4, 39, 41, 41, 40, 40, 40, 48, 55, + 46, 46, 32, 32, 32, 9, 9, 44, 50, 50, + 50, 50, 50, 50, 51, 52, 52, 52, 43, 43, + 43, 1, 1, 1, 2, 2, 2, 2, 2, 2, + 2, 13, 13, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, - 7, 7, 7, 7, 7, 7, 12, 12, 12, 12, - 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, - 54, 20, 20, 20, 20, 19, 19, 19, 19, 19, - 19, 19, 19, 19, 29, 29, 29, 21, 21, 21, - 21, 22, 22, 22, 23, 23, 23, 23, 23, 23, - 23, 23, 23, 23, 23, 24, 24, 25, 25, 25, - 11, 11, 11, 11, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 3, 3, 3, 6, 6, + 7, 7, 7, 7, 7, 7, 7, 12, 12, 12, + 12, 14, 14, 14, 15, 15, 15, 15, 15, 15, + 15, 57, 20, 20, 20, 20, 19, 19, 19, 19, + 19, 19, 19, 19, 19, 29, 29, 29, 21, 21, + 21, 21, 22, 22, 22, 23, 23, 23, 23, 23, + 23, 23, 23, 23, 23, 23, 24, 24, 25, 25, + 25, 11, 11, 11, 11, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, - 6, 6, 6, 6, 6, 6, 6, 8, 8, 5, - 5, 5, 5, 45, 45, 28, 28, 30, 30, 31, - 31, 27, 26, 26, 49, 10, 18, 18, + 6, 6, 6, 6, 6, 6, 6, 6, 8, 8, + 5, 5, 5, 5, 45, 45, 28, 28, 30, 30, + 31, 31, 27, 26, 26, 49, 10, 18, 18, 53, + 53, 53, 53, 53, 53, 53, 53, 53, 54, } var yyR2 = [...]int8{ 0, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 3, 3, 2, 2, 2, 2, 4, 4, 4, 4, + 1, 3, 3, 2, 2, 2, 2, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, - 4, 4, 1, 0, 1, 3, 3, 1, 1, 3, - 3, 3, 4, 2, 1, 3, 1, 2, 1, 1, - 1, 2, 3, 2, 3, 1, 2, 3, 3, 4, - 3, 3, 5, 3, 1, 1, 4, 6, 5, 6, - 5, 4, 3, 2, 2, 1, 1, 3, 4, 2, - 3, 1, 2, 3, 3, 1, 3, 3, 2, 1, + 4, 4, 4, 1, 0, 1, 3, 3, 1, 1, + 3, 3, 3, 4, 2, 1, 3, 1, 2, 1, + 1, 1, 2, 3, 2, 3, 1, 2, 3, 1, + 3, 3, 3, 5, 3, 1, 1, 4, 6, 5, + 6, 5, 4, 3, 2, 2, 1, 1, 3, 4, + 2, 3, 1, 2, 3, 3, 1, 3, 3, 2, + 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 3, 4, 2, + 0, 3, 1, 2, 3, 3, 1, 3, 3, 2, + 1, 2, 0, 3, 2, 1, 1, 3, 1, 3, + 4, 1, 3, 5, 5, 1, 1, 1, 4, 3, + 3, 2, 3, 1, 2, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 4, 3, 3, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 3, 4, 2, 0, - 3, 1, 2, 3, 3, 1, 3, 3, 2, 1, - 2, 0, 3, 2, 1, 1, 3, 1, 3, 4, - 1, 3, 5, 5, 1, 1, 1, 4, 3, 3, - 2, 3, 1, 2, 3, 3, 3, 3, 3, 3, - 3, 3, 3, 3, 3, 4, 3, 3, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, - 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, - 1, 1, 2, 1, 1, 1, 0, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, + 1, 1, 1, 2, 1, 1, 1, 0, 1, 1, + 2, 3, 3, 3, 3, 3, 3, 1, 3, } var yyChk = [...]int16{ - -1000, -53, 98, 99, 100, 101, 2, 10, -13, -7, + -1000, -56, 98, 99, 100, 101, 2, 10, -13, -7, -12, 62, 63, 79, 64, 65, 66, 12, 47, 48, 51, 67, 18, 68, 83, 69, 70, 71, 72, 73, - 85, 88, 89, 74, 75, 13, -54, -13, 10, -38, + 85, 88, 89, 74, 75, 13, -57, -13, 10, -38, -33, -36, -39, -44, -45, -46, -48, -49, -50, -51, - -52, -32, -3, 12, 19, 9, 15, 25, -8, -7, - -43, 62, 63, 64, 65, 66, 67, 68, 69, 70, - 71, 72, 73, 74, 75, 41, 57, 13, -52, -12, - -14, 20, -15, 12, -10, 2, 25, -20, 2, 41, - 59, 42, 43, 45, 46, 
47, 48, 49, 50, 51, - 52, 53, 54, 56, 57, 83, 58, 14, -34, -41, - 2, 79, 85, 15, -41, -38, -38, -43, -1, 20, + -52, -32, -53, -3, 12, 19, 9, 15, 25, -8, + -7, -43, -54, 62, 63, 64, 65, 66, 67, 68, + 69, 70, 71, 72, 73, 74, 75, 41, 57, 13, + -52, -12, -14, 20, -15, 12, -10, 2, 25, -20, + 2, 41, 59, 42, 43, 45, 46, 47, 48, 49, + 50, 51, 52, 53, 54, 56, 57, 83, 58, 14, + 41, 57, 53, 42, 52, 56, -34, -41, 2, 79, + 85, 15, -41, -38, -53, -38, -53, -43, -1, 20, -2, 12, -10, 2, 20, 7, 2, 4, 2, 4, 24, -35, -42, -37, -47, 78, -35, -35, -35, -35, -35, -35, -35, -35, -35, -35, -35, -35, -35, -35, - -35, -45, 57, 2, -31, -9, 2, -28, -30, 88, - 89, 19, 9, 41, 57, -45, 2, -41, -34, -17, - 15, 2, -17, -40, 22, -38, 22, 20, 7, 2, + -35, -53, 2, -45, -8, 15, -31, -9, 2, -28, + -30, 88, 89, 19, 9, 41, 57, -55, 2, -53, + -53, -53, -53, -53, -53, -53, -41, -34, -17, 15, + 2, -17, -40, 22, -38, 22, 22, 20, 7, 2, -5, 2, 4, 54, 44, 55, -5, 20, -15, 25, 2, 25, 2, -19, 5, -29, -21, 12, -28, -30, 16, -38, 82, 84, 80, 81, -38, -38, -38, -38, -38, -38, -38, -38, -38, -38, -38, -38, -38, -38, - -38, -45, 15, -28, -28, 21, 6, 2, -16, 22, - -4, -6, 25, 2, 62, 78, 63, 79, 64, 65, - 66, 80, 81, 12, 82, 47, 48, 51, 67, 18, - 68, 83, 84, 69, 70, 71, 72, 73, 88, 89, - 59, 74, 75, 22, 7, 20, -2, 25, 2, 25, - 2, 26, 26, -30, 26, 41, 57, -22, 24, 17, - -23, 30, 28, 29, 35, 36, 37, 33, 31, 34, - 32, 38, -17, -17, -18, -17, -18, 22, -45, 21, - 2, 22, 7, 2, -38, -27, 19, -27, 26, -27, - -21, -21, 24, 17, 2, 17, 6, 6, 6, 6, - 6, 6, 6, 6, 6, 6, 6, 21, 2, 22, - -4, -27, 26, 26, 17, -23, -26, 57, -27, -31, - -31, -31, -28, -24, 14, -24, -26, -24, -26, -11, - 92, 93, 94, 95, -27, -27, -27, -25, -31, 24, - 21, 2, 21, -31, + -38, -53, -53, 15, -28, -28, 21, 6, 2, -16, + 22, -4, -6, 25, 2, 62, 78, 63, 79, 64, + 65, 66, 80, 81, 12, 82, 47, 48, 51, 67, + 18, 68, 83, 84, 69, 70, 71, 72, 73, 88, + 89, 59, 74, 75, 22, 7, 20, -2, 25, 2, + 25, 2, 26, 26, -30, 26, 41, 57, -22, 24, + 17, -23, 30, 28, 29, 35, 36, 37, 33, 31, + 34, 32, 38, -17, -17, -18, -17, -18, 22, -55, + 21, 2, 22, 7, 2, -38, -27, 19, -27, 26, + -27, -21, -21, 24, 17, 2, 17, 6, 6, 6, + 6, 6, 6, 6, 6, 6, 6, 6, 21, 2, + 22, -4, -27, 26, 26, 17, -23, -26, 57, -27, + -31, -31, -31, -28, -24, 14, -24, -26, -24, -26, + -11, 92, 93, 94, 95, -27, -27, -27, -25, -31, + 24, 21, 2, 21, -31, } var yyDef = [...]int16{ - 0, -2, 129, 129, 0, 0, 7, 6, 1, 129, - 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, - 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, - 121, 122, 123, 124, 125, 0, 2, -2, 3, 4, + 0, -2, 130, 130, 0, 0, 7, 6, 1, 130, + 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, + 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, + 122, 123, 124, 125, 126, 0, 2, -2, 3, 4, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, - 18, 19, 0, 108, 233, 234, 0, 244, 0, 85, - 86, -2, -2, -2, -2, -2, -2, -2, -2, -2, - -2, -2, -2, -2, -2, 227, 228, 0, 5, 100, - 0, 128, 131, 0, 135, 139, 245, 140, 144, 43, - 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, - 43, 43, 43, 43, 43, 0, 0, 0, 0, 22, - 23, 0, 0, 0, 61, 0, 83, 84, 0, 89, - 91, 0, 95, 99, 126, 0, 132, 0, 138, 0, - 143, 0, 42, 47, 48, 44, 0, 0, 0, 0, + 18, 19, 20, 0, 109, 234, 235, 0, 245, 0, + 86, 87, 257, -2, -2, -2, -2, -2, -2, -2, + -2, -2, -2, -2, -2, -2, -2, 228, 229, 0, + 5, 101, 0, 129, 132, 0, 136, 140, 246, 141, + 145, 44, 44, 44, 44, 44, 44, 44, 44, 44, + 44, 44, 44, 44, 44, 44, 44, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 23, 24, 0, + 0, 0, 62, 0, 20, 84, -2, 85, 0, 90, + 92, 0, 96, 100, 127, 0, 133, 0, 139, 0, + 144, 0, 43, 48, 49, 45, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, - 0, 68, 0, 70, 71, 0, 73, 239, 240, 74, - 75, 235, 236, 0, 0, 0, 82, 20, 21, 24, - 0, 54, 25, 0, 63, 65, 67, 87, 0, 92, - 0, 98, 229, 230, 231, 232, 0, 127, 130, 133, - 136, 134, 137, 142, 145, 147, 150, 154, 155, 156, - 0, 26, 0, 0, -2, -2, 27, 28, 29, 30, - 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, - 41, 69, 0, 237, 238, 76, 0, 81, 0, 53, - 56, 58, 59, 60, 198, 199, 200, 201, 202, 203, + 0, 70, 71, 249, 0, 0, 72, 0, 74, 240, + 241, 75, 76, 236, 237, 0, 0, 0, 83, 69, + 251, 252, 253, 254, 255, 256, 21, 22, 25, 0, + 55, 26, 0, 64, 66, 68, 258, 88, 0, 93, + 0, 99, 230, 231, 232, 233, 0, 128, 131, 134, + 137, 135, 138, 143, 146, 148, 151, 155, 156, 157, + 0, 27, 0, 0, -2, -2, 28, 29, 30, 31, + 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, + 42, 250, 0, 0, 238, 239, 77, 0, 82, 0, + 54, 57, 59, 60, 61, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, - 224, 225, 226, 62, 66, 88, 90, 93, 97, 94, - 96, 0, 0, 0, 0, 0, 0, 0, 0, 160, - 162, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 45, 46, 49, 247, 50, 72, 0, 78, - 80, 51, 0, 57, 64, 146, 241, 148, 0, 151, - 0, 0, 0, 158, 163, 159, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 77, 79, 52, - 55, 149, 0, 0, 157, 161, 164, 0, 243, 165, - 166, 167, 168, 169, 0, 170, 171, 172, 173, 174, - 180, 181, 182, 183, 152, 153, 242, 0, 178, 0, - 176, 179, 175, 177, + 224, 225, 226, 227, 63, 67, 89, 91, 94, 98, + 95, 97, 0, 0, 0, 0, 0, 0, 0, 0, + 161, 163, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 46, 47, 50, 248, 51, 73, 0, + 79, 81, 52, 0, 58, 65, 147, 242, 149, 0, + 152, 0, 0, 0, 159, 164, 160, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 78, 80, + 53, 56, 150, 0, 0, 158, 162, 165, 0, 244, + 166, 167, 168, 169, 170, 0, 171, 172, 173, 174, + 175, 181, 182, 183, 184, 153, 154, 243, 0, 179, + 0, 177, 180, 176, 178, } var yyTok1 = [...]int8{ @@ -1060,46 +1084,41 @@ yydefault: { yylex.(*parser).unexpected("", "") } - case 20: + case 21: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, yyDollar[2].node, yyDollar[3].node) } - case 21: + case 22: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, yyDollar[3].node, yyDollar[2].node) } - case 22: + case 23: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, &AggregateExpr{}, yyDollar[2].node) } - case 23: + case 24: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("aggregation", "") yyVAL.node = yylex.(*parser).newAggregateExpr(yyDollar[1].item, &AggregateExpr{}, Expressions{}) } - case 24: - yyDollar = yyS[yypt-2 : yypt+1] - { - yyVAL.node = &AggregateExpr{ - Grouping: yyDollar[2].strings, - } - } case 25: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.node = &AggregateExpr{ Grouping: yyDollar[2].strings, - Without: true, } } case 26: - yyDollar = yyS[yypt-4 : yypt+1] + yyDollar = yyS[yypt-2 : yypt+1] { - yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) + yyVAL.node = &AggregateExpr{ + Grouping: yyDollar[2].strings, + Without: true, + } } case 27: yyDollar = yyS[yypt-4 : yypt+1] @@ -1176,14 +1195,19 @@ yydefault: { yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) } - case 43: + case 42: + yyDollar = yyS[yypt-4 : yypt+1] + { + yyVAL.node = yylex.(*parser).newBinaryExpression(yyDollar[1].node, yyDollar[2].item, yyDollar[3].node, yyDollar[4].node) + } + case 44: yyDollar = 
yyS[yypt-0 : yypt+1] { yyVAL.node = &BinaryExpr{ VectorMatching: &VectorMatching{Card: CardOneToOne}, } } - case 44: + case 45: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.node = &BinaryExpr{ @@ -1191,71 +1215,71 @@ yydefault: ReturnBool: true, } } - case 45: + case 46: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.MatchingLabels = yyDollar[3].strings } - case 46: + case 47: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.MatchingLabels = yyDollar[3].strings yyVAL.node.(*BinaryExpr).VectorMatching.On = true } - case 49: + case 50: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.Card = CardManyToOne yyVAL.node.(*BinaryExpr).VectorMatching.Include = yyDollar[3].strings } - case 50: + case 51: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = yyDollar[1].node yyVAL.node.(*BinaryExpr).VectorMatching.Card = CardOneToMany yyVAL.node.(*BinaryExpr).VectorMatching.Include = yyDollar[3].strings } - case 51: + case 52: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.strings = yyDollar[2].strings } - case 52: + case 53: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.strings = yyDollar[2].strings } - case 53: + case 54: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.strings = []string{} } - case 54: + case 55: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("grouping opts", "\"(\"") yyVAL.strings = nil } - case 55: + case 56: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.strings = append(yyDollar[1].strings, yyDollar[3].item.Val) } - case 56: + case 57: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.strings = []string{yyDollar[1].item.Val} } - case 57: + case 58: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("grouping opts", "\",\" or \")\"") yyVAL.strings = yyDollar[1].strings } - case 58: + case 59: yyDollar = yyS[yypt-1 : yypt+1] { if !model.LabelName(yyDollar[1].item.Val).IsValid() { @@ -1263,7 +1287,7 @@ yydefault: } yyVAL.item = yyDollar[1].item } - case 59: + case 60: yyDollar = yyS[yypt-1 : yypt+1] { unquoted := yylex.(*parser).unquoteString(yyDollar[1].item.Val) @@ -1274,13 +1298,13 @@ yydefault: yyVAL.item.Pos++ yyVAL.item.Val = unquoted } - case 60: + case 61: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("grouping opts", "label") yyVAL.item = Item{} } - case 61: + case 62: yyDollar = yyS[yypt-2 : yypt+1] { fn, exist := getFunction(yyDollar[1].item.Val, yylex.(*parser).functions) @@ -1299,78 +1323,87 @@ yydefault: }, } } - case 62: + case 63: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = yyDollar[2].node } - case 63: + case 64: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.node = Expressions{} } - case 64: + case 65: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = append(yyDollar[1].node.(Expressions), yyDollar[3].node.(Expr)) } - case 65: + case 66: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.node = Expressions{yyDollar[1].node.(Expr)} } - case 66: + case 67: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).addParseErrf(yyDollar[2].item.PositionRange(), "trailing commas not allowed in function call args") yyVAL.node = yyDollar[1].node } - case 67: + case 68: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = &ParenExpr{Expr: yyDollar[2].node.(Expr), PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[3].item)} } - case 68: - yyDollar = yyS[yypt-3 : yypt+1] - { - numLit, _ := yyDollar[3].node.(*NumberLiteral) - dur := time.Duration(numLit.Val*1000) * time.Millisecond - yylex.(*parser).addOffset(yyDollar[1].node, dur) - yyVAL.node = 
yyDollar[1].node - } case 69: - yyDollar = yyS[yypt-4 : yypt+1] + yyDollar = yyS[yypt-1 : yypt+1] { - numLit, _ := yyDollar[4].node.(*NumberLiteral) - dur := time.Duration(numLit.Val*1000) * time.Millisecond - yylex.(*parser).addOffset(yyDollar[1].node, -dur) + if numLit, ok := yyDollar[1].node.(*NumberLiteral); ok { + if numLit.Val <= 0 { + yylex.(*parser).addParseErrf(numLit.PositionRange(), "duration must be greater than 0") + yyVAL.node = &NumberLiteral{Val: 0} // Return 0 on error. + break + } + yyVAL.node = yyDollar[1].node + break + } yyVAL.node = yyDollar[1].node } case 70: yyDollar = yyS[yypt-3 : yypt+1] { - yylex.(*parser).unexpected("offset", "number or duration") + if numLit, ok := yyDollar[3].node.(*NumberLiteral); ok { + yylex.(*parser).addOffset(yyDollar[1].node, time.Duration(numLit.Val*1000)*time.Millisecond) + yyVAL.node = yyDollar[1].node + break + } + yylex.(*parser).addOffsetExpr(yyDollar[1].node, yyDollar[3].node.(*DurationExpr)) yyVAL.node = yyDollar[1].node } case 71: yyDollar = yyS[yypt-3 : yypt+1] { - yylex.(*parser).setTimestamp(yyDollar[1].node, yyDollar[3].float) + yylex.(*parser).unexpected("offset", "number or duration") yyVAL.node = yyDollar[1].node } case 72: + yyDollar = yyS[yypt-3 : yypt+1] + { + yylex.(*parser).setTimestamp(yyDollar[1].node, yyDollar[3].float) + yyVAL.node = yyDollar[1].node + } + case 73: yyDollar = yyS[yypt-5 : yypt+1] { yylex.(*parser).setAtModifierPreprocessor(yyDollar[1].node, yyDollar[3].item) yyVAL.node = yyDollar[1].node } - case 73: + case 74: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).unexpected("@", "timestamp") yyVAL.node = yyDollar[1].node } - case 76: + case 77: yyDollar = yyS[yypt-4 : yypt+1] { var errMsg string @@ -1388,61 +1421,80 @@ yydefault: yylex.(*parser).addParseErrf(errRange, "%s", errMsg) } - numLit, _ := yyDollar[3].node.(*NumberLiteral) + var rangeNl time.Duration + if numLit, ok := yyDollar[3].node.(*NumberLiteral); ok { + rangeNl = time.Duration(numLit.Val*1000) * time.Millisecond + } + rangeExpr, _ := yyDollar[3].node.(*DurationExpr) yyVAL.node = &MatrixSelector{ VectorSelector: yyDollar[1].node.(Expr), - Range: time.Duration(numLit.Val*1000) * time.Millisecond, + Range: rangeNl, + RangeExpr: rangeExpr, EndPos: yylex.(*parser).lastClosing, } } - case 77: + case 78: yyDollar = yyS[yypt-6 : yypt+1] { - numLitRange, _ := yyDollar[3].node.(*NumberLiteral) - numLitStep, _ := yyDollar[5].node.(*NumberLiteral) - yyVAL.node = &SubqueryExpr{ - Expr: yyDollar[1].node.(Expr), - Range: time.Duration(numLitRange.Val*1000) * time.Millisecond, - Step: time.Duration(numLitStep.Val*1000) * time.Millisecond, - EndPos: yyDollar[6].item.Pos + 1, + var rangeNl time.Duration + var stepNl time.Duration + if numLit, ok := yyDollar[3].node.(*NumberLiteral); ok { + rangeNl = time.Duration(numLit.Val*1000) * time.Millisecond } - } - case 78: - yyDollar = yyS[yypt-5 : yypt+1] - { - numLitRange, _ := yyDollar[3].node.(*NumberLiteral) + rangeExpr, _ := yyDollar[3].node.(*DurationExpr) + if numLit, ok := yyDollar[5].node.(*NumberLiteral); ok { + stepNl = time.Duration(numLit.Val*1000) * time.Millisecond + } + stepExpr, _ := yyDollar[5].node.(*DurationExpr) yyVAL.node = &SubqueryExpr{ - Expr: yyDollar[1].node.(Expr), - Range: time.Duration(numLitRange.Val*1000) * time.Millisecond, - Step: 0, - EndPos: yyDollar[5].item.Pos + 1, + Expr: yyDollar[1].node.(Expr), + Range: rangeNl, + RangeExpr: rangeExpr, + Step: stepNl, + StepExpr: stepExpr, + EndPos: yyDollar[6].item.Pos + 1, } } case 79: + yyDollar = yyS[yypt-5 : yypt+1] + { + var 
rangeNl time.Duration + if numLit, ok := yyDollar[3].node.(*NumberLiteral); ok { + rangeNl = time.Duration(numLit.Val*1000) * time.Millisecond + } + rangeExpr, _ := yyDollar[3].node.(*DurationExpr) + yyVAL.node = &SubqueryExpr{ + Expr: yyDollar[1].node.(Expr), + Range: rangeNl, + RangeExpr: rangeExpr, + EndPos: yyDollar[5].item.Pos + 1, + } + } + case 80: yyDollar = yyS[yypt-6 : yypt+1] { yylex.(*parser).unexpected("subquery selector", "\"]\"") yyVAL.node = yyDollar[1].node } - case 80: + case 81: yyDollar = yyS[yypt-5 : yypt+1] { yylex.(*parser).unexpected("subquery selector", "number or duration or \"]\"") yyVAL.node = yyDollar[1].node } - case 81: + case 82: yyDollar = yyS[yypt-4 : yypt+1] { yylex.(*parser).unexpected("subquery or range", "\":\" or \"]\"") yyVAL.node = yyDollar[1].node } - case 82: + case 83: yyDollar = yyS[yypt-3 : yypt+1] { yylex.(*parser).unexpected("subquery selector", "number or duration") yyVAL.node = yyDollar[1].node } - case 83: + case 84: yyDollar = yyS[yypt-2 : yypt+1] { if nl, ok := yyDollar[2].node.(*NumberLiteral); ok { @@ -1455,7 +1507,7 @@ yydefault: yyVAL.node = &UnaryExpr{Op: yyDollar[1].item.Typ, Expr: yyDollar[2].node.(Expr), StartPos: yyDollar[1].item.Pos} } } - case 84: + case 85: yyDollar = yyS[yypt-2 : yypt+1] { vs := yyDollar[2].node.(*VectorSelector) @@ -1464,7 +1516,7 @@ yydefault: yylex.(*parser).assembleVectorSelector(vs) yyVAL.node = vs } - case 85: + case 86: yyDollar = yyS[yypt-1 : yypt+1] { vs := &VectorSelector{ @@ -1475,14 +1527,14 @@ yydefault: yylex.(*parser).assembleVectorSelector(vs) yyVAL.node = vs } - case 86: + case 87: yyDollar = yyS[yypt-1 : yypt+1] { vs := yyDollar[1].node.(*VectorSelector) yylex.(*parser).assembleVectorSelector(vs) yyVAL.node = vs } - case 87: + case 88: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.node = &VectorSelector{ @@ -1490,7 +1542,7 @@ yydefault: PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[3].item), } } - case 88: + case 89: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.node = &VectorSelector{ @@ -1498,7 +1550,7 @@ yydefault: PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[4].item), } } - case 89: + case 90: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.node = &VectorSelector{ @@ -1506,7 +1558,7 @@ yydefault: PosRange: mergeRanges(&yyDollar[1].item, &yyDollar[2].item), } } - case 90: + case 91: yyDollar = yyS[yypt-3 : yypt+1] { if yyDollar[1].matchers != nil { @@ -1515,38 +1567,32 @@ yydefault: yyVAL.matchers = yyDollar[1].matchers } } - case 91: + case 92: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.matchers = []*labels.Matcher{yyDollar[1].matcher} } - case 92: + case 93: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("label matching", "\",\" or \"}\"") yyVAL.matchers = yyDollar[1].matchers } - case 93: - yyDollar = yyS[yypt-3 : yypt+1] - { - yyVAL.matcher = yylex.(*parser).newLabelMatcher(yyDollar[1].item, yyDollar[2].item, yyDollar[3].item) - } case 94: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.matcher = yylex.(*parser).newLabelMatcher(yyDollar[1].item, yyDollar[2].item, yyDollar[3].item) } case 95: + yyDollar = yyS[yypt-3 : yypt+1] + { + yyVAL.matcher = yylex.(*parser).newLabelMatcher(yyDollar[1].item, yyDollar[2].item, yyDollar[3].item) + } + case 96: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.matcher = yylex.(*parser).newMetricNameMatcher(yyDollar[1].item) } - case 96: - yyDollar = yyS[yypt-3 : yypt+1] - { - yylex.(*parser).unexpected("label matching", "string") - yyVAL.matcher = nil - } case 97: yyDollar = yyS[yypt-3 : yypt+1] { @@ -1554,86 +1600,86 @@ yydefault: yyVAL.matcher = nil } case 98: + 
yyDollar = yyS[yypt-3 : yypt+1] + { + yylex.(*parser).unexpected("label matching", "string") + yyVAL.matcher = nil + } + case 99: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("label matching", "label matching operator") yyVAL.matcher = nil } - case 99: + case 100: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("label matching", "identifier or \"}\"") yyVAL.matcher = nil } - case 100: + case 101: yyDollar = yyS[yypt-2 : yypt+1] { b := labels.NewBuilder(yyDollar[2].labels) b.Set(labels.MetricName, yyDollar[1].item.Val) yyVAL.labels = b.Labels() } - case 101: + case 102: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.labels = yyDollar[1].labels } - case 126: - yyDollar = yyS[yypt-3 : yypt+1] - { - yyVAL.labels = labels.New(yyDollar[2].lblList...) - } case 127: - yyDollar = yyS[yypt-4 : yypt+1] + yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.labels = labels.New(yyDollar[2].lblList...) } case 128: - yyDollar = yyS[yypt-2 : yypt+1] + yyDollar = yyS[yypt-4 : yypt+1] { - yyVAL.labels = labels.New() + yyVAL.labels = labels.New(yyDollar[2].lblList...) } case 129: - yyDollar = yyS[yypt-0 : yypt+1] + yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.labels = labels.New() } case 130: + yyDollar = yyS[yypt-0 : yypt+1] + { + yyVAL.labels = labels.New() + } + case 131: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.lblList = append(yyDollar[1].lblList, yyDollar[3].label) } - case 131: + case 132: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.lblList = []labels.Label{yyDollar[1].label} } - case 132: + case 133: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("label set", "\",\" or \"}\"") yyVAL.lblList = yyDollar[1].lblList } - case 133: - yyDollar = yyS[yypt-3 : yypt+1] - { - yyVAL.label = labels.Label{Name: yyDollar[1].item.Val, Value: yylex.(*parser).unquoteString(yyDollar[3].item.Val)} - } case 134: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.label = labels.Label{Name: yyDollar[1].item.Val, Value: yylex.(*parser).unquoteString(yyDollar[3].item.Val)} } case 135: + yyDollar = yyS[yypt-3 : yypt+1] + { + yyVAL.label = labels.Label{Name: yyDollar[1].item.Val, Value: yylex.(*parser).unquoteString(yyDollar[3].item.Val)} + } + case 136: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.label = labels.Label{Name: labels.MetricName, Value: yyDollar[1].item.Val} } - case 136: - yyDollar = yyS[yypt-3 : yypt+1] - { - yylex.(*parser).unexpected("label set", "string") - yyVAL.label = labels.Label{} - } case 137: yyDollar = yyS[yypt-3 : yypt+1] { @@ -1641,18 +1687,24 @@ yydefault: yyVAL.label = labels.Label{} } case 138: + yyDollar = yyS[yypt-3 : yypt+1] + { + yylex.(*parser).unexpected("label set", "string") + yyVAL.label = labels.Label{} + } + case 139: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("label set", "\"=\"") yyVAL.label = labels.Label{} } - case 139: + case 140: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("label set", "identifier or \"}\"") yyVAL.label = labels.Label{} } - case 140: + case 141: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).generatedParserResult = &seriesDescription{ @@ -1660,33 +1712,33 @@ yydefault: values: yyDollar[2].series, } } - case 141: + case 142: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.series = []SequenceValue{} } - case 142: + case 143: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = append(yyDollar[1].series, yyDollar[3].series...) 
} - case 143: + case 144: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.series = yyDollar[1].series } - case 144: + case 145: yyDollar = yyS[yypt-1 : yypt+1] { yylex.(*parser).unexpected("series values", "") yyVAL.series = nil } - case 145: + case 146: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.series = []SequenceValue{{Omitted: true}} } - case 146: + case 147: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1694,12 +1746,12 @@ yydefault: yyVAL.series = append(yyVAL.series, SequenceValue{Omitted: true}) } } - case 147: + case 148: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.series = []SequenceValue{{Value: yyDollar[1].float}} } - case 148: + case 149: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1708,7 +1760,7 @@ yydefault: yyVAL.series = append(yyVAL.series, SequenceValue{Value: yyDollar[1].float}) } } - case 149: + case 150: yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1718,12 +1770,12 @@ yydefault: yyDollar[1].float += yyDollar[2].float } } - case 150: + case 151: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.series = []SequenceValue{{Histogram: yyDollar[1].histogram}} } - case 151: + case 152: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.series = []SequenceValue{} @@ -1733,7 +1785,7 @@ yydefault: //$1 += $2 } } - case 152: + case 153: yyDollar = yyS[yypt-5 : yypt+1] { val, err := yylex.(*parser).histogramsIncreaseSeries(yyDollar[1].histogram, yyDollar[3].histogram, yyDollar[5].uint) @@ -1742,7 +1794,7 @@ yydefault: } yyVAL.series = val } - case 153: + case 154: yyDollar = yyS[yypt-5 : yypt+1] { val, err := yylex.(*parser).histogramsDecreaseSeries(yyDollar[1].histogram, yyDollar[3].histogram, yyDollar[5].uint) @@ -1751,7 +1803,7 @@ yydefault: } yyVAL.series = val } - case 154: + case 155: yyDollar = yyS[yypt-1 : yypt+1] { if yyDollar[1].item.Val != "stale" { @@ -1759,130 +1811,130 @@ yydefault: } yyVAL.float = math.Float64frombits(value.StaleNaN) } - case 157: - yyDollar = yyS[yypt-4 : yypt+1] - { - yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&yyDollar[2].descriptors) - } case 158: - yyDollar = yyS[yypt-3 : yypt+1] + yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&yyDollar[2].descriptors) } case 159: yyDollar = yyS[yypt-3 : yypt+1] { - m := yylex.(*parser).newMap() - yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&m) + yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&yyDollar[2].descriptors) } case 160: - yyDollar = yyS[yypt-2 : yypt+1] + yyDollar = yyS[yypt-3 : yypt+1] { m := yylex.(*parser).newMap() yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&m) } case 161: + yyDollar = yyS[yypt-2 : yypt+1] + { + m := yylex.(*parser).newMap() + yyVAL.histogram = yylex.(*parser).buildHistogramFromMap(&m) + } + case 162: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = *(yylex.(*parser).mergeMaps(&yyDollar[1].descriptors, &yyDollar[3].descriptors)) } - case 162: + case 163: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.descriptors = yyDollar[1].descriptors } - case 163: + case 164: yyDollar = yyS[yypt-2 : yypt+1] { yylex.(*parser).unexpected("histogram description", "histogram description key, e.g. 
buckets:[5 10 7]") } - case 164: - yyDollar = yyS[yypt-3 : yypt+1] - { - yyVAL.descriptors = yylex.(*parser).newMap() - yyVAL.descriptors["schema"] = yyDollar[3].int - } case 165: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() - yyVAL.descriptors["sum"] = yyDollar[3].float + yyVAL.descriptors["schema"] = yyDollar[3].int } case 166: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() - yyVAL.descriptors["count"] = yyDollar[3].float + yyVAL.descriptors["sum"] = yyDollar[3].float } case 167: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() - yyVAL.descriptors["z_bucket"] = yyDollar[3].float + yyVAL.descriptors["count"] = yyDollar[3].float } case 168: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() - yyVAL.descriptors["z_bucket_w"] = yyDollar[3].float + yyVAL.descriptors["z_bucket"] = yyDollar[3].float } case 169: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() - yyVAL.descriptors["custom_values"] = yyDollar[3].bucket_set + yyVAL.descriptors["z_bucket_w"] = yyDollar[3].float } case 170: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() - yyVAL.descriptors["buckets"] = yyDollar[3].bucket_set + yyVAL.descriptors["custom_values"] = yyDollar[3].bucket_set } case 171: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() - yyVAL.descriptors["offset"] = yyDollar[3].int + yyVAL.descriptors["buckets"] = yyDollar[3].bucket_set } case 172: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() - yyVAL.descriptors["n_buckets"] = yyDollar[3].bucket_set + yyVAL.descriptors["offset"] = yyDollar[3].int } case 173: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() - yyVAL.descriptors["n_offset"] = yyDollar[3].int + yyVAL.descriptors["n_buckets"] = yyDollar[3].bucket_set } case 174: yyDollar = yyS[yypt-3 : yypt+1] { yyVAL.descriptors = yylex.(*parser).newMap() - yyVAL.descriptors["counter_reset_hint"] = yyDollar[3].item + yyVAL.descriptors["n_offset"] = yyDollar[3].int } case 175: - yyDollar = yyS[yypt-4 : yypt+1] + yyDollar = yyS[yypt-3 : yypt+1] { - yyVAL.bucket_set = yyDollar[2].bucket_set + yyVAL.descriptors = yylex.(*parser).newMap() + yyVAL.descriptors["counter_reset_hint"] = yyDollar[3].item } case 176: - yyDollar = yyS[yypt-3 : yypt+1] + yyDollar = yyS[yypt-4 : yypt+1] { yyVAL.bucket_set = yyDollar[2].bucket_set } case 177: yyDollar = yyS[yypt-3 : yypt+1] { - yyVAL.bucket_set = append(yyDollar[1].bucket_set, yyDollar[3].float) + yyVAL.bucket_set = yyDollar[2].bucket_set } case 178: + yyDollar = yyS[yypt-3 : yypt+1] + { + yyVAL.bucket_set = append(yyDollar[1].bucket_set, yyDollar[3].float) + } + case 179: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.bucket_set = []float64{yyDollar[1].float} } - case 233: + case 234: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.node = &NumberLiteral{ @@ -1890,7 +1942,7 @@ yydefault: PosRange: yyDollar[1].item.PositionRange(), } } - case 234: + case 235: yyDollar = yyS[yypt-1 : yypt+1] { var err error @@ -1902,14 +1954,15 @@ yydefault: yyVAL.node = &NumberLiteral{ Val: dur.Seconds(), PosRange: yyDollar[1].item.PositionRange(), + Duration: true, } } - case 235: + case 236: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.float = yylex.(*parser).number(yyDollar[1].item.Val) } - case 236: + case 237: yyDollar = yyS[yypt-1 : yypt+1] { var err error @@ -1920,17 +1973,17 @@ yydefault: } yyVAL.float = dur.Seconds() } - case 237: + case 238: 
yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.float = yyDollar[2].float } - case 238: + case 239: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.float = -yyDollar[2].float } - case 241: + case 242: yyDollar = yyS[yypt-1 : yypt+1] { var err error @@ -1939,17 +1992,17 @@ yydefault: yylex.(*parser).addParseErrf(yyDollar[1].item.PositionRange(), "invalid repetition in series values: %s", err) } } - case 242: + case 243: yyDollar = yyS[yypt-2 : yypt+1] { yyVAL.int = -int64(yyDollar[2].uint) } - case 243: + case 244: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.int = int64(yyDollar[1].uint) } - case 244: + case 245: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.node = &StringLiteral{ @@ -1957,7 +2010,7 @@ yydefault: PosRange: yyDollar[1].item.PositionRange(), } } - case 245: + case 246: yyDollar = yyS[yypt-1 : yypt+1] { yyVAL.item = Item{ @@ -1966,11 +2019,112 @@ yydefault: Val: yylex.(*parser).unquoteString(yyDollar[1].item.Val), } } - case 246: + case 247: yyDollar = yyS[yypt-0 : yypt+1] { yyVAL.strings = nil } + case 249: + yyDollar = yyS[yypt-1 : yypt+1] + { + nl := yyDollar[1].node.(*NumberLiteral) + if nl.Val > 1<<63/1e9 || nl.Val < -(1<<63)/1e9 { + yylex.(*parser).addParseErrf(nl.PosRange, "duration out of range") + yyVAL.node = &NumberLiteral{Val: 0} + break + } + yyVAL.node = nl + } + case 250: + yyDollar = yyS[yypt-2 : yypt+1] + { + switch expr := yyDollar[2].node.(type) { + case *NumberLiteral: + if yyDollar[1].item.Typ == SUB { + expr.Val *= -1 + } + if expr.Val > 1<<63/1e9 || expr.Val < -(1<<63)/1e9 { + yylex.(*parser).addParseErrf(yyDollar[1].item.PositionRange(), "duration out of range") + yyVAL.node = &NumberLiteral{Val: 0} + break + } + expr.PosRange.Start = yyDollar[1].item.Pos + yyVAL.node = expr + break + case *DurationExpr: + if yyDollar[1].item.Typ == SUB { + yyVAL.node = &DurationExpr{ + Op: SUB, + RHS: expr, + StartPos: yyDollar[1].item.Pos, + } + break + } + yyVAL.node = expr + break + default: + yylex.(*parser).addParseErrf(yyDollar[1].item.PositionRange(), "expected number literal or duration expression") + yyVAL.node = &NumberLiteral{Val: 0} + break + } + } + case 251: + yyDollar = yyS[yypt-3 : yypt+1] + { + yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) + yyVAL.node = &DurationExpr{Op: ADD, LHS: yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} + } + case 252: + yyDollar = yyS[yypt-3 : yypt+1] + { + yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) + yyVAL.node = &DurationExpr{Op: SUB, LHS: yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} + } + case 253: + yyDollar = yyS[yypt-3 : yypt+1] + { + yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) + yyVAL.node = &DurationExpr{Op: MUL, LHS: yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} + } + case 254: + yyDollar = yyS[yypt-3 : yypt+1] + { + yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) + if nl, ok := yyDollar[3].node.(*NumberLiteral); ok && nl.Val == 0 { + yylex.(*parser).addParseErrf(yyDollar[2].item.PositionRange(), "division by zero") + yyVAL.node = &NumberLiteral{Val: 0} + break + } + yyVAL.node = &DurationExpr{Op: DIV, LHS: yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} + } + case 255: + yyDollar = yyS[yypt-3 : yypt+1] + { + yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) + if nl, ok := yyDollar[3].node.(*NumberLiteral); ok && nl.Val == 0 { + yylex.(*parser).addParseErrf(yyDollar[2].item.PositionRange(), "modulo by zero") + yyVAL.node = &NumberLiteral{Val: 0} + break + } + yyVAL.node = &DurationExpr{Op: MOD, LHS: 
yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} + } + case 256: + yyDollar = yyS[yypt-3 : yypt+1] + { + yylex.(*parser).experimentalDurationExpr(yyDollar[1].node.(Expr)) + yyVAL.node = &DurationExpr{Op: POW, LHS: yyDollar[1].node.(Expr), RHS: yyDollar[3].node.(Expr)} + } + case 258: + yyDollar = yyS[yypt-3 : yypt+1] + { + yylex.(*parser).experimentalDurationExpr(yyDollar[2].node.(Expr)) + if durationExpr, ok := yyDollar[2].node.(*DurationExpr); ok { + durationExpr.Wrapped = true + yyVAL.node = durationExpr + break + } + yyVAL.node = yyDollar[2].node + } } goto yystack /* stack new state and value */ } diff --git a/promql/parser/lex.go b/promql/parser/lex.go index 0b76911e39..66522f59da 100644 --- a/promql/parser/lex.go +++ b/promql/parser/lex.go @@ -277,6 +277,7 @@ type Lexer struct { braceOpen bool // Whether a { is opened. bracketOpen bool // Whether a [ is opened. gotColon bool // Whether we got a ':' after [ was opened. + gotDuration bool // Whether we got a duration after [ was opened. stringOpen rune // Quote rune of the string currently being read. // series description variables for internal PromQL testing framework as well as in promtool rules unit tests. @@ -491,7 +492,7 @@ func lexStatements(l *Lexer) stateFn { skipSpaces(l) } l.bracketOpen = true - return lexNumberOrDuration + return lexDurationExpr case r == ']': if !l.bracketOpen { return l.errorf("unexpected right bracket %q", r) @@ -549,6 +550,8 @@ func lexHistogram(l *Lexer) stateFn { return lexNumber case r == '[': l.bracketOpen = true + l.gotColon = false + l.gotDuration = false l.emit(LEFT_BRACKET) return lexBuckets case r == '}' && l.peek() == '}': @@ -1077,3 +1080,64 @@ func isDigit(r rune) bool { func isAlpha(r rune) bool { return r == '_' || ('a' <= r && r <= 'z') || ('A' <= r && r <= 'Z') } + +// lexDurationExpr scans arithmetic expressions within brackets for duration expressions. +func lexDurationExpr(l *Lexer) stateFn { + switch r := l.next(); { + case r == eof: + return l.errorf("unexpected end of input in duration expression") + case r == ']': + l.emit(RIGHT_BRACKET) + l.bracketOpen = false + l.gotColon = false + return lexStatements + case r == ':': + l.emit(COLON) + if !l.gotDuration { + return l.errorf("unexpected colon before duration in duration expression") + } + if l.gotColon { + return l.errorf("unexpected repeated colon in duration expression") + } + l.gotColon = true + return lexDurationExpr + case r == '(': + l.emit(LEFT_PAREN) + l.parenDepth++ + return lexDurationExpr + case r == ')': + l.emit(RIGHT_PAREN) + l.parenDepth-- + if l.parenDepth < 0 { + return l.errorf("unexpected right parenthesis %q", r) + } + return lexDurationExpr + case isSpace(r): + skipSpaces(l) + return lexDurationExpr + case r == '+': + l.emit(ADD) + return lexDurationExpr + case r == '-': + l.emit(SUB) + return lexDurationExpr + case r == '*': + l.emit(MUL) + return lexDurationExpr + case r == '/': + l.emit(DIV) + return lexDurationExpr + case r == '%': + l.emit(MOD) + return lexDurationExpr + case r == '^': + l.emit(POW) + return lexDurationExpr + case isDigit(r) || (r == '.' 
&& isDigit(l.peek())): + l.backup() + l.gotDuration = true + return lexNumberOrDuration + default: + return l.errorf("unexpected character in duration expression: %q", r) + } +} diff --git a/promql/parser/lex_test.go b/promql/parser/lex_test.go index ffcfc8aac9..f86f282089 100644 --- a/promql/parser/lex_test.go +++ b/promql/parser/lex_test.go @@ -951,6 +951,10 @@ var tests = []struct { input: `test:name{on!~"bar"}[:4s]`, fail: true, }, + { + input: `test:name{on!~"bar"}[1s:1s:1s]`, + fail: true, + }, }, }, } diff --git a/promql/parser/parse.go b/promql/parser/parse.go index 5ace332d71..5cf85ea350 100644 --- a/promql/parser/parse.go +++ b/promql/parser/parse.go @@ -39,6 +39,9 @@ var parserPool = sync.Pool{ }, } +// ExperimentalDurationExpr is a flag to enable experimental duration expression parsing. +var ExperimentalDurationExpr bool + type Parser interface { ParseExpr() (Expr, error) Close() @@ -881,9 +884,6 @@ func parseDuration(ds string) (time.Duration, error) { if err != nil { return 0, err } - if dur == 0 { - return 0, errors.New("duration must be greater than 0") - } return time.Duration(dur), nil } @@ -939,11 +939,13 @@ func (p *parser) newMetricNameMatcher(value Item) *labels.Matcher { // addOffset is used to set the offset in the generated parser. func (p *parser) addOffset(e Node, offset time.Duration) { var orgoffsetp *time.Duration + var orgoffsetexprp *DurationExpr var endPosp *posrange.Pos switch s := e.(type) { case *VectorSelector: orgoffsetp = &s.OriginalOffset + orgoffsetexprp = s.OriginalOffsetExpr endPosp = &s.PosRange.End case *MatrixSelector: vs, ok := s.VectorSelector.(*VectorSelector) @@ -952,9 +954,11 @@ func (p *parser) addOffset(e Node, offset time.Duration) { return } orgoffsetp = &vs.OriginalOffset + orgoffsetexprp = vs.OriginalOffsetExpr endPosp = &s.EndPos case *SubqueryExpr: orgoffsetp = &s.OriginalOffset + orgoffsetexprp = s.OriginalOffsetExpr endPosp = &s.EndPos default: p.addParseErrf(e.PositionRange(), "offset modifier must be preceded by an instant vector selector or range vector selector or a subquery") @@ -963,7 +967,7 @@ func (p *parser) addOffset(e Node, offset time.Duration) { // it is already ensured by parseDuration func that there never will be a zero offset modifier switch { - case *orgoffsetp != 0: + case *orgoffsetp != 0 || orgoffsetexprp != nil: p.addParseErrf(e.PositionRange(), "offset may not be set multiple times") case orgoffsetp != nil: *orgoffsetp = offset @@ -972,6 +976,45 @@ func (p *parser) addOffset(e Node, offset time.Duration) { *endPosp = p.lastClosing } +// addOffsetExpr is used to set the offset expression in the generated parser. 
+func (p *parser) addOffsetExpr(e Node, expr *DurationExpr) { + var orgoffsetp *time.Duration + var orgoffsetexprp **DurationExpr + var endPosp *posrange.Pos + + switch s := e.(type) { + case *VectorSelector: + orgoffsetp = &s.OriginalOffset + orgoffsetexprp = &s.OriginalOffsetExpr + endPosp = &s.PosRange.End + case *MatrixSelector: + vs, ok := s.VectorSelector.(*VectorSelector) + if !ok { + p.addParseErrf(e.PositionRange(), "ranges only allowed for vector selectors") + return + } + orgoffsetp = &vs.OriginalOffset + orgoffsetexprp = &vs.OriginalOffsetExpr + endPosp = &s.EndPos + case *SubqueryExpr: + orgoffsetp = &s.OriginalOffset + orgoffsetexprp = &s.OriginalOffsetExpr + endPosp = &s.EndPos + default: + p.addParseErrf(e.PositionRange(), "offset modifier must be preceded by an instant vector selector or range vector selector or a subquery") + return + } + + switch { + case *orgoffsetp != 0 || *orgoffsetexprp != nil: + p.addParseErrf(e.PositionRange(), "offset may not be set multiple times") + case orgoffsetexprp != nil: + *orgoffsetexprp = expr + } + + *endPosp = p.lastClosing +} + // setTimestamp is used to set the timestamp from the @ modifier in the generated parser. func (p *parser) setTimestamp(e Node, ts float64) { if math.IsInf(ts, -1) || math.IsInf(ts, 1) || math.IsNaN(ts) || @@ -1045,6 +1088,12 @@ func (p *parser) getAtModifierVars(e Node) (**int64, *ItemType, *posrange.Pos, b return timestampp, preprocp, endPosp, true } +func (p *parser) experimentalDurationExpr(e Expr) { + if !ExperimentalDurationExpr { + p.addParseErrf(e.PositionRange(), "experimental duration expression is not enabled") + } +} + func MustLabelMatcher(mt labels.MatchType, name, val string) *labels.Matcher { m, err := labels.NewMatcher(mt, name, val) if err != nil { diff --git a/promql/parser/parse_test.go b/promql/parser/parse_test.go index 06f41c3f77..a09ccea9d6 100644 --- a/promql/parser/parse_test.go +++ b/promql/parser/parse_test.go @@ -2337,12 +2337,12 @@ var testExpr = []struct { { input: `foo[]`, fail: true, - errMsg: "bad number or duration syntax: \"\"", + errMsg: "unexpected \"]\" in subquery selector, expected number or duration", }, { input: `foo[-1]`, fail: true, - errMsg: "bad number or duration syntax: \"\"", + errMsg: "duration must be greater than 0", }, { input: `some_metric[5m] OFFSET 1mm`, @@ -3091,7 +3091,7 @@ var testExpr = []struct { { input: `foo{bar="baz"}[`, fail: true, - errMsg: `1:16: parse error: bad number or duration syntax: ""`, + errMsg: `unexpected end of input in duration expression`, }, { input: `foo{bar="baz"}[10m:6s]`, @@ -3946,6 +3946,304 @@ var testExpr = []struct { }, }, }, + { + input: `foo[11s+10s-5*2^2]`, + expected: &MatrixSelector{ + VectorSelector: &VectorSelector{ + Name: "foo", + LabelMatchers: []*labels.Matcher{ + MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), + }, + PosRange: posrange.PositionRange{ + Start: 0, + End: 3, + }, + }, + RangeExpr: &DurationExpr{ + Op: SUB, + LHS: &DurationExpr{ + Op: ADD, + LHS: &NumberLiteral{ + Val: 11, + PosRange: posrange.PositionRange{ + Start: 4, + End: 7, + }, + Duration: true, + }, + RHS: &NumberLiteral{ + Val: 10, + PosRange: posrange.PositionRange{ + Start: 8, + End: 11, + }, + Duration: true, + }, + }, + RHS: &DurationExpr{ + Op: MUL, + LHS: &NumberLiteral{Val: 5, PosRange: posrange.PositionRange{Start: 12, End: 13}}, + RHS: &DurationExpr{ + Op: POW, + LHS: &NumberLiteral{Val: 2, PosRange: posrange.PositionRange{Start: 14, End: 15}}, + RHS: &NumberLiteral{Val: 2, PosRange: 
posrange.PositionRange{Start: 16, End: 17}}, + }, + }, + }, + EndPos: 18, + }, + }, + { + input: `foo[-(10s-5s)+20s]`, + expected: &MatrixSelector{ + VectorSelector: &VectorSelector{ + Name: "foo", + LabelMatchers: []*labels.Matcher{ + MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), + }, + PosRange: posrange.PositionRange{ + Start: 0, + End: 3, + }, + }, + RangeExpr: &DurationExpr{ + Op: ADD, + LHS: &DurationExpr{ + Op: SUB, + StartPos: 4, + RHS: &DurationExpr{ + Op: SUB, + LHS: &NumberLiteral{ + Val: 10, + PosRange: posrange.PositionRange{ + Start: 6, + End: 9, + }, + Duration: true, + }, + RHS: &NumberLiteral{ + Val: 5, + PosRange: posrange.PositionRange{ + Start: 10, + End: 12, + }, + Duration: true, + }, + Wrapped: true, + }, + }, + RHS: &NumberLiteral{ + Val: 20, + PosRange: posrange.PositionRange{ + Start: 14, + End: 17, + }, + Duration: true, + }, + }, + EndPos: 18, + }, + }, + { + input: `foo[-10s+15s]`, + expected: &MatrixSelector{ + VectorSelector: &VectorSelector{ + Name: "foo", + LabelMatchers: []*labels.Matcher{ + MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), + }, + PosRange: posrange.PositionRange{ + Start: 0, + End: 3, + }, + }, + RangeExpr: &DurationExpr{ + Op: ADD, + LHS: &NumberLiteral{ + Val: -10, + PosRange: posrange.PositionRange{ + Start: 4, + End: 8, + }, + Duration: true, + }, + RHS: &NumberLiteral{ + Val: 15, + PosRange: posrange.PositionRange{ + Start: 9, + End: 12, + }, + Duration: true, + }, + }, + EndPos: 13, + }, + }, + { + input: `foo[4s+4s:1s*2] offset (5s-8)`, + expected: &SubqueryExpr{ + Expr: &VectorSelector{ + Name: "foo", + LabelMatchers: []*labels.Matcher{ + MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), + }, + PosRange: posrange.PositionRange{ + Start: 0, + End: 3, + }, + }, + RangeExpr: &DurationExpr{ + Op: ADD, + LHS: &NumberLiteral{ + Val: 4, + PosRange: posrange.PositionRange{ + Start: 4, + End: 6, + }, + Duration: true, + }, + RHS: &NumberLiteral{ + Val: 4, + PosRange: posrange.PositionRange{ + Start: 7, + End: 9, + }, + Duration: true, + }, + }, + StepExpr: &DurationExpr{ + Op: MUL, + LHS: &NumberLiteral{ + Val: 1, + PosRange: posrange.PositionRange{ + Start: 10, + End: 12, + }, + Duration: true, + }, + RHS: &NumberLiteral{ + Val: 2, + PosRange: posrange.PositionRange{ + Start: 13, + End: 14, + }, + }, + }, + OriginalOffsetExpr: &DurationExpr{ + Op: SUB, + LHS: &NumberLiteral{ + Val: 5, + PosRange: posrange.PositionRange{ + Start: 24, + End: 26, + }, + Duration: true, + }, + RHS: &NumberLiteral{ + Val: 8, + PosRange: posrange.PositionRange{ + Start: 27, + End: 28, + }, + }, + Wrapped: true, + }, + EndPos: 29, + }, + }, + { + input: `foo offset 5s-8`, + expected: &BinaryExpr{ + Op: SUB, + LHS: &VectorSelector{ + Name: "foo", + OriginalOffset: 5 * time.Second, + LabelMatchers: []*labels.Matcher{ + MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), + }, + PosRange: posrange.PositionRange{ + Start: 0, + End: 13, + }, + }, + RHS: &NumberLiteral{ + Val: 8, + PosRange: posrange.PositionRange{ + Start: 14, + End: 15, + }, + }, + }, + }, + { + input: `rate(foo[2m+2m])`, + expected: &Call{ + Func: MustGetFunction("rate"), + Args: Expressions{ + &MatrixSelector{ + VectorSelector: &VectorSelector{ + Name: "foo", + LabelMatchers: []*labels.Matcher{ + MustLabelMatcher(labels.MatchEqual, model.MetricNameLabel, "foo"), + }, + PosRange: posrange.PositionRange{ + Start: 5, + End: 8, + }, + }, + RangeExpr: &DurationExpr{ + Op: ADD, + LHS: &NumberLiteral{ + Val: 120, + PosRange: 
posrange.PositionRange{ + Start: 9, + End: 11, + }, + Duration: true, + }, + RHS: &NumberLiteral{ + Val: 120, + PosRange: posrange.PositionRange{ + Start: 12, + End: 14, + }, + Duration: true, + }, + }, + EndPos: 15, + }, + }, + PosRange: posrange.PositionRange{ + Start: 0, + End: 16, + }, + }, + }, + { + input: `foo[5s/0d]`, + fail: true, + errMsg: `division by zero`, + }, + { + input: `foo offset (4d/0)`, + fail: true, + errMsg: `division by zero`, + }, + { + input: `foo[5s%0d]`, + fail: true, + errMsg: `modulo by zero`, + }, + { + input: `foo offset 9.5e10`, + fail: true, + errMsg: `duration out of range`, + }, + { + input: `foo[9.5e10]`, + fail: true, + errMsg: `duration out of range`, + }, } func makeInt64Pointer(val int64) *int64 { @@ -3965,8 +4263,11 @@ func readable(s string) string { func TestParseExpressions(t *testing.T) { // Enable experimental functions testing. EnableExperimentalFunctions = true + // Enable experimental duration expression parsing. + ExperimentalDurationExpr = true t.Cleanup(func() { EnableExperimentalFunctions = false + ExperimentalDurationExpr = false }) for _, test := range testExpr { diff --git a/promql/parser/prettier.go b/promql/parser/prettier.go index 9870d6da74..568e65eab5 100644 --- a/promql/parser/prettier.go +++ b/promql/parser/prettier.go @@ -79,6 +79,22 @@ func (e *BinaryExpr) Pretty(level int) string { return fmt.Sprintf("%s\n%s%s%s%s\n%s", e.LHS.Pretty(level+1), indent(level), e.Op, returnBool, matching, e.RHS.Pretty(level+1)) } +func (e *DurationExpr) Pretty(int) string { + var s string + fmt.Println("e.LHS", e.LHS) + fmt.Println("e.RHS", e.RHS) + if e.LHS == nil { + // This is a unary negative duration expression. + s = fmt.Sprintf("%s %s", e.Op, e.RHS.Pretty(0)) + } else { + s = fmt.Sprintf("%s %s %s", e.LHS.Pretty(0), e.Op, e.RHS.Pretty(0)) + } + if e.Wrapped { + s = fmt.Sprintf("(%s)", s) + } + return s +} + func (e *Call) Pretty(level int) string { s := indent(level) if !needsSplit(e) { diff --git a/promql/parser/prettier_test.go b/promql/parser/prettier_test.go index 16f4906f62..ea9a7a1a26 100644 --- a/promql/parser/prettier_test.go +++ b/promql/parser/prettier_test.go @@ -668,3 +668,41 @@ func TestUnaryPretty(t *testing.T) { }) } } + +func TestDurationExprPretty(t *testing.T) { + // Enable experimental duration expression parsing. + ExperimentalDurationExpr = true + t.Cleanup(func() { + ExperimentalDurationExpr = false + }) + maxCharactersPerLine = 10 + inputs := []struct { + in, out string + }{ + { + in: `rate(foo[2*1h])`, + out: `rate( + foo[2 * 1h] +)`, + }, + { + in: `rate(foo[2*1h])`, + out: `rate( + foo[2 * 1h] +)`, + }, + { + in: `rate(foo[-5m+35m])`, + out: `rate( + foo[-5m + 35m] +)`, + }, + } + for _, test := range inputs { + t.Run(test.in, func(t *testing.T) { + expr, err := ParseExpr(test.in) + require.NoError(t, err) + require.Equal(t, test.out, Prettify(expr)) + }) + } +} diff --git a/promql/parser/printer.go b/promql/parser/printer.go index 6f234a0290..dc22f8fb52 100644 --- a/promql/parser/printer.go +++ b/promql/parser/printer.go @@ -146,6 +146,24 @@ func (node *BinaryExpr) getMatchingStr() string { return matching } +func (node *DurationExpr) String() string { + var expr string + if node.LHS == nil { + // This is a unary negative duration expression. 
+ expr = fmt.Sprintf("%s%s", node.Op, node.RHS) + } else { + expr = fmt.Sprintf("%s %s %s", node.LHS, node.Op, node.RHS) + } + if node.Wrapped { + return fmt.Sprintf("(%s)", expr) + } + return expr +} + +func (node *DurationExpr) ShortString() string { + return node.Op.String() +} + func (node *Call) String() string { return fmt.Sprintf("%s(%s)", node.Func.Name, node.Args) } @@ -159,6 +177,8 @@ func (node *MatrixSelector) atOffset() (string, string) { vecSelector := node.VectorSelector.(*VectorSelector) offset := "" switch { + case vecSelector.OriginalOffsetExpr != nil: + offset = fmt.Sprintf(" offset %s", vecSelector.OriginalOffsetExpr) case vecSelector.OriginalOffset > time.Duration(0): offset = fmt.Sprintf(" offset %s", model.Duration(vecSelector.OriginalOffset)) case vecSelector.OriginalOffset < time.Duration(0): @@ -181,21 +201,30 @@ func (node *MatrixSelector) String() string { // Copy the Vector selector before changing the offset vecSelector := *node.VectorSelector.(*VectorSelector) // Do not print the @ and offset twice. - offsetVal, atVal, preproc := vecSelector.OriginalOffset, vecSelector.Timestamp, vecSelector.StartOrEnd + offsetVal, offsetExprVal, atVal, preproc := vecSelector.OriginalOffset, vecSelector.OriginalOffsetExpr, vecSelector.Timestamp, vecSelector.StartOrEnd vecSelector.OriginalOffset = 0 + vecSelector.OriginalOffsetExpr = nil vecSelector.Timestamp = nil vecSelector.StartOrEnd = 0 - str := fmt.Sprintf("%s[%s]%s%s", vecSelector.String(), model.Duration(node.Range), at, offset) + rangeStr := model.Duration(node.Range).String() + if node.RangeExpr != nil { + rangeStr = node.RangeExpr.String() + } + str := fmt.Sprintf("%s[%s]%s%s", vecSelector.String(), rangeStr, at, offset) - vecSelector.OriginalOffset, vecSelector.Timestamp, vecSelector.StartOrEnd = offsetVal, atVal, preproc + vecSelector.OriginalOffset, vecSelector.OriginalOffsetExpr, vecSelector.Timestamp, vecSelector.StartOrEnd = offsetVal, offsetExprVal, atVal, preproc return str } func (node *MatrixSelector) ShortString() string { at, offset := node.atOffset() - return fmt.Sprintf("[%s]%s%s", model.Duration(node.Range), at, offset) + rangeStr := model.Duration(node.Range).String() + if node.RangeExpr != nil { + rangeStr = node.RangeExpr.String() + } + return fmt.Sprintf("[%s]%s%s", rangeStr, at, offset) } func (node *SubqueryExpr) String() string { @@ -211,9 +240,13 @@ func (node *SubqueryExpr) getSubqueryTimeSuffix() string { step := "" if node.Step != 0 { step = model.Duration(node.Step).String() + } else if node.StepExpr != nil { + step = node.StepExpr.String() } offset := "" switch { + case node.OriginalOffsetExpr != nil: + offset = fmt.Sprintf(" offset %s", node.OriginalOffsetExpr) case node.OriginalOffset > time.Duration(0): offset = fmt.Sprintf(" offset %s", model.Duration(node.OriginalOffset)) case node.OriginalOffset < time.Duration(0): @@ -228,10 +261,20 @@ func (node *SubqueryExpr) getSubqueryTimeSuffix() string { case node.StartOrEnd == END: at = " @ end()" } - return fmt.Sprintf("[%s:%s]%s%s", model.Duration(node.Range), step, at, offset) + rangeStr := model.Duration(node.Range).String() + if node.RangeExpr != nil { + rangeStr = node.RangeExpr.String() + } + return fmt.Sprintf("[%s:%s]%s%s", rangeStr, step, at, offset) } func (node *NumberLiteral) String() string { + if node.Duration { + if node.Val < 0 { + return fmt.Sprintf("-%s", model.Duration(-node.Val*1e9).String()) + } + return model.Duration(node.Val * 1e9).String() + } return strconv.FormatFloat(node.Val, 'f', -1, 64) } @@ -265,6 +308,8 @@ 
func (node *VectorSelector) String() string { } offset := "" switch { + case node.OriginalOffsetExpr != nil: + offset = fmt.Sprintf(" offset %s", node.OriginalOffsetExpr) + case node.OriginalOffset > time.Duration(0): + offset = fmt.Sprintf(" offset %s", model.Duration(node.OriginalOffset)) + case node.OriginalOffset < time.Duration(0): diff --git a/promql/promqltest/test.go b/promql/promqltest/test.go index 84ca16e8ab..7e781121b8 100644 --- a/promql/promqltest/test.go +++ b/promql/promqltest/test.go @@ -117,8 +117,12 @@ func RunBuiltinTests(t TBRun, engine promql.QueryEngine) { // RunBuiltinTestsWithStorage runs an acceptance test suite against the provided engine and storage. func RunBuiltinTestsWithStorage(t TBRun, engine promql.QueryEngine, newStorage func(testutil.T) storage.Storage) { - t.Cleanup(func() { parser.EnableExperimentalFunctions = false }) + t.Cleanup(func() { + parser.EnableExperimentalFunctions = false + parser.ExperimentalDurationExpr = false + }) parser.EnableExperimentalFunctions = true + parser.ExperimentalDurationExpr = true files, err := fs.Glob(testsFs, "*/*.test") require.NoError(t, err) @@ -1501,6 +1505,9 @@ type LazyLoaderOpts struct { // Prometheus v2.33). They can still be disabled here for legacy and // other uses. EnableAtModifier, EnableNegativeOffset bool + // Currently defaults to false, matches the "promql-delayed-name-removal" + // feature flag. + EnableDelayedNameRemoval bool } // NewLazyLoader returns an initialized empty LazyLoader. @@ -1563,7 +1570,7 @@ func (ll *LazyLoader) clear() error { NoStepSubqueryIntervalFn: func(int64) int64 { return durationMilliseconds(ll.SubqueryInterval) }, EnableAtModifier: ll.opts.EnableAtModifier, EnableNegativeOffset: ll.opts.EnableNegativeOffset, - EnableDelayedNameRemoval: true, + EnableDelayedNameRemoval: ll.opts.EnableDelayedNameRemoval, } ll.queryEngine = promql.NewEngine(opts) diff --git a/promql/promqltest/testdata/duration_expression.test b/promql/promqltest/testdata/duration_expression.test new file mode 100644 index 0000000000..251856241a --- /dev/null +++ b/promql/promqltest/testdata/duration_expression.test @@ -0,0 +1,121 @@ +# Test for different duration expression formats in range selectors. +# This tests the parser's ability to handle various duration expressions. + +# Set up a basic counter that increases steadily.
+load 5m + http_requests{path="/foo"} 1 2 3 0 1 0 0 1 2 0 + http_requests{path="/bar"} 1 2 3 4 5 1 2 3 4 5 + http_requests{path="/biz"} 0 0 0 0 0 1 1 1 1 1 + +# Test basic duration with unit: [30m] +eval instant at 50m changes(http_requests[30m]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test addition in duration: [26m+4m] +eval instant at 50m changes(http_requests[26m+4m]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test addition with 0 in duration: [30m+0s] +eval instant at 50m changes(http_requests[30m+0s]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test raw seconds: [1800] +eval instant at 50m changes(http_requests[1800]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test seconds with multiplication: [60*30] +eval instant at 50m changes(http_requests[60*30]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test minutes with multiplication: [2m*15] +eval instant at 50m changes(http_requests[2m*15]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test complex expression with parentheses: [2m*(10+5)] +eval instant at 50m changes(http_requests[2m*(10+5)]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test mixed units: [29m+60s] +eval instant at 50m changes(http_requests[29m+60s]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test nested parentheses: [24m+((1.5*2m)+2m)] +eval instant at 50m changes(http_requests[24m+((1.5*2m)+2m)]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test start with -: [-5m+35m] +eval instant at 50m changes(http_requests[-5m+35m]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test division: [1h/2] +eval instant at 50m changes(http_requests[1h/2]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test modulo: [1h30m % 1h] +eval instant at 50m changes(http_requests[1h30m % 1h]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test modulo and calculation: [30m1s-30m1s % 1m] +eval instant at 50m changes(http_requests[30m1s-30m1s % 1m]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +# Test combination of operations: [(9m30s+30s)*3] +eval instant at 50m changes(http_requests[(9m30s+30s)*3]) + {path="/foo"} 3 + {path="/bar"} 4 + {path="/biz"} 0 + +clear + +load 10s + metric1_total 0+1x1000 + +# In subquery expression. +eval instant at 1000s sum_over_time(metric1_total[29s+1s:5s+5s]) + {} 297 + +# Test complex expressions in subquery ranges. +eval instant at 1000s sum_over_time(metric1_total[29s+1s:((((8 - 2) / 3) * 7s) % 4) + 8000ms]) + {} 297 + +# Test complex expressions in offset ranges. +eval instant at 1200s sum_over_time(metric1_total[29s+1s:20*500ms] offset (20*(((((8 - 2) / 3) * 7s) % 4) + 8000ms))) + {} 297 + +# Test complex expressions in offset ranges with negative offset. 
+eval instant at 800s sum_over_time(metric1_total[29s+1s:20*500ms] offset -(20*(((((8 - 2) / 3) * 7s) % 4) + 8000ms))) + {} 297 + +# Test offset precedence with parentheses: offset (100 + 2) +eval instant at 1000s metric1_total offset (100 + 2) + {__name__="metric1_total"} 89 + +# Test offset precedence without parentheses: offset 100 + 2 +eval instant at 1000s metric1_total offset 100 + 2 + {} 92 \ No newline at end of file diff --git a/promql/promqltest/testdata/histograms.test b/promql/promqltest/testdata/histograms.test index 8ab23640af..45492d89f3 100644 --- a/promql/promqltest/testdata/histograms.test +++ b/promql/promqltest/testdata/histograms.test @@ -95,24 +95,52 @@ eval instant at 50m histogram_avg(testhistogram3) # Test histogram_stddev. This has no classic equivalent. eval instant at 50m histogram_stddev(testhistogram3) - {start="positive"} 2.8189265757336734 - {start="negative"} 4.182715937754936 + {start="positive"} 2.7435461458749795 + {start="negative"} 4.187667907081458 # Test histogram_stdvar. This has no classic equivalent. eval instant at 50m histogram_stdvar(testhistogram3) - {start="positive"} 7.946347039377573 - {start="negative"} 17.495112615949154 + {start="positive"} 7.527045454545455 + {start="negative"} 17.5365625 # Test histogram_fraction. +# +eval instant at 50m histogram_fraction(0, 4, testhistogram2) + {} 0.6666666666666666 + +eval instant at 50m histogram_fraction(0, 4, testhistogram2_bucket) + {} 0.6666666666666666 + +eval instant at 50m histogram_fraction(0, 6, testhistogram2) + {} 1 + +eval instant at 50m histogram_fraction(0, 6, testhistogram2_bucket) + {} 1 + +eval instant at 50m histogram_fraction(0, 3.5, testhistogram2) + {} 0.5833333333333334 + +eval instant at 50m histogram_fraction(0, 3.5, testhistogram2_bucket) + {} 0.5833333333333334 + eval instant at 50m histogram_fraction(0, 0.2, testhistogram3) {start="positive"} 0.6363636363636364 {start="negative"} 0 +eval instant at 50m histogram_fraction(0, 0.2, testhistogram3_bucket) + {start="positive"} 0.6363636363636364 + {start="negative"} 0 + eval instant at 50m histogram_fraction(0, 0.2, rate(testhistogram3[10m])) {start="positive"} 0.6363636363636364 {start="negative"} 0 + +eval instant at 50m histogram_fraction(0, 0.2, rate(testhistogram3_bucket[10m])) + {start="positive"} 0.6363636363636364 + {start="negative"} 0 + # In the classic histogram, we can access the corresponding bucket (if # it exists) and divide by the count to get the same result. 
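A worked note on the two offset precedence evals added in duration_expression.test above (an illustrative reading of results already present in this change, not extra test content): without parentheses, offset 100 binds to the vector selector and the trailing + 2 is ordinary scalar arithmetic, which is also why the metric name is dropped in the second result.

# Given the fixture load 10s with metric1_total 0+1x1000:
#   metric1_total offset (100 + 2)   selector offset is 102s; at t=1000s the freshest sample at or before 898s is the one at 890s, value 89, and the metric name is kept: {__name__="metric1_total"} 89
#   metric1_total offset 100 + 2     parsed as (metric1_total offset 100) + 2; the sample at 900s has value 90, plus 2 gives 92, and the binary operation drops the metric name: {} 92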
diff --git a/promql/promqltest/testdata/native_histograms.test b/promql/promqltest/testdata/native_histograms.test index 751039e029..edca4eec4b 100644 --- a/promql/promqltest/testdata/native_histograms.test +++ b/promql/promqltest/testdata/native_histograms.test @@ -337,7 +337,7 @@ load 10m histogram_stddev_stdvar_3 {{schema:3 count:7 sum:62 z_bucket:1 buckets:[0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 1 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 ] n_buckets:[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 ]}}x1 eval instant at 10m histogram_stddev(histogram_stddev_stdvar_3) - {} 42.947236400258 + {} 42.94723640026 eval instant at 10m histogram_stdvar(histogram_stddev_stdvar_3) {} 1844.4651144196398 diff --git a/promql/promqltest/testdata/subquery.test b/promql/promqltest/testdata/subquery.test index 377ee1e5ce..8c7c178b85 100644 --- a/promql/promqltest/testdata/subquery.test +++ b/promql/promqltest/testdata/subquery.test @@ -150,3 +150,10 @@ eval instant at 10m increase(native_histogram[10m:3m]) # by the sub-query multiple times. eval instant at 10m increase(native_histogram[10m:15s]) {} {{count:30.769230769230766 sum:30.769230769230766}} + +# When range < resolution and the first evaluation time is out of range. +load 5m + foo 3+0x10 + +eval instant at 12m min_over_time((topk(1, foo))[1m:5m]) + #empty diff --git a/promql/quantile.go b/promql/quantile.go index f3af82487c..f21914cb94 100644 --- a/promql/quantile.go +++ b/promql/quantile.go @@ -448,6 +448,84 @@ func HistogramFraction(lower, upper float64, h *histogram.FloatHistogram) float6 return (upperRank - lowerRank) / h.Count } +// BucketFraction is a version of HistogramFraction for classic histograms. +func BucketFraction(lower, upper float64, buckets Buckets) float64 { + slices.SortFunc(buckets, func(a, b Bucket) int { + // We don't expect the bucket boundary to be a NaN. + if a.UpperBound < b.UpperBound { + return -1 + } + if a.UpperBound > b.UpperBound { + return +1 + } + return 0 + }) + if !math.IsInf(buckets[len(buckets)-1].UpperBound, +1) { + return math.NaN() + } + buckets = coalesceBuckets(buckets) + + count := buckets[len(buckets)-1].Count + if count == 0 || math.IsNaN(lower) || math.IsNaN(upper) { + return math.NaN() + } + if lower >= upper { + return 0 + } + + var ( + rank, lowerRank, upperRank float64 + lowerSet, upperSet bool + ) + for i, b := range buckets { + lowerBound := math.Inf(-1) + if i > 0 { + lowerBound = buckets[i-1].UpperBound + } + upperBound := b.UpperBound + + interpolateLinearly := func(v float64) float64 { + return rank + (b.Count-rank)*(v-lowerBound)/(upperBound-lowerBound) + } + + if !lowerSet && lowerBound >= lower { + // We have hit the lower value at the lower bucket boundary. + lowerRank = rank + lowerSet = true + } + if !upperSet && lowerBound >= upper { + // We have hit the upper value at the lower bucket boundary. + upperRank = rank + upperSet = true + } + if lowerSet && upperSet { + break + } + if !lowerSet && lowerBound < lower && upperBound > lower { + // The lower value is in this bucket. + lowerRank = interpolateLinearly(lower) + lowerSet = true + } + if !upperSet && lowerBound < upper && upperBound > upper { + // The upper value is in this bucket. 
+ upperRank = interpolateLinearly(upper) + upperSet = true + } + if lowerSet && upperSet { + break + } + rank = b.Count + } + if !lowerSet || lowerRank > count { + lowerRank = count + } + if !upperSet || upperRank > count { + upperRank = count + } + + return (upperRank - lowerRank) / count +} + // coalesceBuckets merges buckets with the same upper bound. // // The input buckets must be sorted. diff --git a/storage/remote/client.go b/storage/remote/client.go index f00b3e7331..68891f659e 100644 --- a/storage/remote/client.go +++ b/storage/remote/client.go @@ -384,7 +384,9 @@ func (c *Client) Read(ctx context.Context, query *prompb.Query, sortSeries bool) _ = httpResp.Body.Close() cancel() - return nil, fmt.Errorf("remote server %s returned http status %s: %s", c.urlString, httpResp.Status, string(body)) + errStr := strings.Trim(string(body), "\n") + err := errors.New(errStr) + return nil, fmt.Errorf("remote server %s returned http status %s: %w", c.urlString, httpResp.Status, err) } contentType := httpResp.Header.Get("Content-Type") diff --git a/storage/remote/client_test.go b/storage/remote/client_test.go index 112e96d2b6..a9e312ffc8 100644 --- a/storage/remote/client_test.go +++ b/storage/remote/client_test.go @@ -225,6 +225,7 @@ func TestReadClient(t *testing.T) { expectedSamples [][]model.SamplePair expectedErrorContains string sortSeries bool + unwrap bool }{ { name: "sorted sampled response", @@ -336,6 +337,14 @@ func TestReadClient(t *testing.T) { timeout: 5 * time.Millisecond, expectedErrorContains: "context deadline exceeded: request timed out after 5ms", }, + { + name: "unwrap error", + httpHandler: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { + http.Error(w, "test error", http.StatusBadRequest) + }), + expectedErrorContains: "test error", + unwrap: true, + }, } for _, test := range tests { @@ -366,6 +375,10 @@ func TestReadClient(t *testing.T) { ss, err := c.Read(context.Background(), query, test.sortSeries) if test.expectedErrorContains != "" { require.ErrorContains(t, err, test.expectedErrorContains) + if test.unwrap { + err = errors.Unwrap(err) + require.EqualError(t, err, test.expectedErrorContains) + } return } diff --git a/storage/remote/otlptranslator/prometheusremotewrite/helper.go b/storage/remote/otlptranslator/prometheusremotewrite/helper.go index 0660f8ee5f..09be335a8b 100644 --- a/storage/remote/otlptranslator/prometheusremotewrite/helper.go +++ b/storage/remote/otlptranslator/prometheusremotewrite/helper.go @@ -224,21 +224,19 @@ func createAttributes(resource pcommon.Resource, attributes pcommon.Map, setting return labels } -// isValidAggregationTemporality checks whether an OTel metric has a valid -// aggregation temporality for conversion to a Prometheus metric. 
-func isValidAggregationTemporality(metric pmetric.Metric) bool { +func aggregationTemporality(metric pmetric.Metric) (pmetric.AggregationTemporality, bool, error) { //exhaustive:enforce switch metric.Type() { case pmetric.MetricTypeGauge, pmetric.MetricTypeSummary: - return true + return 0, false, nil case pmetric.MetricTypeSum: - return metric.Sum().AggregationTemporality() == pmetric.AggregationTemporalityCumulative + return metric.Sum().AggregationTemporality(), true, nil case pmetric.MetricTypeHistogram: - return metric.Histogram().AggregationTemporality() == pmetric.AggregationTemporalityCumulative + return metric.Histogram().AggregationTemporality(), true, nil case pmetric.MetricTypeExponentialHistogram: - return metric.ExponentialHistogram().AggregationTemporality() == pmetric.AggregationTemporalityCumulative + return metric.ExponentialHistogram().AggregationTemporality(), true, nil } - return false + return 0, false, fmt.Errorf("could not get aggregation temporality for %s as it has unsupported metric type %s", metric.Name(), metric.Type()) } // addHistogramDataPoints adds OTel histogram data points to the corresponding Prometheus time series diff --git a/storage/remote/otlptranslator/prometheusremotewrite/histograms.go b/storage/remote/otlptranslator/prometheusremotewrite/histograms.go index db26b62925..6a405f104f 100644 --- a/storage/remote/otlptranslator/prometheusremotewrite/histograms.go +++ b/storage/remote/otlptranslator/prometheusremotewrite/histograms.go @@ -37,6 +37,7 @@ const defaultZeroThreshold = 1e-128 // as native histogram samples. func (c *PrometheusConverter) addExponentialHistogramDataPoints(ctx context.Context, dataPoints pmetric.ExponentialHistogramDataPointSlice, resource pcommon.Resource, settings Settings, promName string, + temporality pmetric.AggregationTemporality, ) (annotations.Annotations, error) { var annots annotations.Annotations for x := 0; x < dataPoints.Len(); x++ { @@ -46,7 +47,7 @@ func (c *PrometheusConverter) addExponentialHistogramDataPoints(ctx context.Cont pt := dataPoints.At(x) - histogram, ws, err := exponentialToNativeHistogram(pt) + histogram, ws, err := exponentialToNativeHistogram(pt, temporality) annots.Merge(ws) if err != nil { return annots, err @@ -76,7 +77,7 @@ func (c *PrometheusConverter) addExponentialHistogramDataPoints(ctx context.Cont // exponentialToNativeHistogram translates an OTel Exponential Histogram data point // to a Prometheus Native Histogram. -func exponentialToNativeHistogram(p pmetric.ExponentialHistogramDataPoint) (prompb.Histogram, annotations.Annotations, error) { +func exponentialToNativeHistogram(p pmetric.ExponentialHistogramDataPoint, temporality pmetric.AggregationTemporality) (prompb.Histogram, annotations.Annotations, error) { var annots annotations.Annotations scale := p.Scale() if scale < -4 { @@ -94,17 +95,27 @@ func exponentialToNativeHistogram(p pmetric.ExponentialHistogramDataPoint) (prom pSpans, pDeltas := convertBucketsLayout(p.Positive().BucketCounts().AsRaw(), p.Positive().Offset(), scaleDown, true) nSpans, nDeltas := convertBucketsLayout(p.Negative().BucketCounts().AsRaw(), p.Negative().Offset(), scaleDown, true) + // The counter reset detection must be compatible with Prometheus to + // safely set ResetHint to NO. This is not ensured currently. + // Sending a sample that triggers counter reset but with ResetHint==NO + // would lead to Prometheus panic as it does not double check the hint. + // Thus we're explicitly saying UNKNOWN here, which is always safe. 
+ // TODO: using created time stamp should be accurate, but we + // need to know here if it was used for the detection. + // Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/pull/28663#issuecomment-1810577303 + // Counter reset detection in Prometheus: https://github.com/prometheus/prometheus/blob/f997c72f294c0f18ca13fa06d51889af04135195/tsdb/chunkenc/histogram.go#L232 + resetHint := prompb.Histogram_UNKNOWN + + if temporality == pmetric.AggregationTemporalityDelta { + // If the histogram has delta temporality, set the reset hint to gauge to avoid unnecessary chunk cutting. + // We're in an early phase of implementing delta support (proposal: https://github.com/prometheus/proposals/pull/48/). + // This might be changed to a different hint name as gauge type might be misleading for samples that should be + // summed over time. + resetHint = prompb.Histogram_GAUGE + } + h := prompb.Histogram{ - // The counter reset detection must be compatible with Prometheus to - // safely set ResetHint to NO. This is not ensured currently. - // Sending a sample that triggers counter reset but with ResetHint==NO - // would lead to Prometheus panic as it does not double check the hint. - // Thus we're explicitly saying UNKNOWN here, which is always safe. - // TODO: using created time stamp should be accurate, but we - // need to know here if it was used for the detection. - // Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/pull/28663#issuecomment-1810577303 - // Counter reset detection in Prometheus: https://github.com/prometheus/prometheus/blob/f997c72f294c0f18ca13fa06d51889af04135195/tsdb/chunkenc/histogram.go#L232 - ResetHint: prompb.Histogram_UNKNOWN, + ResetHint: resetHint, Schema: scale, ZeroCount: &prompb.Histogram_ZeroCountInt{ZeroCountInt: p.ZeroCount()}, @@ -242,6 +253,7 @@ func convertBucketsLayout(bucketCounts []uint64, offset, scaleDown int32, adjust func (c *PrometheusConverter) addCustomBucketsHistogramDataPoints(ctx context.Context, dataPoints pmetric.HistogramDataPointSlice, resource pcommon.Resource, settings Settings, promName string, + temporality pmetric.AggregationTemporality, ) (annotations.Annotations, error) { var annots annotations.Annotations @@ -252,7 +264,7 @@ func (c *PrometheusConverter) addCustomBucketsHistogramDataPoints(ctx context.Co pt := dataPoints.At(x) - histogram, ws, err := explicitHistogramToCustomBucketsHistogram(pt) + histogram, ws, err := explicitHistogramToCustomBucketsHistogram(pt, temporality) annots.Merge(ws) if err != nil { return annots, err @@ -281,7 +293,7 @@ func (c *PrometheusConverter) addCustomBucketsHistogramDataPoints(ctx context.Co return annots, nil } -func explicitHistogramToCustomBucketsHistogram(p pmetric.HistogramDataPoint) (prompb.Histogram, annotations.Annotations, error) { +func explicitHistogramToCustomBucketsHistogram(p pmetric.HistogramDataPoint, temporality pmetric.AggregationTemporality) (prompb.Histogram, annotations.Annotations, error) { var annots annotations.Annotations buckets := p.BucketCounts().AsRaw() @@ -289,18 +301,28 @@ func explicitHistogramToCustomBucketsHistogram(p pmetric.HistogramDataPoint) (pr bucketCounts := buckets[offset:] positiveSpans, positiveDeltas := convertBucketsLayout(bucketCounts, int32(offset), 0, false) + // The counter reset detection must be compatible with Prometheus to + // safely set ResetHint to NO. This is not ensured currently. 
+ // Sending a sample that triggers counter reset but with ResetHint==NO + // would lead to Prometheus panic as it does not double check the hint. + // Thus we're explicitly saying UNKNOWN here, which is always safe. + // TODO: using created time stamp should be accurate, but we + // need to know here if it was used for the detection. + // Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/pull/28663#issuecomment-1810577303 + // Counter reset detection in Prometheus: https://github.com/prometheus/prometheus/blob/f997c72f294c0f18ca13fa06d51889af04135195/tsdb/chunkenc/histogram.go#L232 + resetHint := prompb.Histogram_UNKNOWN + + if temporality == pmetric.AggregationTemporalityDelta { + // If the histogram has delta temporality, set the reset hint to gauge to avoid unnecessary chunk cutting. + // We're in an early phase of implementing delta support (proposal: https://github.com/prometheus/proposals/pull/48/). + // This might be changed to a different hint name as gauge type might be misleading for samples that should be + // summed over time. + resetHint = prompb.Histogram_GAUGE + } + // TODO(carrieedwards): Add setting to limit maximum bucket count h := prompb.Histogram{ - // The counter reset detection must be compatible with Prometheus to - // safely set ResetHint to NO. This is not ensured currently. - // Sending a sample that triggers counter reset but with ResetHint==NO - // would lead to Prometheus panic as it does not double check the hint. - // Thus we're explicitly saying UNKNOWN here, which is always safe. - // TODO: using created time stamp should be accurate, but we - // need to know here if it was used for the detection. - // Ref: https://github.com/open-telemetry/opentelemetry-collector-contrib/pull/28663#issuecomment-1810577303 - // Counter reset detection in Prometheus: https://github.com/prometheus/prometheus/blob/f997c72f294c0f18ca13fa06d51889af04135195/tsdb/chunkenc/histogram.go#L232 - ResetHint: prompb.Histogram_UNKNOWN, + ResetHint: resetHint, Schema: histogram.CustomBucketsSchema, PositiveSpans: positiveSpans, diff --git a/storage/remote/otlptranslator/prometheusremotewrite/histograms_test.go b/storage/remote/otlptranslator/prometheusremotewrite/histograms_test.go index 63e453a535..8071b1c93c 100644 --- a/storage/remote/otlptranslator/prometheusremotewrite/histograms_test.go +++ b/storage/remote/otlptranslator/prometheusremotewrite/histograms_test.go @@ -566,7 +566,7 @@ func TestExponentialToNativeHistogram(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { validateExponentialHistogramCount(t, tt.exponentialHist()) // Sanity check. 
- got, annots, err := exponentialToNativeHistogram(tt.exponentialHist()) + got, annots, err := exponentialToNativeHistogram(tt.exponentialHist(), pmetric.AggregationTemporalityCumulative) if tt.wantErrMessage != "" { require.ErrorContains(t, err, tt.wantErrMessage) return @@ -769,6 +769,7 @@ func TestPrometheusConverter_addExponentialHistogramDataPoints(t *testing.T) { ExportCreatedMetric: true, }, otlptranslator.BuildCompliantMetricName(metric, "", true), + pmetric.AggregationTemporalityCumulative, ) require.NoError(t, err) require.Empty(t, annots) @@ -972,7 +973,7 @@ func TestHistogramToCustomBucketsHistogram(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { validateHistogramCount(t, tt.hist()) - got, annots, err := explicitHistogramToCustomBucketsHistogram(tt.hist()) + got, annots, err := explicitHistogramToCustomBucketsHistogram(tt.hist(), pmetric.AggregationTemporalityCumulative) if tt.wantErrMessage != "" { require.ErrorContains(t, err, tt.wantErrMessage) return @@ -1137,6 +1138,7 @@ func TestPrometheusConverter_addCustomBucketsHistogramDataPoints(t *testing.T) { ConvertHistogramsToNHCB: true, }, otlptranslator.BuildCompliantMetricName(metric, "", true), + pmetric.AggregationTemporalityCumulative, ) require.NoError(t, err) diff --git a/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw.go b/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw.go index d2e79e4b6f..79d127bb80 100644 --- a/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw.go +++ b/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw.go @@ -41,6 +41,7 @@ type Settings struct { PromoteResourceAttributes []string KeepIdentifyingResourceAttributes bool ConvertHistogramsToNHCB bool + AllowDeltaTemporality bool } // PrometheusConverter converts from OTel write format to Prometheus remote write format. @@ -91,8 +92,18 @@ func (c *PrometheusConverter) FromMetrics(ctx context.Context, md pmetric.Metric metric := metricSlice.At(k) mostRecentTimestamp = max(mostRecentTimestamp, mostRecentTimestampInMetric(metric)) + temporality, hasTemporality, err := aggregationTemporality(metric) + if err != nil { + errs = multierr.Append(errs, err) + continue + } - if !isValidAggregationTemporality(metric) { + if hasTemporality && + // Cumulative temporality is always valid. + // Delta temporality is also valid if AllowDeltaTemporality is true. + // All other temporality values are invalid. 
+ !(temporality == pmetric.AggregationTemporalityCumulative || + (settings.AllowDeltaTemporality && temporality == pmetric.AggregationTemporalityDelta)) { errs = multierr.Append(errs, fmt.Errorf("invalid temporality and type combination for metric %q", metric.Name())) continue } @@ -144,7 +155,7 @@ func (c *PrometheusConverter) FromMetrics(ctx context.Context, md pmetric.Metric break } if settings.ConvertHistogramsToNHCB { - ws, err := c.addCustomBucketsHistogramDataPoints(ctx, dataPoints, resource, settings, promName) + ws, err := c.addCustomBucketsHistogramDataPoints(ctx, dataPoints, resource, settings, promName, temporality) annots.Merge(ws) if err != nil { errs = multierr.Append(errs, err) @@ -172,6 +183,7 @@ func (c *PrometheusConverter) FromMetrics(ctx context.Context, md pmetric.Metric resource, settings, promName, + temporality, ) annots.Merge(ws) if err != nil { diff --git a/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw_test.go b/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw_test.go index d9f433d713..a222d741d1 100644 --- a/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw_test.go +++ b/storage/remote/otlptranslator/prometheusremotewrite/metrics_to_prw_test.go @@ -19,6 +19,7 @@ package prometheusremotewrite import ( "context" "fmt" + "sort" "testing" "time" @@ -31,6 +32,7 @@ import ( "github.com/prometheus/prometheus/model/labels" "github.com/prometheus/prometheus/prompb" + "github.com/prometheus/prometheus/util/testutil" ) func TestFromMetrics(t *testing.T) { @@ -235,6 +237,461 @@ func TestFromMetrics(t *testing.T) { }) } +func TestTemporality(t *testing.T) { + ts := time.Unix(100, 0) + + tests := []struct { + name string + allowDelta bool + convertToNHCB bool + inputSeries []pmetric.Metric + expectedSeries []prompb.TimeSeries + expectedError string + }{ + { + name: "all cumulative when delta not allowed", + allowDelta: false, + inputSeries: []pmetric.Metric{ + createOtelSum("test_metric_1", pmetric.AggregationTemporalityCumulative, ts), + createOtelSum("test_metric_2", pmetric.AggregationTemporalityCumulative, ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromFloatSeries("test_metric_1", ts), + createPromFloatSeries("test_metric_2", ts), + }, + }, + { + name: "all delta when allowed", + allowDelta: true, + inputSeries: []pmetric.Metric{ + createOtelSum("test_metric_1", pmetric.AggregationTemporalityDelta, ts), + createOtelSum("test_metric_2", pmetric.AggregationTemporalityDelta, ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromFloatSeries("test_metric_1", ts), + createPromFloatSeries("test_metric_2", ts), + }, + }, + { + name: "mixed temporality when delta allowed", + allowDelta: true, + inputSeries: []pmetric.Metric{ + createOtelSum("test_metric_1", pmetric.AggregationTemporalityDelta, ts), + createOtelSum("test_metric_2", pmetric.AggregationTemporalityCumulative, ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromFloatSeries("test_metric_1", ts), + createPromFloatSeries("test_metric_2", ts), + }, + }, + { + name: "delta rejected when not allowed", + allowDelta: false, + inputSeries: []pmetric.Metric{ + createOtelSum("test_metric_1", pmetric.AggregationTemporalityCumulative, ts), + createOtelSum("test_metric_2", pmetric.AggregationTemporalityDelta, ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromFloatSeries("test_metric_1", ts), + }, + expectedError: `invalid temporality and type combination for metric "test_metric_2"`, + }, + { + name: "unspecified temporality not allowed", 
+ allowDelta: true, + inputSeries: []pmetric.Metric{ + createOtelSum("test_metric_1", pmetric.AggregationTemporalityCumulative, ts), + createOtelSum("test_metric_2", pmetric.AggregationTemporalityUnspecified, ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromFloatSeries("test_metric_1", ts), + }, + expectedError: `invalid temporality and type combination for metric "test_metric_2"`, + }, + { + name: "cumulative histogram", + allowDelta: false, + inputSeries: []pmetric.Metric{ + createOtelExponentialHistogram("test_histogram", pmetric.AggregationTemporalityCumulative, ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromNativeHistogramSeries("test_histogram", prompb.Histogram_UNKNOWN, ts), + }, + }, + { + name: "delta histogram when allowed", + allowDelta: true, + inputSeries: []pmetric.Metric{ + createOtelExponentialHistogram("test_histogram_1", pmetric.AggregationTemporalityDelta, ts), + createOtelExponentialHistogram("test_histogram_2", pmetric.AggregationTemporalityCumulative, ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromNativeHistogramSeries("test_histogram_1", prompb.Histogram_GAUGE, ts), + createPromNativeHistogramSeries("test_histogram_2", prompb.Histogram_UNKNOWN, ts), + }, + }, + { + name: "delta histogram when not allowed", + allowDelta: false, + inputSeries: []pmetric.Metric{ + createOtelExponentialHistogram("test_histogram_1", pmetric.AggregationTemporalityDelta, ts), + createOtelExponentialHistogram("test_histogram_2", pmetric.AggregationTemporalityCumulative, ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromNativeHistogramSeries("test_histogram_2", prompb.Histogram_UNKNOWN, ts), + }, + expectedError: `invalid temporality and type combination for metric "test_histogram_1"`, + }, + { + name: "cumulative histogram with buckets", + allowDelta: false, + convertToNHCB: true, + inputSeries: []pmetric.Metric{ + createOtelExplicitHistogram("test_histogram", pmetric.AggregationTemporalityCumulative, ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromNHCBSeries("test_histogram", prompb.Histogram_UNKNOWN, ts), + }, + }, + { + name: "delta histogram with buckets when allowed", + allowDelta: true, + convertToNHCB: true, + inputSeries: []pmetric.Metric{ + createOtelExplicitHistogram("test_histogram_1", pmetric.AggregationTemporalityDelta, ts), + createOtelExplicitHistogram("test_histogram_2", pmetric.AggregationTemporalityCumulative, ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromNHCBSeries("test_histogram_1", prompb.Histogram_GAUGE, ts), + createPromNHCBSeries("test_histogram_2", prompb.Histogram_UNKNOWN, ts), + }, + }, + { + name: "delta histogram with buckets when not allowed", + allowDelta: false, + convertToNHCB: true, + inputSeries: []pmetric.Metric{ + createOtelExplicitHistogram("test_histogram_1", pmetric.AggregationTemporalityDelta, ts), + createOtelExplicitHistogram("test_histogram_2", pmetric.AggregationTemporalityCumulative, ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromNHCBSeries("test_histogram_2", prompb.Histogram_UNKNOWN, ts), + }, + expectedError: `invalid temporality and type combination for metric "test_histogram_1"`, + }, + { + name: "delta histogram with buckets and convertToNHCB=false when not allowed", + allowDelta: false, + convertToNHCB: false, + inputSeries: []pmetric.Metric{ + createOtelExplicitHistogram("test_histogram_1", pmetric.AggregationTemporalityDelta, ts), + createOtelExplicitHistogram("test_histogram_2", pmetric.AggregationTemporalityCumulative, ts), + }, + 
expectedSeries: createPromClassicHistogramSeries("test_histogram_2", ts), + expectedError: `invalid temporality and type combination for metric "test_histogram_1"`, + }, + { + name: "delta histogram with buckets and convertToNHCB=false when allowed", + allowDelta: true, + convertToNHCB: false, + inputSeries: []pmetric.Metric{ + createOtelExplicitHistogram("test_histogram_1", pmetric.AggregationTemporalityDelta, ts), + createOtelExplicitHistogram("test_histogram_2", pmetric.AggregationTemporalityCumulative, ts), + }, + expectedSeries: append( + createPromClassicHistogramSeries("test_histogram_1", ts), + createPromClassicHistogramSeries("test_histogram_2", ts)..., + ), + }, + { + name: "summary does not have temporality", + inputSeries: []pmetric.Metric{ + createOtelSummary("test_summary_1", ts), + }, + expectedSeries: createPromSummarySeries("test_summary_1", ts), + }, + { + name: "gauge does not have temporality", + inputSeries: []pmetric.Metric{ + createOtelGauge("test_gauge_1", ts), + }, + expectedSeries: []prompb.TimeSeries{ + createPromFloatSeries("test_gauge_1", ts), + }, + }, + { + name: "empty metric type errors", + inputSeries: []pmetric.Metric{ + createOtelEmptyType("test_empty"), + }, + expectedSeries: []prompb.TimeSeries{}, + expectedError: `could not get aggregation temporality for test_empty as it has unsupported metric type Empty`, + }, + } + + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + metrics := pmetric.NewMetrics() + rm := metrics.ResourceMetrics().AppendEmpty() + sm := rm.ScopeMetrics().AppendEmpty() + + for _, s := range tc.inputSeries { + s.CopyTo(sm.Metrics().AppendEmpty()) + } + + c := NewPrometheusConverter() + settings := Settings{ + AllowDeltaTemporality: tc.allowDelta, + ConvertHistogramsToNHCB: tc.convertToNHCB, + } + + _, err := c.FromMetrics(context.Background(), metrics, settings) + + if tc.expectedError != "" { + require.EqualError(t, err, tc.expectedError) + } else { + require.NoError(t, err) + } + + series := c.TimeSeries() + + // Sort series to make the test deterministic. 
+ testutil.RequireEqual(t, sortTimeSeries(tc.expectedSeries), sortTimeSeries(series)) + }) + } +} + +func createOtelSum(name string, temporality pmetric.AggregationTemporality, ts time.Time) pmetric.Metric { + metrics := pmetric.NewMetricSlice() + m := metrics.AppendEmpty() + m.SetName(name) + sum := m.SetEmptySum() + sum.SetAggregationTemporality(temporality) + dp := sum.DataPoints().AppendEmpty() + dp.SetDoubleValue(5) + dp.SetTimestamp(pcommon.NewTimestampFromTime(ts)) + dp.Attributes().PutStr("test_label", "test_value") + return m +} + +func createPromFloatSeries(name string, ts time.Time) prompb.TimeSeries { + return prompb.TimeSeries{ + Labels: []prompb.Label{ + {Name: "__name__", Value: name}, + {Name: "test_label", Value: "test_value"}, + }, + Samples: []prompb.Sample{{ + Value: 5, + Timestamp: ts.UnixMilli(), + }}, + } +} + +func createOtelGauge(name string, ts time.Time) pmetric.Metric { + metrics := pmetric.NewMetricSlice() + m := metrics.AppendEmpty() + m.SetName(name) + gauge := m.SetEmptyGauge() + dp := gauge.DataPoints().AppendEmpty() + dp.SetDoubleValue(5) + dp.SetTimestamp(pcommon.NewTimestampFromTime(ts)) + dp.Attributes().PutStr("test_label", "test_value") + return m +} + +func createOtelExponentialHistogram(name string, temporality pmetric.AggregationTemporality, ts time.Time) pmetric.Metric { + metrics := pmetric.NewMetricSlice() + m := metrics.AppendEmpty() + m.SetName(name) + hist := m.SetEmptyExponentialHistogram() + hist.SetAggregationTemporality(temporality) + dp := hist.DataPoints().AppendEmpty() + dp.SetCount(1) + dp.SetSum(5) + dp.SetTimestamp(pcommon.NewTimestampFromTime(ts)) + dp.Attributes().PutStr("test_label", "test_value") + return m +} + +func createPromNativeHistogramSeries(name string, hint prompb.Histogram_ResetHint, ts time.Time) prompb.TimeSeries { + return prompb.TimeSeries{ + Labels: []prompb.Label{ + {Name: "__name__", Value: name}, + {Name: "test_label", Value: "test_value"}, + }, + Histograms: []prompb.Histogram{ + { + Count: &prompb.Histogram_CountInt{CountInt: 1}, + Sum: 5, + Schema: 0, + ZeroThreshold: 1e-128, + ZeroCount: &prompb.Histogram_ZeroCountInt{ZeroCountInt: 0}, + Timestamp: ts.UnixMilli(), + ResetHint: hint, + }, + }, + } +} + +func createOtelExplicitHistogram(name string, temporality pmetric.AggregationTemporality, ts time.Time) pmetric.Metric { + metrics := pmetric.NewMetricSlice() + m := metrics.AppendEmpty() + m.SetName(name) + hist := m.SetEmptyHistogram() + hist.SetAggregationTemporality(temporality) + dp := hist.DataPoints().AppendEmpty() + dp.SetCount(20) + dp.SetSum(30) + dp.BucketCounts().FromRaw([]uint64{10, 10, 0}) + dp.ExplicitBounds().FromRaw([]float64{1, 2}) + dp.SetTimestamp(pcommon.NewTimestampFromTime(ts)) + dp.Attributes().PutStr("test_label", "test_value") + return m +} + +func createPromNHCBSeries(name string, hint prompb.Histogram_ResetHint, ts time.Time) prompb.TimeSeries { + return prompb.TimeSeries{ + Labels: []prompb.Label{ + {Name: "__name__", Value: name}, + {Name: "test_label", Value: "test_value"}, + }, + Histograms: []prompb.Histogram{ + { + Count: &prompb.Histogram_CountInt{CountInt: 20}, + Sum: 30, + Schema: -53, + ZeroThreshold: 0, + ZeroCount: nil, + PositiveSpans: []prompb.BucketSpan{ + { + Length: 3, + }, + }, + PositiveDeltas: []int64{10, 0, -10}, + CustomValues: []float64{1, 2}, + Timestamp: ts.UnixMilli(), + ResetHint: hint, + }, + }, + } +} + +func createPromClassicHistogramSeries(name string, ts time.Time) []prompb.TimeSeries { + return []prompb.TimeSeries{ + { + Labels: []prompb.Label{ + 
{Name: "__name__", Value: name + "_bucket"}, + {Name: "le", Value: "1"}, + {Name: "test_label", Value: "test_value"}, + }, + Samples: []prompb.Sample{{Value: 10, Timestamp: ts.UnixMilli()}}, + }, + { + Labels: []prompb.Label{ + {Name: "__name__", Value: name + "_bucket"}, + {Name: "le", Value: "2"}, + {Name: "test_label", Value: "test_value"}, + }, + Samples: []prompb.Sample{{Value: 20, Timestamp: ts.UnixMilli()}}, + }, + { + Labels: []prompb.Label{ + {Name: "__name__", Value: name + "_bucket"}, + {Name: "le", Value: "+Inf"}, + {Name: "test_label", Value: "test_value"}, + }, + Samples: []prompb.Sample{{Value: 20, Timestamp: ts.UnixMilli()}}, + }, + { + Labels: []prompb.Label{ + {Name: "__name__", Value: name + "_count"}, + {Name: "test_label", Value: "test_value"}, + }, + Samples: []prompb.Sample{{Value: 20, Timestamp: ts.UnixMilli()}}, + }, + { + Labels: []prompb.Label{ + {Name: "__name__", Value: name + "_sum"}, + {Name: "test_label", Value: "test_value"}, + }, + Samples: []prompb.Sample{{Value: 30, Timestamp: ts.UnixMilli()}}, + }, + } +} + +func createOtelSummary(name string, ts time.Time) pmetric.Metric { + metrics := pmetric.NewMetricSlice() + m := metrics.AppendEmpty() + m.SetName(name) + summary := m.SetEmptySummary() + dp := summary.DataPoints().AppendEmpty() + dp.SetCount(9) + dp.SetSum(18) + qv := dp.QuantileValues().AppendEmpty() + qv.SetQuantile(0.5) + qv.SetValue(2) + dp.SetTimestamp(pcommon.NewTimestampFromTime(ts)) + dp.Attributes().PutStr("test_label", "test_value") + return m +} + +func createPromSummarySeries(name string, ts time.Time) []prompb.TimeSeries { + return []prompb.TimeSeries{ + { + Labels: []prompb.Label{ + {Name: "__name__", Value: name + "_sum"}, + {Name: "test_label", Value: "test_value"}, + }, + Samples: []prompb.Sample{{ + Value: 18, + Timestamp: ts.UnixMilli(), + }}, + }, + { + Labels: []prompb.Label{ + {Name: "__name__", Value: name + "_count"}, + {Name: "test_label", Value: "test_value"}, + }, + Samples: []prompb.Sample{{ + Value: 9, + Timestamp: ts.UnixMilli(), + }}, + }, + { + Labels: []prompb.Label{ + {Name: "__name__", Value: name}, + {Name: "quantile", Value: "0.5"}, + {Name: "test_label", Value: "test_value"}, + }, + Samples: []prompb.Sample{{ + Value: 2, + Timestamp: ts.UnixMilli(), + }}, + }, + } +} + +func createOtelEmptyType(name string) pmetric.Metric { + metrics := pmetric.NewMetricSlice() + m := metrics.AppendEmpty() + m.SetName(name) + return m +} + +func sortTimeSeries(series []prompb.TimeSeries) []prompb.TimeSeries { + for i := range series { + sort.Slice(series[i].Labels, func(j, k int) bool { + return series[i].Labels[j].Name < series[i].Labels[k].Name + }) + } + + sort.Slice(series, func(i, j int) bool { + return fmt.Sprint(series[i].Labels) < fmt.Sprint(series[j].Labels) + }) + + return series +} + func BenchmarkPrometheusConverter_FromMetrics(b *testing.B) { for _, resourceAttributeCount := range []int{0, 5, 50} { b.Run(fmt.Sprintf("resource attribute count: %v", resourceAttributeCount), func(b *testing.B) { diff --git a/storage/remote/otlptranslator/prometheusremotewrite/otlp_to_openmetrics_metadata.go b/storage/remote/otlptranslator/prometheusremotewrite/otlp_to_openmetrics_metadata.go index 359fc52522..716a6cd6f9 100644 --- a/storage/remote/otlptranslator/prometheusremotewrite/otlp_to_openmetrics_metadata.go +++ b/storage/remote/otlptranslator/prometheusremotewrite/otlp_to_openmetrics_metadata.go @@ -31,12 +31,27 @@ func otelMetricTypeToPromMetricType(otelMetric pmetric.Metric) prompb.MetricMeta if otelMetric.Sum().IsMonotonic() 
{ metricType = prompb.MetricMetadata_COUNTER } + // We're in an early phase of implementing delta support (proposal: https://github.com/prometheus/proposals/pull/48/) + // We don't have a proper way to flag delta metrics yet, therefore marking the metric type as unknown for now. + if otelMetric.Sum().AggregationTemporality() == pmetric.AggregationTemporalityDelta { + metricType = prompb.MetricMetadata_UNKNOWN + } return metricType case pmetric.MetricTypeHistogram: + // We're in an early phase of implementing delta support (proposal: https://github.com/prometheus/proposals/pull/48/) + // We don't have a proper way to flag delta metrics yet, therefore marking the metric type as unknown for now. + if otelMetric.Histogram().AggregationTemporality() == pmetric.AggregationTemporalityDelta { + return prompb.MetricMetadata_UNKNOWN + } return prompb.MetricMetadata_HISTOGRAM case pmetric.MetricTypeSummary: return prompb.MetricMetadata_SUMMARY case pmetric.MetricTypeExponentialHistogram: + if otelMetric.ExponentialHistogram().AggregationTemporality() == pmetric.AggregationTemporalityDelta { + // We're in an early phase of implementing delta support (proposal: https://github.com/prometheus/proposals/pull/48/) + // We don't have a proper way to flag delta metrics yet, therefore marking the metric type as unknown for now. + return prompb.MetricMetadata_UNKNOWN + } return prompb.MetricMetadata_HISTOGRAM } return prompb.MetricMetadata_UNKNOWN diff --git a/storage/remote/queue_manager.go b/storage/remote/queue_manager.go index 67071f1f8d..87567fb9c6 100644 --- a/storage/remote/queue_manager.go +++ b/storage/remote/queue_manager.go @@ -1669,7 +1669,7 @@ func (s *shards) updateMetrics(_ context.Context, err error, sampleCount, exempl if err != nil { s.qm.logger.Error("non-recoverable error", "failedSampleCount", sampleDiff, "failedHistogramCount", histogramDiff, "failedExemplarCount", exemplarDiff, "err", err) } else if sampleDiff+exemplarDiff+histogramDiff > 0 { - s.qm.logger.Error("we got 2xx status code from the Receiver yet statistics indicate some dat was not written; investigation needed", "failedSampleCount", sampleDiff, "failedHistogramCount", histogramDiff, "failedExemplarCount", exemplarDiff) + s.qm.logger.Error("we got 2xx status code from the Receiver yet statistics indicate some data was not written; investigation needed", "failedSampleCount", sampleDiff, "failedHistogramCount", histogramDiff, "failedExemplarCount", exemplarDiff) } // These counters are used to calculate the dynamic sharding, and as such diff --git a/storage/remote/write_handler.go b/storage/remote/write_handler.go index cbd4225d08..d43edd78bb 100644 --- a/storage/remote/write_handler.go +++ b/storage/remote/write_handler.go @@ -526,20 +526,30 @@ func (h *writeHandler) handleHistogramZeroSample(app storage.Appender, ref stora type OTLPOptions struct { // Convert delta samples to their cumulative equivalent by aggregating in-memory ConvertDelta bool + // Store the raw delta samples as metrics with unknown type (we don't have a proper type for delta yet, therefore + // marking the metric type as unknown for now). + // We're in an early phase of implementing delta support (proposal: https://github.com/prometheus/proposals/pull/48/) + NativeDelta bool } // NewOTLPWriteHandler creates a http.Handler that accepts OTLP write requests and // writes them to the provided appendable. 
func NewOTLPWriteHandler(logger *slog.Logger, _ prometheus.Registerer, appendable storage.Appendable, configFunc func() config.Config, opts OTLPOptions) http.Handler { + if opts.NativeDelta && opts.ConvertDelta { + // This should be validated when iterating through feature flags, so not expected to fail here. + panic("cannot enable native delta ingestion and delta2cumulative conversion at the same time") + } + ex := &rwExporter{ writeHandler: &writeHandler{ logger: logger, appendable: appendable, }, - config: configFunc, + config: configFunc, + allowDeltaTemporality: opts.NativeDelta, } - wh := &otlpWriteHandler{logger: logger, cumul: ex} + wh := &otlpWriteHandler{logger: logger, defaultConsumer: ex} if opts.ConvertDelta { fac := deltatocumulative.NewFactory() @@ -547,7 +557,7 @@ func NewOTLPWriteHandler(logger *slog.Logger, _ prometheus.Registerer, appendabl ID: component.NewID(fac.Type()), TelemetrySettings: component.TelemetrySettings{MeterProvider: noop.NewMeterProvider()}, } - d2c, err := fac.CreateMetrics(context.Background(), set, fac.CreateDefaultConfig(), wh.cumul) + d2c, err := fac.CreateMetrics(context.Background(), set, fac.CreateDefaultConfig(), wh.defaultConsumer) if err != nil { // fac.CreateMetrics directly calls [deltatocumulativeprocessor.createMetricsProcessor], // which only errors if: @@ -563,7 +573,7 @@ func NewOTLPWriteHandler(logger *slog.Logger, _ prometheus.Registerer, appendabl // deltatocumulative does not error on start. see above for panic reasoning panic(err) } - wh.delta = d2c + wh.d2cConsumer = d2c } return wh @@ -571,7 +581,8 @@ func NewOTLPWriteHandler(logger *slog.Logger, _ prometheus.Registerer, appendabl type rwExporter struct { *writeHandler - config func() config.Config + config func() config.Config + allowDeltaTemporality bool } func (rw *rwExporter) ConsumeMetrics(ctx context.Context, md pmetric.Metrics) error { @@ -579,11 +590,12 @@ func (rw *rwExporter) ConsumeMetrics(ctx context.Context, md pmetric.Metrics) er converter := otlptranslator.NewPrometheusConverter() annots, err := converter.FromMetrics(ctx, md, otlptranslator.Settings{ - AddMetricSuffixes: true, - AllowUTF8: otlpCfg.TranslationStrategy == config.NoUTF8EscapingWithSuffixes, + AddMetricSuffixes: otlpCfg.TranslationStrategy != config.NoTranslation, + AllowUTF8: otlpCfg.TranslationStrategy != config.UnderscoreEscapingWithSuffixes, PromoteResourceAttributes: otlpCfg.PromoteResourceAttributes, KeepIdentifyingResourceAttributes: otlpCfg.KeepIdentifyingResourceAttributes, ConvertHistogramsToNHCB: otlpCfg.ConvertHistogramsToNHCB, + AllowDeltaTemporality: rw.allowDeltaTemporality, }) if err != nil { rw.logger.Warn("Error translating OTLP metrics to Prometheus write request", "err", err) @@ -607,8 +619,8 @@ func (rw *rwExporter) Capabilities() consumer.Capabilities { type otlpWriteHandler struct { logger *slog.Logger - cumul consumer.Metrics // only cumulative - delta consumer.Metrics // delta capable + defaultConsumer consumer.Metrics // stores deltas as-is + d2cConsumer consumer.Metrics // converts deltas to cumulative } func (h *otlpWriteHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { @@ -620,13 +632,15 @@ func (h *otlpWriteHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) { } md := req.Metrics() - // if delta conversion enabled AND delta samples exist, use slower delta capable path - if h.delta != nil && hasDelta(md) { - err = h.delta.ConsumeMetrics(r.Context(), md) + // If deltatocumulative conversion enabled AND delta samples exist, use slower conversion path. 
+ // While deltatocumulative can also accept cumulative metrics (and then just forwards them as-is), it currently + // holds a sync.Mutex when entering ConsumeMetrics. This is slow and not necessary when ingesting cumulative metrics. + if h.d2cConsumer != nil && hasDelta(md) { + err = h.d2cConsumer.ConsumeMetrics(r.Context(), md) } else { - // deltatocumulative currently holds a sync.Mutex when entering ConsumeMetrics. - // This is slow and not necessary when no delta samples exist anyways - err = h.cumul.ConsumeMetrics(r.Context(), md) + // Otherwise use default consumer (alongside cumulative samples, this will accept delta samples and write as-is + // if native-delta-support is enabled). + err = h.defaultConsumer.ConsumeMetrics(r.Context(), md) } switch { diff --git a/storage/remote/write_test.go b/storage/remote/write_test.go index a3b30b6425..9d32067a6d 100644 --- a/storage/remote/write_test.go +++ b/storage/remote/write_test.go @@ -382,7 +382,118 @@ func TestWriteStorageApplyConfig_PartialUpdate(t *testing.T) { func TestOTLPWriteHandler(t *testing.T) { exportRequest := generateOTLPWriteRequest() + timestamp := time.Now() + for _, testCase := range []struct { + name string + otlpCfg config.OTLPConfig + expectedSamples []mockSample + }{ + { + name: "NoTranslation", + otlpCfg: config.OTLPConfig{ + TranslationStrategy: config.NoTranslation, + }, + expectedSamples: []mockSample{ + { + l: labels.New(labels.Label{Name: "__name__", Value: "test.counter"}, + labels.Label{Name: "foo.bar", Value: "baz"}, + labels.Label{Name: "instance", Value: "test-instance"}, + labels.Label{Name: "job", Value: "test-service"}), + t: timestamp.UnixMilli(), + v: 10.0, + }, + { + l: labels.New( + labels.Label{Name: "__name__", Value: "target_info"}, + labels.Label{Name: "host.name", Value: "test-host"}, + labels.Label{Name: "instance", Value: "test-instance"}, + labels.Label{Name: "job", Value: "test-service"}, + ), + t: timestamp.UnixMilli(), + v: 1, + }, + }, + }, + { + name: "UnderscoreEscapingWithSuffixes", + otlpCfg: config.OTLPConfig{ + TranslationStrategy: config.UnderscoreEscapingWithSuffixes, + }, + expectedSamples: []mockSample{ + { + l: labels.New(labels.Label{Name: "__name__", Value: "test_counter_total"}, + labels.Label{Name: "foo_bar", Value: "baz"}, + labels.Label{Name: "instance", Value: "test-instance"}, + labels.Label{Name: "job", Value: "test-service"}), + t: timestamp.UnixMilli(), + v: 10.0, + }, + { + l: labels.New( + labels.Label{Name: "__name__", Value: "target_info"}, + labels.Label{Name: "host_name", Value: "test-host"}, + labels.Label{Name: "instance", Value: "test-instance"}, + labels.Label{Name: "job", Value: "test-service"}, + ), + t: timestamp.UnixMilli(), + v: 1, + }, + }, + }, + { + name: "NoUTF8EscapingWithSuffixes", + otlpCfg: config.OTLPConfig{ + TranslationStrategy: config.NoUTF8EscapingWithSuffixes, + }, + expectedSamples: []mockSample{ + { + l: labels.New(labels.Label{Name: "__name__", Value: "test.counter_total"}, + labels.Label{Name: "foo.bar", Value: "baz"}, + labels.Label{Name: "instance", Value: "test-instance"}, + labels.Label{Name: "job", Value: "test-service"}), + t: timestamp.UnixMilli(), + v: 10.0, + }, + { + l: labels.New( + labels.Label{Name: "__name__", Value: "target_info"}, + labels.Label{Name: "host.name", Value: "test-host"}, + labels.Label{Name: "instance", Value: "test-instance"}, + labels.Label{Name: "job", Value: "test-service"}, + ), + t: timestamp.UnixMilli(), + v: 1, + }, + }, + }, + } { + t.Run(testCase.name, func(t *testing.T) { + appendable := 
handleOTLP(t, exportRequest, testCase.otlpCfg) + for _, sample := range testCase.expectedSamples { + requireContainsSample(t, appendable.samples, sample) + } + require.Len(t, appendable.samples, 12) // 1 (counter) + 1 (gauge) + 1 (target_info) + 7 (hist_bucket) + 2 (hist_sum, hist_count) + require.Len(t, appendable.histograms, 1) // 1 (exponential histogram) + require.Len(t, appendable.exemplars, 1) // 1 (exemplar) + }) + } +} + +func requireContainsSample(t *testing.T, actual []mockSample, expected mockSample) { + t.Helper() + + for _, got := range actual { + if labels.Equal(expected.l, got.l) && expected.t == got.t && expected.v == got.v { + return + } + } + require.Fail(t, fmt.Sprintf("Sample not found: \n"+ + "expected: %v\n"+ + "actual : %v", expected, actual)) +} + +func handleOTLP(t *testing.T, exportRequest pmetricotlp.ExportRequest, otlpCfg config.OTLPConfig) *mockAppendable { buf, err := exportRequest.MarshalProto() require.NoError(t, err) @@ -393,19 +504,16 @@ func TestOTLPWriteHandler(t *testing.T) { appendable := &mockAppendable{} handler := NewOTLPWriteHandler(nil, nil, appendable, func() config.Config { return config.Config{ - OTLPConfig: config.DefaultOTLPConfig, + OTLPConfig: otlpCfg, } }, OTLPOptions{}) - recorder := httptest.NewRecorder() handler.ServeHTTP(recorder, req) resp := recorder.Result() require.Equal(t, http.StatusOK, resp.StatusCode) - require.Len(t, appendable.samples, 12) // 1 (counter) + 1 (gauge) + 1 (target_info) + 7 (hist_bucket) + 2 (hist_sum, hist_count) - require.Len(t, appendable.histograms, 1) // 1 (exponential histogram) - require.Len(t, appendable.exemplars, 1) // 1 (exemplar) + return appendable } func generateOTLPWriteRequest() pmetricotlp.ExportRequest { @@ -426,7 +534,7 @@ func generateOTLPWriteRequest() pmetricotlp.ExportRequest { // Generate One Counter counterMetric := scopeMetric.Metrics().AppendEmpty() - counterMetric.SetName("test-counter") + counterMetric.SetName("test.counter") counterMetric.SetDescription("test-counter-description") counterMetric.SetEmptySum() counterMetric.Sum().SetAggregationTemporality(pmetric.AggregationTemporalityCumulative) @@ -446,7 +554,7 @@ func generateOTLPWriteRequest() pmetricotlp.ExportRequest { // Generate One Gauge gaugeMetric := scopeMetric.Metrics().AppendEmpty() - gaugeMetric.SetName("test-gauge") + gaugeMetric.SetName("test.gauge") gaugeMetric.SetDescription("test-gauge-description") gaugeMetric.SetEmptyGauge() @@ -457,7 +565,7 @@ func generateOTLPWriteRequest() pmetricotlp.ExportRequest { // Generate One Histogram histogramMetric := scopeMetric.Metrics().AppendEmpty() - histogramMetric.SetName("test-histogram") + histogramMetric.SetName("test.histogram") histogramMetric.SetDescription("test-histogram-description") histogramMetric.SetEmptyHistogram() histogramMetric.Histogram().SetAggregationTemporality(pmetric.AggregationTemporalityCumulative) @@ -472,7 +580,7 @@ func generateOTLPWriteRequest() pmetricotlp.ExportRequest { // Generate One Exponential-Histogram exponentialHistogramMetric := scopeMetric.Metrics().AppendEmpty() - exponentialHistogramMetric.SetName("test-exponential-histogram") + exponentialHistogramMetric.SetName("test.exponential.histogram") exponentialHistogramMetric.SetDescription("test-exponential-histogram-description") exponentialHistogramMetric.SetEmptyExponentialHistogram() exponentialHistogramMetric.ExponentialHistogram().SetAggregationTemporality(pmetric.AggregationTemporalityCumulative) diff --git a/tsdb/agent/db.go b/tsdb/agent/db.go index 99126d16f1..cd5f531870 100644 --- 
a/tsdb/agent/db.go +++ b/tsdb/agent/db.go @@ -236,7 +236,8 @@ type DB struct { appenderPool sync.Pool bufPool sync.Pool - // These pools are used during WAL replay. + // These pools are only used during WAL replay and are reset at the end. + // NOTE: Adjust resetWALReplayResources() upon changes to the pools. walReplaySeriesPool zeropool.Pool[[]record.RefSeries] walReplaySamplesPool zeropool.Pool[[]record.RefSample] walReplayHistogramsPool zeropool.Pool[[]record.RefHistogramSample] @@ -366,6 +367,7 @@ func validateOptions(opts *Options) *Options { func (db *DB) replayWAL() error { db.logger.Info("replaying WAL, this may take a while", "dir", db.wal.Dir()) + defer db.resetWALReplayResources() start := time.Now() dir, startFrom, err := wlog.LastCheckpoint(db.wal.Dir()) @@ -425,6 +427,13 @@ func (db *DB) replayWAL() error { return nil } +func (db *DB) resetWALReplayResources() { + db.walReplaySeriesPool = zeropool.Pool[[]record.RefSeries]{} + db.walReplaySamplesPool = zeropool.Pool[[]record.RefSample]{} + db.walReplayHistogramsPool = zeropool.Pool[[]record.RefHistogramSample]{} + db.walReplayFloatHistogramsPool = zeropool.Pool[[]record.RefFloatHistogramSample]{} +} + func (db *DB) loadWAL(r *wlog.Reader, multiRef map[chunks.HeadSeriesRef]chunks.HeadSeriesRef) (err error) { var ( syms = labels.NewSymbolTable() // One table for the whole WAL. diff --git a/tsdb/db.go b/tsdb/db.go index 8abcaf117e..2d0af5c940 100644 --- a/tsdb/db.go +++ b/tsdb/db.go @@ -1486,7 +1486,7 @@ func (db *DB) compactBlocks() (err error) { // long enough that we end up with a HEAD block that needs to be written. // Check if that's the case and stop compactions early. if db.head.compactable() && !db.waitingForCompactionDelay() { - db.logger.Warn("aborting block compactions to persit the head block") + db.logger.Warn("aborting block compactions to persist the head block") return nil } diff --git a/tsdb/db_test.go b/tsdb/db_test.go index 931bc42dda..6270220be4 100644 --- a/tsdb/db_test.go +++ b/tsdb/db_test.go @@ -5041,7 +5041,7 @@ func testOOOCompaction(t *testing.T, scenario sampleTypeScenario, addExtraSample // Verify that the in-memory ooo chunk is empty. checkEmptyOOOChunk := func(lbls labels.Labels) { - ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.Nil(t, ms.ooo) @@ -5085,7 +5085,7 @@ func testOOOCompaction(t *testing.T, scenario sampleTypeScenario, addExtraSample // Verify that the in-memory ooo chunk is not empty. checkNonEmptyOOOChunk := func(lbls labels.Labels) { - ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.Positive(t, ms.ooo.oooHeadChunk.chunk.NumSamples()) @@ -5246,7 +5246,7 @@ func testOOOCompactionWithNormalCompaction(t *testing.T, scenario sampleTypeScen // Checking that ooo chunk is not empty. for _, lbls := range []labels.Labels{series1, series2} { - ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.Positive(t, ms.ooo.oooHeadChunk.chunk.NumSamples()) @@ -5274,7 +5274,7 @@ func testOOOCompactionWithNormalCompaction(t *testing.T, scenario sampleTypeScen // Checking that ooo chunk is empty. 
for _, lbls := range []labels.Labels{series1, series2} { - ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.Nil(t, ms.ooo) @@ -5357,7 +5357,7 @@ func testOOOCompactionWithDisabledWriteLog(t *testing.T, scenario sampleTypeScen // Checking that ooo chunk is not empty. for _, lbls := range []labels.Labels{series1, series2} { - ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.Positive(t, ms.ooo.oooHeadChunk.chunk.NumSamples()) @@ -5385,7 +5385,7 @@ func testOOOCompactionWithDisabledWriteLog(t *testing.T, scenario sampleTypeScen // Checking that ooo chunk is empty. for _, lbls := range []labels.Labels{series1, series2} { - ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.Nil(t, ms.ooo) @@ -5467,7 +5467,7 @@ func testOOOQueryAfterRestartWithSnapshotAndRemovedWBL(t *testing.T, scenario sa // Checking that there are some ooo m-map chunks. for _, lbls := range []labels.Labels{series1, series2} { - ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.Len(t, ms.ooo.oooMmappedChunks, 2) @@ -5486,7 +5486,7 @@ func testOOOQueryAfterRestartWithSnapshotAndRemovedWBL(t *testing.T, scenario sa // Check ooo m-map chunks again. for _, lbls := range []labels.Labels{series1, series2} { - ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.Len(t, ms.ooo.oooMmappedChunks, 2) @@ -5526,7 +5526,7 @@ func testOOOQueryAfterRestartWithSnapshotAndRemovedWBL(t *testing.T, scenario sa // Checking that ooo chunk is empty in Head. for _, lbls := range []labels.Labels{series1, series2} { - ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.Nil(t, ms.ooo) @@ -6835,7 +6835,7 @@ func testOOODisabled(t *testing.T, scenario sampleTypeScenario) { _, err = os.ReadDir(path.Join(db.Dir(), wlog.WblDirName)) require.True(t, os.IsNotExist(err)) - ms, created, err := db.head.getOrCreate(s1.Hash(), s1) + ms, created, err := db.head.getOrCreate(s1.Hash(), s1, false) require.NoError(t, err) require.False(t, created) require.NotNil(t, ms) @@ -6908,7 +6908,7 @@ func testWBLAndMmapReplay(t *testing.T, scenario sampleTypeScenario) { oooMint, oooMaxt := minutes(195), minutes(260) // Collect the samples only present in the ooo m-map chunks. - ms, created, err := db.head.getOrCreate(s1.Hash(), s1) + ms, created, err := db.head.getOrCreate(s1.Hash(), s1, false) require.False(t, created) require.NoError(t, err) var s1MmapSamples []chunks.Sample @@ -7088,7 +7088,7 @@ func TestOOOHistogramCompactionWithCounterResets(t *testing.T) { // Verify that the in-memory ooo chunk is empty. 
checkEmptyOOOChunk := func(lbls labels.Labels) { - ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.Nil(t, ms.ooo) @@ -7270,7 +7270,7 @@ func TestOOOHistogramCompactionWithCounterResets(t *testing.T) { // Verify that the in-memory ooo chunk is not empty. checkNonEmptyOOOChunk := func(lbls labels.Labels) { - ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.head.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.Positive(t, ms.ooo.oooHeadChunk.chunk.NumSamples()) @@ -7594,7 +7594,7 @@ func testOOOCompactionFailure(t *testing.T, scenario sampleTypeScenario) { require.Len(t, db.Blocks(), 3) // Check that the ooo chunks were removed. - ms, created, err := db.head.getOrCreate(series1.Hash(), series1) + ms, created, err := db.head.getOrCreate(series1.Hash(), series1, false) require.NoError(t, err) require.False(t, created) require.Nil(t, ms.ooo) diff --git a/tsdb/head.go b/tsdb/head.go index 3835970ca3..7763d272b7 100644 --- a/tsdb/head.go +++ b/tsdb/head.go @@ -93,7 +93,8 @@ type Head struct { bytesPool zeropool.Pool[[]byte] memChunkPool sync.Pool - // These pools are used during WAL/WBL replay. + // These pools are only used during WAL/WBL replay and are reset at the end. + // NOTE: Adjust resetWLReplayResources() upon changes to the pools. wlReplaySeriesPool zeropool.Pool[[]record.RefSeries] wlReplaySamplesPool zeropool.Pool[[]record.RefSample] wlReplaytStonesPool zeropool.Pool[[]tombstones.Stone] @@ -345,6 +346,17 @@ func (h *Head) resetInMemoryState() error { return nil } +func (h *Head) resetWLReplayResources() { + h.wlReplaySeriesPool = zeropool.Pool[[]record.RefSeries]{} + h.wlReplaySamplesPool = zeropool.Pool[[]record.RefSample]{} + h.wlReplaytStonesPool = zeropool.Pool[[]tombstones.Stone]{} + h.wlReplayExemplarsPool = zeropool.Pool[[]record.RefExemplar]{} + h.wlReplayHistogramsPool = zeropool.Pool[[]record.RefHistogramSample]{} + h.wlReplayFloatHistogramsPool = zeropool.Pool[[]record.RefFloatHistogramSample]{} + h.wlReplayMetadataPool = zeropool.Pool[[]record.RefMetadata]{} + h.wlReplayMmapMarkersPool = zeropool.Pool[[]record.RefMmapMarker]{} +} + type headMetrics struct { activeAppenders prometheus.Gauge series prometheus.GaugeFunc @@ -629,6 +641,7 @@ const cardinalityCacheExpirationTime = time.Duration(30) * time.Second // limits the ingested samples to the head min valid time. func (h *Head) Init(minValidTime int64) error { h.minValidTime.Store(minValidTime) + defer h.resetWLReplayResources() defer func() { h.postings.EnsureOrder(h.opts.WALReplayConcurrency) }() @@ -1721,7 +1734,7 @@ func (h *Head) String() string { return "head" } -func (h *Head) getOrCreate(hash uint64, lset labels.Labels) (*memSeries, bool, error) { +func (h *Head) getOrCreate(hash uint64, lset labels.Labels, pendingCommit bool) (*memSeries, bool, error) { // Just using `getOrCreateWithID` below would be semantically sufficient, but we'd create // a new series on every sample inserted via Add(), which causes allocations // and makes our series IDs rather random and harder to compress in postings. @@ -1733,17 +1746,17 @@ func (h *Head) getOrCreate(hash uint64, lset labels.Labels) (*memSeries, bool, e // Optimistically assume that we are the first one to create the series. 
id := chunks.HeadSeriesRef(h.lastSeriesID.Inc()) - return h.getOrCreateWithID(id, hash, lset) + return h.getOrCreateWithID(id, hash, lset, pendingCommit) } -func (h *Head) getOrCreateWithID(id chunks.HeadSeriesRef, hash uint64, lset labels.Labels) (*memSeries, bool, error) { +func (h *Head) getOrCreateWithID(id chunks.HeadSeriesRef, hash uint64, lset labels.Labels, pendingCommit bool) (*memSeries, bool, error) { s, created, err := h.series.getOrSet(hash, lset, func() *memSeries { shardHash := uint64(0) if h.opts.EnableSharding { shardHash = labels.StableHash(lset) } - return newMemSeries(lset, id, shardHash, h.opts.IsolationDisabled) + return newMemSeries(lset, id, shardHash, h.opts.IsolationDisabled, pendingCommit) }) if err != nil { return nil, false, err @@ -2184,12 +2197,13 @@ type memSeriesOOOFields struct { firstOOOChunkID chunks.HeadChunkID // HeadOOOChunkID for oooMmappedChunks[0]. } -func newMemSeries(lset labels.Labels, id chunks.HeadSeriesRef, shardHash uint64, isolationDisabled bool) *memSeries { +func newMemSeries(lset labels.Labels, id chunks.HeadSeriesRef, shardHash uint64, isolationDisabled, pendingCommit bool) *memSeries { s := &memSeries{ - lset: lset, - ref: id, - nextAt: math.MinInt64, - shardHash: shardHash, + lset: lset, + ref: id, + nextAt: math.MinInt64, + shardHash: shardHash, + pendingCommit: pendingCommit, } if !isolationDisabled { s.txs = newTxRing(0) diff --git a/tsdb/head_append.go b/tsdb/head_append.go index 322ff65022..03800b2455 100644 --- a/tsdb/head_append.go +++ b/tsdb/head_append.go @@ -319,7 +319,8 @@ type headAppender struct { headMaxt int64 // We track it here to not take the lock for every sample appended. oooTimeWindow int64 // Use the same for the entire append, and don't load the atomic for each sample. - series []record.RefSeries // New series held by this appender. + seriesRefs []record.RefSeries // New series records held by this appender. + series []*memSeries // New series held by this appender (using corresponding slices indexes from seriesRefs) samples []record.RefSample // New float samples held by this appender. sampleSeries []*memSeries // Float series corresponding to the samples held by this appender (using corresponding slice indices - same series may appear more than once). histograms []record.RefHistogramSample // New histogram samples held by this appender. @@ -461,15 +462,16 @@ func (a *headAppender) getOrCreate(lset labels.Labels) (s *memSeries, created bo if l, dup := lset.HasDuplicateLabelNames(); dup { return nil, false, fmt.Errorf(`label name "%s" is not unique: %w`, l, ErrInvalidSample) } - s, created, err = a.head.getOrCreate(lset.Hash(), lset) + s, created, err = a.head.getOrCreate(lset.Hash(), lset, true) if err != nil { return nil, false, err } if created { - a.series = append(a.series, record.RefSeries{ + a.seriesRefs = append(a.seriesRefs, record.RefSeries{ Ref: s.ref, Labels: lset, }) + a.series = append(a.series, s) } return s, created, nil } @@ -907,8 +909,8 @@ func (a *headAppender) log() error { var rec []byte var enc record.Encoder - if len(a.series) > 0 { - rec = enc.Series(a.series, buf) + if len(a.seriesRefs) > 0 { + rec = enc.Series(a.seriesRefs, buf) buf = rec[:0] if err := a.head.wal.Log(rec); err != nil { @@ -1426,6 +1428,14 @@ func (a *headAppender) commitMetadata() { } } +func (a *headAppender) unmarkCreatedSeriesAsPendingCommit() { + for _, s := range a.series { + s.Lock() + s.pendingCommit = false + s.Unlock() + } +} + // Commit writes to the WAL and adds the data to the Head. 
// TODO(codesome): Refactor this method to reduce indentation and make it more readable. func (a *headAppender) Commit() (err error) { @@ -1479,6 +1489,8 @@ func (a *headAppender) Commit() (err error) { a.commitHistograms(acc) a.commitFloatHistograms(acc) a.commitMetadata() + // Unmark all series as pending commit after all samples have been committed. + a.unmarkCreatedSeriesAsPendingCommit() a.head.metrics.outOfOrderSamples.WithLabelValues(sampleMetricTypeFloat).Add(float64(acc.floatOOORejected)) a.head.metrics.outOfOrderSamples.WithLabelValues(sampleMetricTypeHistogram).Add(float64(acc.histoOOORejected)) @@ -1952,6 +1964,7 @@ func (a *headAppender) Rollback() (err error) { defer a.head.metrics.activeAppenders.Dec() defer a.head.iso.closeAppend(a.appendID) defer a.head.putSeriesBuffer(a.sampleSeries) + defer a.unmarkCreatedSeriesAsPendingCommit() var series *memSeries for i := range a.samples { diff --git a/tsdb/head_bench_test.go b/tsdb/head_bench_test.go index 0ffc75abaf..ff9b22ec60 100644 --- a/tsdb/head_bench_test.go +++ b/tsdb/head_bench_test.go @@ -43,7 +43,7 @@ func BenchmarkHeadStripeSeriesCreate(b *testing.B) { defer h.Close() for i := 0; i < b.N; i++ { - h.getOrCreate(uint64(i), labels.FromStrings("a", strconv.Itoa(i))) + h.getOrCreate(uint64(i), labels.FromStrings("a", strconv.Itoa(i)), false) } } @@ -62,7 +62,7 @@ func BenchmarkHeadStripeSeriesCreateParallel(b *testing.B) { b.RunParallel(func(pb *testing.PB) { for pb.Next() { i := count.Inc() - h.getOrCreate(uint64(i), labels.FromStrings("a", strconv.Itoa(int(i)))) + h.getOrCreate(uint64(i), labels.FromStrings("a", strconv.Itoa(int(i))), false) } }) } @@ -82,7 +82,7 @@ func BenchmarkHeadStripeSeriesCreate_PreCreationFailure(b *testing.B) { defer h.Close() for i := 0; i < b.N; i++ { - h.getOrCreate(uint64(i), labels.FromStrings("a", strconv.Itoa(i))) + h.getOrCreate(uint64(i), labels.FromStrings("a", strconv.Itoa(i)), false) } } diff --git a/tsdb/head_read_test.go b/tsdb/head_read_test.go index 6dd4c0ff55..ae506c1d8e 100644 --- a/tsdb/head_read_test.go +++ b/tsdb/head_read_test.go @@ -382,7 +382,7 @@ func TestMemSeries_chunk(t *testing.T) { require.NoError(t, chunkDiskMapper.Close()) }() - series := newMemSeries(labels.EmptyLabels(), 1, 0, true) + series := newMemSeries(labels.EmptyLabels(), 1, 0, true, false) if tc.setup != nil { tc.setup(t, series, chunkDiskMapper) diff --git a/tsdb/head_test.go b/tsdb/head_test.go index 100d5b1265..dcf9c9c9aa 100644 --- a/tsdb/head_test.go +++ b/tsdb/head_test.go @@ -102,7 +102,7 @@ func BenchmarkCreateSeries(b *testing.B) { b.ResetTimer() for _, s := range series { - h.getOrCreate(s.Labels().Hash(), s.Labels()) + h.getOrCreate(s.Labels().Hash(), s.Labels(), false) } } @@ -149,24 +149,29 @@ func BenchmarkHeadAppender_Append_Commit_ExistingSeries(b *testing.B) { } } -func populateTestWL(t testing.TB, w *wlog.WL, recs []interface{}) { +func populateTestWL(t testing.TB, w *wlog.WL, recs []interface{}, buf []byte) []byte { var enc record.Encoder for _, r := range recs { + buf = buf[:0] switch v := r.(type) { case []record.RefSeries: - require.NoError(t, w.Log(enc.Series(v, nil))) + buf = enc.Series(v, buf) case []record.RefSample: - require.NoError(t, w.Log(enc.Samples(v, nil))) + buf = enc.Samples(v, buf) case []tombstones.Stone: - require.NoError(t, w.Log(enc.Tombstones(v, nil))) + buf = enc.Tombstones(v, buf) case []record.RefExemplar: - require.NoError(t, w.Log(enc.Exemplars(v, nil))) + buf = enc.Exemplars(v, buf) case []record.RefMmapMarker: - require.NoError(t, w.Log(enc.MmapMarkers(v, 
nil))) + buf = enc.MmapMarkers(v, buf) case []record.RefMetadata: - require.NoError(t, w.Log(enc.Metadata(v, nil))) + buf = enc.Metadata(v, buf) + default: + continue } + require.NoError(t, w.Log(buf)) } + return buf } func readTestWAL(t testing.TB, dir string) (recs []interface{}) { @@ -309,15 +314,16 @@ func BenchmarkLoadWLs(b *testing.B) { // Write series. refSeries := make([]record.RefSeries, 0, c.seriesPerBatch) + var buf []byte + builder := labels.NewBuilder(labels.EmptyLabels()) + for j := 1; j < labelsPerSeries; j++ { + builder.Set(defaultLabelName+strconv.Itoa(j), defaultLabelValue+strconv.Itoa(j)) + } for k := 0; k < c.batches; k++ { refSeries = refSeries[:0] for i := k * c.seriesPerBatch; i < (k+1)*c.seriesPerBatch; i++ { - lbls := make(map[string]string, labelsPerSeries) - lbls[defaultLabelName] = strconv.Itoa(i) - for j := 1; len(lbls) < labelsPerSeries; j++ { - lbls[defaultLabelName+strconv.Itoa(j)] = defaultLabelValue + strconv.Itoa(j) - } - refSeries = append(refSeries, record.RefSeries{Ref: chunks.HeadSeriesRef(i) * 101, Labels: labels.FromMap(lbls)}) + builder.Set(defaultLabelName, strconv.Itoa(i)) + refSeries = append(refSeries, record.RefSeries{Ref: chunks.HeadSeriesRef(i) * 101, Labels: builder.Labels()}) } writeSeries := refSeries @@ -333,7 +339,7 @@ func BenchmarkLoadWLs(b *testing.B) { writeSeries = newWriteSeries } - populateTestWL(b, wal, []interface{}{writeSeries}) + buf = populateTestWL(b, wal, []interface{}{writeSeries}, buf) } // Write samples. @@ -359,7 +365,7 @@ func BenchmarkLoadWLs(b *testing.B) { V: float64(i) * 100, }) } - populateTestWL(b, wal, []interface{}{refSamples}) + buf = populateTestWL(b, wal, []interface{}{refSamples}, buf) } } @@ -374,7 +380,7 @@ func BenchmarkLoadWLs(b *testing.B) { } for k := 0; k < c.batches*c.seriesPerBatch; k++ { // Create one mmapped chunk per series, with one sample at the given time. - s := newMemSeries(labels.Labels{}, chunks.HeadSeriesRef(k)*101, 0, defaultIsolationDisabled) + s := newMemSeries(labels.Labels{}, chunks.HeadSeriesRef(k)*101, 0, defaultIsolationDisabled, false) s.append(c.mmappedChunkT, 42, 0, cOpts) // There's only one head chunk because only a single sample is appended. 
mmapChunks() // ignores the latest chunk, so we need to cut a new head chunk to guarantee the chunk with @@ -398,7 +404,7 @@ func BenchmarkLoadWLs(b *testing.B) { Labels: labels.FromStrings("trace_id", fmt.Sprintf("trace-%d", i)), }) } - populateTestWL(b, wal, []interface{}{refExemplars}) + buf = populateTestWL(b, wal, []interface{}{refExemplars}, buf) } } @@ -427,10 +433,10 @@ func BenchmarkLoadWLs(b *testing.B) { }) } if shouldAddMarkers { - populateTestWL(b, wbl, []interface{}{refMarkers}) + populateTestWL(b, wbl, []interface{}{refMarkers}, buf) } - populateTestWL(b, wal, []interface{}{refSamples}) - populateTestWL(b, wbl, []interface{}{refSamples}) + buf = populateTestWL(b, wal, []interface{}{refSamples}, buf) + buf = populateTestWL(b, wbl, []interface{}{refSamples}, buf) } } @@ -739,7 +745,7 @@ func TestHead_ReadWAL(t *testing.T) { require.NoError(t, head.Close()) }() - populateTestWL(t, w, entries) + populateTestWL(t, w, entries, nil) require.NoError(t, head.Init(math.MinInt64)) require.Equal(t, uint64(101), head.lastSeriesID.Load()) @@ -895,7 +901,7 @@ func TestHead_KeepSeriesInWALCheckpoint(t *testing.T) { { name: "keep series still in the head", prepare: func(t *testing.T, h *Head) { - _, _, err := h.getOrCreateWithID(chunks.HeadSeriesRef(existingRef), existingLbls.Hash(), existingLbls) + _, _, err := h.getOrCreateWithID(chunks.HeadSeriesRef(existingRef), existingLbls.Hash(), existingLbls, false) require.NoError(t, err) }, seriesRef: chunks.HeadSeriesRef(existingRef), @@ -971,6 +977,86 @@ func TestHead_ActiveAppenders(t *testing.T) { require.Equal(t, 0.0, prom_testutil.ToFloat64(head.metrics.activeAppenders)) } +func TestHead_RaceBetweenSeriesCreationAndGC(t *testing.T) { + head, _ := newTestHead(t, 1000, compression.None, false) + t.Cleanup(func() { _ = head.Close() }) + require.NoError(t, head.Init(0)) + + const totalSeries = 100_000 + series := make([]labels.Labels, totalSeries) + for i := 0; i < totalSeries; i++ { + series[i] = labels.FromStrings("foo", strconv.Itoa(i)) + } + done := atomic.NewBool(false) + + go func() { + defer done.Store(true) + app := head.Appender(context.Background()) + defer func() { + if err := app.Commit(); err != nil { + t.Errorf("Failed to commit: %v", err) + } + }() + for i := 0; i < totalSeries; i++ { + _, err := app.Append(0, series[i], 100, 1) + if err != nil { + t.Errorf("Failed to append: %v", err) + return + } + } + }() + + // Don't check the atomic.Bool on all iterations in order to perform more gc iterations and make the race condition more likely. + for i := 1; i%128 != 0 || !done.Load(); i++ { + head.gc() + } + + require.Equal(t, totalSeries, int(head.NumSeries())) +} + +func TestHead_CanGarbagecollectSeriesCreatedWithoutSamples(t *testing.T) { + for op, finishTxn := range map[string]func(app storage.Appender) error{ + "after commit": func(app storage.Appender) error { return app.Commit() }, + "after rollback": func(app storage.Appender) error { return app.Rollback() }, + } { + t.Run(op, func(t *testing.T) { + chunkRange := time.Hour.Milliseconds() + head, _ := newTestHead(t, chunkRange, compression.None, true) + t.Cleanup(func() { _ = head.Close() }) + + require.NoError(t, head.Init(0)) + + firstSampleTime := 10 * chunkRange + { + // Append first sample, it should init head max time to firstSampleTime. 
+ app := head.Appender(context.Background()) + _, err := app.Append(0, labels.FromStrings("lbl", "ok"), firstSampleTime, 1) + require.NoError(t, err) + require.NoError(t, app.Commit()) + require.Equal(t, 1, int(head.NumSeries())) + } + + // Append a sample in a time range that is not covered by the chunk range: + // the series is created first, but then no sample is appended. + app := head.Appender(context.Background()) + invalidSampleTime := firstSampleTime - chunkRange + _, err := app.Append(0, labels.FromStrings("foo", "bar"), invalidSampleTime, 2) + require.Error(t, err) + // These are our assumptions: we're not testing them, we're just checking them to make debugging a failed + // test easier if someone refactors the code and breaks these assumptions. + // If these assumptions fail after a refactor, feel free to remove them, but make sure that the test still tests what we intended to test. + require.NotErrorIs(t, err, storage.ErrOutOfBounds, "Failed to append sample shouldn't take the shortcut that returns storage.ErrOutOfBounds") + require.ErrorIs(t, err, storage.ErrTooOldSample, "Failed to append sample should return storage.ErrTooOldSample, because OOO window was enabled but this sample doesn't fall into it.") + // Do commit or rollback, depending on what we're testing. + require.NoError(t, finishTxn(app)) + + // Garbage-collect: since the transaction has finished and the series has no samples, it should be collectable. + head.gc() + require.Equal(t, 1, int(head.NumSeries())) + }) + } +} + func TestHead_UnknownWALRecord(t *testing.T) { head, w := newTestHead(t, 1000, compression.None, false) w.Log([]byte{255, 42}) @@ -1025,7 +1111,7 @@ func BenchmarkHead_Truncate(b *testing.B) { } allSeries[i] = labels.FromStrings(append(nameValues, "first", "a", "second", "a", "third", "a")...)
- s, _, _ := h.getOrCreate(allSeries[i].Hash(), allSeries[i]) + s, _, _ := h.getOrCreate(allSeries[i].Hash(), allSeries[i], false) s.mmappedChunks = []*mmappedChunk{ {minTime: 1000 * int64(i/churn), maxTime: 999 + 1000*int64(i/churn)}, } @@ -1062,10 +1148,10 @@ func TestHead_Truncate(t *testing.T) { ctx := context.Background() - s1, _, _ := h.getOrCreate(1, labels.FromStrings("a", "1", "b", "1")) - s2, _, _ := h.getOrCreate(2, labels.FromStrings("a", "2", "b", "1")) - s3, _, _ := h.getOrCreate(3, labels.FromStrings("a", "1", "b", "2")) - s4, _, _ := h.getOrCreate(4, labels.FromStrings("a", "2", "b", "2", "c", "1")) + s1, _, _ := h.getOrCreate(1, labels.FromStrings("a", "1", "b", "1"), false) + s2, _, _ := h.getOrCreate(2, labels.FromStrings("a", "2", "b", "1"), false) + s3, _, _ := h.getOrCreate(3, labels.FromStrings("a", "1", "b", "2"), false) + s4, _, _ := h.getOrCreate(4, labels.FromStrings("a", "2", "b", "2", "c", "1"), false) s1.mmappedChunks = []*mmappedChunk{ {minTime: 0, maxTime: 999}, @@ -1162,7 +1248,7 @@ func TestMemSeries_truncateChunks(t *testing.T) { }, } - s := newMemSeries(labels.FromStrings("a", "b"), 1, 0, defaultIsolationDisabled) + s := newMemSeries(labels.FromStrings("a", "b"), 1, 0, defaultIsolationDisabled, false) for i := 0; i < 4000; i += 5 { ok, _ := s.append(int64(i), float64(i), 0, cOpts) @@ -1303,7 +1389,7 @@ func TestMemSeries_truncateChunks_scenarios(t *testing.T) { require.NoError(t, chunkDiskMapper.Close()) }() - series := newMemSeries(labels.EmptyLabels(), 1, 0, true) + series := newMemSeries(labels.EmptyLabels(), 1, 0, true, false) cOpts := chunkOpts{ chunkDiskMapper: chunkDiskMapper, @@ -1383,7 +1469,7 @@ func TestHeadDeleteSeriesWithoutSamples(t *testing.T) { require.NoError(t, head.Close()) }() - populateTestWL(t, w, entries) + populateTestWL(t, w, entries, nil) require.NoError(t, head.Init(math.MinInt64)) @@ -1877,7 +1963,7 @@ func TestMemSeries_append(t *testing.T) { samplesPerChunk: DefaultSamplesPerChunk, } - s := newMemSeries(labels.Labels{}, 1, 0, defaultIsolationDisabled) + s := newMemSeries(labels.Labels{}, 1, 0, defaultIsolationDisabled, false) // Add first two samples at the very end of a chunk range and the next two // on and after it. @@ -1938,7 +2024,7 @@ func TestMemSeries_appendHistogram(t *testing.T) { samplesPerChunk: DefaultSamplesPerChunk, } - s := newMemSeries(labels.Labels{}, 1, 0, defaultIsolationDisabled) + s := newMemSeries(labels.Labels{}, 1, 0, defaultIsolationDisabled, false) histograms := tsdbutil.GenerateTestHistograms(4) histogramWithOneMoreBucket := histograms[3].Copy() @@ -2000,7 +2086,7 @@ func TestMemSeries_append_atVariableRate(t *testing.T) { samplesPerChunk: samplesPerChunk, } - s := newMemSeries(labels.Labels{}, 1, 0, defaultIsolationDisabled) + s := newMemSeries(labels.Labels{}, 1, 0, defaultIsolationDisabled, false) // At this slow rate, we will fill the chunk in two block durations. slowRate := (DefaultBlockDuration * 2) / samplesPerChunk @@ -2051,7 +2137,7 @@ func TestGCChunkAccess(t *testing.T) { h.initTime(0) - s, _, _ := h.getOrCreate(1, labels.FromStrings("a", "1")) + s, _, _ := h.getOrCreate(1, labels.FromStrings("a", "1"), false) // Appending 2 samples for the first chunk. ok, chunkCreated := s.append(0, 0, 0, cOpts) @@ -2110,7 +2196,7 @@ func TestGCSeriesAccess(t *testing.T) { h.initTime(0) - s, _, _ := h.getOrCreate(1, labels.FromStrings("a", "1")) + s, _, _ := h.getOrCreate(1, labels.FromStrings("a", "1"), false) // Appending 2 samples for the first chunk. 
ok, chunkCreated := s.append(0, 0, 0, cOpts) @@ -2463,7 +2549,7 @@ func TestHeadReadWriterRepair(t *testing.T) { samplesPerChunk: DefaultSamplesPerChunk, } - s, created, _ := h.getOrCreate(1, labels.FromStrings("a", "1")) + s, created, _ := h.getOrCreate(1, labels.FromStrings("a", "1"), false) require.True(t, created, "series was not created") for i := 0; i < 7; i++ { @@ -2823,7 +2909,7 @@ func TestIsolationAppendIDZeroIsNoop(t *testing.T) { samplesPerChunk: DefaultSamplesPerChunk, } - s, _, _ := h.getOrCreate(1, labels.FromStrings("a", "1")) + s, _, _ := h.getOrCreate(1, labels.FromStrings("a", "1"), false) ok, _ := s.append(0, 0, 0, cOpts) require.True(t, ok, "Series append failed.") @@ -3406,7 +3492,7 @@ func TestIteratorSeekIntoBuffer(t *testing.T) { samplesPerChunk: DefaultSamplesPerChunk, } - s := newMemSeries(labels.Labels{}, 1, 0, defaultIsolationDisabled) + s := newMemSeries(labels.Labels{}, 1, 0, defaultIsolationDisabled, false) for i := 0; i < 7; i++ { ok, _ := s.append(int64(i), float64(i), 0, cOpts) @@ -4716,7 +4802,7 @@ func TestHistogramCounterResetHeader(t *testing.T) { checkExpCounterResetHeader := func(newHeaders ...chunkenc.CounterResetHeader) { expHeaders = append(expHeaders, newHeaders...) - ms, _, err := head.getOrCreate(l.Hash(), l) + ms, _, err := head.getOrCreate(l.Hash(), l, false) require.NoError(t, err) ms.mmapChunks(head.chunkDiskMapper) require.Len(t, ms.mmappedChunks, len(expHeaders)-1) // One is the head chunk. @@ -4843,7 +4929,7 @@ func TestOOOHistogramCounterResetHeaders(t *testing.T) { checkOOOExpCounterResetHeader := func(newChunks ...expOOOMmappedChunks) { expChunks = append(expChunks, newChunks...) - ms, _, err := head.getOrCreate(l.Hash(), l) + ms, _, err := head.getOrCreate(l.Hash(), l, false) require.NoError(t, err) require.Len(t, ms.ooo.oooMmappedChunks, len(expChunks)) @@ -4986,7 +5072,7 @@ func TestAppendingDifferentEncodingToSameSeries(t *testing.T) { var expResult []chunks.Sample checkExpChunks := func(count int) { - ms, created, err := db.Head().getOrCreate(lbls.Hash(), lbls) + ms, created, err := db.Head().getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.NotNil(t, ms) @@ -5290,7 +5376,7 @@ func testWBLReplay(t *testing.T, scenario sampleTypeScenario) { require.NoError(t, h.Init(0)) // Replay happens here. // Get the ooo samples from the Head. - ms, ok, err := h.getOrCreate(l.Hash(), l) + ms, ok, err := h.getOrCreate(l.Hash(), l, false) require.NoError(t, err) require.False(t, ok) require.NotNil(t, ms) @@ -5359,7 +5445,7 @@ func testOOOMmapReplay(t *testing.T, scenario sampleTypeScenario) { appendSample(mins) } - ms, ok, err := h.getOrCreate(l.Hash(), l) + ms, ok, err := h.getOrCreate(l.Hash(), l, false) require.NoError(t, err) require.False(t, ok) require.NotNil(t, ms) @@ -5387,7 +5473,7 @@ func testOOOMmapReplay(t *testing.T, scenario sampleTypeScenario) { require.NoError(t, h.Init(0)) // Replay happens here. // Get the mmap chunks from the Head. 
- ms, ok, err = h.getOrCreate(l.Hash(), l) + ms, ok, err = h.getOrCreate(l.Hash(), l, false) require.NoError(t, err) require.False(t, ok) require.NotNil(t, ms) @@ -5442,7 +5528,7 @@ func TestHeadInit_DiscardChunksWithUnsupportedEncoding(t *testing.T) { require.NoError(t, app.Commit()) require.Greater(t, prom_testutil.ToFloat64(h.metrics.chunksCreated), 4.0) - series, created, err := h.getOrCreate(seriesLabels.Hash(), seriesLabels) + series, created, err := h.getOrCreate(seriesLabels.Hash(), seriesLabels, false) require.NoError(t, err) require.False(t, created, "should already exist") require.NotNil(t, series, "should return the series we created above") @@ -5459,7 +5545,7 @@ func TestHeadInit_DiscardChunksWithUnsupportedEncoding(t *testing.T) { require.NoError(t, err) require.NoError(t, h.Init(0)) - series, created, err = h.getOrCreate(seriesLabels.Hash(), seriesLabels) + series, created, err = h.getOrCreate(seriesLabels.Hash(), seriesLabels, false) require.NoError(t, err) require.False(t, created, "should already exist") require.NotNil(t, series, "should return the series we created above") @@ -5656,7 +5742,7 @@ func testOOOAppendWithNoSeries(t *testing.T, appendFunc func(appender storage.Ap } verifyOOOSamples := func(lbls labels.Labels, expSamples int) { - ms, created, err := h.getOrCreate(lbls.Hash(), lbls) + ms, created, err := h.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.NotNil(t, ms) @@ -5667,7 +5753,7 @@ func testOOOAppendWithNoSeries(t *testing.T, appendFunc func(appender storage.Ap } verifyInOrderSamples := func(lbls labels.Labels, expSamples int) { - ms, created, err := h.getOrCreate(lbls.Hash(), lbls) + ms, created, err := h.getOrCreate(lbls.Hash(), lbls, false) require.NoError(t, err) require.False(t, created) require.NotNil(t, ms) @@ -5795,7 +5881,7 @@ func TestGaugeHistogramWALAndChunkHeader(t *testing.T) { checkHeaders := func() { head.mmapHeadChunks() - ms, _, err := head.getOrCreate(l.Hash(), l) + ms, _, err := head.getOrCreate(l.Hash(), l, false) require.NoError(t, err) require.Len(t, ms.mmappedChunks, 3) expHeaders := []chunkenc.CounterResetHeader{ @@ -5870,7 +5956,7 @@ func TestGaugeFloatHistogramWALAndChunkHeader(t *testing.T) { appendHistogram(hists[4]) checkHeaders := func() { - ms, _, err := head.getOrCreate(l.Hash(), l) + ms, _, err := head.getOrCreate(l.Hash(), l, false) require.NoError(t, err) head.mmapHeadChunks() require.Len(t, ms.mmappedChunks, 3) diff --git a/tsdb/head_wal.go b/tsdb/head_wal.go index 3450eb9b87..926af84603 100644 --- a/tsdb/head_wal.go +++ b/tsdb/head_wal.go @@ -254,7 +254,7 @@ Outer: switch v := d.(type) { case []record.RefSeries: for _, walSeries := range v { - mSeries, created, err := h.getOrCreateWithID(walSeries.Ref, walSeries.Labels.Hash(), walSeries.Labels) + mSeries, created, err := h.getOrCreateWithID(walSeries.Ref, walSeries.Labels.Hash(), walSeries.Labels, false) if err != nil { seriesCreationErr = err break Outer @@ -1577,7 +1577,7 @@ func (h *Head) loadChunkSnapshot() (int, int, map[chunks.HeadSeriesRef]*memSerie localRefSeries := shardedRefSeries[idx] for csr := range rc { - series, _, err := h.getOrCreateWithID(csr.ref, csr.lset.Hash(), csr.lset) + series, _, err := h.getOrCreateWithID(csr.ref, csr.lset.Hash(), csr.lset, false) if err != nil { errChan <- err return diff --git a/tsdb/ooo_head_read_test.go b/tsdb/ooo_head_read_test.go index d2c490f221..d49c7d8fc3 100644 --- a/tsdb/ooo_head_read_test.go +++ b/tsdb/ooo_head_read_test.go @@ -306,7 +306,7 @@ func 
TestOOOHeadIndexReader_Series(t *testing.T) { }() require.NoError(t, h.Init(0)) - s1, _, _ := h.getOrCreate(s1ID, s1Lset) + s1, _, _ := h.getOrCreate(s1ID, s1Lset, false) s1.ooo = &memSeriesOOOFields{} // define our expected chunks, by looking at the expected ChunkIntervals and setting... diff --git a/web/api/v1/api.go b/web/api/v1/api.go index 82aff1c940..c924c9092c 100644 --- a/web/api/v1/api.go +++ b/web/api/v1/api.go @@ -262,7 +262,7 @@ func NewAPI( statsRenderer StatsRenderer, rwEnabled bool, acceptRemoteWriteProtoMsgs []config.RemoteWriteProtoMsg, - otlpEnabled, otlpDeltaToCumulative bool, + otlpEnabled, otlpDeltaToCumulative, otlpNativeDeltaIngestion bool, ctZeroIngestionEnabled bool, ) *API { a := &API{ @@ -310,7 +310,7 @@ func NewAPI( a.remoteWriteHandler = remote.NewWriteHandler(logger, registerer, ap, acceptRemoteWriteProtoMsgs, ctZeroIngestionEnabled) } if otlpEnabled { - a.otlpWriteHandler = remote.NewOTLPWriteHandler(logger, registerer, ap, configFunc, remote.OTLPOptions{ConvertDelta: otlpDeltaToCumulative}) + a.otlpWriteHandler = remote.NewOTLPWriteHandler(logger, registerer, ap, configFunc, remote.OTLPOptions{ConvertDelta: otlpDeltaToCumulative, NativeDelta: otlpNativeDeltaIngestion}) } return a diff --git a/web/api/v1/errors_test.go b/web/api/v1/errors_test.go index eb929c33ce..bb70792583 100644 --- a/web/api/v1/errors_test.go +++ b/web/api/v1/errors_test.go @@ -144,6 +144,7 @@ func createPrometheusAPI(t *testing.T, q storage.SampleAndChunkQueryable) *route false, false, false, + false, ) promRouter := route.New().WithPrefix("/api/v1") diff --git a/web/ui/mantine-ui/package.json b/web/ui/mantine-ui/package.json index 2a6b84855f..15ac1fa5ed 100644 --- a/web/ui/mantine-ui/package.json +++ b/web/ui/mantine-ui/package.json @@ -1,7 +1,7 @@ { "name": "@prometheus-io/mantine-ui", "private": true, - "version": "0.302.1", + "version": "0.303.0", "type": "module", "scripts": { "start": "vite", @@ -12,62 +12,62 @@ "test": "vitest" }, "dependencies": { - "@codemirror/autocomplete": "^6.18.4", - "@codemirror/language": "^6.10.8", - "@codemirror/lint": "^6.8.4", + "@codemirror/autocomplete": "^6.18.6", + "@codemirror/language": "^6.11.0", + "@codemirror/lint": "^6.8.5", "@codemirror/state": "^6.5.2", - "@codemirror/view": "^6.36.4", - "@floating-ui/dom": "^1.6.12", + "@codemirror/view": "^6.36.6", + "@floating-ui/dom": "^1.6.13", "@lezer/common": "^1.2.3", "@lezer/highlight": "^1.2.1", - "@mantine/code-highlight": "^7.17.2", - "@mantine/core": "^7.17.2", - "@mantine/dates": "^7.17.2", - "@mantine/hooks": "^7.17.2", - "@mantine/notifications": "^7.17.2", + "@mantine/code-highlight": "^7.17.5", + "@mantine/core": "^7.17.5", + "@mantine/dates": "^7.17.5", + "@mantine/hooks": "^7.17.5", + "@mantine/notifications": "^7.17.5", "@microsoft/fetch-event-source": "^2.0.1", "@nexucis/fuzzy": "^0.5.1", "@nexucis/kvsearch": "^0.9.1", - "@prometheus-io/codemirror-promql": "0.302.1", - "@reduxjs/toolkit": "^2.6.1", + "@prometheus-io/codemirror-promql": "0.303.0", + "@reduxjs/toolkit": "^2.7.0", "@tabler/icons-react": "^3.31.0", - "@tanstack/react-query": "^5.67.1", + "@tanstack/react-query": "^5.74.7", "@testing-library/jest-dom": "^6.6.3", - "@testing-library/react": "^16.2.0", + "@testing-library/react": "^16.3.0", "@types/lodash": "^4.17.16", - "@types/sanitize-html": "^2.13.0", - "@uiw/react-codemirror": "^4.23.10", + "@types/sanitize-html": "^2.15.0", + "@uiw/react-codemirror": "^4.23.11", "clsx": "^2.1.1", - "dayjs": "^1.11.10", + "dayjs": "^1.11.13", "lodash": "^4.17.21", - "react": "^19.0.0", 
- "react-dom": "^19.0.0", + "react": "^19.1.0", + "react-dom": "^19.1.0", "react-infinite-scroll-component": "^6.1.0", "react-redux": "^9.2.0", - "react-router-dom": "^7.4.0", - "sanitize-html": "^2.15.0", + "react-router-dom": "^7.5.2", + "sanitize-html": "^2.16.0", "uplot": "^1.6.32", "uplot-react": "^1.2.2", "use-query-params": "^2.2.1" }, "devDependencies": { - "@eslint/compat": "^1.2.4", + "@eslint/compat": "^1.2.8", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "^9.21.0", - "@types/react": "^19.0.12", - "@types/react-dom": "^19.0.4", - "@typescript-eslint/eslint-plugin": "^8.20.0", - "@typescript-eslint/parser": "^8.25.0", - "@vitejs/plugin-react": "^4.3.4", - "eslint": "^9.23.0", - "eslint-plugin-react-hooks": "^5.1.0", - "eslint-plugin-react-refresh": "^0.4.19", + "@eslint/js": "^9.25.1", + "@types/react": "^19.1.2", + "@types/react-dom": "^19.1.2", + "@typescript-eslint/eslint-plugin": "^8.31.0", + "@typescript-eslint/parser": "^8.31.0", + "@vitejs/plugin-react": "^4.4.1", + "eslint": "^9.25.1", + "eslint-plugin-react-hooks": "^5.2.0", + "eslint-plugin-react-refresh": "^0.4.20", "globals": "^16.0.0", "jsdom": "^25.0.1", - "postcss": "^8.4.47", + "postcss": "^8.5.3", "postcss-preset-mantine": "^1.17.0", "postcss-simple-vars": "^7.0.1", - "vite": "^6.2.4", - "vitest": "^3.0.8" + "vite": "^6.3.3", + "vitest": "^3.1.2" } } diff --git a/web/ui/mantine-ui/src/pages/AlertsPage.tsx b/web/ui/mantine-ui/src/pages/AlertsPage.tsx index afe3d3db90..a905850628 100644 --- a/web/ui/mantine-ui/src/pages/AlertsPage.tsx +++ b/web/ui/mantine-ui/src/pages/AlertsPage.tsx @@ -216,12 +216,12 @@ export default function AlertsPage() { // convenient to have in the same file IMO). const renderedPageItems = useMemo( () => - currentPageGroups.map((g, i) => ( + currentPageGroups.map((g) => ( diff --git a/web/ui/mantine-ui/src/pages/RulesPage.tsx b/web/ui/mantine-ui/src/pages/RulesPage.tsx index ef1e1d6243..054bea0939 100644 --- a/web/ui/mantine-ui/src/pages/RulesPage.tsx +++ b/web/ui/mantine-ui/src/pages/RulesPage.tsx @@ -86,12 +86,13 @@ export default function RulesPage() { (effectiveActivePage - 1) * ruleGroupsPerPage, effectiveActivePage * ruleGroupsPerPage ) - .map((g, i) => ( + .map((g) => ( diff --git a/web/ui/mantine-ui/src/promql/functionDocs.tsx b/web/ui/mantine-ui/src/promql/functionDocs.tsx index 45fcd03b7f..99ffdd74f5 100644 --- a/web/ui/mantine-ui/src/promql/functionDocs.tsx +++ b/web/ui/mantine-ui/src/promql/functionDocs.tsx @@ -1266,9 +1266,11 @@ const funcDocs: Record = {
+ histogram_stddev(v instant-vector) returns the estimated standard deviation of observations in a native - histogram, based on the geometric mean of the buckets where the observations lie. Samples that are not native - histograms are ignored and do not show up in the returned vector. + histogram. For this estimation, all observations in a bucket are assumed to have the value of the mean of the bucket boundaries. + For the zero bucket and for buckets with custom boundaries, the arithmetic mean is used. For the usual exponential buckets, + the geometric mean is used. Samples that are not native histograms are ignored and do not show up in the returned vector.
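The paragraph above describes the estimation rule for `histogram_stddev()` in prose. As a concrete illustration only (this is not the PromQL engine's implementation, and the `bucket` type and `estimatedStdDev` helper are hypothetical names invented for this sketch), the following standalone Go program places every observation at a representative value per bucket, using the arithmetic mean of the boundaries for the zero bucket and buckets with custom boundaries and the geometric mean for the usual exponential buckets, and then takes a count-weighted standard deviation over those values:

```go
package main

import (
	"fmt"
	"math"
)

// bucket is a hypothetical, simplified stand-in for a native histogram bucket.
type bucket struct {
	lower, upper float64 // bucket boundaries
	count        float64 // number of observations in the bucket
	custom       bool    // true for the zero bucket or custom bucket boundaries
}

// estimatedStdDev assumes every observation sits at the mean of its bucket's
// boundaries: the arithmetic mean for zero/custom buckets, the geometric mean
// for the usual exponential buckets. It returns the count-weighted standard
// deviation over those representative values.
func estimatedStdDev(buckets []bucket) float64 {
	var total, sum float64
	repr := make([]float64, len(buckets))
	for i, b := range buckets {
		if b.custom {
			repr[i] = (b.lower + b.upper) / 2
		} else {
			repr[i] = math.Sqrt(b.lower * b.upper)
		}
		total += b.count
		sum += b.count * repr[i]
	}
	if total == 0 {
		return math.NaN()
	}
	mean := sum / total
	var variance float64
	for i, b := range buckets {
		d := repr[i] - mean
		variance += b.count * d * d
	}
	return math.Sqrt(variance / total)
}

func main() {
	buckets := []bucket{
		{lower: -0.001, upper: 0.001, count: 2, custom: true}, // zero bucket: arithmetic mean = 0
		{lower: 1, upper: 2, count: 5},                        // exponential bucket: geometric mean = sqrt(2)
		{lower: 2, upper: 4, count: 3},                        // exponential bucket: geometric mean = sqrt(8)
	}
	fmt.Printf("estimated stddev: %.3f\n", estimatedStdDev(buckets))
}
```

In PromQL itself the same estimate is obtained simply by evaluating `histogram_stddev(some_native_histogram_metric)`; the sketch only spells out the per-bucket assumption that the updated documentation text describes.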
diff --git a/web/ui/module/codemirror-promql/package.json b/web/ui/module/codemirror-promql/package.json index 6ac0341e23..aa4749c10c 100644 --- a/web/ui/module/codemirror-promql/package.json +++ b/web/ui/module/codemirror-promql/package.json @@ -1,6 +1,6 @@ { "name": "@prometheus-io/codemirror-promql", - "version": "0.302.1", + "version": "0.303.0", "description": "a CodeMirror mode for the PromQL language", "types": "dist/esm/index.d.ts", "module": "dist/esm/index.js", @@ -29,21 +29,21 @@ }, "homepage": "https://github.com/prometheus/prometheus/blob/main/web/ui/module/codemirror-promql/README.md", "dependencies": { - "@prometheus-io/lezer-promql": "0.302.1", - "lru-cache": "^11.0.2" + "@prometheus-io/lezer-promql": "0.303.0", + "lru-cache": "^11.1.0" }, "devDependencies": { - "@codemirror/autocomplete": "^6.18.4", - "@codemirror/language": "^6.10.8", - "@codemirror/lint": "^6.8.4", + "@codemirror/autocomplete": "^6.18.6", + "@codemirror/language": "^6.11.0", + "@codemirror/lint": "^6.8.5", "@codemirror/state": "^6.5.2", - "@codemirror/view": "^6.36.4", + "@codemirror/view": "^6.36.6", "@lezer/common": "^1.2.3", - "@lezer/highlight": "^1.2.0", + "@lezer/highlight": "^1.2.1", "@lezer/lr": "^1.4.2", - "eslint-plugin-prettier": "^5.2.3", + "eslint-plugin-prettier": "^5.2.6", "isomorphic-fetch": "^3.0.0", - "nock": "^14.0.1" + "nock": "^14.0.4" }, "peerDependencies": { "@codemirror/autocomplete": "^6.4.0", diff --git a/web/ui/module/lezer-promql/package.json b/web/ui/module/lezer-promql/package.json index b4f1a793a6..de48707ab0 100644 --- a/web/ui/module/lezer-promql/package.json +++ b/web/ui/module/lezer-promql/package.json @@ -1,6 +1,6 @@ { "name": "@prometheus-io/lezer-promql", - "version": "0.302.1", + "version": "0.303.0", "description": "lezer-based PromQL grammar", "main": "dist/index.cjs", "type": "module", @@ -31,10 +31,10 @@ "test": "NODE_OPTIONS=--experimental-vm-modules jest" }, "devDependencies": { - "@lezer/generator": "^1.7.2", - "@lezer/highlight": "^1.2.0", + "@lezer/generator": "^1.7.3", + "@lezer/highlight": "^1.2.1", "@lezer/lr": "^1.4.2", - "@rollup/plugin-node-resolve": "^16.0.0" + "@rollup/plugin-node-resolve": "^16.0.1" }, "peerDependencies": { "@lezer/highlight": "^1.1.2", diff --git a/web/ui/package-lock.json b/web/ui/package-lock.json index 66bc43e8cd..c5d674ea5a 100644 --- a/web/ui/package-lock.json +++ b/web/ui/package-lock.json @@ -1,110 +1,212 @@ { "name": "prometheus-io", - "version": "0.302.1", + "version": "0.303.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "prometheus-io", - "version": "0.302.1", + "version": "0.303.0", "workspaces": [ "mantine-ui", "module/*" ], "devDependencies": { "@types/jest": "^29.5.14", - "@typescript-eslint/eslint-plugin": "^8.20.0", - "@typescript-eslint/parser": "^8.25.0", - "eslint-config-prettier": "^10.1.1", - "prettier": "^3.4.2", - "ts-jest": "^29.2.2", - "typescript": "^5.7.2", - "vite": "^6.2.4" + "@typescript-eslint/eslint-plugin": "^8.31.0", + "@typescript-eslint/parser": "^8.31.0", + "eslint-config-prettier": "^10.1.2", + "prettier": "^3.5.3", + "ts-jest": "^29.3.2", + "typescript": "^5.8.3", + "vite": "^6.3.3" } }, "mantine-ui": { "name": "@prometheus-io/mantine-ui", - "version": "0.302.1", + "version": "0.303.0", "dependencies": { - "@codemirror/autocomplete": "^6.18.4", - "@codemirror/language": "^6.10.8", - "@codemirror/lint": "^6.8.4", + "@codemirror/autocomplete": "^6.18.6", + "@codemirror/language": "^6.11.0", + "@codemirror/lint": "^6.8.5", "@codemirror/state": "^6.5.2", - 
"@codemirror/view": "^6.36.4", - "@floating-ui/dom": "^1.6.12", + "@codemirror/view": "^6.36.6", + "@floating-ui/dom": "^1.6.13", "@lezer/common": "^1.2.3", "@lezer/highlight": "^1.2.1", - "@mantine/code-highlight": "^7.17.2", - "@mantine/core": "^7.17.2", - "@mantine/dates": "^7.17.2", - "@mantine/hooks": "^7.17.2", - "@mantine/notifications": "^7.17.2", + "@mantine/code-highlight": "^7.17.5", + "@mantine/core": "^7.17.5", + "@mantine/dates": "^7.17.5", + "@mantine/hooks": "^7.17.5", + "@mantine/notifications": "^7.17.5", "@microsoft/fetch-event-source": "^2.0.1", "@nexucis/fuzzy": "^0.5.1", "@nexucis/kvsearch": "^0.9.1", - "@prometheus-io/codemirror-promql": "0.302.1", - "@reduxjs/toolkit": "^2.6.1", + "@prometheus-io/codemirror-promql": "0.303.0", + "@reduxjs/toolkit": "^2.7.0", "@tabler/icons-react": "^3.31.0", - "@tanstack/react-query": "^5.67.1", + "@tanstack/react-query": "^5.74.7", "@testing-library/jest-dom": "^6.6.3", - "@testing-library/react": "^16.2.0", + "@testing-library/react": "^16.3.0", "@types/lodash": "^4.17.16", - "@types/sanitize-html": "^2.13.0", - "@uiw/react-codemirror": "^4.23.10", + "@types/sanitize-html": "^2.15.0", + "@uiw/react-codemirror": "^4.23.11", "clsx": "^2.1.1", - "dayjs": "^1.11.10", + "dayjs": "^1.11.13", "lodash": "^4.17.21", - "react": "^19.0.0", - "react-dom": "^19.0.0", + "react": "^19.1.0", + "react-dom": "^19.1.0", "react-infinite-scroll-component": "^6.1.0", "react-redux": "^9.2.0", - "react-router-dom": "^7.4.0", - "sanitize-html": "^2.15.0", + "react-router-dom": "^7.5.2", + "sanitize-html": "^2.16.0", "uplot": "^1.6.32", "uplot-react": "^1.2.2", "use-query-params": "^2.2.1" }, "devDependencies": { - "@eslint/compat": "^1.2.4", + "@eslint/compat": "^1.2.8", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "^9.21.0", - "@types/react": "^19.0.12", - "@types/react-dom": "^19.0.4", - "@typescript-eslint/eslint-plugin": "^8.20.0", - "@typescript-eslint/parser": "^8.25.0", - "@vitejs/plugin-react": "^4.3.4", - "eslint": "^9.23.0", - "eslint-plugin-react-hooks": "^5.1.0", - "eslint-plugin-react-refresh": "^0.4.19", + "@eslint/js": "^9.25.1", + "@types/react": "^19.1.2", + "@types/react-dom": "^19.1.2", + "@typescript-eslint/eslint-plugin": "^8.31.0", + "@typescript-eslint/parser": "^8.31.0", + "@vitejs/plugin-react": "^4.4.1", + "eslint": "^9.25.1", + "eslint-plugin-react-hooks": "^5.2.0", + "eslint-plugin-react-refresh": "^0.4.20", "globals": "^16.0.0", "jsdom": "^25.0.1", - "postcss": "^8.4.47", + "postcss": "^8.5.3", "postcss-preset-mantine": "^1.17.0", "postcss-simple-vars": "^7.0.1", - "vite": "^6.2.4", - "vitest": "^3.0.8" + "vite": "^6.3.3", + "vitest": "^3.1.2" + } + }, + "mantine-ui/node_modules/@mantine/code-highlight": { + "version": "7.17.5", + "resolved": "https://registry.npmjs.org/@mantine/code-highlight/-/code-highlight-7.17.5.tgz", + "integrity": "sha512-EzOLEGSbc3Psp/dfpf9yakiWEhcMPZ8qsCuSWvEVJIC40N4VPQ7Pdz1tyN2NSI9Qa31BGzHcqXZcxVtfZ0yG5A==", + "license": "MIT", + "dependencies": { + "clsx": "^2.1.1", + "highlight.js": "^11.10.0" + }, + "peerDependencies": { + "@mantine/core": "7.17.5", + "@mantine/hooks": "7.17.5", + "react": "^18.x || ^19.x", + "react-dom": "^18.x || ^19.x" + } + }, + "mantine-ui/node_modules/@mantine/core": { + "version": "7.17.5", + "resolved": "https://registry.npmjs.org/@mantine/core/-/core-7.17.5.tgz", + "integrity": "sha512-66g/lr281cDPfucjtPw8gFo/yNS9G5iSKqysvPGuDpUBG2bEw8FsJMIsU0bMXtravToIpa3vJRrFUuPndPGnpQ==", + "license": "MIT", + "dependencies": { + "@floating-ui/react": "^0.26.28", + "clsx": "^2.1.1", + 
"react-number-format": "^5.4.3", + "react-remove-scroll": "^2.6.2", + "react-textarea-autosize": "8.5.9", + "type-fest": "^4.27.0" + }, + "peerDependencies": { + "@mantine/hooks": "7.17.5", + "react": "^18.x || ^19.x", + "react-dom": "^18.x || ^19.x" + } + }, + "mantine-ui/node_modules/@mantine/dates": { + "version": "7.17.5", + "resolved": "https://registry.npmjs.org/@mantine/dates/-/dates-7.17.5.tgz", + "integrity": "sha512-8/Qjjb9159rQhxtuHLukNfo3/AEGl7SVSRESN8MkfyP8MCQ7mgm9wqvMADb/9Q75boUA0HIN2pewlzlcd31MHQ==", + "license": "MIT", + "dependencies": { + "clsx": "^2.1.1" + }, + "peerDependencies": { + "@mantine/core": "7.17.5", + "@mantine/hooks": "7.17.5", + "dayjs": ">=1.0.0", + "react": "^18.x || ^19.x", + "react-dom": "^18.x || ^19.x" + } + }, + "mantine-ui/node_modules/@mantine/hooks": { + "version": "7.17.5", + "resolved": "https://registry.npmjs.org/@mantine/hooks/-/hooks-7.17.5.tgz", + "integrity": "sha512-Q/3AHI1fjl+W7xQ3jEoMmSoTxLqxMI2gPfxIjd73OPmRpPenYWR1zk/diirXXm2t7JOrAbmpA3/O1gzmgqzc/Q==", + "license": "MIT", + "peerDependencies": { + "react": "^18.x || ^19.x" + } + }, + "mantine-ui/node_modules/@mantine/notifications": { + "version": "7.17.5", + "resolved": "https://registry.npmjs.org/@mantine/notifications/-/notifications-7.17.5.tgz", + "integrity": "sha512-dLbrSm7ct97fb0Yv8ImVj9mVNkjnD/xkDCP1PANt/shM9xDh0TguqQTvborb+8Wxh1g+RtWVML/an5rVsHbKmg==", + "license": "MIT", + "dependencies": { + "@mantine/store": "7.17.5", + "react-transition-group": "4.4.5" + }, + "peerDependencies": { + "@mantine/core": "7.17.5", + "@mantine/hooks": "7.17.5", + "react": "^18.x || ^19.x", + "react-dom": "^18.x || ^19.x" + } + }, + "mantine-ui/node_modules/@mantine/store": { + "version": "7.17.5", + "resolved": "https://registry.npmjs.org/@mantine/store/-/store-7.17.5.tgz", + "integrity": "sha512-HQzy4rBlUFyw+39TgiAV+TGuQivMWYeQlnMctKJlaxwVY5ZTdF2IpJ/JZcjkGrmrZvBwUQ8IOMFvt2eZc20P1Q==", + "license": "MIT", + "peerDependencies": { + "react": "^18.x || ^19.x" + } + }, + "mantine-ui/node_modules/react-textarea-autosize": { + "version": "8.5.9", + "resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.5.9.tgz", + "integrity": "sha512-U1DGlIQN5AwgjTyOEnI1oCcMuEr1pv1qOtklB2l4nyMGbHzWrI0eFsYK0zos2YWqAolJyG0IWJaqWmWj5ETh0A==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.20.13", + "use-composed-ref": "^1.3.0", + "use-latest": "^1.2.1" + }, + "engines": { + "node": ">=10" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "module/codemirror-promql": { "name": "@prometheus-io/codemirror-promql", - "version": "0.302.1", + "version": "0.303.0", "license": "Apache-2.0", "dependencies": { - "@prometheus-io/lezer-promql": "0.302.1", - "lru-cache": "^11.0.2" + "@prometheus-io/lezer-promql": "0.303.0", + "lru-cache": "^11.1.0" }, "devDependencies": { - "@codemirror/autocomplete": "^6.18.4", - "@codemirror/language": "^6.10.8", - "@codemirror/lint": "^6.8.4", + "@codemirror/autocomplete": "^6.18.6", + "@codemirror/language": "^6.11.0", + "@codemirror/lint": "^6.8.5", "@codemirror/state": "^6.5.2", - "@codemirror/view": "^6.36.4", + "@codemirror/view": "^6.36.6", "@lezer/common": "^1.2.3", - "@lezer/highlight": "^1.2.0", + "@lezer/highlight": "^1.2.1", "@lezer/lr": "^1.4.2", - "eslint-plugin-prettier": "^5.2.3", + "eslint-plugin-prettier": "^5.2.6", "isomorphic-fetch": "^3.0.0", - "nock": "^14.0.1" + "nock": "^14.0.4" }, "engines": { "node": ">=12.0.0" @@ -120,13 +222,13 @@ }, "module/lezer-promql": { "name": 
"@prometheus-io/lezer-promql", - "version": "0.302.1", + "version": "0.303.0", "license": "Apache-2.0", "devDependencies": { - "@lezer/generator": "^1.7.2", - "@lezer/highlight": "^1.2.0", + "@lezer/generator": "^1.7.3", + "@lezer/highlight": "^1.2.1", "@lezer/lr": "^1.4.2", - "@rollup/plugin-node-resolve": "^16.0.0" + "@rollup/plugin-node-resolve": "^16.0.1" }, "peerDependencies": { "@lezer/highlight": "^1.1.2", @@ -167,30 +269,32 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.26.2", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.2.tgz", - "integrity": "sha512-Z0WgzSEa+aUcdiJuCIqgujCshpMWgUpgOxXotrYPSA53hA3qopNaqcJpyr0hVb1FeWdnqFA35/fUtXgBK8srQg==", + "version": "7.26.8", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.26.8.tgz", + "integrity": "sha512-oH5UPLMWR3L2wEFLnFJ1TZXqHufiTKAiLfqw5zkhS4dKXLJ10yVztfil/twG8EDTA4F/tvVNw9nOl4ZMslB8rQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.26.0", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.0.tgz", - "integrity": "sha512-i1SLeK+DzNnQ3LL/CswPCa/E5u4lh1k6IAEphON8F+cXt0t9euTshDru0q7/IqMa1PMPz5RnHuHscF8/ZJsStg==", + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.10.tgz", + "integrity": "sha512-vMqyb7XCDMPvJFFOaT9kxtiRh42GwlZEg1/uIgtZshS5a/8OaduUfCi7kynKgc3Tw/6Uo2D+db9qBttghhmxwQ==", "dev": true, + "license": "MIT", "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.26.0", - "@babel/generator": "^7.26.0", - "@babel/helper-compilation-targets": "^7.25.9", + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.10", + "@babel/helper-compilation-targets": "^7.26.5", "@babel/helper-module-transforms": "^7.26.0", - "@babel/helpers": "^7.26.0", - "@babel/parser": "^7.26.0", - "@babel/template": "^7.25.9", - "@babel/traverse": "^7.25.9", - "@babel/types": "^7.26.0", + "@babel/helpers": "^7.26.10", + "@babel/parser": "^7.26.10", + "@babel/template": "^7.26.9", + "@babel/traverse": "^7.26.10", + "@babel/types": "^7.26.10", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -216,13 +320,14 @@ } }, "node_modules/@babel/generator": { - "version": "7.26.2", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.2.tgz", - "integrity": "sha512-zevQbhbau95nkoxSq3f/DC/SC+EEOUZd3DYqfSkMhY2/wfSeaHV1Ew4vk8e+x8lja31IbyuUa2uQ3JONqKbysw==", + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.27.0.tgz", + "integrity": "sha512-VybsKvpiN1gU1sdMZIp7FcqphVVKEwcuj02x73uvcHE0PTihx1nlBcowYWhDwjpoAXRv43+gDzyggGnn1XZhVw==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/parser": "^7.26.2", - "@babel/types": "^7.26.0", + "@babel/parser": "^7.27.0", + "@babel/types": "^7.27.0", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^3.0.2" @@ -232,12 +337,13 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.9.tgz", - "integrity": "sha512-j9Db8Suy6yV/VHa4qzrj9yZfZxhLWQdVnRlXxmKLYlhWUVB1sB2G5sxuWYXk/whHD9iW76PmNzxZ4UCnTQTVEQ==", + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.0.tgz", + "integrity": 
"sha512-LVk7fbXml0H2xH34dFzKQ7TDZ2G4/rVTOrq9V+icbbadjbVxxeFeDsNHv2SrZeWoA+6ZiTyWYWtScEIW07EAcA==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.25.9", + "@babel/compat-data": "^7.26.8", "@babel/helper-validator-option": "^7.25.9", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", @@ -252,6 +358,7 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", "dev": true, + "license": "ISC", "dependencies": { "yallist": "^3.0.2" } @@ -261,6 +368,7 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "dev": true, + "license": "ISC", "bin": { "semver": "bin/semver.js" } @@ -326,6 +434,7 @@ "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.9.tgz", "integrity": "sha512-e/zv1co8pp55dNdEcCynfj9X7nyUKUXoUEwfXqaZt0omVOmDe9oOTdKStH4GmAw6zxMFs50ZayuMfHDKlO7Tfw==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -674,16 +783,17 @@ } }, "node_modules/@babel/traverse": { - "version": "7.25.9", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.9.tgz", - "integrity": "sha512-ZCuvfwOwlz/bawvAuvcj8rrithP2/N55Tzz342AkTvq4qaWbGfmCk/tKhNaV2cthijKrPAA8SRJV5WWe7IBMJw==", + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.27.0.tgz", + "integrity": "sha512-19lYZFzYVQkkHkl4Cy4WrAVcqBkgvV2YM2TU3xG6DIwO7O3ecbDPfW3yM3bjAGcqcQHi+CCtjMR3dIEHxsd6bA==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.25.9", - "@babel/generator": "^7.25.9", - "@babel/parser": "^7.25.9", - "@babel/template": "^7.25.9", - "@babel/types": "^7.25.9", + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.27.0", + "@babel/parser": "^7.27.0", + "@babel/template": "^7.27.0", + "@babel/types": "^7.27.0", "debug": "^4.3.1", "globals": "^11.1.0" }, @@ -723,9 +833,10 @@ "peer": true }, "node_modules/@codemirror/autocomplete": { - "version": "6.18.4", - "resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.18.4.tgz", - "integrity": "sha512-sFAphGQIqyQZfP2ZBsSHV7xQvo9Py0rV0dW7W3IMRdS+zDuNb2l3no78CvUaWKGfzFjI4FTrLdUSj86IGb2hRA==", + "version": "6.18.6", + "resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.18.6.tgz", + "integrity": "sha512-PHHBXFomUs5DF+9tCOM/UoW6XQ4R44lLNNhRaW9PKPTU0D7lIjRg3ElxaJnTwsl/oHiR93WSXDBrekhoUGCPtg==", + "license": "MIT", "dependencies": { "@codemirror/language": "^6.0.0", "@codemirror/state": "^6.0.0", @@ -746,9 +857,10 @@ } }, "node_modules/@codemirror/language": { - "version": "6.10.8", - "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.10.8.tgz", - "integrity": "sha512-wcP8XPPhDH2vTqf181U8MbZnW+tDyPYy0UzVOa+oHORjyT+mhhom9vBd7dApJwoDz9Nb/a8kHjJIsuA/t8vNFw==", + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.11.0.tgz", + "integrity": "sha512-A7+f++LodNNc1wGgoRDTt78cOwWm9KVezApgjOMp1W4hM0898nsqBXwF+sbePE7ZRcjN7Sa1Z5m2oN27XkmEjQ==", + "license": "MIT", "dependencies": { "@codemirror/state": "^6.0.0", "@codemirror/view": "^6.23.0", @@ -759,9 +871,9 @@ } }, "node_modules/@codemirror/lint": { - "version": "6.8.4", - "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.8.4.tgz", - "integrity": 
"sha512-u4q7PnZlJUojeRe8FJa/njJcMctISGgPQ4PnWsd9268R4ZTtU+tfFYmwkBvgcrK2+QQ8tYFVALVb5fVJykKc5A==", + "version": "6.8.5", + "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.8.5.tgz", + "integrity": "sha512-s3n3KisH7dx3vsoeGMxsbRAgKe4O1vbrnKBClm99PU0fWxmxsx5rR2PfqQgIt+2MMJBHbiJ5rfIdLYfB9NNvsA==", "license": "MIT", "dependencies": { "@codemirror/state": "^6.0.0", @@ -802,9 +914,9 @@ } }, "node_modules/@codemirror/view": { - "version": "6.36.4", - "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.36.4.tgz", - "integrity": "sha512-ZQ0V5ovw/miKEXTvjgzRyjnrk9TwriUB1k4R5p7uNnHR9Hus+D1SXHGdJshijEzPFjU25xea/7nhIeSqYFKdbA==", + "version": "6.36.6", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.36.6.tgz", + "integrity": "sha512-uxugGLet+Nzp0Jcit8Hn3LypM8ioMLKTsdf8FRoT3HWvZtb9GhaWMe0Cc15rz90Ljab4YFJiAulmIVB74OY0IQ==", "license": "MIT", "dependencies": { "@codemirror/state": "^6.5.0", @@ -1277,10 +1389,11 @@ } }, "node_modules/@eslint/compat": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/@eslint/compat/-/compat-1.2.4.tgz", - "integrity": "sha512-S8ZdQj/N69YAtuqFt7653jwcvuUj131+6qGLUyDqfDg1OIoBQ66OCuXC473YQfO2AaxITTutiRQiDwoo7ZLYyg==", + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@eslint/compat/-/compat-1.2.8.tgz", + "integrity": "sha512-LqCYHdWL/QqKIJuZ/ucMAv8d4luKGs4oCPgpt8mWztQAtPrHfXKQ/XAUc8ljCHAfJCn6SvkpTcGt5Tsh8saowA==", "dev": true, + "license": "Apache-2.0", "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, @@ -1294,9 +1407,9 @@ } }, "node_modules/@eslint/config-array": { - "version": "0.19.2", - "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.19.2.tgz", - "integrity": "sha512-GNKqxfHG2ySmJOBSHg7LxeUx4xpuCoFjacmlCoYWEbaPXLwvfIjixRI12xCQZeULksQb23uiA8F40w5TojpV7w==", + "version": "0.20.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.20.0.tgz", + "integrity": "sha512-fxlS1kkIjx8+vy2SjuCB94q3htSNrufYTXubwiBFeaQHbH6Ipi43gFJq2zCMt6PHhImH3Xmr0NksKDvchWlpQQ==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -1309,9 +1422,9 @@ } }, "node_modules/@eslint/config-helpers": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.2.0.tgz", - "integrity": "sha512-yJLLmLexii32mGrhW29qvU3QBVTu0GUmEf/J4XsBtVhp4JkIUFN/BjWqTF63yRvGApIDpZm5fa97LtYtINmfeQ==", + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.2.1.tgz", + "integrity": "sha512-RI17tsD2frtDu/3dmI7QRrD4bedNKPM08ziRYaC5AhkGrzIAJelm9kJU1TznK+apx6V+cqRz8tfpEeG3oIyjxw==", "dev": true, "license": "Apache-2.0", "engines": { @@ -1319,9 +1432,9 @@ } }, "node_modules/@eslint/core": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.12.0.tgz", - "integrity": "sha512-cmrR6pytBuSMTaBweKoGMwu3EiHiEC+DoyupPmlZ0HxBJBtIxwe+j/E4XPIKNx+Q74c8lXKPwYawBf5glsTkHg==", + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.13.0.tgz", + "integrity": "sha512-yfkgDw1KR66rkT5A8ci4irzDysN7FRpq3ttJolR88OqQikAWqwA8j5VZyas+vjyBNFIJ7MfybJ9plMILI2UrCw==", "dev": true, "license": "Apache-2.0", "dependencies": { @@ -1368,9 +1481,9 @@ } }, "node_modules/@eslint/js": { - "version": "9.23.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.23.0.tgz", - "integrity": "sha512-35MJ8vCPU0ZMxo7zfev2pypqTwWTofFZO6m4KAtdoFhRpLJUpHTZZ+KB3C7Hb1d7bULYwO4lJXGCi5Se+8OMbw==", + "version": "9.25.1", + "resolved": 
"https://registry.npmjs.org/@eslint/js/-/js-9.25.1.tgz", + "integrity": "sha512-dEIwmjntEx8u3Uvv+kr3PDeeArL8Hw07H9kyYxCjnM9pBjfEhk6uLXSchxxzgiwtRhhzVzqmUSDFBOi1TuZ7qg==", "dev": true, "license": "MIT", "engines": { @@ -1388,13 +1501,13 @@ } }, "node_modules/@eslint/plugin-kit": { - "version": "0.2.7", - "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.7.tgz", - "integrity": "sha512-JubJ5B2pJ4k4yGxaNLdbjrnk9d/iDz6/q8wOilpIowd6PJPgaxCuHBnBszq7Ce2TyMrywm5r4PnKm6V3iiZF+g==", + "version": "0.2.8", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.2.8.tgz", + "integrity": "sha512-ZAoA40rNMPwSm+AeHpCq8STiNAwzWLJuP8Xv4CHIc9wv/PSuExjMrmjfYNj682vW0OOiZ1HKxzvjQr9XZIisQA==", "dev": true, "license": "Apache-2.0", "dependencies": { - "@eslint/core": "^0.12.0", + "@eslint/core": "^0.13.0", "levn": "^0.4.1" }, "engines": { @@ -1411,13 +1524,13 @@ } }, "node_modules/@floating-ui/dom": { - "version": "1.6.12", - "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.12.tgz", - "integrity": "sha512-NP83c0HjokcGVEMeoStg317VD9W7eDlGK7457dMBANbKA6GJZdc7rjujdgqzTaz93jkGgc5P/jeWbaCHnMNc+w==", + "version": "1.6.13", + "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.13.tgz", + "integrity": "sha512-umqzocjDgNRGTuO7Q8CU32dkHkECqI8ZdMZ5Swb6QAM0t5rnlrN3lGo1hdpscRd3WS8T6DKYK4ephgIH9iRh3w==", "license": "MIT", "dependencies": { "@floating-ui/core": "^1.6.0", - "@floating-ui/utils": "^0.2.8" + "@floating-ui/utils": "^0.2.9" } }, "node_modules/@floating-ui/react": { @@ -1447,9 +1560,10 @@ } }, "node_modules/@floating-ui/utils": { - "version": "0.2.8", - "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.8.tgz", - "integrity": "sha512-kym7SodPp8/wloecOpcmSnWJsK7M0E5Wg8UcFA+uO4B9s5d0ywXOEro/8HM9x0rW+TljRzul/14UYz3TleT3ig==" + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.9.tgz", + "integrity": "sha512-MDWhGtE+eHw5JW7lq4qhc5yRLS11ERl1c7Z6Xd0a58DozHES6EnNNwUWbMiG4J9Cgj053Bhk8zvlhFYKVhULwg==", + "license": "MIT" }, "node_modules/@humanfs/core": { "version": "0.19.1", @@ -2040,9 +2154,9 @@ "integrity": "sha512-w7ojc8ejBqr2REPsWxJjrMFsA/ysDCFICn8zEOR9mrqzOu2amhITYuLD8ag6XZf0CFXDrhKqw7+tW8cX66NaDA==" }, "node_modules/@lezer/generator": { - "version": "1.7.2", - "resolved": "https://registry.npmjs.org/@lezer/generator/-/generator-1.7.2.tgz", - "integrity": "sha512-CwgULPOPPmH54tv4gki18bElLCdJ1+FBC+nGVSVD08vFWDsMjS7KEjNTph9JOypDnet90ujN3LzQiW3CyVODNQ==", + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/@lezer/generator/-/generator-1.7.3.tgz", + "integrity": "sha512-vAI2O1tPF8QMMgp+bdUeeJCneJNkOZvqsrtyb4ohnFVFdboSqPwBEacnt0HH4E+5h+qsIwTHUSAhffU4hzKl1A==", "dev": true, "license": "MIT", "dependencies": { @@ -2071,91 +2185,6 @@ "@lezer/common": "^1.0.0" } }, - "node_modules/@mantine/code-highlight": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/code-highlight/-/code-highlight-7.17.2.tgz", - "integrity": "sha512-sUS3ZsZ1KAOgywZdnR448V9pVe+O9tZLUZwtZKNF9SrxUJazddxD3TS+e/qFLUocQQwhSa/zHMSmbtPbfiDvMg==", - "license": "MIT", - "dependencies": { - "clsx": "^2.1.1", - "highlight.js": "^11.10.0" - }, - "peerDependencies": { - "@mantine/core": "7.17.2", - "@mantine/hooks": "7.17.2", - "react": "^18.x || ^19.x", - "react-dom": "^18.x || ^19.x" - } - }, - "node_modules/@mantine/core": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/core/-/core-7.17.2.tgz", - "integrity": 
"sha512-R6MYhitJ0JEgrhadd31Nw9FhRaQwDHjXUs5YIlitKH/fTOz9gKSxKjzmNng3bEBQCcbEDOkZj3FRcBgTUh/F0Q==", - "license": "MIT", - "dependencies": { - "@floating-ui/react": "^0.26.28", - "clsx": "^2.1.1", - "react-number-format": "^5.4.3", - "react-remove-scroll": "^2.6.2", - "react-textarea-autosize": "8.5.6", - "type-fest": "^4.27.0" - }, - "peerDependencies": { - "@mantine/hooks": "7.17.2", - "react": "^18.x || ^19.x", - "react-dom": "^18.x || ^19.x" - } - }, - "node_modules/@mantine/dates": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/dates/-/dates-7.17.2.tgz", - "integrity": "sha512-7bB992j8f+uEi280jab0/8i5yfsN/3oSrMDFwatZ+7XSDUwiP0YFib/FVX0pNSSqdFpbXhUmsZEECX71QtHw+Q==", - "license": "MIT", - "dependencies": { - "clsx": "^2.1.1" - }, - "peerDependencies": { - "@mantine/core": "7.17.2", - "@mantine/hooks": "7.17.2", - "dayjs": ">=1.0.0", - "react": "^18.x || ^19.x", - "react-dom": "^18.x || ^19.x" - } - }, - "node_modules/@mantine/hooks": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/hooks/-/hooks-7.17.2.tgz", - "integrity": "sha512-tbErVcGZu0E4dSmE6N0k6Tv1y9R3SQmmQgwqorcc+guEgKMdamc36lucZGlJnSGUmGj+WLUgELkEQ0asdfYBDA==", - "license": "MIT", - "peerDependencies": { - "react": "^18.x || ^19.x" - } - }, - "node_modules/@mantine/notifications": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/notifications/-/notifications-7.17.2.tgz", - "integrity": "sha512-vg0L8cmihz0ODg4WJ9MAyK06WPt/6g67ksIUFxd4F8RfdJbIMLTsNG9yWoSfuhtXenUg717KaA917IWLjDSaqw==", - "license": "MIT", - "dependencies": { - "@mantine/store": "7.17.2", - "react-transition-group": "4.4.5" - }, - "peerDependencies": { - "@mantine/core": "7.17.2", - "@mantine/hooks": "7.17.2", - "react": "^18.x || ^19.x", - "react-dom": "^18.x || ^19.x" - } - }, - "node_modules/@mantine/store": { - "version": "7.17.2", - "resolved": "https://registry.npmjs.org/@mantine/store/-/store-7.17.2.tgz", - "integrity": "sha512-UoMUYQK/z58hMueCkpDIXc49gPgrVO/zcpb0k+B7MFU51EIUiFzHLxLFBmWrgCAM6rzJORqN8JjyCd/PB9j4aw==", - "license": "MIT", - "peerDependencies": { - "react": "^18.x || ^19.x" - } - }, "node_modules/@marijn/find-cluster-break": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@marijn/find-cluster-break/-/find-cluster-break-1.0.2.tgz", @@ -2168,9 +2197,9 @@ "integrity": "sha512-W6CLUJ2eBMw3Rec70qrsEW0jOm/3twwJv21mrmj2yORiaVmVYGS4sSS5yUwvQc1ZlDLYGPnClVWmUUMagKNsfA==" }, "node_modules/@mswjs/interceptors": { - "version": "0.37.6", - "resolved": "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.37.6.tgz", - "integrity": "sha512-wK+5pLK5XFmgtH3aQ2YVvA3HohS3xqV/OxuVOdNx9Wpnz7VE/fnC+e1A7ln6LFYeck7gOJ/dsZV6OLplOtAJ2w==", + "version": "0.38.6", + "resolved": "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.38.6.tgz", + "integrity": "sha512-qFlpmObPqeUs4u3oFYv/OM/xyX+pNa5TRAjqjvMhbGYlyMhzSrE5UfncL2rUcEeVfD9Gebgff73hPwqcOwJQNA==", "dev": true, "license": "MIT", "dependencies": { @@ -2264,16 +2293,16 @@ "license": "MIT" }, "node_modules/@pkgr/core": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.1.1.tgz", - "integrity": "sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==", + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.4.tgz", + "integrity": "sha512-ROFF39F6ZrnzSUEmQQZUar0Jt4xVoP9WnDRdWwF4NNcXs3xBTLgBUDoOwW141y1jP+S8nahIbdxbFC7IShw9Iw==", "dev": true, "license": "MIT", "engines": { "node": "^12.20.0 || ^14.18.0 || 
>=16.0.0" }, "funding": { - "url": "https://opencollective.com/unts" + "url": "https://opencollective.com/pkgr" } }, "node_modules/@prometheus-io/codemirror-promql": { @@ -2289,11 +2318,13 @@ "link": true }, "node_modules/@reduxjs/toolkit": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/@reduxjs/toolkit/-/toolkit-2.6.1.tgz", - "integrity": "sha512-SSlIqZNYhqm/oMkXbtofwZSt9lrncblzo6YcZ9zoX+zLngRBrCOjK4lNLdkNucJF58RHOWrD9txT3bT3piH7Zw==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@reduxjs/toolkit/-/toolkit-2.7.0.tgz", + "integrity": "sha512-XVwolG6eTqwV0N8z/oDlN93ITCIGIop6leXlGJI/4EKy+0POYkR+ABHRSdGXY+0MQvJBP8yAzh+EYFxTuvmBiQ==", "license": "MIT", "dependencies": { + "@standard-schema/spec": "^1.0.0", + "@standard-schema/utils": "^0.3.0", "immer": "^10.0.3", "redux": "^5.0.1", "redux-thunk": "^3.1.0", @@ -2313,10 +2344,11 @@ } }, "node_modules/@rollup/plugin-node-resolve": { - "version": "16.0.0", - "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-16.0.0.tgz", - "integrity": "sha512-0FPvAeVUT/zdWoO0jnb/V5BlBsUSNfkIOtFHzMO4H9MOklrmQFY6FduVHKucNb/aTFxvnGhj4MNj/T1oNdDfNg==", + "version": "16.0.1", + "resolved": "https://registry.npmjs.org/@rollup/plugin-node-resolve/-/plugin-node-resolve-16.0.1.tgz", + "integrity": "sha512-tk5YCxJWIG81umIvNkSod2qK5KyQW19qcBF/B78n1bjtOON6gzKoVeSzAE8yHCZEDmqkHKkxplExA8KzdJLJpA==", "dev": true, + "license": "MIT", "dependencies": { "@rollup/pluginutils": "^5.0.1", "@types/resolve": "1.20.2", @@ -2661,6 +2693,18 @@ "@sinonjs/commons": "^3.0.0" } }, + "node_modules/@standard-schema/spec": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", + "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", + "license": "MIT" + }, + "node_modules/@standard-schema/utils": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@standard-schema/utils/-/utils-0.3.0.tgz", + "integrity": "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==", + "license": "MIT" + }, "node_modules/@tabler/icons": { "version": "3.31.0", "resolved": "https://registry.npmjs.org/@tabler/icons/-/icons-3.31.0.tgz", @@ -2688,9 +2732,9 @@ } }, "node_modules/@tanstack/query-core": { - "version": "5.67.1", - "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.67.1.tgz", - "integrity": "sha512-AkFmuukVejyqVIjEQoFhLb3q+xHl7JG8G9cANWTMe3s8iKzD9j1VBSYXgCjy6vm6xM8cUCR9zP2yqWxY9pTWOA==", + "version": "5.74.7", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.74.7.tgz", + "integrity": "sha512-X3StkN/Y6KGHndTjJf8H8th7AX4bKfbRpiVhVqevf0QWlxl6DhyJ0TYG3R0LARa/+xqDwzU9mA4pbJxzPCI29A==", "license": "MIT", "funding": { "type": "github", @@ -2698,12 +2742,12 @@ } }, "node_modules/@tanstack/react-query": { - "version": "5.67.1", - "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.67.1.tgz", - "integrity": "sha512-fH5u4JLwB6A+wLFdi8wWBWAYoJV5deYif2OveJ26ktAWjU499uvVFS1wPWnyEyq5LvZX1MZInvv9QRaIZANRaQ==", + "version": "5.74.7", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.74.7.tgz", + "integrity": "sha512-u4o/RIWnnrq26orGZu2NDPwmVof1vtAiiV6KYUXd49GuK+8HX+gyxoAYqIaZogvCE1cqOuZAhQKcrKGYGkrLxg==", "license": "MIT", "dependencies": { - "@tanstack/query-core": "5.67.1" + "@tanstack/query-core": "5.74.7" }, "funding": { "type": "github", @@ -2772,9 +2816,9 @@ "license": "MIT" }, 
"node_modules/@testing-library/react": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.2.0.tgz", - "integrity": "sha512-2cSskAvA1QNtKc8Y9VJQRv0tm3hLVgxRGDB+KYhIaPQJ1I+RHbhIXcM+zClKXzMes/wshsMVzf4B9vS4IZpqDQ==", + "version": "16.3.0", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.0.tgz", + "integrity": "sha512-kFSyxiEDwv1WLl2fgsq6pPBbw5aWKrsY2/noi1Id0TK0UParSF62oFQFGHXIyaG4pp2tEub/Zlel+fjjZILDsw==", "license": "MIT", "dependencies": { "@babel/runtime": "^7.12.5" @@ -2850,12 +2894,6 @@ "@babel/types": "^7.20.7" } }, - "node_modules/@types/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", - "license": "MIT" - }, "node_modules/@types/estree": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", @@ -2949,7 +2987,8 @@ "version": "7.0.15", "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/@types/lodash": { "version": "4.17.16", @@ -2967,9 +3006,9 @@ } }, "node_modules/@types/react": { - "version": "19.0.12", - "resolved": "https://registry.npmjs.org/@types/react/-/react-19.0.12.tgz", - "integrity": "sha512-V6Ar115dBDrjbtXSrS+/Oruobc+qVbbUxDFC1RSbRqLt5SYvxxyIDrSC85RWml54g+jfNeEMZhEj7wW07ONQhA==", + "version": "19.1.2", + "resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.2.tgz", + "integrity": "sha512-oxLPMytKchWGbnQM9O7D67uPa9paTNxO7jVoNMXgkkErULBPhPARCfkKL9ytcIJJRGjbsVwW4ugJzyFFvm/Tiw==", "devOptional": true, "license": "MIT", "dependencies": { @@ -2977,9 +3016,9 @@ } }, "node_modules/@types/react-dom": { - "version": "19.0.4", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.0.4.tgz", - "integrity": "sha512-4fSQ8vWFkg+TGhePfUzVmat3eC14TXYSsiiDSLI0dVLsrm9gZFABjPy/Qu6TKgl1tq1Bu1yDsuQgY3A3DOjCcg==", + "version": "19.1.2", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-19.1.2.tgz", + "integrity": "sha512-XGJkWF41Qq305SKWEILa1O8vzhb3aOo3ogBlSmiqNko/WmRb6QIaweuZCXjKygVDXpzXb5wyxKTSOsmkuqj+Qw==", "devOptional": true, "license": "MIT", "peerDependencies": { @@ -2994,9 +3033,9 @@ "license": "MIT" }, "node_modules/@types/sanitize-html": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/@types/sanitize-html/-/sanitize-html-2.13.0.tgz", - "integrity": "sha512-X31WxbvW9TjIhZZNyNBZ/p5ax4ti7qsNDBDEnH4zAgmEh35YnFD1UiS6z9Cd34kKm0LslFW0KPmTQzu/oGtsqQ==", + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/@types/sanitize-html/-/sanitize-html-2.15.0.tgz", + "integrity": "sha512-71Z6PbYsVKfp4i6Jvr37s5ql6if1Q/iJQT80NbaSi7uGaG8CqBMXP0pk/EsURAOuGdk5IJCd/vnzKrR7S3Txsw==", "license": "MIT", "dependencies": { "htmlparser2": "^8.0.0" @@ -3033,21 +3072,21 @@ "license": "MIT" }, "node_modules/@typescript-eslint/eslint-plugin": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.20.0.tgz", - "integrity": "sha512-naduuphVw5StFfqp4Gq4WhIBE2gN1GEmMUExpJYknZJdRnc+2gDzB8Z3+5+/Kv33hPQRDGzQO/0opHE72lZZ6A==", + "version": "8.31.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.31.0.tgz", + "integrity": 
"sha512-evaQJZ/J/S4wisevDvC1KFZkPzRetH8kYZbkgcTRyql3mcKsf+ZFDV1BVWUGTCAW5pQHoqn5gK5b8kn7ou9aFQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/regexpp": "^4.10.0", - "@typescript-eslint/scope-manager": "8.20.0", - "@typescript-eslint/type-utils": "8.20.0", - "@typescript-eslint/utils": "8.20.0", - "@typescript-eslint/visitor-keys": "8.20.0", + "@typescript-eslint/scope-manager": "8.31.0", + "@typescript-eslint/type-utils": "8.31.0", + "@typescript-eslint/utils": "8.31.0", + "@typescript-eslint/visitor-keys": "8.31.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", - "ts-api-utils": "^2.0.0" + "ts-api-utils": "^2.0.1" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -3059,20 +3098,20 @@ "peerDependencies": { "@typescript-eslint/parser": "^8.0.0 || ^8.0.0-alpha.0", "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" + "typescript": ">=4.8.4 <5.9.0" } }, "node_modules/@typescript-eslint/parser": { - "version": "8.25.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.25.0.tgz", - "integrity": "sha512-4gbs64bnbSzu4FpgMiQ1A+D+urxkoJk/kqlDJ2W//5SygaEiAP2B4GoS7TEdxgwol2el03gckFV9lJ4QOMiiHg==", + "version": "8.31.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.31.0.tgz", + "integrity": "sha512-67kYYShjBR0jNI5vsf/c3WG4u+zDnCTHTPqVMQguffaWWFs7artgwKmfwdifl+r6XyM5LYLas/dInj2T0SgJyw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/scope-manager": "8.25.0", - "@typescript-eslint/types": "8.25.0", - "@typescript-eslint/typescript-estree": "8.25.0", - "@typescript-eslint/visitor-keys": "8.25.0", + "@typescript-eslint/scope-manager": "8.31.0", + "@typescript-eslint/types": "8.31.0", + "@typescript-eslint/typescript-estree": "8.31.0", + "@typescript-eslint/visitor-keys": "8.31.0", "debug": "^4.3.4" }, "engines": { @@ -3084,18 +3123,18 @@ }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" + "typescript": ">=4.8.4 <5.9.0" } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/scope-manager": { - "version": "8.25.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.25.0.tgz", - "integrity": "sha512-6PPeiKIGbgStEyt4NNXa2ru5pMzQ8OYKO1hX1z53HMomrmiSB+R5FmChgQAP1ro8jMtNawz+TRQo/cSXrauTpg==", + "node_modules/@typescript-eslint/scope-manager": { + "version": "8.31.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.31.0.tgz", + "integrity": "sha512-knO8UyF78Nt8O/B64i7TlGXod69ko7z6vJD9uhSlm0qkAbGeRUSudcm0+K/4CrRjrpiHfBCjMWlc08Vav1xwcw==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.25.0", - "@typescript-eslint/visitor-keys": "8.25.0" + "@typescript-eslint/types": "8.31.0", + "@typescript-eslint/visitor-keys": "8.31.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -3105,10 +3144,34 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/types": { - "version": "8.25.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.25.0.tgz", - "integrity": "sha512-+vUe0Zb4tkNgznQwicsvLUJgZIRs6ITeWSCclX1q85pR1iOiaj+4uZJIUp//Z27QWu5Cseiw3O3AR8hVpax7Aw==", + "node_modules/@typescript-eslint/type-utils": { + "version": "8.31.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.31.0.tgz", + "integrity": 
"sha512-DJ1N1GdjI7IS7uRlzJuEDCgDQix3ZVYVtgeWEyhyn4iaoitpMBX6Ndd488mXSx0xah/cONAkEaYyylDyAeHMHg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@typescript-eslint/typescript-estree": "8.31.0", + "@typescript-eslint/utils": "8.31.0", + "debug": "^4.3.4", + "ts-api-utils": "^2.0.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^8.57.0 || ^9.0.0", + "typescript": ">=4.8.4 <5.9.0" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "8.31.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.31.0.tgz", + "integrity": "sha512-Ch8oSjVyYyJxPQk8pMiP2FFGYatqXQfQIaMp+TpuuLlDachRWpUAeEu1u9B/v/8LToehUIWyiKcA/w5hUFRKuQ==", "dev": true, "license": "MIT", "engines": { @@ -3119,15 +3182,15 @@ "url": "https://opencollective.com/typescript-eslint" } }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/typescript-estree": { - "version": "8.25.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.25.0.tgz", - "integrity": "sha512-ZPaiAKEZ6Blt/TPAx5Ot0EIB/yGtLI2EsGoY6F7XKklfMxYQyvtL+gT/UCqkMzO0BVFHLDlzvFqQzurYahxv9Q==", + "node_modules/@typescript-eslint/typescript-estree": { + "version": "8.31.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.31.0.tgz", + "integrity": "sha512-xLmgn4Yl46xi6aDSZ9KkyfhhtnYI15/CvHbpOy/eR5NWhK/BK8wc709KKwhAR0m4ZKRP7h07bm4BWUYOCuRpQQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.25.0", - "@typescript-eslint/visitor-keys": "8.25.0", + "@typescript-eslint/types": "8.31.0", + "@typescript-eslint/visitor-keys": "8.31.0", "debug": "^4.3.4", "fast-glob": "^3.3.2", "is-glob": "^4.0.3", @@ -3143,134 +3206,7 @@ "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "typescript": ">=4.8.4 <5.8.0" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/@typescript-eslint/visitor-keys": { - "version": "8.25.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.25.0.tgz", - "integrity": "sha512-kCYXKAum9CecGVHGij7muybDfTS2sD3t0L4bJsEZLkyrXUImiCTq1M3LG2SRtOhiHFwMR9wAFplpT6XHYjTkwQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.25.0", - "eslint-visitor-keys": "^4.2.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@typescript-eslint/parser/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": 
"https://github.com/sponsors/isaacs" - } - }, - "node_modules/@typescript-eslint/scope-manager": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.20.0.tgz", - "integrity": "sha512-J7+VkpeGzhOt3FeG1+SzhiMj9NzGD/M6KoGn9f4dbz3YzK9hvbhVTmLj/HiTp9DazIzJ8B4XcM80LrR9Dm1rJw==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.20.0", - "@typescript-eslint/visitor-keys": "8.20.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/type-utils": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.20.0.tgz", - "integrity": "sha512-bPC+j71GGvA7rVNAHAtOjbVXbLN5PkwqMvy1cwGeaxUoRQXVuKCebRoLzm+IPW/NtFFpstn1ummSIasD5t60GA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/typescript-estree": "8.20.0", - "@typescript-eslint/utils": "8.20.0", - "debug": "^4.3.4", - "ts-api-utils": "^2.0.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" - } - }, - "node_modules/@typescript-eslint/types": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.20.0.tgz", - "integrity": "sha512-cqaMiY72CkP+2xZRrFt3ExRBu0WmVitN/rYPZErA80mHjHx/Svgp8yfbzkJmDoQ/whcytOPO9/IZXnOc+wigRA==", - "dev": true, - "license": "MIT", - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - } - }, - "node_modules/@typescript-eslint/typescript-estree": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.20.0.tgz", - "integrity": "sha512-Y7ncuy78bJqHI35NwzWol8E0X7XkRVS4K4P4TCyzWkOJih5NDvtoRDW4Ba9YJJoB2igm9yXDdYI/+fkiiAxPzA==", - "dev": true, - "license": "MIT", - "dependencies": { - "@typescript-eslint/types": "8.20.0", - "@typescript-eslint/visitor-keys": "8.20.0", - "debug": "^4.3.4", - "fast-glob": "^3.3.2", - "is-glob": "^4.0.3", - "minimatch": "^9.0.4", - "semver": "^7.6.0", - "ts-api-utils": "^2.0.0" - }, - "engines": { - "node": "^18.18.0 || ^20.9.0 || >=21.1.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/typescript-eslint" - }, - "peerDependencies": { - "typescript": ">=4.8.4 <5.8.0" + "typescript": ">=4.8.4 <5.9.0" } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": { @@ -3300,16 +3236,16 @@ } }, "node_modules/@typescript-eslint/utils": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.20.0.tgz", - "integrity": "sha512-dq70RUw6UK9ei7vxc4KQtBRk7qkHZv447OUZ6RPQMQl71I3NZxQJX/f32Smr+iqWrB02pHKn2yAdHBb0KNrRMA==", + "version": "8.31.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.31.0.tgz", + "integrity": "sha512-qi6uPLt9cjTFxAb1zGNgTob4x9ur7xC6mHQJ8GwEzGMGE9tYniublmJaowOJ9V2jUzxrltTPfdG2nKlWsq0+Ww==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "8.20.0", - "@typescript-eslint/types": "8.20.0", - 
"@typescript-eslint/typescript-estree": "8.20.0" + "@typescript-eslint/scope-manager": "8.31.0", + "@typescript-eslint/types": "8.31.0", + "@typescript-eslint/typescript-estree": "8.31.0" }, "engines": { "node": "^18.18.0 || ^20.9.0 || >=21.1.0" @@ -3320,17 +3256,17 @@ }, "peerDependencies": { "eslint": "^8.57.0 || ^9.0.0", - "typescript": ">=4.8.4 <5.8.0" + "typescript": ">=4.8.4 <5.9.0" } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "8.20.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.20.0.tgz", - "integrity": "sha512-v/BpkeeYAsPkKCkR8BDwcno0llhzWVqPOamQrAEMdpZav2Y9OVjd9dwJyBLJWwf335B5DmlifECIkZRJCaGaHA==", + "version": "8.31.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.31.0.tgz", + "integrity": "sha512-QcGHmlRHWOl93o64ZUMNewCdwKGU6WItOU52H0djgNmn1EOrhVudrDzXz4OycCRSCPwFCDrE2iIt5vmuUdHxuQ==", "dev": true, "license": "MIT", "dependencies": { - "@typescript-eslint/types": "8.20.0", + "@typescript-eslint/types": "8.31.0", "eslint-visitor-keys": "^4.2.0" }, "engines": { @@ -3342,9 +3278,9 @@ } }, "node_modules/@uiw/codemirror-extensions-basic-setup": { - "version": "4.23.10", - "resolved": "https://registry.npmjs.org/@uiw/codemirror-extensions-basic-setup/-/codemirror-extensions-basic-setup-4.23.10.tgz", - "integrity": "sha512-zpbmSeNs3OU/f/Eyd6brFnjsBUYwv2mFjWxlAsIRSwTlW+skIT60rQHFBSfsj/5UVSxSLWVeUYczN7AyXvgTGQ==", + "version": "4.23.11", + "resolved": "https://registry.npmjs.org/@uiw/codemirror-extensions-basic-setup/-/codemirror-extensions-basic-setup-4.23.11.tgz", + "integrity": "sha512-U31s5LEqEKFU4SPz1tldfrPohKddxC02z1kTnWe50k+K0CYK+PtISD5ufH/PVC0EgGL+c0TydTx72nRMwaeC4A==", "license": "MIT", "dependencies": { "@codemirror/autocomplete": "^6.0.0", @@ -3369,16 +3305,16 @@ } }, "node_modules/@uiw/react-codemirror": { - "version": "4.23.10", - "resolved": "https://registry.npmjs.org/@uiw/react-codemirror/-/react-codemirror-4.23.10.tgz", - "integrity": "sha512-AbN4eVHOL4ckRuIXpZxkzEqL/1ChVA+BSdEnAKjIB68pLQvKsVoYbiFP8zkXkYc4+Fcgq5KbAjvYqdo4ewemKw==", + "version": "4.23.11", + "resolved": "https://registry.npmjs.org/@uiw/react-codemirror/-/react-codemirror-4.23.11.tgz", + "integrity": "sha512-oMUXl/yu/a8qKy7w7q769kH2TPlvPln6IpvkjXQX90ziD8GHRFLDz+Ij51D2RRwB3glrkPnhCN9YC+PDlnmhqA==", "license": "MIT", "dependencies": { "@babel/runtime": "^7.18.6", "@codemirror/commands": "^6.1.0", "@codemirror/state": "^6.1.1", "@codemirror/theme-one-dark": "^6.0.0", - "@uiw/codemirror-extensions-basic-setup": "4.23.10", + "@uiw/codemirror-extensions-basic-setup": "4.23.11", "codemirror": "^6.0.0" }, "funding": { @@ -3395,16 +3331,17 @@ } }, "node_modules/@vitejs/plugin-react": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.3.4.tgz", - "integrity": "sha512-SCCPBJtYLdE8PX/7ZQAs1QAZ8Jqwih+0VBLum1EGqmCCQal+MIUqLCzj3ZUy8ufbC0cAM4LRlSTm7IQJwWT4ug==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.4.1.tgz", + "integrity": "sha512-IpEm5ZmeXAP/osiBXVVP5KjFMzbWOonMs0NaQQl+xYnUAcq4oHUBsF2+p4MgKWG4YMmFYJU8A6sxRPuowllm6w==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/core": "^7.26.0", + "@babel/core": "^7.26.10", "@babel/plugin-transform-react-jsx-self": "^7.25.9", "@babel/plugin-transform-react-jsx-source": "^7.25.9", "@types/babel__core": "^7.20.5", - "react-refresh": "^0.14.2" + "react-refresh": "^0.17.0" }, "engines": { "node": "^14.18.0 || >=16.0.0" @@ -3414,14 +3351,14 @@ 
} }, "node_modules/@vitest/expect": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.0.8.tgz", - "integrity": "sha512-Xu6TTIavTvSSS6LZaA3EebWFr6tsoXPetOWNMOlc7LO88QVVBwq2oQWBoDiLCN6YTvNYsGSjqOO8CAdjom5DCQ==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-3.1.2.tgz", + "integrity": "sha512-O8hJgr+zREopCAqWl3uCVaOdqJwZ9qaDwUP7vy3Xigad0phZe9APxKhPcDNqYYi0rX5oMvwJMSCAXY2afqeTSA==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "3.0.8", - "@vitest/utils": "3.0.8", + "@vitest/spy": "3.1.2", + "@vitest/utils": "3.1.2", "chai": "^5.2.0", "tinyrainbow": "^2.0.0" }, @@ -3430,13 +3367,13 @@ } }, "node_modules/@vitest/mocker": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.0.8.tgz", - "integrity": "sha512-n3LjS7fcW1BCoF+zWZxG7/5XvuYH+lsFg+BDwwAz0arIwHQJFUEsKBQ0BLU49fCxuM/2HSeBPHQD8WjgrxMfow==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-3.1.2.tgz", + "integrity": "sha512-kOtd6K2lc7SQ0mBqYv/wdGedlqPdM/B38paPY+OwJ1XiNi44w3Fpog82UfOibmHaV9Wod18A09I9SCKLyDMqgw==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/spy": "3.0.8", + "@vitest/spy": "3.1.2", "estree-walker": "^3.0.3", "magic-string": "^0.30.17" }, @@ -3457,9 +3394,9 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.0.8.tgz", - "integrity": "sha512-BNqwbEyitFhzYMYHUVbIvepOyeQOSFA/NeJMIP9enMntkkxLgOcgABH6fjyXG85ipTgvero6noreavGIqfJcIg==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-3.1.2.tgz", + "integrity": "sha512-R0xAiHuWeDjTSB3kQ3OQpT8Rx3yhdOAIm/JM4axXxnG7Q/fS8XUwggv/A4xzbQA+drYRjzkMnpYnOGAc4oeq8w==", "dev": true, "license": "MIT", "dependencies": { @@ -3470,13 +3407,13 @@ } }, "node_modules/@vitest/runner": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.0.8.tgz", - "integrity": "sha512-c7UUw6gEcOzI8fih+uaAXS5DwjlBaCJUo7KJ4VvJcjL95+DSR1kova2hFuRt3w41KZEFcOEiq098KkyrjXeM5w==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-3.1.2.tgz", + "integrity": "sha512-bhLib9l4xb4sUMPXnThbnhX2Yi8OutBMA8Yahxa7yavQsFDtwY/jrUZwpKp2XH9DhRFJIeytlyGpXCqZ65nR+g==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/utils": "3.0.8", + "@vitest/utils": "3.1.2", "pathe": "^2.0.3" }, "funding": { @@ -3484,13 +3421,13 @@ } }, "node_modules/@vitest/snapshot": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.0.8.tgz", - "integrity": "sha512-x8IlMGSEMugakInj44nUrLSILh/zy1f2/BgH0UeHpNyOocG18M9CWVIFBaXPt8TrqVZWmcPjwfG/ht5tnpba8A==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-3.1.2.tgz", + "integrity": "sha512-Q1qkpazSF/p4ApZg1vfZSQ5Yw6OCQxVMVrLjslbLFA1hMDrT2uxtqMaw8Tc/jy5DLka1sNs1Y7rBcftMiaSH/Q==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.8", + "@vitest/pretty-format": "3.1.2", "magic-string": "^0.30.17", "pathe": "^2.0.3" }, @@ -3499,9 +3436,9 @@ } }, "node_modules/@vitest/spy": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.0.8.tgz", - "integrity": "sha512-MR+PzJa+22vFKYb934CejhR4BeRpMSoxkvNoDit68GQxRLSf11aT6CTj3XaqUU9rxgWJFnqicN/wxw6yBRkI1Q==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-3.1.2.tgz", + 
"integrity": "sha512-OEc5fSXMws6sHVe4kOFyDSj/+4MSwst0ib4un0DlcYgQvRuYQ0+M2HyqGaauUMnjq87tmUaMNDxKQx7wNfVqPA==", "dev": true, "license": "MIT", "dependencies": { @@ -3512,13 +3449,13 @@ } }, "node_modules/@vitest/utils": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.0.8.tgz", - "integrity": "sha512-nkBC3aEhfX2PdtQI/QwAWp8qZWwzASsU4Npbcd5RdMPBSSLCpkZp52P3xku3s3uA0HIEhGvEcF8rNkBsz9dQ4Q==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-3.1.2.tgz", + "integrity": "sha512-5GGd0ytZ7BH3H6JTj9Kw7Prn1Nbg0wZVrIvou+UWxm54d+WoXXgAgjFJ8wn3LdagWLFSEfpPeyYrByZaGEZHLg==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/pretty-format": "3.0.8", + "@vitest/pretty-format": "3.1.2", "loupe": "^3.1.3", "tinyrainbow": "^2.0.0" }, @@ -3855,9 +3792,9 @@ } }, "node_modules/browserslist": { - "version": "4.24.2", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.2.tgz", - "integrity": "sha512-ZIc+Q62revdMcqC6aChtW4jz3My3klmCO1fEmINZY/8J3EpBg5/A/D0AKmBveUh6pgoeycoMkVMko84tuYS+Gg==", + "version": "4.24.4", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.4.tgz", + "integrity": "sha512-KDi1Ny1gSePi1vm0q4oxSF8b4DR44GF4BbmS2YdhPLOEqd8pDviZOGH/GsmRwoWJ2+5Lr085X7naowMwKHDG1A==", "dev": true, "funding": [ { @@ -3873,10 +3810,11 @@ "url": "https://github.com/sponsors/ai" } ], + "license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001669", - "electron-to-chromium": "^1.5.41", - "node-releases": "^2.0.18", + "caniuse-lite": "^1.0.30001688", + "electron-to-chromium": "^1.5.73", + "node-releases": "^2.0.19", "update-browserslist-db": "^1.1.1" }, "bin": { @@ -3960,9 +3898,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001684", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001684.tgz", - "integrity": "sha512-G1LRwLIQjBQoyq0ZJGqGIJUXzJ8irpbjHLpVRXDvBEScFJ9b17sgK6vlx0GAJFE21okD7zXl08rRRUfq6HdoEQ==", + "version": "1.0.30001715", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001715.tgz", + "integrity": "sha512-7ptkFGMm2OAOgvZpwgA4yjQ5SQbrNVGdRjzH0pBdy1Fasvcr+KAeECmbCAECzTuDuoX0FCY8KzUxjf9+9kfZEw==", "dev": true, "funding": [ { @@ -3977,7 +3915,8 @@ "type": "github", "url": "https://github.com/sponsors/ai" } - ] + ], + "license": "CC-BY-4.0" }, "node_modules/chai": { "version": "5.2.0", @@ -4472,10 +4411,11 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.5.67", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.67.tgz", - "integrity": "sha512-nz88NNBsD7kQSAGGJyp8hS6xSPtWwqNogA0mjtc2nUYeEf3nURK9qpV18TuBdDmEDgVWotS8Wkzf+V52dSQ/LQ==", - "dev": true + "version": "1.5.143", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.143.tgz", + "integrity": "sha512-QqklJMOFBMqe46k8iIOwA9l2hz57V2OKMmP5eSWcUvwx+mASAsbU+wkF1pHjn9ZVSBPrsYWr4/W/95y5SwYg2g==", + "dev": true, + "license": "ISC" }, "node_modules/emittery": { "version": "0.13.1", @@ -4523,9 +4463,9 @@ } }, "node_modules/es-module-lexer": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz", - "integrity": "sha512-qqnD1yMU6tk/jnaMosogGySTZP8YtUgAffA9nMN+E/rjxcfRQ6IEk7IiozUjgxKoFHBGjTLnrHB/YC45r/59EQ==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.7.0.tgz", + "integrity": 
"sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==", "dev": true, "license": "MIT" }, @@ -4593,20 +4533,20 @@ } }, "node_modules/eslint": { - "version": "9.23.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.23.0.tgz", - "integrity": "sha512-jV7AbNoFPAY1EkFYpLq5bslU9NLNO8xnEeQXwErNibVryjk67wHVmddTBilc5srIttJDBrB0eMHKZBFbSIABCw==", + "version": "9.25.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.25.1.tgz", + "integrity": "sha512-E6Mtz9oGQWDCpV12319d59n4tx9zOTXSTmc8BLVxBx+G/0RdM5MvEEJLU9c0+aleoePYYgVTOsRblx433qmhWQ==", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", - "@eslint/config-array": "^0.19.2", - "@eslint/config-helpers": "^0.2.0", - "@eslint/core": "^0.12.0", + "@eslint/config-array": "^0.20.0", + "@eslint/config-helpers": "^0.2.1", + "@eslint/core": "^0.13.0", "@eslint/eslintrc": "^3.3.1", - "@eslint/js": "9.23.0", - "@eslint/plugin-kit": "^0.2.7", + "@eslint/js": "9.25.1", + "@eslint/plugin-kit": "^0.2.8", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", @@ -4654,9 +4594,9 @@ } }, "node_modules/eslint-config-prettier": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.1.tgz", - "integrity": "sha512-4EQQr6wXwS+ZJSzaR5ZCrYgLxqvUjdXctaEtBqHcbkW944B1NQyO4qpdHQbXBONfwxXdkAY81HH4+LUfrg+zPw==", + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.2.tgz", + "integrity": "sha512-Epgp/EofAUeEpIdZkW60MHKvPyru1ruQJxPL+WIycnaPApuseK0Zpkrh/FwL9oIpQvIhJwV7ptOy0DWUjTlCiA==", "dev": true, "license": "MIT", "bin": { @@ -4667,14 +4607,14 @@ } }, "node_modules/eslint-plugin-prettier": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.2.3.tgz", - "integrity": "sha512-qJ+y0FfCp/mQYQ/vWQ3s7eUlFEL4PyKfAJxsnYTJ4YT73nsJBWqmEpFryxV9OeUiqmsTsYJ5Y+KDNaeP31wrRw==", + "version": "5.2.6", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.2.6.tgz", + "integrity": "sha512-mUcf7QG2Tjk7H055Jk0lGBjbgDnfrvqjhXh9t2xLMSCjZVcw9Rb1V6sVNXO0th3jgeO7zllWPTNRil3JW94TnQ==", "dev": true, "license": "MIT", "dependencies": { "prettier-linter-helpers": "^1.0.0", - "synckit": "^0.9.1" + "synckit": "^0.11.0" }, "engines": { "node": "^14.18.0 || >=16.0.0" @@ -4685,7 +4625,7 @@ "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", - "eslint-config-prettier": "*", + "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", "prettier": ">=3.0.0" }, "peerDependenciesMeta": { @@ -4698,10 +4638,11 @@ } }, "node_modules/eslint-plugin-react-hooks": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.1.0.tgz", - "integrity": "sha512-mpJRtPgHN2tNAvZ35AMfqeB3Xqeo273QxrHJsbBEPWODRM4r0yB6jfoROqKEYrOn27UtRPpcpHc2UqyBSuUNTw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-hooks/-/eslint-plugin-react-hooks-5.2.0.tgz", + "integrity": "sha512-+f15FfK64YQwZdJNELETdn5ibXEUQmW1DZL6KXhNnc2heoy/sg9VJJeT7n8TlMWouzWqSWavFkIhHyIbIAEapg==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -4710,9 +4651,9 @@ } }, "node_modules/eslint-plugin-react-refresh": { - "version": "0.4.19", - "resolved": 
"https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.19.tgz", - "integrity": "sha512-eyy8pcr/YxSYjBoqIFSrlbn9i/xvxUFa8CjzAYo9cFjgGXqq1hyjihcpZvxRLalpaWmueWR81xn7vuKmAFijDQ==", + "version": "0.4.20", + "resolved": "https://registry.npmjs.org/eslint-plugin-react-refresh/-/eslint-plugin-react-refresh-0.4.20.tgz", + "integrity": "sha512-XpbHQ2q5gUF8BGOX4dHe+71qoirYMhApEPZ7sfhF/dNnOF1UXnCMGZf79SFTBO7Bz5YEIT4TMieSlJBWhP9WBA==", "dev": true, "license": "MIT", "peerDependencies": { @@ -4891,9 +4832,9 @@ } }, "node_modules/expect-type": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.1.0.tgz", - "integrity": "sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.1.tgz", + "integrity": "sha512-/kP8CAwxzLVEeFrMm4kMmy4CCDlpipyA7MYLVrdJIkV0fYF0UaigQHRsxHiuY/GEea+bh4KSv3TIlgr+2UL6bw==", "dev": true, "license": "Apache-2.0", "engines": { @@ -6861,9 +6802,9 @@ "license": "MIT" }, "node_modules/lru-cache": { - "version": "11.0.2", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.0.2.tgz", - "integrity": "sha512-123qHRfJBmo2jXDbo/a5YOQrJoHF/GNQTLzQ5+IdK5pWpceK17yRc6ozlWd25FxvGKQbIUs91fDFkXmDHTKcyA==", + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.1.0.tgz", + "integrity": "sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==", "license": "ISC", "engines": { "node": "20 || >=22" @@ -7044,13 +6985,13 @@ "license": "MIT" }, "node_modules/nock": { - "version": "14.0.1", - "resolved": "https://registry.npmjs.org/nock/-/nock-14.0.1.tgz", - "integrity": "sha512-IJN4O9pturuRdn60NjQ7YkFt6Rwei7ZKaOwb1tvUIIqTgeD0SDDAX3vrqZD4wcXczeEy/AsUXxpGpP/yHqV7xg==", + "version": "14.0.4", + "resolved": "https://registry.npmjs.org/nock/-/nock-14.0.4.tgz", + "integrity": "sha512-86fh+gIKH8H02+y0/HKAOZZXn6OwgzXvl6JYwfjvKkoKxUWz54wIIDU/+w24xzMvk/R8pNVXOrvTubyl+Ml6cg==", "dev": true, "license": "MIT", "dependencies": { - "@mswjs/interceptors": "^0.37.3", + "@mswjs/interceptors": "^0.38.5", "json-stringify-safe": "^5.0.1", "propagate": "^2.0.0" }, @@ -7113,10 +7054,11 @@ "peer": true }, "node_modules/node-releases": { - "version": "2.0.18", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", - "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", - "dev": true + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz", + "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==", + "dev": true, + "license": "MIT" }, "node_modules/normalize-path": { "version": "3.0.0", @@ -7618,9 +7560,9 @@ } }, "node_modules/prettier": { - "version": "3.4.2", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.4.2.tgz", - "integrity": "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ==", + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", + "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", "dev": true, "license": "MIT", "bin": { @@ -7764,22 +7706,24 @@ "license": "MIT" }, "node_modules/react": { - "version": "19.0.0", - "resolved": "https://registry.npmjs.org/react/-/react-19.0.0.tgz", 
- "integrity": "sha512-V8AVnmPIICiWpGfm6GLzCR/W5FXLchHop40W4nXBmdlEceh16rCN8O8LNWm5bh5XUX91fh7KpA+W0TgMKmgTpQ==", + "version": "19.1.0", + "resolved": "https://registry.npmjs.org/react/-/react-19.1.0.tgz", + "integrity": "sha512-FS+XFBNvn3GTAWq26joslQgWNoFu08F4kl0J4CgdNKADkdSGXQyTCnKteIAJy96Br6YbpEU1LSzV5dYtjMkMDg==", + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/react-dom": { - "version": "19.0.0", - "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.0.0.tgz", - "integrity": "sha512-4GV5sHFG0e/0AD4X+ySy6UJd3jVl1iNsNHdpad0qhABJ11twS3TTBnseqsKurKcsNqCEFeGL3uLpVChpIO3QfQ==", + "version": "19.1.0", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.1.0.tgz", + "integrity": "sha512-Xs1hdnE+DyKgeHJeJznQmYMIBG3TKIHJJT95Q58nHLSrElKlGQqDTR2HQ9fx5CN/Gk6Vh/kupBTDLU11/nDk/g==", + "license": "MIT", "dependencies": { - "scheduler": "^0.25.0" + "scheduler": "^0.26.0" }, "peerDependencies": { - "react": "^19.0.0" + "react": "^19.1.0" } }, "node_modules/react-infinite-scroll-component": { @@ -7834,9 +7778,9 @@ } }, "node_modules/react-refresh": { - "version": "0.14.2", - "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.14.2.tgz", - "integrity": "sha512-jCvmsr+1IUSMUyzOkRcvnVbX3ZYC6g9TDrDbFuFmRDq7PD4yaGbLKNQL6k2jnArV8hjYxh7hVhAZB6s9HDGpZA==", + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", "dev": true, "license": "MIT", "engines": { @@ -7889,12 +7833,11 @@ } }, "node_modules/react-router": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.4.0.tgz", - "integrity": "sha512-Y2g5ObjkvX3VFeVt+0CIPuYd9PpgqCslG7ASSIdN73LwA1nNWzcMLaoMRJfP3prZFI92svxFwbn7XkLJ+UPQ6A==", + "version": "7.5.2", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.5.2.tgz", + "integrity": "sha512-9Rw8r199klMnlGZ8VAsV/I8WrIF6IyJ90JQUdboupx1cdkgYqwnrYjH+I/nY/7cA1X5zia4mDJqH36npP7sxGQ==", "license": "MIT", "dependencies": { - "@types/cookie": "^0.6.0", "cookie": "^1.0.1", "set-cookie-parser": "^2.6.0", "turbo-stream": "2.4.0" @@ -7913,12 +7856,12 @@ } }, "node_modules/react-router-dom": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.4.0.tgz", - "integrity": "sha512-VlksBPf3n2bijPvnA7nkTsXxMAKOj+bWp4R9c3i+bnwlSOFAGOkJkKhzy/OsRkWaBMICqcAl1JDzh9ZSOze9CA==", + "version": "7.5.2", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-7.5.2.tgz", + "integrity": "sha512-yk1XW8Fj7gK7flpYBXF3yzd2NbX6P7Kxjvs2b5nu1M04rb5pg/Zc4fGdBNTeT4eDYL2bvzWNyKaIMJX/RKHTTg==", "license": "MIT", "dependencies": { - "react-router": "7.4.0" + "react-router": "7.5.2" }, "engines": { "node": ">=20.0.0" @@ -7949,22 +7892,6 @@ } } }, - "node_modules/react-textarea-autosize": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/react-textarea-autosize/-/react-textarea-autosize-8.5.6.tgz", - "integrity": "sha512-aT3ioKXMa8f6zHYGebhbdMD2L00tKeRX1zuVuDx9YQK/JLLRSaSxq3ugECEmUB9z2kvk6bFSIoRHLkkUv0RJiw==", - "dependencies": { - "@babel/runtime": "^7.20.13", - "use-composed-ref": "^1.3.0", - "use-latest": "^1.2.1" - }, - "engines": { - "node": ">=10" - }, - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" - } - }, "node_modules/react-transition-group": { "version": "4.4.5", "resolved": 
"https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz", @@ -8184,9 +8111,9 @@ "license": "MIT" }, "node_modules/sanitize-html": { - "version": "2.15.0", - "resolved": "https://registry.npmjs.org/sanitize-html/-/sanitize-html-2.15.0.tgz", - "integrity": "sha512-wIjst57vJGpLyBP8ioUbg6ThwJie5SuSIjHxJg53v5Fg+kUK+AXlb7bK3RNXpp315MvwM+0OBGCV6h5pPHsVhA==", + "version": "2.16.0", + "resolved": "https://registry.npmjs.org/sanitize-html/-/sanitize-html-2.16.0.tgz", + "integrity": "sha512-0s4caLuHHaZFVxFTG74oW91+j6vW7gKbGD6CD2+miP73CE6z6YtOBN0ArtLd2UGyi4IC7K47v3ENUbQX4jV3Mg==", "license": "MIT", "dependencies": { "deepmerge": "^4.2.2", @@ -8211,14 +8138,15 @@ } }, "node_modules/scheduler": { - "version": "0.25.0", - "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.25.0.tgz", - "integrity": "sha512-xFVuu11jh+xcO7JOAGJNOXld8/TcEHK/4CituBUeUb5hqxJLj9YuemAEuvm9gQ/+pgXYfbQuqAkiYu+u7YEsNA==" + "version": "0.26.0", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.26.0.tgz", + "integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==", + "license": "MIT" }, "node_modules/semver": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", - "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA==", "dev": true, "license": "ISC", "bin": { @@ -8366,9 +8294,9 @@ "license": "MIT" }, "node_modules/std-env": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.8.0.tgz", - "integrity": "sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==", + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/std-env/-/std-env-3.9.0.tgz", + "integrity": "sha512-UGvjygr6F6tpH7o2qyqR6QYpwraIjKSdtzyBdyytFOHmPZY917kwdwLG0RbOjWOnKmnm3PeHjaoLLMie7kPLQw==", "dev": true, "license": "MIT" }, @@ -8527,20 +8455,20 @@ "license": "MIT" }, "node_modules/synckit": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.9.1.tgz", - "integrity": "sha512-7gr8p9TQP6RAHusBOSLs46F4564ZrjV8xFmw5zCmgmhGUcw2hxsShhJ6CEiHQMgPDwAQ1fWHPM0ypc4RMAig4A==", + "version": "0.11.4", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.4.tgz", + "integrity": "sha512-Q/XQKRaJiLiFIBNN+mndW7S/RHxvwzuZS6ZwmRzUBqJBv/5QIKCEwkBC8GBf8EQJKYnaFs0wOZbKTXBPj8L9oQ==", "dev": true, "license": "MIT", "dependencies": { - "@pkgr/core": "^0.1.0", - "tslib": "^2.6.2" + "@pkgr/core": "^0.2.3", + "tslib": "^2.8.1" }, "engines": { "node": "^14.18.0 || >=16.0.0" }, "funding": { - "url": "https://opencollective.com/unts" + "url": "https://opencollective.com/synckit" } }, "node_modules/tabbable": { @@ -8587,6 +8515,51 @@ "dev": true, "license": "MIT" }, + "node_modules/tinyglobby": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz", + "integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.4.4", + "picomatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + 
"version": "6.4.4", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", + "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/tinypool": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", @@ -8695,9 +8668,9 @@ } }, "node_modules/ts-jest": { - "version": "29.2.5", - "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.5.tgz", - "integrity": "sha512-KD8zB2aAZrcKIdGk4OwpJggeLcH1FgrICqDSROWqlnJXGCXK4Mn6FcdK2B6670Xr73lHMG1kHw8R87A0ecZ+vA==", + "version": "29.3.2", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.3.2.tgz", + "integrity": "sha512-bJJkrWc6PjFVz5g2DGCNUo8z7oFEYaz1xP1NpeDU7KNLMWPpEyV8Chbpkn8xjzgRDpQhnGMyvyldoL7h8JXyug==", "dev": true, "license": "MIT", "dependencies": { @@ -8708,7 +8681,8 @@ "json5": "^2.2.3", "lodash.memoize": "^4.1.2", "make-error": "^1.3.6", - "semver": "^7.6.3", + "semver": "^7.7.1", + "type-fest": "^4.39.1", "yargs-parser": "^21.1.1" }, "bin": { @@ -8744,9 +8718,9 @@ } }, "node_modules/tslib": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz", - "integrity": "sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", "license": "0BSD" }, "node_modules/turbo-stream": { @@ -8780,9 +8754,10 @@ } }, "node_modules/type-fest": { - "version": "4.29.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.29.0.tgz", - "integrity": "sha512-RPYt6dKyemXJe7I6oNstcH24myUGSReicxcHTvCLgzm4e0n8y05dGvcGB15/SoPRBmhlMthWQ9pvKyL81ko8nQ==", + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.40.1.tgz", + "integrity": "sha512-9YvLNnORDpI+vghLU/Nf+zSv0kL47KbVJ1o3sKgoTefl6i+zebxbiDQWoe/oWWqPhIgQdRZRT1KA9sCPL810SA==", + "license": "(MIT OR CC0-1.0)", "engines": { "node": ">=16" }, @@ -8791,9 +8766,9 @@ } }, "node_modules/typescript": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz", - "integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==", + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", + "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", "dev": true, "license": "Apache-2.0", "bin": { @@ -8812,9 +8787,9 @@ "license": "MIT" }, "node_modules/update-browserslist-db": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", - "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", + "version": 
"1.1.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz", + "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", "dev": true, "funding": [ { @@ -8830,9 +8805,10 @@ "url": "https://github.com/sponsors/ai" } ], + "license": "MIT", "dependencies": { "escalade": "^3.2.0", - "picocolors": "^1.1.0" + "picocolors": "^1.1.1" }, "bin": { "update-browserslist-db": "cli.js" @@ -9009,15 +8985,18 @@ } }, "node_modules/vite": { - "version": "6.2.4", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.4.tgz", - "integrity": "sha512-veHMSew8CcRzhL5o8ONjy8gkfmFJAd5Ac16oxBUjlwgX3Gq2Wqr+qNC3TjPIpy7TPV/KporLga5GT9HqdrCizw==", + "version": "6.3.3", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.3.tgz", + "integrity": "sha512-5nXH+QsELbFKhsEfWLkHrvgRpTdGJzqOZ+utSdmPTvwHmvU6ITTm3xx+mRusihkcI8GeC7lCDyn3kDtiki9scw==", "dev": true, "license": "MIT", "dependencies": { "esbuild": "^0.25.0", + "fdir": "^6.4.4", + "picomatch": "^4.0.2", "postcss": "^8.5.3", - "rollup": "^4.30.1" + "rollup": "^4.34.9", + "tinyglobby": "^0.2.13" }, "bin": { "vite": "bin/vite.js" @@ -9081,9 +9060,9 @@ } }, "node_modules/vite-node": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.0.8.tgz", - "integrity": "sha512-6PhR4H9VGlcwXZ+KWCdMqbtG649xCPZqfI9j2PsK1FcXgEzro5bGHcVKFCTqPLaNKZES8Evqv4LwvZARsq5qlg==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-3.1.2.tgz", + "integrity": "sha512-/8iMryv46J3aK13iUXsei5G/A3CUlW4665THCPS+K8xAaqrVWiGB4RfXMQXCLjpK9P2eK//BczrVkn5JLAk6DA==", "dev": true, "license": "MIT", "dependencies": { @@ -9103,32 +9082,61 @@ "url": "https://opencollective.com/vitest" } }, + "node_modules/vite/node_modules/fdir": { + "version": "6.4.4", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz", + "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/picomatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz", + "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, "node_modules/vitest": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.8.tgz", - "integrity": "sha512-dfqAsNqRGUc8hB9OVR2P0w8PZPEckti2+5rdZip0WIz9WW0MnImJ8XiR61QhqLa92EQzKP2uPkzenKOAHyEIbA==", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.1.2.tgz", + "integrity": "sha512-WaxpJe092ID1C0mr+LH9MmNrhfzi8I65EX/NRU/Ld016KqQNRgxSOlGNP1hHN+a/F8L15Mh8klwaF77zR3GeDQ==", "dev": true, "license": "MIT", "dependencies": { - "@vitest/expect": "3.0.8", - "@vitest/mocker": "3.0.8", - "@vitest/pretty-format": "^3.0.8", - "@vitest/runner": "3.0.8", - "@vitest/snapshot": "3.0.8", - "@vitest/spy": "3.0.8", - "@vitest/utils": "3.0.8", + "@vitest/expect": "3.1.2", + "@vitest/mocker": "3.1.2", + "@vitest/pretty-format": "^3.1.2", + "@vitest/runner": "3.1.2", + "@vitest/snapshot": "3.1.2", + "@vitest/spy": "3.1.2", + "@vitest/utils": "3.1.2", "chai": "^5.2.0", "debug": "^4.4.0", - "expect-type": 
"^1.1.0", + "expect-type": "^1.2.1", "magic-string": "^0.30.17", "pathe": "^2.0.3", - "std-env": "^3.8.0", + "std-env": "^3.9.0", "tinybench": "^2.9.0", "tinyexec": "^0.3.2", + "tinyglobby": "^0.2.13", "tinypool": "^1.0.2", "tinyrainbow": "^2.0.0", "vite": "^5.0.0 || ^6.0.0", - "vite-node": "3.0.8", + "vite-node": "3.1.2", "why-is-node-running": "^2.3.0" }, "bin": { @@ -9144,8 +9152,8 @@ "@edge-runtime/vm": "*", "@types/debug": "^4.1.12", "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", - "@vitest/browser": "3.0.8", - "@vitest/ui": "3.0.8", + "@vitest/browser": "3.1.2", + "@vitest/ui": "3.1.2", "happy-dom": "*", "jsdom": "*" }, @@ -9396,7 +9404,8 @@ "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/yargs": { "version": "17.7.2", diff --git a/web/ui/package.json b/web/ui/package.json index 650bb65f7d..3e61a34821 100644 --- a/web/ui/package.json +++ b/web/ui/package.json @@ -1,7 +1,7 @@ { "name": "prometheus-io", "description": "Monorepo for the Prometheus UI", - "version": "0.302.1", + "version": "0.303.0", "private": true, "scripts": { "build": "bash build_ui.sh --all", @@ -16,12 +16,12 @@ ], "devDependencies": { "@types/jest": "^29.5.14", - "@typescript-eslint/eslint-plugin": "^8.20.0", - "@typescript-eslint/parser": "^8.25.0", - "eslint-config-prettier": "^10.1.1", - "prettier": "^3.4.2", - "ts-jest": "^29.2.2", - "typescript": "^5.7.2", - "vite": "^6.2.4" + "@typescript-eslint/eslint-plugin": "^8.31.0", + "@typescript-eslint/parser": "^8.31.0", + "eslint-config-prettier": "^10.1.2", + "prettier": "^3.5.3", + "ts-jest": "^29.3.2", + "typescript": "^5.8.3", + "vite": "^6.3.3" } } diff --git a/web/ui/react-app/package-lock.json b/web/ui/react-app/package-lock.json index 6b9b0b86e4..c94d77d48b 100644 --- a/web/ui/react-app/package-lock.json +++ b/web/ui/react-app/package-lock.json @@ -9,12 +9,12 @@ "version": "0.300.1", "dependencies": { "@codemirror/autocomplete": "^6.18.6", - "@codemirror/commands": "^6.8.0", + "@codemirror/commands": "^6.8.1", "@codemirror/language": "^6.11.0", - "@codemirror/lint": "^6.8.4", + "@codemirror/lint": "^6.8.5", "@codemirror/search": "^6.5.10", "@codemirror/state": "^6.5.2", - "@codemirror/view": "^6.36.4", + "@codemirror/view": "^6.36.6", "@forevolve/bootstrap-dark": "^4.0.2", "@fortawesome/fontawesome-svg-core": "6.7.2", "@fortawesome/free-solid-svg-icons": "6.7.2", @@ -24,16 +24,16 @@ "@lezer/lr": "^1.4.2", "@nexucis/fuzzy": "^0.5.1", "@nexucis/kvsearch": "^0.9.1", - "@prometheus-io/codemirror-promql": "0.302.1", + "@prometheus-io/codemirror-promql": "0.303.0", "bootstrap": "^4.6.2", "css.escape": "^1.5.1", "downshift": "^9.0.9", - "http-proxy-middleware": "^3.0.3", + "http-proxy-middleware": "^3.0.5", "jquery": "^3.7.1", "jquery.flot.tooltip": "^0.9.0", "moment": "^2.30.1", - "moment-timezone": "^0.5.47", - "popper.js": "^1.14.3", + "moment-timezone": "^0.5.48", + "popper.js": "^1.16.1", "react": "^17.0.2", "react-copy-to-clipboard": "^5.1.0", "react-dom": "^17.0.2", @@ -42,8 +42,8 @@ "react-router-dom": "^5.3.4", "react-test-renderer": "^17.0.2", "reactstrap": "^8.10.1", - "sanitize-html": "^2.15.0", - "sass": "1.86.0", + "sanitize-html": "^2.16.0", + "sass": "1.87.0", "tempusdominus-bootstrap-4": "^5.39.2", "tempusdominus-core": "^5.19.3" }, @@ -54,26 +54,26 @@ "@types/flot": "0.0.36", "@types/jest": "^29.5.14", "@types/jquery": "^3.5.32", 
- "@types/node": "^22.13.11", - "@types/react": "^17.0.71", + "@types/node": "^22.15.2", + "@types/react": "^17.0.85", "@types/react-copy-to-clipboard": "^5.0.7", - "@types/react-dom": "^17.0.25", + "@types/react-dom": "^17.0.26", "@types/react-router-dom": "^5.3.3", - "@types/sanitize-html": "^2.13.0", + "@types/sanitize-html": "^2.15.0", "@types/sinon": "^17.0.4", "@wojtekmaj/enzyme-adapter-react-17": "^0.8.0", "enzyme": "^3.11.0", "enzyme-to-json": "^3.6.2", - "eslint-config-prettier": "^10.1.1", + "eslint-config-prettier": "^10.1.2", "eslint-config-react-app": "^7.0.1", - "eslint-plugin-prettier": "^5.2.3", + "eslint-plugin-prettier": "^5.2.6", "jest-canvas-mock": "^2.5.2", "jest-fetch-mock": "^3.0.3", "mutationobserver-shim": "^0.3.7", "prettier": "^3.5.3", "react-scripts": "^5.0.1", - "sinon": "^19.0.4", - "ts-jest": "^29.2.6" + "sinon": "^19.0.5", + "ts-jest": "^29.3.2" }, "optionalDependencies": { "fsevents": "^2.3.3" @@ -114,12 +114,14 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.24.2", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.2.tgz", - "integrity": "sha512-y5+tLQyV8pg3fsiln67BVLD1P13Eg4lh5RW9mF0zUuvLrv9uIQ4MCL+CRT+FTsBlBjcIan6PGsLcBN0m3ClUyQ==", + "version": "7.26.2", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.26.2.tgz", + "integrity": "sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/highlight": "^7.24.2", + "@babel/helper-validator-identifier": "^7.25.9", + "js-tokens": "^4.0.0", "picocolors": "^1.0.0" }, "engines": { @@ -526,19 +528,21 @@ } }, "node_modules/@babel/helper-string-parser": { - "version": "7.24.1", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.1.tgz", - "integrity": "sha512-2ofRCjnnA9y+wk8b9IAREroeUP02KHp431N2mhKniy2yKIDKpbrHv9eXwm8cBeWQYcJmzv5qKCu65P47eCF7CQ==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.9.tgz", + "integrity": "sha512-4A/SCr/2KLd5jrtOMFzaKjVtAei3+2r/NChoBNoZ3EyP/+GlhoaEGoWOZUmFmoITP7zOJyHIMm+DYRd8o3PvHA==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.22.20", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz", - "integrity": "sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==", + "version": "7.25.9", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz", + "integrity": "sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=6.9.0" } @@ -567,39 +571,28 @@ } }, "node_modules/@babel/helpers": { - "version": "7.24.4", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.4.tgz", - "integrity": "sha512-FewdlZbSiwaVGlgT1DPANDuCHaDMiOo+D/IDYRFYjHOuv66xMSJ7fQwwODwRNAPkADIO/z1EoF/l2BCWlWABDw==", + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.0.tgz", + "integrity": "sha512-U5eyP/CTFPuNE3qk+WZMxFkp/4zUzdceQlfzf7DdGdhp+Fezd7HD+i8Y24ZuTMKX3wQBld449jijbGq6OdGNQg==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/template": "^7.24.0", - "@babel/traverse": "^7.24.1", - "@babel/types": "^7.24.0" - }, - 
"engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/highlight": { - "version": "7.24.2", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.2.tgz", - "integrity": "sha512-Yac1ao4flkTxTteCDZLEvdxg2fZfz1v8M4QpaGypq/WPDqg3ijHYbDfs+LG5hvzSoqaSZ9/Z9lKSP3CjZjv+pA==", - "dev": true, - "dependencies": { - "@babel/helper-validator-identifier": "^7.22.20", - "chalk": "^2.4.2", - "js-tokens": "^4.0.0", - "picocolors": "^1.0.0" + "@babel/template": "^7.27.0", + "@babel/types": "^7.27.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.24.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.4.tgz", - "integrity": "sha512-zTvEBcghmeBma9QIGunWevvBAp4/Qu9Bdq+2k0Ot4fVMD6v3dsC9WOcRSKk7tRRyBM/53yKMJko9xOatGQAwSg==", + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.27.0.tgz", + "integrity": "sha512-iaepho73/2Pz7w2eMS0Q5f83+0RKI7i4xmiYeBmDzfRVbQtTOG7Ts0S4HzJVsTMGI9keU8rNfuZr8DKfSt7Yyg==", "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.0" + }, "bin": { "parser": "bin/babel-parser.js" }, @@ -2202,14 +2195,15 @@ } }, "node_modules/@babel/template": { - "version": "7.24.0", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.0.tgz", - "integrity": "sha512-Bkf2q8lMB0AFpX0NFEqSbx1OkTHf0f+0j82mkw+ZpzBnkk7e9Ql0891vlfgi+kHwOk8tQjiQHpqh4LaSa0fKEA==", + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.0.tgz", + "integrity": "sha512-2ncevenBqXI6qRMukPlXwHKHchC7RyMuu4xv5JBXRfOGVcTy1mXCD12qrp7Jsoxll1EV3+9sE4GugBVRjT2jFA==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.23.5", - "@babel/parser": "^7.24.0", - "@babel/types": "^7.24.0" + "@babel/code-frame": "^7.26.2", + "@babel/parser": "^7.27.0", + "@babel/types": "^7.27.0" }, "engines": { "node": ">=6.9.0" @@ -2237,14 +2231,14 @@ } }, "node_modules/@babel/types": { - "version": "7.24.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.0.tgz", - "integrity": "sha512-+j7a5c253RfKh8iABBhywc8NSfP5LURe7Uh4qpsh6jc+aLJguvmIUBdjSdEMQv2bENrCR5MfRdjGo7vzS/ob7w==", + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.27.0.tgz", + "integrity": "sha512-H45s8fVLYjbhFH62dIJ3WtmJ6RSPt/3DRO0ZcT2SUiYiQyz3BLVb9ADEnLl91m74aQPS3AzzeajZHYOalWe3bg==", "dev": true, + "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.23.4", - "@babel/helper-validator-identifier": "^7.22.20", - "to-fast-properties": "^2.0.0" + "@babel/helper-string-parser": "^7.25.9", + "@babel/helper-validator-identifier": "^7.25.9" }, "engines": { "node": ">=6.9.0" @@ -2269,9 +2263,9 @@ } }, "node_modules/@codemirror/commands": { - "version": "6.8.0", - "resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.8.0.tgz", - "integrity": "sha512-q8VPEFaEP4ikSlt6ZxjB3zW72+7osfAYW9i8Zu943uqbKuz6utc1+F170hyLUCUltXORjQXRyYQNfkckzA/bPQ==", + "version": "6.8.1", + "resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.8.1.tgz", + "integrity": "sha512-KlGVYufHMQzxbdQONiLyGQDUW0itrLZwq3CcY7xpv9ZLRHqzkBSoteocBHtMCoY7/Ci4xhzSrToIeLg7FxHuaw==", "license": "MIT", "dependencies": { "@codemirror/language": "^6.0.0", @@ -2295,9 +2289,9 @@ } }, "node_modules/@codemirror/lint": { - "version": "6.8.4", - "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.8.4.tgz", - "integrity": 
"sha512-u4q7PnZlJUojeRe8FJa/njJcMctISGgPQ4PnWsd9268R4ZTtU+tfFYmwkBvgcrK2+QQ8tYFVALVb5fVJykKc5A==", + "version": "6.8.5", + "resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.8.5.tgz", + "integrity": "sha512-s3n3KisH7dx3vsoeGMxsbRAgKe4O1vbrnKBClm99PU0fWxmxsx5rR2PfqQgIt+2MMJBHbiJ5rfIdLYfB9NNvsA==", "license": "MIT", "dependencies": { "@codemirror/state": "^6.0.0", @@ -2326,9 +2320,9 @@ } }, "node_modules/@codemirror/view": { - "version": "6.36.4", - "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.36.4.tgz", - "integrity": "sha512-ZQ0V5ovw/miKEXTvjgzRyjnrk9TwriUB1k4R5p7uNnHR9Hus+D1SXHGdJshijEzPFjU25xea/7nhIeSqYFKdbA==", + "version": "6.36.6", + "resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.36.6.tgz", + "integrity": "sha512-uxugGLet+Nzp0Jcit8Hn3LypM8ioMLKTsdf8FRoT3HWvZtb9GhaWMe0Cc15rz90Ljab4YFJiAulmIVB74OY0IQ==", "license": "MIT", "dependencies": { "@codemirror/state": "^6.5.0", @@ -4611,16 +4605,16 @@ } }, "node_modules/@pkgr/core": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.1.1.tgz", - "integrity": "sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==", + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@pkgr/core/-/core-0.2.4.tgz", + "integrity": "sha512-ROFF39F6ZrnzSUEmQQZUar0Jt4xVoP9WnDRdWwF4NNcXs3xBTLgBUDoOwW141y1jP+S8nahIbdxbFC7IShw9Iw==", "dev": true, "license": "MIT", "engines": { "node": "^12.20.0 || ^14.18.0 || >=16.0.0" }, "funding": { - "url": "https://opencollective.com/unts" + "url": "https://opencollective.com/pkgr" } }, "node_modules/@pmmmwh/react-refresh-webpack-plugin": { @@ -4674,12 +4668,12 @@ } }, "node_modules/@prometheus-io/codemirror-promql": { - "version": "0.302.1", - "resolved": "https://registry.npmjs.org/@prometheus-io/codemirror-promql/-/codemirror-promql-0.302.1.tgz", - "integrity": "sha512-u2uZbVKwz7UeJarE1LcOzbxiocetpgoqZ3ngs9HKOHG48i2dFUEXDfn4zs4dhuClQ/NixirmdGhSYq3l6b+9Yw==", + "version": "0.303.0", + "resolved": "https://registry.npmjs.org/@prometheus-io/codemirror-promql/-/codemirror-promql-0.303.0.tgz", + "integrity": "sha512-qakS0MKnv/yBaX3tz2Lz3b3rCANpAyJhzzcI3AFxzyHWPdOUxZC64IkSjJG4SM1aLgRD0LAWH6xdqXVhIlCrlA==", "license": "Apache-2.0", "dependencies": { - "@prometheus-io/lezer-promql": "0.302.1", + "@prometheus-io/lezer-promql": "0.303.0", "lru-cache": "^11.0.2" }, "engines": { @@ -4695,9 +4689,9 @@ } }, "node_modules/@prometheus-io/lezer-promql": { - "version": "0.302.1", - "resolved": "https://registry.npmjs.org/@prometheus-io/lezer-promql/-/lezer-promql-0.302.1.tgz", - "integrity": "sha512-pxaWbzqwRXe+/wS6VWLcMSD23bgct56GQccqTWZIu9atmwTCMHWRBjGjCaHa5OpIoQYLAgKKClIFkVZdcW00Mw==", + "version": "0.303.0", + "resolved": "https://registry.npmjs.org/@prometheus-io/lezer-promql/-/lezer-promql-0.303.0.tgz", + "integrity": "sha512-mO8vE85ft4S7fmw+oYgrzWatX3UCVQZkSoCfoATsxNaM8tgKdh1FkHMnux4rWjC3NNNnyQFXYOnUVcFisL2i0A==", "license": "Apache-2.0", "peerDependencies": { "@lezer/highlight": "^1.1.2", @@ -5426,12 +5420,12 @@ "dev": true }, "node_modules/@types/node": { - "version": "22.13.11", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.11.tgz", - "integrity": "sha512-iEUCUJoU0i3VnrCmgoWCXttklWcvoCIx4jzcP22fioIVSdTmjgoEvmAO/QPw6TcS9k5FrNgn4w7q5lGOd1CT5g==", + "version": "22.15.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.15.2.tgz", + "integrity": "sha512-uKXqKN9beGoMdBfcaTY1ecwz6ctxuJAcUlwE55938g0ZJ8lRxwAZqRz2AJ4pzpt5dHdTPMB863UZ0ESiFUcP7A==", "license": "MIT", 
"dependencies": { - "undici-types": "~6.20.0" + "undici-types": "~6.21.0" } }, "node_modules/@types/node-forge": { @@ -5480,10 +5474,11 @@ "dev": true }, "node_modules/@types/react": { - "version": "17.0.80", - "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.80.tgz", - "integrity": "sha512-LrgHIu2lEtIo8M7d1FcI3BdwXWoRQwMoXOZ7+dPTW0lYREjmlHl3P0U1VD0i/9tppOuv8/sam7sOjx34TxSFbA==", + "version": "17.0.85", + "resolved": "https://registry.npmjs.org/@types/react/-/react-17.0.85.tgz", + "integrity": "sha512-5oBDUsRDsrYq4DdyHaL99gE1AJCfuDhyxqF6/55fvvOIRkp1PpKuwJ+aMiGJR+GJt7YqMNclPROTHF20vY2cXA==", "dev": true, + "license": "MIT", "dependencies": { "@types/prop-types": "*", "@types/scheduler": "^0.16", @@ -5500,12 +5495,13 @@ } }, "node_modules/@types/react-dom": { - "version": "17.0.25", - "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.25.tgz", - "integrity": "sha512-urx7A7UxkZQmThYA4So0NelOVjx3V4rNFVJwp0WZlbIK5eM4rNJDiN3R/E9ix0MBh6kAEojk/9YL+Te6D9zHNA==", + "version": "17.0.26", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-17.0.26.tgz", + "integrity": "sha512-Z+2VcYXJwOqQ79HreLU/1fyQ88eXSSFh6I3JdrEHQIfYSI0kCQpTGvOrbE6jFGGYXKsHuwY9tBa/w5Uo6KzrEg==", "dev": true, - "dependencies": { - "@types/react": "^17" + "license": "MIT", + "peerDependencies": { + "@types/react": "^17.0.0" } }, "node_modules/@types/react-router": { @@ -5545,9 +5541,9 @@ "dev": true }, "node_modules/@types/sanitize-html": { - "version": "2.13.0", - "resolved": "https://registry.npmjs.org/@types/sanitize-html/-/sanitize-html-2.13.0.tgz", - "integrity": "sha512-X31WxbvW9TjIhZZNyNBZ/p5ax4ti7qsNDBDEnH4zAgmEh35YnFD1UiS6z9Cd34kKm0LslFW0KPmTQzu/oGtsqQ==", + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/@types/sanitize-html/-/sanitize-html-2.15.0.tgz", + "integrity": "sha512-71Z6PbYsVKfp4i6Jvr37s5ql6if1Q/iJQT80NbaSi7uGaG8CqBMXP0pk/EsURAOuGdk5IJCd/vnzKrR7S3Txsw==", "dev": true, "license": "MIT", "dependencies": { @@ -9364,9 +9360,9 @@ } }, "node_modules/eslint-config-prettier": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.1.tgz", - "integrity": "sha512-4EQQr6wXwS+ZJSzaR5ZCrYgLxqvUjdXctaEtBqHcbkW944B1NQyO4qpdHQbXBONfwxXdkAY81HH4+LUfrg+zPw==", + "version": "10.1.2", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-10.1.2.tgz", + "integrity": "sha512-Epgp/EofAUeEpIdZkW60MHKvPyru1ruQJxPL+WIycnaPApuseK0Zpkrh/FwL9oIpQvIhJwV7ptOy0DWUjTlCiA==", "dev": true, "license": "MIT", "bin": { @@ -9584,14 +9580,14 @@ } }, "node_modules/eslint-plugin-prettier": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.2.3.tgz", - "integrity": "sha512-qJ+y0FfCp/mQYQ/vWQ3s7eUlFEL4PyKfAJxsnYTJ4YT73nsJBWqmEpFryxV9OeUiqmsTsYJ5Y+KDNaeP31wrRw==", + "version": "5.2.6", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-5.2.6.tgz", + "integrity": "sha512-mUcf7QG2Tjk7H055Jk0lGBjbgDnfrvqjhXh9t2xLMSCjZVcw9Rb1V6sVNXO0th3jgeO7zllWPTNRil3JW94TnQ==", "dev": true, "license": "MIT", "dependencies": { "prettier-linter-helpers": "^1.0.0", - "synckit": "^0.9.1" + "synckit": "^0.11.0" }, "engines": { "node": "^14.18.0 || >=16.0.0" @@ -9602,7 +9598,7 @@ "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", - "eslint-config-prettier": "*", + "eslint-config-prettier": ">= 7.0.0 <10.0.0 || >=10.1.0", "prettier": ">=3.0.0" }, "peerDependenciesMeta": { @@ -11416,9 
+11412,9 @@ } }, "node_modules/http-proxy-middleware": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-3.0.3.tgz", - "integrity": "sha512-usY0HG5nyDUwtqpiZdETNbmKtw3QQ1jwYFZ9wi5iHzX2BcILwQKtYDJPo7XHTsu5Z0B2Hj3W9NNnbd+AjFWjqg==", + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-3.0.5.tgz", + "integrity": "sha512-GLZZm1X38BPY4lkXA01jhwxvDoOkkXqjgVyUzVxiEK4iuRu03PZoYHhHRwxnfhQMDuaxi3vVri0YgSro/1oWqg==", "license": "MIT", "dependencies": { "@types/http-proxy": "^1.17.15", @@ -16382,9 +16378,9 @@ } }, "node_modules/moment-timezone": { - "version": "0.5.47", - "resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.47.tgz", - "integrity": "sha512-UbNt/JAWS0m/NJOebR0QMRHBk0hu03r5dx9GK8Cs0AS3I81yDcOc9k+DytPItgVvBP7J6Mf6U2n3BPAacAV9oA==", + "version": "0.5.48", + "resolved": "https://registry.npmjs.org/moment-timezone/-/moment-timezone-0.5.48.tgz", + "integrity": "sha512-f22b8LV1gbTO2ms2j2z13MuPogNoh5UzxL3nzNAYKGraILnbGc9NEE6dyiiiLv46DGRb8A4kg8UKWLjPthxBHw==", "license": "MIT", "dependencies": { "moment": "^2.29.4" @@ -21212,9 +21208,9 @@ "dev": true }, "node_modules/sanitize-html": { - "version": "2.15.0", - "resolved": "https://registry.npmjs.org/sanitize-html/-/sanitize-html-2.15.0.tgz", - "integrity": "sha512-wIjst57vJGpLyBP8ioUbg6ThwJie5SuSIjHxJg53v5Fg+kUK+AXlb7bK3RNXpp315MvwM+0OBGCV6h5pPHsVhA==", + "version": "2.16.0", + "resolved": "https://registry.npmjs.org/sanitize-html/-/sanitize-html-2.16.0.tgz", + "integrity": "sha512-0s4caLuHHaZFVxFTG74oW91+j6vW7gKbGD6CD2+miP73CE6z6YtOBN0ArtLd2UGyi4IC7K47v3ENUbQX4jV3Mg==", "license": "MIT", "dependencies": { "deepmerge": "^4.2.2", @@ -21232,9 +21228,9 @@ "dev": true }, "node_modules/sass": { - "version": "1.86.0", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.86.0.tgz", - "integrity": "sha512-zV8vGUld/+mP4KbMLJMX7TyGCuUp7hnkOScgCMsWuHtns8CWBoz+vmEhoGMXsaJrbUP8gj+F1dLvVe79sK8UdA==", + "version": "1.87.0", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.87.0.tgz", + "integrity": "sha512-d0NoFH4v6SjEK7BoX810Jsrhj7IQSYHAHLi/iSpgqKc7LaIDshFRlSg5LOymf9FqQhxEHs2W5ZQXlvy0KD45Uw==", "license": "MIT", "dependencies": { "chokidar": "^4.0.0", @@ -21635,9 +21631,9 @@ "dev": true }, "node_modules/sinon": { - "version": "19.0.4", - "resolved": "https://registry.npmjs.org/sinon/-/sinon-19.0.4.tgz", - "integrity": "sha512-myidFob7fjmYHJb+CHNLtAYScxn3sngGq4t75L2rCGGpE/k4OQVkN3KE5FsN+XkO2+fcDZ65PGvq3KHrlLAm7g==", + "version": "19.0.5", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-19.0.5.tgz", + "integrity": "sha512-r15s9/s+ub/d4bxNXqIUmwp6imVSdTorIRaxoecYjqTVLZ8RuoXr/4EDGwIBo6Waxn7f2gnURX9zuhAfCwaF6Q==", "dev": true, "license": "BSD-3-Clause", "dependencies": { @@ -22411,20 +22407,20 @@ "dev": true }, "node_modules/synckit": { - "version": "0.9.2", - "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.9.2.tgz", - "integrity": "sha512-vrozgXDQwYO72vHjUb/HnFbQx1exDjoKzqx23aXEg2a9VIg2TSFZ8FmeZpTjUCFMYw7mpX4BE2SFu8wI7asYsw==", + "version": "0.11.4", + "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.11.4.tgz", + "integrity": "sha512-Q/XQKRaJiLiFIBNN+mndW7S/RHxvwzuZS6ZwmRzUBqJBv/5QIKCEwkBC8GBf8EQJKYnaFs0wOZbKTXBPj8L9oQ==", "dev": true, "license": "MIT", "dependencies": { - "@pkgr/core": "^0.1.0", - "tslib": "^2.6.2" + "@pkgr/core": "^0.2.3", + "tslib": "^2.8.1" }, "engines": { "node": "^14.18.0 || >=16.0.0" }, "funding": { - "url": "https://opencollective.com/unts" + "url": 
"https://opencollective.com/synckit" } }, "node_modules/tailwindcss": { @@ -22712,15 +22708,6 @@ "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", "dev": true }, - "node_modules/to-fast-properties": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", - "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", - "dev": true, - "engines": { - "node": ">=4" - } - }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -22789,9 +22776,9 @@ "dev": true }, "node_modules/ts-jest": { - "version": "29.2.6", - "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.6.tgz", - "integrity": "sha512-yTNZVZqc8lSixm+QGVFcPe6+yj7+TWZwIesuOWvfcn4B9bz5x4NDzVCQQjOs7Hfouu36aEqfEbo9Qpo+gq8dDg==", + "version": "29.3.2", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.3.2.tgz", + "integrity": "sha512-bJJkrWc6PjFVz5g2DGCNUo8z7oFEYaz1xP1NpeDU7KNLMWPpEyV8Chbpkn8xjzgRDpQhnGMyvyldoL7h8JXyug==", "dev": true, "license": "MIT", "dependencies": { @@ -22803,6 +22790,7 @@ "lodash.memoize": "^4.1.2", "make-error": "^1.3.6", "semver": "^7.7.1", + "type-fest": "^4.39.1", "yargs-parser": "^21.1.1" }, "bin": { @@ -22903,6 +22891,19 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/ts-jest/node_modules/type-fest": { + "version": "4.40.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.40.1.tgz", + "integrity": "sha512-9YvLNnORDpI+vghLU/Nf+zSv0kL47KbVJ1o3sKgoTefl6i+zebxbiDQWoe/oWWqPhIgQdRZRT1KA9sCPL810SA==", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/tsconfig-paths": { "version": "3.15.0", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz", @@ -22937,9 +22938,10 @@ } }, "node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", + "integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==", + "license": "0BSD" }, "node_modules/tsutils": { "version": "3.21.0", @@ -23131,9 +23133,9 @@ "dev": true }, "node_modules/undici-types": { - "version": "6.20.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", - "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", "license": "MIT" }, "node_modules/unicode-canonical-property-names-ecmascript": { @@ -23627,10 +23629,11 @@ } }, "node_modules/webpack-dev-server/node_modules/http-proxy-middleware": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.7.tgz", - "integrity": "sha512-fgVY8AV7qU7z/MmXJ/rxwbrtQH4jBQ9m7kp3llF0liB7glmFeVZFBepQb32T3y8n8k2+AEYuMPCpinYW+/CuRA==", + "version": "2.0.9", + "resolved": 
"https://registry.npmjs.org/http-proxy-middleware/-/http-proxy-middleware-2.0.9.tgz", + "integrity": "sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q==", "dev": true, + "license": "MIT", "dependencies": { "@types/http-proxy": "^1.17.8", "http-proxy": "^1.18.1", diff --git a/web/ui/react-app/package.json b/web/ui/react-app/package.json index e9ebddc2c9..354f3a5768 100644 --- a/web/ui/react-app/package.json +++ b/web/ui/react-app/package.json @@ -4,12 +4,12 @@ "private": true, "dependencies": { "@codemirror/autocomplete": "^6.18.6", - "@codemirror/commands": "^6.8.0", + "@codemirror/commands": "^6.8.1", "@codemirror/language": "^6.11.0", - "@codemirror/lint": "^6.8.4", + "@codemirror/lint": "^6.8.5", "@codemirror/search": "^6.5.10", "@codemirror/state": "^6.5.2", - "@codemirror/view": "^6.36.4", + "@codemirror/view": "^6.36.6", "@forevolve/bootstrap-dark": "^4.0.2", "@fortawesome/fontawesome-svg-core": "6.7.2", "@fortawesome/free-solid-svg-icons": "6.7.2", @@ -19,16 +19,16 @@ "@lezer/lr": "^1.4.2", "@nexucis/fuzzy": "^0.5.1", "@nexucis/kvsearch": "^0.9.1", - "@prometheus-io/codemirror-promql": "0.302.1", + "@prometheus-io/codemirror-promql": "0.303.0", "bootstrap": "^4.6.2", "css.escape": "^1.5.1", "downshift": "^9.0.9", - "http-proxy-middleware": "^3.0.3", + "http-proxy-middleware": "^3.0.5", "jquery": "^3.7.1", "jquery.flot.tooltip": "^0.9.0", "moment": "^2.30.1", - "moment-timezone": "^0.5.47", - "popper.js": "^1.14.3", + "moment-timezone": "^0.5.48", + "popper.js": "^1.16.1", "react": "^17.0.2", "react-copy-to-clipboard": "^5.1.0", "react-dom": "^17.0.2", @@ -37,8 +37,8 @@ "react-router-dom": "^5.3.4", "react-test-renderer": "^17.0.2", "reactstrap": "^8.10.1", - "sanitize-html": "^2.15.0", - "sass": "1.86.0", + "sanitize-html": "^2.16.0", + "sass": "1.87.0", "tempusdominus-bootstrap-4": "^5.39.2", "tempusdominus-core": "^5.19.3" }, @@ -71,26 +71,26 @@ "@types/flot": "0.0.36", "@types/jest": "^29.5.14", "@types/jquery": "^3.5.32", - "@types/node": "^22.13.11", - "@types/react": "^17.0.71", + "@types/node": "^22.15.2", + "@types/react": "^17.0.85", "@types/react-copy-to-clipboard": "^5.0.7", - "@types/react-dom": "^17.0.25", + "@types/react-dom": "^17.0.26", "@types/react-router-dom": "^5.3.3", - "@types/sanitize-html": "^2.13.0", + "@types/sanitize-html": "^2.15.0", "@types/sinon": "^17.0.4", "@wojtekmaj/enzyme-adapter-react-17": "^0.8.0", "enzyme": "^3.11.0", "enzyme-to-json": "^3.6.2", - "eslint-config-prettier": "^10.1.1", + "eslint-config-prettier": "^10.1.2", "eslint-config-react-app": "^7.0.1", - "eslint-plugin-prettier": "^5.2.3", + "eslint-plugin-prettier": "^5.2.6", "jest-canvas-mock": "^2.5.2", "jest-fetch-mock": "^3.0.3", "mutationobserver-shim": "^0.3.7", "prettier": "^3.5.3", "react-scripts": "^5.0.1", - "sinon": "^19.0.4", - "ts-jest": "^29.2.6" + "sinon": "^19.0.5", + "ts-jest": "^29.3.2" }, "jest": { "snapshotSerializers": [ diff --git a/web/web.go b/web/web.go index 21624141ad..84c4a2a529 100644 --- a/web/web.go +++ b/web/web.go @@ -290,6 +290,7 @@ type Options struct { EnableRemoteWriteReceiver bool EnableOTLPWriteReceiver bool ConvertOTLPDelta bool + NativeOTLPDeltaIngestion bool IsAgent bool CTZeroIngestionEnabled bool AppName string @@ -389,6 +390,7 @@ func New(logger *slog.Logger, o *Options) *Handler { o.AcceptRemoteWriteProtoMsgs, o.EnableOTLPWriteReceiver, o.ConvertOTLPDelta, + o.NativeOTLPDeltaIngestion, o.CTZeroIngestionEnabled, )