From ba4b058b7ab60105e03f83380cc3200a8a66e52f Mon Sep 17 00:00:00 2001 From: hardlydearly <799511800@qq.com> Date: Wed, 30 Apr 2025 10:43:26 +0800 Subject: [PATCH 1/5] refactor: use slices.Contains to simplify code Signed-off-by: hardlydearly <799511800@qq.com> --- cmd/prometheus/main.go | 7 +++---- config/config.go | 9 ++++----- discovery/kubernetes/kubernetes.go | 12 ++---------- model/labels/labels_common.go | 6 ++---- model/labels/regexp.go | 22 ++++++---------------- rules/manager_test.go | 8 +++----- tsdb/index/postings.go | 6 ++---- 7 files changed, 22 insertions(+), 48 deletions(-) diff --git a/cmd/prometheus/main.go b/cmd/prometheus/main.go index 0e547deaf9..52c5194a91 100644 --- a/cmd/prometheus/main.go +++ b/cmd/prometheus/main.go @@ -30,6 +30,7 @@ import ( goregexp "regexp" //nolint:depguard // The Prometheus client library requires us to pass a regexp from this package. "runtime" "runtime/debug" + "slices" "strconv" "strings" "sync" @@ -1921,10 +1922,8 @@ func (p *rwProtoMsgFlagParser) Set(opt string) error { if err := t.Validate(); err != nil { return err } - for _, prev := range *p.msgs { - if prev == t { - return fmt.Errorf("duplicated %v flag value, got %v already", t, *p.msgs) - } + if slices.Contains(*p.msgs, t) { + return fmt.Errorf("duplicated %v flag value, got %v already", t, *p.msgs) } *p.msgs = append(*p.msgs, t) return nil diff --git a/config/config.go b/config/config.go index 5fbcbd8307..63464afe03 100644 --- a/config/config.go +++ b/config/config.go @@ -21,6 +21,7 @@ import ( "net/url" "os" "path/filepath" + "slices" "sort" "strconv" "strings" @@ -1109,13 +1110,11 @@ func (v *AlertmanagerAPIVersion) UnmarshalYAML(unmarshal func(interface{}) error return err } - for _, supportedVersion := range SupportedAlertmanagerAPIVersions { - if *v == supportedVersion { - return nil - } + if !slices.Contains(SupportedAlertmanagerAPIVersions, *v) { + return fmt.Errorf("expected Alertmanager api version to be one of %v but got %v", SupportedAlertmanagerAPIVersions, *v) } - return fmt.Errorf("expected Alertmanager api version to be one of %v but got %v", SupportedAlertmanagerAPIVersions, *v) + return nil } const ( diff --git a/discovery/kubernetes/kubernetes.go b/discovery/kubernetes/kubernetes.go index 03d9f2f449..2c4829ca8d 100644 --- a/discovery/kubernetes/kubernetes.go +++ b/discovery/kubernetes/kubernetes.go @@ -20,6 +20,7 @@ import ( "log/slog" "os" "reflect" + "slices" "strings" "sync" "time" @@ -210,18 +211,9 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error { if _, ok := allowedSelectors[c.Role]; !ok { return fmt.Errorf("invalid role: %q, expecting one of: pod, service, endpoints, endpointslice, node or ingress", c.Role) } - var allowed bool - for _, role := range allowedSelectors[c.Role] { - if role == string(selector.Role) { - allowed = true - break - } - } - - if !allowed { + if !slices.Contains(allowedSelectors[c.Role], string(selector.Role)) { return fmt.Errorf("%s role supports only %s selectors", c.Role, strings.Join(allowedSelectors[c.Role], ", ")) } - _, err := fields.ParseSelector(selector.Field) if err != nil { return err diff --git a/model/labels/labels_common.go b/model/labels/labels_common.go index 005eaa509e..092783fbbd 100644 --- a/model/labels/labels_common.go +++ b/model/labels/labels_common.go @@ -167,10 +167,8 @@ func (b *Builder) Del(ns ...string) *Builder { // Keep removes all labels from the base except those with the given names. 
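
The same mechanical replacement repeats in every hunk of this patch. As a standalone sketch outside the patch itself (hypothetical helper name, sample values only), the hand-rolled loop and slices.Contains are interchangeable for any comparable element type; the slices package has been in the standard library since Go 1.21:

```go
package main

import (
	"fmt"
	"slices" // standard library since Go 1.21
)

// containsManual is the hand-rolled form this patch removes everywhere.
func containsManual(haystack []string, needle string) bool {
	for _, v := range haystack {
		if v == needle {
			return true
		}
	}
	return false
}

func main() {
	msgs := []string{"prometheus.WriteRequest", "io.prometheus.write.v2.Request"}
	// slices.Contains performs the same linear scan with ==.
	fmt.Println(containsManual(msgs, msgs[0]))  // true
	fmt.Println(slices.Contains(msgs, msgs[0])) // true
	fmt.Println(slices.Contains(msgs, "other")) // false
}
```
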
func (b *Builder) Keep(ns ...string) *Builder { b.base.Range(func(l Label) { - for _, n := range ns { - if l.Name == n { - return - } + if slices.Contains(ns, l.Name) { + return } b.del = append(b.del, l.Name) }) diff --git a/model/labels/regexp.go b/model/labels/regexp.go index cf6c9158e9..1636aacc21 100644 --- a/model/labels/regexp.go +++ b/model/labels/regexp.go @@ -95,12 +95,7 @@ func (m *FastRegexMatcher) compileMatchStringFunction() func(string) bool { return func(s string) bool { if len(m.setMatches) != 0 { - for _, match := range m.setMatches { - if match == s { - return true - } - } - return false + return slices.Contains(m.setMatches, s) } if m.prefix != "" && !strings.HasPrefix(s, m.prefix) { return false @@ -771,16 +766,11 @@ func (m *equalMultiStringSliceMatcher) setMatches() []string { func (m *equalMultiStringSliceMatcher) Matches(s string) bool { if m.caseSensitive { - for _, v := range m.values { - if s == v { - return true - } - } - } else { - for _, v := range m.values { - if strings.EqualFold(s, v) { - return true - } + return slices.Contains(m.values, s) + } + for _, v := range m.values { + if strings.EqualFold(s, v) { + return true } } return false diff --git a/rules/manager_test.go b/rules/manager_test.go index efd7a8b23c..54ca8ebfb3 100644 --- a/rules/manager_test.go +++ b/rules/manager_test.go @@ -20,6 +20,7 @@ import ( "math" "os" "path" + "slices" "sort" "strconv" "sync" @@ -1008,11 +1009,8 @@ func TestMetricsUpdate(t *testing.T) { var metrics int for _, m := range ms { s := m.GetName() - for _, n := range metricNames { - if s == n { - metrics += len(m.Metric) - break - } + if slices.Contains(metricNames, s) { + metrics += len(m.Metric) } } return metrics diff --git a/tsdb/index/postings.go b/tsdb/index/postings.go index 7fdf64acca..75e3c2c148 100644 --- a/tsdb/index/postings.go +++ b/tsdb/index/postings.go @@ -599,10 +599,8 @@ func Intersect(its ...Postings) Postings { if len(its) == 1 { return its[0] } - for _, p := range its { - if p == EmptyPostings() { - return EmptyPostings() - } + if slices.Contains(its, EmptyPostings()) { + return EmptyPostings() } return newIntersectPostings(its...) From 591242901ac691d0f688cceb1c02e297c57478ba Mon Sep 17 00:00:00 2001 From: Neeraj Gartia <80708727+NeerajGartia21@users.noreply.github.com> Date: Sun, 11 May 2025 18:46:15 +0530 Subject: [PATCH 2/5] promql: Refactor some functions to make them more DRY (#16532) Signed-off-by: Neeraj Gartia --- promql/functions.go | 206 ++++++++++++++++---------------------------- 1 file changed, 72 insertions(+), 134 deletions(-) diff --git a/promql/functions.go b/promql/functions.go index 41526fcb16..272ed15893 100644 --- a/promql/functions.go +++ b/promql/functions.go @@ -612,7 +612,6 @@ func funcClampMin(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper // === round(Vector parser.ValueTypeVector, toNearest=1 Scalar) (Vector, Annotations) === func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - vec := vals[0].(Vector) // round returns a number rounded to toNearest. // Ties are solved by rounding up. toNearest := float64(1) @@ -621,23 +620,9 @@ func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper } // Invert as it seems to cause fewer floating point accuracy issues. toNearestInverse := 1.0 / toNearest - - for _, el := range vec { - if el.H != nil { - // Process only float samples. 
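
For reference, the rounding closure this hunk folds into simpleFloatFunc (visible just below) rounds ties upward and multiplies by the precomputed inverse of toNearest, which the source comment notes causes fewer floating-point accuracy issues than dividing. A minimal standalone check of that behavior, with a hypothetical helper name:

```go
package main

import (
	"fmt"
	"math"
)

// roundToNearest mirrors the closure funcRound passes to simpleFloatFunc.
func roundToNearest(f, toNearest float64) float64 {
	inv := 1.0 / toNearest // precomputed once per call in the real code
	return math.Floor(f*inv+0.5) / inv
}

func main() {
	fmt.Println(roundToNearest(2.5, 1))      // 3, ties round up
	fmt.Println(roundToNearest(-2.5, 1))     // -2, ties round up
	fmt.Println(roundToNearest(0.123, 0.05)) // 0.1
}
```
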
- continue - } - f := math.Floor(el.F*toNearestInverse+0.5) / toNearestInverse - if !enh.enableDelayedNameRemoval { - el.Metric = el.Metric.DropMetricName() - } - enh.Out = append(enh.Out, Sample{ - Metric: el.Metric, - F: f, - DropName: true, - }) - } - return enh.Out, nil + return simpleFloatFunc(vals, enh, func(f float64) float64 { + return math.Floor(f*toNearestInverse+0.5) / toNearestInverse + }), nil } // === Scalar(node parser.ValueTypeVector) Scalar === @@ -823,8 +808,8 @@ func funcMadOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode }), annos } -// === max_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === -func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { +// compareOverTime is a helper used by funcMaxOverTime and funcMinOverTime. +func compareOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper, compareFn func(float64, float64) bool) (Vector, annotations.Annotations) { samples := vals[0].(Matrix)[0] var annos annotations.Annotations if len(samples.Floats) == 0 { @@ -837,7 +822,7 @@ func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode return aggrOverTime(vals, enh, func(s Series) float64 { maxVal := s.Floats[0].F for _, f := range s.Floats { - if f.F > maxVal || math.IsNaN(maxVal) { + if compareFn(f.F, maxVal) { maxVal = f.F } } @@ -845,26 +830,18 @@ func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode }), annos } +// === max_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === +func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { + return compareOverTime(vals, args, enh, func(cur, maxVal float64) bool { + return (cur > maxVal) || math.IsNaN(maxVal) + }) +} + // === min_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === func funcMinOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - samples := vals[0].(Matrix)[0] - var annos annotations.Annotations - if len(samples.Floats) == 0 { - return enh.Out, nil - } - if len(samples.Histograms) > 0 { - metricName := samples.Metric.Get(labels.MetricName) - annos.Add(annotations.NewHistogramIgnoredInMixedRangeInfo(metricName, args[0].PositionRange())) - } - return aggrOverTime(vals, enh, func(s Series) float64 { - minVal := s.Floats[0].F - for _, f := range s.Floats { - if f.F < minVal || math.IsNaN(minVal) { - minVal = f.F - } - } - return minVal - }), annos + return compareOverTime(vals, args, enh, func(cur, maxVal float64) bool { + return (cur < maxVal) || math.IsNaN(maxVal) + }) } // === sum_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) === @@ -997,7 +974,7 @@ func funcPresentOverTime(vals []parser.Value, _ parser.Expressions, enh *EvalNod }), nil } -func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float64) Vector { +func simpleFloatFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float64) Vector { for _, el := range vals[0].(Vector) { if el.H == nil { // Process only float samples. 
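
The compareOverTime helper above collapses the max and min loops into one; only the comparator closure differs. A toy reproduction (hypothetical names, not engine code) showing that the IsNaN test makes a NaN running value lose to the first real sample, so NaN is only returned when every sample is NaN:

```go
package main

import (
	"fmt"
	"math"
)

// runningBest keeps whichever value the comparator prefers, seeded with the
// first sample, matching the loop inside compareOverTime.
func runningBest(vals []float64, better func(cur, best float64) bool) float64 {
	best := vals[0]
	for _, v := range vals {
		if better(v, best) {
			best = v
		}
	}
	return best
}

func main() {
	maxFn := func(cur, best float64) bool { return cur > best || math.IsNaN(best) }
	minFn := func(cur, best float64) bool { return cur < best || math.IsNaN(best) }
	data := []float64{math.NaN(), 3, 1, 2}
	fmt.Println(runningBest(data, maxFn)) // 3
	fmt.Println(runningBest(data, minFn)) // 1
}
```
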
if !enh.enableDelayedNameRemoval { @@ -1015,114 +992,114 @@ func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float6 // === abs(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcAbs(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Abs), nil + return simpleFloatFunc(vals, enh, math.Abs), nil } // === ceil(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcCeil(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Ceil), nil + return simpleFloatFunc(vals, enh, math.Ceil), nil } // === floor(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcFloor(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Floor), nil + return simpleFloatFunc(vals, enh, math.Floor), nil } // === exp(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcExp(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Exp), nil + return simpleFloatFunc(vals, enh, math.Exp), nil } // === sqrt(Vector VectorNode) (Vector, Annotations) === func funcSqrt(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Sqrt), nil + return simpleFloatFunc(vals, enh, math.Sqrt), nil } // === ln(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcLn(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Log), nil + return simpleFloatFunc(vals, enh, math.Log), nil } // === log2(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcLog2(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Log2), nil + return simpleFloatFunc(vals, enh, math.Log2), nil } // === log10(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcLog10(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Log10), nil + return simpleFloatFunc(vals, enh, math.Log10), nil } // === sin(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcSin(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Sin), nil + return simpleFloatFunc(vals, enh, math.Sin), nil } // === cos(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcCos(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Cos), nil + return simpleFloatFunc(vals, enh, math.Cos), nil } // === tan(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcTan(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Tan), nil + return simpleFloatFunc(vals, enh, math.Tan), nil } // === asin(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcAsin(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Asin), nil + return simpleFloatFunc(vals, enh, math.Asin), nil } // === acos(Vector 
parser.ValueTypeVector) (Vector, Annotations) === func funcAcos(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Acos), nil + return simpleFloatFunc(vals, enh, math.Acos), nil } // === atan(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcAtan(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Atan), nil + return simpleFloatFunc(vals, enh, math.Atan), nil } // === sinh(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcSinh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Sinh), nil + return simpleFloatFunc(vals, enh, math.Sinh), nil } // === cosh(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcCosh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Cosh), nil + return simpleFloatFunc(vals, enh, math.Cosh), nil } // === tanh(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcTanh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Tanh), nil + return simpleFloatFunc(vals, enh, math.Tanh), nil } // === asinh(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcAsinh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Asinh), nil + return simpleFloatFunc(vals, enh, math.Asinh), nil } // === acosh(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcAcosh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Acosh), nil + return simpleFloatFunc(vals, enh, math.Acosh), nil } // === atanh(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcAtanh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, math.Atanh), nil + return simpleFloatFunc(vals, enh, math.Atanh), nil } // === rad(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcRad(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, func(v float64) float64 { + return simpleFloatFunc(vals, enh, func(v float64) float64 { return v * math.Pi / 180 }), nil } // === deg(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcDeg(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, func(v float64) float64 { + return simpleFloatFunc(vals, enh, func(v float64) float64 { return v * 180 / math.Pi }), nil } @@ -1134,7 +1111,7 @@ func funcPi(_ []parser.Value, _ parser.Expressions, _ *EvalNodeHelper) (Vector, // === sgn(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcSgn(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - return simpleFunc(vals, enh, func(v float64) float64 { + return simpleFloatFunc(vals, enh, func(v float64) float64 { switch { case v < 0: return -1 @@ -1271,79 +1248,48 @@ func funcPredictLinear(vals []parser.Value, args parser.Expressions, enh *EvalNo return append(enh.Out, Sample{F: slope*duration + intercept}), nil } +func 
simpleHistogramFunc(vals []parser.Value, enh *EvalNodeHelper, f func(h *histogram.FloatHistogram) float64) Vector { + for _, el := range vals[0].(Vector) { + if el.H != nil { // Process only histogram samples. + if !enh.enableDelayedNameRemoval { + el.Metric = el.Metric.DropMetricName() + } + enh.Out = append(enh.Out, Sample{ + Metric: el.Metric, + F: f(el.H), + DropName: true, + }) + } + } + return enh.Out +} + // === histogram_count(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcHistogramCount(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - inVec := vals[0].(Vector) - - for _, sample := range inVec { - // Skip non-histogram samples. - if sample.H == nil { - continue - } - if !enh.enableDelayedNameRemoval { - sample.Metric = sample.Metric.DropMetricName() - } - enh.Out = append(enh.Out, Sample{ - Metric: sample.Metric, - F: sample.H.Count, - DropName: true, - }) - } - return enh.Out, nil + return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 { + return h.Count + }), nil } // === histogram_sum(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcHistogramSum(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - inVec := vals[0].(Vector) - - for _, sample := range inVec { - // Skip non-histogram samples. - if sample.H == nil { - continue - } - if !enh.enableDelayedNameRemoval { - sample.Metric = sample.Metric.DropMetricName() - } - enh.Out = append(enh.Out, Sample{ - Metric: sample.Metric, - F: sample.H.Sum, - DropName: true, - }) - } - return enh.Out, nil + return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 { + return h.Sum + }), nil } // === histogram_avg(Vector parser.ValueTypeVector) (Vector, Annotations) === func funcHistogramAvg(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) { - inVec := vals[0].(Vector) - - for _, sample := range inVec { - // Skip non-histogram samples. - if sample.H == nil { - continue - } - if !enh.enableDelayedNameRemoval { - sample.Metric = sample.Metric.DropMetricName() - } - enh.Out = append(enh.Out, Sample{ - Metric: sample.Metric, - F: sample.H.Sum / sample.H.Count, - DropName: true, - }) - } - return enh.Out, nil + return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 { + return h.Sum / h.Count + }), nil } func histogramVariance(vals []parser.Value, enh *EvalNodeHelper, varianceToResult func(float64) float64) (Vector, annotations.Annotations) { - vec := vals[0].(Vector) - for _, sample := range vec { - // Skip non-histogram samples. - if sample.H == nil { - continue - } - mean := sample.H.Sum / sample.H.Count + return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 { + mean := h.Sum / h.Count var variance, cVariance float64 - it := sample.H.AllBucketIterator() + it := h.AllBucketIterator() for it.Next() { bucket := it.At() if bucket.Count == 0 { @@ -1351,7 +1297,7 @@ func histogramVariance(vals []parser.Value, enh *EvalNodeHelper, varianceToResul } var val float64 switch { - case sample.H.UsesCustomBuckets(): + case h.UsesCustomBuckets(): // Use arithmetic mean in case of custom buckets. 
val = (bucket.Upper + bucket.Lower) / 2.0 case bucket.Lower <= 0 && bucket.Upper >= 0: @@ -1368,20 +1314,12 @@ func histogramVariance(vals []parser.Value, enh *EvalNodeHelper, varianceToResul variance, cVariance = kahanSumInc(bucket.Count*delta*delta, variance, cVariance) } variance += cVariance - variance /= sample.H.Count - if !enh.enableDelayedNameRemoval { - sample.Metric = sample.Metric.DropMetricName() - } + variance /= h.Count if varianceToResult != nil { variance = varianceToResult(variance) } - enh.Out = append(enh.Out, Sample{ - Metric: sample.Metric, - F: variance, - DropName: true, - }) - } - return enh.Out, nil + return variance + }), nil } // === histogram_stddev(Vector parser.ValueTypeVector) (Vector, Annotations) === From 8b0d33e5b20475318a1a72857ed8a9947b13e1aa Mon Sep 17 00:00:00 2001 From: Neeraj Gartia <80708727+NeerajGartia21@users.noreply.github.com> Date: Sun, 11 May 2025 19:10:31 +0530 Subject: [PATCH 3/5] promql: support variable scalar parameter in aggregations in range queries (#16404) This fixes the regression introduced in https://github.com/prometheus/prometheus/issues/15971 while preserving the performance improvements. Signed-off-by: Neeraj Gartia --- promql/engine.go | 131 +++++++++++++------- promql/promqltest/testdata/aggregators.test | 20 ++- promql/promqltest/testdata/limit.test | 22 +++- promql/value.go | 66 ++++++++++ 4 files changed, 186 insertions(+), 53 deletions(-) diff --git a/promql/engine.go b/promql/engine.go index a2738fdc1e..b5fec3153e 100644 --- a/promql/engine.go +++ b/promql/engine.go @@ -1377,7 +1377,7 @@ func (ev *evaluator) rangeEval(ctx context.Context, prepSeries func(labels.Label return mat, warnings } -func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.AggregateExpr, sortedGrouping []string, inputMatrix Matrix, param float64) (Matrix, annotations.Annotations) { +func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.AggregateExpr, sortedGrouping []string, inputMatrix Matrix, params *fParams) (Matrix, annotations.Annotations) { // Keep a copy of the original point slice so that it can be returned to the pool. origMatrix := slices.Clone(inputMatrix) defer func() { @@ -1387,7 +1387,7 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate } }() - var warnings annotations.Annotations + var annos annotations.Annotations enh := &EvalNodeHelper{enableDelayedNameRemoval: ev.enableDelayedNameRemoval} tempNumSamples := ev.currentSamples @@ -1417,46 +1417,43 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate } groups := make([]groupedAggregation, groupCount) - var k int64 - var ratio float64 var seriess map[uint64]Series + switch aggExpr.Op { case parser.TOPK, parser.BOTTOMK, parser.LIMITK: - if !convertibleToInt64(param) { - ev.errorf("Scalar value %v overflows int64", param) + // Return early if all k values are less than one. + if params.Max() < 1 { + return nil, annos } - k = int64(param) - if k > int64(len(inputMatrix)) { - k = int64(len(inputMatrix)) - } - if k < 1 { - return nil, warnings - } - seriess = make(map[uint64]Series, len(inputMatrix)) // Output series by series hash. + seriess = make(map[uint64]Series, len(inputMatrix)) + case parser.LIMIT_RATIO: - if math.IsNaN(param) { - ev.errorf("Ratio value %v is NaN", param) + // Return early if all r values are zero. 
+ if params.Max() == 0 && params.Min() == 0 { + return nil, annos } - switch { - case param == 0: - return nil, warnings - case param < -1.0: - ratio = -1.0 - warnings.Add(annotations.NewInvalidRatioWarning(param, ratio, aggExpr.Param.PositionRange())) - case param > 1.0: - ratio = 1.0 - warnings.Add(annotations.NewInvalidRatioWarning(param, ratio, aggExpr.Param.PositionRange())) - default: - ratio = param + if params.Max() > 1.0 { + annos.Add(annotations.NewInvalidRatioWarning(params.Max(), 1.0, aggExpr.Param.PositionRange())) } - seriess = make(map[uint64]Series, len(inputMatrix)) // Output series by series hash. + if params.Min() < -1.0 { + annos.Add(annotations.NewInvalidRatioWarning(params.Min(), -1.0, aggExpr.Param.PositionRange())) + } + seriess = make(map[uint64]Series, len(inputMatrix)) + case parser.QUANTILE: - if math.IsNaN(param) || param < 0 || param > 1 { - warnings.Add(annotations.NewInvalidQuantileWarning(param, aggExpr.Param.PositionRange())) + if params.HasAnyNaN() { + annos.Add(annotations.NewInvalidQuantileWarning(math.NaN(), aggExpr.Param.PositionRange())) + } + if params.Max() > 1 { + annos.Add(annotations.NewInvalidQuantileWarning(params.Max(), aggExpr.Param.PositionRange())) + } + if params.Min() < 0 { + annos.Add(annotations.NewInvalidQuantileWarning(params.Min(), aggExpr.Param.PositionRange())) } } for ts := ev.startTimestamp; ts <= ev.endTimestamp; ts += ev.interval { + fParam := params.Next() if err := contextDone(ctx, "expression evaluation"); err != nil { ev.error(err) } @@ -1468,17 +1465,17 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate var ws annotations.Annotations switch aggExpr.Op { case parser.TOPK, parser.BOTTOMK, parser.LIMITK, parser.LIMIT_RATIO: - result, ws = ev.aggregationK(aggExpr, k, ratio, inputMatrix, seriesToResult, groups, enh, seriess) + result, ws = ev.aggregationK(aggExpr, fParam, inputMatrix, seriesToResult, groups, enh, seriess) // If this could be an instant query, shortcut so as not to change sort order. - if ev.endTimestamp == ev.startTimestamp { - warnings.Merge(ws) - return result, warnings + if ev.startTimestamp == ev.endTimestamp { + annos.Merge(ws) + return result, annos } default: - ws = ev.aggregation(aggExpr, param, inputMatrix, result, seriesToResult, groups, enh) + ws = ev.aggregation(aggExpr, fParam, inputMatrix, result, seriesToResult, groups, enh) } - warnings.Merge(ws) + annos.Merge(ws) if ev.currentSamples > ev.maxSamples { ev.error(ErrTooManySamples(env)) @@ -1503,7 +1500,7 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate } result = result[:dst] } - return result, warnings + return result, annos } // evalSeries generates a Matrix between ev.startTimestamp and ev.endTimestamp (inclusive), each point spaced ev.interval apart, from series given offset. @@ -1681,18 +1678,14 @@ func (ev *evaluator) eval(ctx context.Context, expr parser.Expr) (parser.Value, var warnings annotations.Annotations originalNumSamples := ev.currentSamples // param is the number k for topk/bottomk, or q for quantile. - var fParam float64 - if param != nil { - val, ws := ev.eval(ctx, param) - warnings.Merge(ws) - fParam = val.(Matrix)[0].Floats[0].F - } + fp, ws := newFParams(ctx, ev, param) + warnings.Merge(ws) // Now fetch the data to be aggregated. 
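
newFParams and fParams.Next are defined later in this patch (promql/value.go). A stripped-down sketch of the behavior the evaluator relies on at this point, using hypothetical names: a literal parameter stays constant across evaluation steps, while a scalar expression such as scalar(foo) yields one value per step:

```go
package main

import "fmt"

// stepParam is a toy stand-in for fParams: either a constant, or a cursor
// over one precomputed value per evaluation step.
type stepParam struct {
	constant bool
	value    float64
	series   []float64
}

func (p *stepParam) next() float64 {
	if p.constant {
		return p.value
	}
	if len(p.series) > 0 {
		v := p.series[0]
		p.series = p.series[1:]
		return v
	}
	return 0
}

func main() {
	k := stepParam{constant: true, value: 3}      // e.g. topk(3, ...)
	q := stepParam{series: []float64{1, 2, 3}}    // e.g. topk(scalar(foo), ...)
	for step := 0; step < 3; step++ {
		fmt.Println(k.next(), q.next()) // 3 1, then 3 2, then 3 3
	}
}
```
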
val, ws := ev.eval(ctx, e.Expr) warnings.Merge(ws) inputMatrix := val.(Matrix) - result, ws := ev.rangeEvalAgg(ctx, e, sortedGrouping, inputMatrix, fParam) + result, ws := ev.rangeEvalAgg(ctx, e, sortedGrouping, inputMatrix, fp) warnings.Merge(ws) ev.currentSamples = originalNumSamples + result.TotalSamples() ev.samplesStats.UpdatePeak(ev.currentSamples) @@ -3269,7 +3262,7 @@ func (ev *evaluator) aggregation(e *parser.AggregateExpr, q float64, inputMatrix // seriesToResult maps inputMatrix indexes to groups indexes. // For an instant query, returns a Matrix in descending order for topk or ascending for bottomk, or without any order for limitk / limit_ratio. // For a range query, aggregates output in the seriess map. -func (ev *evaluator) aggregationK(e *parser.AggregateExpr, k int64, r float64, inputMatrix Matrix, seriesToResult []int, groups []groupedAggregation, enh *EvalNodeHelper, seriess map[uint64]Series) (Matrix, annotations.Annotations) { +func (ev *evaluator) aggregationK(e *parser.AggregateExpr, fParam float64, inputMatrix Matrix, seriesToResult []int, groups []groupedAggregation, enh *EvalNodeHelper, seriess map[uint64]Series) (Matrix, annotations.Annotations) { op := e.Op var s Sample var annos annotations.Annotations @@ -3278,6 +3271,14 @@ func (ev *evaluator) aggregationK(e *parser.AggregateExpr, k int64, r float64, i for i := range groups { groups[i].seen = false } + // advanceRemainingSeries discards any values at the current timestamp `ts` + // for the remaining input series. In range queries, if these values are not + // consumed now, they will no longer be accessible in the next evaluation step. + advanceRemainingSeries := func(ts int64, startIdx int) { + for i := startIdx; i < len(inputMatrix); i++ { + _, _, _ = ev.nextValues(ts, &inputMatrix[i]) + } + } seriesLoop: for si := range inputMatrix { @@ -3287,6 +3288,42 @@ seriesLoop: } s = Sample{Metric: inputMatrix[si].Metric, F: f, H: h, DropName: inputMatrix[si].DropName} + var k int64 + var r float64 + switch op { + case parser.TOPK, parser.BOTTOMK, parser.LIMITK: + if !convertibleToInt64(fParam) { + ev.errorf("Scalar value %v overflows int64", fParam) + } + k = int64(fParam) + if k > int64(len(inputMatrix)) { + k = int64(len(inputMatrix)) + } + if k < 1 { + if enh.Ts != ev.endTimestamp { + advanceRemainingSeries(enh.Ts, si+1) + } + return nil, annos + } + case parser.LIMIT_RATIO: + if math.IsNaN(fParam) { + ev.errorf("Ratio value %v is NaN", fParam) + } + switch { + case fParam == 0: + if enh.Ts != ev.endTimestamp { + advanceRemainingSeries(enh.Ts, si+1) + } + return nil, annos + case fParam < -1.0: + r = -1.0 + case fParam > 1.0: + r = 1.0 + default: + r = fParam + } + } + group := &groups[seriesToResult[si]] // Initialize this group if it's the first time we've seen it. if !group.seen { @@ -3377,6 +3414,10 @@ seriesLoop: group.groupAggrComplete = true groupsRemaining-- if groupsRemaining == 0 { + // Process other values in the series before breaking the loop in case of range query. 
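
The comment above is the crux of the range-query fix: each input series acts as a cursor that must be advanced exactly once per evaluation step, even when the step's result is discarded early. A toy illustration with hypothetical types, not the engine's actual ones:

```go
package main

import "fmt"

// cursor pops one value per step, like nextValues on an input series.
type cursor struct{ vals []float64 }

func (c *cursor) next() float64 { v := c.vals[0]; c.vals = c.vals[1:]; return v }

func main() {
	series := []*cursor{{[]float64{1, 2}}, {[]float64{10, 20}}}
	// Step 1: suppose we bail out after consuming only the first series
	// (say k evaluated to 0 at this step)...
	_ = series[0].next()
	// ...without draining the rest, series[1] would hand us 10 (step-1 data)
	// at step 2. This is what advanceRemainingSeries prevents.
	for _, c := range series[1:] {
		_ = c.next()
	}
	// Step 2 now sees the correct values from both series.
	fmt.Println(series[0].next(), series[1].next()) // 2 20
}
```
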
+ if enh.Ts != ev.endTimestamp { + advanceRemainingSeries(enh.Ts, si+1) + } break seriesLoop } } diff --git a/promql/promqltest/testdata/aggregators.test b/promql/promqltest/testdata/aggregators.test index 1e3ab79a35..b8ebdc55c6 100644 --- a/promql/promqltest/testdata/aggregators.test +++ b/promql/promqltest/testdata/aggregators.test @@ -274,7 +274,7 @@ load 5m http_requests{job="app-server", instance="1", group="canary"} 0+80x10 http_requests_histogram{job="app-server", instance="2", group="canary"} {{schema:0 sum:10 count:10}}x11 http_requests_histogram{job="api-server", instance="3", group="production"} {{schema:0 sum:20 count:20}}x11 - foo 3+0x10 + foo 1+1x9 3 eval_ordered instant at 50m topk(3, http_requests) http_requests{group="canary", instance="1", job="app-server"} 800 @@ -340,6 +340,13 @@ eval_ordered instant at 50m topk(scalar(foo), http_requests) http_requests{group="canary", instance="0", job="app-server"} 700 http_requests{group="production", instance="1", job="app-server"} 600 +# Bug #15971. +eval range from 0m to 50m step 5m count(topk(scalar(foo), http_requests)) + {} 1 2 3 4 5 6 7 8 9 9 3 + +eval range from 0m to 50m step 5m count(bottomk(scalar(foo), http_requests)) + {} 1 2 3 4 5 6 7 8 9 9 3 + # Tests for histogram: should ignore histograms. eval_info instant at 50m topk(100, http_requests_histogram) #empty @@ -447,7 +454,7 @@ load 10s data{test="uneven samples",point="b"} 1 data{test="uneven samples",point="c"} 4 data_histogram{test="histogram sample", point="c"} {{schema:2 count:4 sum:10 buckets:[1 0 0 0 1 0 0 1 1]}} - foo .8 + foo 0 1 0 1 0 1 0.8 eval instant at 1m quantile without(point)(0.8, data) {test="two samples"} 0.8 @@ -475,11 +482,18 @@ eval instant at 1m quantile without(point)((scalar(foo)), data) {test="three samples"} 1.6 {test="uneven samples"} 2.8 -eval_warn instant at 1m quantile without(point)(NaN, data) +eval instant at 1m quantile without(point)(NaN, data) + expect warn msg: PromQL warning: quantile value should be between 0 and 1, got NaN {test="two samples"} NaN {test="three samples"} NaN {test="uneven samples"} NaN +# Bug #15971. +eval range from 0m to 1m step 10s quantile without(point) (scalar(foo), data) + {test="two samples"} 0 1 0 1 0 1 0.8 + {test="three samples"} 0 2 0 2 0 2 1.6 + {test="uneven samples"} 0 4 0 4 0 4 2.8 + # Tests for group. clear diff --git a/promql/promqltest/testdata/limit.test b/promql/promqltest/testdata/limit.test index e6dd007af4..484760cc85 100644 --- a/promql/promqltest/testdata/limit.test +++ b/promql/promqltest/testdata/limit.test @@ -11,6 +11,8 @@ load 5m http_requests{job="api-server", instance="3", group="canary"} 0+60x10 http_requests{job="api-server", instance="histogram_1", group="canary"} {{schema:0 sum:10 count:10}}x11 http_requests{job="api-server", instance="histogram_2", group="canary"} {{schema:0 sum:20 count:20}}x11 + foo 1+1x10 + bar 0 1 0 -1 0 1 0 -1 0 1 0 eval instant at 50m count(limitk by (group) (0, http_requests)) # empty @@ -69,6 +71,10 @@ eval instant at 50m count(limitk(1000, http_requests{instance=~"histogram_[0-9]" eval range from 0 to 50m step 5m count(limitk(1000, http_requests{instance=~"histogram_[0-9]"})) {} 2+0x10 +# Bug #15971. +eval range from 0m to 50m step 5m count(limitk(scalar(foo), http_requests)) + {} 1 2 3 4 5 6 7 8 8 8 8 + # limit_ratio eval range from 0 to 50m step 5m count(limit_ratio(0.0, http_requests)) # empty @@ -105,11 +111,13 @@ eval range from 0 to 50m step 5m count(limit_ratio(-1.0, http_requests) and http {} 8+0x10 # Capped to 1.0 -> all samples. 
-eval_warn range from 0 to 50m step 5m count(limit_ratio(1.1, http_requests) and http_requests) +eval range from 0 to 50m step 5m count(limit_ratio(1.1, http_requests) and http_requests) + expect warn msg: PromQL warning: ratio value should be between -1 and 1, got 1.1, capping to 1 {} 8+0x10 # Capped to -1.0 -> all samples. -eval_warn range from 0 to 50m step 5m count(limit_ratio(-1.1, http_requests) and http_requests) +eval range from 0 to 50m step 5m count(limit_ratio(-1.1, http_requests) and http_requests) + expect warn msg: PromQL warning: ratio value should be between -1 and 1, got -1.1, capping to -1 {} 8+0x10 # Verify that limit_ratio(value) and limit_ratio(1.0-value) return the "complement" of each other. @@ -137,12 +145,12 @@ eval range from 0 to 50m step 5m count(limit_ratio(0.8, http_requests) or limit_ eval range from 0 to 50m step 5m count(limit_ratio(0.8, http_requests) and limit_ratio(-0.2, http_requests)) # empty -# Complement below for [some_ratio, 1.0 - some_ratio], some_ratio derived from time(), +# Complement below for [some_ratio, - (1.0 - some_ratio)], some_ratio derived from time(), # using a small prime number to avoid rounded ratio values, and a small set of them. -eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) or limit_ratio(1.0 - (time() % 17/17), http_requests)) +eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) or limit_ratio( - (1.0 - (time() % 17/17)), http_requests)) {} 8+0x10 -eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) and limit_ratio(1.0 - (time() % 17/17), http_requests)) +eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) and limit_ratio( - (1.0 - (time() % 17/17)), http_requests)) # empty # Poor man's normality check: ok (loaded samples follow a nice linearity over labels and time). @@ -156,3 +164,7 @@ eval instant at 50m limit_ratio(1, http_requests{instance="histogram_1"}) eval range from 0 to 50m step 5m limit_ratio(1, http_requests{instance="histogram_1"}) {__name__="http_requests", group="canary", instance="histogram_1", job="api-server"} {{count:10 sum:10}}x10 + +# Bug #15971. +eval range from 0m to 50m step 5m count(limit_ratio(scalar(bar), http_requests)) + {} _ 8 _ 8 _ 8 _ 8 _ 8 _ diff --git a/promql/value.go b/promql/value.go index f19c0b5b58..dc59b9e9cc 100644 --- a/promql/value.go +++ b/promql/value.go @@ -14,6 +14,7 @@ package promql import ( + "context" "encoding/json" "errors" "fmt" @@ -533,3 +534,68 @@ func (ssi *storageSeriesIterator) Next() chunkenc.ValueType { func (ssi *storageSeriesIterator) Err() error { return nil } + +type fParams struct { + series Series + constValue float64 + isConstant bool + minValue float64 + maxValue float64 + hasAnyNaN bool +} + +// newFParams evaluates the expression and returns an fParams object, +// which holds the parameter values (constant or series) along with min, max, and NaN info. 
+func newFParams(ctx context.Context, ev *evaluator, expr parser.Expr) (*fParams, annotations.Annotations) { + if expr == nil { + return &fParams{}, nil + } + var constParam bool + if _, ok := expr.(*parser.NumberLiteral); ok { + constParam = true + } + val, ws := ev.eval(ctx, expr) + mat, ok := val.(Matrix) + if !ok || len(mat) == 0 { + return &fParams{}, ws + } + fp := &fParams{ + series: mat[0], + isConstant: constParam, + minValue: math.MaxFloat64, + maxValue: -math.MaxFloat64, + } + + if constParam { + fp.constValue = fp.series.Floats[0].F + fp.minValue, fp.maxValue = fp.constValue, fp.constValue + fp.hasAnyNaN = math.IsNaN(fp.constValue) + return fp, ws + } + + for _, v := range fp.series.Floats { + fp.maxValue = math.Max(fp.maxValue, v.F) + fp.minValue = math.Min(fp.minValue, v.F) + if math.IsNaN(v.F) { + fp.hasAnyNaN = true + } + } + return fp, ws +} + +func (fp *fParams) Max() float64 { return fp.maxValue } +func (fp *fParams) Min() float64 { return fp.minValue } +func (fp *fParams) HasAnyNaN() bool { return fp.hasAnyNaN } + +// Next returns the next value from the series or the constant value, and advances the series if applicable. +func (fp *fParams) Next() float64 { + if fp.isConstant { + return fp.constValue + } + if len(fp.series.Floats) > 0 { + val := fp.series.Floats[0].F + fp.series.Floats = fp.series.Floats[1:] + return val + } + return 0 +} From 5c06804df88d1e92e04e5790295fe0d3250eec16 Mon Sep 17 00:00:00 2001 From: Julius Volz Date: Mon, 12 May 2025 10:39:58 +0200 Subject: [PATCH 4/5] Optimize memoization and search debouncing on /targets page (#16589) Moving the debouncing of the search field to the parent component and then memoizing the ScrapePoolsList component prevents a lot of superfluous re-renders of the entire scrape pools list that previously got triggered immediately when you typed in the search box or even just collapsed a pool. (While the computation of what data to show was already memoized in the ScrapePoolList component, the component itself still had to re-render a lot with the same data.) Discovered this problem + verified fix using react-scan. 
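
A reduced sketch of the pattern this commit applies, with illustrative component names rather than the actual ones: the parent owns the raw input and debounces it, and the child is wrapped in memo so it re-renders only when the debounced prop changes:

```tsx
import { FC, memo, useState } from "react";
import { useDebouncedValue } from "@mantine/hooks";

// Memoized child: re-renders only when the debounced `search` prop changes,
// not on every keystroke in the parent.
const ResultsList: FC<{ search: string }> = memo(({ search }) => (
  <ul>
    <li>results for {search}</li>
  </ul>
));

const SearchPage: FC = () => {
  const [search, setSearch] = useState("");
  // Debounce in the parent, so typing re-renders only this cheap component.
  const [debounced] = useDebouncedValue(search.trim(), 250);
  return (
    <>
      <input value={search} onChange={(e) => setSearch(e.currentTarget.value)} />
      <ResultsList search={debounced} />
    </>
  );
};

export default SearchPage;
```
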
Signed-off-by: Julius Volz --- .../src/pages/targets/ScrapePoolsList.tsx | 519 +++++++++--------- .../src/pages/targets/TargetsPage.tsx | 12 +- 2 files changed, 270 insertions(+), 261 deletions(-) diff --git a/web/ui/mantine-ui/src/pages/targets/ScrapePoolsList.tsx b/web/ui/mantine-ui/src/pages/targets/ScrapePoolsList.tsx index c444747724..7423fd8a72 100644 --- a/web/ui/mantine-ui/src/pages/targets/ScrapePoolsList.tsx +++ b/web/ui/mantine-ui/src/pages/targets/ScrapePoolsList.tsx @@ -19,7 +19,7 @@ import { } from "@tabler/icons-react"; import { useSuspenseAPIQuery } from "../../api/api"; import { Target, TargetsResult } from "../../api/responseTypes/targets"; -import React, { FC, useMemo } from "react"; +import React, { FC, memo, useMemo } from "react"; import { humanizeDurationRelative, humanizeDuration, @@ -37,7 +37,6 @@ import CustomInfiniteScroll from "../../components/CustomInfiniteScroll"; import badgeClasses from "../../Badge.module.css"; import panelClasses from "../../Panel.module.css"; import TargetLabels from "./TargetLabels"; -import { useDebouncedValue } from "@mantine/hooks"; import { targetPoolDisplayLimit } from "./TargetsPage"; import { badgeIconStyle } from "../../styles"; @@ -145,278 +144,280 @@ type ScrapePoolListProp = { searchFilter: string; }; -const ScrapePoolList: FC = ({ - poolNames, - selectedPool, - healthFilter, - searchFilter, -}) => { - // Based on the selected pool (if any), load the list of targets. - const { - data: { - data: { activeTargets }, - }, - } = useSuspenseAPIQuery({ - path: `/targets`, - params: { - state: "active", - scrapePool: selectedPool === null ? "" : selectedPool, - }, - }); +const ScrapePoolList: FC = memo( + ({ poolNames, selectedPool, healthFilter, searchFilter }) => { + // Based on the selected pool (if any), load the list of targets. + const { + data: { + data: { activeTargets }, + }, + } = useSuspenseAPIQuery({ + path: `/targets`, + params: { + state: "active", + scrapePool: selectedPool === null ? "" : selectedPool, + }, + }); - const dispatch = useAppDispatch(); - const [showEmptyPools, setShowEmptyPools] = useLocalStorage({ - key: "targetsPage.showEmptyPools", - defaultValue: false, - }); + const dispatch = useAppDispatch(); + const [showEmptyPools, setShowEmptyPools] = useLocalStorage({ + key: "targetsPage.showEmptyPools", + defaultValue: false, + }); - const { collapsedPools, showLimitAlert } = useAppSelector( - (state) => state.targetsPage - ); + const { collapsedPools, showLimitAlert } = useAppSelector( + (state) => state.targetsPage + ); - const [debouncedSearch] = useDebouncedValue(searchFilter.trim(), 250); + const allPools = useMemo( + () => + buildPoolsData( + selectedPool ? [selectedPool] : poolNames, + activeTargets, + searchFilter, + healthFilter + ), + [selectedPool, poolNames, activeTargets, searchFilter, healthFilter] + ); - const allPools = useMemo( - () => - buildPoolsData( - selectedPool ? [selectedPool] : poolNames, - activeTargets, - debouncedSearch, - healthFilter - ), - [selectedPool, poolNames, activeTargets, debouncedSearch, healthFilter] - ); + const allPoolNames = Object.keys(allPools); + const shownPoolNames = showEmptyPools + ? allPoolNames + : allPoolNames.filter((pn) => allPools[pn].targets.length !== 0); - const allPoolNames = Object.keys(allPools); - const shownPoolNames = showEmptyPools - ? allPoolNames - : allPoolNames.filter((pn) => allPools[pn].targets.length !== 0); - - return ( - - {allPoolNames.length === 0 ? ( - }> - No scrape pools found. 
- - ) : ( - !showEmptyPools && - allPoolNames.length !== shownPoolNames.length && ( - } - > - Hiding {allPoolNames.length - shownPoolNames.length} empty pools due - to filters or no targets. - setShowEmptyPools(true)}> - Show empty pools - + return ( + + {allPoolNames.length === 0 ? ( + }> + No scrape pools found. - ) - )} - {showLimitAlert && ( - } - withCloseButton - onClose={() => dispatch(setShowLimitAlert(false))} - > - There are more than {targetPoolDisplayLimit} scrape pools. Showing - only the first one. Use the dropdown to select a different pool. - - )} - !collapsedPools.includes(p))} - onChange={(value) => - dispatch( - setCollapsedPools(allPoolNames.filter((p) => !value.includes(p))) - ) - } - > - {shownPoolNames.map((poolName) => { - const pool = allPools[poolName]; - return ( - } > - - - {poolName} - - - {pool.upCount} / {pool.count} up - - + Hiding {allPoolNames.length - shownPoolNames.length} empty pools + due to filters or no targets. + setShowEmptyPools(true)}> + Show empty pools + + + ) + )} + {showLimitAlert && ( + } + withCloseButton + onClose={() => dispatch(setShowLimitAlert(false))} + > + There are more than {targetPoolDisplayLimit} scrape pools. Showing + only the first one. Use the dropdown to select a different pool. + + )} + !collapsedPools.includes(p))} + onChange={(value) => + dispatch( + setCollapsedPools(allPoolNames.filter((p) => !value.includes(p))) + ) + } + > + {shownPoolNames.map((poolName) => { + const pool = allPools[poolName]; + return ( + + + + {poolName} + + + {pool.upCount} / {pool.count} up + + + - - - - {pool.count === 0 ? ( - }> - No active targets in this scrape pool. - setShowEmptyPools(false)} + + + {pool.count === 0 ? ( + }> + No active targets in this scrape pool. + setShowEmptyPools(false)} + > + Hide empty pools + + + ) : pool.targets.length === 0 ? ( + } > - Hide empty pools - - - ) : pool.targets.length === 0 ? ( - }> - No targets in this pool match your filter criteria (omitted{" "} - {pool.count} filtered targets). - setShowEmptyPools(false)} - > - Hide empty pools - - - ) : ( - ( - - - - Endpoint - Labels - Last scrape - State - - - - {items.map((target, i) => ( - // TODO: Find a stable and definitely unique key. - - - - - + No targets in this pool match your filter criteria + (omitted {pool.count} filtered targets). + setShowEmptyPools(false)} + > + Hide empty pools + + + ) : ( + ( +
+ + + Endpoint + Labels + Last scrape + State + + + + {items.map((target, i) => ( + // TODO: Find a stable and definitely unique key. + + + + + - - - - - - - - } + + + + + + - {humanizeDurationRelative( - target.lastScrape, - now() - )} - - + + } + > + {humanizeDurationRelative( + target.lastScrape, + now() + )} + + - - - } + - {humanizeDuration( - target.lastScrapeDuration * 1000 - )} - - - - - - - {target.health} - - - - {target.lastError && ( - - - } + + } + > + {humanizeDuration( + target.lastScrapeDuration * 1000 + )} + + + + + + - Error scraping target:{" "} - {target.lastError} - + {target.health} + - )} - - ))} - -
- )} - /> - )} -
-
- ); - })} -
-
- ); -}; + {target.lastError && ( + + + } + > + Error scraping target:{" "} + {target.lastError} + + + + )} + + ))} + + + )} + /> + )} + + + ); + })} + + + ); + } +); export default ScrapePoolList; diff --git a/web/ui/mantine-ui/src/pages/targets/TargetsPage.tsx b/web/ui/mantine-ui/src/pages/targets/TargetsPage.tsx index 399d1a458d..75d7bd2f4e 100644 --- a/web/ui/mantine-ui/src/pages/targets/TargetsPage.tsx +++ b/web/ui/mantine-ui/src/pages/targets/TargetsPage.tsx @@ -30,9 +30,16 @@ import ScrapePoolList from "./ScrapePoolsList"; import { useSuspenseAPIQuery } from "../../api/api"; import { ScrapePoolsResult } from "../../api/responseTypes/scrapePools"; import { expandIconStyle, inputIconStyle } from "../../styles"; +import { useDebouncedValue } from "@mantine/hooks"; export const targetPoolDisplayLimit = 20; +// Should be defined as a constant here instead of inline as a value +// to avoid unnecessary re-renders. Otherwise the empty array has +// a different reference on each render and causes subsequent memoized +// computations to re-run as long as no state filter is selected. +const emptyHealthFilter: string[] = []; + export default function TargetsPage() { // Load the list of all available scrape pools. const { @@ -48,12 +55,13 @@ export default function TargetsPage() { const [scrapePool, setScrapePool] = useQueryParam("pool", StringParam); const [healthFilter, setHealthFilter] = useQueryParam( "health", - withDefault(ArrayParam, []) + withDefault(ArrayParam, emptyHealthFilter) ); const [searchFilter, setSearchFilter] = useQueryParam( "search", withDefault(StringParam, "") ); + const [debouncedSearch] = useDebouncedValue(searchFilter.trim(), 250); const { collapsedPools, showLimitAlert } = useAppSelector( (state) => state.targetsPage @@ -147,7 +155,7 @@ export default function TargetsPage() { poolNames={scrapePools} selectedPool={(limited && scrapePools[0]) || scrapePool || null} healthFilter={healthFilter as string[]} - searchFilter={searchFilter} + searchFilter={debouncedSearch} /> From dbf5d01a62249eddcd202303069f6cf7dd3c4a73 Mon Sep 17 00:00:00 2001 From: Julius Volz Date: Mon, 12 May 2025 12:17:18 +0200 Subject: [PATCH 5/5] Fix full-page re-rendering when opening status nav menu (#16590) When opening the status pages menu while already viewing one of the status pages, the whole page would be re-rendered because the menu target's default action of following the current page's URL was not prevented. Also, we don't need to use a NavLink component for the menu target when we are not viewing a status page, because then the component won't need to be highlighted anyways. Discovered + fixed with the help of react-scan. Signed-off-by: Julius Volz --- web/ui/mantine-ui/src/App.tsx | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/web/ui/mantine-ui/src/App.tsx b/web/ui/mantine-ui/src/App.tsx index 2599c9f5aa..f3f02b41dc 100644 --- a/web/ui/mantine-ui/src/App.tsx +++ b/web/ui/mantine-ui/src/App.tsx @@ -224,6 +224,7 @@ function App() { leftSection={p.icon} rightSection={} px={navLinkXPadding} + onClick={(e) => e.preventDefault()} > Status {p.title} @@ -236,14 +237,9 @@ function App() { element={