Merge branch 'prometheus:main' into patch-tracing

Vanshika 2025-05-12 20:11:55 +05:30 committed by GitHub
commit 4d5add3cc3
15 changed files with 557 additions and 503 deletions

View File

@@ -30,6 +30,7 @@ import (
 	goregexp "regexp" //nolint:depguard // The Prometheus client library requires us to pass a regexp from this package.
 	"runtime"
 	"runtime/debug"
+	"slices"
 	"strconv"
 	"strings"
 	"sync"
@@ -1921,10 +1922,8 @@ func (p *rwProtoMsgFlagParser) Set(opt string) error {
 	if err := t.Validate(); err != nil {
 		return err
 	}
-	for _, prev := range *p.msgs {
-		if prev == t {
-			return fmt.Errorf("duplicated %v flag value, got %v already", t, *p.msgs)
-		}
+	if slices.Contains(*p.msgs, t) {
+		return fmt.Errorf("duplicated %v flag value, got %v already", t, *p.msgs)
 	}
 	*p.msgs = append(*p.msgs, t)
 	return nil

View File

@@ -21,6 +21,7 @@ import (
 	"net/url"
 	"os"
 	"path/filepath"
+	"slices"
 	"sort"
 	"strconv"
 	"strings"
@@ -1109,13 +1110,11 @@ func (v *AlertmanagerAPIVersion) UnmarshalYAML(unmarshal func(interface{}) error
 		return err
 	}
-	for _, supportedVersion := range SupportedAlertmanagerAPIVersions {
-		if *v == supportedVersion {
-			return nil
-		}
-	}
-	return fmt.Errorf("expected Alertmanager api version to be one of %v but got %v", SupportedAlertmanagerAPIVersions, *v)
+	if !slices.Contains(SupportedAlertmanagerAPIVersions, *v) {
+		return fmt.Errorf("expected Alertmanager api version to be one of %v but got %v", SupportedAlertmanagerAPIVersions, *v)
+	}
+	return nil
 }
 
 const (

View File

@@ -20,6 +20,7 @@ import (
 	"log/slog"
 	"os"
 	"reflect"
+	"slices"
 	"strings"
 	"sync"
 	"time"
@@ -210,18 +211,9 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
 		if _, ok := allowedSelectors[c.Role]; !ok {
 			return fmt.Errorf("invalid role: %q, expecting one of: pod, service, endpoints, endpointslice, node or ingress", c.Role)
 		}
-		var allowed bool
-		for _, role := range allowedSelectors[c.Role] {
-			if role == string(selector.Role) {
-				allowed = true
-				break
-			}
-		}
-		if !allowed {
+		if !slices.Contains(allowedSelectors[c.Role], string(selector.Role)) {
 			return fmt.Errorf("%s role supports only %s selectors", c.Role, strings.Join(allowedSelectors[c.Role], ", "))
 		}
 		_, err := fields.ParseSelector(selector.Field)
 		if err != nil {
 			return err
View File

@@ -167,10 +167,8 @@ func (b *Builder) Del(ns ...string) *Builder {
 // Keep removes all labels from the base except those with the given names.
 func (b *Builder) Keep(ns ...string) *Builder {
 	b.base.Range(func(l Label) {
-		for _, n := range ns {
-			if l.Name == n {
-				return
-			}
+		if slices.Contains(ns, l.Name) {
+			return
 		}
 		b.del = append(b.del, l.Name)
 	})

View File

@@ -95,12 +95,7 @@ func (m *FastRegexMatcher) compileMatchStringFunction() func(string) bool {
 	return func(s string) bool {
 		if len(m.setMatches) != 0 {
-			for _, match := range m.setMatches {
-				if match == s {
-					return true
-				}
-			}
-			return false
+			return slices.Contains(m.setMatches, s)
 		}
 		if m.prefix != "" && !strings.HasPrefix(s, m.prefix) {
 			return false
@@ -771,16 +766,11 @@ func (m *equalMultiStringSliceMatcher) setMatches() []string {
 func (m *equalMultiStringSliceMatcher) Matches(s string) bool {
 	if m.caseSensitive {
-		for _, v := range m.values {
-			if s == v {
-				return true
-			}
-		}
-	} else {
-		for _, v := range m.values {
-			if strings.EqualFold(s, v) {
-				return true
-			}
-		}
+		return slices.Contains(m.values, s)
+	}
+	for _, v := range m.values {
+		if strings.EqualFold(s, v) {
+			return true
+		}
 	}
 	return false
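Note: for context, a hedged toy sketch (not the actual FastRegexMatcher implementation) of the fast path being simplified in the two hunks above: when a pattern compiles down to a finite set of literal alternatives, matching reduces to set membership, which is exactly what `slices.Contains` expresses.

```go
package main

import (
	"fmt"
	"slices"
	"strings"
)

// toyMatcher models two of the optimizations shown above: a literal
// alternation set (e.g. `foo|bar`) and a literal prefix (e.g. `web-.*`).
type toyMatcher struct {
	setMatches []string
	prefix     string
}

func (m *toyMatcher) Matches(s string) bool {
	if len(m.setMatches) != 0 {
		// Case-sensitive set matching is plain membership.
		return slices.Contains(m.setMatches, s)
	}
	return m.prefix == "" || strings.HasPrefix(s, m.prefix)
}

func main() {
	m := &toyMatcher{setMatches: []string{"foo", "bar"}}
	fmt.Println(m.Matches("foo"), m.Matches("baz")) // true false
}
```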

View File

@@ -1377,7 +1377,7 @@ func (ev *evaluator) rangeEval(ctx context.Context, prepSeries func(labels.Label
 	return mat, warnings
 }
 
-func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.AggregateExpr, sortedGrouping []string, inputMatrix Matrix, param float64) (Matrix, annotations.Annotations) {
+func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.AggregateExpr, sortedGrouping []string, inputMatrix Matrix, params *fParams) (Matrix, annotations.Annotations) {
 	// Keep a copy of the original point slice so that it can be returned to the pool.
 	origMatrix := slices.Clone(inputMatrix)
 	defer func() {
@@ -1387,7 +1387,7 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate
 		}
 	}()
 
-	var warnings annotations.Annotations
+	var annos annotations.Annotations
 
 	enh := &EvalNodeHelper{enableDelayedNameRemoval: ev.enableDelayedNameRemoval}
 	tempNumSamples := ev.currentSamples
@@ -1417,46 +1417,43 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate
 	}
 	groups := make([]groupedAggregation, groupCount)
 
-	var k int64
-	var ratio float64
 	var seriess map[uint64]Series
 
 	switch aggExpr.Op {
 	case parser.TOPK, parser.BOTTOMK, parser.LIMITK:
-		if !convertibleToInt64(param) {
-			ev.errorf("Scalar value %v overflows int64", param)
-		}
-		k = int64(param)
-		if k > int64(len(inputMatrix)) {
-			k = int64(len(inputMatrix))
-		}
-		if k < 1 {
-			return nil, warnings
+		// Return early if all k values are less than one.
+		if params.Max() < 1 {
+			return nil, annos
 		}
-		seriess = make(map[uint64]Series, len(inputMatrix)) // Output series by series hash.
+		seriess = make(map[uint64]Series, len(inputMatrix))
 	case parser.LIMIT_RATIO:
-		if math.IsNaN(param) {
-			ev.errorf("Ratio value %v is NaN", param)
-		}
-		switch {
-		case param == 0:
-			return nil, warnings
-		case param < -1.0:
-			ratio = -1.0
-			warnings.Add(annotations.NewInvalidRatioWarning(param, ratio, aggExpr.Param.PositionRange()))
-		case param > 1.0:
-			ratio = 1.0
-			warnings.Add(annotations.NewInvalidRatioWarning(param, ratio, aggExpr.Param.PositionRange()))
-		default:
-			ratio = param
+		// Return early if all r values are zero.
+		if params.Max() == 0 && params.Min() == 0 {
+			return nil, annos
+		}
+		if params.Max() > 1.0 {
+			annos.Add(annotations.NewInvalidRatioWarning(params.Max(), 1.0, aggExpr.Param.PositionRange()))
 		}
-		seriess = make(map[uint64]Series, len(inputMatrix)) // Output series by series hash.
+		if params.Min() < -1.0 {
+			annos.Add(annotations.NewInvalidRatioWarning(params.Min(), -1.0, aggExpr.Param.PositionRange()))
+		}
+		seriess = make(map[uint64]Series, len(inputMatrix))
 	case parser.QUANTILE:
-		if math.IsNaN(param) || param < 0 || param > 1 {
-			warnings.Add(annotations.NewInvalidQuantileWarning(param, aggExpr.Param.PositionRange()))
+		if params.HasAnyNaN() {
+			annos.Add(annotations.NewInvalidQuantileWarning(math.NaN(), aggExpr.Param.PositionRange()))
+		}
+		if params.Max() > 1 {
+			annos.Add(annotations.NewInvalidQuantileWarning(params.Max(), aggExpr.Param.PositionRange()))
+		}
+		if params.Min() < 0 {
+			annos.Add(annotations.NewInvalidQuantileWarning(params.Min(), aggExpr.Param.PositionRange()))
 		}
 	}
 
 	for ts := ev.startTimestamp; ts <= ev.endTimestamp; ts += ev.interval {
+		fParam := params.Next()
 		if err := contextDone(ctx, "expression evaluation"); err != nil {
 			ev.error(err)
 		}
@@ -1468,17 +1465,17 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate
 		var ws annotations.Annotations
 		switch aggExpr.Op {
 		case parser.TOPK, parser.BOTTOMK, parser.LIMITK, parser.LIMIT_RATIO:
-			result, ws = ev.aggregationK(aggExpr, k, ratio, inputMatrix, seriesToResult, groups, enh, seriess)
+			result, ws = ev.aggregationK(aggExpr, fParam, inputMatrix, seriesToResult, groups, enh, seriess)
 			// If this could be an instant query, shortcut so as not to change sort order.
-			if ev.endTimestamp == ev.startTimestamp {
-				warnings.Merge(ws)
-				return result, warnings
+			if ev.startTimestamp == ev.endTimestamp {
+				annos.Merge(ws)
+				return result, annos
 			}
 		default:
-			ws = ev.aggregation(aggExpr, param, inputMatrix, result, seriesToResult, groups, enh)
+			ws = ev.aggregation(aggExpr, fParam, inputMatrix, result, seriesToResult, groups, enh)
 		}
 
-		warnings.Merge(ws)
+		annos.Merge(ws)
 
 		if ev.currentSamples > ev.maxSamples {
 			ev.error(ErrTooManySamples(env))
@@ -1503,7 +1500,7 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate
 		}
 		result = result[:dst]
 	}
 
-	return result, warnings
+	return result, annos
 }
 
 // evalSeries generates a Matrix between ev.startTimestamp and ev.endTimestamp (inclusive), each point spaced ev.interval apart, from series given offset.
@@ -1681,18 +1678,14 @@ func (ev *evaluator) eval(ctx context.Context, expr parser.Expr) (parser.Value,
 		var warnings annotations.Annotations
 		originalNumSamples := ev.currentSamples
 
 		// param is the number k for topk/bottomk, or q for quantile.
-		var fParam float64
-		if param != nil {
-			val, ws := ev.eval(ctx, param)
-			warnings.Merge(ws)
-			fParam = val.(Matrix)[0].Floats[0].F
-		}
+		fp, ws := newFParams(ctx, ev, param)
+		warnings.Merge(ws)
 
 		// Now fetch the data to be aggregated.
 		val, ws := ev.eval(ctx, e.Expr)
 		warnings.Merge(ws)
 		inputMatrix := val.(Matrix)
 
-		result, ws := ev.rangeEvalAgg(ctx, e, sortedGrouping, inputMatrix, fParam)
+		result, ws := ev.rangeEvalAgg(ctx, e, sortedGrouping, inputMatrix, fp)
 		warnings.Merge(ws)
 		ev.currentSamples = originalNumSamples + result.TotalSamples()
 		ev.samplesStats.UpdatePeak(ev.currentSamples)
@@ -3269,7 +3262,7 @@ func (ev *evaluator) aggregation(e *parser.AggregateExpr, q float64, inputMatrix
 // seriesToResult maps inputMatrix indexes to groups indexes.
 // For an instant query, returns a Matrix in descending order for topk or ascending for bottomk, or without any order for limitk / limit_ratio.
 // For a range query, aggregates output in the seriess map.
-func (ev *evaluator) aggregationK(e *parser.AggregateExpr, k int64, r float64, inputMatrix Matrix, seriesToResult []int, groups []groupedAggregation, enh *EvalNodeHelper, seriess map[uint64]Series) (Matrix, annotations.Annotations) {
+func (ev *evaluator) aggregationK(e *parser.AggregateExpr, fParam float64, inputMatrix Matrix, seriesToResult []int, groups []groupedAggregation, enh *EvalNodeHelper, seriess map[uint64]Series) (Matrix, annotations.Annotations) {
 	op := e.Op
 	var s Sample
 	var annos annotations.Annotations
@@ -3278,6 +3271,14 @@ func (ev *evaluator) aggregationK(e *parser.AggregateExpr, k int64, r float64, i
 	for i := range groups {
 		groups[i].seen = false
 	}
 
+	// advanceRemainingSeries discards any values at the current timestamp `ts`
+	// for the remaining input series. In range queries, if these values are not
+	// consumed now, they will no longer be accessible in the next evaluation step.
+	advanceRemainingSeries := func(ts int64, startIdx int) {
+		for i := startIdx; i < len(inputMatrix); i++ {
+			_, _, _ = ev.nextValues(ts, &inputMatrix[i])
+		}
+	}
+
 seriesLoop:
 	for si := range inputMatrix {
@@ -3287,6 +3288,42 @@ seriesLoop:
 		}
 		s = Sample{Metric: inputMatrix[si].Metric, F: f, H: h, DropName: inputMatrix[si].DropName}
 
+		var k int64
+		var r float64
+		switch op {
+		case parser.TOPK, parser.BOTTOMK, parser.LIMITK:
+			if !convertibleToInt64(fParam) {
+				ev.errorf("Scalar value %v overflows int64", fParam)
+			}
+			k = int64(fParam)
+			if k > int64(len(inputMatrix)) {
+				k = int64(len(inputMatrix))
+			}
+			if k < 1 {
+				if enh.Ts != ev.endTimestamp {
+					advanceRemainingSeries(enh.Ts, si+1)
+				}
+				return nil, annos
+			}
+		case parser.LIMIT_RATIO:
+			if math.IsNaN(fParam) {
+				ev.errorf("Ratio value %v is NaN", fParam)
+			}
+			switch {
+			case fParam == 0:
+				if enh.Ts != ev.endTimestamp {
+					advanceRemainingSeries(enh.Ts, si+1)
+				}
+				return nil, annos
+			case fParam < -1.0:
+				r = -1.0
+			case fParam > 1.0:
+				r = 1.0
+			default:
+				r = fParam
+			}
+		}
+
 		group := &groups[seriesToResult[si]]
 		// Initialize this group if it's the first time we've seen it.
 		if !group.seen {
@@ -3377,6 +3414,10 @@ seriesLoop:
 			group.groupAggrComplete = true
 			groupsRemaining--
 			if groupsRemaining == 0 {
+				// Process other values in the series before breaking the loop in case of range query.
+				if enh.Ts != ev.endTimestamp {
+					advanceRemainingSeries(enh.Ts, si+1)
+				}
 				break seriesLoop
 			}
 		}
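Note: a toy model (assumed semantics, not the promql engine itself) of why aggregationK drains the remaining series before every early return. In a range query each evaluation step consumes exactly one value per input series, so a value skipped at step T would otherwise surface at step T+1:

```go
package main

import "fmt"

// series models an input series as a queue holding one value per evaluation step.
type series struct{ vals []float64 }

// next pops the value for the current step.
func (s *series) next() float64 {
	v := s.vals[0]
	s.vals = s.vals[1:]
	return v
}

func main() {
	input := []*series{
		{vals: []float64{1, 2, 3}},
		{vals: []float64{10, 20, 30}},
	}
	for step := 0; step < 3; step++ {
		got := input[0].next()
		// Early exit after reading input[0]: without draining the others,
		// their value for this step would leak into the next step. This
		// drain is what the advanceRemainingSeries closure above performs.
		for _, s := range input[1:] {
			_ = s.next()
		}
		fmt.Printf("step %d read %v; all series stayed aligned\n", step, got)
	}
}
```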

View File

@@ -612,7 +612,6 @@ func funcClampMin(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper
 // === round(Vector parser.ValueTypeVector, toNearest=1 Scalar) (Vector, Annotations) ===
 func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	vec := vals[0].(Vector)
 	// round returns a number rounded to toNearest.
 	// Ties are solved by rounding up.
 	toNearest := float64(1)
@@ -621,23 +620,9 @@ func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper
 	}
 	// Invert as it seems to cause fewer floating point accuracy issues.
 	toNearestInverse := 1.0 / toNearest
-	for _, el := range vec {
-		if el.H != nil {
-			// Process only float samples.
-			continue
-		}
-		f := math.Floor(el.F*toNearestInverse+0.5) / toNearestInverse
-		if !enh.enableDelayedNameRemoval {
-			el.Metric = el.Metric.DropMetricName()
-		}
-		enh.Out = append(enh.Out, Sample{
-			Metric:   el.Metric,
-			F:        f,
-			DropName: true,
-		})
-	}
-	return enh.Out, nil
+	return simpleFloatFunc(vals, enh, func(f float64) float64 {
+		return math.Floor(f*toNearestInverse+0.5) / toNearestInverse
+	}), nil
 }
 
 // === Scalar(node parser.ValueTypeVector) Scalar ===
@@ -823,8 +808,8 @@ func funcMadOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
 	}), annos
 }
 
-// === max_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
-func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
+// compareOverTime is a helper used by funcMaxOverTime and funcMinOverTime.
+func compareOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper, compareFn func(float64, float64) bool) (Vector, annotations.Annotations) {
 	samples := vals[0].(Matrix)[0]
 	var annos annotations.Annotations
 	if len(samples.Floats) == 0 {
@@ -837,7 +822,7 @@ func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
 	return aggrOverTime(vals, enh, func(s Series) float64 {
 		maxVal := s.Floats[0].F
 		for _, f := range s.Floats {
-			if f.F > maxVal || math.IsNaN(maxVal) {
+			if compareFn(f.F, maxVal) {
 				maxVal = f.F
 			}
 		}
@@ -845,26 +830,18 @@ func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
 	}), annos
 }
 
+// === max_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
+func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
+	return compareOverTime(vals, args, enh, func(cur, maxVal float64) bool {
+		return (cur > maxVal) || math.IsNaN(maxVal)
+	})
+}
+
 // === min_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
 func funcMinOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	samples := vals[0].(Matrix)[0]
-	var annos annotations.Annotations
-	if len(samples.Floats) == 0 {
-		return enh.Out, nil
-	}
-	if len(samples.Histograms) > 0 {
-		metricName := samples.Metric.Get(labels.MetricName)
-		annos.Add(annotations.NewHistogramIgnoredInMixedRangeInfo(metricName, args[0].PositionRange()))
-	}
-	return aggrOverTime(vals, enh, func(s Series) float64 {
-		minVal := s.Floats[0].F
-		for _, f := range s.Floats {
-			if f.F < minVal || math.IsNaN(minVal) {
-				minVal = f.F
-			}
-		}
-		return minVal
-	}), annos
+	return compareOverTime(vals, args, enh, func(cur, maxVal float64) bool {
+		return (cur < maxVal) || math.IsNaN(maxVal)
+	})
 }
 
 // === sum_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
@@ -997,7 +974,7 @@ func funcPresentOverTime(vals []parser.Value, _ parser.Expressions, enh *EvalNod
 	}), nil
 }
 
-func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float64) Vector {
+func simpleFloatFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float64) Vector {
 	for _, el := range vals[0].(Vector) {
 		if el.H == nil { // Process only float samples.
 			if !enh.enableDelayedNameRemoval {
@@ -1015,114 +992,114 @@ func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float6
 // === abs(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAbs(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Abs), nil
+	return simpleFloatFunc(vals, enh, math.Abs), nil
 }
 
 // === ceil(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcCeil(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Ceil), nil
+	return simpleFloatFunc(vals, enh, math.Ceil), nil
 }
 
 // === floor(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcFloor(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Floor), nil
+	return simpleFloatFunc(vals, enh, math.Floor), nil
 }
 
 // === exp(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcExp(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Exp), nil
+	return simpleFloatFunc(vals, enh, math.Exp), nil
 }
 
 // === sqrt(Vector VectorNode) (Vector, Annotations) ===
 func funcSqrt(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Sqrt), nil
+	return simpleFloatFunc(vals, enh, math.Sqrt), nil
 }
 
 // === ln(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcLn(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Log), nil
+	return simpleFloatFunc(vals, enh, math.Log), nil
 }
 
 // === log2(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcLog2(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Log2), nil
+	return simpleFloatFunc(vals, enh, math.Log2), nil
 }
 
 // === log10(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcLog10(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Log10), nil
+	return simpleFloatFunc(vals, enh, math.Log10), nil
 }
 
 // === sin(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcSin(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Sin), nil
+	return simpleFloatFunc(vals, enh, math.Sin), nil
 }
 
 // === cos(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcCos(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Cos), nil
+	return simpleFloatFunc(vals, enh, math.Cos), nil
 }
 
 // === tan(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcTan(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Tan), nil
+	return simpleFloatFunc(vals, enh, math.Tan), nil
 }
 
 // === asin(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAsin(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Asin), nil
+	return simpleFloatFunc(vals, enh, math.Asin), nil
 }
 
 // === acos(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAcos(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Acos), nil
+	return simpleFloatFunc(vals, enh, math.Acos), nil
 }
 
 // === atan(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAtan(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Atan), nil
+	return simpleFloatFunc(vals, enh, math.Atan), nil
 }
 
 // === sinh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcSinh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Sinh), nil
+	return simpleFloatFunc(vals, enh, math.Sinh), nil
 }
 
 // === cosh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcCosh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Cosh), nil
+	return simpleFloatFunc(vals, enh, math.Cosh), nil
 }
 
 // === tanh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcTanh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Tanh), nil
+	return simpleFloatFunc(vals, enh, math.Tanh), nil
 }
 
 // === asinh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAsinh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Asinh), nil
+	return simpleFloatFunc(vals, enh, math.Asinh), nil
 }
 
 // === acosh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAcosh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Acosh), nil
+	return simpleFloatFunc(vals, enh, math.Acosh), nil
 }
 
 // === atanh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAtanh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, math.Atanh), nil
+	return simpleFloatFunc(vals, enh, math.Atanh), nil
 }
 
 // === rad(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcRad(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, func(v float64) float64 {
+	return simpleFloatFunc(vals, enh, func(v float64) float64 {
 		return v * math.Pi / 180
 	}), nil
 }
 
 // === deg(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcDeg(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, func(v float64) float64 {
+	return simpleFloatFunc(vals, enh, func(v float64) float64 {
 		return v * 180 / math.Pi
 	}), nil
 }
@@ -1134,7 +1111,7 @@ func funcPi(_ []parser.Value, _ parser.Expressions, _ *EvalNodeHelper) (Vector,
 // === sgn(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcSgn(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	return simpleFunc(vals, enh, func(v float64) float64 {
+	return simpleFloatFunc(vals, enh, func(v float64) float64 {
 		switch {
 		case v < 0:
 			return -1
@@ -1271,79 +1248,48 @@ func funcPredictLinear(vals []parser.Value, args parser.Expressions, enh *EvalNo
 	return append(enh.Out, Sample{F: slope*duration + intercept}), nil
 }
 
+func simpleHistogramFunc(vals []parser.Value, enh *EvalNodeHelper, f func(h *histogram.FloatHistogram) float64) Vector {
+	for _, el := range vals[0].(Vector) {
+		if el.H != nil { // Process only histogram samples.
+			if !enh.enableDelayedNameRemoval {
+				el.Metric = el.Metric.DropMetricName()
+			}
+			enh.Out = append(enh.Out, Sample{
+				Metric:   el.Metric,
+				F:        f(el.H),
+				DropName: true,
+			})
+		}
+	}
+	return enh.Out
+}
+
 // === histogram_count(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcHistogramCount(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	inVec := vals[0].(Vector)
-	for _, sample := range inVec {
-		// Skip non-histogram samples.
-		if sample.H == nil {
-			continue
-		}
-		if !enh.enableDelayedNameRemoval {
-			sample.Metric = sample.Metric.DropMetricName()
-		}
-		enh.Out = append(enh.Out, Sample{
-			Metric:   sample.Metric,
-			F:        sample.H.Count,
-			DropName: true,
-		})
-	}
-	return enh.Out, nil
+	return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 {
+		return h.Count
+	}), nil
 }
 
 // === histogram_sum(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcHistogramSum(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	inVec := vals[0].(Vector)
-	for _, sample := range inVec {
-		// Skip non-histogram samples.
-		if sample.H == nil {
-			continue
-		}
-		if !enh.enableDelayedNameRemoval {
-			sample.Metric = sample.Metric.DropMetricName()
-		}
-		enh.Out = append(enh.Out, Sample{
-			Metric:   sample.Metric,
-			F:        sample.H.Sum,
-			DropName: true,
-		})
-	}
-	return enh.Out, nil
+	return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 {
+		return h.Sum
+	}), nil
 }
 
 // === histogram_avg(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcHistogramAvg(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-	inVec := vals[0].(Vector)
-	for _, sample := range inVec {
-		// Skip non-histogram samples.
-		if sample.H == nil {
-			continue
-		}
-		if !enh.enableDelayedNameRemoval {
-			sample.Metric = sample.Metric.DropMetricName()
-		}
-		enh.Out = append(enh.Out, Sample{
-			Metric:   sample.Metric,
-			F:        sample.H.Sum / sample.H.Count,
-			DropName: true,
-		})
-	}
-	return enh.Out, nil
+	return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 {
+		return h.Sum / h.Count
+	}), nil
 }
 
 func histogramVariance(vals []parser.Value, enh *EvalNodeHelper, varianceToResult func(float64) float64) (Vector, annotations.Annotations) {
-	vec := vals[0].(Vector)
-	for _, sample := range vec {
-		// Skip non-histogram samples.
-		if sample.H == nil {
-			continue
-		}
-		mean := sample.H.Sum / sample.H.Count
+	return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 {
+		mean := h.Sum / h.Count
 		var variance, cVariance float64
-		it := sample.H.AllBucketIterator()
+		it := h.AllBucketIterator()
 		for it.Next() {
 			bucket := it.At()
 			if bucket.Count == 0 {
@@ -1351,7 +1297,7 @@ func histogramVariance(vals []parser.Value, enh *EvalNodeHelper, varianceToResul
 			}
 			var val float64
 			switch {
-			case sample.H.UsesCustomBuckets():
+			case h.UsesCustomBuckets():
 				// Use arithmetic mean in case of custom buckets.
 				val = (bucket.Upper + bucket.Lower) / 2.0
 			case bucket.Lower <= 0 && bucket.Upper >= 0:
@@ -1368,20 +1314,12 @@ func histogramVariance(vals []parser.Value, enh *EvalNodeHelper, varianceToResul
 			variance, cVariance = kahanSumInc(bucket.Count*delta*delta, variance, cVariance)
 		}
 		variance += cVariance
-		variance /= sample.H.Count
-		if !enh.enableDelayedNameRemoval {
-			sample.Metric = sample.Metric.DropMetricName()
-		}
+		variance /= h.Count
 		if varianceToResult != nil {
 			variance = varianceToResult(variance)
 		}
-		enh.Out = append(enh.Out, Sample{
-			Metric:   sample.Metric,
-			F:        variance,
-			DropName: true,
-		})
-	}
-	return enh.Out, nil
+		return variance
+	}), nil
 }
 
 // === histogram_stddev(Vector parser.ValueTypeVector) (Vector, Annotations) ===
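Note: the funcRound, histogram_count/sum/avg, and variance rewrites above all funnel through the two new higher-order helpers. A self-contained sketch of that pattern with toy stand-in types (not the promql Sample/Vector):

```go
package main

import (
	"fmt"
	"math"
)

type hist struct{ sum, count float64 }

type sample struct {
	f float64
	h *hist // nil for float samples
}

// mapFloats applies f to float samples only, mirroring simpleFloatFunc.
func mapFloats(in []sample, f func(float64) float64) []float64 {
	var out []float64
	for _, s := range in {
		if s.h == nil {
			out = append(out, f(s.f))
		}
	}
	return out
}

// mapHists applies f to histogram samples only, mirroring simpleHistogramFunc.
func mapHists(in []sample, f func(*hist) float64) []float64 {
	var out []float64
	for _, s := range in {
		if s.h != nil {
			out = append(out, f(s.h))
		}
	}
	return out
}

func main() {
	v := []sample{{f: -4}, {h: &hist{sum: 10, count: 5}}, {f: 9}}
	fmt.Println(mapFloats(v, math.Abs)) // [4 9]
	fmt.Println(mapHists(v, func(h *hist) float64 { return h.sum / h.count })) // [2]
}
```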

View File

@@ -274,7 +274,7 @@ load 5m
 	http_requests{job="app-server", instance="1", group="canary"}	0+80x10
 	http_requests_histogram{job="app-server", instance="2", group="canary"}	{{schema:0 sum:10 count:10}}x11
 	http_requests_histogram{job="api-server", instance="3", group="production"}	{{schema:0 sum:20 count:20}}x11
-	foo	3+0x10
+	foo	1+1x9 3
 
 eval_ordered instant at 50m topk(3, http_requests)
 	http_requests{group="canary", instance="1", job="app-server"} 800
@@ -340,6 +340,13 @@ eval_ordered instant at 50m topk(scalar(foo), http_requests)
 	http_requests{group="canary", instance="0", job="app-server"} 700
 	http_requests{group="production", instance="1", job="app-server"} 600
 
+# Bug #15971.
+eval range from 0m to 50m step 5m count(topk(scalar(foo), http_requests))
+	{} 1 2 3 4 5 6 7 8 9 9 3
+
+eval range from 0m to 50m step 5m count(bottomk(scalar(foo), http_requests))
+	{} 1 2 3 4 5 6 7 8 9 9 3
+
 # Tests for histogram: should ignore histograms.
 eval_info instant at 50m topk(100, http_requests_histogram)
 	#empty
@@ -447,7 +454,7 @@ load 10s
 	data{test="uneven samples",point="b"} 1
 	data{test="uneven samples",point="c"} 4
 	data_histogram{test="histogram sample", point="c"} {{schema:2 count:4 sum:10 buckets:[1 0 0 0 1 0 0 1 1]}}
-	foo .8
+	foo 0 1 0 1 0 1 0.8
 
 eval instant at 1m quantile without(point)(0.8, data)
 	{test="two samples"} 0.8
@@ -475,11 +482,18 @@ eval instant at 1m quantile without(point)((scalar(foo)), data)
 	{test="three samples"} 1.6
 	{test="uneven samples"} 2.8
 
-eval_warn instant at 1m quantile without(point)(NaN, data)
+eval instant at 1m quantile without(point)(NaN, data)
+	expect warn msg: PromQL warning: quantile value should be between 0 and 1, got NaN
 	{test="two samples"} NaN
 	{test="three samples"} NaN
 	{test="uneven samples"} NaN
 
+# Bug #15971.
+eval range from 0m to 1m step 10s quantile without(point) (scalar(foo), data)
+	{test="two samples"} 0 1 0 1 0 1 0.8
+	{test="three samples"} 0 2 0 2 0 2 1.6
+	{test="uneven samples"} 0 4 0 4 0 4 2.8
+
 # Tests for group.
 clear

View File

@@ -11,6 +11,8 @@ load 5m
 	http_requests{job="api-server", instance="3", group="canary"}	0+60x10
 	http_requests{job="api-server", instance="histogram_1", group="canary"}	{{schema:0 sum:10 count:10}}x11
 	http_requests{job="api-server", instance="histogram_2", group="canary"}	{{schema:0 sum:20 count:20}}x11
+	foo	1+1x10
+	bar	0 1 0 -1 0 1 0 -1 0 1 0
 
 eval instant at 50m count(limitk by (group) (0, http_requests))
 	# empty
@@ -69,6 +71,10 @@ eval instant at 50m count(limitk(1000, http_requests{instance=~"histogram_[0-9]"
 eval range from 0 to 50m step 5m count(limitk(1000, http_requests{instance=~"histogram_[0-9]"}))
 	{} 2+0x10
 
+# Bug #15971.
+eval range from 0m to 50m step 5m count(limitk(scalar(foo), http_requests))
+	{} 1 2 3 4 5 6 7 8 8 8 8
+
 # limit_ratio
 eval range from 0 to 50m step 5m count(limit_ratio(0.0, http_requests))
 	# empty
@@ -105,11 +111,13 @@ eval range from 0 to 50m step 5m count(limit_ratio(-1.0, http_requests) and http
 	{} 8+0x10
 
 # Capped to 1.0 -> all samples.
-eval_warn range from 0 to 50m step 5m count(limit_ratio(1.1, http_requests) and http_requests)
+eval range from 0 to 50m step 5m count(limit_ratio(1.1, http_requests) and http_requests)
+	expect warn msg: PromQL warning: ratio value should be between -1 and 1, got 1.1, capping to 1
 	{} 8+0x10
 
 # Capped to -1.0 -> all samples.
-eval_warn range from 0 to 50m step 5m count(limit_ratio(-1.1, http_requests) and http_requests)
+eval range from 0 to 50m step 5m count(limit_ratio(-1.1, http_requests) and http_requests)
+	expect warn msg: PromQL warning: ratio value should be between -1 and 1, got -1.1, capping to -1
 	{} 8+0x10
 
 # Verify that limit_ratio(value) and limit_ratio(1.0-value) return the "complement" of each other.
@@ -137,12 +145,12 @@ eval range from 0 to 50m step 5m count(limit_ratio(0.8, http_requests) or limit_
 eval range from 0 to 50m step 5m count(limit_ratio(0.8, http_requests) and limit_ratio(-0.2, http_requests))
 	# empty
 
-# Complement below for [some_ratio, 1.0 - some_ratio], some_ratio derived from time(),
+# Complement below for [some_ratio, - (1.0 - some_ratio)], some_ratio derived from time(),
 # using a small prime number to avoid rounded ratio values, and a small set of them.
-eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) or limit_ratio(1.0 - (time() % 17/17), http_requests))
+eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) or limit_ratio( - (1.0 - (time() % 17/17)), http_requests))
 	{} 8+0x10
 
-eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) and limit_ratio(1.0 - (time() % 17/17), http_requests))
+eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) and limit_ratio( - (1.0 - (time() % 17/17)), http_requests))
 	# empty
 
# Poor man's normality check: ok (loaded samples follow a nice linearity over labels and time).
@@ -156,3 +164,7 @@ eval instant at 50m limit_ratio(1, http_requests{instance="histogram_1"})
 eval range from 0 to 50m step 5m limit_ratio(1, http_requests{instance="histogram_1"})
 	{__name__="http_requests", group="canary", instance="histogram_1", job="api-server"} {{count:10 sum:10}}x10
+
+# Bug #15971.
+eval range from 0m to 50m step 5m count(limit_ratio(scalar(bar), http_requests))
+	{} _ 8 _ 8 _ 8 _ 8 _ 8 _

View File

@@ -14,6 +14,7 @@
 package promql
 
 import (
+	"context"
 	"encoding/json"
 	"errors"
 	"fmt"
@@ -533,3 +534,68 @@ func (ssi *storageSeriesIterator) Next() chunkenc.ValueType {
 func (ssi *storageSeriesIterator) Err() error {
 	return nil
 }
+
+type fParams struct {
+	series     Series
+	constValue float64
+	isConstant bool
+	minValue   float64
+	maxValue   float64
+	hasAnyNaN  bool
+}
+
+// newFParams evaluates the expression and returns an fParams object,
+// which holds the parameter values (constant or series) along with min, max, and NaN info.
+func newFParams(ctx context.Context, ev *evaluator, expr parser.Expr) (*fParams, annotations.Annotations) {
+	if expr == nil {
+		return &fParams{}, nil
+	}
+	var constParam bool
+	if _, ok := expr.(*parser.NumberLiteral); ok {
+		constParam = true
+	}
+	val, ws := ev.eval(ctx, expr)
+	mat, ok := val.(Matrix)
+	if !ok || len(mat) == 0 {
+		return &fParams{}, ws
+	}
+	fp := &fParams{
+		series:     mat[0],
+		isConstant: constParam,
+		minValue:   math.MaxFloat64,
+		maxValue:   -math.MaxFloat64,
+	}
+
+	if constParam {
+		fp.constValue = fp.series.Floats[0].F
+		fp.minValue, fp.maxValue = fp.constValue, fp.constValue
+		fp.hasAnyNaN = math.IsNaN(fp.constValue)
+		return fp, ws
+	}
+
+	for _, v := range fp.series.Floats {
+		fp.maxValue = math.Max(fp.maxValue, v.F)
+		fp.minValue = math.Min(fp.minValue, v.F)
+		if math.IsNaN(v.F) {
+			fp.hasAnyNaN = true
+		}
+	}
+	return fp, ws
+}
+
+func (fp *fParams) Max() float64    { return fp.maxValue }
+func (fp *fParams) Min() float64    { return fp.minValue }
+func (fp *fParams) HasAnyNaN() bool { return fp.hasAnyNaN }
+
+// Next returns the next value from the series or the constant value, and advances the series if applicable.
+func (fp *fParams) Next() float64 {
+	if fp.isConstant {
+		return fp.constValue
+	}
+	if len(fp.series.Floats) > 0 {
+		val := fp.series.Floats[0].F
+		fp.series.Floats = fp.series.Floats[1:]
+		return val
+	}
+	return 0
+}
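Note: a hedged usage sketch of the fParams contract introduced above. A constant parameter (a number literal) yields the same value on every Next() call, while a series-valued parameter yields one value per evaluation step; toy types stand in for the promql Series:

```go
package main

import "fmt"

type toyFParams struct {
	vals       []float64
	constValue float64
	isConstant bool
}

// Next mirrors fParams.Next: constants repeat, series advance one step.
func (fp *toyFParams) Next() float64 {
	if fp.isConstant {
		return fp.constValue
	}
	if len(fp.vals) > 0 {
		v := fp.vals[0]
		fp.vals = fp.vals[1:]
		return v
	}
	return 0
}

func main() {
	k := &toyFParams{isConstant: true, constValue: 3} // e.g. topk(3, ...)
	q := &toyFParams{vals: []float64{0.5, 0.9, 0.99}} // e.g. quantile(scalar(foo), ...)
	for step := 0; step < 3; step++ {
		fmt.Println(k.Next(), q.Next()) // 3 0.5; 3 0.9; 3 0.99
	}
}
```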

View File

@@ -20,6 +20,7 @@ import (
 	"math"
 	"os"
 	"path"
+	"slices"
 	"sort"
 	"strconv"
 	"sync"
@@ -1008,11 +1009,8 @@ func TestMetricsUpdate(t *testing.T) {
 		var metrics int
 		for _, m := range ms {
 			s := m.GetName()
-			for _, n := range metricNames {
-				if s == n {
-					metrics += len(m.Metric)
-					break
-				}
+			if slices.Contains(metricNames, s) {
+				metrics += len(m.Metric)
 			}
 		}
 		return metrics

View File

@@ -599,10 +599,8 @@ func Intersect(its ...Postings) Postings {
 	if len(its) == 1 {
 		return its[0]
 	}
-	for _, p := range its {
-		if p == EmptyPostings() {
-			return EmptyPostings()
-		}
+	if slices.Contains(its, EmptyPostings()) {
+		return EmptyPostings()
 	}
 
 	return newIntersectPostings(its...)
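Note: `slices.Contains` works here because `EmptyPostings()` returns a shared sentinel and Contains compares interface values with `==` (interfaces satisfy `comparable` since Go 1.20). A minimal illustration with toy types, assuming a single shared sentinel instance as the code above relies on:

```go
package main

import (
	"fmt"
	"slices"
)

type Postings interface{ Next() bool }

type emptyPostings struct{}

func (emptyPostings) Next() bool { return false }

// The sentinel is a single shared instance, so == identity finds it.
var sentinel Postings = &emptyPostings{}

func EmptyPostings() Postings { return sentinel }

func main() {
	its := []Postings{EmptyPostings()}
	// Contains compares interface values with ==, matching the sentinel.
	fmt.Println(slices.Contains(its, EmptyPostings())) // true
}
```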

View File

@@ -224,6 +224,7 @@ function App() {
             leftSection={p.icon}
             rightSection={<IconChevronDown style={navIconStyle} />}
             px={navLinkXPadding}
+            onClick={(e) => e.preventDefault()}
           >
             Status <IconChevronRight style={navIconStyle} /> {p.title}
           </Button>
@@ -236,14 +237,9 @@ function App() {
           element={
             <Menu.Target>
               <Button
-                component={NavLink}
-                to="/"
                 className={classes.link}
                 leftSection={<IconServer style={navIconStyle} />}
                 rightSection={<IconChevronDown style={navIconStyle} />}
-                onClick={(e) => {
-                  e.preventDefault();
-                }}
                 px={navLinkXPadding}
               >
                 Status
@@ -339,8 +335,12 @@ function App() {
         >
           <Group gap={10} wrap="nowrap">
             <img src={PrometheusLogo} height={30} />
-            <Text hiddenFrom="sm" fz={20}>Prometheus</Text>
-            <Text visibleFrom="md" fz={20}>Prometheus</Text>
+            <Text hiddenFrom="sm" fz={20}>
+              Prometheus
+            </Text>
+            <Text visibleFrom="md" fz={20}>
+              Prometheus
+            </Text>
             <Text fz={20}>{agentMode && "Agent"}</Text>
           </Group>
         </Link>

View File

@@ -19,7 +19,7 @@ import {
 } from "@tabler/icons-react";
 import { useSuspenseAPIQuery } from "../../api/api";
 import { Target, TargetsResult } from "../../api/responseTypes/targets";
-import React, { FC, useMemo } from "react";
+import React, { FC, memo, useMemo } from "react";
 import {
   humanizeDurationRelative,
   humanizeDuration,
@@ -37,7 +37,6 @@ import CustomInfiniteScroll from "../../components/CustomInfiniteScroll";
 import badgeClasses from "../../Badge.module.css";
 import panelClasses from "../../Panel.module.css";
 import TargetLabels from "./TargetLabels";
-import { useDebouncedValue } from "@mantine/hooks";
 import { targetPoolDisplayLimit } from "./TargetsPage";
 import { badgeIconStyle } from "../../styles";
@@ -145,278 +144,280 @@
searchFilter: string;
};
const ScrapePoolList: FC<ScrapePoolListProp> = ({
poolNames,
selectedPool,
healthFilter,
searchFilter,
}) => {
// Based on the selected pool (if any), load the list of targets.
const {
data: {
data: { activeTargets },
},
} = useSuspenseAPIQuery<TargetsResult>({
path: `/targets`,
params: {
state: "active",
scrapePool: selectedPool === null ? "" : selectedPool,
},
});
const ScrapePoolList: FC<ScrapePoolListProp> = memo(
({ poolNames, selectedPool, healthFilter, searchFilter }) => {
// Based on the selected pool (if any), load the list of targets.
const {
data: {
data: { activeTargets },
},
} = useSuspenseAPIQuery<TargetsResult>({
path: `/targets`,
params: {
state: "active",
scrapePool: selectedPool === null ? "" : selectedPool,
},
});
const dispatch = useAppDispatch();
const [showEmptyPools, setShowEmptyPools] = useLocalStorage<boolean>({
key: "targetsPage.showEmptyPools",
defaultValue: false,
});
const dispatch = useAppDispatch();
const [showEmptyPools, setShowEmptyPools] = useLocalStorage<boolean>({
key: "targetsPage.showEmptyPools",
defaultValue: false,
});
const { collapsedPools, showLimitAlert } = useAppSelector(
(state) => state.targetsPage
);
const { collapsedPools, showLimitAlert } = useAppSelector(
(state) => state.targetsPage
);
const [debouncedSearch] = useDebouncedValue<string>(searchFilter.trim(), 250);
const allPools = useMemo(
() =>
buildPoolsData(
selectedPool ? [selectedPool] : poolNames,
activeTargets,
searchFilter,
healthFilter
),
[selectedPool, poolNames, activeTargets, searchFilter, healthFilter]
);
const allPools = useMemo(
() =>
buildPoolsData(
selectedPool ? [selectedPool] : poolNames,
activeTargets,
debouncedSearch,
healthFilter
),
[selectedPool, poolNames, activeTargets, debouncedSearch, healthFilter]
);
const allPoolNames = Object.keys(allPools);
const shownPoolNames = showEmptyPools
? allPoolNames
: allPoolNames.filter((pn) => allPools[pn].targets.length !== 0);
const allPoolNames = Object.keys(allPools);
const shownPoolNames = showEmptyPools
? allPoolNames
: allPoolNames.filter((pn) => allPools[pn].targets.length !== 0);
return (
<Stack>
{allPoolNames.length === 0 ? (
<Alert title="No scrape pools found" icon={<IconInfoCircle />}>
No scrape pools found.
</Alert>
) : (
!showEmptyPools &&
allPoolNames.length !== shownPoolNames.length && (
<Alert
title="Hiding pools with no matching targets"
icon={<IconInfoCircle />}
>
Hiding {allPoolNames.length - shownPoolNames.length} empty pools due
to filters or no targets.
<Anchor ml="md" fz="1em" onClick={() => setShowEmptyPools(true)}>
Show empty pools
</Anchor>
return (
<Stack>
{allPoolNames.length === 0 ? (
<Alert title="No scrape pools found" icon={<IconInfoCircle />}>
No scrape pools found.
</Alert>
)
)}
{showLimitAlert && (
<Alert
title="Found many pools, showing only one"
icon={<IconInfoCircle />}
withCloseButton
onClose={() => dispatch(setShowLimitAlert(false))}
>
There are more than {targetPoolDisplayLimit} scrape pools. Showing
only the first one. Use the dropdown to select a different pool.
</Alert>
)}
<Accordion
multiple
variant="separated"
value={allPoolNames.filter((p) => !collapsedPools.includes(p))}
onChange={(value) =>
dispatch(
setCollapsedPools(allPoolNames.filter((p) => !value.includes(p)))
)
}
>
{shownPoolNames.map((poolName) => {
const pool = allPools[poolName];
return (
<Accordion.Item
key={poolName}
value={poolName}
className={poolPanelHealthClass(pool)}
) : (
!showEmptyPools &&
allPoolNames.length !== shownPoolNames.length && (
<Alert
title="Hiding pools with no matching targets"
icon={<IconInfoCircle />}
>
<Accordion.Control>
<Group wrap="nowrap" justify="space-between" mr="lg">
<Text>{poolName}</Text>
<Group gap="xs">
<Text c="gray.6">
{pool.upCount} / {pool.count} up
</Text>
<RingProgress
size={25}
thickness={5}
sections={
pool.count === 0
? []
: [
{
value: (pool.upCount / pool.count) * 100,
color: "green.4",
},
{
value: (pool.unknownCount / pool.count) * 100,
color: "gray.4",
},
{
value: (pool.downCount / pool.count) * 100,
color: "red.5",
},
]
}
/>
Hiding {allPoolNames.length - shownPoolNames.length} empty pools
due to filters or no targets.
<Anchor ml="md" fz="1em" onClick={() => setShowEmptyPools(true)}>
Show empty pools
</Anchor>
</Alert>
)
)}
{showLimitAlert && (
<Alert
title="Found many pools, showing only one"
icon={<IconInfoCircle />}
withCloseButton
onClose={() => dispatch(setShowLimitAlert(false))}
>
There are more than {targetPoolDisplayLimit} scrape pools. Showing
only the first one. Use the dropdown to select a different pool.
</Alert>
)}
<Accordion
multiple
variant="separated"
value={allPoolNames.filter((p) => !collapsedPools.includes(p))}
onChange={(value) =>
dispatch(
setCollapsedPools(allPoolNames.filter((p) => !value.includes(p)))
)
}
>
{shownPoolNames.map((poolName) => {
const pool = allPools[poolName];
return (
<Accordion.Item
key={poolName}
value={poolName}
className={poolPanelHealthClass(pool)}
>
<Accordion.Control>
<Group wrap="nowrap" justify="space-between" mr="lg">
<Text>{poolName}</Text>
<Group gap="xs">
<Text c="gray.6">
{pool.upCount} / {pool.count} up
</Text>
<RingProgress
size={25}
thickness={5}
sections={
pool.count === 0
? []
: [
{
value: (pool.upCount / pool.count) * 100,
color: "green.4",
},
{
value: (pool.unknownCount / pool.count) * 100,
color: "gray.4",
},
{
value: (pool.downCount / pool.count) * 100,
color: "red.5",
},
]
}
/>
</Group>
</Group>
</Group>
</Accordion.Control>
<Accordion.Panel>
{pool.count === 0 ? (
<Alert title="No targets" icon={<IconInfoCircle />}>
No active targets in this scrape pool.
<Anchor
ml="md"
fz="1em"
onClick={() => setShowEmptyPools(false)}
</Accordion.Control>
<Accordion.Panel>
{pool.count === 0 ? (
<Alert title="No targets" icon={<IconInfoCircle />}>
No active targets in this scrape pool.
<Anchor
ml="md"
fz="1em"
onClick={() => setShowEmptyPools(false)}
>
Hide empty pools
</Anchor>
</Alert>
) : pool.targets.length === 0 ? (
<Alert
title="No matching targets"
icon={<IconInfoCircle />}
>
Hide empty pools
</Anchor>
</Alert>
) : pool.targets.length === 0 ? (
<Alert title="No matching targets" icon={<IconInfoCircle />}>
No targets in this pool match your filter criteria (omitted{" "}
{pool.count} filtered targets).
<Anchor
ml="md"
fz="1em"
onClick={() => setShowEmptyPools(false)}
>
Hide empty pools
</Anchor>
</Alert>
) : (
<CustomInfiniteScroll
allItems={pool.targets}
child={({ items }) => (
<Table>
<Table.Thead>
<Table.Tr>
<Table.Th w="25%">Endpoint</Table.Th>
<Table.Th>Labels</Table.Th>
<Table.Th w={230}>Last scrape</Table.Th>
<Table.Th w={100}>State</Table.Th>
</Table.Tr>
</Table.Thead>
<Table.Tbody>
{items.map((target, i) => (
// TODO: Find a stable and definitely unique key.
<React.Fragment key={i}>
<Table.Tr
style={{
borderBottom: target.lastError
? "none"
: undefined,
}}
>
<Table.Td valign="top">
<EndpointLink
endpoint={target.scrapeUrl}
globalUrl={target.globalUrl}
/>
</Table.Td>
No targets in this pool match your filter criteria
(omitted {pool.count} filtered targets).
<Anchor
ml="md"
fz="1em"
onClick={() => setShowEmptyPools(false)}
>
Hide empty pools
</Anchor>
</Alert>
) : (
<CustomInfiniteScroll
allItems={pool.targets}
child={({ items }) => (
<Table>
<Table.Thead>
<Table.Tr>
<Table.Th w="25%">Endpoint</Table.Th>
<Table.Th>Labels</Table.Th>
<Table.Th w={230}>Last scrape</Table.Th>
<Table.Th w={100}>State</Table.Th>
</Table.Tr>
</Table.Thead>
<Table.Tbody>
{items.map((target, i) => (
// TODO: Find a stable and definitely unique key.
<React.Fragment key={i}>
<Table.Tr
style={{
borderBottom: target.lastError
? "none"
: undefined,
}}
>
<Table.Td valign="top">
<EndpointLink
endpoint={target.scrapeUrl}
globalUrl={target.globalUrl}
/>
</Table.Td>
<Table.Td valign="top">
<TargetLabels
labels={target.labels}
discoveredLabels={target.discoveredLabels}
/>
</Table.Td>
<Table.Td valign="top">
<Group gap="xs" wrap="wrap">
<Tooltip
label="Last target scrape"
withArrow
>
<Badge
variant="light"
className={badgeClasses.statsBadge}
styles={{
label: { textTransform: "none" },
}}
leftSection={
<IconRefresh style={badgeIconStyle} />
}
<Table.Td valign="top">
<TargetLabels
labels={target.labels}
discoveredLabels={target.discoveredLabels}
/>
</Table.Td>
<Table.Td valign="top">
<Group gap="xs" wrap="wrap">
<Tooltip
label="Last target scrape"
withArrow
>
{humanizeDurationRelative(
target.lastScrape,
now()
)}
</Badge>
</Tooltip>
<Badge
variant="light"
className={badgeClasses.statsBadge}
styles={{
label: { textTransform: "none" },
}}
leftSection={
<IconRefresh
style={badgeIconStyle}
/>
}
>
{humanizeDurationRelative(
target.lastScrape,
now()
)}
</Badge>
</Tooltip>
<Tooltip
label="Duration of last target scrape"
withArrow
>
<Badge
variant="light"
className={badgeClasses.statsBadge}
styles={{
label: { textTransform: "none" },
}}
leftSection={
<IconHourglass
style={badgeIconStyle}
/>
}
<Tooltip
label="Duration of last target scrape"
withArrow
>
{humanizeDuration(
target.lastScrapeDuration * 1000
)}
</Badge>
</Tooltip>
</Group>
</Table.Td>
<Table.Td valign="top">
<Badge
className={healthBadgeClass(target.health)}
>
{target.health}
</Badge>
</Table.Td>
</Table.Tr>
{target.lastError && (
<Table.Tr>
<Table.Td colSpan={5} valign="top">
<Alert
color="red"
mb="sm"
icon={<IconAlertTriangle />}
<Badge
variant="light"
className={badgeClasses.statsBadge}
styles={{
label: { textTransform: "none" },
}}
leftSection={
<IconHourglass
style={badgeIconStyle}
/>
}
>
{humanizeDuration(
target.lastScrapeDuration * 1000
)}
</Badge>
</Tooltip>
</Group>
</Table.Td>
<Table.Td valign="top">
<Badge
className={healthBadgeClass(
target.health
)}
>
<strong>Error scraping target:</strong>{" "}
{target.lastError}
</Alert>
{target.health}
</Badge>
</Table.Td>
</Table.Tr>
)}
</React.Fragment>
))}
</Table.Tbody>
</Table>
)}
/>
)}
</Accordion.Panel>
</Accordion.Item>
);
})}
</Accordion>
</Stack>
);
};
{target.lastError && (
<Table.Tr>
<Table.Td colSpan={5} valign="top">
<Alert
color="red"
mb="sm"
icon={<IconAlertTriangle />}
>
<strong>Error scraping target:</strong>{" "}
{target.lastError}
</Alert>
</Table.Td>
</Table.Tr>
)}
</React.Fragment>
))}
</Table.Tbody>
</Table>
)}
/>
)}
</Accordion.Panel>
</Accordion.Item>
);
})}
</Accordion>
</Stack>
);
}
);
export default ScrapePoolList;

View File

@@ -30,9 +30,16 @@ import ScrapePoolList from "./ScrapePoolsList";
 import { useSuspenseAPIQuery } from "../../api/api";
 import { ScrapePoolsResult } from "../../api/responseTypes/scrapePools";
 import { expandIconStyle, inputIconStyle } from "../../styles";
+import { useDebouncedValue } from "@mantine/hooks";
 
 export const targetPoolDisplayLimit = 20;
 
+// Should be defined as a constant here instead of inline as a value
+// to avoid unnecessary re-renders. Otherwise the empty array has
+// a different reference on each render and causes subsequent memoized
+// computations to re-run as long as no state filter is selected.
+const emptyHealthFilter: string[] = [];
+
 export default function TargetsPage() {
   // Load the list of all available scrape pools.
   const {
@@ -48,12 +55,13 @@ export default function TargetsPage() {
   const [scrapePool, setScrapePool] = useQueryParam("pool", StringParam);
   const [healthFilter, setHealthFilter] = useQueryParam(
     "health",
-    withDefault(ArrayParam, [])
+    withDefault(ArrayParam, emptyHealthFilter)
   );
   const [searchFilter, setSearchFilter] = useQueryParam(
     "search",
     withDefault(StringParam, "")
   );
+  const [debouncedSearch] = useDebouncedValue<string>(searchFilter.trim(), 250);
 
   const { collapsedPools, showLimitAlert } = useAppSelector(
     (state) => state.targetsPage
@@ -147,7 +155,7 @@ export default function TargetsPage() {
           poolNames={scrapePools}
           selectedPool={(limited && scrapePools[0]) || scrapePool || null}
           healthFilter={healthFilter as string[]}
-          searchFilter={searchFilter}
+          searchFilter={debouncedSearch}
         />
       </Suspense>
     </ErrorBoundary>