Mirror of https://github.com/prometheus/prometheus.git (synced 2025-08-06 22:27:17 +02:00)

Commit 4d5add3cc3: Merge branch 'prometheus:main' into patch-tracing
@@ -30,6 +30,7 @@ import (
     goregexp "regexp" //nolint:depguard // The Prometheus client library requires us to pass a regexp from this package.
     "runtime"
     "runtime/debug"
+    "slices"
     "strconv"
     "strings"
     "sync"
@@ -1921,10 +1922,8 @@ func (p *rwProtoMsgFlagParser) Set(opt string) error {
     if err := t.Validate(); err != nil {
         return err
     }
-    for _, prev := range *p.msgs {
-        if prev == t {
-            return fmt.Errorf("duplicated %v flag value, got %v already", t, *p.msgs)
-        }
+    if slices.Contains(*p.msgs, t) {
+        return fmt.Errorf("duplicated %v flag value, got %v already", t, *p.msgs)
     }
     *p.msgs = append(*p.msgs, t)
     return nil
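This hunk, and several later ones in this commit, replace hand-rolled membership loops with `slices.Contains` from Go's standard `slices` package (hence the new imports above). A minimal, self-contained sketch of the equivalence; the message name is illustrative rather than taken from the patch:

```go
package main

import (
	"fmt"
	"slices"
)

func main() {
	msgs := []string{"prometheus.WriteRequest"}
	t := "prometheus.WriteRequest"

	// Before: an explicit scan over the slice.
	var found bool
	for _, prev := range msgs {
		if prev == t {
			found = true
			break
		}
	}

	// After: slices.Contains performs the same linear scan.
	fmt.Println(found, slices.Contains(msgs, t)) // true true
}
```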
@@ -21,6 +21,7 @@ import (
     "net/url"
     "os"
     "path/filepath"
+    "slices"
     "sort"
     "strconv"
     "strings"
@@ -1109,13 +1110,11 @@ func (v *AlertmanagerAPIVersion) UnmarshalYAML(unmarshal func(interface{}) error
         return err
     }
 
-    for _, supportedVersion := range SupportedAlertmanagerAPIVersions {
-        if *v == supportedVersion {
-            return nil
-        }
+    if !slices.Contains(SupportedAlertmanagerAPIVersions, *v) {
+        return fmt.Errorf("expected Alertmanager api version to be one of %v but got %v", SupportedAlertmanagerAPIVersions, *v)
     }
 
-    return fmt.Errorf("expected Alertmanager api version to be one of %v but got %v", SupportedAlertmanagerAPIVersions, *v)
+    return nil
 }
 
 const (
@@ -20,6 +20,7 @@ import (
     "log/slog"
     "os"
     "reflect"
+    "slices"
     "strings"
     "sync"
     "time"
@@ -210,18 +211,9 @@ func (c *SDConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
         if _, ok := allowedSelectors[c.Role]; !ok {
             return fmt.Errorf("invalid role: %q, expecting one of: pod, service, endpoints, endpointslice, node or ingress", c.Role)
         }
-        var allowed bool
-        for _, role := range allowedSelectors[c.Role] {
-            if role == string(selector.Role) {
-                allowed = true
-                break
-            }
-        }
-
-        if !allowed {
+        if !slices.Contains(allowedSelectors[c.Role], string(selector.Role)) {
             return fmt.Errorf("%s role supports only %s selectors", c.Role, strings.Join(allowedSelectors[c.Role], ", "))
         }
-
         _, err := fields.ParseSelector(selector.Field)
         if err != nil {
             return err
@@ -167,10 +167,8 @@ func (b *Builder) Del(ns ...string) *Builder {
 // Keep removes all labels from the base except those with the given names.
 func (b *Builder) Keep(ns ...string) *Builder {
     b.base.Range(func(l Label) {
-        for _, n := range ns {
-            if l.Name == n {
-                return
-            }
+        if slices.Contains(ns, l.Name) {
+            return
         }
         b.del = append(b.del, l.Name)
     })
@@ -95,12 +95,7 @@ func (m *FastRegexMatcher) compileMatchStringFunction() func(string) bool {
 
     return func(s string) bool {
         if len(m.setMatches) != 0 {
-            for _, match := range m.setMatches {
-                if match == s {
-                    return true
-                }
-            }
-            return false
+            return slices.Contains(m.setMatches, s)
         }
         if m.prefix != "" && !strings.HasPrefix(s, m.prefix) {
             return false
@@ -771,16 +766,11 @@ func (m *equalMultiStringSliceMatcher) setMatches() []string {
 
 func (m *equalMultiStringSliceMatcher) Matches(s string) bool {
     if m.caseSensitive {
-        for _, v := range m.values {
-            if s == v {
-                return true
-            }
-        }
-    } else {
-        for _, v := range m.values {
-            if strings.EqualFold(s, v) {
-                return true
-            }
-        }
+        return slices.Contains(m.values, s)
+    }
+    for _, v := range m.values {
+        if strings.EqualFold(s, v) {
+            return true
+        }
     }
     return false
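Note that only the case-sensitive branch collapses to `slices.Contains`; case-insensitive matching still needs its explicit `strings.EqualFold` loop. A standalone sketch of both idioms; `slices.ContainsFunc` is shown here as the stdlib analogue of the remaining loop, though the patch keeps the loop itself:

```go
package main

import (
	"fmt"
	"slices"
	"strings"
)

func main() {
	values := []string{"Alpha", "beta"}

	// Case-sensitive membership maps directly onto slices.Contains.
	fmt.Println(slices.Contains(values, "beta")) // true

	// Case-insensitive membership needs a predicate per element.
	fmt.Println(slices.ContainsFunc(values, func(v string) bool {
		return strings.EqualFold(v, "ALPHA")
	})) // true
}
```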
promql/engine.go (131 changed lines)
@@ -1377,7 +1377,7 @@ func (ev *evaluator) rangeEval(ctx context.Context, prepSeries func(labels.Label
     return mat, warnings
 }
 
-func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.AggregateExpr, sortedGrouping []string, inputMatrix Matrix, param float64) (Matrix, annotations.Annotations) {
+func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.AggregateExpr, sortedGrouping []string, inputMatrix Matrix, params *fParams) (Matrix, annotations.Annotations) {
     // Keep a copy of the original point slice so that it can be returned to the pool.
     origMatrix := slices.Clone(inputMatrix)
     defer func() {
@@ -1387,7 +1387,7 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate
         }
     }()
 
-    var warnings annotations.Annotations
+    var annos annotations.Annotations
 
     enh := &EvalNodeHelper{enableDelayedNameRemoval: ev.enableDelayedNameRemoval}
     tempNumSamples := ev.currentSamples
@@ -1417,46 +1417,43 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate
     }
     groups := make([]groupedAggregation, groupCount)
 
-    var k int64
-    var ratio float64
     var seriess map[uint64]Series
 
     switch aggExpr.Op {
     case parser.TOPK, parser.BOTTOMK, parser.LIMITK:
-        if !convertibleToInt64(param) {
-            ev.errorf("Scalar value %v overflows int64", param)
+        // Return early if all k values are less than one.
+        if params.Max() < 1 {
+            return nil, annos
         }
-        k = int64(param)
-        if k > int64(len(inputMatrix)) {
-            k = int64(len(inputMatrix))
-        }
-        if k < 1 {
-            return nil, warnings
-        }
-        seriess = make(map[uint64]Series, len(inputMatrix)) // Output series by series hash.
+        seriess = make(map[uint64]Series, len(inputMatrix))
     case parser.LIMIT_RATIO:
-        if math.IsNaN(param) {
-            ev.errorf("Ratio value %v is NaN", param)
+        // Return early if all r values are zero.
+        if params.Max() == 0 && params.Min() == 0 {
+            return nil, annos
         }
-        switch {
-        case param == 0:
-            return nil, warnings
-        case param < -1.0:
-            ratio = -1.0
-            warnings.Add(annotations.NewInvalidRatioWarning(param, ratio, aggExpr.Param.PositionRange()))
-        case param > 1.0:
-            ratio = 1.0
-            warnings.Add(annotations.NewInvalidRatioWarning(param, ratio, aggExpr.Param.PositionRange()))
-        default:
-            ratio = param
+        if params.Max() > 1.0 {
+            annos.Add(annotations.NewInvalidRatioWarning(params.Max(), 1.0, aggExpr.Param.PositionRange()))
         }
-        seriess = make(map[uint64]Series, len(inputMatrix)) // Output series by series hash.
+        if params.Min() < -1.0 {
+            annos.Add(annotations.NewInvalidRatioWarning(params.Min(), -1.0, aggExpr.Param.PositionRange()))
+        }
+        seriess = make(map[uint64]Series, len(inputMatrix))
 
     case parser.QUANTILE:
-        if math.IsNaN(param) || param < 0 || param > 1 {
-            warnings.Add(annotations.NewInvalidQuantileWarning(param, aggExpr.Param.PositionRange()))
+        if params.HasAnyNaN() {
+            annos.Add(annotations.NewInvalidQuantileWarning(math.NaN(), aggExpr.Param.PositionRange()))
+        }
+        if params.Max() > 1 {
+            annos.Add(annotations.NewInvalidQuantileWarning(params.Max(), aggExpr.Param.PositionRange()))
+        }
+        if params.Min() < 0 {
+            annos.Add(annotations.NewInvalidQuantileWarning(params.Min(), aggExpr.Param.PositionRange()))
         }
     }
 
     for ts := ev.startTimestamp; ts <= ev.endTimestamp; ts += ev.interval {
+        fParam := params.Next()
         if err := contextDone(ctx, "expression evaluation"); err != nil {
             ev.error(err)
         }
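The validation above now summarizes the whole parameter series up front via `params.Max()`, `params.Min()`, and `params.HasAnyNaN()`, instead of checking one scalar; the concrete per-step value is drawn inside the timestamp loop with `params.Next()`. A self-contained sketch of the pre-scan idea, with illustrative values:

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	// One parameter value per evaluation step, e.g. from scalar(foo).
	params := []float64{0.5, 1.2, -0.3}

	minV, maxV, hasNaN := math.MaxFloat64, -math.MaxFloat64, false
	for _, v := range params {
		minV = math.Min(minV, v)
		maxV = math.Max(maxV, v)
		if math.IsNaN(v) {
			hasNaN = true
		}
	}
	// quantile would warn here because maxV > 1 and minV < 0;
	// limit_ratio would instead cap values outside [-1, 1].
	fmt.Println(minV, maxV, hasNaN) // -0.3 1.2 false
}
```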
@@ -1468,17 +1465,17 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate
         var ws annotations.Annotations
         switch aggExpr.Op {
         case parser.TOPK, parser.BOTTOMK, parser.LIMITK, parser.LIMIT_RATIO:
-            result, ws = ev.aggregationK(aggExpr, k, ratio, inputMatrix, seriesToResult, groups, enh, seriess)
+            result, ws = ev.aggregationK(aggExpr, fParam, inputMatrix, seriesToResult, groups, enh, seriess)
             // If this could be an instant query, shortcut so as not to change sort order.
-            if ev.endTimestamp == ev.startTimestamp {
-                warnings.Merge(ws)
-                return result, warnings
+            if ev.startTimestamp == ev.endTimestamp {
+                annos.Merge(ws)
+                return result, annos
             }
         default:
-            ws = ev.aggregation(aggExpr, param, inputMatrix, result, seriesToResult, groups, enh)
+            ws = ev.aggregation(aggExpr, fParam, inputMatrix, result, seriesToResult, groups, enh)
         }
 
-        warnings.Merge(ws)
+        annos.Merge(ws)
 
         if ev.currentSamples > ev.maxSamples {
             ev.error(ErrTooManySamples(env))
@@ -1503,7 +1500,7 @@ func (ev *evaluator) rangeEvalAgg(ctx context.Context, aggExpr *parser.Aggregate
         }
         result = result[:dst]
     }
-    return result, warnings
+    return result, annos
 }
 
 // evalSeries generates a Matrix between ev.startTimestamp and ev.endTimestamp (inclusive), each point spaced ev.interval apart, from series given offset.
@@ -1681,18 +1678,14 @@ func (ev *evaluator) eval(ctx context.Context, expr parser.Expr) (parser.Value,
         var warnings annotations.Annotations
         originalNumSamples := ev.currentSamples
         // param is the number k for topk/bottomk, or q for quantile.
-        var fParam float64
-        if param != nil {
-            val, ws := ev.eval(ctx, param)
-            warnings.Merge(ws)
-            fParam = val.(Matrix)[0].Floats[0].F
-        }
+        fp, ws := newFParams(ctx, ev, param)
+        warnings.Merge(ws)
         // Now fetch the data to be aggregated.
         val, ws := ev.eval(ctx, e.Expr)
         warnings.Merge(ws)
         inputMatrix := val.(Matrix)
 
-        result, ws := ev.rangeEvalAgg(ctx, e, sortedGrouping, inputMatrix, fParam)
+        result, ws := ev.rangeEvalAgg(ctx, e, sortedGrouping, inputMatrix, fp)
         warnings.Merge(ws)
         ev.currentSamples = originalNumSamples + result.TotalSamples()
         ev.samplesStats.UpdatePeak(ev.currentSamples)
@@ -3269,7 +3262,7 @@ func (ev *evaluator) aggregation(e *parser.AggregateExpr, q float64, inputMatrix
 // seriesToResult maps inputMatrix indexes to groups indexes.
 // For an instant query, returns a Matrix in descending order for topk or ascending for bottomk, or without any order for limitk / limit_ratio.
 // For a range query, aggregates output in the seriess map.
-func (ev *evaluator) aggregationK(e *parser.AggregateExpr, k int64, r float64, inputMatrix Matrix, seriesToResult []int, groups []groupedAggregation, enh *EvalNodeHelper, seriess map[uint64]Series) (Matrix, annotations.Annotations) {
+func (ev *evaluator) aggregationK(e *parser.AggregateExpr, fParam float64, inputMatrix Matrix, seriesToResult []int, groups []groupedAggregation, enh *EvalNodeHelper, seriess map[uint64]Series) (Matrix, annotations.Annotations) {
     op := e.Op
     var s Sample
     var annos annotations.Annotations
@@ -3278,6 +3271,14 @@ func (ev *evaluator) aggregationK(e *parser.AggregateExpr, k int64, r float64, i
     for i := range groups {
         groups[i].seen = false
     }
+    // advanceRemainingSeries discards any values at the current timestamp `ts`
+    // for the remaining input series. In range queries, if these values are not
+    // consumed now, they will no longer be accessible in the next evaluation step.
+    advanceRemainingSeries := func(ts int64, startIdx int) {
+        for i := startIdx; i < len(inputMatrix); i++ {
+            _, _, _ = ev.nextValues(ts, &inputMatrix[i])
+        }
+    }
 
 seriesLoop:
     for si := range inputMatrix {
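The helper exists because the evaluator consumes the input matrix destructively: each step pops the sample at timestamp ts from the head of every series it touches, so an early return or break during a range query would leave the untouched series with a stale head at the next step. A reduced, self-contained model of that hazard; the names here are illustrative:

```go
package main

import "fmt"

// pop consumes the sample at ts from the head of a series, mimicking how
// each evaluation step advances every input series it reads.
func pop(ts int, s *[]int) (int, bool) {
	if len(*s) == 0 || (*s)[0] != ts {
		return 0, false // stale head: the sample at ts is unreachable
	}
	v := (*s)[0]
	*s = (*s)[1:]
	return v, true
}

func main() {
	series := []int{0, 60} // sample timestamps of one input series

	// Suppose step ts=0 exited early without consuming this series.
	// At ts=60 the head still holds the ts=0 sample, so the lookup misses:
	if _, ok := pop(60, &series); !ok {
		fmt.Println("sample at ts=60 missed; advanceRemainingSeries prevents this")
	}
}
```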
@@ -3287,6 +3288,42 @@ seriesLoop:
         }
         s = Sample{Metric: inputMatrix[si].Metric, F: f, H: h, DropName: inputMatrix[si].DropName}
 
+        var k int64
+        var r float64
+        switch op {
+        case parser.TOPK, parser.BOTTOMK, parser.LIMITK:
+            if !convertibleToInt64(fParam) {
+                ev.errorf("Scalar value %v overflows int64", fParam)
+            }
+            k = int64(fParam)
+            if k > int64(len(inputMatrix)) {
+                k = int64(len(inputMatrix))
+            }
+            if k < 1 {
+                if enh.Ts != ev.endTimestamp {
+                    advanceRemainingSeries(enh.Ts, si+1)
+                }
+                return nil, annos
+            }
+        case parser.LIMIT_RATIO:
+            if math.IsNaN(fParam) {
+                ev.errorf("Ratio value %v is NaN", fParam)
+            }
+            switch {
+            case fParam == 0:
+                if enh.Ts != ev.endTimestamp {
+                    advanceRemainingSeries(enh.Ts, si+1)
+                }
+                return nil, annos
+            case fParam < -1.0:
+                r = -1.0
+            case fParam > 1.0:
+                r = 1.0
+            default:
+                r = fParam
+            }
+        }
+
         group := &groups[seriesToResult[si]]
         // Initialize this group if it's the first time we've seen it.
         if !group.seen {
|
|||||||
group.groupAggrComplete = true
|
group.groupAggrComplete = true
|
||||||
groupsRemaining--
|
groupsRemaining--
|
||||||
if groupsRemaining == 0 {
|
if groupsRemaining == 0 {
|
||||||
|
// Process other values in the series before breaking the loop in case of range query.
|
||||||
|
if enh.Ts != ev.endTimestamp {
|
||||||
|
advanceRemainingSeries(enh.Ts, si+1)
|
||||||
|
}
|
||||||
break seriesLoop
|
break seriesLoop
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -612,7 +612,6 @@ func funcClampMin(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper
 
 // === round(Vector parser.ValueTypeVector, toNearest=1 Scalar) (Vector, Annotations) ===
 func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    vec := vals[0].(Vector)
     // round returns a number rounded to toNearest.
     // Ties are solved by rounding up.
     toNearest := float64(1)
@@ -621,23 +620,9 @@ func funcRound(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper
     }
     // Invert as it seems to cause fewer floating point accuracy issues.
     toNearestInverse := 1.0 / toNearest
-    for _, el := range vec {
-        if el.H != nil {
-            // Process only float samples.
-            continue
-        }
-        f := math.Floor(el.F*toNearestInverse+0.5) / toNearestInverse
-        if !enh.enableDelayedNameRemoval {
-            el.Metric = el.Metric.DropMetricName()
-        }
-        enh.Out = append(enh.Out, Sample{
-            Metric:   el.Metric,
-            F:        f,
-            DropName: true,
-        })
-    }
-    return enh.Out, nil
+    return simpleFloatFunc(vals, enh, func(f float64) float64 {
+        return math.Floor(f*toNearestInverse+0.5) / toNearestInverse
+    }), nil
 }
 
 // === Scalar(node parser.ValueTypeVector) Scalar ===
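Only the per-sample plumbing moves into `simpleFloatFunc` here; the rounding arithmetic is kept verbatim inside the closure. A standalone check of the floor-plus-half scheme with `toNearest = 1` (so the inverse factor drops out):

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	for _, f := range []float64{2.4, 2.5, -2.5} {
		// Same core as funcRound: shift by 0.5, then floor.
		fmt.Println(math.Floor(f + 0.5))
	}
	// Output: 2, 3, -2 (ties are solved by rounding up)
}
```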
@@ -823,8 +808,8 @@ func funcMadOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
     }), annos
 }
 
-// === max_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
-func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
+// compareOverTime is a helper used by funcMaxOverTime and funcMinOverTime.
+func compareOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper, compareFn func(float64, float64) bool) (Vector, annotations.Annotations) {
     samples := vals[0].(Matrix)[0]
     var annos annotations.Annotations
     if len(samples.Floats) == 0 {
@@ -837,7 +822,7 @@ func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
     return aggrOverTime(vals, enh, func(s Series) float64 {
         maxVal := s.Floats[0].F
         for _, f := range s.Floats {
-            if f.F > maxVal || math.IsNaN(maxVal) {
+            if compareFn(f.F, maxVal) {
                 maxVal = f.F
             }
         }
@@ -845,26 +830,18 @@ func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode
     }), annos
 }
 
+// === max_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
+func funcMaxOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
+    return compareOverTime(vals, args, enh, func(cur, maxVal float64) bool {
+        return (cur > maxVal) || math.IsNaN(maxVal)
+    })
+}
+
 // === min_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
 func funcMinOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    samples := vals[0].(Matrix)[0]
-    var annos annotations.Annotations
-    if len(samples.Floats) == 0 {
-        return enh.Out, nil
-    }
-    if len(samples.Histograms) > 0 {
-        metricName := samples.Metric.Get(labels.MetricName)
-        annos.Add(annotations.NewHistogramIgnoredInMixedRangeInfo(metricName, args[0].PositionRange()))
-    }
-    return aggrOverTime(vals, enh, func(s Series) float64 {
-        minVal := s.Floats[0].F
-        for _, f := range s.Floats {
-            if f.F < minVal || math.IsNaN(minVal) {
-                minVal = f.F
-            }
-        }
-        return minVal
-    }), annos
+    return compareOverTime(vals, args, enh, func(cur, maxVal float64) bool {
+        return (cur < maxVal) || math.IsNaN(maxVal)
+    })
 }
 
 // === sum_over_time(Matrix parser.ValueTypeMatrix) (Vector, Annotations) ===
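compareOverTime folds max_over_time and min_over_time into one helper parameterized by a comparison closure. The NaN handling carries over unchanged: the accumulator is replaced whenever it is NaN, so a leading NaN cannot poison the result. A self-contained check of the same fold over a plain slice:

```go
package main

import (
	"fmt"
	"math"
)

// fold mirrors the accumulator loop inside compareOverTime.
func fold(vals []float64, cmp func(cur, acc float64) bool) float64 {
	acc := vals[0]
	for _, v := range vals {
		if cmp(v, acc) {
			acc = v
		}
	}
	return acc
}

func main() {
	samples := []float64{math.NaN(), 3, 1, 2}
	maxVal := fold(samples, func(cur, acc float64) bool { return cur > acc || math.IsNaN(acc) })
	minVal := fold(samples, func(cur, acc float64) bool { return cur < acc || math.IsNaN(acc) })
	fmt.Println(maxVal, minVal) // 3 1
}
```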
@@ -997,7 +974,7 @@ func funcPresentOverTime(vals []parser.Value, _ parser.Expressions, enh *EvalNod
     }), nil
 }
 
-func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float64) Vector {
+func simpleFloatFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float64) Vector {
     for _, el := range vals[0].(Vector) {
         if el.H == nil { // Process only float samples.
             if !enh.enableDelayedNameRemoval {
@@ -1015,114 +992,114 @@ func simpleFunc(vals []parser.Value, enh *EvalNodeHelper, f func(float64) float6
 
 // === abs(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAbs(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Abs), nil
+    return simpleFloatFunc(vals, enh, math.Abs), nil
 }
 
 // === ceil(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcCeil(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Ceil), nil
+    return simpleFloatFunc(vals, enh, math.Ceil), nil
 }
 
 // === floor(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcFloor(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Floor), nil
+    return simpleFloatFunc(vals, enh, math.Floor), nil
 }
 
 // === exp(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcExp(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Exp), nil
+    return simpleFloatFunc(vals, enh, math.Exp), nil
 }
 
 // === sqrt(Vector VectorNode) (Vector, Annotations) ===
 func funcSqrt(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Sqrt), nil
+    return simpleFloatFunc(vals, enh, math.Sqrt), nil
 }
 
 // === ln(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcLn(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Log), nil
+    return simpleFloatFunc(vals, enh, math.Log), nil
 }
 
 // === log2(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcLog2(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Log2), nil
+    return simpleFloatFunc(vals, enh, math.Log2), nil
 }
 
 // === log10(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcLog10(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Log10), nil
+    return simpleFloatFunc(vals, enh, math.Log10), nil
 }
 
 // === sin(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcSin(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Sin), nil
+    return simpleFloatFunc(vals, enh, math.Sin), nil
 }
 
 // === cos(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcCos(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Cos), nil
+    return simpleFloatFunc(vals, enh, math.Cos), nil
 }
 
 // === tan(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcTan(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Tan), nil
+    return simpleFloatFunc(vals, enh, math.Tan), nil
 }
 
 // === asin(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAsin(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Asin), nil
+    return simpleFloatFunc(vals, enh, math.Asin), nil
 }
 
 // === acos(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAcos(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Acos), nil
+    return simpleFloatFunc(vals, enh, math.Acos), nil
 }
 
 // === atan(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAtan(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Atan), nil
+    return simpleFloatFunc(vals, enh, math.Atan), nil
 }
 
 // === sinh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcSinh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Sinh), nil
+    return simpleFloatFunc(vals, enh, math.Sinh), nil
 }
 
 // === cosh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcCosh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Cosh), nil
+    return simpleFloatFunc(vals, enh, math.Cosh), nil
 }
 
 // === tanh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcTanh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Tanh), nil
+    return simpleFloatFunc(vals, enh, math.Tanh), nil
 }
 
 // === asinh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAsinh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Asinh), nil
+    return simpleFloatFunc(vals, enh, math.Asinh), nil
 }
 
 // === acosh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAcosh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Acosh), nil
+    return simpleFloatFunc(vals, enh, math.Acosh), nil
 }
 
 // === atanh(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcAtanh(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, math.Atanh), nil
+    return simpleFloatFunc(vals, enh, math.Atanh), nil
 }
 
 // === rad(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcRad(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, func(v float64) float64 {
+    return simpleFloatFunc(vals, enh, func(v float64) float64 {
         return v * math.Pi / 180
     }), nil
 }
 
 // === deg(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcDeg(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, func(v float64) float64 {
+    return simpleFloatFunc(vals, enh, func(v float64) float64 {
         return v * 180 / math.Pi
     }), nil
 }
@@ -1134,7 +1111,7 @@ func funcPi(_ []parser.Value, _ parser.Expressions, _ *EvalNodeHelper) (Vector,
 
 // === sgn(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcSgn(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    return simpleFunc(vals, enh, func(v float64) float64 {
+    return simpleFloatFunc(vals, enh, func(v float64) float64 {
         switch {
         case v < 0:
             return -1
@@ -1271,79 +1248,48 @@ func funcPredictLinear(vals []parser.Value, args parser.Expressions, enh *EvalNo
     return append(enh.Out, Sample{F: slope*duration + intercept}), nil
 }
 
+func simpleHistogramFunc(vals []parser.Value, enh *EvalNodeHelper, f func(h *histogram.FloatHistogram) float64) Vector {
+    for _, el := range vals[0].(Vector) {
+        if el.H != nil { // Process only histogram samples.
+            if !enh.enableDelayedNameRemoval {
+                el.Metric = el.Metric.DropMetricName()
+            }
+            enh.Out = append(enh.Out, Sample{
+                Metric:   el.Metric,
+                F:        f(el.H),
+                DropName: true,
+            })
+        }
+    }
+    return enh.Out
+}
+
 // === histogram_count(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcHistogramCount(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    inVec := vals[0].(Vector)
-
-    for _, sample := range inVec {
-        // Skip non-histogram samples.
-        if sample.H == nil {
-            continue
-        }
-        if !enh.enableDelayedNameRemoval {
-            sample.Metric = sample.Metric.DropMetricName()
-        }
-        enh.Out = append(enh.Out, Sample{
-            Metric:   sample.Metric,
-            F:        sample.H.Count,
-            DropName: true,
-        })
-    }
-    return enh.Out, nil
+    return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 {
+        return h.Count
+    }), nil
 }
 
 // === histogram_sum(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcHistogramSum(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    inVec := vals[0].(Vector)
-
-    for _, sample := range inVec {
-        // Skip non-histogram samples.
-        if sample.H == nil {
-            continue
-        }
-        if !enh.enableDelayedNameRemoval {
-            sample.Metric = sample.Metric.DropMetricName()
-        }
-        enh.Out = append(enh.Out, Sample{
-            Metric:   sample.Metric,
-            F:        sample.H.Sum,
-            DropName: true,
-        })
-    }
-    return enh.Out, nil
+    return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 {
+        return h.Sum
+    }), nil
 }
 
 // === histogram_avg(Vector parser.ValueTypeVector) (Vector, Annotations) ===
 func funcHistogramAvg(vals []parser.Value, _ parser.Expressions, enh *EvalNodeHelper) (Vector, annotations.Annotations) {
-    inVec := vals[0].(Vector)
-
-    for _, sample := range inVec {
-        // Skip non-histogram samples.
-        if sample.H == nil {
-            continue
-        }
-        if !enh.enableDelayedNameRemoval {
-            sample.Metric = sample.Metric.DropMetricName()
-        }
-        enh.Out = append(enh.Out, Sample{
-            Metric:   sample.Metric,
-            F:        sample.H.Sum / sample.H.Count,
-            DropName: true,
-        })
-    }
-    return enh.Out, nil
+    return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 {
+        return h.Sum / h.Count
+    }), nil
 }
 
 func histogramVariance(vals []parser.Value, enh *EvalNodeHelper, varianceToResult func(float64) float64) (Vector, annotations.Annotations) {
-    vec := vals[0].(Vector)
-    for _, sample := range vec {
-        // Skip non-histogram samples.
-        if sample.H == nil {
-            continue
-        }
-        mean := sample.H.Sum / sample.H.Count
+    return simpleHistogramFunc(vals, enh, func(h *histogram.FloatHistogram) float64 {
+        mean := h.Sum / h.Count
         var variance, cVariance float64
-        it := sample.H.AllBucketIterator()
+        it := h.AllBucketIterator()
         for it.Next() {
             bucket := it.At()
             if bucket.Count == 0 {
@@ -1351,7 +1297,7 @@ func histogramVariance(vals []parser.Value, enh *EvalNodeHelper, varianceToResul
             }
             var val float64
             switch {
-            case sample.H.UsesCustomBuckets():
+            case h.UsesCustomBuckets():
                 // Use arithmetic mean in case of custom buckets.
                 val = (bucket.Upper + bucket.Lower) / 2.0
             case bucket.Lower <= 0 && bucket.Upper >= 0:
@@ -1368,20 +1314,12 @@ func histogramVariance(vals []parser.Value, enh *EvalNodeHelper, varianceToResul
             variance, cVariance = kahanSumInc(bucket.Count*delta*delta, variance, cVariance)
         }
         variance += cVariance
-        variance /= sample.H.Count
-        if !enh.enableDelayedNameRemoval {
-            sample.Metric = sample.Metric.DropMetricName()
-        }
+        variance /= h.Count
         if varianceToResult != nil {
             variance = varianceToResult(variance)
         }
-        enh.Out = append(enh.Out, Sample{
-            Metric:   sample.Metric,
-            F:        variance,
-            DropName: true,
-        })
-    }
-    return enh.Out, nil
+        return variance
+    }), nil
 }
 
 // === histogram_stddev(Vector parser.ValueTypeVector) (Vector, Annotations) ===
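simpleHistogramFunc is the histogram counterpart of simpleFloatFunc: it filters to histogram samples, handles metric-name dropping, and maps each *histogram.FloatHistogram to a float through the supplied closure. A reduced standalone model of the shape, with simplified types:

```go
package main

import "fmt"

type hist struct{ count, sum float64 }

type sample struct {
	f float64
	h *hist // nil for float samples
}

// mapHistograms keeps the skeleton (filtering, iteration) in one place and
// lets the closure supply the per-histogram math, as the patch does.
func mapHistograms(in []sample, f func(*hist) float64) []float64 {
	var out []float64
	for _, s := range in {
		if s.h != nil { // process only histogram samples
			out = append(out, f(s.h))
		}
	}
	return out
}

func main() {
	v := []sample{{f: 1}, {h: &hist{count: 10, sum: 25}}}
	// histogram_avg analogue: sum / count.
	fmt.Println(mapHistograms(v, func(h *hist) float64 { return h.sum / h.count })) // [2.5]
}
```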
promql/promqltest/testdata/aggregators.test (vendored, 20 changed lines)
@@ -274,7 +274,7 @@ load 5m
   http_requests{job="app-server", instance="1", group="canary"} 0+80x10
   http_requests_histogram{job="app-server", instance="2", group="canary"} {{schema:0 sum:10 count:10}}x11
   http_requests_histogram{job="api-server", instance="3", group="production"} {{schema:0 sum:20 count:20}}x11
-  foo 3+0x10
+  foo 1+1x9 3
 
 eval_ordered instant at 50m topk(3, http_requests)
   http_requests{group="canary", instance="1", job="app-server"} 800
@@ -340,6 +340,13 @@ eval_ordered instant at 50m topk(scalar(foo), http_requests)
   http_requests{group="canary", instance="0", job="app-server"} 700
   http_requests{group="production", instance="1", job="app-server"} 600
 
+# Bug #15971.
+eval range from 0m to 50m step 5m count(topk(scalar(foo), http_requests))
+  {} 1 2 3 4 5 6 7 8 9 9 3
+
+eval range from 0m to 50m step 5m count(bottomk(scalar(foo), http_requests))
+  {} 1 2 3 4 5 6 7 8 9 9 3
+
 # Tests for histogram: should ignore histograms.
 eval_info instant at 50m topk(100, http_requests_histogram)
 #empty
@@ -447,7 +454,7 @@ load 10s
   data{test="uneven samples",point="b"} 1
   data{test="uneven samples",point="c"} 4
   data_histogram{test="histogram sample", point="c"} {{schema:2 count:4 sum:10 buckets:[1 0 0 0 1 0 0 1 1]}}
-  foo .8
+  foo 0 1 0 1 0 1 0.8
 
 eval instant at 1m quantile without(point)(0.8, data)
   {test="two samples"} 0.8
@@ -475,11 +482,18 @@ eval instant at 1m quantile without(point)((scalar(foo)), data)
   {test="three samples"} 1.6
   {test="uneven samples"} 2.8
 
-eval_warn instant at 1m quantile without(point)(NaN, data)
+eval instant at 1m quantile without(point)(NaN, data)
+  expect warn msg: PromQL warning: quantile value should be between 0 and 1, got NaN
   {test="two samples"} NaN
   {test="three samples"} NaN
   {test="uneven samples"} NaN
 
+# Bug #15971.
+eval range from 0m to 1m step 10s quantile without(point) (scalar(foo), data)
+  {test="two samples"} 0 1 0 1 0 1 0.8
+  {test="three samples"} 0 2 0 2 0 2 1.6
+  {test="uneven samples"} 0 4 0 4 0 4 2.8
+
 # Tests for group.
 clear
 
promql/promqltest/testdata/limit.test (vendored, 22 changed lines)
@@ -11,6 +11,8 @@ load 5m
   http_requests{job="api-server", instance="3", group="canary"} 0+60x10
   http_requests{job="api-server", instance="histogram_1", group="canary"} {{schema:0 sum:10 count:10}}x11
   http_requests{job="api-server", instance="histogram_2", group="canary"} {{schema:0 sum:20 count:20}}x11
+  foo 1+1x10
+  bar 0 1 0 -1 0 1 0 -1 0 1 0
 
 eval instant at 50m count(limitk by (group) (0, http_requests))
 # empty
@@ -69,6 +71,10 @@ eval instant at 50m count(limitk(1000, http_requests{instance=~"histogram_[0-9]"
 eval range from 0 to 50m step 5m count(limitk(1000, http_requests{instance=~"histogram_[0-9]"}))
   {} 2+0x10
 
+# Bug #15971.
+eval range from 0m to 50m step 5m count(limitk(scalar(foo), http_requests))
+  {} 1 2 3 4 5 6 7 8 8 8 8
+
 # limit_ratio
 eval range from 0 to 50m step 5m count(limit_ratio(0.0, http_requests))
 # empty
@@ -105,11 +111,13 @@ eval range from 0 to 50m step 5m count(limit_ratio(-1.0, http_requests) and http
   {} 8+0x10
 
 # Capped to 1.0 -> all samples.
-eval_warn range from 0 to 50m step 5m count(limit_ratio(1.1, http_requests) and http_requests)
+eval range from 0 to 50m step 5m count(limit_ratio(1.1, http_requests) and http_requests)
+  expect warn msg: PromQL warning: ratio value should be between -1 and 1, got 1.1, capping to 1
   {} 8+0x10
 
 # Capped to -1.0 -> all samples.
-eval_warn range from 0 to 50m step 5m count(limit_ratio(-1.1, http_requests) and http_requests)
+eval range from 0 to 50m step 5m count(limit_ratio(-1.1, http_requests) and http_requests)
+  expect warn msg: PromQL warning: ratio value should be between -1 and 1, got -1.1, capping to -1
   {} 8+0x10
 
 # Verify that limit_ratio(value) and limit_ratio(1.0-value) return the "complement" of each other.
@@ -137,12 +145,12 @@ eval range from 0 to 50m step 5m count(limit_ratio(0.8, http_requests) or limit_
 eval range from 0 to 50m step 5m count(limit_ratio(0.8, http_requests) and limit_ratio(-0.2, http_requests))
 # empty
 
-# Complement below for [some_ratio, 1.0 - some_ratio], some_ratio derived from time(),
+# Complement below for [some_ratio, - (1.0 - some_ratio)], some_ratio derived from time(),
 # using a small prime number to avoid rounded ratio values, and a small set of them.
-eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) or limit_ratio(1.0 - (time() % 17/17), http_requests))
+eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) or limit_ratio( - (1.0 - (time() % 17/17)), http_requests))
   {} 8+0x10
 
-eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) and limit_ratio(1.0 - (time() % 17/17), http_requests))
+eval range from 0 to 50m step 5m count(limit_ratio(time() % 17/17, http_requests) and limit_ratio( - (1.0 - (time() % 17/17)), http_requests))
 # empty
 
 # Poor man's normality check: ok (loaded samples follow a nice linearity over labels and time).
@@ -156,3 +164,7 @@ eval instant at 50m limit_ratio(1, http_requests{instance="histogram_1"})
 
 eval range from 0 to 50m step 5m limit_ratio(1, http_requests{instance="histogram_1"})
   {__name__="http_requests", group="canary", instance="histogram_1", job="api-server"} {{count:10 sum:10}}x10
+
+# Bug #15971.
+eval range from 0m to 50m step 5m count(limit_ratio(scalar(bar), http_requests))
+  {} _ 8 _ 8 _ 8 _ 8 _ 8 _
@@ -14,6 +14,7 @@
 package promql
 
 import (
+    "context"
     "encoding/json"
     "errors"
     "fmt"
@@ -533,3 +534,68 @@ func (ssi *storageSeriesIterator) Next() chunkenc.ValueType {
 func (ssi *storageSeriesIterator) Err() error {
     return nil
 }
+
+type fParams struct {
+    series     Series
+    constValue float64
+    isConstant bool
+    minValue   float64
+    maxValue   float64
+    hasAnyNaN  bool
+}
+
+// newFParams evaluates the expression and returns an fParams object,
+// which holds the parameter values (constant or series) along with min, max, and NaN info.
+func newFParams(ctx context.Context, ev *evaluator, expr parser.Expr) (*fParams, annotations.Annotations) {
+    if expr == nil {
+        return &fParams{}, nil
+    }
+    var constParam bool
+    if _, ok := expr.(*parser.NumberLiteral); ok {
+        constParam = true
+    }
+    val, ws := ev.eval(ctx, expr)
+    mat, ok := val.(Matrix)
+    if !ok || len(mat) == 0 {
+        return &fParams{}, ws
+    }
+    fp := &fParams{
+        series:     mat[0],
+        isConstant: constParam,
+        minValue:   math.MaxFloat64,
+        maxValue:   -math.MaxFloat64,
+    }
+
+    if constParam {
+        fp.constValue = fp.series.Floats[0].F
+        fp.minValue, fp.maxValue = fp.constValue, fp.constValue
+        fp.hasAnyNaN = math.IsNaN(fp.constValue)
+        return fp, ws
+    }
+
+    for _, v := range fp.series.Floats {
+        fp.maxValue = math.Max(fp.maxValue, v.F)
+        fp.minValue = math.Min(fp.minValue, v.F)
+        if math.IsNaN(v.F) {
+            fp.hasAnyNaN = true
+        }
+    }
+    return fp, ws
+}
+
+func (fp *fParams) Max() float64    { return fp.maxValue }
+func (fp *fParams) Min() float64    { return fp.minValue }
+func (fp *fParams) HasAnyNaN() bool { return fp.hasAnyNaN }
+
+// Next returns the next value from the series or the constant value, and advances the series if applicable.
+func (fp *fParams) Next() float64 {
+    if fp.isConstant {
+        return fp.constValue
+    }
+    if len(fp.series.Floats) > 0 {
+        val := fp.series.Floats[0].F
+        fp.series.Floats = fp.series.Floats[1:]
+        return val
+    }
+    return 0
+}
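The behavioral payoff of fParams: a constant parameter keeps its single value, while a series-valued parameter such as scalar(foo) yields a fresh value at every evaluation step, which is what the Bug #15971 tests in this commit exercise. A self-contained mimic of Next():

```go
package main

import "fmt"

// fparams is a stripped-down mimic of the fParams helper: it yields one
// parameter value per evaluation step, or a constant for literal params.
type fparams struct {
	values     []float64
	constValue float64
	isConstant bool
}

func (fp *fparams) Next() float64 {
	if fp.isConstant {
		return fp.constValue
	}
	if len(fp.values) > 0 {
		v := fp.values[0]
		fp.values = fp.values[1:]
		return v
	}
	return 0
}

func main() {
	// scalar(foo) evaluated over three steps: k differs at each step.
	fp := &fparams{values: []float64{1, 2, 3}}
	for step := 0; step < 3; step++ {
		fmt.Printf("step %d: k=%v\n", step, fp.Next())
	}
}
```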
@@ -20,6 +20,7 @@ import (
     "math"
     "os"
     "path"
+    "slices"
     "sort"
     "strconv"
     "sync"
@@ -1008,11 +1009,8 @@ func TestMetricsUpdate(t *testing.T) {
         var metrics int
         for _, m := range ms {
             s := m.GetName()
-            for _, n := range metricNames {
-                if s == n {
-                    metrics += len(m.Metric)
-                    break
-                }
+            if slices.Contains(metricNames, s) {
+                metrics += len(m.Metric)
             }
         }
         return metrics
@@ -599,10 +599,8 @@ func Intersect(its ...Postings) Postings {
     if len(its) == 1 {
         return its[0]
     }
-    for _, p := range its {
-        if p == EmptyPostings() {
-            return EmptyPostings()
-        }
+    if slices.Contains(its, EmptyPostings()) {
+        return EmptyPostings()
     }
 
     return newIntersectPostings(its...)
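The rewritten Intersect relies on the fact that slices.Contains compares interface values with ==, exactly as the old p == EmptyPostings() loop did, so the behavior is unchanged. A tiny sketch of interface comparison in a slice; the types are illustrative, not from the package:

```go
package main

import (
	"fmt"
	"slices"
)

type postings interface{ next() bool }

type empty struct{}

func (empty) next() bool { return false }

func main() {
	var sentinel postings = empty{}
	its := []postings{empty{}, sentinel}

	// Interface == compares dynamic type and value, so an equal sentinel
	// is found just like in the explicit loop.
	fmt.Println(slices.Contains(its, sentinel)) // true
}
```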
@@ -224,6 +224,7 @@ function App() {
           leftSection={p.icon}
           rightSection={<IconChevronDown style={navIconStyle} />}
           px={navLinkXPadding}
+          onClick={(e) => e.preventDefault()}
         >
           Status <IconChevronRight style={navIconStyle} /> {p.title}
         </Button>
@@ -236,14 +237,9 @@ function App() {
         element={
           <Menu.Target>
             <Button
-              component={NavLink}
-              to="/"
               className={classes.link}
               leftSection={<IconServer style={navIconStyle} />}
               rightSection={<IconChevronDown style={navIconStyle} />}
-              onClick={(e) => {
-                e.preventDefault();
-              }}
               px={navLinkXPadding}
             >
               Status
@@ -339,8 +335,12 @@ function App() {
         >
           <Group gap={10} wrap="nowrap">
             <img src={PrometheusLogo} height={30} />
-            <Text hiddenFrom="sm" fz={20}>Prometheus</Text>
-            <Text visibleFrom="md" fz={20}>Prometheus</Text>
+            <Text hiddenFrom="sm" fz={20}>
+              Prometheus
+            </Text>
+            <Text visibleFrom="md" fz={20}>
+              Prometheus
+            </Text>
             <Text fz={20}>{agentMode && "Agent"}</Text>
           </Group>
         </Link>
@@ -19,7 +19,7 @@ import {
 } from "@tabler/icons-react";
 import { useSuspenseAPIQuery } from "../../api/api";
 import { Target, TargetsResult } from "../../api/responseTypes/targets";
-import React, { FC, useMemo } from "react";
+import React, { FC, memo, useMemo } from "react";
 import {
   humanizeDurationRelative,
   humanizeDuration,
@@ -37,7 +37,6 @@ import CustomInfiniteScroll from "../../components/CustomInfiniteScroll";
 import badgeClasses from "../../Badge.module.css";
 import panelClasses from "../../Panel.module.css";
 import TargetLabels from "./TargetLabels";
-import { useDebouncedValue } from "@mantine/hooks";
 import { targetPoolDisplayLimit } from "./TargetsPage";
 import { badgeIconStyle } from "../../styles";
 
@@ -145,278 +144,280 @@ type ScrapePoolListProp = {
   searchFilter: string;
 };
 
-const ScrapePoolList: FC<ScrapePoolListProp> = ({
-  poolNames,
-  selectedPool,
-  healthFilter,
-  searchFilter,
-}) => {
-  // Based on the selected pool (if any), load the list of targets.
-  const {
-    data: {
-      data: { activeTargets },
-    },
-  } = useSuspenseAPIQuery<TargetsResult>({
-    path: `/targets`,
-    params: {
-      state: "active",
-      scrapePool: selectedPool === null ? "" : selectedPool,
-    },
-  });
-
-  const dispatch = useAppDispatch();
-  const [showEmptyPools, setShowEmptyPools] = useLocalStorage<boolean>({
-    key: "targetsPage.showEmptyPools",
-    defaultValue: false,
-  });
-
-  const { collapsedPools, showLimitAlert } = useAppSelector(
-    (state) => state.targetsPage
-  );
-
-  const [debouncedSearch] = useDebouncedValue<string>(searchFilter.trim(), 250);
-
-  const allPools = useMemo(
-    () =>
-      buildPoolsData(
-        selectedPool ? [selectedPool] : poolNames,
-        activeTargets,
-        debouncedSearch,
-        healthFilter
-      ),
-    [selectedPool, poolNames, activeTargets, debouncedSearch, healthFilter]
-  );
-
-  const allPoolNames = Object.keys(allPools);
-  const shownPoolNames = showEmptyPools
-    ? allPoolNames
-    : allPoolNames.filter((pn) => allPools[pn].targets.length !== 0);
-
-  return (
-    <Stack>
-      {allPoolNames.length === 0 ? (
-        <Alert title="No scrape pools found" icon={<IconInfoCircle />}>
-          No scrape pools found.
-        </Alert>
-      ) : (
-        !showEmptyPools &&
-        allPoolNames.length !== shownPoolNames.length && (
-          <Alert
-            title="Hiding pools with no matching targets"
-            icon={<IconInfoCircle />}
-          >
-            Hiding {allPoolNames.length - shownPoolNames.length} empty pools due
-            to filters or no targets.
-            <Anchor ml="md" fz="1em" onClick={() => setShowEmptyPools(true)}>
-              Show empty pools
-            </Anchor>
-          </Alert>
-        )
-      )}
-      {showLimitAlert && (
-        <Alert
-          title="Found many pools, showing only one"
-          icon={<IconInfoCircle />}
-          withCloseButton
-          onClose={() => dispatch(setShowLimitAlert(false))}
-        >
-          There are more than {targetPoolDisplayLimit} scrape pools. Showing
-          only the first one. Use the dropdown to select a different pool.
-        </Alert>
-      )}
-      <Accordion
-        multiple
-        variant="separated"
-        value={allPoolNames.filter((p) => !collapsedPools.includes(p))}
-        onChange={(value) =>
-          dispatch(
-            setCollapsedPools(allPoolNames.filter((p) => !value.includes(p)))
-          )
-        }
-      >
-        {shownPoolNames.map((poolName) => {
-          const pool = allPools[poolName];
-          return (
-            <Accordion.Item
-              key={poolName}
-              value={poolName}
-              className={poolPanelHealthClass(pool)}
-            >
-              <Accordion.Control>
-                <Group wrap="nowrap" justify="space-between" mr="lg">
-                  <Text>{poolName}</Text>
-                  <Group gap="xs">
-                    <Text c="gray.6">
-                      {pool.upCount} / {pool.count} up
-                    </Text>
-                    <RingProgress
-                      size={25}
-                      thickness={5}
-                      sections={
-                        pool.count === 0
-                          ? []
-                          : [
-                              {
-                                value: (pool.upCount / pool.count) * 100,
-                                color: "green.4",
-                              },
-                              {
-                                value: (pool.unknownCount / pool.count) * 100,
-                                color: "gray.4",
-                              },
-                              {
-                                value: (pool.downCount / pool.count) * 100,
-                                color: "red.5",
-                              },
-                            ]
-                      }
-                    />
-                  </Group>
-                </Group>
-              </Accordion.Control>
-              <Accordion.Panel>
-                {pool.count === 0 ? (
-                  <Alert title="No targets" icon={<IconInfoCircle />}>
-                    No active targets in this scrape pool.
-                    <Anchor
-                      ml="md"
-                      fz="1em"
-                      onClick={() => setShowEmptyPools(false)}
-                    >
-                      Hide empty pools
-                    </Anchor>
-                  </Alert>
-                ) : pool.targets.length === 0 ? (
-                  <Alert title="No matching targets" icon={<IconInfoCircle />}>
-                    No targets in this pool match your filter criteria (omitted{" "}
-                    {pool.count} filtered targets).
-                    <Anchor
-                      ml="md"
-                      fz="1em"
-                      onClick={() => setShowEmptyPools(false)}
-                    >
-                      Hide empty pools
-                    </Anchor>
-                  </Alert>
-                ) : (
-                  <CustomInfiniteScroll
-                    allItems={pool.targets}
-                    child={({ items }) => (
-                      <Table>
-                        <Table.Thead>
-                          <Table.Tr>
-                            <Table.Th w="25%">Endpoint</Table.Th>
-                            <Table.Th>Labels</Table.Th>
-                            <Table.Th w={230}>Last scrape</Table.Th>
-                            <Table.Th w={100}>State</Table.Th>
-                          </Table.Tr>
-                        </Table.Thead>
-                        <Table.Tbody>
-                          {items.map((target, i) => (
-                            // TODO: Find a stable and definitely unique key.
-                            <React.Fragment key={i}>
-                              <Table.Tr
-                                style={{
-                                  borderBottom: target.lastError
-                                    ? "none"
-                                    : undefined,
-                                }}
-                              >
-                                <Table.Td valign="top">
-                                  <EndpointLink
-                                    endpoint={target.scrapeUrl}
-                                    globalUrl={target.globalUrl}
-                                  />
-                                </Table.Td>
-
-                                <Table.Td valign="top">
-                                  <TargetLabels
-                                    labels={target.labels}
-                                    discoveredLabels={target.discoveredLabels}
-                                  />
-                                </Table.Td>
-                                <Table.Td valign="top">
-                                  <Group gap="xs" wrap="wrap">
-                                    <Tooltip
-                                      label="Last target scrape"
-                                      withArrow
-                                    >
-                                      <Badge
-                                        variant="light"
-                                        className={badgeClasses.statsBadge}
-                                        styles={{
-                                          label: { textTransform: "none" },
-                                        }}
-                                        leftSection={
-                                          <IconRefresh style={badgeIconStyle} />
-                                        }
-                                      >
-                                        {humanizeDurationRelative(
-                                          target.lastScrape,
-                                          now()
-                                        )}
-                                      </Badge>
-                                    </Tooltip>
-
-                                    <Tooltip
-                                      label="Duration of last target scrape"
-                                      withArrow
-                                    >
-                                      <Badge
-                                        variant="light"
-                                        className={badgeClasses.statsBadge}
-                                        styles={{
-                                          label: { textTransform: "none" },
-                                        }}
-                                        leftSection={
-                                          <IconHourglass
-                                            style={badgeIconStyle}
-                                          />
-                                        }
-                                      >
-                                        {humanizeDuration(
-                                          target.lastScrapeDuration * 1000
-                                        )}
-                                      </Badge>
-                                    </Tooltip>
-                                  </Group>
-                                </Table.Td>
-                                <Table.Td valign="top">
-                                  <Badge
-                                    className={healthBadgeClass(target.health)}
-                                  >
-                                    {target.health}
-                                  </Badge>
-                                </Table.Td>
-                              </Table.Tr>
-                              {target.lastError && (
-                                <Table.Tr>
-                                  <Table.Td colSpan={5} valign="top">
-                                    <Alert
-                                      color="red"
-                                      mb="sm"
-                                      icon={<IconAlertTriangle />}
-                                    >
-                                      <strong>Error scraping target:</strong>{" "}
-                                      {target.lastError}
-                                    </Alert>
-                                  </Table.Td>
-                                </Table.Tr>
-                              )}
-                            </React.Fragment>
-                          ))}
-                        </Table.Tbody>
-                      </Table>
-                    )}
-                  />
-                )}
-              </Accordion.Panel>
-            </Accordion.Item>
-          );
-        })}
-      </Accordion>
-    </Stack>
-  );
-};
+const ScrapePoolList: FC<ScrapePoolListProp> = memo(
+  ({ poolNames, selectedPool, healthFilter, searchFilter }) => {
+    // Based on the selected pool (if any), load the list of targets.
+    const {
+      data: {
+        data: { activeTargets },
+      },
+    } = useSuspenseAPIQuery<TargetsResult>({
+      path: `/targets`,
+      params: {
+        state: "active",
+        scrapePool: selectedPool === null ? "" : selectedPool,
+      },
+    });
+
+    const dispatch = useAppDispatch();
+    const [showEmptyPools, setShowEmptyPools] = useLocalStorage<boolean>({
+      key: "targetsPage.showEmptyPools",
+      defaultValue: false,
+    });
+
+    const { collapsedPools, showLimitAlert } = useAppSelector(
+      (state) => state.targetsPage
+    );
+
+    const allPools = useMemo(
+      () =>
+        buildPoolsData(
+          selectedPool ? [selectedPool] : poolNames,
+          activeTargets,
+          searchFilter,
+          healthFilter
+        ),
+      [selectedPool, poolNames, activeTargets, searchFilter, healthFilter]
+    );
+
+    const allPoolNames = Object.keys(allPools);
+    const shownPoolNames = showEmptyPools
+      ? allPoolNames
+      : allPoolNames.filter((pn) => allPools[pn].targets.length !== 0);
+
+    return (
+      <Stack>
+        {allPoolNames.length === 0 ? (
+          <Alert title="No scrape pools found" icon={<IconInfoCircle />}>
+            No scrape pools found.
+          </Alert>
+        ) : (
+          !showEmptyPools &&
+          allPoolNames.length !== shownPoolNames.length && (
+            <Alert
+              title="Hiding pools with no matching targets"
+              icon={<IconInfoCircle />}
+            >
+              Hiding {allPoolNames.length - shownPoolNames.length} empty pools
+              due to filters or no targets.
+              <Anchor ml="md" fz="1em" onClick={() => setShowEmptyPools(true)}>
+                Show empty pools
+              </Anchor>
+            </Alert>
+          )
+        )}
+        {showLimitAlert && (
+          <Alert
+            title="Found many pools, showing only one"
+            icon={<IconInfoCircle />}
+            withCloseButton
+            onClose={() => dispatch(setShowLimitAlert(false))}
+          >
+            There are more than {targetPoolDisplayLimit} scrape pools. Showing
+            only the first one. Use the dropdown to select a different pool.
+          </Alert>
+        )}
+        <Accordion
+          multiple
+          variant="separated"
+          value={allPoolNames.filter((p) => !collapsedPools.includes(p))}
+          onChange={(value) =>
+            dispatch(
+              setCollapsedPools(allPoolNames.filter((p) => !value.includes(p)))
+            )
+          }
+        >
+          {shownPoolNames.map((poolName) => {
+            const pool = allPools[poolName];
+            return (
+              <Accordion.Item
+                key={poolName}
+                value={poolName}
+                className={poolPanelHealthClass(pool)}
+              >
+                <Accordion.Control>
+                  <Group wrap="nowrap" justify="space-between" mr="lg">
+                    <Text>{poolName}</Text>
+                    <Group gap="xs">
+                      <Text c="gray.6">
+                        {pool.upCount} / {pool.count} up
+                      </Text>
+                      <RingProgress
+                        size={25}
+                        thickness={5}
+                        sections={
+                          pool.count === 0
+                            ? []
+                            : [
+                                {
+                                  value: (pool.upCount / pool.count) * 100,
+                                  color: "green.4",
+                                },
+                                {
+                                  value: (pool.unknownCount / pool.count) * 100,
+                                  color: "gray.4",
+                                },
+                                {
+                                  value: (pool.downCount / pool.count) * 100,
+                                  color: "red.5",
+                                },
+                              ]
+                        }
+                      />
+                    </Group>
+                  </Group>
+                </Accordion.Control>
+                <Accordion.Panel>
+                  {pool.count === 0 ? (
+                    <Alert title="No targets" icon={<IconInfoCircle />}>
+                      No active targets in this scrape pool.
+                      <Anchor
+                        ml="md"
+                        fz="1em"
+                        onClick={() => setShowEmptyPools(false)}
+                      >
+                        Hide empty pools
+                      </Anchor>
+                    </Alert>
+                  ) : pool.targets.length === 0 ? (
+                    <Alert
+                      title="No matching targets"
+                      icon={<IconInfoCircle />}
+                    >
+                      No targets in this pool match your filter criteria
+                      (omitted {pool.count} filtered targets).
+                      <Anchor
+                        ml="md"
+                        fz="1em"
+                        onClick={() => setShowEmptyPools(false)}
+                      >
+                        Hide empty pools
+                      </Anchor>
+                    </Alert>
+                  ) : (
+                    <CustomInfiniteScroll
+                      allItems={pool.targets}
+                      child={({ items }) => (
+                        <Table>
+                          <Table.Thead>
+                            <Table.Tr>
+                              <Table.Th w="25%">Endpoint</Table.Th>
+                              <Table.Th>Labels</Table.Th>
+                              <Table.Th w={230}>Last scrape</Table.Th>
+                              <Table.Th w={100}>State</Table.Th>
+                            </Table.Tr>
+                          </Table.Thead>
+                          <Table.Tbody>
+                            {items.map((target, i) => (
+                              // TODO: Find a stable and definitely unique key.
+                              <React.Fragment key={i}>
+                                <Table.Tr
+                                  style={{
+                                    borderBottom: target.lastError
+                                      ? "none"
+                                      : undefined,
+                                  }}
+                                >
+                                  <Table.Td valign="top">
+                                    <EndpointLink
+                                      endpoint={target.scrapeUrl}
+                                      globalUrl={target.globalUrl}
+                                    />
+                                  </Table.Td>
+
+                                  <Table.Td valign="top">
+                                    <TargetLabels
+                                      labels={target.labels}
+                                      discoveredLabels={target.discoveredLabels}
+                                    />
+                                  </Table.Td>
+                                  <Table.Td valign="top">
+                                    <Group gap="xs" wrap="wrap">
+                                      <Tooltip
+                                        label="Last target scrape"
+                                        withArrow
+                                      >
+                                        <Badge
+                                          variant="light"
+                                          className={badgeClasses.statsBadge}
+                                          styles={{
+                                            label: { textTransform: "none" },
+                                          }}
+                                          leftSection={
+                                            <IconRefresh
+                                              style={badgeIconStyle}
+                                            />
+                                          }
+                                        >
+                                          {humanizeDurationRelative(
+                                            target.lastScrape,
+                                            now()
+                                          )}
+                                        </Badge>
+                                      </Tooltip>
+
+                                      <Tooltip
+                                        label="Duration of last target scrape"
+                                        withArrow
+                                      >
+                                        <Badge
+                                          variant="light"
+                                          className={badgeClasses.statsBadge}
+                                          styles={{
+                                            label: { textTransform: "none" },
+                                          }}
+                                          leftSection={
+                                            <IconHourglass
+                                              style={badgeIconStyle}
+                                            />
+                                          }
+                                        >
+                                          {humanizeDuration(
+                                            target.lastScrapeDuration * 1000
+                                          )}
+                                        </Badge>
+                                      </Tooltip>
+                                    </Group>
+                                  </Table.Td>
+                                  <Table.Td valign="top">
+                                    <Badge
+                                      className={healthBadgeClass(
+                                        target.health
+                                      )}
+                                    >
+                                      {target.health}
+                                    </Badge>
+                                  </Table.Td>
+                                </Table.Tr>
+                                {target.lastError && (
+                                  <Table.Tr>
+                                    <Table.Td colSpan={5} valign="top">
+                                      <Alert
+                                        color="red"
+                                        mb="sm"
+                                        icon={<IconAlertTriangle />}
+                                      >
+                                        <strong>Error scraping target:</strong>{" "}
+                                        {target.lastError}
+                                      </Alert>
+                                    </Table.Td>
+                                  </Table.Tr>
+                                )}
+                              </React.Fragment>
+                            ))}
+                          </Table.Tbody>
+                        </Table>
+                      )}
+                    />
+                  )}
+                </Accordion.Panel>
+              </Accordion.Item>
+            );
+          })}
+        </Accordion>
+      </Stack>
+    );
+  }
+);
 
 export default ScrapePoolList;
@@ -30,9 +30,16 @@ import ScrapePoolList from "./ScrapePoolsList";
 import { useSuspenseAPIQuery } from "../../api/api";
 import { ScrapePoolsResult } from "../../api/responseTypes/scrapePools";
 import { expandIconStyle, inputIconStyle } from "../../styles";
+import { useDebouncedValue } from "@mantine/hooks";
 
 export const targetPoolDisplayLimit = 20;
 
+// Should be defined as a constant here instead of inline as a value
+// to avoid unnecessary re-renders. Otherwise the empty array has
+// a different reference on each render and causes subsequent memoized
+// computations to re-run as long as no state filter is selected.
+const emptyHealthFilter: string[] = [];
+
 export default function TargetsPage() {
   // Load the list of all available scrape pools.
   const {
@@ -48,12 +55,13 @@ export default function TargetsPage() {
   const [scrapePool, setScrapePool] = useQueryParam("pool", StringParam);
   const [healthFilter, setHealthFilter] = useQueryParam(
     "health",
-    withDefault(ArrayParam, [])
+    withDefault(ArrayParam, emptyHealthFilter)
   );
   const [searchFilter, setSearchFilter] = useQueryParam(
     "search",
     withDefault(StringParam, "")
   );
+  const [debouncedSearch] = useDebouncedValue<string>(searchFilter.trim(), 250);
 
   const { collapsedPools, showLimitAlert } = useAppSelector(
     (state) => state.targetsPage
@@ -147,7 +155,7 @@ export default function TargetsPage() {
           poolNames={scrapePools}
           selectedPool={(limited && scrapePools[0]) || scrapePool || null}
           healthFilter={healthFilter as string[]}
-          searchFilter={searchFilter}
+          searchFilter={debouncedSearch}
         />
       </Suspense>
     </ErrorBoundary>