Mirror of https://github.com/prometheus/prometheus.git (synced 2025-08-06 14:17:12 +02:00)

Merge pull request #16327 from machine424/3.3rc1

Prepare release 3.3.0-rc.1

Commit 293f0c9185

@@ -17,7 +17,8 @@ linters:
     - loggercheck
     - misspell
     - nilnesserr
-    - nolintlint
+    # TODO(bwplotka): Enable once https://github.com/golangci/golangci-lint/issues/3228 is fixed.
+    # - nolintlint
     - perfsprint
     - predeclared
     - revive

@@ -2,6 +2,13 @@
 
 ## unreleased
 
+## 3.3.0-rc.1 / 2025-04-02
+
+* [BUGFIX] Remote-Write: Reduce memory footprint during WAL replay. #16197
+* [BUGFIX] Scraping: Skip native histograms series when ingestion is disabled. #16218
+* [BUGFIX] UI: Display the correct value of Alerting rules' `keep_firing_for`. #16211
+* [BUGFIX] PromQL: return NaN from `irate()` if second-last sample is NaN. #16199 #15853
+
 ## 3.3.0-rc.0 / 2025-03-11
 
 * [FEATURE] PromQL: Implement `idelta()` and `irate()` for native histograms. #15853

@@ -367,10 +367,11 @@ func instantValue(vals []parser.Value, args parser.Expressions, out Vector, isRa
 	}
 	switch {
 	case ss[1].H == nil && ss[0].H == nil:
-		if !isRate || ss[1].F >= ss[0].F {
-			// Gauge or counter without reset.
+		if !isRate || !(ss[1].F < ss[0].F) {
+			// Gauge, or counter without reset, or counter with NaN value.
 			resultSample.F = ss[1].F - ss[0].F
 		}
+
 		// In case of a counter reset, we leave resultSample at
 		// its current value, which is already ss[1].
 	case ss[1].H != nil && ss[0].H != nil:
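
The guard rewrite above is what makes the NaN bugfix work: in Go, as in IEEE 754, every ordered comparison involving NaN is false, so the old `ss[1].F >= ss[0].F` rejected a NaN sample and fell through to the counter-reset path (returning the last value), while `!(ss[1].F < ss[0].F)` is true for NaN and lets the subtraction propagate it. A minimal standalone sketch of just that comparison, using made-up sample values rather than the real instantValue inputs:

package main

import (
	"fmt"
	"math"
)

func main() {
	// Hypothetical samples: prev is the second-last sample, cur the last one.
	prev := math.NaN()
	cur := 11.0

	// Old guard: any comparison involving NaN is false, so the pair was
	// treated like a counter reset and the last value was returned.
	fmt.Println(cur >= prev) // false

	// New guard: true when prev is NaN, so cur-prev (= NaN) is returned,
	// matching the changelog entry for #16199.
	fmt.Println(!(cur < prev)) // true
	fmt.Println(cur - prev)    // NaN
}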

promql/promqltest/testdata/functions.test (vendored; 8 changed lines)

@@ -218,6 +218,7 @@ clear
 load 5m
   http_requests_total{path="/foo"} 0+10x10
   http_requests_total{path="/bar"} 0+10x5 0+10x5
+  http_requests_nan{} 1 NaN NaN 5 11
   http_requests_histogram{path="/a"} {{sum:2 count:2}}+{{sum:3 count:3}}x5
   http_requests_histogram{path="/b"} 0 0 {{sum:1 count:1}} {{sum:4 count:4}}
   http_requests_histogram{path="/c"} 0 0 {{sum:1 count:1}} {{sum:4 count:4 counter_reset_hint:gauge}}
@@ -235,6 +236,9 @@ eval instant at 30m irate(http_requests_total[50m])
   {path="/foo"} .03333333333333333333
   {path="/bar"} 0
 
+eval range from 0 to 20m step 5m irate(http_requests_nan[15m1s])
+  {} _ NaN NaN NaN 0.02
+
 eval instant at 20m irate(http_requests_histogram{path="/a"}[20m])
   {path="/a"} {{sum:0.01 count:0.01 counter_reset_hint:gauge}}
 
@@ -288,6 +292,7 @@ clear
 load 5m
   http_requests{path="/foo"} 0 50 100 150
   http_requests{path="/bar"} 0 50 100 50
+  http_requests_nan{} 1 NaN NaN 5 11
   http_requests_histogram{path="/a"} {{sum:2 count:2 counter_reset_hint:gauge}}+{{sum:1 count:3 counter_reset_hint:gauge}}x5
   http_requests_histogram{path="/b"} 0 0 {{sum:1 count:1 counter_reset_hint:gauge}} {{sum:2 count:2 counter_reset_hint:gauge}}
   http_requests_histogram{path="/c"} 0 0 {{sum:1 count:1}} {{sum:2 count:2 counter_reset_hint:gauge}}
@@ -300,6 +305,9 @@ eval instant at 20m idelta(http_requests[20m])
   {path="/foo"} 50
   {path="/bar"} -50
 
+eval range from 0 to 20m step 5m idelta(http_requests_nan[15m1s])
+  {} _ NaN NaN NaN 6
+
 eval instant at 20m idelta(http_requests_histogram{path="/a"}[20m])
   {path="/a"} {{sum:1 count:3 counter_reset_hint:gauge}}
 
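
For reference, the new expectations follow directly from the http_requests_nan series loaded above (samples 1, NaN, NaN, 5, 11 at 5-minute spacing): at the 20m step the two most recent samples inside the 15m1s window are 5 and 11, so idelta() yields 11 - 5 = 6 and irate() yields 6 / 300s = 0.02; at the 5m, 10m and 15m steps one of the two most recent samples is NaN, so the result is NaN; and at 0m only a single sample is in range, hence no output (`_`).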

@@ -1700,7 +1700,7 @@ loop:
 				t = *parsedTimestamp
 			}
 
-			if sl.cache.getDropped(met) {
+			if sl.cache.getDropped(met) || isHistogram && !sl.enableNativeHistogramIngestion {
 				continue
 			}
 			ce, seriesCached, seriesAlreadyScraped := sl.cache.get(met)
@@ -1748,7 +1748,7 @@ loop:
 			} else {
 				if sl.enableCTZeroIngestion {
 					if ctMs := p.CreatedTimestamp(); ctMs != 0 {
-						if isHistogram && sl.enableNativeHistogramIngestion {
+						if isHistogram {
 							if h != nil {
 								ref, err = app.AppendHistogramCTZeroSample(ref, lset, t, ctMs, h, nil)
 							} else {
@@ -1765,7 +1765,7 @@ loop:
 					}
 				}
 
-				if isHistogram && sl.enableNativeHistogramIngestion {
+				if isHistogram {
 					if h != nil {
 						ref, err = app.AppendHistogram(ref, lset, t, h, nil)
 					} else {
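
Net effect of the three scrape-loop hunks above: when sl.enableNativeHistogramIngestion is false, a native-histogram series is now rejected at the same early point as series dropped by relabelling, so the later created-timestamp and append branches only need to test isHistogram. A rough, self-contained sketch of the reordered decision (the types and names below are simplified placeholders, not the actual scrape-loop code):

package main

import "fmt"

// Hypothetical stand-ins for the scrape loop's state; the real code works on
// parsed samples, a scrape cache and a storage.Appender.
type sample struct {
	metric      string
	isHistogram bool
}

type scrapeCache struct{ dropped map[string]bool }

func (c *scrapeCache) getDropped(metric string) bool { return c.dropped[metric] }

// ingest mirrors the reordered checks: histogram series are skipped up front
// when native-histogram ingestion is off, so the branches below only need to
// ask whether the sample is a histogram at all.
func ingest(s sample, c *scrapeCache, nativeHistogramsEnabled bool) string {
	if c.getDropped(s.metric) || s.isHistogram && !nativeHistogramsEnabled {
		return "skipped"
	}
	if s.isHistogram {
		return "AppendHistogram"
	}
	return "Append"
}

func main() {
	c := &scrapeCache{dropped: map[string]bool{}}
	fmt.Println(ingest(sample{"test_histogram", true}, c, false))       // skipped
	fmt.Println(ingest(sample{"test_histogram", true}, c, true))        // AppendHistogram
	fmt.Println(ingest(sample{"http_requests_total", false}, c, false)) // Append
}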

@@ -2395,7 +2395,7 @@ metric_total{n="2"} 2 # {t="2"} 2.0 20000
 			},
 		},
 		{
-			title: "Native histogram with three exemplars",
+			title: "Native histogram with three exemplars from classic buckets",
 
 			enableNativeHistogramsIngestion: true,
 
 			scrapeText: `name: "test_histogram"
@@ -2644,6 +2644,180 @@ metric: <
 				{Labels: labels.FromStrings("dummyID", "58215"), Value: -0.00019, Ts: 1625851055146, HasTs: true},
 			},
 		},
+		{
+			title: "Native histogram with exemplars and no classic buckets",
+			contentType: "application/vnd.google.protobuf",
+			enableNativeHistogramsIngestion: true,
+			scrapeText: `name: "test_histogram"
+help: "Test histogram."
+type: HISTOGRAM
+metric: <
+  histogram: <
+    sample_count: 175
+    sample_sum: 0.0008280461746287094
+    schema: 3
+    zero_threshold: 2.938735877055719e-39
+    zero_count: 2
+    negative_span: <
+      offset: -162
+      length: 1
+    >
+    negative_span: <
+      offset: 23
+      length: 4
+    >
+    negative_delta: 1
+    negative_delta: 3
+    negative_delta: -2
+    negative_delta: -1
+    negative_delta: 1
+    positive_span: <
+      offset: -161
+      length: 1
+    >
+    positive_span: <
+      offset: 8
+      length: 3
+    >
+    positive_delta: 1
+    positive_delta: 2
+    positive_delta: -1
+    positive_delta: -1
+    exemplars: <
+      label: <
+        name: "dummyID"
+        value: "59732"
+      >
+      value: -0.00039
+      timestamp: <
+        seconds: 1625851155
+        nanos: 146848499
+      >
+    >
+    exemplars: <
+      label: <
+        name: "dummyID"
+        value: "58242"
+      >
+      value: -0.00019
+      timestamp: <
+        seconds: 1625851055
+        nanos: 146848599
+      >
+    >
+    exemplars: <
+      label: <
+        name: "dummyID"
+        value: "5617"
+      >
+      value: -0.00029
+    >
+  >
+  timestamp_ms: 1234568
+>
+
+`,
+			histograms: []histogramSample{{
+				t: 1234568,
+				metric: labels.FromStrings("__name__", "test_histogram"),
+				h: &histogram.Histogram{
+					Count: 175,
+					ZeroCount: 2,
+					Sum: 0.0008280461746287094,
+					ZeroThreshold: 2.938735877055719e-39,
+					Schema: 3,
+					PositiveSpans: []histogram.Span{
+						{Offset: -161, Length: 1},
+						{Offset: 8, Length: 3},
+					},
+					NegativeSpans: []histogram.Span{
+						{Offset: -162, Length: 1},
+						{Offset: 23, Length: 4},
+					},
+					PositiveBuckets: []int64{1, 2, -1, -1},
+					NegativeBuckets: []int64{1, 3, -2, -1, 1},
+				},
+			}},
+			exemplars: []exemplar.Exemplar{
+				// Exemplars with missing timestamps are dropped for native histograms.
+				{Labels: labels.FromStrings("dummyID", "58242"), Value: -0.00019, Ts: 1625851055146, HasTs: true},
+				{Labels: labels.FromStrings("dummyID", "59732"), Value: -0.00039, Ts: 1625851155146, HasTs: true},
+			},
+		},
+		{
+			title: "Native histogram with exemplars but ingestion disabled",
+			contentType: "application/vnd.google.protobuf",
+			enableNativeHistogramsIngestion: false,
+			scrapeText: `name: "test_histogram"
+help: "Test histogram."
+type: HISTOGRAM
+metric: <
+  histogram: <
+    sample_count: 175
+    sample_sum: 0.0008280461746287094
+    schema: 3
+    zero_threshold: 2.938735877055719e-39
+    zero_count: 2
+    negative_span: <
+      offset: -162
+      length: 1
+    >
+    negative_span: <
+      offset: 23
+      length: 4
+    >
+    negative_delta: 1
+    negative_delta: 3
+    negative_delta: -2
+    negative_delta: -1
+    negative_delta: 1
+    positive_span: <
+      offset: -161
+      length: 1
+    >
+    positive_span: <
+      offset: 8
+      length: 3
+    >
+    positive_delta: 1
+    positive_delta: 2
+    positive_delta: -1
+    positive_delta: -1
+    exemplars: <
+      label: <
+        name: "dummyID"
+        value: "59732"
+      >
+      value: -0.00039
+      timestamp: <
+        seconds: 1625851155
+        nanos: 146848499
+      >
+    >
+    exemplars: <
+      label: <
+        name: "dummyID"
+        value: "58242"
+      >
+      value: -0.00019
+      timestamp: <
+        seconds: 1625851055
+        nanos: 146848599
+      >
+    >
+    exemplars: <
+      label: <
+        name: "dummyID"
+        value: "5617"
+      >
+      value: -0.00029
+    >
+  >
+  timestamp_ms: 1234568
+>
+
+`,
+		},
 	}
 
 	for _, test := range tests {

@@ -491,12 +491,13 @@ func (w *Watcher) readSegment(r *LiveReader, segmentNum int, tail bool) error {
 		metadata []record.RefMetadata
 	)
 	for r.Next() && !isClosed(w.quit) {
+		var err error
 		rec := r.Record()
 		w.recordsReadMetric.WithLabelValues(dec.Type(rec).String()).Inc()
 
 		switch dec.Type(rec) {
 		case record.Series:
-			series, err := dec.Series(rec, series[:0])
+			series, err = dec.Series(rec, series[:0])
 			if err != nil {
 				w.recordDecodeFailsMetric.Inc()
 				return err
@@ -509,7 +510,7 @@ func (w *Watcher) readSegment(r *LiveReader, segmentNum int, tail bool) error {
 			if !tail {
 				break
 			}
-			samples, err := dec.Samples(rec, samples[:0])
+			samples, err = dec.Samples(rec, samples[:0])
 			if err != nil {
 				w.recordDecodeFailsMetric.Inc()
 				return err
@@ -539,7 +540,7 @@ func (w *Watcher) readSegment(r *LiveReader, segmentNum int, tail bool) error {
 			if !tail {
 				break
 			}
-			exemplars, err := dec.Exemplars(rec, exemplars[:0])
+			exemplars, err = dec.Exemplars(rec, exemplars[:0])
 			if err != nil {
 				w.recordDecodeFailsMetric.Inc()
 				return err
@@ -554,7 +555,7 @@ func (w *Watcher) readSegment(r *LiveReader, segmentNum int, tail bool) error {
 			if !tail {
 				break
 			}
-			histograms, err := dec.HistogramSamples(rec, histograms[:0])
+			histograms, err = dec.HistogramSamples(rec, histograms[:0])
 			if err != nil {
 				w.recordDecodeFailsMetric.Inc()
 				return err
@@ -582,7 +583,7 @@ func (w *Watcher) readSegment(r *LiveReader, segmentNum int, tail bool) error {
 			if !tail {
 				break
 			}
-			floatHistograms, err := dec.FloatHistogramSamples(rec, floatHistograms[:0])
+			floatHistograms, err = dec.FloatHistogramSamples(rec, floatHistograms[:0])
 			if err != nil {
 				w.recordDecodeFailsMetric.Inc()
 				return err
@@ -606,12 +607,12 @@ func (w *Watcher) readSegment(r *LiveReader, segmentNum int, tail bool) error {
 			if !w.sendMetadata {
 				break
 			}
-			meta, err := dec.Metadata(rec, metadata[:0])
+			metadata, err = dec.Metadata(rec, metadata[:0])
 			if err != nil {
 				w.recordDecodeFailsMetric.Inc()
 				return err
 			}
-			w.writer.StoreMetadata(meta)
+			w.writer.StoreMetadata(metadata)
 
 		case record.Unknown:
 			// Could be corruption, or reading from a WAL from a newer Prometheus.
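
These watcher hunks line up with the "Reduce memory footprint during WAL replay" changelog entry (#16197): with ":=" inside each case block, the decoded slice was a new variable that shadowed the one declared before the loop, so the samples[:0]-style reuse never carried a grown buffer from one record to the next and every record allocated afresh. Declaring var err error once per iteration and assigning with "=" writes back into the outer slices so their backing arrays are recycled. A toy illustration of the shadowing pitfall and the fixed pattern (generic code, not the watcher itself):

package main

import "fmt"

// decode appends n dummy items into buf and returns the possibly grown slice,
// in the same spirit as the WAL record decoder helpers.
func decode(buf []int, n int) ([]int, error) {
	for i := 0; i < n; i++ {
		buf = append(buf, i)
	}
	return buf, nil
}

func main() {
	var shadowed, reused []int

	for i := 0; i < 3; i++ {
		// Bug pattern: ":=" declares a new "shadowed" variable local to this
		// block, so the outer slice never grows and each record reallocates.
		shadowed, err := decode(shadowed[:0], 128)
		_, _ = shadowed, err

		// Fixed pattern: assign into the outer slice; after the first
		// iteration its capacity is simply reused.
		var err2 error
		reused, err2 = decode(reused[:0], 128)
		_ = err2
	}

	fmt.Println(cap(shadowed), cap(reused)) // outer slice stayed at 0, the reused one kept its buffer
}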

@@ -1,7 +1,7 @@
 {
   "name": "@prometheus-io/mantine-ui",
   "private": true,
-  "version": "0.303.0-rc.0",
+  "version": "0.303.0-rc.1",
   "type": "module",
   "scripts": {
     "start": "vite",
@@ -28,7 +28,7 @@
     "@microsoft/fetch-event-source": "^2.0.1",
     "@nexucis/fuzzy": "^0.5.1",
     "@nexucis/kvsearch": "^0.9.1",
-    "@prometheus-io/codemirror-promql": "0.303.0-rc.0",
+    "@prometheus-io/codemirror-promql": "0.303.0-rc.1",
     "@reduxjs/toolkit": "^2.5.0",
     "@tabler/icons-react": "^3.28.1",
     "@tanstack/react-query": "^5.67.1",

@@ -85,7 +85,7 @@ const RuleDefinition: FC<{ rule: Rule }> = ({ rule }) => {
               styles={{ label: { textTransform: "none" } }}
               leftSection={<IconClockPlay style={badgeIconStyle} />}
             >
-              keep_firing_for: {formatPrometheusDuration(rule.duration * 1000)}
+              keep_firing_for: {formatPrometheusDuration(rule.keepFiringFor * 1000)}
             </Badge>
           )}
         </Group>

@@ -1,6 +1,6 @@
 {
   "name": "@prometheus-io/codemirror-promql",
-  "version": "0.303.0-rc.0",
+  "version": "0.303.0-rc.1",
   "description": "a CodeMirror mode for the PromQL language",
   "types": "dist/esm/index.d.ts",
   "module": "dist/esm/index.js",
@@ -29,7 +29,7 @@
   },
   "homepage": "https://github.com/prometheus/prometheus/blob/main/web/ui/module/codemirror-promql/README.md",
   "dependencies": {
-    "@prometheus-io/lezer-promql": "0.303.0-rc.0",
+    "@prometheus-io/lezer-promql": "0.303.0-rc.1",
     "lru-cache": "^11.0.2"
   },
   "devDependencies": {

@@ -1,6 +1,6 @@
 {
   "name": "@prometheus-io/lezer-promql",
-  "version": "0.303.0-rc.0",
+  "version": "0.303.0-rc.1",
   "description": "lezer-based PromQL grammar",
   "main": "dist/index.cjs",
   "type": "module",

web/ui/package-lock.json (generated; 14 changed lines)

@@ -1,12 +1,12 @@
 {
   "name": "prometheus-io",
-  "version": "0.303.0-rc.0",
+  "version": "0.303.0-rc.1",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "prometheus-io",
-      "version": "0.303.0-rc.0",
+      "version": "0.303.0-rc.1",
       "workspaces": [
         "mantine-ui",
         "module/*"
@@ -24,7 +24,7 @@
     },
     "mantine-ui": {
       "name": "@prometheus-io/mantine-ui",
-      "version": "0.303.0-rc.0",
+      "version": "0.303.0-rc.1",
       "dependencies": {
         "@codemirror/autocomplete": "^6.18.4",
         "@codemirror/language": "^6.10.8",
@@ -42,7 +42,7 @@
         "@microsoft/fetch-event-source": "^2.0.1",
         "@nexucis/fuzzy": "^0.5.1",
         "@nexucis/kvsearch": "^0.9.1",
-        "@prometheus-io/codemirror-promql": "0.303.0-rc.0",
+        "@prometheus-io/codemirror-promql": "0.303.0-rc.1",
         "@reduxjs/toolkit": "^2.5.0",
         "@tabler/icons-react": "^3.28.1",
         "@tanstack/react-query": "^5.67.1",
@@ -156,10 +156,10 @@
     },
     "module/codemirror-promql": {
       "name": "@prometheus-io/codemirror-promql",
-      "version": "0.303.0-rc.0",
+      "version": "0.303.0-rc.1",
       "license": "Apache-2.0",
       "dependencies": {
-        "@prometheus-io/lezer-promql": "0.303.0-rc.0",
+        "@prometheus-io/lezer-promql": "0.303.0-rc.1",
         "lru-cache": "^11.0.2"
       },
       "devDependencies": {
@@ -189,7 +189,7 @@
     },
     "module/lezer-promql": {
       "name": "@prometheus-io/lezer-promql",
-      "version": "0.303.0-rc.0",
+      "version": "0.303.0-rc.1",
       "license": "Apache-2.0",
       "devDependencies": {
         "@lezer/generator": "^1.7.2",

@@ -1,7 +1,7 @@
 {
   "name": "prometheus-io",
   "description": "Monorepo for the Prometheus UI",
-  "version": "0.303.0-rc.0",
+  "version": "0.303.0-rc.1",
   "private": true,
   "scripts": {
     "build": "bash build_ui.sh --all",