From 68b1bcc6488ba0f4fd30b143d05496c4954998f9 Mon Sep 17 00:00:00 2001 From: beorn7 Date: Tue, 10 Jun 2025 22:06:04 +0200 Subject: [PATCH] promql: fix typos in comments Signed-off-by: beorn7 --- promql/functions.go | 2 +- promql/promqltest/testdata/functions.test | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/promql/functions.go b/promql/functions.go index 7e5205844a..4eb6dfb65e 100644 --- a/promql/functions.go +++ b/promql/functions.go @@ -682,7 +682,7 @@ func funcAvgOverTime(vals []parser.Value, args parser.Expressions, enh *EvalNode // https://stackoverflow.com/questions/61665473/is-it-beneficial-for-precision-to-calculate-the-incremental-mean-average // Additional note: For even better numerical accuracy, we would need to // process the values in a particular order. For avg_over_time, that - // would be more or less feasible, but it would be more expensivo, and + // would be more or less feasible, but it would be more expensive, and // it would also be much harder for the avg aggregator, given how the // PromQL engine works. if len(firstSeries.Floats) == 0 { diff --git a/promql/promqltest/testdata/functions.test b/promql/promqltest/testdata/functions.test index c39b387f0c..1d7b7ed2d4 100644 --- a/promql/promqltest/testdata/functions.test +++ b/promql/promqltest/testdata/functions.test @@ -1086,8 +1086,8 @@ load 5s # relevant this scenario is. # eval instant at 55s avg_over_time(metric11[1m]) # {} -44.848083237000004 <- This is the correct value. -# {} -1.881783551706252e+203 <- This is the relust on linux/amd64. -# {} 2.303079268822384e+202 <- This is the relust on darwin/arm64. +# {} -1.881783551706252e+203 <- This is the result on linux/amd64. +# {} 2.303079268822384e+202 <- This is the result on darwin/arm64. # Test per-series aggregation on dense samples. clear