Fix rate() per-second adjustment.

This got broken during the depointerization of the Vector type.
Julius Volz 2013-04-15 14:39:05 +02:00
parent 62f33f1fc2
commit 1cff4f3d91
2 changed files with 8 additions and 2 deletions


@@ -137,8 +137,8 @@ func rateImpl(timestamp time.Time, view *viewAdapter, args []Node) interface{} {
 	// MatrixLiteral exists). Find a better way of getting the duration of a
 	// matrix, such as looking at the samples themselves.
 	interval := args[0].(*MatrixLiteral).interval
-	for _, sample := range vector {
-		sample.Value /= model.SampleValue(interval / time.Second)
+	for i, _ := range vector {
+		vector[i].Value /= model.SampleValue(interval / time.Second)
 	}
 	return vector
 }
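
For context, the root cause is a common Go pitfall: once Vector holds Sample values instead of pointers, ranging over it yields a copy of each element, so dividing the copy's Value never touched the stored samples. Indexing into the slice mutates the elements in place. The following is a minimal, self-contained sketch of both loops, using simplified stand-in types rather than the actual Prometheus model package:

package main

import "fmt"

// Simplified stand-ins: the real Sample also carries a metric and timestamp.
type Sample struct {
	Value float64
}

type Vector []Sample // value elements, as after the depointerization

func main() {
	v := Vector{{Value: 160}, {Value: 320}}

	// Broken: "sample" is a copy of each element, so the division is lost.
	for _, sample := range v {
		sample.Value /= 600
	}
	fmt.Println(v) // still [{160} {320}]

	// Fixed: indexing mutates the elements stored in the slice itself.
	for i := range v {
		v[i].Value /= 600
	}
	fmt.Println(v) // values now divided by 600
}

With the fix, rate() divides the per-interval delta by the window length in seconds. The expected value 0.26666668 in the new test case below is consistent with the series increasing by roughly 160 over the 10-minute (600-second) window, since 160 / 600 ≈ 0.2667 per second.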


@@ -232,6 +232,12 @@ var expressionTests = []struct {
 		output: []string{"http_requests{group='canary',instance='1',job='app-server'} => 288 @[%v]"},
 		fullRanges: 1,
 		intervalRanges: 0,
+	}, {
+		// Rates should transform per-interval deltas to per-second rates.
+		expr: "rate(http_requests{group='canary',instance='1',job='app-server'}[10m])",
+		output: []string{"http_requests{group='canary',instance='1',job='app-server'} => 0.26666668 @[%v]"},
+		fullRanges: 1,
+		intervalRanges: 0,
 	}, {
 		// Empty expressions shouldn't parse.
 		expr: "",