Check postings bytes length when decoding (#9766)
Added validation of the expected postings length against the length of the bytes slice. With 32-bit postings, we expect 4 bytes per posting. If the numbers don't add up, we know the input data is not compatible with our code (maybe it's truncated, padded with garbage, or even written with a different codec). This is needed in downstream projects to correctly identify cached postings written with an unknown codec, but it's also a good idea to validate it here.

Signed-off-by: Oleg Zaytsev <mail@olegzaytsev.com>
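For context, a minimal, runnable sketch of the framing this check assumes: a 4-byte big-endian count n followed by exactly n 4-byte big-endian series references. encodePostings and validate are illustrative helpers for this note, not Prometheus APIs:

package main

import (
	"encoding/binary"
	"fmt"
)

// encodePostings writes a 4-byte big-endian count followed by one
// 4-byte big-endian value per posting.
func encodePostings(refs []uint32) []byte {
	b := make([]byte, 4+4*len(refs))
	binary.BigEndian.PutUint32(b, uint32(len(refs)))
	for i, r := range refs {
		binary.BigEndian.PutUint32(b[4+4*i:], r)
	}
	return b
}

// validate mirrors the new check: after the count, the payload must be
// exactly 4*n bytes; otherwise the input was truncated, padded, or
// written with a different codec.
func validate(b []byte) error {
	if len(b) < 4 {
		return fmt.Errorf("unexpected postings length: %d bytes, need at least 4", len(b))
	}
	n := int(binary.BigEndian.Uint32(b))
	if got := len(b) - 4; got != 4*n {
		return fmt.Errorf("should be %d bytes for %d postings, got %d bytes", 4*n, n, got)
	}
	return nil
}

func main() {
	ok := encodePostings([]uint32{10, 42, 1337})
	fmt.Println(validate(ok))             // <nil>
	fmt.Println(validate(ok[:len(ok)-2])) // error: truncated input
}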
This commit is contained in:
parent d9a8c453a0
commit 5e746e4e88
@@ -18,6 +18,7 @@ import (
 	"bytes"
 	"context"
 	"encoding/binary"
+	"fmt"
 	"hash"
 	"hash/crc32"
 	"io"
@@ -1782,7 +1783,13 @@ func (dec *Decoder) Postings(b []byte) (int, Postings, error) {
 	d := encoding.Decbuf{B: b}
 	n := d.Be32int()
 	l := d.Get()
-	return n, newBigEndianPostings(l), d.Err()
+	if d.Err() != nil {
+		return 0, nil, d.Err()
+	}
+	if len(l) != 4*n {
+		return 0, nil, fmt.Errorf("unexpected postings length, should be %d bytes for %d postings, got %d bytes", 4*n, n, len(l))
+	}
+	return n, newBigEndianPostings(l), nil
 }
 
 // LabelNamesOffsetsFor decodes the offsets of the name symbols for a given series.
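As the commit message notes, downstream projects rely on this error to identify cached postings written with an unknown codec. A hedged sketch of that fallback pattern — fetchFromCache and readFromIndex are hypothetical stand-ins; only index.Decoder.Postings comes from this code:

package main

import (
	"github.com/prometheus/prometheus/tsdb/index"
)

// Hypothetical stand-ins for a postings cache and the underlying index.
func fetchFromCache(key string) ([]byte, bool)         { return nil, false }
func readFromIndex(key string) (index.Postings, error) { return index.EmptyPostings(), nil }

// postingsFromCache decodes cached bytes and, if the length check (or any
// other decoding step) fails, discards them and re-reads from the index.
func postingsFromCache(dec *index.Decoder, key string) (index.Postings, error) {
	if b, ok := fetchFromCache(key); ok {
		if _, p, err := dec.Postings(b); err == nil {
			return p, nil
		}
		// Cached bytes were truncated, padded, or written with an
		// unknown codec; fall through and rebuild from the index.
	}
	return readFromIndex(key)
}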
@@ -560,3 +560,8 @@ func TestSymbols(t *testing.T) {
 	}
 	require.NoError(t, iter.Err())
 }
+
+func TestDecoder_Postings_WrongInput(t *testing.T) {
+	_, _, err := (&Decoder{}).Postings([]byte("the cake is a lie"))
+	require.Error(t, err)
+}
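A companion happy-path test (not part of this commit, just a sketch of well-formed input, assuming the same package and imports as the existing test) would pass the new check with a count of 2 and exactly 8 payload bytes:

func TestDecoder_Postings_GoodInput(t *testing.T) {
	b := []byte{
		0x00, 0x00, 0x00, 0x02, // n = 2 postings
		0x00, 0x00, 0x00, 0x0a, // posting 10
		0x00, 0x00, 0x00, 0x2a, // posting 42
	}
	n, p, err := (&Decoder{}).Postings(b)
	require.NoError(t, err)
	require.Equal(t, 2, n)
	require.True(t, p.Next())
	require.True(t, p.Next())
	require.False(t, p.Next())
	require.NoError(t, p.Err())
}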