Merge pull request #75 from Gouthamve/head-gen

E2E test for headBlock
This commit is contained in:
Fabian Reinartz 2017-05-05 18:56:53 +02:00 committed by GitHub
commit 09cd2021de
4 changed files with 237 additions and 12 deletions

head.go View File

@ -296,6 +296,10 @@ type refdSample struct {
}
func (a *headAppender) Add(lset labels.Labels, t int64, v float64) (uint64, error) {
if !a.inBounds(t) {
return 0, ErrOutOfBounds
}
hash := lset.Hash()
if ms := a.get(hash, lset); ms != nil {
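The guard above makes the head appender reject samples whose timestamp falls outside the block's time range before any series lookup happens. A minimal, self-contained sketch of that behaviour, using a toy boundedAppender type and a stand-in error value rather than the package's headAppender and ErrOutOfBounds:

package main

import (
	"errors"
	"fmt"
)

// errOutOfBounds stands in for tsdb's ErrOutOfBounds in this sketch.
var errOutOfBounds = errors.New("out of bounds")

// boundedAppender is a toy stand-in for headAppender that keeps only the
// time-range check introduced in this hunk.
type boundedAppender struct {
	minTime, maxTime int64
}

func (a *boundedAppender) inBounds(t int64) bool {
	return t >= a.minTime && t <= a.maxTime
}

func (a *boundedAppender) Add(t int64, v float64) error {
	if !a.inBounds(t) {
		return errOutOfBounds
	}
	// The real appender would hash the label set and buffer the sample here.
	return nil
}

func main() {
	app := &boundedAppender{minTime: 200, maxTime: 2000}
	fmt.Println(app.Add(100, 1.0)) // out of bounds
	fmt.Println(app.Add(500, 1.0)) // <nil>
}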

head_test.go View File

@ -16,7 +16,9 @@ package tsdb
import (
"io/ioutil"
"math"
"math/rand"
"os"
"sort"
"testing"
"unsafe"
@ -185,3 +187,202 @@ func TestSkippingInvalidValuesInSameTxn(t *testing.T) {
}, ssMap)
require.NoError(t, q.Close())
}
func TestHeadBlock_e2e(t *testing.T) {
numDatapoints := 1000
numRanges := 1000
timeInterval := int64(3)
maxTime := int64(2 * 1000)
minTime := int64(200)
// Create 8 series with 1000 data-points of different ranges and run queries.
lbls := [][]labels.Label{
{
{"a", "b"},
{"instance", "localhost:9090"},
{"job", "prometheus"},
},
{
{"a", "b"},
{"instance", "127.0.0.1:9090"},
{"job", "prometheus"},
},
{
{"a", "b"},
{"instance", "127.0.0.1:9090"},
{"job", "prom-k8s"},
},
{
{"a", "b"},
{"instance", "localhost:9090"},
{"job", "prom-k8s"},
},
{
{"a", "c"},
{"instance", "localhost:9090"},
{"job", "prometheus"},
},
{
{"a", "c"},
{"instance", "127.0.0.1:9090"},
{"job", "prometheus"},
},
{
{"a", "c"},
{"instance", "127.0.0.1:9090"},
{"job", "prom-k8s"},
},
{
{"a", "c"},
{"instance", "localhost:9090"},
{"job", "prom-k8s"},
},
}
seriesMap := map[string][]sample{}
for _, l := range lbls {
seriesMap[labels.New(l...).String()] = []sample{}
}
tmpdir, _ := ioutil.TempDir("", "test")
defer os.RemoveAll(tmpdir)
hb, err := createHeadBlock(tmpdir+"/hb", 0, nil, minTime, maxTime)
require.NoError(t, err)
app := hb.Appender()
for _, l := range lbls {
ls := labels.New(l...)
series := []sample{}
ts := rand.Int63n(300)
for i := 0; i < numDatapoints; i++ {
v := rand.Float64()
if ts >= minTime && ts <= maxTime {
series = append(series, sample{ts, v})
}
_, err := app.Add(ls, ts, v)
if ts >= minTime && ts <= maxTime {
require.NoError(t, err)
} else {
require.Equal(t, ErrOutOfBounds, err)
}
ts += rand.Int63n(timeInterval) + 1
}
seriesMap[labels.New(l...).String()] = series
}
require.NoError(t, app.Commit())
// Query each selector on 1000 random time-ranges.
queries := []struct {
ms []labels.Matcher
}{
{
ms: []labels.Matcher{labels.NewEqualMatcher("a", "b")},
},
{
ms: []labels.Matcher{
labels.NewEqualMatcher("a", "b"),
labels.NewEqualMatcher("job", "prom-k8s"),
},
},
{
ms: []labels.Matcher{
labels.NewEqualMatcher("a", "c"),
labels.NewEqualMatcher("instance", "localhost:9090"),
labels.NewEqualMatcher("job", "prometheus"),
},
},
// TODO: Add Regexp Matchers.
}
for _, qry := range queries {
matched := labels.Slice{}
for _, ls := range lbls {
s := labels.Selector(qry.ms)
if s.Matches(ls) {
matched = append(matched, ls)
}
}
sort.Sort(matched)
for i := 0; i < numRanges; i++ {
mint := rand.Int63n(300)
maxt := mint + rand.Int63n(timeInterval*int64(numDatapoints))
q := hb.Querier(mint, maxt)
ss := q.Select(qry.ms...)
// Build the mockSeriesSet.
matchedSeries := make([]Series, 0, len(matched))
for _, m := range matched {
smpls := boundedSamples(seriesMap[m.String()], mint, maxt)
// Only append those series for which samples exist as mockSeriesSet
// doesn't skip series with no samples.
// TODO: But sometimes SeriesSet returns an empty SeriesIterator
if len(smpls) > 0 {
matchedSeries = append(matchedSeries, newSeries(
m.Map(),
smpls,
))
}
}
expSs := newListSeriesSet(matchedSeries)
// Compare both SeriesSets.
for {
eok, rok := expSs.Next(), ss.Next()
// Skip a series if iterator is empty.
if rok {
for !ss.At().Iterator().Next() {
rok = ss.Next()
if !rok {
break
}
}
}
require.Equal(t, eok, rok, "next")
if !eok {
break
}
sexp := expSs.At()
sres := ss.At()
require.Equal(t, sexp.Labels(), sres.Labels(), "labels")
smplExp, errExp := expandSeriesIterator(sexp.Iterator())
smplRes, errRes := expandSeriesIterator(sres.Iterator())
require.Equal(t, errExp, errRes, "samples error")
require.Equal(t, smplExp, smplRes, "samples")
}
}
}
return
}
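// boundedSamples assumes full is sorted by timestamp and returns the
// subslice whose samples fall within [mint, maxt].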
func boundedSamples(full []sample, mint, maxt int64) []sample {
for len(full) > 0 {
if full[0].t >= mint {
break
}
full = full[1:]
}
for i, s := range full {
// Terminate on the first sample larger than maxt.
if s.t > maxt {
return full[:i]
}
}
// maxt is after highest sample.
return full
}

querier.go View File

@ -413,14 +413,17 @@ func (s *populatedChunkSeries) Next() bool {
 	for s.set.Next() {
 		lset, chks := s.set.At()
 
-		from := -1
-		for i, c := range chks {
-			if c.MaxTime < s.mint {
-				from = i
-				continue
+		for len(chks) > 0 {
+			if chks[0].MaxTime >= s.mint {
+				break
 			}
+			chks = chks[1:]
+		}
+
+		// Break out at the first chunk that has no overlap with mint, maxt.
+		for i, c := range chks {
 			if c.MinTime > s.maxt {
-				chks = chks[from+1 : i]
+				chks = chks[:i]
 				break
 			}
 			c.Chunk, s.err = s.chunks.Chunk(c.Ref)
@ -428,6 +431,7 @@ func (s *populatedChunkSeries) Next() bool {
 				return false
 			}
 		}
+
 		if len(chks) == 0 {
 			continue
 		}
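Net effect of the change above: leading chunks that end before s.mint are dropped, the slice is cut at the first chunk that starts after s.maxt, and a series left with no chunks is skipped rather than returned with unpopulated chunk metas. A standalone sketch of just the trimming step, using a simplified chunkMeta type rather than the package's ChunkMeta:

package main

import "fmt"

// chunkMeta is a toy stand-in for tsdb's ChunkMeta; only the time range matters here.
type chunkMeta struct {
	MinTime, MaxTime int64
}

// trimChunks drops leading chunks that end before mint and cuts the slice at
// the first chunk that starts after maxt, mirroring the trimming done above.
func trimChunks(chks []chunkMeta, mint, maxt int64) []chunkMeta {
	for len(chks) > 0 && chks[0].MaxTime < mint {
		chks = chks[1:]
	}
	for i, c := range chks {
		if c.MinTime > maxt {
			return chks[:i]
		}
	}
	return chks
}

func main() {
	chks := []chunkMeta{{0, 5}, {6, 10}, {11, 15}, {16, 20}}
	// Only the chunks overlapping [7, 12] survive: {6 10} and {11 15}.
	fmt.Println(trimChunks(chks, 7, 12))
}

In the old code a chunk ending before mint only advanced the from index; when every chunk was skipped that way the slice was never re-sliced, so Next() could hand back chunk metas whose Chunk field was never populated. After the change such a series ends up with an empty chunk list and is skipped by the len(chks) == 0 check.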

querier_test.go View File

@ -41,6 +41,12 @@ type mockSeries struct {
iterator func() SeriesIterator
}
func newSeries(l map[string]string, s []sample) Series {
return &mockSeries{
labels: func() labels.Labels { return labels.FromMap(l) },
iterator: func() SeriesIterator { return newListSeriesIterator(s) },
}
}
func (m *mockSeries) Labels() labels.Labels { return m.labels() }
func (m *mockSeries) Iterator() SeriesIterator { return m.iterator() }
@ -81,12 +87,6 @@ func (it *listSeriesIterator) Err() error {
 }
 
 func TestMergedSeriesSet(t *testing.T) {
-	newSeries := func(l map[string]string, s []sample) Series {
-		return &mockSeries{
-			labels: func() labels.Labels { return labels.FromMap(l) },
-			iterator: func() SeriesIterator { return newListSeriesIterator(s) },
-		}
-	}
 	cases := []struct {
 		// The input sets in order (samples in series in b are strictly
@ -885,6 +885,22 @@ func TestPopulatedCSReturnsValidChunkSlice(t *testing.T) {
p.maxt = 9
require.False(t, p.Next())
// Test the case where 1 chunk could cause an unpopulated chunk to be returned.
chunkMetas = [][]*ChunkMeta{
{
{MinTime: 1, MaxTime: 2, Ref: 1},
},
}
m = &mockChunkSeriesSet{l: lbls, cm: chunkMetas, i: -1}
p = &populatedChunkSeries{
set: m,
chunks: cr,
mint: 10,
maxt: 15,
}
require.False(t, p.Next())
return
}