diff --git a/chunks_test.go b/chunks_test.go
index 77b8bc0ba..a05cd6859 100644
--- a/chunks_test.go
+++ b/chunks_test.go
@@ -1,8 +1,7 @@
 package tsdb
 
 import (
-	"errors"
-
+	"github.com/pkg/errors"
 	"github.com/prometheus/tsdb/chunks"
 )
 
diff --git a/postings_test.go b/postings_test.go
index 4801b486e..fc7290b2f 100644
--- a/postings_test.go
+++ b/postings_test.go
@@ -138,7 +138,7 @@ func TestMultiMerge(t *testing.T) {
 	}
 }
 
-func TestMerge(t *testing.T) {
+func TestMergedPostings(t *testing.T) {
 	var cases = []struct {
 		a, b []uint32
 		res  []uint32
@@ -169,72 +169,73 @@ func TestMerge(t *testing.T) {
 		require.Equal(t, c.res, res)
 	}
 
-	t.Run("Seek", func(t *testing.T) {
-		var cases = []struct {
-			a, b []uint32
+}
 
-			seek    uint32
-			success bool
-			res     []uint32
-		}{
-			{
-				a: []uint32{1, 2, 3, 4, 5},
-				b: []uint32{6, 7, 8, 9, 10},
+func TestMergedPostingsSeek(t *testing.T) {
+	var cases = []struct {
+		a, b []uint32
 
-				seek:    0,
-				success: true,
-				res:     []uint32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
-			},
-			{
-				a: []uint32{1, 2, 3, 4, 5},
-				b: []uint32{6, 7, 8, 9, 10},
+		seek    uint32
+		success bool
+		res     []uint32
+	}{
+		{
+			a: []uint32{1, 2, 3, 4, 5},
+			b: []uint32{6, 7, 8, 9, 10},
 
-				seek:    2,
-				success: true,
-				res:     []uint32{2, 3, 4, 5, 6, 7, 8, 9, 10},
-			},
-			{
-				a: []uint32{1, 2, 3, 4, 5},
-				b: []uint32{4, 5, 6, 7, 8},
+			seek:    0,
+			success: true,
+			res:     []uint32{1, 2, 3, 4, 5, 6, 7, 8, 9, 10},
+		},
+		{
+			a: []uint32{1, 2, 3, 4, 5},
+			b: []uint32{6, 7, 8, 9, 10},
 
-				seek:    9,
-				success: false,
-				res:     nil,
-			},
-			{
-				a: []uint32{1, 2, 3, 4, 9, 10},
-				b: []uint32{1, 4, 5, 6, 7, 8, 10, 11},
+			seek:    2,
+			success: true,
+			res:     []uint32{2, 3, 4, 5, 6, 7, 8, 9, 10},
+		},
+		{
+			a: []uint32{1, 2, 3, 4, 5},
+			b: []uint32{4, 5, 6, 7, 8},
 
-				seek:    10,
-				success: true,
-				res:     []uint32{10, 11},
-			},
-		}
+			seek:    9,
+			success: false,
+			res:     nil,
+		},
+		{
+			a: []uint32{1, 2, 3, 4, 9, 10},
+			b: []uint32{1, 4, 5, 6, 7, 8, 10, 11},
 
-		for _, c := range cases {
-			a := newListPostings(c.a)
-			b := newListPostings(c.b)
+			seek:    10,
+			success: true,
+			res:     []uint32{10, 11},
+		},
+	}
 
-			p := newMergedPostings(a, b)
+	for _, c := range cases {
+		a := newListPostings(c.a)
+		b := newListPostings(c.b)
 
-			require.Equal(t, c.success, p.Seek(c.seek))
+		p := newMergedPostings(a, b)
 
-			if c.success {
-				// check the current element and then proceed to check the rest.
-				i := 0
-				require.Equal(t, c.res[i], p.At())
+		require.Equal(t, c.success, p.Seek(c.seek))
 
-				for p.Next() {
-					i++
-					require.Equal(t, int(c.res[i]), int(p.At()))
-				}
+		if c.success {
+			// check the current element and then proceed to check the rest.
+			i := 0
+			require.Equal(t, c.res[i], p.At())
 
-				require.Equal(t, len(c.res)-1, i)
+			for p.Next() {
+				i++
+				require.Equal(t, int(c.res[i]), int(p.At()))
 			}
-		}
-		return
-	})
+			require.Equal(t, len(c.res)-1, i)
+		}
+	}
+
+	return
 }
 
 func TestBigEndian(t *testing.T) {
diff --git a/querier_test.go b/querier_test.go
index f4ade1af9..9ccc5b298 100644
--- a/querier_test.go
+++ b/querier_test.go
@@ -261,12 +261,26 @@ func TestBlockQuerier(t *testing.T) {
 		}
 	}
 
-	// Build the querier on data first. Then execute queries on it.
-	basedata := [][]struct {
-		lset   map[string]string
-		chunks [][]sample
+	type query struct {
+		dataIdx int
+
+		mint, maxt int64
+		ms         []labels.Matcher
+		exp        SeriesSet
+	}
+
+	cases := struct {
+		data []struct {
+			lset   map[string]string
+			chunks [][]sample
+		}
+
+		queries []query
 	}{
-		{
+		data: []struct {
+			lset   map[string]string
+			chunks [][]sample
+		}{
 			{
 				lset: map[string]string{
 					"a": "a",
@@ -308,65 +322,58 @@ func TestBlockQuerier(t *testing.T) {
 				},
 			},
 		},
-	}
-	cases := []struct {
-		dataIdx int
+		queries: []query{
+			{
+				dataIdx: 0,
 
-		mint, maxt int64
-		ms         []labels.Matcher
-		exp        SeriesSet
-	}{
-		{
-			dataIdx: 0,
+				mint: 0,
+				maxt: 0,
+				ms:   []labels.Matcher{},
+				exp:  newListSeriesSet([]Series{}),
+			},
+			{
+				dataIdx: 0,
 
-			mint: 0,
-			maxt: 0,
-			ms:   []labels.Matcher{},
-			exp:  newListSeriesSet([]Series{}),
-		},
-		{
-			dataIdx: 0,
+				mint: 0,
+				maxt: 0,
+				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
+				exp:  newListSeriesSet([]Series{}),
+			},
+			{
+				dataIdx: 0,
 
-			mint: 0,
-			maxt: 0,
-			ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
-			exp:  newListSeriesSet([]Series{}),
-		},
-		{
-			dataIdx: 0,
+				mint: 1,
+				maxt: 0,
+				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
+				exp:  newListSeriesSet([]Series{}),
+			},
+			{
+				dataIdx: 0,
 
-			mint: 1,
-			maxt: 0,
-			ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
-			exp:  newListSeriesSet([]Series{}),
-		},
-		{
-			dataIdx: 0,
-
-			mint: 2,
-			maxt: 6,
-			ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
-			exp: newListSeriesSet([]Series{
-				newSeries(map[string]string{
-					"a": "a",
-				},
-					[]sample{{2, 3}, {3, 4}, {5, 2}, {6, 3}},
-				),
-				newSeries(map[string]string{
-					"a": "a",
-					"b": "b",
-				},
-					[]sample{{2, 2}, {3, 3}, {5, 3}, {6, 6}},
-				),
-			}),
+				mint: 2,
+				maxt: 6,
+				ms:   []labels.Matcher{labels.NewEqualMatcher("a", "a")},
+				exp: newListSeriesSet([]Series{
+					newSeries(map[string]string{
+						"a": "a",
+					},
+						[]sample{{2, 3}, {3, 4}, {5, 2}, {6, 3}},
+					),
+					newSeries(map[string]string{
+						"a": "a",
+						"b": "b",
+					},
+						[]sample{{2, 2}, {3, 3}, {5, 3}, {6, 6}},
+					),
+				}),
+			},
 		},
 	}
 
 Outer:
-	for _, c := range cases {
-		ir, cr := createIdxChkReaders(basedata[c.dataIdx])
-
+	for _, c := range cases.queries {
+		ir, cr := createIdxChkReaders(cases.data)
 		querier := &blockQuerier{
 			index:  ir,
 			chunks: cr,
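Note (not part of the patch): a minimal sketch of the Seek contract that TestMergedPostingsSeek exercises above, written as a hypothetical helper in the same package. It assumes only the unexported newListPostings/newMergedPostings constructors and the Postings iterator methods (Seek, At, Next) that appear in the diff; the helper name itself is illustrative.

	package tsdb

	// seekThenCollect is a hypothetical helper: Seek(v) positions the merged
	// iterator at the first element >= v (duplicates across both input lists
	// are collapsed), and Next/At walk the remaining elements in ascending order.
	func seekThenCollect(a, b []uint32, v uint32) ([]uint32, bool) {
		p := newMergedPostings(newListPostings(a), newListPostings(b))
		if !p.Seek(v) {
			return nil, false // v lies beyond the last element of the merged set
		}
		out := []uint32{p.At()}
		for p.Next() {
			out = append(out, p.At())
		}
		return out, true
	}

For example, with a = {1, 2, 3, 4, 9, 10}, b = {1, 4, 5, 6, 7, 8, 10, 11} and v = 10, this returns ({10, 11}, true), which matches the last case in the test table above; with v = 9 on the third case's inputs it returns (nil, false).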