Merge branch 'beorn7/storage2' into beorn7/storage3

beorn7 2016-03-02 16:11:25 +01:00
commit b6840997a7
4 changed files with 12 additions and 10 deletions

View File

@@ -136,8 +136,10 @@ func (a *Analyzer) Prepare(ctx context.Context) (local.Preloader, error) {
}()
// Preload all analyzed ranges.
- iters := map[model.Fingerprint]local.SeriesIterator{}
+ iters := map[time.Duration]map[model.Fingerprint]local.SeriesIterator{}
for offset, pt := range a.offsetPreloadTimes {
+ itersForDuration := map[model.Fingerprint]local.SeriesIterator{}
+ iters[offset] = itersForDuration
start := a.Start.Add(-offset)
end := a.End.Add(-offset)
for fp, rangeDuration := range pt.ranges {
@@ -148,7 +150,7 @@ func (a *Analyzer) Prepare(ctx context.Context) (local.Preloader, error) {
if err != nil {
return nil, err
}
- iters[fp] = iter
+ itersForDuration[fp] = iter
}
for fp := range pt.instants {
if err = contextDone(ctx, env); err != nil {
@@ -158,7 +160,7 @@ func (a *Analyzer) Prepare(ctx context.Context) (local.Preloader, error) {
if err != nil {
return nil, err
}
- iters[fp] = iter
+ itersForDuration[fp] = iter
}
}
@@ -167,11 +169,11 @@ func (a *Analyzer) Prepare(ctx context.Context) (local.Preloader, error) {
switch n := node.(type) {
case *VectorSelector:
for fp := range n.metrics {
- n.iterators[fp] = iters[fp]
+ n.iterators[fp] = iters[n.Offset][fp]
}
case *MatrixSelector:
for fp := range n.metrics {
- n.iterators[fp] = iters[fp]
+ n.iterators[fp] = iters[n.Offset][fp]
}
}
return true
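
What the hunks above change: Prepare used to keep a single flat map from fingerprint to preloaded iterator, so a query that selects the same series under two different offsets would have one preload overwrite the other under the same key. Keying the map by offset first and by fingerprint second keeps one iterator per (offset, series) pair, and each selector now looks up its iterator via its own Offset. A minimal sketch of that shape, using hypothetical stand-in types rather than the real model.Fingerprint and local.SeriesIterator:

package main

import (
	"fmt"
	"time"
)

// Hypothetical stand-ins for model.Fingerprint and local.SeriesIterator,
// only here to illustrate the nested-map shape introduced by the change.
type fingerprint uint64
type seriesIterator struct{ offset time.Duration }

func main() {
	// One inner map per offset: the same fingerprint can appear under
	// several offsets without one iterator clobbering the other.
	iters := map[time.Duration]map[fingerprint]seriesIterator{}

	for _, offset := range []time.Duration{0, 5 * time.Minute} {
		itersForDuration := map[fingerprint]seriesIterator{}
		iters[offset] = itersForDuration
		itersForDuration[fingerprint(42)] = seriesIterator{offset: offset}
	}

	// A selector looks up its iterator by its own offset first.
	fmt.Println(iters[5*time.Minute][fingerprint(42)].offset) // 5m0s
}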

View File

@@ -54,7 +54,7 @@ const (
)
// chunkDesc contains meta-data for a chunk. Pay special attention to the
- // documented requirements for calling its method concurrently (WRT pinning and
+ // documented requirements for calling its methods concurrently (WRT pinning and
// locking). The doc comments spell out the requirements for each method, but
// here is an overview and general explanation:
//
@@ -71,7 +71,7 @@ const (
// or creation) or by locking the fingerprint of the series the chunkDesc
// belongs to. The affected methods are: add, maybePopulateLastTime, setChunk.
//
- // Finally, there is the special cases firstTime and lastTime. lastTime requires
+ // Finally, there are the special cases firstTime and lastTime. lastTime requires
// to have locked the fingerprint of the series but the chunk does not need to
// be pinned. That's because the chunkLastTime field in chunkDesc gets populated
// upon completion of the chunk (when it is still pinned, and which happens
@@ -267,7 +267,7 @@ type chunkIterator interface {
// Gets the last sample value in the chunk.
lastSampleValue() model.SampleValue
// Gets the value that is closest before the given time. In case a value
- // exist at precisely the given time, that value is returned. If no
+ // exists at precisely the given time, that value is returned. If no
// applicable value exists, ZeroSamplePair is returned.
valueAtOrBeforeTime(model.Time) model.SamplePair
// Gets all values contained within a given interval.
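
The comment fix above also restates the contract of valueAtOrBeforeTime: return the value closest before the given time, return the exact value if one exists at precisely that time, and fall back to ZeroSamplePair when no applicable value exists. A rough sketch of that contract over a sorted slice, with simplified stand-ins for model.SamplePair and ZeroSamplePair rather than the real chunk iterator:

package main

import (
	"fmt"
	"sort"
)

// Simplified stand-ins for model.SamplePair and ZeroSamplePair, used only to
// illustrate the documented contract; the real types use model.Time and
// model.SampleValue.
type samplePair struct {
	Timestamp int64
	Value     float64
}

// zeroSamplePair plays the role of ZeroSamplePair: the sentinel returned
// when no sample exists at or before the requested time.
var zeroSamplePair = samplePair{Timestamp: -1, Value: 0}

// valueAtOrBeforeTime returns the latest sample with Timestamp <= t,
// assuming samples is sorted by ascending timestamp.
func valueAtOrBeforeTime(samples []samplePair, t int64) samplePair {
	// Index of the first sample strictly after t.
	i := sort.Search(len(samples), func(i int) bool { return samples[i].Timestamp > t })
	if i == 0 {
		return zeroSamplePair // no applicable value
	}
	return samples[i-1]
}

func main() {
	samples := []samplePair{{10, 1.0}, {20, 2.0}, {30, 3.0}}
	fmt.Println(valueAtOrBeforeTime(samples, 20)) // value at precisely t: {20 2}
	fmt.Println(valueAtOrBeforeTime(samples, 25)) // closest before t: {20 2}
	fmt.Println(valueAtOrBeforeTime(samples, 5))  // nothing applicable: {-1 0}
}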

View File

@@ -76,7 +76,7 @@ type Storage interface {
// of the series prior the modification.
type SeriesIterator interface {
// Gets the value that is closest before the given time. In case a value
- // exist at precisely the given time, that value is returned. If no
+ // exists at precisely the given time, that value is returned. If no
// applicable value exists, ZeroSamplePair is returned.
ValueAtOrBeforeTime(model.Time) model.SamplePair
// Gets all values contained within a given interval.

View File

@@ -45,7 +45,7 @@ func (p *memorySeriesPreloader) PreloadInstant(
) (SeriesIterator, error) {
cds, iter, err := p.storage.preloadChunksForRange(fp, timestamp.Add(-stalenessDelta), timestamp, true)
if err != nil {
- return iter, err
+ return nil, err
}
p.pinnedChunkDescs = append(p.pinnedChunkDescs, cds...)
return iter, nil
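
The last hunk is a small error-handling fix: when preloadChunksForRange fails, PreloadInstant now returns nil instead of passing on whatever iterator came back alongside the error. A tiny sketch of that convention, with hypothetical names (open, loadIterator, iterator) that are not part of the real storage API:

package main

import (
	"errors"
	"fmt"
)

// iterator and open are hypothetical stand-ins; they only serve to show the
// return convention the fix adopts.
type iterator struct{}

func open(fail bool) (*iterator, error) {
	if fail {
		// A failing call may still hand back a non-nil value that the
		// caller must not rely on.
		return &iterator{}, errors.New("preload failed")
	}
	return &iterator{}, nil
}

// loadIterator returns the zero value (nil) whenever the error is non-nil,
// so callers can never accidentally hold on to a half-initialized iterator.
func loadIterator(fail bool) (*iterator, error) {
	it, err := open(fail)
	if err != nil {
		return nil, err // was: return it, err
	}
	return it, nil
}

func main() {
	it, err := loadIterator(true)
	fmt.Println(it, err) // <nil> preload failed
}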