Make scrape time unambiguous.

This commit changes the scraper interface to accept a timestamp
so that the timestamp reported by the caller and the timestamp
attached to samples do not differ.
This commit is contained in:
Fabian Reinartz 2016-02-25 13:58:46 +01:00
parent 2bb8ef99d1
commit 1a3253e8ed
4 changed files with 16 additions and 18 deletions

View File

@ -66,13 +66,6 @@ func init() {
prometheus.MustRegister(targetSkippedScrapes) prometheus.MustRegister(targetSkippedScrapes)
} }
// A scraper retrieves samples and accepts a status report at the end.
type scraper interface {
scrape(context.Context) (model.Samples, error)
report(start time.Time, dur time.Duration, err error)
offset(interval time.Duration) time.Duration
}
// scrapePool manages scrapes for sets of targets. // scrapePool manages scrapes for sets of targets.
type scrapePool struct { type scrapePool struct {
appender storage.SampleAppender appender storage.SampleAppender
@ -178,6 +171,13 @@ func (sp *scrapePool) sync(tgroups map[string]map[model.Fingerprint]*Target) {
sp.mtx.Unlock() sp.mtx.Unlock()
} }
// A scraper retrieves samples and accepts a status report at the end.
type scraper interface {
scrape(ctx context.Context, ts time.Time) (model.Samples, error)
report(start time.Time, dur time.Duration, err error)
offset(interval time.Duration) time.Duration
}
type loop interface { type loop interface {
run(interval, timeout time.Duration, errc chan<- error) run(interval, timeout time.Duration, errc chan<- error)
stop() stop()
@ -239,7 +239,7 @@ func (sl *scrapeLoop) run(interval, timeout time.Duration, errc chan<- error) {
float64(time.Since(last)) / float64(time.Second), // Sub-second precision. float64(time.Since(last)) / float64(time.Second), // Sub-second precision.
) )
samples, err := sl.scraper.scrape(scrapeCtx) samples, err := sl.scraper.scrape(scrapeCtx, start)
if err == nil { if err == nil {
sl.append(samples) sl.append(samples)
} else if errc != nil { } else if errc != nil {

View File

@ -63,7 +63,7 @@ func TestScrapeLoopRun(t *testing.T) {
scraper.offsetDur = 0 scraper.offsetDur = 0
block := make(chan struct{}) block := make(chan struct{})
scraper.scrapeFunc = func(ctx context.Context) (model.Samples, error) { scraper.scrapeFunc = func(ctx context.Context, ts time.Time) (model.Samples, error) {
select { select {
case <-block: case <-block:
case <-ctx.Done(): case <-ctx.Done():
@ -116,7 +116,7 @@ type testScraper struct {
samples model.Samples samples model.Samples
scrapeErr error scrapeErr error
scrapeFunc func(context.Context) (model.Samples, error) scrapeFunc func(context.Context, time.Time) (model.Samples, error)
} }
func (ts *testScraper) offset(interval time.Duration) time.Duration { func (ts *testScraper) offset(interval time.Duration) time.Duration {
@ -129,9 +129,9 @@ func (ts *testScraper) report(start time.Time, duration time.Duration, err error
ts.lastError = err ts.lastError = err
} }
func (ts *testScraper) scrape(ctx context.Context) (model.Samples, error) { func (ts *testScraper) scrape(ctx context.Context, t time.Time) (model.Samples, error) {
if ts.scrapeFunc != nil { if ts.scrapeFunc != nil {
return ts.scrapeFunc(ctx) return ts.scrapeFunc(ctx, t)
} }
return ts.samples, ts.scrapeErr return ts.samples, ts.scrapeErr
} }

View File

@ -340,13 +340,11 @@ func (t *Target) InstanceIdentifier() string {
const acceptHeader = `application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.7,text/plain;version=0.0.4;q=0.3,application/json;schema="prometheus/telemetry";version=0.0.2;q=0.2,*/*;q=0.1` const acceptHeader = `application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.7,text/plain;version=0.0.4;q=0.3,application/json;schema="prometheus/telemetry";version=0.0.2;q=0.2,*/*;q=0.1`
func (t *Target) scrape(ctx context.Context) (model.Samples, error) { func (t *Target) scrape(ctx context.Context, ts time.Time) (model.Samples, error) {
t.RLock() t.RLock()
client := t.httpClient client := t.httpClient
t.RUnlock() t.RUnlock()
start := time.Now()
req, err := http.NewRequest("GET", t.URL().String(), nil) req, err := http.NewRequest("GET", t.URL().String(), nil)
if err != nil { if err != nil {
return nil, err return nil, err
@ -370,7 +368,7 @@ func (t *Target) scrape(ctx context.Context) (model.Samples, error) {
sdec := expfmt.SampleDecoder{ sdec := expfmt.SampleDecoder{
Dec: expfmt.NewDecoder(resp.Body, expfmt.ResponseFormat(resp.Header)), Dec: expfmt.NewDecoder(resp.Body, expfmt.ResponseFormat(resp.Header)),
Opts: &expfmt.DecodeOptions{ Opts: &expfmt.DecodeOptions{
Timestamp: model.TimeFromUnixNano(start.UnixNano()), Timestamp: model.TimeFromUnixNano(ts.UnixNano()),
}, },
} }

View File

@ -181,7 +181,7 @@ func TestTargetScrape404(t *testing.T) {
testTarget := newTestTarget(server.URL, time.Second, model.LabelSet{}) testTarget := newTestTarget(server.URL, time.Second, model.LabelSet{})
want := errors.New("server returned HTTP status 404 Not Found") want := errors.New("server returned HTTP status 404 Not Found")
_, got := testTarget.scrape(context.Background()) _, got := testTarget.scrape(context.Background(), time.Now())
if got == nil || want.Error() != got.Error() { if got == nil || want.Error() != got.Error() {
t.Fatalf("want err %q, got %q", want, got) t.Fatalf("want err %q, got %q", want, got)
} }
@ -229,7 +229,7 @@ func TestURLParams(t *testing.T) {
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
if _, err = target.scrape(context.Background()); err != nil { if _, err = target.scrape(context.Background(), time.Now()); err != nil {
t.Fatal(err) t.Fatal(err)
} }
} }