Make scrape time unambiguous.
This commit changes the scraper interface to accept a timestamp, so that the timestamp reported by the caller and the timestamp attached to the scraped samples can no longer differ.
parent 2bb8ef99d1
commit 1a3253e8ed
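For context, the calling pattern the new interface encourages can be sketched as follows. This is an illustrative toy, not code from this repository: the `sample` struct and `fakeScraper` type are invented for the example, and the real interface also has an `offset` method. The point is that the caller reads the clock once, and that single timestamp is what both the samples and the status report see.

// Minimal sketch (assumed types, not the Prometheus sources): the caller
// chooses one timestamp, hands it to the scraper, and the same value is
// stamped on the samples and used in the report, so the two cannot drift.
package main

import (
	"context"
	"fmt"
	"time"
)

// sample stands in for model.Sample; only the timestamp matters here.
type sample struct {
	name string
	ts   time.Time
}

// scraper mirrors the shape of the changed interface: scrape now receives
// the timestamp from the caller instead of reading the clock itself.
type scraper interface {
	scrape(ctx context.Context, ts time.Time) ([]sample, error)
	report(start time.Time, dur time.Duration, err error)
}

type fakeScraper struct{}

func (fakeScraper) scrape(ctx context.Context, ts time.Time) ([]sample, error) {
	// Stamp the scraped samples with the caller-provided time.
	return []sample{{name: "up", ts: ts}}, nil
}

func (fakeScraper) report(start time.Time, dur time.Duration, err error) {
	fmt.Printf("scrape started at %v, took %v, err=%v\n", start, dur, err)
}

func main() {
	var s scraper = fakeScraper{}

	start := time.Now() // single source of truth for this scrape
	samples, err := s.scrape(context.Background(), start)
	s.report(start, time.Since(start), err)

	for _, smp := range samples {
		// smp.ts equals start by construction; no second call to time.Now().
		fmt.Println(smp.name, smp.ts.Equal(start))
	}
}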
@@ -66,13 +66,6 @@ func init() {
 	prometheus.MustRegister(targetSkippedScrapes)
 }
 
-// A scraper retrieves samples and accepts a status report at the end.
-type scraper interface {
-	scrape(context.Context) (model.Samples, error)
-	report(start time.Time, dur time.Duration, err error)
-	offset(interval time.Duration) time.Duration
-}
-
 // scrapePool manages scrapes for sets of targets.
 type scrapePool struct {
 	appender storage.SampleAppender
@@ -178,6 +171,13 @@ func (sp *scrapePool) sync(tgroups map[string]map[model.Fingerprint]*Target) {
 	sp.mtx.Unlock()
 }
 
+// A scraper retrieves samples and accepts a status report at the end.
+type scraper interface {
+	scrape(ctx context.Context, ts time.Time) (model.Samples, error)
+	report(start time.Time, dur time.Duration, err error)
+	offset(interval time.Duration) time.Duration
+}
+
 type loop interface {
 	run(interval, timeout time.Duration, errc chan<- error)
 	stop()
@@ -239,7 +239,7 @@ func (sl *scrapeLoop) run(interval, timeout time.Duration, errc chan<- error) {
 			float64(time.Since(last)) / float64(time.Second), // Sub-second precision.
 		)
 
-		samples, err := sl.scraper.scrape(scrapeCtx)
+		samples, err := sl.scraper.scrape(scrapeCtx, start)
 		if err == nil {
 			sl.append(samples)
 		} else if errc != nil {

@@ -63,7 +63,7 @@ func TestScrapeLoopRun(t *testing.T) {
 	scraper.offsetDur = 0
 
 	block := make(chan struct{})
-	scraper.scrapeFunc = func(ctx context.Context) (model.Samples, error) {
+	scraper.scrapeFunc = func(ctx context.Context, ts time.Time) (model.Samples, error) {
 		select {
 		case <-block:
 		case <-ctx.Done():
@@ -116,7 +116,7 @@ type testScraper struct {
 
 	samples    model.Samples
 	scrapeErr  error
-	scrapeFunc func(context.Context) (model.Samples, error)
+	scrapeFunc func(context.Context, time.Time) (model.Samples, error)
 }
 
 func (ts *testScraper) offset(interval time.Duration) time.Duration {
@@ -129,9 +129,9 @@ func (ts *testScraper) report(start time.Time, duration time.Duration, err error
 	ts.lastError = err
 }
 
-func (ts *testScraper) scrape(ctx context.Context) (model.Samples, error) {
+func (ts *testScraper) scrape(ctx context.Context, t time.Time) (model.Samples, error) {
 	if ts.scrapeFunc != nil {
-		return ts.scrapeFunc(ctx)
+		return ts.scrapeFunc(ctx, t)
 	}
 	return ts.samples, ts.scrapeErr
 }

@@ -340,13 +340,11 @@ func (t *Target) InstanceIdentifier() string {
 
 const acceptHeader = `application/vnd.google.protobuf;proto=io.prometheus.client.MetricFamily;encoding=delimited;q=0.7,text/plain;version=0.0.4;q=0.3,application/json;schema="prometheus/telemetry";version=0.0.2;q=0.2,*/*;q=0.1`
 
-func (t *Target) scrape(ctx context.Context) (model.Samples, error) {
+func (t *Target) scrape(ctx context.Context, ts time.Time) (model.Samples, error) {
 	t.RLock()
 	client := t.httpClient
 	t.RUnlock()
 
-	start := time.Now()
-
 	req, err := http.NewRequest("GET", t.URL().String(), nil)
 	if err != nil {
 		return nil, err
@@ -370,7 +368,7 @@ func (t *Target) scrape(ctx context.Context) (model.Samples, error) {
 	sdec := expfmt.SampleDecoder{
 		Dec: expfmt.NewDecoder(resp.Body, expfmt.ResponseFormat(resp.Header)),
 		Opts: &expfmt.DecodeOptions{
-			Timestamp: model.TimeFromUnixNano(start.UnixNano()),
+			Timestamp: model.TimeFromUnixNano(ts.UnixNano()),
 		},
 	}

@@ -181,7 +181,7 @@ func TestTargetScrape404(t *testing.T) {
 	testTarget := newTestTarget(server.URL, time.Second, model.LabelSet{})
 
 	want := errors.New("server returned HTTP status 404 Not Found")
-	_, got := testTarget.scrape(context.Background())
+	_, got := testTarget.scrape(context.Background(), time.Now())
 	if got == nil || want.Error() != got.Error() {
 		t.Fatalf("want err %q, got %q", want, got)
 	}
@@ -229,7 +229,7 @@ func TestURLParams(t *testing.T) {
 	if err != nil {
 		t.Fatal(err)
 	}
-	if _, err = target.scrape(context.Background()); err != nil {
+	if _, err = target.scrape(context.Background(), time.Now()); err != nil {
 		t.Fatal(err)
 	}
 }