From c92673fb1480cd6e3404a004522cbb851be4115d Mon Sep 17 00:00:00 2001
From: Nicholas Blott
Date: Thu, 16 Dec 2021 12:28:46 +0000
Subject: [PATCH] Remove check against cfg so interval/ timeout are always set (#10023)

Signed-off-by: Nicholas Blott
---
 cmd/promtool/sd_test.go | 11 +++++++----
 scrape/target.go        | 34 ++++++++++++++--------------------
 scrape/target_test.go   |  6 +++++-
 3 files changed, 26 insertions(+), 25 deletions(-)

diff --git a/cmd/promtool/sd_test.go b/cmd/promtool/sd_test.go
index 7f80437cf..5fa97bf8a 100644
--- a/cmd/promtool/sd_test.go
+++ b/cmd/promtool/sd_test.go
@@ -15,6 +15,7 @@ package main
 
 import (
 	"testing"
+	"time"
 
 	"github.com/prometheus/common/model"
 
@@ -37,6 +38,8 @@ func TestSDCheckResult(t *testing.T) {
 	require.Nil(t, err)
 
 	scrapeConfig := &config.ScrapeConfig{
+		ScrapeInterval: model.Duration(1 * time.Minute),
+		ScrapeTimeout:  model.Duration(10 * time.Second),
 		RelabelConfigs: []*relabel.Config{{
 			SourceLabels: model.LabelNames{"foo"},
 			Action:       relabel.Replace,
@@ -50,14 +53,14 @@ func TestSDCheckResult(t *testing.T) {
 		{
 			DiscoveredLabels: labels.Labels{
 				labels.Label{Name: "__address__", Value: "localhost:8080"},
-				labels.Label{Name: "__scrape_interval__", Value: "0s"},
-				labels.Label{Name: "__scrape_timeout__", Value: "0s"},
+				labels.Label{Name: "__scrape_interval__", Value: "1m"},
+				labels.Label{Name: "__scrape_timeout__", Value: "10s"},
 				labels.Label{Name: "foo", Value: "bar"},
 			},
 			Labels: labels.Labels{
 				labels.Label{Name: "__address__", Value: "localhost:8080"},
-				labels.Label{Name: "__scrape_interval__", Value: "0s"},
-				labels.Label{Name: "__scrape_timeout__", Value: "0s"},
+				labels.Label{Name: "__scrape_interval__", Value: "1m"},
+				labels.Label{Name: "__scrape_timeout__", Value: "10s"},
 				labels.Label{Name: "foo", Value: "bar"},
 				labels.Label{Name: "instance", Value: "localhost:8080"},
 				labels.Label{Name: "newfoo", Value: "bar"},
diff --git a/scrape/target.go b/scrape/target.go
index 59d6c9403..e017a4584 100644
--- a/scrape/target.go
+++ b/scrape/target.go
@@ -418,28 +418,22 @@ func PopulateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig lab
 		return nil, nil, err
 	}
 
-	var interval string
-	var intervalDuration model.Duration
-	if interval = lset.Get(model.ScrapeIntervalLabel); interval != cfg.ScrapeInterval.String() {
-		intervalDuration, err = model.ParseDuration(interval)
-		if err != nil {
-			return nil, nil, errors.Errorf("error parsing scrape interval: %v", err)
-		}
-		if time.Duration(intervalDuration) == 0 {
-			return nil, nil, errors.New("scrape interval cannot be 0")
-		}
+	interval := lset.Get(model.ScrapeIntervalLabel)
+	intervalDuration, err := model.ParseDuration(interval)
+	if err != nil {
+		return nil, nil, errors.Errorf("error parsing scrape interval: %v", err)
+	}
+	if time.Duration(intervalDuration) == 0 {
+		return nil, nil, errors.New("scrape interval cannot be 0")
 	}
 
-	var timeout string
-	var timeoutDuration model.Duration
-	if timeout = lset.Get(model.ScrapeTimeoutLabel); timeout != cfg.ScrapeTimeout.String() {
-		timeoutDuration, err = model.ParseDuration(timeout)
-		if err != nil {
-			return nil, nil, errors.Errorf("error parsing scrape timeout: %v", err)
-		}
-		if time.Duration(timeoutDuration) == 0 {
-			return nil, nil, errors.New("scrape timeout cannot be 0")
-		}
+	timeout := lset.Get(model.ScrapeTimeoutLabel)
+	timeoutDuration, err := model.ParseDuration(timeout)
+	if err != nil {
+		return nil, nil, errors.Errorf("error parsing scrape timeout: %v", err)
+	}
+	if time.Duration(timeoutDuration) == 0 {
+		return nil, nil, errors.New("scrape timeout cannot be 0")
 	}
 
 	if timeoutDuration > intervalDuration {
diff --git a/scrape/target_test.go b/scrape/target_test.go
index 32ab4669e..0e02dd8f9 100644
--- a/scrape/target_test.go
+++ b/scrape/target_test.go
@@ -371,7 +371,11 @@ func TestNewClientWithBadTLSConfig(t *testing.T) {
 
 func TestTargetsFromGroup(t *testing.T) {
 	expectedError := "instance 0 in group : no address"
-	targets, failures := TargetsFromGroup(&targetgroup.Group{Targets: []model.LabelSet{{}, {model.AddressLabel: "localhost:9090"}}}, &config.ScrapeConfig{})
+	cfg := config.ScrapeConfig{
+		ScrapeTimeout:  model.Duration(10 * time.Second),
+		ScrapeInterval: model.Duration(1 * time.Minute),
+	}
+	targets, failures := TargetsFromGroup(&targetgroup.Group{Targets: []model.LabelSet{{}, {model.AddressLabel: "localhost:9090"}}}, &cfg)
 	if len(targets) != 1 {
 		t.Fatalf("Expected 1 target, got %v", len(targets))
 	}