Remove check against cfg so interval/timeout are always set (#10023) (#10031)

Signed-off-by: Nicholas Blott <blottn@tcd.ie>

Co-authored-by: Nicholas Blott <blottn@tcd.ie>
Author: Julien Pivotto
Date: 2021-12-16 16:46:14 +01:00 (committed by GitHub)
parent 565ee356df
commit 67a64ee092
3 changed files with 26 additions and 25 deletions


@@ -15,6 +15,7 @@ package main
 import (
     "testing"
     "time"
+    "github.com/prometheus/common/model"
@@ -37,6 +38,8 @@ func TestSDCheckResult(t *testing.T) {
     require.Nil(t, err)
     scrapeConfig := &config.ScrapeConfig{
+        ScrapeInterval: model.Duration(1 * time.Minute),
+        ScrapeTimeout:  model.Duration(10 * time.Second),
         RelabelConfigs: []*relabel.Config{{
             SourceLabels: model.LabelNames{"foo"},
             Action:       relabel.Replace,
@@ -50,14 +53,14 @@ func TestSDCheckResult(t *testing.T) {
         {
             DiscoveredLabels: labels.Labels{
                 labels.Label{Name: "__address__", Value: "localhost:8080"},
-                labels.Label{Name: "__scrape_interval__", Value: "0s"},
-                labels.Label{Name: "__scrape_timeout__", Value: "0s"},
+                labels.Label{Name: "__scrape_interval__", Value: "1m"},
+                labels.Label{Name: "__scrape_timeout__", Value: "10s"},
                 labels.Label{Name: "foo", Value: "bar"},
             },
             Labels: labels.Labels{
                 labels.Label{Name: "__address__", Value: "localhost:8080"},
-                labels.Label{Name: "__scrape_interval__", Value: "0s"},
-                labels.Label{Name: "__scrape_timeout__", Value: "0s"},
+                labels.Label{Name: "__scrape_interval__", Value: "1m"},
+                labels.Label{Name: "__scrape_timeout__", Value: "10s"},
                 labels.Label{Name: "foo", Value: "bar"},
                 labels.Label{Name: "instance", Value: "localhost:8080"},
                 labels.Label{Name: "newfoo", Value: "bar"},

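The expected __scrape_interval__ and __scrape_timeout__ values above change from "0s" to "1m" and "10s" because the test's ScrapeConfig now carries explicit interval and timeout settings, which are propagated into the discovered labels. As a minimal standalone sketch (not part of this commit), this is how those strings come out of model.Duration:

    package main

    import (
        "fmt"
        "time"

        "github.com/prometheus/common/model"
    )

    func main() {
        // model.Duration's String() renders 1 minute as "1m" and 10 seconds
        // as "10s", matching the label values expected in the updated test.
        fmt.Println(model.Duration(1 * time.Minute).String())  // 1m
        fmt.Println(model.Duration(10 * time.Second).String()) // 10s
    }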

@@ -418,28 +418,22 @@ func PopulateLabels(lset labels.Labels, cfg *config.ScrapeConfig) (res, orig lab
         return nil, nil, err
     }
-    var interval string
-    var intervalDuration model.Duration
-    if interval = lset.Get(model.ScrapeIntervalLabel); interval != cfg.ScrapeInterval.String() {
-        intervalDuration, err = model.ParseDuration(interval)
-        if err != nil {
-            return nil, nil, errors.Errorf("error parsing scrape interval: %v", err)
-        }
-        if time.Duration(intervalDuration) == 0 {
-            return nil, nil, errors.New("scrape interval cannot be 0")
-        }
+    interval := lset.Get(model.ScrapeIntervalLabel)
+    intervalDuration, err := model.ParseDuration(interval)
+    if err != nil {
+        return nil, nil, errors.Errorf("error parsing scrape interval: %v", err)
+    }
+    if time.Duration(intervalDuration) == 0 {
+        return nil, nil, errors.New("scrape interval cannot be 0")
     }
-    var timeout string
-    var timeoutDuration model.Duration
-    if timeout = lset.Get(model.ScrapeTimeoutLabel); timeout != cfg.ScrapeTimeout.String() {
-        timeoutDuration, err = model.ParseDuration(timeout)
-        if err != nil {
-            return nil, nil, errors.Errorf("error parsing scrape timeout: %v", err)
-        }
-        if time.Duration(timeoutDuration) == 0 {
-            return nil, nil, errors.New("scrape timeout cannot be 0")
-        }
+    timeout := lset.Get(model.ScrapeTimeoutLabel)
+    timeoutDuration, err := model.ParseDuration(timeout)
+    if err != nil {
+        return nil, nil, errors.Errorf("error parsing scrape timeout: %v", err)
+    }
+    if time.Duration(timeoutDuration) == 0 {
+        return nil, nil, errors.New("scrape timeout cannot be 0")
     }
     if timeoutDuration > intervalDuration {

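The hunk above removes the "interval != cfg.ScrapeInterval.String()" and "timeout != cfg.ScrapeTimeout.String()" guards, so the __scrape_interval__ and __scrape_timeout__ labels are now parsed and validated unconditionally. Below is a simplified, self-contained sketch of that new flow (not the real PopulateLabels; the label set is reduced to a plain map and errors come from the standard library), illustrating that a "0s" value is now rejected even when it happens to match the config:

    package main

    import (
        "fmt"
        "time"

        "github.com/prometheus/common/model"
    )

    // validateScrapeTiming mirrors the new logic in simplified form: the
    // interval and timeout labels are always parsed, a zero value is
    // rejected, and the timeout may not exceed the interval.
    func validateScrapeTiming(lset map[string]string) error {
        intervalDuration, err := model.ParseDuration(lset[string(model.ScrapeIntervalLabel)])
        if err != nil {
            return fmt.Errorf("error parsing scrape interval: %v", err)
        }
        if time.Duration(intervalDuration) == 0 {
            return fmt.Errorf("scrape interval cannot be 0")
        }
        timeoutDuration, err := model.ParseDuration(lset[string(model.ScrapeTimeoutLabel)])
        if err != nil {
            return fmt.Errorf("error parsing scrape timeout: %v", err)
        }
        if time.Duration(timeoutDuration) == 0 {
            return fmt.Errorf("scrape timeout cannot be 0")
        }
        if timeoutDuration > intervalDuration {
            return fmt.Errorf("scrape timeout cannot be greater than scrape interval (%q > %q)",
                timeoutDuration, intervalDuration)
        }
        return nil
    }

    func main() {
        // With the old guards, a "0s" label that matched the config value was
        // silently skipped; with the unconditional check it is rejected.
        err := validateScrapeTiming(map[string]string{
            string(model.ScrapeIntervalLabel): "0s",
            string(model.ScrapeTimeoutLabel):  "0s",
        })
        fmt.Println(err) // scrape interval cannot be 0
    }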

@@ -371,7 +371,11 @@ func TestNewClientWithBadTLSConfig(t *testing.T) {
 func TestTargetsFromGroup(t *testing.T) {
     expectedError := "instance 0 in group : no address"
-    targets, failures := TargetsFromGroup(&targetgroup.Group{Targets: []model.LabelSet{{}, {model.AddressLabel: "localhost:9090"}}}, &config.ScrapeConfig{})
+    cfg := config.ScrapeConfig{
+        ScrapeTimeout:  model.Duration(10 * time.Second),
+        ScrapeInterval: model.Duration(1 * time.Minute),
+    }
+    targets, failures := TargetsFromGroup(&targetgroup.Group{Targets: []model.LabelSet{{}, {model.AddressLabel: "localhost:9090"}}}, &cfg)
     if len(targets) != 1 {
         t.Fatalf("Expected 1 target, got %v", len(targets))
     }
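With the unconditional validation introduced above, the previous zero-valued &config.ScrapeConfig{} would make every target fail with "scrape interval cannot be 0", which is why this test now builds a config with explicit, non-zero interval and timeout values before calling TargetsFromGroup.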