Consolidate Target.Update into constructor.
The Target.Update method is no longer needed.
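For illustration, a minimal sketch of how a caller constructs a target after this change. Only NewTarget and its (cfg, labels, metaLabels) signature come from the diff below; the package name, import paths, config values, and labels are assumptions made up for the example.

package retrieval

import (
	"log"
	"time"

	"github.com/prometheus/common/model"
	"github.com/prometheus/prometheus/config"
)

// exampleNewTarget is a hypothetical caller; the scrape config and labels
// below are illustrative and not taken from the commit.
func exampleNewTarget() {
	cfg := &config.ScrapeConfig{
		JobName:        "example",
		ScrapeInterval: model.Duration(15 * time.Second),
		ScrapeTimeout:  model.Duration(10 * time.Second),
	}
	labels := model.LabelSet{model.AddressLabel: "localhost:9090"}

	// Before this commit a caller built a bare Target and then called
	// t.Update(cfg, labels, metaLabels). Now NewTarget creates the HTTP
	// client and fills all config-derived fields itself, returning an
	// error if the client cannot be built.
	t, err := NewTarget(cfg, labels, model.LabelSet{})
	if err != nil {
		log.Fatalln("creating target:", err) // e.g. invalid TLS settings
	}
	_ = t
}

With Update gone, the locking it needed to swap config-derived fields and the HTTP client after construction also disappears: all fields are set once, before the target is handed out.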
parent d15adfc917
commit da99366f85
@@ -185,13 +185,20 @@ type Target struct {
 
 // NewTarget creates a reasonably configured target for querying.
 func NewTarget(cfg *config.ScrapeConfig, labels, metaLabels model.LabelSet) (*Target, error) {
+	client, err := newHTTPClient(cfg)
+	if err != nil {
+		return nil, err
+	}
 	t := &Target{
 		status:          &TargetStatus{},
 		scraperStopping: make(chan struct{}),
 		scraperStopped:  make(chan struct{}),
+		scrapeConfig:    cfg,
+		labels:          labels,
+		metaLabels:      metaLabels,
+		httpClient:      client,
 	}
-	err := t.Update(cfg, labels, metaLabels)
-	return t, err
+	return t, nil
 }
 
 // Status returns the status of the target.
@@ -199,28 +206,6 @@ func (t *Target) Status() *TargetStatus {
 	return t.status
 }
 
-// Update overwrites settings in the target that are derived from the job config
-// it belongs to.
-func (t *Target) Update(cfg *config.ScrapeConfig, labels, metaLabels model.LabelSet) error {
-	t.Lock()
-
-	t.scrapeConfig = cfg
-	t.labels = labels
-	t.metaLabels = metaLabels
-
-	t.Unlock()
-
-	httpClient, err := t.client()
-	if err != nil {
-		return fmt.Errorf("cannot create HTTP client: %s", err)
-	}
-	t.Lock()
-	t.httpClient = httpClient
-	t.Unlock()
-
-	return nil
-}
-
 func newHTTPClient(cfg *config.ScrapeConfig) (*http.Client, error) {
 	tlsOpts := httputil.TLSOptions{
 		InsecureSkipVerify: cfg.TLSConfig.InsecureSkipVerify,
@@ -293,13 +278,6 @@ func (t *Target) offset(interval time.Duration) time.Duration {
 	return time.Duration(next)
 }
 
-func (t *Target) client() (*http.Client, error) {
-	t.RLock()
-	defer t.RUnlock()
-
-	return newHTTPClient(t.scrapeConfig)
-}
-
 func (t *Target) interval() time.Duration {
 	t.RLock()
 	defer t.RUnlock()
@@ -578,7 +578,7 @@ func newTestTarget(targetURL string, deadline time.Duration, labels model.LabelSet) *Target {
 	labels[model.AddressLabel] = model.LabelValue(strings.TrimLeft(targetURL, "http://"))
 	labels[model.MetricsPathLabel] = "/metrics"
 
-	t := &Target{
+	return &Target{
 		scrapeConfig: &config.ScrapeConfig{
 			ScrapeInterval: model.Duration(time.Millisecond),
 			ScrapeTimeout:  model.Duration(deadline),
@@ -588,13 +588,6 @@ func newTestTarget(targetURL string, deadline time.Duration, labels model.LabelSet) *Target {
 		scraperStopping: make(chan struct{}),
 		scraperStopped:  make(chan struct{}),
 	}
-
-	var err error
-	if t.httpClient, err = t.client(); err != nil {
-		panic(err)
-	}
-
-	return t
 }
 
 func TestNewHTTPBearerToken(t *testing.T) {