Merge pull request #2217 from prometheus/alertingsd

Extract alertmanager into interface
Fabian Reinartz 2016-11-25 11:28:38 +01:00 committed by GitHub
commit 9b7f5c7f29
3 changed files with 45 additions and 34 deletions
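In short: the notifier previously modeled an Alertmanager endpoint as a concrete struct that carried a plainURL field purely as a test injection hook. This change turns alertmanager into a small interface so production code and tests can supply their own implementations, and moves the HTTP send logic onto the Notifier as sendOne. A rough, self-contained sketch of the resulting shape, with condensed, made-up names (staticAlertmanager, mockAlertmanager, the example URLs), not an excerpt of the real files:

package main

import "fmt"

// An Alertmanager endpoint is anything that can report the URL alerts
// should be POSTed to.
type alertmanager interface {
    url() string
}

// staticAlertmanager stands in for the real label-set based implementation.
type staticAlertmanager struct{ u string }

func (s staticAlertmanager) url() string { return s.u }

// mockAlertmanager mirrors the test mock added in this commit.
type mockAlertmanager struct{ urlf func() string }

func (m mockAlertmanager) url() string { return m.urlf() }

// sendAll needs only the URL; the HTTP POST itself lives on the notifier
// (sendOne in the real code), not on the endpoint type.
func sendAll(ams []alertmanager) {
    for _, am := range ams {
        fmt.Println("would POST alerts to", am.url())
    }
}

func main() {
    sendAll([]alertmanager{
        staticAlertmanager{u: "http://localhost:9093/api/v1/alerts"}, // hypothetical URL
        mockAlertmanager{urlf: func() string { return "http://stub.invalid" }},
    })
}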

View File

@@ -87,8 +87,8 @@ var (
         HonorLabels: false,
     }
-    // DefaultAlertmanagersConfig is the default alertmanager configuration.
-    DefaultAlertmanagersConfig = AlertmanagerConfig{
+    // DefaultAlertmanagerConfig is the default alertmanager configuration.
+    DefaultAlertmanagerConfig = AlertmanagerConfig{
         Scheme: "http",
         Timeout: 10 * time.Second,
     }
@@ -548,7 +548,7 @@ func (c *ScrapeConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
     return nil
 }
-// AlertingConfig configures alerting and alertmanager related configs
+// AlertingConfig configures alerting and alertmanager related configs.
 type AlertingConfig struct {
     AlertRelabelConfigs []*RelabelConfig `yaml:"alert_relabel_configs,omitempty"`
     AlertmanagerConfigs []*AlertmanagerConfig `yaml:"alertmanagers,omitempty"`
@@ -596,7 +596,7 @@ type AlertmanagerConfig struct {
 // UnmarshalYAML implements the yaml.Unmarshaler interface.
 func (c *AlertmanagerConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
-    *c = DefaultAlertmanagersConfig
+    *c = DefaultAlertmanagerConfig
     type plain AlertmanagerConfig
     if err := unmarshal((*plain)(c)); err != nil {
         return err
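The UnmarshalYAML override in the hunk above is the usual default-then-overlay idiom: reset the struct to its default value, then decode the user's YAML over it so omitted fields keep their defaults. A minimal, self-contained sketch of that idiom, assuming gopkg.in/yaml.v2 and made-up names (amConfig, defaultAMConfig) rather than the real config package types:

package main

import (
    "fmt"
    "time"

    "gopkg.in/yaml.v2"
)

// amConfig is an illustrative stand-in for AlertmanagerConfig, not the real type.
type amConfig struct {
    Scheme  string        `yaml:"scheme,omitempty"`
    Timeout time.Duration `yaml:"-"` // kept out of YAML to keep the sketch simple
}

// defaultAMConfig plays the role of DefaultAlertmanagerConfig.
var defaultAMConfig = amConfig{
    Scheme:  "http",
    Timeout: 10 * time.Second,
}

// UnmarshalYAML applies the defaults first, then decodes the user's YAML over them.
func (c *amConfig) UnmarshalYAML(unmarshal func(interface{}) error) error {
    *c = defaultAMConfig
    type plain amConfig // alias without methods, so the unmarshal below does not recurse
    return unmarshal((*plain)(c))
}

func main() {
    var c amConfig
    // Only the scheme is given; Timeout keeps its 10s default.
    if err := yaml.Unmarshal([]byte("scheme: https\n"), &c); err != nil {
        panic(err)
    }
    fmt.Println(c.Scheme, c.Timeout) // https 10s
}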

View File

@@ -331,7 +331,7 @@ func (n *Notifier) sendAll(alerts ...*model.Alert) bool {
         go func(am alertmanager) {
             u := am.url()
-            if err := am.send(ctx, ams.client, b); err != nil {
+            if err := n.sendOne(ctx, ams.client, u, b); err != nil {
                 log.With("alertmanager", u).With("count", len(alerts)).Errorf("Error sending alerts: %s", err)
                 n.errors.WithLabelValues(u).Inc()
             } else {
@@ -350,6 +350,20 @@ func (n *Notifier) sendAll(alerts ...*model.Alert) bool {
     return numSuccess > 0
 }
+func (n *Notifier) sendOne(ctx context.Context, c *http.Client, url string, b []byte) error {
+    resp, err := ctxhttp.Post(ctx, c, url, contentTypeJSON, bytes.NewReader(b))
+    if err != nil {
+        return err
+    }
+    defer resp.Body.Close()
+    // Any HTTP status 2xx is OK.
+    if resp.StatusCode/100 != 2 {
+        return fmt.Errorf("bad response status %v", resp.Status)
+    }
+    return err
+}
 // Stop shuts down the notification handler.
 func (n *Notifier) Stop() {
     log.Info("Stopping notification handler...")
@@ -381,39 +395,23 @@ func (n *Notifier) Collect(ch chan<- prometheus.Metric) {
 }
 // alertmanager holds Alertmanager endpoint information.
-type alertmanager struct {
-    plainURL string // test injection hook
-    labels model.LabelSet
+type alertmanager interface {
+    url() string
 }
+type alertmanagerLabels model.LabelSet
 const pathLabel = "__alerts_path__"
-func (a alertmanager) url() string {
-    if a.plainURL != "" {
-        return a.plainURL
-    }
+func (a alertmanagerLabels) url() string {
     u := &url.URL{
-        Scheme: string(a.labels[model.SchemeLabel]),
-        Host:   string(a.labels[model.AddressLabel]),
-        Path:   string(a.labels[pathLabel]),
+        Scheme: string(a[model.SchemeLabel]),
+        Host:   string(a[model.AddressLabel]),
+        Path:   string(a[pathLabel]),
     }
     return u.String()
 }
-func (a alertmanager) send(ctx context.Context, c *http.Client, b []byte) error {
-    resp, err := ctxhttp.Post(ctx, c, a.url(), contentTypeJSON, bytes.NewReader(b))
-    if err != nil {
-        return err
-    }
-    defer resp.Body.Close()
-    // Any HTTP status 2xx is OK.
-    if resp.StatusCode/100 != 2 {
-        return fmt.Errorf("bad response status %v", resp.Status)
-    }
-    return err
-}
 // alertmanagerSet contains a set of Alertmanagers discovered via a group of service
 // discovery definitions that have a common configuration on how alerts should be sent.
 type alertmanagerSet struct {
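With the endpoint reduced to an interface, the production implementation is now just a label set, and url() assembles the posting URL from the standard __scheme__ and __address__ labels plus the __alerts_path__ label set during discovery. A rough standalone sketch of that mapping; amLabels and the example label values are made up for illustration, the real type is the alertmanagerLabels shown above:

package main

import (
    "fmt"
    "net/url"

    "github.com/prometheus/common/model"
)

const pathLabel = "__alerts_path__"

// amLabels mirrors alertmanagerLabels: a label set that knows its URL.
type amLabels model.LabelSet

func (a amLabels) url() string {
    u := &url.URL{
        Scheme: string(a[model.SchemeLabel]),  // "__scheme__"
        Host:   string(a[model.AddressLabel]), // "__address__"
        Path:   string(a[pathLabel]),
    }
    return u.String()
}

func main() {
    am := amLabels{
        model.SchemeLabel:  "http",
        model.AddressLabel: "am.example.org:9093", // hypothetical address
        pathLabel:          "/api/v1/alerts",      // hypothetical alerts path
    }
    fmt.Println(am.url()) // http://am.example.org:9093/api/v1/alerts
}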
@@ -532,7 +530,7 @@ func alertmanagerFromGroup(tg *config.TargetGroup, cfg *config.AlertmanagerConfi
             }
         }
-        res = append(res, alertmanager{labels: lset})
+        res = append(res, alertmanagerLabels(lset))
     }
     return res, nil
 }

View File

@@ -23,7 +23,6 @@ import (
     "time"
     "github.com/prometheus/common/model"
     "github.com/prometheus/prometheus/config"
 )
@@ -150,8 +149,12 @@ func TestHandlerSendAll(t *testing.T) {
     h := New(&Options{})
     h.alertmanagers = append(h.alertmanagers, &alertmanagerSet{
         ams: []alertmanager{
-            {plainURL: server1.URL},
-            {plainURL: server2.URL},
+            alertmanagerMock{
+                urlf: func() string { return server1.URL },
+            },
+            alertmanagerMock{
+                urlf: func() string { return server2.URL },
+            },
         },
         cfg: &config.AlertmanagerConfig{
             Timeout: time.Second,
@@ -312,7 +315,9 @@ func TestHandlerQueueing(t *testing.T) {
     })
     h.alertmanagers = append(h.alertmanagers, &alertmanagerSet{
         ams: []alertmanager{
-            {plainURL: server.URL},
+            alertmanagerMock{
+                urlf: func() string { return server.URL },
+            },
         },
         cfg: &config.AlertmanagerConfig{
             Timeout: time.Second,
@@ -371,3 +376,11 @@ func TestHandlerQueueing(t *testing.T) {
         }
     }
 }
+type alertmanagerMock struct {
+    urlf func() string
+}
+func (a alertmanagerMock) url() string {
+    return a.urlf()
+}
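A small aside on the new mock, not part of the diff itself: since alertmanager is now an interface, the test file could also pin the mock to it with a compile-time assertion, a common Go idiom for catching signature drift early:

// Hypothetical addition to the test file: fails to compile if the mock
// stops satisfying the notifier's alertmanager interface.
var _ alertmanager = alertmanagerMock{}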