Fix nil target group bug in the foreach loop by returning the error directly.
commit 2b49df2c61
parent 3101606756
discovery/file/file.go

@@ -15,6 +15,7 @@ package file
 import (
     "encoding/json"
+    "errors"
     "fmt"
     "io/ioutil"
     "path/filepath"
@@ -256,6 +257,11 @@ func readFile(filename string) ([]*config.TargetGroup, error) {
     }

     for i, tg := range targetGroups {
+        if tg == nil {
+            err = errors.New("nil target group item found")
+            return nil, err
+        }
+
         tg.Source = fileSource(filename, i)
         if tg.Labels == nil {
             tg.Labels = model.LabelSet{}
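
The root cause: a null element in a JSON or YAML target file unmarshals into a nil *config.TargetGroup, so the loop body would panic when dereferencing it. A minimal, self-contained sketch of that decoding behaviour and of the guard the commit adds; targetGroup here is a hypothetical stand-in for config.TargetGroup, not the real type:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // targetGroup is a hypothetical stand-in for config.TargetGroup,
    // used only to illustrate the decoding behaviour.
    type targetGroup struct {
        Targets []string          `json:"targets"`
        Labels  map[string]string `json:"labels"`
    }

    func main() {
        // A JSON null element decodes to a nil pointer in a slice of pointers,
        // which is what the added check in readFile guards against.
        data := []byte(`[{"targets": ["localhost:9090"], "labels": {"foo": "bar"}}, null]`)

        var groups []*targetGroup
        if err := json.Unmarshal(data, &groups); err != nil {
            fmt.Println("unmarshal error:", err)
            return
        }
        for i, tg := range groups {
            if tg == nil {
                // Mirrors the new guard: report instead of dereferencing a nil group.
                fmt.Printf("nil target group item found at index %d\n", i)
                return
            }
            fmt.Printf("group %d: %+v\n", i, *tg)
        }
    }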
discovery/file/file_test.go

@@ -29,13 +29,17 @@ import (
 )

 func TestFileSD(t *testing.T) {
-    defer os.Remove("fixtures/_test.yml")
-    defer os.Remove("fixtures/_test.json")
-    testFileSD(t, ".yml")
-    testFileSD(t, ".json")
+    defer os.Remove("fixtures/_test_valid.yml")
+    defer os.Remove("fixtures/_test_valid.json")
+    defer os.Remove("fixtures/_test_invalid_nil.json")
+    defer os.Remove("fixtures/_test_invalid_nil.yml")
+    testFileSD(t, "valid", ".yml", true)
+    testFileSD(t, "valid", ".json", true)
+    testFileSD(t, "invalid_nil", ".json", false)
+    testFileSD(t, "invalid_nil", ".yml", false)
 }

-func testFileSD(t *testing.T, ext string) {
+func testFileSD(t *testing.T, prefix, ext string, expect bool) {
     // As interval refreshing is more of a fallback, we only want to test
     // whether file watches work as expected.
     var conf config.FileSDConfig
@@ -56,13 +60,13 @@ func testFileSD(t *testing.T, ext string) {
         t.Fatalf("Unexpected target groups in file discovery: %s", tgs)
     }

-    newf, err := os.Create("fixtures/_test" + ext)
+    newf, err := os.Create("fixtures/_test_" + prefix + ext)
     if err != nil {
         t.Fatal(err)
     }
     defer newf.Close()

-    f, err := os.Open("fixtures/valid" + ext)
+    f, err := os.Open("fixtures/" + prefix + ext)
     if err != nil {
         t.Fatal(err)
     }
@@ -80,8 +84,17 @@ retry:
     for {
         select {
         case <-timeout:
-            t.Fatalf("Expected new target group but got none")
+            if expect {
+                t.Fatalf("Expected new target group but got none")
+            } else {
+                // Invalid file SD content is expected to break down, so stop retrying.
+                break retry
+            }
         case tgs := <-ch:
+            if !expect {
+                t.Fatalf("Unexpected target groups %s, we expected a failure here.", tgs)
+            }
+
             if len(tgs) != 2 {
                 continue retry // Potentially a partial write, just retry.
             }
@@ -90,12 +103,12 @@ retry:
             if _, ok := tg.Labels["foo"]; !ok {
                 t.Fatalf("Label not parsed")
             }
-            if tg.String() != filepath.FromSlash(fmt.Sprintf("fixtures/_test%s:0", ext)) {
+            if tg.String() != filepath.FromSlash(fmt.Sprintf("fixtures/_test_%s%s:0", prefix, ext)) {
                 t.Fatalf("Unexpected target group %s", tg)
             }

             tg = tgs[1]
-            if tg.String() != filepath.FromSlash(fmt.Sprintf("fixtures/_test%s:1", ext)) {
+            if tg.String() != filepath.FromSlash(fmt.Sprintf("fixtures/_test_%s%s:1", prefix, ext)) {
                 t.Fatalf("Unexpected target groups %s", tg)
             }
             break retry
@@ -135,7 +148,7 @@ retry:
     }
     newf.Close()

-    os.Rename(newf.Name(), "fixtures/_test"+ext)
+    os.Rename(newf.Name(), "fixtures/_test_"+prefix+ext)

     cancel()
     <-drained
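
To exercise both the valid and the new invalid_nil cases locally, the standard Go tooling should be enough; the package path below assumes the repository layout at this commit:

    go test ./discovery/file/ -run TestFileSD -v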
discovery/file/fixtures/invalid_nil.json (new file, 9 lines)

@@ -0,0 +1,9 @@
+[
+  {
+    "targets": ["localhost:9090", "example.org:443"],
+    "labels": {
+      "foo": "bar"
+    }
+  },
+  null
+]

discovery/file/fixtures/invalid_nil.yml (new file, 5 lines)

@@ -0,0 +1,5 @@
+- targets: ['localhost:9090', 'example.org:443']
+  labels:
+    foo: bar
+
+- null
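
The YAML fixture triggers the same condition: with gopkg.in/yaml.v2 (assumed here as the decoder), the trailing "- null" item also becomes a nil pointer in the slice, which readFile now rejects instead of panicking. A small sketch under that assumption, again with a hypothetical targetGroup type:

    package main

    import (
        "fmt"

        "gopkg.in/yaml.v2"
    )

    // targetGroup is again a hypothetical stand-in for config.TargetGroup.
    type targetGroup struct {
        Targets []string          `yaml:"targets"`
        Labels  map[string]string `yaml:"labels"`
    }

    func main() {
        // Same shape as fixtures/invalid_nil.yml: one valid group followed by null.
        doc := []byte("- targets: ['localhost:9090', 'example.org:443']\n  labels:\n    foo: bar\n\n- null\n")

        var groups []*targetGroup
        if err := yaml.Unmarshal(doc, &groups); err != nil {
            fmt.Println("unmarshal error:", err)
            return
        }
        for i, tg := range groups {
            if tg == nil {
                fmt.Printf("nil target group item found at index %d\n", i)
                return
            }
            fmt.Printf("group %d: %+v\n", i, *tg)
        }
    }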