2018-04-03 19:37:10 +00:00
|
|
|
// Copyright 2015 The Prometheus Authors
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
2021-12-18 18:01:29 +00:00
|
|
|
//go:build !notextfile
|
2018-04-03 19:37:10 +00:00
|
|
|
// +build !notextfile
|
|
|
|
|
|
|
|
package collector
|
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
2018-05-15 18:52:11 +00:00
|
|
|
"io"
|
2018-04-03 19:37:10 +00:00
|
|
|
"io/ioutil"
|
|
|
|
"os"
|
|
|
|
"path/filepath"
|
2021-04-18 03:04:39 +00:00
|
|
|
"reflect"
|
2018-04-03 19:37:10 +00:00
|
|
|
"sort"
|
|
|
|
"strings"
|
|
|
|
"time"
|
|
|
|
|
2018-11-18 15:23:15 +00:00
|
|
|
"github.com/dimchansky/utfbom"
|
2021-01-30 10:16:53 +00:00
|
|
|
"github.com/prometheus-community/windows_exporter/log"
|
2018-04-03 19:37:10 +00:00
|
|
|
"github.com/prometheus/client_golang/prometheus"
|
|
|
|
dto "github.com/prometheus/client_model/go"
|
|
|
|
"github.com/prometheus/common/expfmt"
|
2018-04-05 05:11:36 +00:00
|
|
|
kingpin "gopkg.in/alecthomas/kingpin.v2"
|
2018-04-03 19:37:10 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
var (
	// textFileDirectory is the value of the "collector.textfile.directory"
	// CLI flag: the directory scanned for *.prom files. Defaults to a
	// "textfile_inputs" folder next to the executable (see getDefaultPath).
	textFileDirectory = kingpin.Flag(
		"collector.textfile.directory",
		"Directory to read text files with metrics from.",
	).Default(getDefaultPath()).String()

	// mtimeDesc describes the per-file modification-time gauge emitted for
	// every textfile that was read and parsed successfully.
	mtimeDesc = prometheus.NewDesc(
		prometheus.BuildFQName(Namespace, "textfile", "mtime_seconds"),
		"Unixtime mtime of textfiles successfully read.",
		[]string{"file"},
		nil,
	)
)
|
|
|
|
|
|
|
|
// textFileCollector reads Prometheus text-format metrics from *.prom files
// in a configurable directory and re-exposes them on each scrape.
type textFileCollector struct {
	// path is the directory scanned for *.prom files.
	path string
	// Only set for testing to get predictable output.
	mtime *float64
}
|
|
|
|
|
|
|
|
// init registers this collector under the name "textfile" so it can be
// enabled/disabled via the standard collector flags.
func init() {
	registerCollector("textfile", NewTextFileCollector)
}
|
|
|
|
|
|
|
|
// NewTextFileCollector returns a new Collector exposing metrics read from files
|
|
|
|
// in the given textfile directory.
|
|
|
|
func NewTextFileCollector() (Collector, error) {
|
|
|
|
return &textFileCollector{
|
|
|
|
path: *textFileDirectory,
|
|
|
|
}, nil
|
|
|
|
}
|
|
|
|
|
2021-05-16 02:29:09 +00:00
|
|
|
// Given a slice of metric families, determine if any two entries are duplicates.
|
2021-04-18 03:04:39 +00:00
|
|
|
// Duplicates will be detected where the metric name, labels and label values are identical.
|
2021-05-16 02:29:09 +00:00
|
|
|
func duplicateMetricEntry(metricFamilies []*dto.MetricFamily) bool {
|
2021-04-18 03:04:39 +00:00
|
|
|
uniqueMetrics := make(map[string]map[string]string)
|
|
|
|
for _, metricFamily := range metricFamilies {
|
|
|
|
metric_name := *metricFamily.Name
|
|
|
|
for _, metric := range metricFamily.Metric {
|
|
|
|
metric_labels := metric.GetLabel()
|
|
|
|
labels := make(map[string]string)
|
|
|
|
for _, label := range metric_labels {
|
|
|
|
labels[label.GetName()] = label.GetValue()
|
|
|
|
}
|
|
|
|
// Check if key is present before appending
|
|
|
|
_, mapContainsKey := uniqueMetrics[metric_name]
|
|
|
|
|
|
|
|
// Duplicate metric found with identical labels & label values
|
|
|
|
if mapContainsKey == true && reflect.DeepEqual(uniqueMetrics[metric_name], labels) {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
uniqueMetrics[metric_name] = labels
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
2018-04-03 19:37:10 +00:00
|
|
|
func convertMetricFamily(metricFamily *dto.MetricFamily, ch chan<- prometheus.Metric) {
|
|
|
|
var valType prometheus.ValueType
|
|
|
|
var val float64
|
|
|
|
|
|
|
|
allLabelNames := map[string]struct{}{}
|
|
|
|
for _, metric := range metricFamily.Metric {
|
|
|
|
labels := metric.GetLabel()
|
|
|
|
for _, label := range labels {
|
|
|
|
if _, ok := allLabelNames[label.GetName()]; !ok {
|
|
|
|
allLabelNames[label.GetName()] = struct{}{}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for _, metric := range metricFamily.Metric {
|
|
|
|
if metric.TimestampMs != nil {
|
|
|
|
log.Warnf("Ignoring unsupported custom timestamp on textfile collector metric %v", metric)
|
|
|
|
}
|
|
|
|
|
|
|
|
labels := metric.GetLabel()
|
|
|
|
var names []string
|
|
|
|
var values []string
|
|
|
|
for _, label := range labels {
|
|
|
|
names = append(names, label.GetName())
|
|
|
|
values = append(values, label.GetValue())
|
|
|
|
}
|
|
|
|
|
|
|
|
for k := range allLabelNames {
|
|
|
|
present := false
|
|
|
|
for _, name := range names {
|
|
|
|
if k == name {
|
|
|
|
present = true
|
|
|
|
break
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if present == false {
|
|
|
|
names = append(names, k)
|
|
|
|
values = append(values, "")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
metricType := metricFamily.GetType()
|
|
|
|
switch metricType {
|
|
|
|
case dto.MetricType_COUNTER:
|
|
|
|
valType = prometheus.CounterValue
|
|
|
|
val = metric.Counter.GetValue()
|
|
|
|
|
|
|
|
case dto.MetricType_GAUGE:
|
|
|
|
valType = prometheus.GaugeValue
|
|
|
|
val = metric.Gauge.GetValue()
|
|
|
|
|
|
|
|
case dto.MetricType_UNTYPED:
|
|
|
|
valType = prometheus.UntypedValue
|
|
|
|
val = metric.Untyped.GetValue()
|
|
|
|
|
|
|
|
case dto.MetricType_SUMMARY:
|
|
|
|
quantiles := map[float64]float64{}
|
|
|
|
for _, q := range metric.Summary.Quantile {
|
|
|
|
quantiles[q.GetQuantile()] = q.GetValue()
|
|
|
|
}
|
|
|
|
ch <- prometheus.MustNewConstSummary(
|
|
|
|
prometheus.NewDesc(
|
|
|
|
*metricFamily.Name,
|
|
|
|
metricFamily.GetHelp(),
|
|
|
|
names, nil,
|
|
|
|
),
|
|
|
|
metric.Summary.GetSampleCount(),
|
|
|
|
metric.Summary.GetSampleSum(),
|
|
|
|
quantiles, values...,
|
|
|
|
)
|
|
|
|
case dto.MetricType_HISTOGRAM:
|
|
|
|
buckets := map[float64]uint64{}
|
|
|
|
for _, b := range metric.Histogram.Bucket {
|
|
|
|
buckets[b.GetUpperBound()] = b.GetCumulativeCount()
|
|
|
|
}
|
|
|
|
ch <- prometheus.MustNewConstHistogram(
|
|
|
|
prometheus.NewDesc(
|
|
|
|
*metricFamily.Name,
|
|
|
|
metricFamily.GetHelp(),
|
|
|
|
names, nil,
|
|
|
|
),
|
|
|
|
metric.Histogram.GetSampleCount(),
|
|
|
|
metric.Histogram.GetSampleSum(),
|
|
|
|
buckets, values...,
|
|
|
|
)
|
|
|
|
default:
|
2018-04-05 05:11:36 +00:00
|
|
|
log.Errorf("unknown metric type for file")
|
|
|
|
continue
|
2018-04-03 19:37:10 +00:00
|
|
|
}
|
|
|
|
if metricType == dto.MetricType_GAUGE || metricType == dto.MetricType_COUNTER || metricType == dto.MetricType_UNTYPED {
|
|
|
|
ch <- prometheus.MustNewConstMetric(
|
|
|
|
prometheus.NewDesc(
|
|
|
|
*metricFamily.Name,
|
|
|
|
metricFamily.GetHelp(),
|
|
|
|
names, nil,
|
|
|
|
),
|
|
|
|
valType, val, values...,
|
|
|
|
)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (c *textFileCollector) exportMTimes(mtimes map[string]time.Time, ch chan<- prometheus.Metric) {
|
|
|
|
// Export the mtimes of the successful files.
|
|
|
|
if len(mtimes) > 0 {
|
|
|
|
// Sorting is needed for predictable output comparison in tests.
|
|
|
|
filenames := make([]string, 0, len(mtimes))
|
|
|
|
for filename := range mtimes {
|
|
|
|
filenames = append(filenames, filename)
|
|
|
|
}
|
|
|
|
sort.Strings(filenames)
|
|
|
|
|
|
|
|
for _, filename := range filenames {
|
|
|
|
mtime := float64(mtimes[filename].UnixNano() / 1e9)
|
|
|
|
if c.mtime != nil {
|
|
|
|
mtime = *c.mtime
|
|
|
|
}
|
|
|
|
ch <- prometheus.MustNewConstMetric(mtimeDesc, prometheus.GaugeValue, mtime, filename)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-05-15 18:52:11 +00:00
|
|
|
// carriageReturnFilteringReader wraps an io.Reader and strips '\r' bytes
// from everything read through it, so Windows CRLF textfiles parse as if
// they used plain LF line endings.
type carriageReturnFilteringReader struct {
	r io.Reader
}

// Read returns data from the underlying io.Reader, but with \r filtered out.
// It reads directly into p and compacts in place, avoiding the per-call
// scratch-buffer allocation of the previous implementation.
func (cr carriageReturnFilteringReader) Read(p []byte) (int, error) {
	n, err := cr.r.Read(p)

	if err != nil && err != io.EOF {
		return n, err
	}

	// Compact the read bytes in place, dropping carriage returns.
	pi := 0
	for i := 0; i < n; i++ {
		if p[i] != '\r' {
			p[pi] = p[i]
			pi++
		}
	}

	return pi, err
}
|
|
|
|
|
2018-04-03 19:37:10 +00:00
|
|
|
// Update implements the Collector interface.
|
2019-04-05 13:59:40 +00:00
|
|
|
func (c *textFileCollector) Collect(ctx *ScrapeContext, ch chan<- prometheus.Metric) error {
|
2018-04-03 19:37:10 +00:00
|
|
|
error := 0.0
|
|
|
|
mtimes := map[string]time.Time{}
|
|
|
|
|
|
|
|
// Iterate over files and accumulate their metrics.
|
|
|
|
files, err := ioutil.ReadDir(c.path)
|
|
|
|
if err != nil && c.path != "" {
|
|
|
|
log.Errorf("Error reading textfile collector directory %q: %s", c.path, err)
|
|
|
|
error = 1.0
|
|
|
|
}
|
|
|
|
|
2021-05-16 02:29:09 +00:00
|
|
|
// Create empty metricFamily slice here and append parsedFamilies to it inside the loop.
|
|
|
|
// Once loop is complete, raise error if any duplicates are present.
|
|
|
|
// This will ensure that duplicate metrics are correctly detected between multiple .prom files.
|
|
|
|
var metricFamilies = []*dto.MetricFamily{}
|
2018-04-03 19:37:10 +00:00
|
|
|
fileLoop:
|
|
|
|
for _, f := range files {
|
|
|
|
if !strings.HasSuffix(f.Name(), ".prom") {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
path := filepath.Join(c.path, f.Name())
|
2018-05-15 18:52:11 +00:00
|
|
|
log.Debugf("Processing file %q", path)
|
2018-04-03 19:37:10 +00:00
|
|
|
file, err := os.Open(path)
|
|
|
|
if err != nil {
|
|
|
|
log.Errorf("Error opening %q: %v", path, err)
|
|
|
|
error = 1.0
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
var parser expfmt.TextParser
|
2018-11-29 16:31:48 +00:00
|
|
|
r, encoding := utfbom.Skip(carriageReturnFilteringReader{r: file})
|
2019-01-20 11:56:52 +00:00
|
|
|
if err = checkBOM(encoding); err != nil {
|
2018-11-29 16:31:48 +00:00
|
|
|
log.Errorf("Invalid file encoding detected in %s: %s - file must be UTF8", path, err.Error())
|
|
|
|
error = 1.0
|
|
|
|
continue
|
|
|
|
}
|
2018-11-18 15:23:15 +00:00
|
|
|
parsedFamilies, err := parser.TextToMetricFamilies(r)
|
2018-10-05 05:29:56 +00:00
|
|
|
closeErr := file.Close()
|
|
|
|
if closeErr != nil {
|
|
|
|
log.Warnf("Error closing file: %v", err)
|
|
|
|
}
|
2018-04-03 19:37:10 +00:00
|
|
|
if err != nil {
|
|
|
|
log.Errorf("Error parsing %q: %v", path, err)
|
|
|
|
error = 1.0
|
|
|
|
continue
|
|
|
|
}
|
2022-04-06 09:48:37 +00:00
|
|
|
|
|
|
|
// Use temporary array to check for duplicates
|
|
|
|
var families_array []*dto.MetricFamily
|
|
|
|
|
2018-04-03 19:37:10 +00:00
|
|
|
for _, mf := range parsedFamilies {
|
2022-04-06 09:48:37 +00:00
|
|
|
families_array = append(families_array, mf)
|
2018-04-03 19:37:10 +00:00
|
|
|
for _, m := range mf.Metric {
|
|
|
|
if m.TimestampMs != nil {
|
|
|
|
log.Errorf("Textfile %q contains unsupported client-side timestamps, skipping entire file", path)
|
|
|
|
error = 1.0
|
|
|
|
continue fileLoop
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if mf.Help == nil {
|
|
|
|
help := fmt.Sprintf("Metric read from %s", path)
|
|
|
|
mf.Help = &help
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-04-06 09:48:37 +00:00
|
|
|
// If duplicate metrics are detected in a *single* file, skip processing of file metrics
|
|
|
|
if duplicateMetricEntry(families_array) {
|
|
|
|
log.Errorf("Duplicate metrics detected in file %s. Skipping file processing.", f.Name())
|
|
|
|
error = 1.0
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
2018-04-03 19:37:10 +00:00
|
|
|
// Only set this once it has been parsed and validated, so that
|
|
|
|
// a failure does not appear fresh.
|
|
|
|
mtimes[f.Name()] = f.ModTime()
|
|
|
|
|
2021-05-16 02:29:09 +00:00
|
|
|
for _, metricFamily := range parsedFamilies {
|
|
|
|
metricFamilies = append(metricFamilies, metricFamily)
|
2021-04-18 03:04:39 +00:00
|
|
|
}
|
2021-05-16 02:29:09 +00:00
|
|
|
}
|
2021-04-18 03:04:39 +00:00
|
|
|
|
2022-04-06 09:48:37 +00:00
|
|
|
// If duplicates are detected across *multiple* files, return error.
|
2021-05-16 02:29:09 +00:00
|
|
|
if duplicateMetricEntry(metricFamilies) {
|
2022-04-06 09:48:37 +00:00
|
|
|
log.Errorf("Duplicate metrics detected across multiple files")
|
2021-05-16 02:29:09 +00:00
|
|
|
error = 1.0
|
|
|
|
} else {
|
|
|
|
for _, mf := range metricFamilies {
|
2018-04-03 19:37:10 +00:00
|
|
|
convertMetricFamily(mf, ch)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-01-01 01:44:04 +00:00
|
|
|
c.exportMTimes(mtimes, ch)
|
|
|
|
|
2018-04-03 19:37:10 +00:00
|
|
|
// Export if there were errors.
|
|
|
|
ch <- prometheus.MustNewConstMetric(
|
|
|
|
prometheus.NewDesc(
|
2020-05-24 18:36:25 +00:00
|
|
|
prometheus.BuildFQName(Namespace, "textfile", "scrape_error"),
|
2018-04-03 19:37:10 +00:00
|
|
|
"1 if there was an error opening or reading a file, 0 otherwise",
|
|
|
|
nil, nil,
|
|
|
|
),
|
|
|
|
prometheus.GaugeValue, error,
|
|
|
|
)
|
|
|
|
return nil
|
|
|
|
}
|
2018-11-29 16:31:48 +00:00
|
|
|
|
|
|
|
func checkBOM(encoding utfbom.Encoding) error {
|
2018-12-12 17:30:25 +00:00
|
|
|
if encoding == utfbom.Unknown || encoding == utfbom.UTF8 {
|
2018-11-29 16:31:48 +00:00
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2018-12-12 17:30:25 +00:00
|
|
|
return fmt.Errorf(encoding.String())
|
2018-11-29 16:31:48 +00:00
|
|
|
}
|
2021-10-03 18:12:36 +00:00
|
|
|
|
|
|
|
// getDefaultPath returns the default textfile directory: a
// "textfile_inputs" folder next to the running executable.
func getDefaultPath() string {
	// The error from os.Executable is deliberately ignored; on failure the
	// empty path degrades to a relative "textfile_inputs" directory.
	exe, _ := os.Executable()
	dir := filepath.Dir(exe)
	return filepath.Join(dir, "textfile_inputs")
}
|