2013-02-07 10:49:04 +00:00
|
|
|
// Copyright 2013 Prometheus Team
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
2013-01-07 22:24:26 +00:00
|
|
|
package rules
|
|
|
|
|
|
|
|
import (
|
2013-07-11 16:38:44 +00:00
|
|
|
"bufio"
|
2013-01-07 22:24:26 +00:00
|
|
|
"errors"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
2013-07-11 16:38:44 +00:00
|
|
|
"log"
|
2013-01-07 22:24:26 +00:00
|
|
|
"os"
|
|
|
|
"strings"
|
|
|
|
|
2013-07-11 16:38:44 +00:00
|
|
|
"github.com/prometheus/prometheus/rules/ast"
|
2013-04-10 17:08:14 +00:00
|
|
|
)
|
2013-01-07 22:24:26 +00:00
|
|
|
|
|
|
|
// RulesLexer holds the state for lexing rule files or single expressions.
// It is the lexer handed to the goyacc-generated parser: the parser calls
// Error on it, and parse results are accumulated into parsedRules /
// parsedExpr depending on the start mode.
type RulesLexer struct {
	// Errors encountered during parsing.
	errors []string
	// Dummy token to simulate multiple start symbols (set by newRulesLexer
	// to START_RULES or START_EXPRESSION).
	startToken int
	// Parsed full rules (populated when lexing a whole rule file).
	parsedRules []Rule
	// Parsed single expression (populated in single-expression mode).
	parsedExpr ast.Node

	// Current character.
	current byte
	// Current token buffer; accumulates the bytes of the token being read.
	buf []byte
	// Input text.
	src *bufio.Reader
	// Whether we have a current char.
	// NOTE(review): not referenced by any code visible in this file — verify
	// it is used by the generated lexer before removing.
	empty bool

	// Current input line (1-based).
	line int
	// Current character position within the current input line.
	pos int
}
|
|
|
|
|
|
|
|
func (lexer *RulesLexer) Error(errorStr string) {
|
2013-07-11 16:38:44 +00:00
|
|
|
err := fmt.Sprintf("Error parsing rules at line %v, char %v: %v", lexer.line, lexer.pos, errorStr)
|
2013-01-07 22:24:26 +00:00
|
|
|
lexer.errors = append(lexer.errors, err)
|
|
|
|
}
|
|
|
|
|
2013-07-11 16:38:44 +00:00
|
|
|
func (lexer *RulesLexer) getChar() byte {
|
|
|
|
if lexer.current != 0 {
|
|
|
|
lexer.buf = append(lexer.buf, lexer.current)
|
|
|
|
}
|
|
|
|
lexer.current = 0
|
|
|
|
if b, err := lexer.src.ReadByte(); err == nil {
|
|
|
|
if b == '\n' {
|
|
|
|
lexer.line++
|
|
|
|
lexer.pos = 0
|
|
|
|
} else {
|
|
|
|
lexer.pos++
|
|
|
|
}
|
|
|
|
lexer.current = b
|
|
|
|
} else if err != io.EOF {
|
|
|
|
log.Fatal(err)
|
|
|
|
}
|
|
|
|
return lexer.current
|
|
|
|
}
|
2013-04-10 17:08:14 +00:00
|
|
|
|
2013-07-11 16:38:44 +00:00
|
|
|
// token returns the text accumulated so far for the current token.
func (lexer *RulesLexer) token() string {
	return string(lexer.buf)
}
|
2013-01-07 22:24:26 +00:00
|
|
|
|
2013-07-11 16:38:44 +00:00
|
|
|
func newRulesLexer(src io.Reader, singleExpr bool) *RulesLexer {
|
2013-01-11 00:17:37 +00:00
|
|
|
lexer := &RulesLexer{
|
2013-01-12 20:22:59 +00:00
|
|
|
startToken: START_RULES,
|
2013-07-11 16:38:44 +00:00
|
|
|
src: bufio.NewReader(src),
|
|
|
|
pos: 1,
|
|
|
|
line: 1,
|
2013-01-12 20:22:59 +00:00
|
|
|
}
|
2013-01-11 00:17:37 +00:00
|
|
|
|
2013-01-12 20:22:59 +00:00
|
|
|
if singleExpr {
|
|
|
|
lexer.startToken = START_EXPRESSION
|
|
|
|
}
|
2013-07-11 16:38:44 +00:00
|
|
|
lexer.getChar()
|
|
|
|
return lexer
|
|
|
|
}
|
2013-01-11 00:17:37 +00:00
|
|
|
|
2013-07-11 16:38:44 +00:00
|
|
|
func LoadFromReader(rulesReader io.Reader, singleExpr bool) (interface{}, error) {
|
|
|
|
lexer := newRulesLexer(rulesReader, singleExpr)
|
2013-01-07 22:24:26 +00:00
|
|
|
ret := yyParse(lexer)
|
|
|
|
if ret != 0 && len(lexer.errors) == 0 {
|
|
|
|
lexer.Error("Unknown parser error")
|
|
|
|
}
|
|
|
|
|
|
|
|
if len(lexer.errors) > 0 {
|
|
|
|
err := errors.New(strings.Join(lexer.errors, "\n"))
|
2013-01-11 00:17:37 +00:00
|
|
|
return nil, err
|
2013-01-07 22:24:26 +00:00
|
|
|
}
|
|
|
|
|
2013-01-12 20:22:59 +00:00
|
|
|
if singleExpr {
|
|
|
|
return lexer.parsedExpr, nil
|
|
|
|
} else {
|
|
|
|
return lexer.parsedRules, nil
|
|
|
|
}
|
2013-01-11 00:17:37 +00:00
|
|
|
}
|
|
|
|
|
2013-04-22 22:26:59 +00:00
|
|
|
func LoadRulesFromReader(rulesReader io.Reader) ([]Rule, error) {
|
2013-01-12 20:22:59 +00:00
|
|
|
expr, err := LoadFromReader(rulesReader, false)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2013-04-22 22:26:59 +00:00
|
|
|
return expr.([]Rule), err
|
2013-01-07 22:24:26 +00:00
|
|
|
}
|
|
|
|
|
2013-04-22 22:26:59 +00:00
|
|
|
func LoadRulesFromString(rulesString string) ([]Rule, error) {
|
2013-01-07 22:24:26 +00:00
|
|
|
rulesReader := strings.NewReader(rulesString)
|
2013-01-11 00:17:37 +00:00
|
|
|
return LoadRulesFromReader(rulesReader)
|
2013-01-07 22:24:26 +00:00
|
|
|
}
|
|
|
|
|
2013-04-22 22:26:59 +00:00
|
|
|
func LoadRulesFromFile(fileName string) ([]Rule, error) {
|
2013-01-07 22:24:26 +00:00
|
|
|
rulesReader, err := os.Open(fileName)
|
|
|
|
if err != nil {
|
2013-04-22 22:26:59 +00:00
|
|
|
return []Rule{}, err
|
2013-01-07 22:24:26 +00:00
|
|
|
}
|
2013-02-07 10:38:01 +00:00
|
|
|
defer rulesReader.Close()
|
2013-01-11 00:17:37 +00:00
|
|
|
return LoadRulesFromReader(rulesReader)
|
|
|
|
}
|
|
|
|
|
|
|
|
func LoadExprFromReader(exprReader io.Reader) (ast.Node, error) {
|
2013-01-12 20:22:59 +00:00
|
|
|
expr, err := LoadFromReader(exprReader, true)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return expr.(ast.Node), err
|
2013-01-11 00:17:37 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
func LoadExprFromString(exprString string) (ast.Node, error) {
|
|
|
|
exprReader := strings.NewReader(exprString)
|
|
|
|
return LoadExprFromReader(exprReader)
|
|
|
|
}
|
|
|
|
|
|
|
|
func LoadExprFromFile(fileName string) (ast.Node, error) {
|
|
|
|
exprReader, err := os.Open(fileName)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
2013-01-25 02:32:46 +00:00
|
|
|
defer exprReader.Close()
|
2013-01-11 00:17:37 +00:00
|
|
|
return LoadExprFromReader(exprReader)
|
2013-01-07 22:24:26 +00:00
|
|
|
}
|