From f4669a812c3299e0e1f2cf02542b28f3884a609b Mon Sep 17 00:00:00 2001 From: "Matt T. Proud" Date: Sat, 3 Aug 2013 18:46:02 +0200 Subject: [PATCH] Extract index storage into separate types. --- Makefile | 3 +- rules/lexer.l.go | 2536 +++++++++++++++++++------------------ rules/parser.y.go | 223 ++-- storage/metric/index.go | 330 +++++ storage/metric/leveldb.go | 397 +++--- storage/raw/interface.go | 23 +- 6 files changed, 1910 insertions(+), 1602 deletions(-) create mode 100644 storage/metric/index.go diff --git a/Makefile b/Makefile index 32cbbb561..56f7ed8d4 100644 --- a/Makefile +++ b/Makefile @@ -83,8 +83,7 @@ source_path: [ -d "$(FULL_GOPATH)" ] test: build - $(GOENV) find . -maxdepth 1 -mindepth 1 -type d -and -not -path $(BUILD_PATH) -exec $(GOCC) test {}/... $(GO_TEST_FLAGS) \; - $(GO) test $(GO_TEST_FLAGS) + $(GO) test $(GO_TEST_FLAGS) ./... tools: dependencies preparation $(MAKE) -C tools diff --git a/rules/lexer.l.go b/rules/lexer.l.go index 60f03e778..10a04a883 100644 --- a/rules/lexer.l.go +++ b/rules/lexer.l.go @@ -13,1574 +13,1582 @@ package rules import ( - "fmt" - "strconv" - "strings" + "fmt" + "strconv" + "strings" - clientmodel "github.com/prometheus/client_golang/model" + clientmodel "github.com/prometheus/client_golang/model" ) // Lex is called by the parser generated by "go tool yacc" to obtain each // token. The method is opened before the matching rules block and closed at // the end of the file. func (lexer *RulesLexer) Lex(lval *yySymType) int { - // Internal lexer states. - const ( - S_INITIAL = iota - S_COMMENTS - ) + // Internal lexer states. + const ( + S_INITIAL = iota + S_COMMENTS + ) - // We simulate multiple start symbols for closely-related grammars via dummy tokens. See - // http://www.gnu.org/software/bison/manual/html_node/Multiple-start_002dsymbols.html - // Reason: we want to be able to parse lists of named rules as well as single expressions. - if lexer.startToken != 0 { - startToken := lexer.startToken - lexer.startToken = 0 - return startToken - } - - c := lexer.current - currentState := 0 - - if lexer.empty { - c, lexer.empty = lexer.getChar(), false - } + // We simulate multiple start symbols for closely-related grammars via dummy tokens. See + // http://www.gnu.org/software/bison/manual/html_node/Multiple-start_002dsymbols.html + // Reason: we want to be able to parse lists of named rules as well as single expressions. 
+ if lexer.startToken != 0 { + startToken := lexer.startToken + lexer.startToken = 0 + return startToken + } + c := lexer.current + currentState := 0 + if lexer.empty { + c, lexer.empty = lexer.getChar(), false + } yystate0: - lexer.buf = lexer.buf[:0] // The code before the first rule executed before every scan cycle (rule #0 / state 0 action) + lexer.buf = lexer.buf[:0] // The code before the first rule executed before every scan cycle (rule #0 / state 0 action) -switch yyt := currentState; yyt { -default: -panic(fmt.Errorf(`invalid start condition %d`, yyt)) -case 0: // start condition: INITIAL -goto yystart1 -case 1: // start condition: S_COMMENTS -goto yystart127 -} + switch yyt := currentState; yyt { + default: + panic(fmt.Errorf(`invalid start condition %d`, yyt)) + case 0: // start condition: INITIAL + goto yystart1 + case 1: // start condition: S_COMMENTS + goto yystart127 + } -goto yystate1 // silence unused label error + goto yystate1 // silence unused label error yystate1: -c = lexer.getChar() + c = lexer.getChar() yystart1: -switch { -default: -goto yyabort -case c == '!': -goto yystate3 -case c == '"': -goto yystate5 -case c == '%' || c == '*': -goto yystate8 -case c == '(' || c == ')' || c == ',' || c == '[' || c == ']' || c == '{' || c == '}': -goto yystate12 -case c == '+': -goto yystate13 -case c == '-': -goto yystate14 -case c == '/': -goto yystate17 -case c == ':' || c == 'E' || c == 'G' || c == 'H' || c >= 'J' && c <= 'L' || c == 'N' || c == 'Q' || c == 'R' || c >= 'T' && c <= 'V' || c >= 'X' && c <= 'Z' || c == '_' || c == 'e' || c == 'g' || c == 'h' || c >= 'j' && c <= 'l' || c == 'n' || c == 'q' || c == 'r' || c >= 't' && c <= 'v' || c >= 'x' && c <= 'z': -goto yystate23 -case c == '<' || c == '>': -goto yystate24 -case c == '=': -goto yystate25 -case c == 'A': -goto yystate26 -case c == 'B': -goto yystate35 -case c == 'C': -goto yystate37 -case c == 'D': -goto yystate41 -case c == 'F': -goto yystate52 -case c == 'I': -goto yystate55 -case c == 'M': -goto yystate57 -case c == 'O': -goto yystate60 -case c == 'P': -goto yystate61 -case c == 'S': -goto yystate70 -case c == 'W': -goto yystate77 -case c == '\'': -goto yystate9 -case c == '\t' || c == '\n' || c == '\r' || c == ' ': -goto yystate2 -case c == 'a': -goto yystate81 -case c == 'b': -goto yystate88 -case c == 'c': -goto yystate89 -case c == 'd': -goto yystate93 -case c == 'f': -goto yystate103 -case c == 'i': -goto yystate105 -case c == 'm': -goto yystate106 -case c == 'o': -goto yystate109 -case c == 'p': -goto yystate110 -case c == 's': -goto yystate118 -case c == 'w': -goto yystate124 -case c >= '0' && c <= '9': -goto yystate21 -} + switch { + default: + goto yyabort + case c == '!': + goto yystate3 + case c == '"': + goto yystate5 + case c == '%' || c == '*': + goto yystate8 + case c == '(' || c == ')' || c == ',' || c == '[' || c == ']' || c == '{' || c == '}': + goto yystate12 + case c == '+': + goto yystate13 + case c == '-': + goto yystate14 + case c == '/': + goto yystate17 + case c == ':' || c == 'E' || c == 'G' || c == 'H' || c >= 'J' && c <= 'L' || c == 'N' || c == 'Q' || c == 'R' || c >= 'T' && c <= 'V' || c >= 'X' && c <= 'Z' || c == '_' || c == 'e' || c == 'g' || c == 'h' || c >= 'j' && c <= 'l' || c == 'n' || c == 'q' || c == 'r' || c >= 't' && c <= 'v' || c >= 'x' && c <= 'z': + goto yystate23 + case c == '<' || c == '>': + goto yystate24 + case c == '=': + goto yystate25 + case c == 'A': + goto yystate26 + case c == 'B': + goto yystate35 + case c == 'C': + goto yystate37 + case c == 'D': + 
goto yystate41 + case c == 'F': + goto yystate52 + case c == 'I': + goto yystate55 + case c == 'M': + goto yystate57 + case c == 'O': + goto yystate60 + case c == 'P': + goto yystate61 + case c == 'S': + goto yystate70 + case c == 'W': + goto yystate77 + case c == '\'': + goto yystate9 + case c == '\t' || c == '\n' || c == '\r' || c == ' ': + goto yystate2 + case c == 'a': + goto yystate81 + case c == 'b': + goto yystate88 + case c == 'c': + goto yystate89 + case c == 'd': + goto yystate93 + case c == 'f': + goto yystate103 + case c == 'i': + goto yystate105 + case c == 'm': + goto yystate106 + case c == 'o': + goto yystate109 + case c == 'p': + goto yystate110 + case c == 's': + goto yystate118 + case c == 'w': + goto yystate124 + case c >= '0' && c <= '9': + goto yystate21 + } yystate2: -c = lexer.getChar() -goto yyrule25 + c = lexer.getChar() + goto yyrule25 yystate3: -c = lexer.getChar() -switch { -default: -goto yyabort -case c == '=': -goto yystate4 -} + c = lexer.getChar() + switch { + default: + goto yyabort + case c == '=': + goto yystate4 + } yystate4: -c = lexer.getChar() -goto yyrule16 + c = lexer.getChar() + goto yyrule16 yystate5: -c = lexer.getChar() -switch { -default: -goto yyabort -case c == '"': -goto yystate6 -case c == '\\': -goto yystate7 -case c >= '\x01' && c <= '!' || c >= '#' && c <= '[' || c >= ']' && c <= 'ÿ': -goto yystate5 -} + c = lexer.getChar() + switch { + default: + goto yyabort + case c == '"': + goto yystate6 + case c == '\\': + goto yystate7 + case c >= '\x01' && c <= '!' || c >= '#' && c <= '[' || c >= ']' && c <= 'ÿ': + goto yystate5 + } yystate6: -c = lexer.getChar() -goto yyrule22 + c = lexer.getChar() + goto yyrule22 yystate7: -c = lexer.getChar() -switch { -default: -goto yyabort -case c >= '\x01' && c <= '\t' || c >= '\v' && c <= 'ÿ': -goto yystate5 -} + c = lexer.getChar() + switch { + default: + goto yyabort + case c >= '\x01' && c <= '\t' || c >= '\v' && c <= 'ÿ': + goto yystate5 + } yystate8: -c = lexer.getChar() -goto yyrule18 + c = lexer.getChar() + goto yyrule18 yystate9: -c = lexer.getChar() -switch { -default: -goto yyabort -case c == '\'': -goto yystate10 -case c == '\\': -goto yystate11 -case c >= '\x01' && c <= '&' || c >= '(' && c <= '[' || c >= ']' && c <= 'ÿ': -goto yystate9 -} + c = lexer.getChar() + switch { + default: + goto yyabort + case c == '\'': + goto yystate10 + case c == '\\': + goto yystate11 + case c >= '\x01' && c <= '&' || c >= '(' && c <= '[' || c >= ']' && c <= 'ÿ': + goto yystate9 + } yystate10: -c = lexer.getChar() -goto yyrule23 + c = lexer.getChar() + goto yyrule23 yystate11: -c = lexer.getChar() -switch { -default: -goto yyabort -case c >= '\x01' && c <= '\t' || c >= '\v' && c <= 'ÿ': -goto yystate9 -} + c = lexer.getChar() + switch { + default: + goto yyabort + case c >= '\x01' && c <= '\t' || c >= '\v' && c <= 'ÿ': + goto yystate9 + } yystate12: -c = lexer.getChar() -goto yyrule24 + c = lexer.getChar() + goto yyrule24 yystate13: -c = lexer.getChar() -goto yyrule17 + c = lexer.getChar() + goto yyrule17 yystate14: -c = lexer.getChar() -switch { -default: -goto yyrule17 -case c >= '0' && c <= '9': -goto yystate15 -} + c = lexer.getChar() + switch { + default: + goto yyrule17 + case c >= '0' && c <= '9': + goto yystate15 + } yystate15: -c = lexer.getChar() -switch { -default: -goto yyrule21 -case c == '.': -goto yystate16 -case c >= '0' && c <= '9': -goto yystate15 -} + c = lexer.getChar() + switch { + default: + goto yyrule21 + case c == '.': + goto yystate16 + case c >= '0' && c <= '9': + goto yystate15 + } 
yystate16: -c = lexer.getChar() -switch { -default: -goto yyrule21 -case c >= '0' && c <= '9': -goto yystate16 -} + c = lexer.getChar() + switch { + default: + goto yyrule21 + case c >= '0' && c <= '9': + goto yystate16 + } yystate17: -c = lexer.getChar() -switch { -default: -goto yyrule18 -case c == '*': -goto yystate18 -case c == '/': -goto yystate19 -} + c = lexer.getChar() + switch { + default: + goto yyrule18 + case c == '*': + goto yystate18 + case c == '/': + goto yystate19 + } yystate18: -c = lexer.getChar() -goto yyrule1 + c = lexer.getChar() + goto yyrule1 yystate19: -c = lexer.getChar() -switch { -default: -goto yyabort -case c == '\n': -goto yystate20 -case c >= '\x01' && c <= '\t' || c == '\v' || c == '\f' || c >= '\x0e' && c <= 'ÿ': -goto yystate19 -} + c = lexer.getChar() + switch { + default: + goto yyabort + case c == '\n': + goto yystate20 + case c >= '\x01' && c <= '\t' || c == '\v' || c == '\f' || c >= '\x0e' && c <= 'ÿ': + goto yystate19 + } yystate20: -c = lexer.getChar() -goto yyrule4 + c = lexer.getChar() + goto yyrule4 yystate21: -c = lexer.getChar() -switch { -default: -goto yyrule21 -case c == '.': -goto yystate16 -case c == 'd' || c == 'h' || c == 'm' || c == 's' || c == 'w' || c == 'y': -goto yystate22 -case c >= '0' && c <= '9': -goto yystate21 -} + c = lexer.getChar() + switch { + default: + goto yyrule21 + case c == '.': + goto yystate16 + case c == 'd' || c == 'h' || c == 'm' || c == 's' || c == 'w' || c == 'y': + goto yystate22 + case c >= '0' && c <= '9': + goto yystate21 + } yystate22: -c = lexer.getChar() -goto yyrule19 + c = lexer.getChar() + goto yyrule19 yystate23: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate24: -c = lexer.getChar() -switch { -default: -goto yyrule15 -case c == '=': -goto yystate4 -} + c = lexer.getChar() + switch { + default: + goto yyrule15 + case c == '=': + goto yystate4 + } yystate25: -c = lexer.getChar() -switch { -default: -goto yyrule24 -case c == '=': -goto yystate4 -} + c = lexer.getChar() + switch { + default: + goto yyrule24 + case c == '=': + goto yystate4 + } yystate26: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'L': -goto yystate27 -case c == 'N': -goto yystate31 -case c == 'V': -goto yystate33 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'K' || c == 'M' || c >= 'O' && c <= 'U' || c >= 'W' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'L': + goto yystate27 + case c == 'N': + goto yystate31 + case c == 'V': + goto yystate33 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'K' || c == 'M' || c >= 'O' && c <= 'U' || c >= 'W' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate27: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'E': -goto yystate28 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'D' || c >= 'F' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'E': + goto yystate28 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'D' || c >= 'F' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate28: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case 
c == 'R': -goto yystate29 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'R': + goto yystate29 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate29: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'T': -goto yystate30 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'S' || c >= 'U' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'T': + goto yystate30 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'S' || c >= 'U' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate30: -c = lexer.getChar() -switch { -default: -goto yyrule5 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule5 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate31: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'D': -goto yystate32 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'C' || c >= 'E' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'D': + goto yystate32 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'C' || c >= 'E' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate32: -c = lexer.getChar() -switch { -default: -goto yyrule15 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule15 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate33: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'G': -goto yystate34 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'F' || c >= 'H' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'G': + goto yystate34 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'F' || c >= 'H' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate34: -c = lexer.getChar() -switch { -default: -goto yyrule13 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule13 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate35: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'Y': -goto yystate36 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'X' || c == 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'Y': + goto yystate36 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'X' || c == 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate36: -c = lexer.getChar() -switch { -default: -goto yyrule12 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule12 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto 
yystate23 + } yystate37: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'O': -goto yystate38 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'N' || c >= 'P' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'O': + goto yystate38 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'N' || c >= 'P' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate38: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'U': -goto yystate39 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'T' || c >= 'V' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'U': + goto yystate39 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'T' || c >= 'V' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate39: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'N': -goto yystate40 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'M' || c >= 'O' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'N': + goto yystate40 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'M' || c >= 'O' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate40: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'T': -goto yystate34 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'S' || c >= 'U' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'T': + goto yystate34 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'S' || c >= 'U' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate41: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'E': -goto yystate42 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'D' || c >= 'F' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'E': + goto yystate42 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'D' || c >= 'F' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate42: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'S': -goto yystate43 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'R' || c >= 'T' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'S': + goto yystate43 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'R' || c >= 'T' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate43: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'C': -goto yystate44 -case c >= '0' && c <= ':' || c == 'A' || c == 'B' || c >= 'D' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'C': + goto yystate44 + case c >= '0' && c <= ':' || c == 'A' || c == 'B' || c >= 'D' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate44: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'R': -goto yystate45 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'R': + goto yystate45 + case c 
>= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate45: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'I': -goto yystate46 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'H' || c >= 'J' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'I': + goto yystate46 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'H' || c >= 'J' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate46: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'P': -goto yystate47 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'O' || c >= 'Q' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'P': + goto yystate47 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'O' || c >= 'Q' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate47: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'T': -goto yystate48 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'S' || c >= 'U' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'T': + goto yystate48 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'S' || c >= 'U' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate48: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'I': -goto yystate49 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'H' || c >= 'J' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'I': + goto yystate49 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'H' || c >= 'J' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate49: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'O': -goto yystate50 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'N' || c >= 'P' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'O': + goto yystate50 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'N' || c >= 'P' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate50: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'N': -goto yystate51 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'M' || c >= 'O' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'N': + goto yystate51 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'M' || c >= 'O' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate51: -c = lexer.getChar() -switch { -default: -goto yyrule10 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule10 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate52: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'O': -goto yystate53 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'N' || c >= 'P' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'O': + goto yystate53 + case c >= 
'0' && c <= ':' || c >= 'A' && c <= 'N' || c >= 'P' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate53: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'R': -goto yystate54 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'R': + goto yystate54 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate54: -c = lexer.getChar() -switch { -default: -goto yyrule7 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule7 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate55: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'F': -goto yystate56 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'E' || c >= 'G' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'F': + goto yystate56 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'E' || c >= 'G' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate56: -c = lexer.getChar() -switch { -default: -goto yyrule6 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule6 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate57: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'A': -goto yystate58 -case c == 'I': -goto yystate59 -case c >= '0' && c <= ':' || c >= 'B' && c <= 'H' || c >= 'J' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'A': + goto yystate58 + case c == 'I': + goto yystate59 + case c >= '0' && c <= ':' || c >= 'B' && c <= 'H' || c >= 'J' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate58: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'X': -goto yystate34 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'W' || c == 'Y' || c == 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'X': + goto yystate34 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'W' || c == 'Y' || c == 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate59: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'N': -goto yystate34 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'M' || c >= 'O' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'N': + goto yystate34 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'M' || c >= 'O' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate60: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'R': -goto yystate32 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'R': + goto yystate32 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c 
<= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate61: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'E': -goto yystate62 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'D' || c >= 'F' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'E': + goto yystate62 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'D' || c >= 'F' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate62: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'R': -goto yystate63 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'R': + goto yystate63 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate63: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'M': -goto yystate64 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'L' || c >= 'N' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'M': + goto yystate64 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'L' || c >= 'N' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate64: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'A': -goto yystate65 -case c >= '0' && c <= ':' || c >= 'B' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'A': + goto yystate65 + case c >= '0' && c <= ':' || c >= 'B' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate65: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'N': -goto yystate66 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'M' || c >= 'O' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'N': + goto yystate66 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'M' || c >= 'O' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate66: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'E': -goto yystate67 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'D' || c >= 'F' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'E': + goto yystate67 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'D' || c >= 'F' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate67: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'N': -goto yystate68 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'M' || c >= 'O' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'N': + goto yystate68 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'M' || c >= 'O' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate68: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'T': -goto yystate69 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'S' || c >= 'U' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'T': + goto yystate69 + case 
c >= '0' && c <= ':' || c >= 'A' && c <= 'S' || c >= 'U' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate69: -c = lexer.getChar() -switch { -default: -goto yyrule11 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule11 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate70: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'U': -goto yystate71 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'T' || c >= 'V' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'U': + goto yystate71 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'T' || c >= 'V' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate71: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'M': -goto yystate72 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'L' || c >= 'N' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'M': + goto yystate72 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'L' || c >= 'N' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate72: -c = lexer.getChar() -switch { -default: -goto yyrule13 -case c == 'M': -goto yystate73 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'L' || c >= 'N' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule13 + case c == 'M': + goto yystate73 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'L' || c >= 'N' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate73: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'A': -goto yystate74 -case c >= '0' && c <= ':' || c >= 'B' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'A': + goto yystate74 + case c >= '0' && c <= ':' || c >= 'B' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate74: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'R': -goto yystate75 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'R': + goto yystate75 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Q' || c >= 'S' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate75: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'Y': -goto yystate76 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'X' || c == 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'Y': + goto yystate76 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'X' || c == 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate76: -c = lexer.getChar() -switch { -default: -goto yyrule9 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule9 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate77: -c = lexer.getChar() -switch { -default: -goto 
yyrule20 -case c == 'I': -goto yystate78 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'H' || c >= 'J' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'I': + goto yystate78 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'H' || c >= 'J' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate78: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'T': -goto yystate79 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'S' || c >= 'U' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'T': + goto yystate79 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'S' || c >= 'U' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate79: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'H': -goto yystate80 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'G' || c >= 'I' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'H': + goto yystate80 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'G' || c >= 'I' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate80: -c = lexer.getChar() -switch { -default: -goto yyrule8 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule8 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate81: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'l': -goto yystate82 -case c == 'n': -goto yystate85 -case c == 'v': -goto yystate86 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'k' || c == 'm' || c >= 'o' && c <= 'u' || c >= 'w' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'l': + goto yystate82 + case c == 'n': + goto yystate85 + case c == 'v': + goto yystate86 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'k' || c == 'm' || c >= 'o' && c <= 'u' || c >= 'w' && c <= 'z': + goto yystate23 + } yystate82: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'e': -goto yystate83 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'd' || c >= 'f' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'e': + goto yystate83 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'd' || c >= 'f' && c <= 'z': + goto yystate23 + } yystate83: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'r': -goto yystate84 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'r': + goto yystate84 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': + goto yystate23 + } yystate84: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 't': -goto yystate30 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 's' || c >= 'u' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 't': + goto 
yystate30 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 's' || c >= 'u' && c <= 'z': + goto yystate23 + } yystate85: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'd': -goto yystate32 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'c' || c >= 'e' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'd': + goto yystate32 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'c' || c >= 'e' && c <= 'z': + goto yystate23 + } yystate86: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'g': -goto yystate87 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'f' || c >= 'h' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'g': + goto yystate87 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'f' || c >= 'h' && c <= 'z': + goto yystate23 + } yystate87: -c = lexer.getChar() -switch { -default: -goto yyrule14 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule14 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'z': + goto yystate23 + } yystate88: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'y': -goto yystate36 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'x' || c == 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'y': + goto yystate36 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'x' || c == 'z': + goto yystate23 + } yystate89: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'o': -goto yystate90 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'n' || c >= 'p' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'o': + goto yystate90 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'n' || c >= 'p' && c <= 'z': + goto yystate23 + } yystate90: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'u': -goto yystate91 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 't' || c >= 'v' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'u': + goto yystate91 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 't' || c >= 'v' && c <= 'z': + goto yystate23 + } yystate91: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'n': -goto yystate92 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'm' || c >= 'o' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'n': + goto yystate92 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'm' || c >= 'o' && c <= 'z': + goto yystate23 + } yystate92: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 't': -goto yystate87 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 's' || c >= 'u' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 't': + goto yystate87 + case c >= '0' && 
c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 's' || c >= 'u' && c <= 'z': + goto yystate23 + } yystate93: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'e': -goto yystate94 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'd' || c >= 'f' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'e': + goto yystate94 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'd' || c >= 'f' && c <= 'z': + goto yystate23 + } yystate94: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 's': -goto yystate95 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'r' || c >= 't' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 's': + goto yystate95 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'r' || c >= 't' && c <= 'z': + goto yystate23 + } yystate95: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'c': -goto yystate96 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c == 'a' || c == 'b' || c >= 'd' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'c': + goto yystate96 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c == 'a' || c == 'b' || c >= 'd' && c <= 'z': + goto yystate23 + } yystate96: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'r': -goto yystate97 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'r': + goto yystate97 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': + goto yystate23 + } yystate97: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'i': -goto yystate98 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'h' || c >= 'j' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'i': + goto yystate98 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'h' || c >= 'j' && c <= 'z': + goto yystate23 + } yystate98: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'p': -goto yystate99 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'o' || c >= 'q' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'p': + goto yystate99 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'o' || c >= 'q' && c <= 'z': + goto yystate23 + } yystate99: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 't': -goto yystate100 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 's' || c >= 'u' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 't': + goto yystate100 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 's' || c >= 'u' && c <= 'z': + goto yystate23 + } yystate100: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'i': -goto yystate101 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'h' || c >= 'j' && c <= 'z': -goto yystate23 -} 
+ c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'i': + goto yystate101 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'h' || c >= 'j' && c <= 'z': + goto yystate23 + } yystate101: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'o': -goto yystate102 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'n' || c >= 'p' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'o': + goto yystate102 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'n' || c >= 'p' && c <= 'z': + goto yystate23 + } yystate102: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'n': -goto yystate51 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'm' || c >= 'o' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'n': + goto yystate51 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'm' || c >= 'o' && c <= 'z': + goto yystate23 + } yystate103: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'o': -goto yystate104 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'n' || c >= 'p' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'o': + goto yystate104 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'n' || c >= 'p' && c <= 'z': + goto yystate23 + } yystate104: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'r': -goto yystate54 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'r': + goto yystate54 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': + goto yystate23 + } yystate105: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'f': -goto yystate56 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'e' || c >= 'g' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'f': + goto yystate56 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'e' || c >= 'g' && c <= 'z': + goto yystate23 + } yystate106: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'a': -goto yystate107 -case c == 'i': -goto yystate108 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'b' && c <= 'h' || c >= 'j' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'a': + goto yystate107 + case c == 'i': + goto yystate108 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'b' && c <= 'h' || c >= 'j' && c <= 'z': + goto yystate23 + } yystate107: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'x': -goto yystate87 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'w' || c == 'y' || c == 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'x': + goto yystate87 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'w' || c == 'y' || c == 'z': + goto yystate23 + } yystate108: -c = lexer.getChar() 
-switch { -default: -goto yyrule20 -case c == 'n': -goto yystate87 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'm' || c >= 'o' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'n': + goto yystate87 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'm' || c >= 'o' && c <= 'z': + goto yystate23 + } yystate109: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'r': -goto yystate32 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'r': + goto yystate32 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': + goto yystate23 + } yystate110: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'e': -goto yystate111 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'd' || c >= 'f' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'e': + goto yystate111 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'd' || c >= 'f' && c <= 'z': + goto yystate23 + } yystate111: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'r': -goto yystate112 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'r': + goto yystate112 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': + goto yystate23 + } yystate112: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'm': -goto yystate113 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'l' || c >= 'n' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'm': + goto yystate113 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'l' || c >= 'n' && c <= 'z': + goto yystate23 + } yystate113: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'a': -goto yystate114 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'b' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'a': + goto yystate114 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'b' && c <= 'z': + goto yystate23 + } yystate114: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'n': -goto yystate115 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'm' || c >= 'o' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'n': + goto yystate115 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'm' || c >= 'o' && c <= 'z': + goto yystate23 + } yystate115: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'e': -goto yystate116 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'd' || c >= 'f' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'e': + goto yystate116 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 
'd' || c >= 'f' && c <= 'z': + goto yystate23 + } yystate116: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'n': -goto yystate117 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'm' || c >= 'o' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'n': + goto yystate117 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'm' || c >= 'o' && c <= 'z': + goto yystate23 + } yystate117: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 't': -goto yystate69 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 's' || c >= 'u' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 't': + goto yystate69 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 's' || c >= 'u' && c <= 'z': + goto yystate23 + } yystate118: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'u': -goto yystate119 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 't' || c >= 'v' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'u': + goto yystate119 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 't' || c >= 'v' && c <= 'z': + goto yystate23 + } yystate119: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'm': -goto yystate120 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'l' || c >= 'n' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'm': + goto yystate120 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'l' || c >= 'n' && c <= 'z': + goto yystate23 + } yystate120: -c = lexer.getChar() -switch { -default: -goto yyrule14 -case c == 'm': -goto yystate121 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'l' || c >= 'n' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule14 + case c == 'm': + goto yystate121 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'l' || c >= 'n' && c <= 'z': + goto yystate23 + } yystate121: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'a': -goto yystate122 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'b' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'a': + goto yystate122 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'b' && c <= 'z': + goto yystate23 + } yystate122: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'r': -goto yystate123 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'r': + goto yystate123 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'q' || c >= 's' && c <= 'z': + goto yystate23 + } yystate123: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'y': -goto yystate76 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'x' || c == 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'y': + goto yystate76 + case c >= 
'0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'x' || c == 'z': + goto yystate23 + } yystate124: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'i': -goto yystate125 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'h' || c >= 'j' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'i': + goto yystate125 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'h' || c >= 'j' && c <= 'z': + goto yystate23 + } yystate125: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 't': -goto yystate126 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 's' || c >= 'u' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 't': + goto yystate126 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 's' || c >= 'u' && c <= 'z': + goto yystate23 + } yystate126: -c = lexer.getChar() -switch { -default: -goto yyrule20 -case c == 'h': -goto yystate80 -case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'g' || c >= 'i' && c <= 'z': -goto yystate23 -} + c = lexer.getChar() + switch { + default: + goto yyrule20 + case c == 'h': + goto yystate80 + case c >= '0' && c <= ':' || c >= 'A' && c <= 'Z' || c == '_' || c >= 'a' && c <= 'g' || c >= 'i' && c <= 'z': + goto yystate23 + } -goto yystate127 // silence unused label error + goto yystate127 // silence unused label error yystate127: -c = lexer.getChar() + c = lexer.getChar() yystart127: -switch { -default: -goto yyabort -case c == '*': -goto yystate129 -case c >= '\x01' && c <= ')' || c >= '+' && c <= 'ÿ': -goto yystate128 -} + switch { + default: + goto yyabort + case c == '*': + goto yystate129 + case c >= '\x01' && c <= ')' || c >= '+' && c <= 'ÿ': + goto yystate128 + } yystate128: -c = lexer.getChar() -goto yyrule3 + c = lexer.getChar() + goto yyrule3 yystate129: -c = lexer.getChar() -switch { -default: -goto yyrule3 -case c == '/': -goto yystate130 -} + c = lexer.getChar() + switch { + default: + goto yyrule3 + case c == '/': + goto yystate130 + } yystate130: -c = lexer.getChar() -goto yyrule2 + c = lexer.getChar() + goto yyrule2 yyrule1: // "/*" -{ - currentState = S_COMMENTS -goto yystate0 -} + { + currentState = S_COMMENTS + goto yystate0 + } yyrule2: // "*/" -{ - currentState = S_INITIAL -goto yystate0 -} + { + currentState = S_INITIAL + goto yystate0 + } yyrule3: // .|\n -{ - /* ignore chars within multi-line comments */ -goto yystate0 -} + { + /* ignore chars within multi-line comments */ + goto yystate0 + } yyrule4: // \/\/[^\r\n]*\n -{ - /* gobble up one-line comments */ -goto yystate0 -} + { + /* gobble up one-line comments */ + goto yystate0 + } yyrule5: // ALERT|alert -{ - return ALERT -} + { + return ALERT + } yyrule6: // IF|if -{ - return IF -} + { + return IF + } yyrule7: // FOR|for -{ - return FOR -} + { + return FOR + } yyrule8: // WITH|with -{ - return WITH -} + { + return WITH + } yyrule9: // SUMMARY|summary -{ - return SUMMARY -} + { + return SUMMARY + } yyrule10: // DESCRIPTION|description -{ - return DESCRIPTION -} + { + return DESCRIPTION + } yyrule11: // PERMANENT|permanent -{ - return PERMANENT -} + { + return PERMANENT + } yyrule12: // BY|by -{ - return GROUP_OP -} + { + return GROUP_OP + } yyrule13: // AVG|SUM|MAX|MIN|COUNT -{ - lval.str = lexer.token(); return AGGR_OP -goto yystate0 -} + { + lval.str = lexer.token() + 
return AGGR_OP + goto yystate0 + } yyrule14: // avg|sum|max|min|count -{ - lval.str = strings.ToUpper(lexer.token()); return AGGR_OP -goto yystate0 -} + { + lval.str = strings.ToUpper(lexer.token()) + return AGGR_OP + goto yystate0 + } yyrule15: // \<|>|AND|OR|and|or -{ - lval.str = strings.ToUpper(lexer.token()); return CMP_OP -goto yystate0 -} + { + lval.str = strings.ToUpper(lexer.token()) + return CMP_OP + goto yystate0 + } yyrule16: // ==|!=|>=|<= -{ - lval.str = lexer.token(); return CMP_OP -goto yystate0 -} + { + lval.str = lexer.token() + return CMP_OP + goto yystate0 + } yyrule17: // [+\-] -{ - lval.str = lexer.token(); return ADDITIVE_OP -goto yystate0 -} + { + lval.str = lexer.token() + return ADDITIVE_OP + goto yystate0 + } yyrule18: // [*/%] -{ - lval.str = lexer.token(); return MULT_OP -goto yystate0 -} + { + lval.str = lexer.token() + return MULT_OP + goto yystate0 + } yyrule19: // {D}+{U} -{ - lval.str = lexer.token(); return DURATION -goto yystate0 -} + { + lval.str = lexer.token() + return DURATION + goto yystate0 + } yyrule20: // {L}({L}|{D})* -{ - lval.str = lexer.token(); return IDENTIFIER -goto yystate0 -} + { + lval.str = lexer.token() + return IDENTIFIER + goto yystate0 + } yyrule21: // \-?{D}+(\.{D}*)? -{ - num, err := strconv.ParseFloat(lexer.token(), 64); - if (err != nil && err.(*strconv.NumError).Err == strconv.ErrSyntax) { - panic("Invalid float") - } - lval.num = clientmodel.SampleValue(num) - return NUMBER -} + { + num, err := strconv.ParseFloat(lexer.token(), 64) + if err != nil && err.(*strconv.NumError).Err == strconv.ErrSyntax { + panic("Invalid float") + } + lval.num = clientmodel.SampleValue(num) + return NUMBER + } yyrule22: // \"(\\.|[^\\"])*\" -{ - lval.str = lexer.token()[1:len(lexer.token()) - 1]; return STRING -goto yystate0 -} + { + lval.str = lexer.token()[1 : len(lexer.token())-1] + return STRING + goto yystate0 + } yyrule23: // \'(\\.|[^\\'])*\' -{ - lval.str = lexer.token()[1:len(lexer.token()) - 1]; return STRING -goto yystate0 -} + { + lval.str = lexer.token()[1 : len(lexer.token())-1] + return STRING + goto yystate0 + } yyrule24: // [{}\[\]()=,] -{ - return int(lexer.buf[0]) -} + { + return int(lexer.buf[0]) + } yyrule25: // [\t\n\r ] -{ - /* gobble up any whitespace */ -goto yystate0 -} -panic("unreachable") + { + /* gobble up any whitespace */ + goto yystate0 + } + panic("unreachable") -goto yyabort // silence unused label error + goto yyabort // silence unused label error yyabort: // no lexem recognized - lexer.empty = true - return int(c) + lexer.empty = true + return int(c) } diff --git a/rules/parser.y.go b/rules/parser.y.go index d33cb7941..5959653c6 100644 --- a/rules/parser.y.go +++ b/rules/parser.y.go @@ -1,25 +1,25 @@ - //line parser.y:15 - package rules +package rules + import __yyfmt__ "fmt" -//line parser.y:15 - - import ( - clientmodel "github.com/prometheus/client_golang/model" - "github.com/prometheus/prometheus/rules/ast" - ) +//line parser.y:15 +import ( + clientmodel "github.com/prometheus/client_golang/model" + + "github.com/prometheus/prometheus/rules/ast" +) //line parser.y:24 type yySymType struct { - yys int - num clientmodel.SampleValue - str string - ruleNode ast.Node - ruleNodeSlice []ast.Node - boolean bool - labelNameSlice clientmodel.LabelNames - labelSet clientmodel.LabelSet + yys int + num clientmodel.SampleValue + str string + ruleNode ast.Node + ruleNodeSlice []ast.Node + boolean bool + labelNameSlice clientmodel.LabelNames + labelSet clientmodel.LabelSet } const START_RULES = 57346 @@ -70,7 +70,6 @@ 
const yyMaxDepth = 200 //line parser.y:191 - //line yacctab:1 var yyExca = []int{ -1, 1, @@ -407,133 +406,207 @@ yydefault: case 5: //line parser.y:69 - { yylex.(*RulesLexer).parsedExpr = yyS[yypt-0].ruleNode } + { + yylex.(*RulesLexer).parsedExpr = yyS[yypt-0].ruleNode + } case 6: //line parser.y:73 { - rule, err := CreateRecordingRule(yyS[yypt-3].str, yyS[yypt-2].labelSet, yyS[yypt-0].ruleNode, yyS[yypt-4].boolean) - if err != nil { yylex.Error(err.Error()); return 1 } - yylex.(*RulesLexer).parsedRules = append(yylex.(*RulesLexer).parsedRules, rule) - } + rule, err := CreateRecordingRule(yyS[yypt-3].str, yyS[yypt-2].labelSet, yyS[yypt-0].ruleNode, yyS[yypt-4].boolean) + if err != nil { + yylex.Error(err.Error()) + return 1 + } + yylex.(*RulesLexer).parsedRules = append(yylex.(*RulesLexer).parsedRules, rule) + } case 7: //line parser.y:79 { - rule, err := CreateAlertingRule(yyS[yypt-9].str, yyS[yypt-7].ruleNode, yyS[yypt-6].str, yyS[yypt-4].labelSet, yyS[yypt-2].str, yyS[yypt-0].str) - if err != nil { yylex.Error(err.Error()); return 1 } - yylex.(*RulesLexer).parsedRules = append(yylex.(*RulesLexer).parsedRules, rule) - } + rule, err := CreateAlertingRule(yyS[yypt-9].str, yyS[yypt-7].ruleNode, yyS[yypt-6].str, yyS[yypt-4].labelSet, yyS[yypt-2].str, yyS[yypt-0].str) + if err != nil { + yylex.Error(err.Error()) + return 1 + } + yylex.(*RulesLexer).parsedRules = append(yylex.(*RulesLexer).parsedRules, rule) + } case 8: //line parser.y:87 - { yyVAL.str = "0s" } + { + yyVAL.str = "0s" + } case 9: //line parser.y:89 - { yyVAL.str = yyS[yypt-0].str } + { + yyVAL.str = yyS[yypt-0].str + } case 10: //line parser.y:93 - { yyVAL.boolean = false } + { + yyVAL.boolean = false + } case 11: //line parser.y:95 - { yyVAL.boolean = true } + { + yyVAL.boolean = true + } case 12: //line parser.y:99 - { yyVAL.labelSet = clientmodel.LabelSet{} } + { + yyVAL.labelSet = clientmodel.LabelSet{} + } case 13: //line parser.y:101 - { yyVAL.labelSet = yyS[yypt-1].labelSet } + { + yyVAL.labelSet = yyS[yypt-1].labelSet + } case 14: //line parser.y:103 - { yyVAL.labelSet = clientmodel.LabelSet{} } + { + yyVAL.labelSet = clientmodel.LabelSet{} + } case 15: //line parser.y:106 - { yyVAL.labelSet = yyS[yypt-0].labelSet } + { + yyVAL.labelSet = yyS[yypt-0].labelSet + } case 16: //line parser.y:108 - { for k, v := range yyS[yypt-0].labelSet { yyVAL.labelSet[k] = v } } + { + for k, v := range yyS[yypt-0].labelSet { + yyVAL.labelSet[k] = v + } + } case 17: //line parser.y:112 - { yyVAL.labelSet = clientmodel.LabelSet{ clientmodel.LabelName(yyS[yypt-2].str): clientmodel.LabelValue(yyS[yypt-0].str) } } + { + yyVAL.labelSet = clientmodel.LabelSet{clientmodel.LabelName(yyS[yypt-2].str): clientmodel.LabelValue(yyS[yypt-0].str)} + } case 18: //line parser.y:117 - { yyVAL.ruleNode = yyS[yypt-1].ruleNode } + { + yyVAL.ruleNode = yyS[yypt-1].ruleNode + } case 19: //line parser.y:119 - { yyS[yypt-0].labelSet[clientmodel.MetricNameLabel] = clientmodel.LabelValue(yyS[yypt-1].str); yyVAL.ruleNode = ast.NewVectorLiteral(yyS[yypt-0].labelSet) } + { + yyS[yypt-0].labelSet[clientmodel.MetricNameLabel] = clientmodel.LabelValue(yyS[yypt-1].str) + yyVAL.ruleNode = ast.NewVectorLiteral(yyS[yypt-0].labelSet) + } case 20: //line parser.y:121 { - var err error - yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-3].str, yyS[yypt-1].ruleNodeSlice) - if err != nil { yylex.Error(err.Error()); return 1 } - } + var err error + yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-3].str, yyS[yypt-1].ruleNodeSlice) + if err != nil { + yylex.Error(err.Error()) + return 
1 + } + } case 21: //line parser.y:127 { - var err error - yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-2].str, []ast.Node{}) - if err != nil { yylex.Error(err.Error()); return 1 } - } + var err error + yyVAL.ruleNode, err = NewFunctionCall(yyS[yypt-2].str, []ast.Node{}) + if err != nil { + yylex.Error(err.Error()) + return 1 + } + } case 22: //line parser.y:133 { - var err error - yyVAL.ruleNode, err = NewMatrix(yyS[yypt-3].ruleNode, yyS[yypt-1].str) - if err != nil { yylex.Error(err.Error()); return 1 } - } + var err error + yyVAL.ruleNode, err = NewMatrix(yyS[yypt-3].ruleNode, yyS[yypt-1].str) + if err != nil { + yylex.Error(err.Error()) + return 1 + } + } case 23: //line parser.y:139 { - var err error - yyVAL.ruleNode, err = NewVectorAggregation(yyS[yypt-4].str, yyS[yypt-2].ruleNode, yyS[yypt-0].labelNameSlice) - if err != nil { yylex.Error(err.Error()); return 1 } - } + var err error + yyVAL.ruleNode, err = NewVectorAggregation(yyS[yypt-4].str, yyS[yypt-2].ruleNode, yyS[yypt-0].labelNameSlice) + if err != nil { + yylex.Error(err.Error()) + return 1 + } + } case 24: //line parser.y:147 { - var err error - yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode) - if err != nil { yylex.Error(err.Error()); return 1 } - } + var err error + yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode) + if err != nil { + yylex.Error(err.Error()) + return 1 + } + } case 25: //line parser.y:153 { - var err error - yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode) - if err != nil { yylex.Error(err.Error()); return 1 } - } + var err error + yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode) + if err != nil { + yylex.Error(err.Error()) + return 1 + } + } case 26: //line parser.y:159 { - var err error - yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode) - if err != nil { yylex.Error(err.Error()); return 1 } - } + var err error + yyVAL.ruleNode, err = NewArithExpr(yyS[yypt-1].str, yyS[yypt-2].ruleNode, yyS[yypt-0].ruleNode) + if err != nil { + yylex.Error(err.Error()) + return 1 + } + } case 27: //line parser.y:165 - { yyVAL.ruleNode = ast.NewScalarLiteral(yyS[yypt-0].num)} + { + yyVAL.ruleNode = ast.NewScalarLiteral(yyS[yypt-0].num) + } case 28: //line parser.y:169 - { yyVAL.labelNameSlice = clientmodel.LabelNames{} } + { + yyVAL.labelNameSlice = clientmodel.LabelNames{} + } case 29: //line parser.y:171 - { yyVAL.labelNameSlice = yyS[yypt-1].labelNameSlice } + { + yyVAL.labelNameSlice = yyS[yypt-1].labelNameSlice + } case 30: //line parser.y:175 - { yyVAL.labelNameSlice = clientmodel.LabelNames{clientmodel.LabelName(yyS[yypt-0].str)} } + { + yyVAL.labelNameSlice = clientmodel.LabelNames{clientmodel.LabelName(yyS[yypt-0].str)} + } case 31: //line parser.y:177 - { yyVAL.labelNameSlice = append(yyVAL.labelNameSlice, clientmodel.LabelName(yyS[yypt-0].str)) } + { + yyVAL.labelNameSlice = append(yyVAL.labelNameSlice, clientmodel.LabelName(yyS[yypt-0].str)) + } case 32: //line parser.y:181 - { yyVAL.ruleNodeSlice = []ast.Node{yyS[yypt-0].ruleNode} } + { + yyVAL.ruleNodeSlice = []ast.Node{yyS[yypt-0].ruleNode} + } case 33: //line parser.y:183 - { yyVAL.ruleNodeSlice = append(yyVAL.ruleNodeSlice, yyS[yypt-0].ruleNode) } + { + yyVAL.ruleNodeSlice = append(yyVAL.ruleNodeSlice, yyS[yypt-0].ruleNode) + } case 34: //line parser.y:187 - { yyVAL.ruleNode = yyS[yypt-0].ruleNode } + { + yyVAL.ruleNode = 
yyS[yypt-0].ruleNode + } case 35: //line parser.y:189 - { yyVAL.ruleNode = ast.NewStringLiteral(yyS[yypt-0].str) } + { + yyVAL.ruleNode = ast.NewStringLiteral(yyS[yypt-0].str) + } } goto yystack /* stack new state and value */ } diff --git a/storage/metric/index.go b/storage/metric/index.go new file mode 100644 index 000000000..a8e187807 --- /dev/null +++ b/storage/metric/index.go @@ -0,0 +1,330 @@ +// Copyright 2013 Prometheus Team +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package metric + +import ( + "sort" + + "code.google.com/p/goprotobuf/proto" + + clientmodel "github.com/prometheus/client_golang/model" + + dto "github.com/prometheus/prometheus/model/generated" + + "github.com/prometheus/prometheus/storage" + "github.com/prometheus/prometheus/storage/raw" + "github.com/prometheus/prometheus/storage/raw/leveldb" +) + +type FingerprintMetricMapping map[clientmodel.Fingerprint]clientmodel.Metric + +type FingerprintMetricIndex interface { + IndexBatch(FingerprintMetricMapping) error + Lookup(*clientmodel.Fingerprint) (m clientmodel.Metric, ok bool, err error) + Close() error +} + +type leveldbFingerprintMetricIndex struct { + p *leveldb.LevelDBPersistence +} + +type LevelDBFingerprintMetricIndexOptions struct { + leveldb.LevelDBOptions +} + +func (i *leveldbFingerprintMetricIndex) Close() error { + i.p.Close() + + return nil +} + +func (i *leveldbFingerprintMetricIndex) IndexBatch(mapping FingerprintMetricMapping) error { + b := leveldb.NewBatch() + defer b.Close() + + for f, m := range mapping { + k := new(dto.Fingerprint) + dumpFingerprint(k, &f) + v := new(dto.Metric) + dumpMetric(v, m) + + b.Put(k, v) + } + + return i.p.Commit(b) +} + +func (i *leveldbFingerprintMetricIndex) Lookup(f *clientmodel.Fingerprint) (m clientmodel.Metric, ok bool, err error) { + k := new(dto.Fingerprint) + dumpFingerprint(k, f) + v := new(dto.Metric) + if ok, err := i.p.Get(k, v); !ok { + return nil, false, nil + } else if err != nil { + return nil, false, err + } + + m = clientmodel.Metric{} + + for _, pair := range v.LabelPair { + m[clientmodel.LabelName(pair.GetName())] = clientmodel.LabelValue(pair.GetValue()) + } + + return m, true, nil +} + +func NewLevelDBFingerprintMetricIndex(o *LevelDBFingerprintMetricIndexOptions) (FingerprintMetricIndex, error) { + s, err := leveldb.NewLevelDBPersistence(&o.LevelDBOptions) + if err != nil { + return nil, err + } + + return &leveldbFingerprintMetricIndex{ + p: s, + }, nil +} + +type LabelNameFingerprintMapping map[clientmodel.LabelName]clientmodel.Fingerprints + +type LabelNameFingerprintIndex interface { + IndexBatch(LabelNameFingerprintMapping) error + Lookup(clientmodel.LabelName) (fps clientmodel.Fingerprints, ok bool, err error) + Has(clientmodel.LabelName) (ok bool, err error) + Close() error +} + +type leveldbLabelNameFingerprintIndex struct { + p *leveldb.LevelDBPersistence +} + +func (i *leveldbLabelNameFingerprintIndex) IndexBatch(b LabelNameFingerprintMapping) error { + batch := leveldb.NewBatch() + defer batch.Close() + + for labelName, 
fingerprints := range b { + sort.Sort(fingerprints) + + key := &dto.LabelName{ + Name: proto.String(string(labelName)), + } + value := new(dto.FingerprintCollection) + for _, fingerprint := range fingerprints { + f := new(dto.Fingerprint) + dumpFingerprint(f, fingerprint) + value.Member = append(value.Member, f) + } + + batch.Put(key, value) + } + + return i.p.Commit(batch) +} + +func (i *leveldbLabelNameFingerprintIndex) Lookup(l clientmodel.LabelName) (fps clientmodel.Fingerprints, ok bool, err error) { + k := new(dto.LabelName) + dumpLabelName(k, l) + v := new(dto.FingerprintCollection) + ok, err = i.p.Get(k, v) + if err != nil { + return nil, false, err + } + if !ok { + return nil, false, nil + } + + for _, m := range v.Member { + fp := new(clientmodel.Fingerprint) + loadFingerprint(fp, m) + fps = append(fps, fp) + } + + return fps, true, nil +} + +func (i *leveldbLabelNameFingerprintIndex) Has(l clientmodel.LabelName) (ok bool, err error) { + return i.p.Has(&dto.LabelName{ + Name: proto.String(string(l)), + }) +} + +func (i *leveldbLabelNameFingerprintIndex) Close() error { + i.p.Close() + + return nil +} + +type LevelDBLabelNameFingerprintIndexOptions struct { + leveldb.LevelDBOptions +} + +func NewLevelLabelNameFingerprintIndex(o *LevelDBLabelNameFingerprintIndexOptions) (LabelNameFingerprintIndex, error) { + s, err := leveldb.NewLevelDBPersistence(&o.LevelDBOptions) + if err != nil { + return nil, err + } + + return &leveldbLabelNameFingerprintIndex{ + p: s, + }, nil +} + +type LabelSetFingerprintMapping map[LabelPair]clientmodel.Fingerprints + +type LabelSetFingerprintIndex interface { + raw.ForEacher + + IndexBatch(LabelSetFingerprintMapping) error + Lookup(*LabelPair) (m clientmodel.Fingerprints, ok bool, err error) + Has(*LabelPair) (ok bool, err error) + Close() error +} + +type leveldbLabelSetFingerprintIndex struct { + p *leveldb.LevelDBPersistence +} + +type LevelDBLabelSetFingerprintIndexOptions struct { + leveldb.LevelDBOptions +} + +func (i *leveldbLabelSetFingerprintIndex) IndexBatch(m LabelSetFingerprintMapping) error { + batch := leveldb.NewBatch() + defer batch.Close() + + for pair, fps := range m { + sort.Sort(fps) + + key := &dto.LabelPair{ + Name: proto.String(string(pair.Name)), + Value: proto.String(string(pair.Value)), + } + value := new(dto.FingerprintCollection) + for _, fp := range fps { + f := new(dto.Fingerprint) + dumpFingerprint(f, fp) + value.Member = append(value.Member, f) + } + + batch.Put(key, value) + } + + return i.p.Commit(batch) +} + +func (i *leveldbLabelSetFingerprintIndex) Lookup(p *LabelPair) (m clientmodel.Fingerprints, ok bool, err error) { + k := &dto.LabelPair{ + Name: proto.String(string(p.Name)), + Value: proto.String(string(p.Value)), + } + v := new(dto.FingerprintCollection) + + ok, err = i.p.Get(k, v) + + if !ok { + return nil, false, nil + } + if err != nil { + return nil, false, err + } + + for _, pair := range v.Member { + fp := new(clientmodel.Fingerprint) + loadFingerprint(fp, pair) + m = append(m, fp) + } + + return m, true, nil +} + +func (i *leveldbLabelSetFingerprintIndex) Has(p *LabelPair) (ok bool, err error) { + k := &dto.LabelPair{ + Name: proto.String(string(p.Name)), + Value: proto.String(string(p.Value)), + } + + return i.p.Has(k) +} + +func (i *leveldbLabelSetFingerprintIndex) ForEach(d storage.RecordDecoder, f storage.RecordFilter, o storage.RecordOperator) (bool, error) { + return i.p.ForEach(d, f, o) +} + +func (i *leveldbLabelSetFingerprintIndex) Close() error { + i.p.Close() + return nil +} + +func 
NewLevelDBLabelSetFingerprintIndex(o *LevelDBLabelSetFingerprintIndexOptions) (LabelSetFingerprintIndex, error) { + s, err := leveldb.NewLevelDBPersistence(&o.LevelDBOptions) + if err != nil { + return nil, err + } + + return &leveldbLabelSetFingerprintIndex{ + p: s, + }, nil +} + +type MetricMembershipIndex interface { + IndexBatch([]clientmodel.Metric) error + Has(clientmodel.Metric) (ok bool, err error) + Close() error +} + +type leveldbMetricMembershipIndex struct { + p *leveldb.LevelDBPersistence +} + +var existenceIdentity = new(dto.MembershipIndexValue) + +func (i *leveldbMetricMembershipIndex) IndexBatch(ms []clientmodel.Metric) error { + batch := leveldb.NewBatch() + defer batch.Close() + + for _, m := range ms { + k := new(dto.Metric) + dumpMetric(k, m) + batch.Put(k, existenceIdentity) + } + + return i.p.Commit(batch) +} + +func (i *leveldbMetricMembershipIndex) Has(m clientmodel.Metric) (ok bool, err error) { + k := new(dto.Metric) + dumpMetric(k, m) + + return i.p.Has(k) +} + +func (i *leveldbMetricMembershipIndex) Close() error { + i.p.Close() + + return nil +} + +type LevelDBMetricMembershipIndexOptions struct { + leveldb.LevelDBOptions +} + +func NewLevelDBMetricMembershipIndex(o *LevelDBMetricMembershipIndexOptions) (MetricMembershipIndex, error) { + s, err := leveldb.NewLevelDBPersistence(&o.LevelDBOptions) + if err != nil { + return nil, err + } + + return &leveldbMetricMembershipIndex{ + p: s, + }, nil +} diff --git a/storage/metric/leveldb.go b/storage/metric/leveldb.go index b251279c6..9fbab7ba8 100644 --- a/storage/metric/leveldb.go +++ b/storage/metric/leveldb.go @@ -26,7 +26,6 @@ import ( clientmodel "github.com/prometheus/client_golang/model" dto "github.com/prometheus/prometheus/model/generated" - index "github.com/prometheus/prometheus/storage/raw/index/leveldb" "github.com/prometheus/prometheus/storage" "github.com/prometheus/prometheus/storage/raw/leveldb" @@ -37,11 +36,11 @@ const sortConcurrency = 2 type LevelDBMetricPersistence struct { CurationRemarks *leveldb.LevelDBPersistence - fingerprintToMetrics *leveldb.LevelDBPersistence - labelNameToFingerprints *leveldb.LevelDBPersistence - labelSetToFingerprints *leveldb.LevelDBPersistence + fingerprintToMetrics FingerprintMetricIndex + labelNameToFingerprints LabelNameFingerprintIndex + labelSetToFingerprints LabelSetFingerprintIndex MetricHighWatermarks *leveldb.LevelDBPersistence - metricMembershipIndex *index.LevelDBMembershipIndex + metricMembershipIndex MetricMembershipIndex MetricSamples *leveldb.LevelDBPersistence } @@ -60,12 +59,15 @@ var ( ) type leveldbOpener func() -type leveldbCloser interface { +type errorCloser interface { + Close() error +} +type closer interface { Close() } func (l *LevelDBMetricPersistence) Close() { - var persistences = []leveldbCloser{ + var persistences = []interface{}{ l.CurationRemarks, l.fingerprintToMetrics, l.labelNameToFingerprints, @@ -77,14 +79,21 @@ func (l *LevelDBMetricPersistence) Close() { closerGroup := sync.WaitGroup{} - for _, closer := range persistences { + for _, c := range persistences { closerGroup.Add(1) - go func(closer leveldbCloser) { - if closer != nil { - closer.Close() + go func(c interface{}) { + if c != nil { + switch closer := c.(type) { + case closer: + closer.Close() + case errorCloser: + if err := closer.Close(); err != nil { + log.Println("anomaly closing", err) + } + } } closerGroup.Done() - }(closer) + }(c) } closerGroup.Wait() @@ -103,11 +112,12 @@ func NewLevelDBMetricPersistence(baseDirectory string) (*LevelDBMetricPersistenc "Label 
Names and Value Pairs by Fingerprint", func() { var err error - o := &leveldb.LevelDBOptions{ - Path: baseDirectory + "/label_name_and_value_pairs_by_fingerprint", - CacheSizeBytes: *fingerprintsToLabelPairCacheSize, - } - emission.fingerprintToMetrics, err = leveldb.NewLevelDBPersistence(o) + emission.fingerprintToMetrics, err = NewLevelDBFingerprintMetricIndex(&LevelDBFingerprintMetricIndexOptions{ + LevelDBOptions: leveldb.LevelDBOptions{ + Path: baseDirectory + "/label_name_and_value_pairs_by_fingerprint", + CacheSizeBytes: *fingerprintsToLabelPairCacheSize, + }, + }) workers.MayFail(err) }, }, @@ -139,11 +149,12 @@ func NewLevelDBMetricPersistence(baseDirectory string) (*LevelDBMetricPersistenc "Fingerprints by Label Name", func() { var err error - o := &leveldb.LevelDBOptions{ - Path: baseDirectory + "/fingerprints_by_label_name", - CacheSizeBytes: *labelNameToFingerprintsCacheSize, - } - emission.labelNameToFingerprints, err = leveldb.NewLevelDBPersistence(o) + emission.labelNameToFingerprints, err = NewLevelLabelNameFingerprintIndex(&LevelDBLabelNameFingerprintIndexOptions{ + LevelDBOptions: leveldb.LevelDBOptions{ + Path: baseDirectory + "/fingerprints_by_label_name", + CacheSizeBytes: *labelNameToFingerprintsCacheSize, + }, + }) workers.MayFail(err) }, }, @@ -151,11 +162,12 @@ func NewLevelDBMetricPersistence(baseDirectory string) (*LevelDBMetricPersistenc "Fingerprints by Label Name and Value Pair", func() { var err error - o := &leveldb.LevelDBOptions{ - Path: baseDirectory + "/fingerprints_by_label_name_and_value_pair", - CacheSizeBytes: *labelPairToFingerprintsCacheSize, - } - emission.labelSetToFingerprints, err = leveldb.NewLevelDBPersistence(o) + emission.labelSetToFingerprints, err = NewLevelDBLabelSetFingerprintIndex(&LevelDBLabelSetFingerprintIndexOptions{ + LevelDBOptions: leveldb.LevelDBOptions{ + Path: baseDirectory + "/fingerprints_by_label_name_and_value_pair", + CacheSizeBytes: *labelPairToFingerprintsCacheSize, + }, + }) workers.MayFail(err) }, }, @@ -163,13 +175,13 @@ func NewLevelDBMetricPersistence(baseDirectory string) (*LevelDBMetricPersistenc "Metric Membership Index", func() { var err error - o := &index.LevelDBIndexOptions{ - LevelDBOptions: leveldb.LevelDBOptions{ - Path: baseDirectory + "/metric_membership_index", - CacheSizeBytes: *metricMembershipIndexCacheSize, - }, - } - emission.metricMembershipIndex, err = index.NewLevelDBMembershipIndex(o) + emission.metricMembershipIndex, err = NewLevelDBMetricMembershipIndex( + &LevelDBMetricMembershipIndexOptions{ + LevelDBOptions: leveldb.LevelDBOptions{ + Path: baseDirectory + "/metric_membership_index", + CacheSizeBytes: *metricMembershipIndexCacheSize, + }, + }) workers.MayFail(err) }, }, @@ -252,19 +264,16 @@ func groupByFingerprint(samples clientmodel.Samples) map[clientmodel.Fingerprint // findUnindexedMetrics scours the metric membership index for each given Metric // in the keyspace and returns a map of Fingerprint-Metric pairs that are // absent. 
-func (l *LevelDBMetricPersistence) findUnindexedMetrics(candidates map[clientmodel.Fingerprint]clientmodel.Metric) (unindexed map[clientmodel.Fingerprint]clientmodel.Metric, err error) { +func (l *LevelDBMetricPersistence) findUnindexedMetrics(candidates map[clientmodel.Fingerprint]clientmodel.Metric) (unindexed FingerprintMetricMapping, err error) { defer func(begin time.Time) { duration := time.Since(begin) recordOutcome(duration, err, map[string]string{operation: findUnindexedMetrics, result: success}, map[string]string{operation: findUnindexedMetrics, result: failure}) }(time.Now()) - unindexed = make(map[clientmodel.Fingerprint]clientmodel.Metric) - - dto := &dto.Metric{} + unindexed = FingerprintMetricMapping{} for fingerprint, metric := range candidates { - dumpMetric(dto, metric) - indexHas, err := l.hasIndexMetric(dto) + indexHas, err := l.hasIndexMetric(metric) if err != nil { return unindexed, err } @@ -281,67 +290,47 @@ func (l *LevelDBMetricPersistence) findUnindexedMetrics(candidates map[clientmod // the index to reflect the new state. // // This operation is idempotent. -func (l *LevelDBMetricPersistence) indexLabelNames(metrics map[clientmodel.Fingerprint]clientmodel.Metric) (err error) { +func (l *LevelDBMetricPersistence) indexLabelNames(metrics FingerprintMetricMapping) (err error) { defer func(begin time.Time) { duration := time.Since(begin) recordOutcome(duration, err, map[string]string{operation: indexLabelNames, result: success}, map[string]string{operation: indexLabelNames, result: failure}) }(time.Now()) - labelNameFingerprints := map[clientmodel.LabelName]utility.Set{} + retrieved := map[clientmodel.LabelName]utility.Set{} for fingerprint, metric := range metrics { for labelName := range metric { - fingerprintSet, ok := labelNameFingerprints[labelName] + fingerprintSet, ok := retrieved[labelName] if !ok { - fingerprintSet = utility.Set{} - fingerprints, err := l.GetFingerprintsForLabelName(labelName) if err != nil { return err } + fingerprintSet = utility.Set{} + retrieved[labelName] = fingerprintSet + for _, fingerprint := range fingerprints { fingerprintSet.Add(*fingerprint) } } fingerprintSet.Add(fingerprint) - labelNameFingerprints[labelName] = fingerprintSet } } - batch := leveldb.NewBatch() - defer batch.Close() - - for labelName, fingerprintSet := range labelNameFingerprints { - fingerprints := clientmodel.Fingerprints{} - for e := range fingerprintSet { - fingerprint := e.(clientmodel.Fingerprint) - fingerprints = append(fingerprints, &fingerprint) + pending := LabelNameFingerprintMapping{} + for name, set := range retrieved { + fps := pending[name] + for fp := range set { + f := fp.(clientmodel.Fingerprint) + fps = append(fps, &f) } - - sort.Sort(fingerprints) - - key := &dto.LabelName{ - Name: proto.String(string(labelName)), - } - value := new(dto.FingerprintCollection) - for _, fingerprint := range fingerprints { - f := new(dto.Fingerprint) - dumpFingerprint(f, fingerprint) - value.Member = append(value.Member, f) - } - - batch.Put(key, value) + pending[name] = fps } - err = l.labelNameToFingerprints.Commit(batch) - if err != nil { - return - } - - return + return l.labelNameToFingerprints.IndexBatch(pending) } // indexLabelPairs accumulates all label pair to fingerprint index entries for @@ -356,7 +345,7 @@ func (l *LevelDBMetricPersistence) indexLabelPairs(metrics map[clientmodel.Finge recordOutcome(duration, err, map[string]string{operation: indexLabelPairs, result: success}, map[string]string{operation: indexLabelPairs, result: failure}) 
}(time.Now()) - labelPairFingerprints := map[LabelPair]utility.Set{} + collection := map[LabelPair]utility.Set{} for fingerprint, metric := range metrics { for labelName, labelValue := range metric { @@ -364,113 +353,69 @@ func (l *LevelDBMetricPersistence) indexLabelPairs(metrics map[clientmodel.Finge Name: labelName, Value: labelValue, } - fingerprintSet, ok := labelPairFingerprints[labelPair] + fingerprintSet, ok := collection[labelPair] if !ok { - fingerprintSet = utility.Set{} - - fingerprints, err := l.GetFingerprintsForLabelSet(clientmodel.LabelSet{ - labelName: labelValue, - }) + fingerprints, _, err := l.labelSetToFingerprints.Lookup(&labelPair) if err != nil { return err } + fingerprintSet = utility.Set{} for _, fingerprint := range fingerprints { fingerprintSet.Add(*fingerprint) } + + collection[labelPair] = fingerprintSet } fingerprintSet.Add(fingerprint) - labelPairFingerprints[labelPair] = fingerprintSet } } - batch := leveldb.NewBatch() - defer batch.Close() + batch := LabelSetFingerprintMapping{} - for labelPair, fingerprintSet := range labelPairFingerprints { - fingerprints := clientmodel.Fingerprints{} - for e := range fingerprintSet { - fingerprint := e.(clientmodel.Fingerprint) - fingerprints = append(fingerprints, &fingerprint) + for pair, elements := range collection { + fps := batch[pair] + for element := range elements { + fp := element.(clientmodel.Fingerprint) + fps = append(fps, &fp) } - - sort.Sort(fingerprints) - - key := &dto.LabelPair{ - Name: proto.String(string(labelPair.Name)), - Value: proto.String(string(labelPair.Value)), - } - value := new(dto.FingerprintCollection) - for _, fingerprint := range fingerprints { - f := new(dto.Fingerprint) - dumpFingerprint(f, fingerprint) - value.Member = append(value.Member, f) - } - - batch.Put(key, value) + batch[pair] = fps } - err = l.labelSetToFingerprints.Commit(batch) - if err != nil { - return - } - - return + return l.labelSetToFingerprints.IndexBatch(batch) } // indexFingerprints updates all of the Fingerprint to Metric reverse lookups // in the index and then bulk updates. // // This operation is idempotent. -func (l *LevelDBMetricPersistence) indexFingerprints(metrics map[clientmodel.Fingerprint]clientmodel.Metric) (err error) { +func (l *LevelDBMetricPersistence) indexFingerprints(b FingerprintMetricMapping) (err error) { defer func(begin time.Time) { duration := time.Since(begin) recordOutcome(duration, err, map[string]string{operation: indexFingerprints, result: success}, map[string]string{operation: indexFingerprints, result: failure}) }(time.Now()) - batch := leveldb.NewBatch() - defer batch.Close() - - for fingerprint, metric := range metrics { - f := new(dto.Fingerprint) - dumpFingerprint(f, &fingerprint) - m := &dto.Metric{} - dumpMetric(m, metric) - batch.Put(f, m) - } - - err = l.fingerprintToMetrics.Commit(batch) - if err != nil { - return - } - - return + return l.fingerprintToMetrics.IndexBatch(b) } -var existenceIdentity = &dto.MembershipIndexValue{} - // indexMetrics takes groups of samples, determines which ones contain metrics // that are unknown to the storage stack, and then proceeds to update all // affected indices. 
-func (l *LevelDBMetricPersistence) indexMetrics(fingerprints map[clientmodel.Fingerprint]clientmodel.Metric) (err error) { +func (l *LevelDBMetricPersistence) indexMetrics(fingerprints FingerprintMetricMapping) (err error) { defer func(begin time.Time) { duration := time.Since(begin) recordOutcome(duration, err, map[string]string{operation: indexMetrics, result: success}, map[string]string{operation: indexMetrics, result: failure}) }(time.Now()) - var ( - absentMetrics map[clientmodel.Fingerprint]clientmodel.Metric - ) - - absentMetrics, err = l.findUnindexedMetrics(fingerprints) + absentees, err := l.findUnindexedMetrics(fingerprints) if err != nil { return } - if len(absentMetrics) == 0 { + if len(absentees) == 0 { return } @@ -479,42 +424,32 @@ func (l *LevelDBMetricPersistence) indexMetrics(fingerprints map[clientmodel.Fin workers := utility.NewUncertaintyGroup(3) go func() { - workers.MayFail(l.indexLabelNames(absentMetrics)) + workers.MayFail(l.indexLabelNames(absentees)) }() go func() { - workers.MayFail(l.indexLabelPairs(absentMetrics)) + workers.MayFail(l.indexLabelPairs(absentees)) }() go func() { - workers.MayFail(l.indexFingerprints(absentMetrics)) + workers.MayFail(l.indexFingerprints(absentees)) }() - if !workers.Wait() { - return fmt.Errorf("Could not index due to %s", workers.Errors()) - } - // If any of the preceding operations failed, we will have inconsistent // indices. Thusly, the Metric membership index should NOT be updated, as // its state is used to determine whether to bulk update the other indices. // Given that those operations are idempotent, it is OK to repeat them; // however, it will consume considerable amounts of time. - batch := leveldb.NewBatch() - defer batch.Close() - - for _, metric := range absentMetrics { - m := &dto.Metric{} - dumpMetric(m, metric) - batch.Put(m, existenceIdentity) + if !workers.Wait() { + return fmt.Errorf("Could not index due to %s", workers.Errors()) } - err = l.metricMembershipIndex.Commit(batch) - if err != nil { - // Not critical but undesirable. 
- log.Println(err) + ms := []clientmodel.Metric{} + for _, m := range absentees { + ms = append(ms, m) } - return + return l.metricMembershipIndex.IndexBatch(ms) } func (l *LevelDBMetricPersistence) refreshHighWatermarks(groups map[clientmodel.Fingerprint]clientmodel.Samples) (err error) { @@ -573,7 +508,7 @@ func (l *LevelDBMetricPersistence) AppendSamples(samples clientmodel.Samples) (e watermarkErrChan := make(chan error, 1) go func(groups map[clientmodel.Fingerprint]clientmodel.Samples) { - metrics := map[clientmodel.Fingerprint]clientmodel.Metric{} + metrics := FingerprintMetricMapping{} for fingerprint, samples := range groups { metrics[fingerprint] = samples[0].Metric @@ -667,38 +602,34 @@ func extractSampleValues(i leveldb.Iterator) (Values, error) { return NewValuesFromDTO(v), nil } -func (l *LevelDBMetricPersistence) hasIndexMetric(dto *dto.Metric) (value bool, err error) { +func (l *LevelDBMetricPersistence) hasIndexMetric(m clientmodel.Metric) (value bool, err error) { defer func(begin time.Time) { duration := time.Since(begin) recordOutcome(duration, err, map[string]string{operation: hasIndexMetric, result: success}, map[string]string{operation: hasIndexMetric, result: failure}) }(time.Now()) - value, err = l.metricMembershipIndex.Has(dto) - - return + return l.metricMembershipIndex.Has(m) } -func (l *LevelDBMetricPersistence) HasLabelPair(dto *dto.LabelPair) (value bool, err error) { +func (l *LevelDBMetricPersistence) HasLabelPair(p *LabelPair) (value bool, err error) { defer func(begin time.Time) { duration := time.Since(begin) recordOutcome(duration, err, map[string]string{operation: hasLabelPair, result: success}, map[string]string{operation: hasLabelPair, result: failure}) }(time.Now()) - value, err = l.labelSetToFingerprints.Has(dto) - - return + return l.labelSetToFingerprints.Has(p) } -func (l *LevelDBMetricPersistence) HasLabelName(dto *dto.LabelName) (value bool, err error) { +func (l *LevelDBMetricPersistence) HasLabelName(n clientmodel.LabelName) (value bool, err error) { defer func(begin time.Time) { duration := time.Since(begin) recordOutcome(duration, err, map[string]string{operation: hasLabelName, result: success}, map[string]string{operation: hasLabelName, result: failure}) }(time.Now()) - value, err = l.labelNameToFingerprints.Has(dto) + value, err = l.labelNameToFingerprints.Has(n) return } @@ -711,29 +642,19 @@ func (l *LevelDBMetricPersistence) GetFingerprintsForLabelSet(labelSet clientmod }(time.Now()) sets := []utility.Set{} - pair := &dto.LabelPair{} - unmarshaled := new(dto.FingerprintCollection) for name, value := range labelSet { - pair.Reset() - unmarshaled.Reset() - - pair.Name = proto.String(string(name)) - pair.Value = proto.String(string(value)) - - present, err := l.labelSetToFingerprints.Get(pair, unmarshaled) + fps, _, err := l.labelSetToFingerprints.Lookup(&LabelPair{ + Name: name, + Value: value, + }) if err != nil { - return fps, err - } - if !present { - return nil, nil + return nil, err } set := utility.Set{} - for _, m := range unmarshaled.Member { - fp := &clientmodel.Fingerprint{} - loadFingerprint(fp, m) + for _, fp := range fps { set.Add(*fp) } @@ -764,24 +685,10 @@ func (l *LevelDBMetricPersistence) GetFingerprintsForLabelName(labelName clientm recordOutcome(duration, err, map[string]string{operation: getFingerprintsForLabelName, result: success}, map[string]string{operation: getFingerprintsForLabelName, result: failure}) }(time.Now()) - unmarshaled := new(dto.FingerprintCollection) - d := &dto.LabelName{} - dumpLabelName(d, 
labelName) - present, err := l.labelNameToFingerprints.Get(d, unmarshaled) - if err != nil { - return nil, err - } - if !present { - return nil, nil - } + // TODO(matt): Update signature to work with ok. + fps, _, err = l.labelNameToFingerprints.Lookup(labelName) - for _, m := range unmarshaled.Member { - fp := &clientmodel.Fingerprint{} - loadFingerprint(fp, m) - fps = append(fps, fp) - } - - return fps, nil + return fps, err } func (l *LevelDBMetricPersistence) GetMetricForFingerprint(f *clientmodel.Fingerprint) (m clientmodel.Metric, err error) { @@ -791,22 +698,8 @@ func (l *LevelDBMetricPersistence) GetMetricForFingerprint(f *clientmodel.Finger recordOutcome(duration, err, map[string]string{operation: getMetricForFingerprint, result: success}, map[string]string{operation: getMetricForFingerprint, result: failure}) }(time.Now()) - unmarshaled := &dto.Metric{} - d := new(dto.Fingerprint) - dumpFingerprint(d, f) - present, err := l.fingerprintToMetrics.Get(d, unmarshaled) - if err != nil { - return nil, err - } - if !present { - return nil, nil - } - - m = clientmodel.Metric{} - - for _, v := range unmarshaled.LabelPair { - m[clientmodel.LabelName(v.GetName())] = clientmodel.LabelValue(v.GetValue()) - } + // TODO(matt): Update signature to work with ok. + m, _, err = l.fingerprintToMetrics.Lookup(f) return m, nil } @@ -887,11 +780,11 @@ func (l *LevelDBMetricPersistence) GetAllValuesForLabel(labelName clientmodel.La // server due to latency implications. func (l *LevelDBMetricPersistence) CompactKeyspaces() { l.CurationRemarks.CompactKeyspace() - l.fingerprintToMetrics.CompactKeyspace() - l.labelNameToFingerprints.CompactKeyspace() - l.labelSetToFingerprints.CompactKeyspace() + // l.fingerprintToMetrics.CompactKeyspace() + // l.labelNameToFingerprints.CompactKeyspace() + // l.labelSetToFingerprints.CompactKeyspace() l.MetricHighWatermarks.CompactKeyspace() - l.metricMembershipIndex.CompactKeyspace() + // l.metricMembershipIndex.CompactKeyspace() l.MetricSamples.CompactKeyspace() } @@ -903,30 +796,30 @@ func (l *LevelDBMetricPersistence) ApproximateSizes() (total uint64, err error) } total += size - if size, err = l.fingerprintToMetrics.ApproximateSize(); err != nil { - return 0, err - } - total += size + // if size, err = l.fingerprintToMetrics.ApproximateSize(); err != nil { + // return 0, err + // } + // total += size - if size, err = l.labelNameToFingerprints.ApproximateSize(); err != nil { - return 0, err - } - total += size + // if size, err = l.labelNameToFingerprints.ApproximateSize(); err != nil { + // return 0, err + // } + // total += size - if size, err = l.labelSetToFingerprints.ApproximateSize(); err != nil { - return 0, err - } - total += size + // if size, err = l.labelSetToFingerprints.ApproximateSize(); err != nil { + // return 0, err + // } + // total += size if size, err = l.MetricHighWatermarks.ApproximateSize(); err != nil { return 0, err } total += size - if size, err = l.metricMembershipIndex.ApproximateSize(); err != nil { - return 0, err - } - total += size + // if size, err = l.metricMembershipIndex.ApproximateSize(); err != nil { + // return 0, err + // } + // total += size if size, err = l.MetricSamples.ApproximateSize(); err != nil { return 0, err @@ -944,30 +837,30 @@ func (l *LevelDBMetricPersistence) States() []leveldb.DatabaseState { state.Type = "Watermark" states = append(states, state) - state = l.fingerprintToMetrics.State() - state.Name = "Fingerprints to Metrics" - state.Type = "Index" - states = append(states, state) + // state = 
l.fingerprintToMetrics.State() + // state.Name = "Fingerprints to Metrics" + // state.Type = "Index" + // states = append(states, state) - state = l.labelNameToFingerprints.State() - state.Name = "Label Name to Fingerprints" - state.Type = "Inverted Index" - states = append(states, state) + // state = l.labelNameToFingerprints.State() + // state.Name = "Label Name to Fingerprints" + // state.Type = "Inverted Index" + // states = append(states, state) - state = l.labelSetToFingerprints.State() - state.Name = "Label Pair to Fingerprints" - state.Type = "Inverted Index" - states = append(states, state) + // state = l.labelSetToFingerprints.State() + // state.Name = "Label Pair to Fingerprints" + // state.Type = "Inverted Index" + // states = append(states, state) state = l.MetricHighWatermarks.State() state.Name = "Metric Last Write" state.Type = "Watermark" states = append(states, state) - state = l.metricMembershipIndex.State() - state.Name = "Metric Membership" - state.Type = "Index" - states = append(states, state) + // state = l.metricMembershipIndex.State() + // state.Name = "Metric Membership" + // state.Type = "Index" + // states = append(states, state) state = l.MetricSamples.State() state.Name = "Samples" diff --git a/storage/raw/interface.go b/storage/raw/interface.go index c369724b8..be4cf695b 100644 --- a/storage/raw/interface.go +++ b/storage/raw/interface.go @@ -19,9 +19,23 @@ import ( "github.com/prometheus/prometheus/storage" ) +type ForEacher interface { + // ForEach is responsible for iterating through all records in the database + // until one of the following conditions are met: + // + // 1.) A system anomaly in the database scan. + // 2.) The last record in the database is reached. + // 3.) A FilterResult of STOP is emitted by the Filter. + // + // Decoding errors for an entity cause that entity to be skipped. + ForEach(storage.RecordDecoder, storage.RecordFilter, storage.RecordOperator) (scannedEntireCorpus bool, err error) +} + // Persistence models a key-value store for bytes that supports various // additional operations. type Persistence interface { + ForEacher + // Close reaps all of the underlying system resources associated with this // persistence. Close() @@ -34,15 +48,6 @@ type Persistence interface { Drop(key proto.Message) error // Put sets the key to a given value. Put(key, value proto.Message) error - // ForEach is responsible for iterating through all records in the database - // until one of the following conditions are met: - // - // 1.) A system anomaly in the database scan. - // 2.) The last record in the database is reached. - // 3.) A FilterResult of STOP is emitted by the Filter. - // - // Decoding errors for an entity cause that entity to be skipped. - ForEach(storage.RecordDecoder, storage.RecordFilter, storage.RecordOperator) (scannedEntireCorpus bool, err error) // Commit applies the Batch operations to the database. Commit(Batch) error }
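
Not part of the patch above — a minimal usage sketch of the FingerprintMetricIndex type extracted into storage/metric/index.go. The import paths follow the repository layout, and the empty fingerprint plus the temporary database path are illustrative assumptions only; real callers derive fingerprints from metrics elsewhere in the storage stack.

package main

import (
	"log"

	clientmodel "github.com/prometheus/client_golang/model"

	"github.com/prometheus/prometheus/storage/metric"
	"github.com/prometheus/prometheus/storage/raw/leveldb"
)

func main() {
	// Open the fingerprint-to-metric index on its own LevelDB keyspace.
	idx, err := metric.NewLevelDBFingerprintMetricIndex(&metric.LevelDBFingerprintMetricIndexOptions{
		LevelDBOptions: leveldb.LevelDBOptions{Path: "/tmp/fingerprint_to_metric"},
	})
	if err != nil {
		log.Fatal(err)
	}
	defer idx.Close()

	// Hypothetical fingerprint/metric pair; fingerprint derivation is elided.
	fp := &clientmodel.Fingerprint{}
	m := clientmodel.Metric{clientmodel.MetricNameLabel: "requests_total"}

	// IndexBatch writes the whole mapping in one LevelDB batch; Lookup then
	// distinguishes a missing entry (ok == false) from a storage error.
	if err := idx.IndexBatch(metric.FingerprintMetricMapping{*fp: m}); err != nil {
		log.Fatal(err)
	}
	if got, ok, err := idx.Lookup(fp); err != nil {
		log.Fatal(err)
	} else if ok {
		log.Println("indexed metric:", got)
	}
}

The other index types introduced here (LabelNameFingerprintIndex, LabelSetFingerprintIndex, MetricMembershipIndex) follow the same pattern: construct through their options wrappers, write with IndexBatch, and query with Lookup or Has.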