diff --git a/Makefile b/Makefile index e89a95b659e203af8e76499c8e37d6f22c2ee37c..206267221c3ea621d942f393c48ab95af8fe4a6d 100644 --- a/Makefile +++ b/Makefile @@ -88,9 +88,8 @@ pkg/logproto/logproto.pb.go: pkg/logproto/logproto.proto vendor/github.com/cortexproject/cortex/pkg/ring/ring.pb.go: vendor/github.com/cortexproject/cortex/pkg/ring/ring.proto vendor/github.com/cortexproject/cortex/pkg/ingester/client/cortex.pb.go: vendor/github.com/cortexproject/cortex/pkg/ingester/client/cortex.proto vendor/github.com/cortexproject/cortex/pkg/chunk/storage/caching_index_client.pb.go: vendor/github.com/cortexproject/cortex/pkg/chunk/storage/caching_index_client.proto -pkg/parser/labels.go: pkg/parser/labels.y -pkg/parser/matchers.go: pkg/parser/matchers.y pkg/promtail/server/server.go: assets +pkg/logql/expr.go: pkg/logql/expr.y all: $(UPTODATE_FILES) test: $(PROTO_GOS) $(YACC_GOS) debug: $(DEBUG_UPTODATE_FILES) diff --git a/pkg/ingester/instance.go b/pkg/ingester/instance.go index 37584db94861c43f24d497ad57f0db9b72a1ddc7..811787f854af2c67c1c70fa5553e9369fa09d950 100644 --- a/pkg/ingester/instance.go +++ b/pkg/ingester/instance.go @@ -9,6 +9,7 @@ import ( "github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/common/model" "github.com/prometheus/prometheus/pkg/labels" + "github.com/prometheus/prometheus/promql" "github.com/cortexproject/cortex/pkg/ingester/client" "github.com/cortexproject/cortex/pkg/ingester/index" @@ -16,7 +17,6 @@ import ( "github.com/grafana/loki/pkg/helpers" "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/parser" "github.com/grafana/loki/pkg/querier" "github.com/grafana/loki/pkg/util" ) @@ -98,7 +98,7 @@ func (i *instance) Push(ctx context.Context, req *logproto.PushRequest) error { } func (i *instance) Query(req *logproto.QueryRequest, queryServer logproto.Querier_QueryServer) error { - matchers, err := parser.Matchers(req.Query) + matchers, err := promql.ParseMetricSelector(req.Query) if err != nil { return err } diff --git a/pkg/logproto/logproto.pb.go b/pkg/logproto/logproto.pb.go index 721481068a49ef810e1850dfaea3ea6207176791..c7598067f04940ac6ce0ffb6fe60c84fc4973828 100644 --- a/pkg/logproto/logproto.pb.go +++ b/pkg/logproto/logproto.pb.go @@ -6,18 +6,17 @@ package logproto import ( context "context" fmt "fmt" + _ "github.com/gogo/protobuf/gogoproto" + proto "github.com/gogo/protobuf/proto" + _ "github.com/gogo/protobuf/types" + github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" + grpc "google.golang.org/grpc" io "io" math "math" reflect "reflect" strconv "strconv" strings "strings" time "time" - - _ "github.com/gogo/protobuf/gogoproto" - proto "github.com/gogo/protobuf/proto" - _ "github.com/gogo/protobuf/types" - github_com_gogo_protobuf_types "github.com/gogo/protobuf/types" - grpc "google.golang.org/grpc" ) // Reference imports to suppress errors if they are not otherwise used. diff --git a/pkg/logql/expr.go b/pkg/logql/expr.go new file mode 100644 index 0000000000000000000000000000000000000000..e130b9dd704d4ee4e2d8ea74db3992f9506731fe --- /dev/null +++ b/pkg/logql/expr.go @@ -0,0 +1,552 @@ +// Code generated by goyacc -p expr -o pkg/logql/expr.go pkg/logql/expr.y. DO NOT EDIT. 
+ +//line pkg/logql/expr.y:2 +package logql + +import __yyfmt__ "fmt" + +//line pkg/logql/expr.y:2 + +import ( + "github.com/prometheus/prometheus/pkg/labels" +) + +//line pkg/logql/expr.y:9 +type exprSymType struct { + yys int + Expr Expr + Matchers []*labels.Matcher + Matcher *labels.Matcher + str string + int int64 + Identifier string +} + +const IDENTIFIER = 57346 +const STRING = 57347 +const MATCHERS = 57348 +const LABELS = 57349 +const EQ = 57350 +const NEQ = 57351 +const RE = 57352 +const NRE = 57353 +const OPEN_BRACE = 57354 +const CLOSE_BRACE = 57355 +const COMMA = 57356 +const DOT = 57357 +const PIPE_MATCH = 57358 +const PIPE_EXACT = 57359 + +var exprToknames = [...]string{ + "$end", + "error", + "$unk", + "IDENTIFIER", + "STRING", + "MATCHERS", + "LABELS", + "EQ", + "NEQ", + "RE", + "NRE", + "OPEN_BRACE", + "CLOSE_BRACE", + "COMMA", + "DOT", + "PIPE_MATCH", + "PIPE_EXACT", +} +var exprStatenames = [...]string{} + +const exprEofCode = 1 +const exprErrCode = 2 +const exprInitialStackSize = 16 + +//line pkg/logql/expr.y:56 + +//line yacctab:1 +var exprExca = [...]int{ + -1, 1, + 1, -1, + -2, 0, +} + +const exprPrivate = 57344 + +const exprLast = 28 + +var exprAct = [...]int{ + + 7, 9, 6, 18, 19, 20, 21, 4, 5, 3, + 22, 16, 17, 27, 26, 25, 24, 15, 14, 23, + 13, 12, 28, 11, 10, 8, 2, 1, +} +var exprPact = [...]int{ + + -3, -1000, -9, 19, 16, 15, 13, 12, -2, -1000, + -5, -1000, -1000, -1000, -1000, -1000, -1000, 19, 11, 10, + 9, 8, 18, -1000, -1000, -1000, -1000, -1000, -1000, +} +var exprPgo = [...]int{ + + 0, 27, 26, 25, 1, 24, +} +var exprR1 = [...]int{ + + 0, 1, 2, 2, 2, 2, 2, 3, 3, 4, + 4, 4, 4, 5, 5, +} +var exprR2 = [...]int{ + + 0, 1, 3, 3, 3, 3, 3, 1, 3, 3, + 3, 3, 3, 1, 3, +} +var exprChk = [...]int{ + + -1000, -1, -2, 12, 16, 17, 11, 9, -3, -4, + -5, 4, 5, 5, 5, 5, 13, 14, 8, 9, + 10, 11, 15, -4, 5, 5, 5, 5, 4, +} +var exprDef = [...]int{ + + 0, -2, 1, 0, 0, 0, 0, 0, 0, 7, + 0, 13, 3, 4, 5, 6, 2, 0, 0, 0, + 0, 0, 0, 8, 9, 10, 11, 12, 14, +} +var exprTok1 = [...]int{ + + 1, +} +var exprTok2 = [...]int{ + + 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, + 12, 13, 14, 15, 16, 17, +} +var exprTok3 = [...]int{ + 0, +} + +var exprErrorMessages = [...]struct { + state int + token int + msg string +}{} + +//line yaccpar:1 + +/* parser for yacc output */ + +var ( + exprDebug = 0 + exprErrorVerbose = false +) + +type exprLexer interface { + Lex(lval *exprSymType) int + Error(s string) +} + +type exprParser interface { + Parse(exprLexer) int + Lookahead() int +} + +type exprParserImpl struct { + lval exprSymType + stack [exprInitialStackSize]exprSymType + char int +} + +func (p *exprParserImpl) Lookahead() int { + return p.char +} + +func exprNewParser() exprParser { + return &exprParserImpl{} +} + +const exprFlag = -1000 + +func exprTokname(c int) string { + if c >= 1 && c-1 < len(exprToknames) { + if exprToknames[c-1] != "" { + return exprToknames[c-1] + } + } + return __yyfmt__.Sprintf("tok-%v", c) +} + +func exprStatname(s int) string { + if s >= 0 && s < len(exprStatenames) { + if exprStatenames[s] != "" { + return exprStatenames[s] + } + } + return __yyfmt__.Sprintf("state-%v", s) +} + +func exprErrorMessage(state, lookAhead int) string { + const TOKSTART = 4 + + if !exprErrorVerbose { + return "syntax error" + } + + for _, e := range exprErrorMessages { + if e.state == state && e.token == lookAhead { + return "syntax error: " + e.msg + } + } + + res := "syntax error: unexpected " + exprTokname(lookAhead) + + // To match Bison, suggest at most four expected tokens. 
+ expected := make([]int, 0, 4) + + // Look for shiftable tokens. + base := exprPact[state] + for tok := TOKSTART; tok-1 < len(exprToknames); tok++ { + if n := base + tok; n >= 0 && n < exprLast && exprChk[exprAct[n]] == tok { + if len(expected) == cap(expected) { + return res + } + expected = append(expected, tok) + } + } + + if exprDef[state] == -2 { + i := 0 + for exprExca[i] != -1 || exprExca[i+1] != state { + i += 2 + } + + // Look for tokens that we accept or reduce. + for i += 2; exprExca[i] >= 0; i += 2 { + tok := exprExca[i] + if tok < TOKSTART || exprExca[i+1] == 0 { + continue + } + if len(expected) == cap(expected) { + return res + } + expected = append(expected, tok) + } + + // If the default action is to accept or reduce, give up. + if exprExca[i+1] != 0 { + return res + } + } + + for i, tok := range expected { + if i == 0 { + res += ", expecting " + } else { + res += " or " + } + res += exprTokname(tok) + } + return res +} + +func exprlex1(lex exprLexer, lval *exprSymType) (char, token int) { + token = 0 + char = lex.Lex(lval) + if char <= 0 { + token = exprTok1[0] + goto out + } + if char < len(exprTok1) { + token = exprTok1[char] + goto out + } + if char >= exprPrivate { + if char < exprPrivate+len(exprTok2) { + token = exprTok2[char-exprPrivate] + goto out + } + } + for i := 0; i < len(exprTok3); i += 2 { + token = exprTok3[i+0] + if token == char { + token = exprTok3[i+1] + goto out + } + } + +out: + if token == 0 { + token = exprTok2[1] /* unknown char */ + } + if exprDebug >= 3 { + __yyfmt__.Printf("lex %s(%d)\n", exprTokname(token), uint(char)) + } + return char, token +} + +func exprParse(exprlex exprLexer) int { + return exprNewParser().Parse(exprlex) +} + +func (exprrcvr *exprParserImpl) Parse(exprlex exprLexer) int { + var exprn int + var exprVAL exprSymType + var exprDollar []exprSymType + _ = exprDollar // silence set and not used + exprS := exprrcvr.stack[:] + + Nerrs := 0 /* number of errors */ + Errflag := 0 /* error recovery flag */ + exprstate := 0 + exprrcvr.char = -1 + exprtoken := -1 // exprrcvr.char translated into internal numbering + defer func() { + // Make sure we report no lookahead when not parsing. 
+ exprstate = -1 + exprrcvr.char = -1 + exprtoken = -1 + }() + exprp := -1 + goto exprstack + +ret0: + return 0 + +ret1: + return 1 + +exprstack: + /* put a state and value onto the stack */ + if exprDebug >= 4 { + __yyfmt__.Printf("char %v in %v\n", exprTokname(exprtoken), exprStatname(exprstate)) + } + + exprp++ + if exprp >= len(exprS) { + nyys := make([]exprSymType, len(exprS)*2) + copy(nyys, exprS) + exprS = nyys + } + exprS[exprp] = exprVAL + exprS[exprp].yys = exprstate + +exprnewstate: + exprn = exprPact[exprstate] + if exprn <= exprFlag { + goto exprdefault /* simple state */ + } + if exprrcvr.char < 0 { + exprrcvr.char, exprtoken = exprlex1(exprlex, &exprrcvr.lval) + } + exprn += exprtoken + if exprn < 0 || exprn >= exprLast { + goto exprdefault + } + exprn = exprAct[exprn] + if exprChk[exprn] == exprtoken { /* valid shift */ + exprrcvr.char = -1 + exprtoken = -1 + exprVAL = exprrcvr.lval + exprstate = exprn + if Errflag > 0 { + Errflag-- + } + goto exprstack + } + +exprdefault: + /* default state action */ + exprn = exprDef[exprstate] + if exprn == -2 { + if exprrcvr.char < 0 { + exprrcvr.char, exprtoken = exprlex1(exprlex, &exprrcvr.lval) + } + + /* look through exception table */ + xi := 0 + for { + if exprExca[xi+0] == -1 && exprExca[xi+1] == exprstate { + break + } + xi += 2 + } + for xi += 2; ; xi += 2 { + exprn = exprExca[xi+0] + if exprn < 0 || exprn == exprtoken { + break + } + } + exprn = exprExca[xi+1] + if exprn < 0 { + goto ret0 + } + } + if exprn == 0 { + /* error ... attempt to resume parsing */ + switch Errflag { + case 0: /* brand new error */ + exprlex.Error(exprErrorMessage(exprstate, exprtoken)) + Nerrs++ + if exprDebug >= 1 { + __yyfmt__.Printf("%s", exprStatname(exprstate)) + __yyfmt__.Printf(" saw %s\n", exprTokname(exprtoken)) + } + fallthrough + + case 1, 2: /* incompletely recovered error ... try again */ + Errflag = 3 + + /* find a state where "error" is a legal shift action */ + for exprp >= 0 { + exprn = exprPact[exprS[exprp].yys] + exprErrCode + if exprn >= 0 && exprn < exprLast { + exprstate = exprAct[exprn] /* simulate a shift of "error" */ + if exprChk[exprstate] == exprErrCode { + goto exprstack + } + } + + /* the current p has no shift on "error", pop stack */ + if exprDebug >= 2 { + __yyfmt__.Printf("error recovery pops state %d\n", exprS[exprp].yys) + } + exprp-- + } + /* there is no state on the stack with an error shift ... abort */ + goto ret1 + + case 3: /* no shift yet; clobber input char */ + if exprDebug >= 2 { + __yyfmt__.Printf("error recovery discards %s\n", exprTokname(exprtoken)) + } + if exprtoken == exprEofCode { + goto ret1 + } + exprrcvr.char = -1 + exprtoken = -1 + goto exprnewstate /* try again in the same state */ + } + } + + /* reduction by production exprn */ + if exprDebug >= 2 { + __yyfmt__.Printf("reduce %v in:\n\t%v\n", exprn, exprStatname(exprstate)) + } + + exprnt := exprn + exprpt := exprp + _ = exprpt // guard against "declared and not used" + + exprp -= exprR2[exprn] + // exprp is now the index of $0. Perform the default action. Iff the + // reduced production is ε, $1 is possibly out of range. 
+ if exprp+1 >= len(exprS) { + nyys := make([]exprSymType, len(exprS)*2) + copy(nyys, exprS) + exprS = nyys + } + exprVAL = exprS[exprp+1] + + /* consult goto table to find next state */ + exprn = exprR1[exprn] + exprg := exprPgo[exprn] + exprj := exprg + exprS[exprp].yys + 1 + + if exprj >= exprLast { + exprstate = exprAct[exprg] + } else { + exprstate = exprAct[exprj] + if exprChk[exprstate] != -exprn { + exprstate = exprAct[exprg] + } + } + // dummy call; replaced with literal code + switch exprnt { + + case 1: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:30 + { + exprlex.(*lexer).expr = exprDollar[1].Expr + } + case 2: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:33 + { + exprVAL.Expr = &matchersExpr{exprDollar[2].Matchers} + } + case 3: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:34 + { + exprVAL.Expr = &matchExpr{exprDollar[1].Expr, labels.MatchRegexp, exprDollar[3].str} + } + case 4: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:35 + { + exprVAL.Expr = &matchExpr{exprDollar[1].Expr, labels.MatchEqual, exprDollar[3].str} + } + case 5: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:36 + { + exprVAL.Expr = &matchExpr{exprDollar[1].Expr, labels.MatchNotRegexp, exprDollar[3].str} + } + case 6: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:37 + { + exprVAL.Expr = &matchExpr{exprDollar[1].Expr, labels.MatchNotEqual, exprDollar[3].str} + } + case 7: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:41 + { + exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher} + } + case 8: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:42 + { + exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher) + } + case 9: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:46 + { + exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].Identifier, exprDollar[3].str) + } + case 10: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:47 + { + exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].Identifier, exprDollar[3].str) + } + case 11: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:48 + { + exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].Identifier, exprDollar[3].str) + } + case 12: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:49 + { + exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].Identifier, exprDollar[3].str) + } + case 13: + exprDollar = exprS[exprpt-1 : exprpt+1] +//line pkg/logql/expr.y:53 + { + exprVAL.Identifier = exprDollar[1].str + } + case 14: + exprDollar = exprS[exprpt-3 : exprpt+1] +//line pkg/logql/expr.y:54 + { + exprVAL.Identifier = exprDollar[1].Identifier + "." 
+ exprDollar[3].str + } + } + goto exprstack /* stack new state and value */ +} diff --git a/pkg/parser/labels.y b/pkg/logql/expr.y similarity index 63% rename from pkg/parser/labels.y rename to pkg/logql/expr.y index b6aba4b5ffd7c1142f0df8b932aee0ba5c87903f..a72206bcbfdd51b56af90db971f194e7fce4d93c 100644 --- a/pkg/parser/labels.y +++ b/pkg/logql/expr.y @@ -1,5 +1,5 @@ %{ -package parser +package logql import ( "github.com/prometheus/prometheus/pkg/labels" @@ -7,33 +7,35 @@ import ( %} %union{ - MatchersExpr []*labels.Matcher + Expr Expr Matchers []*labels.Matcher Matcher *labels.Matcher - LabelsExpr labels.Labels - Labels labels.Labels - Label labels.Label str string int int64 Identifier string } -%start expr +%start root +%type <Expr> expr %type <Matchers> matchers %type <Matcher> matcher -%type <Labels> labels -%type <Label> label %type <Identifier> identifier %token <str> IDENTIFIER STRING -%token <val> MATCHERS LABELS EQ NEQ RE NRE OPEN_BRACE CLOSE_BRACE COMMA DOT +%token <val> MATCHERS LABELS EQ NEQ RE NRE OPEN_BRACE CLOSE_BRACE COMMA DOT PIPE_MATCH PIPE_EXACT %% +root: expr { exprlex.(*lexer).expr = $1 }; + expr: - MATCHERS OPEN_BRACE matchers CLOSE_BRACE { labelslex.(*lexer).matcher = $3 }; - | LABELS OPEN_BRACE labels CLOSE_BRACE { labelslex.(*lexer).labels = $3 } + OPEN_BRACE matchers CLOSE_BRACE { $$ = &matchersExpr{ $2 } } + | expr PIPE_MATCH STRING { $$ = &matchExpr{ $1, labels.MatchRegexp, $3 } } + | expr PIPE_EXACT STRING { $$ = &matchExpr{ $1, labels.MatchEqual, $3 } } + | expr NRE STRING { $$ = &matchExpr{ $1, labels.MatchNotRegexp, $3 } } + | expr NEQ STRING { $$ = &matchExpr{ $1, labels.MatchNotEqual, $3 } } + ; matchers: matcher { $$ = []*labels.Matcher{ $1 } } @@ -47,17 +49,8 @@ matcher: | identifier NRE STRING { $$ = mustNewMatcher(labels.MatchNotRegexp, $1, $3) } ; -labels: - label { $$ = labels.Labels{ $1 } } - | labels COMMA label { $$ = append($1, $3) } - ; - -label: - identifier EQ STRING { $$ = labels.Label{Name: $1, Value: $3} } - ; - identifier: IDENTIFIER { $$ = $1 } | identifier DOT IDENTIFIER { $$ = $1 + "." + $3 } ; -%% +%% \ No newline at end of file diff --git a/pkg/logql/parser.go b/pkg/logql/parser.go new file mode 100644 index 0000000000000000000000000000000000000000..0a390d45743df53f0e948096f0bbcefeca70231d --- /dev/null +++ b/pkg/logql/parser.go @@ -0,0 +1,117 @@ +package logql + +import ( + "fmt" + "strconv" + "strings" + "text/scanner" + + "github.com/prometheus/prometheus/pkg/labels" +) + +// ParseExpr parses a string and returns an Expr. +func ParseExpr(input string) (Expr, error) { + l := lexer{ + Scanner: scanner.Scanner{ + Mode: scanner.SkipComments | scanner.ScanStrings | scanner.ScanInts, + }, + } + l.Init(strings.NewReader(input)) + e := exprParse(&l) + if e != 0 { + return nil, l.err + } + return l.expr, nil +} + +// Expr is a LogQL expression. 
+type Expr interface { + Eval() + Walk(func(Expr) error) error +} + +type matchersExpr struct { + matchers []*labels.Matcher +} + +func (e *matchersExpr) Eval() {} + +func (e *matchersExpr) Walk(f func(Expr) error) error { + return f(e) +} + +type matchExpr struct { + left Expr + ty labels.MatchType + match string +} + +func (e *matchExpr) Eval() {} + +func (e *matchExpr) Walk(f func(Expr) error) error { + if err := f(e); err != nil { + return err + } + return e.left.Walk(f) +} + +func mustNewMatcher(t labels.MatchType, n, v string) *labels.Matcher { + m, err := labels.NewMatcher(t, n, v) + if err != nil { + panic(err) + } + return m +} + +var tokens = map[string]int{ + ",": COMMA, + ".": DOT, + "{": OPEN_BRACE, + "}": CLOSE_BRACE, + "=": EQ, + "!=": NEQ, + "=~": RE, + "!~": NRE, + "|=": PIPE_EXACT, + "|~": PIPE_MATCH, +} + +type lexer struct { + scanner.Scanner + err error + + expr Expr +} + +func (l *lexer) Lex(lval *exprSymType) int { + r := l.Scan() + var err error + switch r { + case scanner.EOF: + return 0 + + case scanner.String: + lval.str, err = strconv.Unquote(l.TokenText()) + if err != nil { + l.err = err + return 0 + } + return STRING + } + + if tok, ok := tokens[l.TokenText()+string(l.Peek())]; ok { + l.Next() + return tok + } + + if tok, ok := tokens[l.TokenText()]; ok { + return tok + } + + lval.str = l.TokenText() + return IDENTIFIER +} + +func (l *lexer) Error(s string) { + l.err = fmt.Errorf(s) +} diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ffba818fdf3857090321be0f037f2094211597d4 --- /dev/null +++ b/pkg/logql/parser_test.go @@ -0,0 +1,98 @@ +package logql + +import ( + "strings" + "testing" + "text/scanner" + + "github.com/prometheus/prometheus/pkg/labels" + "github.com/stretchr/testify/require" +) + +func TestLex(t *testing.T) { + for _, tc := range []struct { + input string + expected []int + }{ + {`{foo="bar"}`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, + {`{ foo = "bar" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, + {`{ foo != "bar" }`, []int{OPEN_BRACE, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}}, + {`{ foo =~ "bar" }`, []int{OPEN_BRACE, IDENTIFIER, RE, STRING, CLOSE_BRACE}}, + {`{ foo !~ "bar" }`, []int{OPEN_BRACE, IDENTIFIER, NRE, STRING, CLOSE_BRACE}}, + {`{ foo = "bar", bar != "baz" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, + COMMA, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}}, + {`{ foo = "ba\"r" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, + } { + t.Run(tc.input, func(t *testing.T) { + actual := []int{} + l := lexer{ + Scanner: scanner.Scanner{ + Mode: scanner.SkipComments | scanner.ScanStrings, + }, + } + l.Init(strings.NewReader(tc.input)) + var lval exprSymType + for { + tok := l.Lex(&lval) + if tok == 0 { + break + } + actual = append(actual, tok) + } + require.Equal(t, tc.expected, actual) + }) + } +} + +func TestParse(t *testing.T) { + for _, tc := range []struct { + input string + expected Expr + }{ + { + `{foo="bar"}`, + &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + }, + { + `{http.url=~"^/admin"}`, + &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "http.url", "^/admin")}}, + }, + { + `{ foo = "bar" }`, + &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + }, + { + `{ foo != "bar" }`, + &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotEqual, "foo", "bar")}}, + }, + { + `{ foo =~ 
"bar" }`, + &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "foo", "bar")}}, + }, + { + `{ foo !~ "bar" }`, + &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}}, + }, + { + `{ foo = "bar", bar != "baz" }`, + &matchersExpr{matchers: []*labels.Matcher{ + mustNewMatcher(labels.MatchEqual, "foo", "bar"), + mustNewMatcher(labels.MatchNotEqual, "bar", "baz"), + }}, + }, + { + `{foo="bar"} |= "baz"`, + &matchExpr{ + left: &matchersExpr{matchers: []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}}, + ty: labels.MatchEqual, + match: "baz", + }, + }, + } { + t.Run(tc.input, func(t *testing.T) { + matchers, err := ParseExpr(tc.input) + require.NoError(t, err) + require.Equal(t, tc.expected, matchers) + }) + } +} diff --git a/pkg/parser/labels.go b/pkg/parser/labels.go deleted file mode 100644 index 6863c1963cfc25521a83f179dc76720c9424b774..0000000000000000000000000000000000000000 --- a/pkg/parser/labels.go +++ /dev/null @@ -1,548 +0,0 @@ -// Code generated by goyacc -p labels -o pkg/parser/labels.go pkg/parser/labels.y. DO NOT EDIT. - -//line pkg/parser/labels.y:2 -package parser - -import __yyfmt__ "fmt" - -//line pkg/parser/labels.y:2 -import ( - "github.com/prometheus/prometheus/pkg/labels" -) - -//line pkg/parser/labels.y:9 -type labelsSymType struct { - yys int - MatchersExpr []*labels.Matcher - Matchers []*labels.Matcher - Matcher *labels.Matcher - LabelsExpr labels.Labels - Labels labels.Labels - Label labels.Label - str string - int int64 - Identifier string -} - -const IDENTIFIER = 57346 -const STRING = 57347 -const MATCHERS = 57348 -const LABELS = 57349 -const EQ = 57350 -const NEQ = 57351 -const RE = 57352 -const NRE = 57353 -const OPEN_BRACE = 57354 -const CLOSE_BRACE = 57355 -const COMMA = 57356 -const DOT = 57357 - -var labelsToknames = [...]string{ - "$end", - "error", - "$unk", - "IDENTIFIER", - "STRING", - "MATCHERS", - "LABELS", - "EQ", - "NEQ", - "RE", - "NRE", - "OPEN_BRACE", - "CLOSE_BRACE", - "COMMA", - "DOT", -} -var labelsStatenames = [...]string{} - -const labelsEofCode = 1 -const labelsErrCode = 2 -const labelsInitialStackSize = 16 - -//line pkg/parser/labels.y:63 - -//line yacctab:1 -var labelsExca = [...]int{ - -1, 1, - 1, -1, - -2, 0, -} - -const labelsPrivate = 57344 - -const labelsLast = 32 - -var labelsAct = [...]int{ - - 8, 11, 15, 16, 17, 18, 12, 22, 5, 19, - 20, 21, 7, 4, 19, 13, 14, 2, 3, 30, - 27, 26, 12, 29, 25, 24, 9, 23, 28, 10, - 6, 1, -} -var labelsPact = [...]int{ - - 11, -1000, 1, -4, 22, 22, 2, -1000, -6, -1000, - -3, -1000, -1, -1000, 22, 20, 19, 16, 15, 24, - -1000, 22, 14, -1000, -1000, -1000, -1000, -1000, -1000, -1000, - -1000, -} -var labelsPgo = [...]int{ - - 0, 31, 30, 12, 29, 1, 0, -} -var labelsR1 = [...]int{ - - 0, 1, 1, 2, 2, 3, 3, 3, 3, 4, - 4, 5, 6, 6, -} -var labelsR2 = [...]int{ - - 0, 4, 4, 1, 3, 3, 3, 3, 3, 1, - 3, 3, 1, 3, -} -var labelsChk = [...]int{ - - -1000, -1, 6, 7, 12, 12, -2, -3, -6, 4, - -4, -5, -6, 13, 14, 8, 9, 10, 11, 15, - 13, 14, 8, -3, 5, 5, 5, 5, 4, -5, - 5, -} -var labelsDef = [...]int{ - - 0, -2, 0, 0, 0, 0, 0, 3, 0, 12, - 0, 9, 0, 1, 0, 0, 0, 0, 0, 0, - 2, 0, 0, 4, 5, 6, 7, 8, 13, 10, - 11, -} -var labelsTok1 = [...]int{ - - 1, -} -var labelsTok2 = [...]int{ - - 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, - 12, 13, 14, 15, -} -var labelsTok3 = [...]int{ - 0, -} - -var labelsErrorMessages = [...]struct { - state int - token int - msg string -}{} - -//line yaccpar:1 - -/* parser for yacc output */ - -var ( - labelsDebug = 0 - 
labelsErrorVerbose = false -) - -type labelsLexer interface { - Lex(lval *labelsSymType) int - Error(s string) -} - -type labelsParser interface { - Parse(labelsLexer) int - Lookahead() int -} - -type labelsParserImpl struct { - lval labelsSymType - stack [labelsInitialStackSize]labelsSymType - char int -} - -func (p *labelsParserImpl) Lookahead() int { - return p.char -} - -func labelsNewParser() labelsParser { - return &labelsParserImpl{} -} - -const labelsFlag = -1000 - -func labelsTokname(c int) string { - if c >= 1 && c-1 < len(labelsToknames) { - if labelsToknames[c-1] != "" { - return labelsToknames[c-1] - } - } - return __yyfmt__.Sprintf("tok-%v", c) -} - -func labelsStatname(s int) string { - if s >= 0 && s < len(labelsStatenames) { - if labelsStatenames[s] != "" { - return labelsStatenames[s] - } - } - return __yyfmt__.Sprintf("state-%v", s) -} - -func labelsErrorMessage(state, lookAhead int) string { - const TOKSTART = 4 - - if !labelsErrorVerbose { - return "syntax error" - } - - for _, e := range labelsErrorMessages { - if e.state == state && e.token == lookAhead { - return "syntax error: " + e.msg - } - } - - res := "syntax error: unexpected " + labelsTokname(lookAhead) - - // To match Bison, suggest at most four expected tokens. - expected := make([]int, 0, 4) - - // Look for shiftable tokens. - base := labelsPact[state] - for tok := TOKSTART; tok-1 < len(labelsToknames); tok++ { - if n := base + tok; n >= 0 && n < labelsLast && labelsChk[labelsAct[n]] == tok { - if len(expected) == cap(expected) { - return res - } - expected = append(expected, tok) - } - } - - if labelsDef[state] == -2 { - i := 0 - for labelsExca[i] != -1 || labelsExca[i+1] != state { - i += 2 - } - - // Look for tokens that we accept or reduce. - for i += 2; labelsExca[i] >= 0; i += 2 { - tok := labelsExca[i] - if tok < TOKSTART || labelsExca[i+1] == 0 { - continue - } - if len(expected) == cap(expected) { - return res - } - expected = append(expected, tok) - } - - // If the default action is to accept or reduce, give up. - if labelsExca[i+1] != 0 { - return res - } - } - - for i, tok := range expected { - if i == 0 { - res += ", expecting " - } else { - res += " or " - } - res += labelsTokname(tok) - } - return res -} - -func labelslex1(lex labelsLexer, lval *labelsSymType) (char, token int) { - token = 0 - char = lex.Lex(lval) - if char <= 0 { - token = labelsTok1[0] - goto out - } - if char < len(labelsTok1) { - token = labelsTok1[char] - goto out - } - if char >= labelsPrivate { - if char < labelsPrivate+len(labelsTok2) { - token = labelsTok2[char-labelsPrivate] - goto out - } - } - for i := 0; i < len(labelsTok3); i += 2 { - token = labelsTok3[i+0] - if token == char { - token = labelsTok3[i+1] - goto out - } - } - -out: - if token == 0 { - token = labelsTok2[1] /* unknown char */ - } - if labelsDebug >= 3 { - __yyfmt__.Printf("lex %s(%d)\n", labelsTokname(token), uint(char)) - } - return char, token -} - -func labelsParse(labelslex labelsLexer) int { - return labelsNewParser().Parse(labelslex) -} - -func (labelsrcvr *labelsParserImpl) Parse(labelslex labelsLexer) int { - var labelsn int - var labelsVAL labelsSymType - var labelsDollar []labelsSymType - _ = labelsDollar // silence set and not used - labelsS := labelsrcvr.stack[:] - - Nerrs := 0 /* number of errors */ - Errflag := 0 /* error recovery flag */ - labelsstate := 0 - labelsrcvr.char = -1 - labelstoken := -1 // labelsrcvr.char translated into internal numbering - defer func() { - // Make sure we report no lookahead when not parsing. 
- labelsstate = -1 - labelsrcvr.char = -1 - labelstoken = -1 - }() - labelsp := -1 - goto labelsstack - -ret0: - return 0 - -ret1: - return 1 - -labelsstack: - /* put a state and value onto the stack */ - if labelsDebug >= 4 { - __yyfmt__.Printf("char %v in %v\n", labelsTokname(labelstoken), labelsStatname(labelsstate)) - } - - labelsp++ - if labelsp >= len(labelsS) { - nyys := make([]labelsSymType, len(labelsS)*2) - copy(nyys, labelsS) - labelsS = nyys - } - labelsS[labelsp] = labelsVAL - labelsS[labelsp].yys = labelsstate - -labelsnewstate: - labelsn = labelsPact[labelsstate] - if labelsn <= labelsFlag { - goto labelsdefault /* simple state */ - } - if labelsrcvr.char < 0 { - labelsrcvr.char, labelstoken = labelslex1(labelslex, &labelsrcvr.lval) - } - labelsn += labelstoken - if labelsn < 0 || labelsn >= labelsLast { - goto labelsdefault - } - labelsn = labelsAct[labelsn] - if labelsChk[labelsn] == labelstoken { /* valid shift */ - labelsrcvr.char = -1 - labelstoken = -1 - labelsVAL = labelsrcvr.lval - labelsstate = labelsn - if Errflag > 0 { - Errflag-- - } - goto labelsstack - } - -labelsdefault: - /* default state action */ - labelsn = labelsDef[labelsstate] - if labelsn == -2 { - if labelsrcvr.char < 0 { - labelsrcvr.char, labelstoken = labelslex1(labelslex, &labelsrcvr.lval) - } - - /* look through exception table */ - xi := 0 - for { - if labelsExca[xi+0] == -1 && labelsExca[xi+1] == labelsstate { - break - } - xi += 2 - } - for xi += 2; ; xi += 2 { - labelsn = labelsExca[xi+0] - if labelsn < 0 || labelsn == labelstoken { - break - } - } - labelsn = labelsExca[xi+1] - if labelsn < 0 { - goto ret0 - } - } - if labelsn == 0 { - /* error ... attempt to resume parsing */ - switch Errflag { - case 0: /* brand new error */ - labelslex.Error(labelsErrorMessage(labelsstate, labelstoken)) - Nerrs++ - if labelsDebug >= 1 { - __yyfmt__.Printf("%s", labelsStatname(labelsstate)) - __yyfmt__.Printf(" saw %s\n", labelsTokname(labelstoken)) - } - fallthrough - - case 1, 2: /* incompletely recovered error ... try again */ - Errflag = 3 - - /* find a state where "error" is a legal shift action */ - for labelsp >= 0 { - labelsn = labelsPact[labelsS[labelsp].yys] + labelsErrCode - if labelsn >= 0 && labelsn < labelsLast { - labelsstate = labelsAct[labelsn] /* simulate a shift of "error" */ - if labelsChk[labelsstate] == labelsErrCode { - goto labelsstack - } - } - - /* the current p has no shift on "error", pop stack */ - if labelsDebug >= 2 { - __yyfmt__.Printf("error recovery pops state %d\n", labelsS[labelsp].yys) - } - labelsp-- - } - /* there is no state on the stack with an error shift ... abort */ - goto ret1 - - case 3: /* no shift yet; clobber input char */ - if labelsDebug >= 2 { - __yyfmt__.Printf("error recovery discards %s\n", labelsTokname(labelstoken)) - } - if labelstoken == labelsEofCode { - goto ret1 - } - labelsrcvr.char = -1 - labelstoken = -1 - goto labelsnewstate /* try again in the same state */ - } - } - - /* reduction by production labelsn */ - if labelsDebug >= 2 { - __yyfmt__.Printf("reduce %v in:\n\t%v\n", labelsn, labelsStatname(labelsstate)) - } - - labelsnt := labelsn - labelspt := labelsp - _ = labelspt // guard against "declared and not used" - - labelsp -= labelsR2[labelsn] - // labelsp is now the index of $0. Perform the default action. Iff the - // reduced production is ε, $1 is possibly out of range. 
- if labelsp+1 >= len(labelsS) { - nyys := make([]labelsSymType, len(labelsS)*2) - copy(nyys, labelsS) - labelsS = nyys - } - labelsVAL = labelsS[labelsp+1] - - /* consult goto table to find next state */ - labelsn = labelsR1[labelsn] - labelsg := labelsPgo[labelsn] - labelsj := labelsg + labelsS[labelsp].yys + 1 - - if labelsj >= labelsLast { - labelsstate = labelsAct[labelsg] - } else { - labelsstate = labelsAct[labelsj] - if labelsChk[labelsstate] != -labelsn { - labelsstate = labelsAct[labelsg] - } - } - // dummy call; replaced with literal code - switch labelsnt { - - case 1: - labelsDollar = labelsS[labelspt-4 : labelspt+1] -//line pkg/parser/labels.y:35 - { - labelslex.(*lexer).matcher = labelsDollar[3].Matchers - } - case 2: - labelsDollar = labelsS[labelspt-4 : labelspt+1] -//line pkg/parser/labels.y:36 - { - labelslex.(*lexer).labels = labelsDollar[3].Labels - } - case 3: - labelsDollar = labelsS[labelspt-1 : labelspt+1] -//line pkg/parser/labels.y:39 - { - labelsVAL.Matchers = []*labels.Matcher{labelsDollar[1].Matcher} - } - case 4: - labelsDollar = labelsS[labelspt-3 : labelspt+1] -//line pkg/parser/labels.y:40 - { - labelsVAL.Matchers = append(labelsDollar[1].Matchers, labelsDollar[3].Matcher) - } - case 5: - labelsDollar = labelsS[labelspt-3 : labelspt+1] -//line pkg/parser/labels.y:44 - { - labelsVAL.Matcher = mustNewMatcher(labels.MatchEqual, labelsDollar[1].Identifier, labelsDollar[3].str) - } - case 6: - labelsDollar = labelsS[labelspt-3 : labelspt+1] -//line pkg/parser/labels.y:45 - { - labelsVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, labelsDollar[1].Identifier, labelsDollar[3].str) - } - case 7: - labelsDollar = labelsS[labelspt-3 : labelspt+1] -//line pkg/parser/labels.y:46 - { - labelsVAL.Matcher = mustNewMatcher(labels.MatchRegexp, labelsDollar[1].Identifier, labelsDollar[3].str) - } - case 8: - labelsDollar = labelsS[labelspt-3 : labelspt+1] -//line pkg/parser/labels.y:47 - { - labelsVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, labelsDollar[1].Identifier, labelsDollar[3].str) - } - case 9: - labelsDollar = labelsS[labelspt-1 : labelspt+1] -//line pkg/parser/labels.y:51 - { - labelsVAL.Labels = labels.Labels{labelsDollar[1].Label} - } - case 10: - labelsDollar = labelsS[labelspt-3 : labelspt+1] -//line pkg/parser/labels.y:52 - { - labelsVAL.Labels = append(labelsDollar[1].Labels, labelsDollar[3].Label) - } - case 11: - labelsDollar = labelsS[labelspt-3 : labelspt+1] -//line pkg/parser/labels.y:56 - { - labelsVAL.Label = labels.Label{Name: labelsDollar[1].Identifier, Value: labelsDollar[3].str} - } - case 12: - labelsDollar = labelsS[labelspt-1 : labelspt+1] -//line pkg/parser/labels.y:60 - { - labelsVAL.Identifier = labelsDollar[1].str - } - case 13: - labelsDollar = labelsS[labelspt-3 : labelspt+1] -//line pkg/parser/labels.y:61 - { - labelsVAL.Identifier = labelsDollar[1].Identifier + "." + labelsDollar[3].str - } - } - goto labelsstack /* stack new state and value */ -} diff --git a/pkg/parser/parser.go b/pkg/parser/parser.go deleted file mode 100644 index 79dea0979d18ddfa093d335bb5aab073e40bd6fb..0000000000000000000000000000000000000000 --- a/pkg/parser/parser.go +++ /dev/null @@ -1,125 +0,0 @@ -package parser - -import ( - "fmt" - "strconv" - "strings" - "text/scanner" - - "github.com/prometheus/prometheus/pkg/labels" -) - -// Matchers parses a string and returns a set of matchers. 
-func Matchers(input string) ([]*labels.Matcher, error) { - l, err := parse(MATCHERS, input) - if err != nil { - return nil, err - } - return l.matcher, nil -} - -// Labels parses a string and returns a set of labels. -func Labels(input string) (labels.Labels, error) { - l, err := parse(LABELS, input) - if err != nil { - return nil, err - } - return l.labels, nil -} - -func parse(thing int, input string) (*lexer, error) { - l := lexer{ - thing: thing, - Scanner: scanner.Scanner{ - Mode: scanner.SkipComments | scanner.ScanStrings | scanner.ScanInts, - }, - } - l.Init(strings.NewReader(input)) - e := labelsParse(&l) - if e != 0 { - return nil, l.err - } - return &l, nil -} - -func mustNewMatcher(t labels.MatchType, n, v string) *labels.Matcher { - m, err := labels.NewMatcher(t, n, v) - if err != nil { - panic(err) - } - return m -} - -var tokens = map[string]int{ - ",": COMMA, - ".": DOT, - "{": OPEN_BRACE, - "}": CLOSE_BRACE, - "=": EQ, - "!=": NEQ, - "=~": RE, - "!~": NRE, -} - -type lexer struct { - // What type of thing are we parsing? - thing int - sent bool - - // Output - labels labels.Labels - matcher []*labels.Matcher - - scanner.Scanner - err error -} - -func (l *lexer) Lex(lval *labelsSymType) int { - if !l.sent { - l.sent = true - return l.thing - } - - r := l.Scan() - var err error - switch r { - case scanner.EOF: - return 0 - - case scanner.String: - lval.str, err = strconv.Unquote(l.TokenText()) - if err != nil { - l.err = err - return 0 - } - return STRING - } - - switch l.TokenText() { - case "=": - if l.Peek() == '~' { - l.Scan() - return RE - } - return EQ - case "!": - if l.Peek() == '=' { - l.Scan() - return NEQ - } else if l.Peek() == '~' { - l.Scan() - return NRE - } - } - - if token, ok := tokens[l.TokenText()]; ok { - return token - } - - lval.str = l.TokenText() - return IDENTIFIER -} - -func (l *lexer) Error(s string) { - l.err = fmt.Errorf(s) -} diff --git a/pkg/parser/parser_test.go b/pkg/parser/parser_test.go deleted file mode 100644 index a6bc2eaa011ea8d89aaabb9796991ee4ee433427..0000000000000000000000000000000000000000 --- a/pkg/parser/parser_test.go +++ /dev/null @@ -1,98 +0,0 @@ -package parser - -import ( - "strings" - "testing" - "text/scanner" - - "github.com/prometheus/prometheus/pkg/labels" - "github.com/stretchr/testify/require" -) - -func TestLex(t *testing.T) { - for _, tc := range []struct { - input string - expected []int - }{ - {`{foo="bar"}`, []int{MATCHERS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, - {`{ foo = "bar" }`, []int{MATCHERS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, - {`{ foo != "bar" }`, []int{MATCHERS, OPEN_BRACE, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}}, - {`{ foo =~ "bar" }`, []int{MATCHERS, OPEN_BRACE, IDENTIFIER, RE, STRING, CLOSE_BRACE}}, - {`{ foo !~ "bar" }`, []int{MATCHERS, OPEN_BRACE, IDENTIFIER, NRE, STRING, CLOSE_BRACE}}, - {`{ foo = "bar", bar != "baz" }`, []int{MATCHERS, OPEN_BRACE, IDENTIFIER, EQ, STRING, - COMMA, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}}, - {`{ foo = "ba\"r" }`, []int{MATCHERS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}}, - } { - t.Run(tc.input, func(t *testing.T) { - actual := []int{} - l := lexer{ - thing: MATCHERS, - Scanner: scanner.Scanner{ - Mode: scanner.SkipComments | scanner.ScanStrings, - }, - } - l.Init(strings.NewReader(tc.input)) - var lval labelsSymType - for { - tok := l.Lex(&lval) - if tok == 0 { - break - } - actual = append(actual, tok) - } - require.Equal(t, tc.expected, actual) - }) - } -} - -func TestParse(t *testing.T) { - for _, tc := range []struct { - input 
string - expected []*labels.Matcher - }{ - { - `{foo="bar"}`, - []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, - }, - { - `{http.url=~"^/admin"}`, - []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "http.url", "^/admin")}, - }, - { - `{ foo = "bar" }`, - []*labels.Matcher{mustNewMatcher(labels.MatchEqual, "foo", "bar")}, - }, - { - `{ foo != "bar" }`, - []*labels.Matcher{mustNewMatcher(labels.MatchNotEqual, "foo", "bar")}, - }, - { - `{ foo =~ "bar" }`, - []*labels.Matcher{mustNewMatcher(labels.MatchRegexp, "foo", "bar")}, - }, - { - `{ foo !~ "bar" }`, - []*labels.Matcher{mustNewMatcher(labels.MatchNotRegexp, "foo", "bar")}, - }, - { - `{ foo = "bar", bar != "baz" }`, - []*labels.Matcher{ - mustNewMatcher(labels.MatchEqual, "foo", "bar"), - mustNewMatcher(labels.MatchNotEqual, "bar", "baz"), - }, - }, - { - `{http.url=~"^/admin", http.status_code!="200"}`, - []*labels.Matcher{ - mustNewMatcher(labels.MatchRegexp, "http.url", "^/admin"), - mustNewMatcher(labels.MatchNotEqual, "http.status_code", "200"), - }, - }, - } { - t.Run(tc.input, func(t *testing.T) { - output, err := Matchers(tc.input) - require.Nil(t, err) - require.Equal(t, tc.expected, output) - }) - } -} diff --git a/pkg/querier/store.go b/pkg/querier/store.go index 5680778618ab18e359d0684f16d84a5de4060b4a..8dc0b5d859e7c52bfecabf44dcab21857d3aa109 100644 --- a/pkg/querier/store.go +++ b/pkg/querier/store.go @@ -4,6 +4,8 @@ import ( "context" "sort" + "github.com/prometheus/prometheus/promql" + "github.com/cortexproject/cortex/pkg/chunk" "github.com/opentracing/opentracing-go" "github.com/prometheus/common/model" @@ -12,11 +14,10 @@ import ( "github.com/grafana/loki/pkg/chunkenc" "github.com/grafana/loki/pkg/iter" "github.com/grafana/loki/pkg/logproto" - "github.com/grafana/loki/pkg/parser" ) func (q Querier) queryStore(ctx context.Context, req *logproto.QueryRequest) ([]iter.EntryIterator, error) { - matchers, err := parser.Matchers(req.Query) + matchers, err := promql.ParseMetricSelector(req.Query) if err != nil { return nil, err } diff --git a/pkg/util/conv.go b/pkg/util/conv.go index 77dce97f315515b3f144d12c357da8755c99f4dc..29ffc1d087d9fbca4f3b91a2c3a58ddad4075e86 100644 --- a/pkg/util/conv.go +++ b/pkg/util/conv.go @@ -2,22 +2,15 @@ package util import ( "github.com/cortexproject/cortex/pkg/ingester/client" - "github.com/grafana/loki/pkg/parser" + "github.com/prometheus/prometheus/promql" ) // ToClientLabels parses the labels and converts them to the Cortex type. func ToClientLabels(labels string) ([]client.LabelAdapter, error) { - ls, err := parser.Labels(labels) + ls, err := promql.ParseMetric(labels) if err != nil { return nil, err } - pairs := make([]client.LabelAdapter, 0, len(ls)) - for i := 0; i < len(ls); i++ { - pairs = append(pairs, client.LabelAdapter{ - Name: ls[i].Name, - Value: ls[i].Value, - }) - } - return pairs, nil + return client.FromLabelsToLabelAdapaters(ls), nil }
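
For reference, here is a minimal usage sketch (not part of the patch) of the entry points this diff introduces or switches to: logql.ParseExpr for full LogQL expressions, and promql.ParseMetricSelector / promql.ParseMetric, which replace the removed pkg/parser helpers at the call sites above. The example queries and the main wrapper are illustrative only; the package paths and function signatures are the ones that appear in the diff.

package main

import (
	"fmt"

	"github.com/grafana/loki/pkg/logql"
	"github.com/prometheus/prometheus/promql"
)

func main() {
	// A stream selector followed by a line-filter stage parses into an Expr tree:
	// a matchExpr ("|=" stage) wrapping the inner matchersExpr.
	expr, err := logql.ParseExpr(`{foo="bar"} |= "baz"`)
	if err != nil {
		panic(err)
	}
	// Walk visits the filter expression first, then the selector it wraps.
	_ = expr.Walk(func(e logql.Expr) error {
		fmt.Printf("%T\n", e)
		return nil
	})

	// Plain selectors and label sets now go through Prometheus' promql parser,
	// as in instance.go, store.go, and conv.go above.
	matchers, err := promql.ParseMetricSelector(`{foo="bar"}`)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(matchers)) // 1 matcher: foo="bar"

	ls, err := promql.ParseMetric(`{foo="bar", job="app"}`)
	if err != nil {
		panic(err)
	}
	fmt.Println(ls) // parsed label set
}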