diff --git a/pkg/logql/expr.y b/pkg/logql/expr.y
index ed6d97056e88bae5e13235e883906fd42a757dc7..e87eea723fac43bcf700205ad873399a7f04ac5c 100644
--- a/pkg/logql/expr.y
+++ b/pkg/logql/expr.y
@@ -9,18 +9,18 @@ import (
 
 %union{
   Expr                    Expr
-  LogExpr                 LogSelectorExpr
-  RangeAggregationExpr    SampleExpr
-  VectorAggregationExpr   SampleExpr
-  LogRangeExpr            *logRange
   Filter                  labels.MatchType
-  Selector                []*labels.Matcher
-  Matchers                []*labels.Matcher
-  Matcher                 *labels.Matcher
   Grouping                *grouping
   Labels                  []string
-  VectorOp                string
+  LogExpr                 LogSelectorExpr
+  LogRangeExpr            *logRange
+  Matcher                 *labels.Matcher
+  Matchers                []*labels.Matcher
+  RangeAggregationExpr    SampleExpr
   RangeOp                 string
+  Selector                []*labels.Matcher
+  VectorAggregationExpr   SampleExpr
+  VectorOp                string
   str                     string
   duration                time.Duration
   int                     int64
@@ -30,17 +30,17 @@ import (
 
 %type <Expr>                  expr
 %type <Filter>                filter
-%type <Selector>              selector
-%type <Matchers>              matchers
-%type <Matcher>               matcher
-%type <VectorOp>              vectorOp
-%type <RangeOp>               rangeOp
-%type <Labels>                labels
 %type <Grouping>              grouping
+%type <Labels>                labels
 %type <LogExpr>               logExpr
+%type <LogRangeExpr>          logRangeExpr
+%type <Matcher>               matcher
+%type <Matchers>              matchers
 %type <RangeAggregationExpr>  rangeAggregationExpr
+%type <RangeOp>               rangeOp
+%type <Selector>              selector
 %type <VectorAggregationExpr> vectorAggregationExpr
-%type <LogRangeExpr>          logRangeExpr
+%type <VectorOp>              vectorOp
 
 %token <str>      IDENTIFIER STRING
 %token <duration> DURATION
@@ -72,16 +72,18 @@ logRangeExpr: logExpr DURATION { $$ = mustNewRange($1, $2) };
 rangeAggregationExpr: rangeOp OPEN_PARENTHESIS logRangeExpr CLOSE_PARENTHESIS { $$ = newRangeAggregationExpr($3,$1) };
 
 vectorAggregationExpr:
+    // Aggregations with 1 argument.
       vectorOp OPEN_PARENTHESIS rangeAggregationExpr CLOSE_PARENTHESIS                               { $$ = mustNewVectorAggregationExpr($3, $1, nil, nil) }
-    | vectorOp grouping OPEN_PARENTHESIS rangeAggregationExpr CLOSE_PARENTHESIS                      { $$ = mustNewVectorAggregationExpr($4, $1, $2, nil,) }
-    | vectorOp OPEN_PARENTHESIS rangeAggregationExpr CLOSE_PARENTHESIS grouping                      { $$ = mustNewVectorAggregationExpr($3, $1, $5, nil) }
-    | vectorOp OPEN_PARENTHESIS IDENTIFIER COMMA rangeAggregationExpr CLOSE_PARENTHESIS              { $$ = mustNewVectorAggregationExpr($5, $1, nil, &$3) }
-    | vectorOp OPEN_PARENTHESIS IDENTIFIER COMMA rangeAggregationExpr CLOSE_PARENTHESIS grouping     { $$ = mustNewVectorAggregationExpr($5, $1, $7, &$3) }
     | vectorOp OPEN_PARENTHESIS vectorAggregationExpr CLOSE_PARENTHESIS                              { $$ = mustNewVectorAggregationExpr($3, $1, nil, nil) }
+    | vectorOp grouping OPEN_PARENTHESIS rangeAggregationExpr CLOSE_PARENTHESIS                      { $$ = mustNewVectorAggregationExpr($4, $1, $2, nil,) }
     | vectorOp grouping OPEN_PARENTHESIS vectorAggregationExpr CLOSE_PARENTHESIS                     { $$ = mustNewVectorAggregationExpr($4, $1, $2, nil,) }
+    | vectorOp OPEN_PARENTHESIS rangeAggregationExpr CLOSE_PARENTHESIS grouping                      { $$ = mustNewVectorAggregationExpr($3, $1, $5, nil) }
     | vectorOp OPEN_PARENTHESIS vectorAggregationExpr CLOSE_PARENTHESIS grouping                     { $$ = mustNewVectorAggregationExpr($3, $1, $5, nil) }
+    // Aggregations with 2 arguments.
     | vectorOp OPEN_PARENTHESIS IDENTIFIER COMMA vectorAggregationExpr CLOSE_PARENTHESIS             { $$ = mustNewVectorAggregationExpr($5, $1, nil, &$3) }
     | vectorOp OPEN_PARENTHESIS IDENTIFIER COMMA vectorAggregationExpr CLOSE_PARENTHESIS grouping    { $$ = mustNewVectorAggregationExpr($5, $1, $7, &$3) }
+    | vectorOp OPEN_PARENTHESIS IDENTIFIER COMMA rangeAggregationExpr CLOSE_PARENTHESIS              { $$ = mustNewVectorAggregationExpr($5, $1, nil, &$3) }
+    | vectorOp OPEN_PARENTHESIS IDENTIFIER COMMA rangeAggregationExpr CLOSE_PARENTHESIS grouping     { $$ = mustNewVectorAggregationExpr($5, $1, $7, &$3) }
     ;
 
 filter:
diff --git a/pkg/logql/expr.y.go b/pkg/logql/expr.y.go
index 9f5484601e98e982d6a3373f8c0dc5176d181deb..2e5a0521140472d1963ae37df5511da3da3ed8d9 100644
--- a/pkg/logql/expr.y.go
+++ b/pkg/logql/expr.y.go
@@ -16,18 +16,18 @@ import (
 type exprSymType struct {
 	yys                   int
 	Expr                  Expr
-	LogExpr               LogSelectorExpr
-	RangeAggregationExpr  SampleExpr
-	VectorAggregationExpr SampleExpr
-	LogRangeExpr          *logRange
 	Filter                labels.MatchType
-	Selector              []*labels.Matcher
-	Matchers              []*labels.Matcher
-	Matcher               *labels.Matcher
 	Grouping              *grouping
 	Labels                []string
-	VectorOp              string
+	LogExpr               LogSelectorExpr
+	LogRangeExpr          *logRange
+	Matcher               *labels.Matcher
+	Matchers              []*labels.Matcher
+	RangeAggregationExpr  SampleExpr
 	RangeOp               string
+	Selector              []*labels.Matcher
+	VectorAggregationExpr SampleExpr
+	VectorOp              string
 	str                   string
 	duration              time.Duration
 	int                   int64
@@ -109,7 +109,7 @@ const exprEofCode = 1
 const exprErrCode = 2
 const exprInitialStackSize = 16
 
-//line pkg/logql/expr.y:138
+//line pkg/logql/expr.y:140
 
 //line yacctab:1
 var exprExca = [...]int{
@@ -127,64 +127,64 @@ const exprLast = 123
 
 var exprAct = [...]int{
 
-	31, 36, 5, 4, 10, 64, 30, 48, 32, 33,
-	32, 33, 7, 47, 44, 82, 11, 12, 13, 14,
-	16, 17, 15, 18, 19, 20, 21, 81, 77, 76,
-	61, 59, 46, 45, 43, 11, 12, 13, 14, 16,
-	17, 15, 18, 19, 20, 21, 78, 3, 57, 63,
-	62, 80, 78, 67, 66, 28, 60, 79, 29, 52,
-	72, 71, 75, 74, 73, 11, 12, 13, 14, 16,
-	17, 15, 18, 19, 20, 21, 23, 42, 70, 83,
-	10, 23, 84, 85, 27, 58, 26, 23, 7, 27,
-	37, 26, 69, 24, 25, 27, 40, 26, 24, 25,
-	50, 68, 41, 65, 24, 25, 53, 54, 55, 56,
-	39, 8, 49, 38, 35, 51, 37, 9, 34, 6,
+	31, 5, 4, 36, 64, 10, 30, 78, 32, 33,
+	32, 33, 80, 7, 45, 82, 81, 11, 12, 13,
+	14, 16, 17, 15, 18, 19, 20, 21, 77, 78,
+	76, 60, 44, 43, 79, 11, 12, 13, 14, 16,
+	17, 15, 18, 19, 20, 21, 3, 59, 63, 62,
+	57, 10, 48, 66, 28, 67, 47, 46, 29, 7,
+	72, 73, 61, 74, 75, 11, 12, 13, 14, 16,
+	17, 15, 18, 19, 20, 21, 42, 23, 50, 52,
+	9, 39, 84, 85, 38, 27, 71, 26, 23, 70,
+	49, 23, 58, 51, 24, 25, 27, 40, 26, 27,
+	69, 26, 68, 83, 37, 24, 25, 65, 24, 25,
+	53, 54, 55, 56, 35, 6, 37, 8, 34, 41,
 	22, 2, 1,
 }
 var exprPact = [...]int{
 
-	-9, -1000, -1000, 85, -1000, -1000, -1000, 67, 37, -15,
+	-8, -1000, -1000, 89, -1000, -1000, -1000, 38, 37, -15,
 	112, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000, -1000,
-	-1000, -1000, 108, -1000, -1000, -1000, -1000, -1000, 74, 67,
-	10, 11, -8, -14, 98, 45, -1000, 97, -1000, -1000,
-	-1000, 26, 79, 9, 39, 8, 40, 99, 99, -1000,
-	-1000, 86, -1000, 96, 87, 73, 56, -1000, -1000, -13,
-	40, -13, 7, 6, 35, -1000, 29, -1000, -1000, -1000,
-	-1000, -1000, -1000, 5, -7, -1000, -1000, -1000, 75, -1000,
+	-1000, -1000, 79, -1000, -1000, -1000, -1000, -1000, 75, 38,
+	10, 36, 35, 31, 76, 65, -1000, 101, -1000, -1000,
+	-1000, 28, 86, 25, 9, 45, 40, 103, 103, -1000,
+	-1000, 100, -1000, 97, 95, 84, 81, -1000, -1000, -13,
+	-13, 40, 8, 6, 12, -1000, -10, -1000, -1000, -1000,
+	-1000, -1000, -1000, -1000, -6, -7, -1000, -1000, 99, -1000,
 	-1000, -13, -13, -1000, -1000, -1000,
 }
 var exprPgo = [...]int{
 
-	0, 122, 121, 120, 119, 118, 1, 117, 111, 5,
-	0, 47, 3, 2, 102,
+	0, 122, 121, 120, 0, 4, 46, 119, 3, 118,
+	2, 117, 115, 1, 80,
 }
 var exprR1 = [...]int{
 
-	0, 1, 2, 2, 2, 11, 11, 11, 11, 11,
-	14, 12, 13, 13, 13, 13, 13, 13, 13, 13,
-	13, 13, 3, 3, 3, 3, 4, 4, 4, 5,
-	5, 6, 6, 6, 6, 7, 7, 7, 7, 7,
-	7, 7, 7, 7, 8, 8, 9, 9, 10, 10,
+	0, 1, 2, 2, 2, 6, 6, 6, 6, 6,
+	7, 10, 13, 13, 13, 13, 13, 13, 13, 13,
+	13, 13, 3, 3, 3, 3, 12, 12, 12, 9,
+	9, 8, 8, 8, 8, 14, 14, 14, 14, 14,
+	14, 14, 14, 14, 11, 11, 5, 5, 4, 4,
 }
 var exprR2 = [...]int{
 
 	0, 1, 1, 1, 1, 1, 3, 3, 3, 2,
-	2, 4, 4, 5, 5, 6, 7, 4, 5, 5,
+	2, 4, 4, 4, 5, 5, 5, 5, 6, 7,
 	6, 7, 1, 1, 1, 1, 3, 3, 3, 1,
 	3, 3, 3, 3, 3, 1, 1, 1, 1, 1,
 	1, 1, 1, 1, 1, 1, 1, 3, 4, 4,
 }
 var exprChk = [...]int{
 
-	-1000, -1, -2, -11, -12, -13, -4, 21, -8, -7,
+	-1000, -1, -2, -6, -10, -13, -12, 21, -11, -14,
 	13, 25, 26, 27, 28, 31, 29, 30, 32, 33,
-	34, 35, -3, 2, 19, 20, 12, 10, -11, 21,
-	21, -10, 23, 24, -5, 2, -6, 4, 5, 2,
-	22, -14, -11, -12, 4, -13, 21, 21, 21, 14,
+	34, 35, -3, 2, 19, 20, 12, 10, -6, 21,
+	21, -4, 23, 24, -9, 2, -8, 4, 5, 2,
+	22, -7, -6, -10, -13, 4, 21, 21, 21, 14,
 	2, 17, 14, 9, 10, 11, 12, 22, 6, 22,
-	17, 22, -12, -13, -9, 4, -9, -6, 5, 5,
-	5, 5, -10, -12, -13, -10, 22, 22, 17, 22,
-	22, 22, 22, 4, -10, -10,
+	22, 17, -10, -13, -5, 4, -5, -8, 5, 5,
+	5, 5, -4, -4, -13, -10, 22, 22, 17, 22,
+	22, 22, 22, 4, -4, -4,
 }
 var exprDef = [...]int{
 
@@ -194,9 +194,9 @@ var exprDef = [...]int{
 	0, 0, 0, 0, 0, 0, 29, 0, 6, 8,
 	7, 0, 0, 0, 0, 0, 0, 0, 0, 26,
 	27, 0, 28, 0, 0, 0, 0, 11, 10, 12,
-	0, 17, 0, 0, 0, 46, 0, 30, 31, 32,
-	33, 34, 14, 0, 0, 19, 13, 18, 0, 48,
-	49, 15, 20, 47, 16, 21,
+	13, 0, 0, 0, 0, 46, 0, 30, 31, 32,
+	33, 34, 16, 17, 0, 0, 14, 15, 0, 48,
+	49, 18, 20, 47, 19, 21,
 }
 var exprTok1 = [...]int{
 
@@ -606,228 +606,228 @@ exprdefault:
 		}
 	case 12:
 		exprDollar = exprS[exprpt-4 : exprpt+1]
-//line pkg/logql/expr.y:75
+//line pkg/logql/expr.y:76
 		{
 			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].RangeAggregationExpr, exprDollar[1].VectorOp, nil, nil)
 		}
 	case 13:
-		exprDollar = exprS[exprpt-5 : exprpt+1]
-//line pkg/logql/expr.y:76
+		exprDollar = exprS[exprpt-4 : exprpt+1]
+//line pkg/logql/expr.y:77
 		{
-			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].RangeAggregationExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil)
+			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].VectorAggregationExpr, exprDollar[1].VectorOp, nil, nil)
 		}
 	case 14:
 		exprDollar = exprS[exprpt-5 : exprpt+1]
-//line pkg/logql/expr.y:77
+//line pkg/logql/expr.y:78
 		{
-			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].RangeAggregationExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil)
+			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].RangeAggregationExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil)
 		}
 	case 15:
-		exprDollar = exprS[exprpt-6 : exprpt+1]
-//line pkg/logql/expr.y:78
+		exprDollar = exprS[exprpt-5 : exprpt+1]
+//line pkg/logql/expr.y:79
 		{
-			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].RangeAggregationExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str)
+			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].VectorAggregationExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil)
 		}
 	case 16:
-		exprDollar = exprS[exprpt-7 : exprpt+1]
-//line pkg/logql/expr.y:79
+		exprDollar = exprS[exprpt-5 : exprpt+1]
+//line pkg/logql/expr.y:80
 		{
-			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].RangeAggregationExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str)
+			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].RangeAggregationExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil)
 		}
 	case 17:
-		exprDollar = exprS[exprpt-4 : exprpt+1]
-//line pkg/logql/expr.y:80
+		exprDollar = exprS[exprpt-5 : exprpt+1]
+//line pkg/logql/expr.y:81
 		{
-			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].VectorAggregationExpr, exprDollar[1].VectorOp, nil, nil)
+			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].VectorAggregationExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil)
 		}
 	case 18:
-		exprDollar = exprS[exprpt-5 : exprpt+1]
-//line pkg/logql/expr.y:81
+		exprDollar = exprS[exprpt-6 : exprpt+1]
+//line pkg/logql/expr.y:83
 		{
-			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[4].VectorAggregationExpr, exprDollar[1].VectorOp, exprDollar[2].Grouping, nil)
+			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].VectorAggregationExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str)
 		}
 	case 19:
-		exprDollar = exprS[exprpt-5 : exprpt+1]
-//line pkg/logql/expr.y:82
+		exprDollar = exprS[exprpt-7 : exprpt+1]
+//line pkg/logql/expr.y:84
 		{
-			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[3].VectorAggregationExpr, exprDollar[1].VectorOp, exprDollar[5].Grouping, nil)
+			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].VectorAggregationExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str)
 		}
 	case 20:
 		exprDollar = exprS[exprpt-6 : exprpt+1]
-//line pkg/logql/expr.y:83
+//line pkg/logql/expr.y:85
 		{
-			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].VectorAggregationExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str)
+			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].RangeAggregationExpr, exprDollar[1].VectorOp, nil, &exprDollar[3].str)
 		}
 	case 21:
 		exprDollar = exprS[exprpt-7 : exprpt+1]
-//line pkg/logql/expr.y:84
+//line pkg/logql/expr.y:86
 		{
-			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].VectorAggregationExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str)
+			exprVAL.VectorAggregationExpr = mustNewVectorAggregationExpr(exprDollar[5].RangeAggregationExpr, exprDollar[1].VectorOp, exprDollar[7].Grouping, &exprDollar[3].str)
 		}
 	case 22:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:88
+//line pkg/logql/expr.y:90
 		{
 			exprVAL.Filter = labels.MatchRegexp
 		}
 	case 23:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:89
+//line pkg/logql/expr.y:91
 		{
 			exprVAL.Filter = labels.MatchEqual
 		}
 	case 24:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:90
+//line pkg/logql/expr.y:92
 		{
 			exprVAL.Filter = labels.MatchNotRegexp
 		}
 	case 25:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:91
+//line pkg/logql/expr.y:93
 		{
 			exprVAL.Filter = labels.MatchNotEqual
 		}
 	case 26:
 		exprDollar = exprS[exprpt-3 : exprpt+1]
-//line pkg/logql/expr.y:95
+//line pkg/logql/expr.y:97
 		{
 			exprVAL.Selector = exprDollar[2].Matchers
 		}
 	case 27:
 		exprDollar = exprS[exprpt-3 : exprpt+1]
-//line pkg/logql/expr.y:96
+//line pkg/logql/expr.y:98
 		{
 			exprVAL.Selector = exprDollar[2].Matchers
 		}
 	case 28:
 		exprDollar = exprS[exprpt-3 : exprpt+1]
-//line pkg/logql/expr.y:97
+//line pkg/logql/expr.y:99
 		{
 		}
 	case 29:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:101
+//line pkg/logql/expr.y:103
 		{
 			exprVAL.Matchers = []*labels.Matcher{exprDollar[1].Matcher}
 		}
 	case 30:
 		exprDollar = exprS[exprpt-3 : exprpt+1]
-//line pkg/logql/expr.y:102
+//line pkg/logql/expr.y:104
 		{
 			exprVAL.Matchers = append(exprDollar[1].Matchers, exprDollar[3].Matcher)
 		}
 	case 31:
 		exprDollar = exprS[exprpt-3 : exprpt+1]
-//line pkg/logql/expr.y:106
+//line pkg/logql/expr.y:108
 		{
 			exprVAL.Matcher = mustNewMatcher(labels.MatchEqual, exprDollar[1].str, exprDollar[3].str)
 		}
 	case 32:
 		exprDollar = exprS[exprpt-3 : exprpt+1]
-//line pkg/logql/expr.y:107
+//line pkg/logql/expr.y:109
 		{
 			exprVAL.Matcher = mustNewMatcher(labels.MatchNotEqual, exprDollar[1].str, exprDollar[3].str)
 		}
 	case 33:
 		exprDollar = exprS[exprpt-3 : exprpt+1]
-//line pkg/logql/expr.y:108
+//line pkg/logql/expr.y:110
 		{
 			exprVAL.Matcher = mustNewMatcher(labels.MatchRegexp, exprDollar[1].str, exprDollar[3].str)
 		}
 	case 34:
 		exprDollar = exprS[exprpt-3 : exprpt+1]
-//line pkg/logql/expr.y:109
+//line pkg/logql/expr.y:111
 		{
 			exprVAL.Matcher = mustNewMatcher(labels.MatchNotRegexp, exprDollar[1].str, exprDollar[3].str)
 		}
 	case 35:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:113
+//line pkg/logql/expr.y:115
 		{
 			exprVAL.VectorOp = OpTypeSum
 		}
 	case 36:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:114
+//line pkg/logql/expr.y:116
 		{
 			exprVAL.VectorOp = OpTypeAvg
 		}
 	case 37:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:115
+//line pkg/logql/expr.y:117
 		{
 			exprVAL.VectorOp = OpTypeCount
 		}
 	case 38:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:116
+//line pkg/logql/expr.y:118
 		{
 			exprVAL.VectorOp = OpTypeMax
 		}
 	case 39:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:117
+//line pkg/logql/expr.y:119
 		{
 			exprVAL.VectorOp = OpTypeMin
 		}
 	case 40:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:118
+//line pkg/logql/expr.y:120
 		{
 			exprVAL.VectorOp = OpTypeStddev
 		}
 	case 41:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:119
+//line pkg/logql/expr.y:121
 		{
 			exprVAL.VectorOp = OpTypeStdvar
 		}
 	case 42:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:120
+//line pkg/logql/expr.y:122
 		{
 			exprVAL.VectorOp = OpTypeBottomK
 		}
 	case 43:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:121
+//line pkg/logql/expr.y:123
 		{
 			exprVAL.VectorOp = OpTypeTopK
 		}
 	case 44:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:125
+//line pkg/logql/expr.y:127
 		{
 			exprVAL.RangeOp = OpTypeCountOverTime
 		}
 	case 45:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:126
+//line pkg/logql/expr.y:128
 		{
 			exprVAL.RangeOp = OpTypeRate
 		}
 	case 46:
 		exprDollar = exprS[exprpt-1 : exprpt+1]
-//line pkg/logql/expr.y:130
+//line pkg/logql/expr.y:132
 		{
 			exprVAL.Labels = []string{exprDollar[1].str}
 		}
 	case 47:
 		exprDollar = exprS[exprpt-3 : exprpt+1]
-//line pkg/logql/expr.y:131
+//line pkg/logql/expr.y:133
 		{
 			exprVAL.Labels = append(exprDollar[1].Labels, exprDollar[3].str)
 		}
 	case 48:
 		exprDollar = exprS[exprpt-4 : exprpt+1]
-//line pkg/logql/expr.y:135
+//line pkg/logql/expr.y:137
 		{
 			exprVAL.Grouping = &grouping{without: false, groups: exprDollar[3].Labels}
 		}
 	case 49:
 		exprDollar = exprS[exprpt-4 : exprpt+1]
-//line pkg/logql/expr.y:136
+//line pkg/logql/expr.y:138
 		{
 			exprVAL.Grouping = &grouping{without: true, groups: exprDollar[3].Labels}
 		}
diff --git a/pkg/logql/lex.go b/pkg/logql/lex.go
new file mode 100644
index 0000000000000000000000000000000000000000..035eec9eb25e293c56f0a50e0a999d41140131d2
--- /dev/null
+++ b/pkg/logql/lex.go
@@ -0,0 +1,101 @@
+package logql
+
+import (
+	"strconv"
+	"text/scanner"
+	"time"
+)
+
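+// tokens maps the literal text of LogQL operators, delimiters, and keywords to their parser token codes.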
+var tokens = map[string]int{
+	",":                 COMMA,
+	".":                 DOT,
+	"{":                 OPEN_BRACE,
+	"}":                 CLOSE_BRACE,
+	"=":                 EQ,
+	"!=":                NEQ,
+	"=~":                RE,
+	"!~":                NRE,
+	"|=":                PIPE_EXACT,
+	"|~":                PIPE_MATCH,
+	"(":                 OPEN_PARENTHESIS,
+	")":                 CLOSE_PARENTHESIS,
+	"by":                BY,
+	"without":           WITHOUT,
+	OpTypeCountOverTime: COUNT_OVER_TIME,
+	"[":                 OPEN_BRACKET,
+	"]":                 CLOSE_BRACKET,
+	OpTypeRate:          RATE,
+	OpTypeSum:           SUM,
+	OpTypeAvg:           AVG,
+	OpTypeMax:           MAX,
+	OpTypeMin:           MIN,
+	OpTypeCount:         COUNT,
+	OpTypeStddev:        STDDEV,
+	OpTypeStdvar:        STDVAR,
+	OpTypeBottomK:       BOTTOMK,
+	OpTypeTopK:          TOPK,
+}
+
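+// lexer adapts text/scanner.Scanner to the Lex/Error interface expected by the goyacc-generated parser.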
+type lexer struct {
+	scanner.Scanner
+	errs   []ParseError
+	expr   Expr
+	parser *exprParserImpl
+}
+
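+// Lex scans the next token, storing its value in lval when needed, and returns its token code (0 at end of input or on error).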
+func (l *lexer) Lex(lval *exprSymType) int {
+	r := l.Scan()
+	switch r {
+	case scanner.EOF:
+		return 0
+
+	case scanner.String:
+		var err error
+		lval.str, err = strconv.Unquote(l.TokenText())
+		if err != nil {
+			l.Error(err.Error())
+			return 0
+		}
+		return STRING
+	}
+
+	// scan duration tokens such as [5m]
+	if l.TokenText() == "[" {
+		d := ""
+		for r := l.Next(); r != scanner.EOF; r = l.Next() {
+			if string(r) == "]" {
+				i, err := time.ParseDuration(d)
+				if err != nil {
+					l.Error(err.Error())
+					return 0
+				}
+				lval.duration = i
+				return DURATION
+			}
+			d += string(r)
+		}
+		l.Error("missing closing ']' in duration")
+		return 0
+	}
+
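+	// Two-character operators (e.g. "!=", "=~", "|=") are matched before single-character tokens.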
+	if tok, ok := tokens[l.TokenText()+string(l.Peek())]; ok {
+		l.Next()
+		return tok
+	}
+
+	if tok, ok := tokens[l.TokenText()]; ok {
+		return tok
+	}
+
+	lval.str = l.TokenText()
+	return IDENTIFIER
+}
+
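+// Error records a parse error at the scanner's current line and column.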
+func (l *lexer) Error(msg string) {
+	l.errs = append(l.errs, newParseError(msg, l.Line, l.Column))
+}
diff --git a/pkg/logql/lex_test.go b/pkg/logql/lex_test.go
new file mode 100644
index 0000000000000000000000000000000000000000..9505a9fc1450e4be10478784723b02ed8acd62b4
--- /dev/null
+++ b/pkg/logql/lex_test.go
@@ -0,0 +1,51 @@
+package logql
+
+import (
+	"strings"
+	"testing"
+	"text/scanner"
+
+	"github.com/stretchr/testify/require"
+)
+
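+// TestLex checks that representative LogQL inputs are tokenized into the expected token sequences.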
+func TestLex(t *testing.T) {
+	for _, tc := range []struct {
+		input    string
+		expected []int
+	}{
+		{`{foo="bar"}`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}},
+		{`{ foo = "bar" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}},
+		{`{ foo != "bar" }`, []int{OPEN_BRACE, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}},
+		{`{ foo =~ "bar" }`, []int{OPEN_BRACE, IDENTIFIER, RE, STRING, CLOSE_BRACE}},
+		{`{ foo !~ "bar" }`, []int{OPEN_BRACE, IDENTIFIER, NRE, STRING, CLOSE_BRACE}},
+		{`{ foo = "bar", bar != "baz" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING,
+			COMMA, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}},
+		{`{ foo = "ba\"r" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}},
+		{`rate({foo="bar"}[10s])`, []int{RATE, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS}},
+		{`count_over_time({foo="bar"}[5m])`, []int{COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS}},
+		{`sum(count_over_time({foo="bar"}[5m])) by (foo,bar)`, []int{SUM, OPEN_PARENTHESIS, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS}},
+		{`topk(3,count_over_time({foo="bar"}[5m])) by (foo,bar)`, []int{TOPK, OPEN_PARENTHESIS, IDENTIFIER, COMMA, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS}},
+		{`bottomk(10,sum(count_over_time({foo="bar"}[5m])) by (foo,bar))`, []int{BOTTOMK, OPEN_PARENTHESIS, IDENTIFIER, COMMA, SUM, OPEN_PARENTHESIS, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS}},
+		{`sum(max(rate({foo="bar"}[5m])) by (foo,bar)) by (foo)`, []int{SUM, OPEN_PARENTHESIS, MAX, OPEN_PARENTHESIS, RATE, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, CLOSE_PARENTHESIS}},
+	} {
+		t.Run(tc.input, func(t *testing.T) {
+			actual := []int{}
+			l := lexer{
+				Scanner: scanner.Scanner{
+					Mode: scanner.SkipComments | scanner.ScanStrings,
+				},
+			}
+			l.Init(strings.NewReader(tc.input))
+			var lval exprSymType
+			for {
+				tok := l.Lex(&lval)
+				if tok == 0 {
+					break
+				}
+				actual = append(actual, tok)
+			}
+			require.Equal(t, tc.expected, actual)
+		})
+	}
+}
diff --git a/pkg/logql/parser.go b/pkg/logql/parser.go
index 1818783e79cf31dc3b13f4a99c5826c07fad2cd5..c69db0fc5c491b8a6e23ed28d578eae9d1d2c77f 100644
--- a/pkg/logql/parser.go
+++ b/pkg/logql/parser.go
@@ -3,10 +3,8 @@ package logql
 import (
 	"errors"
 	"fmt"
-	"strconv"
 	"strings"
 	"text/scanner"
-	"time"
 
 	"github.com/prometheus/prometheus/pkg/labels"
 )
@@ -71,95 +69,6 @@ func ParseLogSelector(input string) (LogSelectorExpr, error) {
 	return logSelector, nil
 }
 
-var tokens = map[string]int{
-	",":                 COMMA,
-	".":                 DOT,
-	"{":                 OPEN_BRACE,
-	"}":                 CLOSE_BRACE,
-	"=":                 EQ,
-	"!=":                NEQ,
-	"=~":                RE,
-	"!~":                NRE,
-	"|=":                PIPE_EXACT,
-	"|~":                PIPE_MATCH,
-	"(":                 OPEN_PARENTHESIS,
-	")":                 CLOSE_PARENTHESIS,
-	"by":                BY,
-	"without":           WITHOUT,
-	OpTypeCountOverTime: COUNT_OVER_TIME,
-	"[":                 OPEN_BRACKET,
-	"]":                 CLOSE_BRACKET,
-	OpTypeRate:          RATE,
-	OpTypeSum:           SUM,
-	OpTypeAvg:           AVG,
-	OpTypeMax:           MAX,
-	OpTypeMin:           MIN,
-	OpTypeCount:         COUNT,
-	OpTypeStddev:        STDDEV,
-	OpTypeStdvar:        STDVAR,
-	OpTypeBottomK:       BOTTOMK,
-	OpTypeTopK:          TOPK,
-}
-
-type lexer struct {
-	scanner.Scanner
-	errs   []ParseError
-	expr   Expr
-	parser *exprParserImpl
-}
-
-func (l *lexer) Lex(lval *exprSymType) int {
-	r := l.Scan()
-	switch r {
-	case scanner.EOF:
-		return 0
-
-	case scanner.String:
-		var err error
-		lval.str, err = strconv.Unquote(l.TokenText())
-		if err != nil {
-			l.Error(err.Error())
-			return 0
-		}
-		return STRING
-	}
-
-	// scaning duration tokens
-	if l.TokenText() == "[" {
-		d := ""
-		for r := l.Next(); r != scanner.EOF; r = l.Next() {
-			if string(r) == "]" {
-				i, err := time.ParseDuration(d)
-				if err != nil {
-					l.Error(err.Error())
-					return 0
-				}
-				lval.duration = i
-				return DURATION
-			}
-			d += string(r)
-		}
-		l.Error("missing closing ']' in duration")
-		return 0
-	}
-
-	if tok, ok := tokens[l.TokenText()+string(l.Peek())]; ok {
-		l.Next()
-		return tok
-	}
-
-	if tok, ok := tokens[l.TokenText()]; ok {
-		return tok
-	}
-
-	lval.str = l.TokenText()
-	return IDENTIFIER
-}
-
-func (l *lexer) Error(msg string) {
-	l.errs = append(l.errs, newParseError(msg, l.Line, l.Column))
-}
-
 // ParseError is what is returned when we failed to parse.
 type ParseError struct {
 	msg       string
diff --git a/pkg/logql/parser_test.go b/pkg/logql/parser_test.go
index b63ed792fb8a9a1f36b1cd3a6ccb07e5a45fe892..7658dcd1ec9374a0682b89c8c3da633b1e5ab842 100644
--- a/pkg/logql/parser_test.go
+++ b/pkg/logql/parser_test.go
@@ -2,56 +2,13 @@ package logql
 
 import (
 	"reflect"
-	"strings"
 	"testing"
-	"text/scanner"
 	"time"
 
 	"github.com/prometheus/prometheus/pkg/labels"
 	"github.com/stretchr/testify/require"
 )
 
-func TestLex(t *testing.T) {
-	for _, tc := range []struct {
-		input    string
-		expected []int
-	}{
-		{`{foo="bar"}`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}},
-		{`{ foo = "bar" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}},
-		{`{ foo != "bar" }`, []int{OPEN_BRACE, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}},
-		{`{ foo =~ "bar" }`, []int{OPEN_BRACE, IDENTIFIER, RE, STRING, CLOSE_BRACE}},
-		{`{ foo !~ "bar" }`, []int{OPEN_BRACE, IDENTIFIER, NRE, STRING, CLOSE_BRACE}},
-		{`{ foo = "bar", bar != "baz" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING,
-			COMMA, IDENTIFIER, NEQ, STRING, CLOSE_BRACE}},
-		{`{ foo = "ba\"r" }`, []int{OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE}},
-		{`rate({foo="bar"}[10s])`, []int{RATE, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS}},
-		{`count_over_time({foo="bar"}[5m])`, []int{COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS}},
-		{`sum(count_over_time({foo="bar"}[5m])) by (foo,bar)`, []int{SUM, OPEN_PARENTHESIS, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS}},
-		{`topk(3,count_over_time({foo="bar"}[5m])) by (foo,bar)`, []int{TOPK, OPEN_PARENTHESIS, IDENTIFIER, COMMA, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS}},
-		{`bottomk(10,sum(count_over_time({foo="bar"}[5m])) by (foo,bar))`, []int{BOTTOMK, OPEN_PARENTHESIS, IDENTIFIER, COMMA, SUM, OPEN_PARENTHESIS, COUNT_OVER_TIME, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS}},
-		{`sum(max(rate({foo="bar"}[5m])) by (foo,bar)) by (foo)`, []int{SUM, OPEN_PARENTHESIS, MAX, OPEN_PARENTHESIS, RATE, OPEN_PARENTHESIS, OPEN_BRACE, IDENTIFIER, EQ, STRING, CLOSE_BRACE, DURATION, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, COMMA, IDENTIFIER, CLOSE_PARENTHESIS, CLOSE_PARENTHESIS, BY, OPEN_PARENTHESIS, IDENTIFIER, CLOSE_PARENTHESIS}},
-	} {
-		t.Run(tc.input, func(t *testing.T) {
-			actual := []int{}
-			l := lexer{
-				Scanner: scanner.Scanner{
-					Mode: scanner.SkipComments | scanner.ScanStrings,
-				},
-			}
-			l.Init(strings.NewReader(tc.input))
-			var lval exprSymType
-			for {
-				tok := l.Lex(&lval)
-				if tok == 0 {
-					break
-				}
-				actual = append(actual, tok)
-			}
-			require.Equal(t, tc.expected, actual)
-		})
-	}
-}
-
 func newString(s string) *string {
 	return &s
 }