Commit dbe55c13 authored by Ganesh Vernekar, committed by Brian Brazil

Subquery (#4831)

Signed-off-by: Ganesh Vernekar <cs15btech11018@iith.ac.in>
parent 2e725a19
......@@ -233,6 +233,7 @@ func main() {
}
promql.LookbackDelta = time.Duration(cfg.lookbackDelta)
promql.SetDefaultEvaluationInterval(time.Duration(config.DefaultGlobalConfig.EvaluationInterval))
logger := promlog.New(&cfg.promlogConfig)
......@@ -654,6 +655,7 @@ func reloadConfig(filename string, logger log.Logger, rls ...func(*config.Config
if failed {
return fmt.Errorf("one or more errors occurred while applying the new configuration (--config.file=%q)", filename)
}
promql.SetDefaultEvaluationInterval(time.Duration(conf.GlobalConfig.EvaluationInterval))
level.Info(logger).Log("msg", "Completed loading of configuration file", "filename", filename)
return nil
}
......
......@@ -170,6 +170,14 @@ The same works for range vectors. This returns the 5-minute rate that
rate(http_requests_total[5m] offset 1w)
## Subquery
A subquery allows you to run an instant query for a given range and resolution. The result of a subquery is a range vector.
Syntax: `<instant_query> '[' <range> ':' [<resolution>] ']' [ offset <duration> ]`
* `<resolution>` is optional. Default is the global evaluation interval.
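For example, `rate(http_requests_total[5m])[30m:1m]` returns the 5-minute rate of `http_requests_total` for the past 30 minutes, at a resolution of 1 minute.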
## Operators
Prometheus supports many binary and aggregation operators. These are described
......
......@@ -38,6 +38,16 @@ To select all HTTP status codes except 4xx ones, you could run:
http_requests_total{status!~"4.."}
## Subquery
This query returns the 5-minute rate of the `http_requests_total` metric for the past 30 minutes, at a resolution of 1 minute.
rate(http_requests_total[5m])[30m:1m]
This is an example of a nested subquery. The subquery for the `deriv` function uses the default resolution. Note that using subqueries unnecessarily is unwise, since the inner expression is re-evaluated at every resolution step of the outer range.
max_over_time(deriv(rate(distance_covered_total[5s])[30s:5s])[10m:])
## Using functions, operators, etc.
Return the per-second rate for all time series with the `http_requests_total`
......
......@@ -116,6 +116,14 @@ type MatrixSelector struct {
series []storage.Series
}
// SubqueryExpr represents a subquery.
type SubqueryExpr struct {
Expr Expr
Range time.Duration
Offset time.Duration
Step time.Duration
}
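For illustration, the hypothetical expression `foo[30m:1m] offset 5m` would parse into a `SubqueryExpr` with `Range` of 30 minutes, `Step` of 1 minute, and `Offset` of 5 minutes. When the resolution is omitted, `Step` stays zero and the engine falls back to the default evaluation interval (see the evaluator changes below).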
// NumberLiteral represents a number.
type NumberLiteral struct {
Val float64
......@@ -153,6 +161,7 @@ type VectorSelector struct {
func (e *AggregateExpr) Type() ValueType { return ValueTypeVector }
func (e *Call) Type() ValueType { return e.Func.ReturnType }
func (e *MatrixSelector) Type() ValueType { return ValueTypeMatrix }
func (e *SubqueryExpr) Type() ValueType { return ValueTypeMatrix }
func (e *NumberLiteral) Type() ValueType { return ValueTypeScalar }
func (e *ParenExpr) Type() ValueType { return e.Expr.Type() }
func (e *StringLiteral) Type() ValueType { return ValueTypeString }
......@@ -169,6 +178,7 @@ func (*AggregateExpr) expr() {}
func (*BinaryExpr) expr() {}
func (*Call) expr() {}
func (*MatrixSelector) expr() {}
func (*SubqueryExpr) expr() {}
func (*NumberLiteral) expr() {}
func (*ParenExpr) expr() {}
func (*StringLiteral) expr() {}
......@@ -267,6 +277,11 @@ func Walk(v Visitor, node Node, path []Node) error {
return err
}
case *SubqueryExpr:
if err := Walk(v, n.Expr, path); err != nil {
return err
}
case *ParenExpr:
if err := Walk(v, n.Expr, path); err != nil {
return err
......
......@@ -23,6 +23,7 @@ import (
"sort"
"strconv"
"sync"
"sync/atomic"
"time"
"github.com/go-kit/kit/log"
......@@ -51,6 +52,26 @@ const (
minInt64 = -9223372036854775808
)
var (
// LookbackDelta determines the time since the last sample after which a time
// series is considered stale.
LookbackDelta = 5 * time.Minute
// DefaultEvaluationInterval is the default evaluation interval of
// a subquery in milliseconds.
DefaultEvaluationInterval int64
)
// SetDefaultEvaluationInterval sets DefaultEvaluationInterval.
func SetDefaultEvaluationInterval(ev time.Duration) {
atomic.StoreInt64(&DefaultEvaluationInterval, durationToInt64Millis(ev))
}
// GetDefaultEvaluationInterval returns the DefaultEvaluationInterval in milliseconds.
func GetDefaultEvaluationInterval() int64 {
return atomic.LoadInt64(&DefaultEvaluationInterval)
}
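A minimal usage sketch, assuming the exported `promql` API added above; the interval is kept as an int64 millisecond count so that config reloads and concurrent query evaluations can access it with atomic operations rather than a mutex:

package main

import (
    "fmt"
    "time"

    "github.com/prometheus/prometheus/promql"
)

func main() {
    // Set the default subquery resolution, e.g. from the global config's
    // evaluation interval, then read it back in milliseconds.
    promql.SetDefaultEvaluationInterval(15 * time.Second)
    stepMs := promql.GetDefaultEvaluationInterval()
    fmt.Println(stepMs, time.Duration(stepMs)*time.Millisecond) // 15000 15s
}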
type engineMetrics struct {
currentQueries prometheus.Gauge
maxConcurrentQueries prometheus.Gauge
......@@ -404,12 +425,13 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
if s.Start == s.End && s.Interval == 0 {
start := timeMilliseconds(s.Start)
evaluator := &evaluator{
startTimestamp: start,
endTimestamp: start,
interval: 1,
ctx: ctx,
maxSamples: ng.maxSamplesPerQuery,
logger: ng.logger,
startTimestamp: start,
endTimestamp: start,
interval: 1,
ctx: ctx,
maxSamples: ng.maxSamplesPerQuery,
defaultEvalInterval: GetDefaultEvaluationInterval(),
logger: ng.logger,
}
val, err := evaluator.Eval(s.Expr)
if err != nil {
......@@ -445,12 +467,13 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
// Range evaluation.
evaluator := &evaluator{
startTimestamp: timeMilliseconds(s.Start),
endTimestamp: timeMilliseconds(s.End),
interval: durationMilliseconds(s.Interval),
ctx: ctx,
maxSamples: ng.maxSamplesPerQuery,
logger: ng.logger,
startTimestamp: timeMilliseconds(s.Start),
endTimestamp: timeMilliseconds(s.End),
interval: durationMilliseconds(s.Interval),
ctx: ctx,
maxSamples: ng.maxSamplesPerQuery,
defaultEvalInterval: GetDefaultEvaluationInterval(),
logger: ng.logger,
}
val, err := evaluator.Eval(s.Expr)
if err != nil {
......@@ -477,23 +500,36 @@ func (ng *Engine) execEvalStmt(ctx context.Context, query *query, s *EvalStmt) (
return mat, nil, warnings
}
// cumulativeSubqueryOffset returns the sum of the ranges and offsets of all subqueries in the path.
func (ng *Engine) cumulativeSubqueryOffset(path []Node) time.Duration {
var subqOffset time.Duration
for _, node := range path {
switch n := node.(type) {
case *SubqueryExpr:
subqOffset += n.Range + n.Offset
}
}
return subqOffset
}
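A minimal standalone sketch of what cumulativeSubqueryOffset contributes to maxOffset, assuming the hypothetical query `rate(foo[5m])[30m:1m] offset 10m`: the enclosing subquery adds Range+Offset = 40m, so the inner 5m matrix selector must be able to look back 45m in total:

package main

import (
    "fmt"
    "time"
)

// subqueryOnPath holds the range and offset of one enclosing subquery node.
type subqueryOnPath struct{ rng, off time.Duration }

// cumulativeOffset mirrors cumulativeSubqueryOffset: the sum of Range+Offset
// over all subqueries on the path to the current node.
func cumulativeOffset(path []subqueryOnPath) time.Duration {
    var total time.Duration
    for _, s := range path {
        total += s.rng + s.off
    }
    return total
}

func main() {
    // rate(foo[5m])[30m:1m] offset 10m (hypothetical): one enclosing subquery.
    path := []subqueryOnPath{{rng: 30 * time.Minute, off: 10 * time.Minute}}
    subqOffset := cumulativeOffset(path)
    maxOffset := 5*time.Minute + subqOffset // inner selector range + subquery reach
    fmt.Println(subqOffset, maxOffset)      // 40m0s 45m0s
}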
func (ng *Engine) populateSeries(ctx context.Context, q storage.Queryable, s *EvalStmt) (storage.Querier, error, storage.Warnings) {
var maxOffset time.Duration
Inspect(s.Expr, func(node Node, _ []Node) error {
Inspect(s.Expr, func(node Node, path []Node) error {
subqOffset := ng.cumulativeSubqueryOffset(path)
switch n := node.(type) {
case *VectorSelector:
if maxOffset < LookbackDelta {
maxOffset = LookbackDelta
if maxOffset < LookbackDelta+subqOffset {
maxOffset = LookbackDelta + subqOffset
}
if n.Offset+LookbackDelta > maxOffset {
maxOffset = n.Offset + LookbackDelta
if n.Offset+LookbackDelta+subqOffset > maxOffset {
maxOffset = n.Offset + LookbackDelta + subqOffset
}
case *MatrixSelector:
if maxOffset < n.Range {
maxOffset = n.Range
if maxOffset < n.Range+subqOffset {
maxOffset = n.Range + subqOffset
}
if n.Offset+n.Range > maxOffset {
maxOffset = n.Offset + n.Range
if n.Offset+n.Range+subqOffset > maxOffset {
maxOffset = n.Offset + n.Range + subqOffset
}
}
return nil
......@@ -514,7 +550,7 @@ func (ng *Engine) populateSeries(ctx context.Context, q storage.Queryable, s *Ev
params := &storage.SelectParams{
Start: timestamp.FromTime(s.Start),
End: timestamp.FromTime(s.End),
Step: int64(s.Interval / time.Millisecond),
Step: durationToInt64Millis(s.Interval),
}
switch n := node.(type) {
......@@ -624,9 +660,10 @@ type evaluator struct {
endTimestamp int64 // End time in milliseconds.
interval int64 // Interval in milliseconds.
maxSamples int
currentSamples int
logger log.Logger
maxSamples int
currentSamples int
defaultEvalInterval int64
logger log.Logger
}
// errorf causes a panic with the input formatted into an error.
......@@ -839,6 +876,21 @@ func (ev *evaluator) rangeEval(f func([]Value, *EvalNodeHelper) Vector, exprs ..
return mat
}
// evalSubquery evaluates the given SubqueryExpr and returns an equivalent
// evaluated MatrixSelector in its place. Note that the Name and LabelMatchers are not set.
func (ev *evaluator) evalSubquery(subq *SubqueryExpr) *MatrixSelector {
val := ev.eval(subq).(Matrix)
ms := &MatrixSelector{
Range: subq.Range,
Offset: subq.Offset,
series: make([]storage.Series, 0, len(val)),
}
for _, s := range val {
ms.series = append(ms.series, NewStorageSeries(s))
}
return ms
}
// eval evaluates the given expression as the given AST expression node requires.
func (ev *evaluator) eval(expr Expr) Value {
// This is the top-level evaluation method.
......@@ -880,10 +932,17 @@ func (ev *evaluator) eval(expr Expr) Value {
var matrixArgIndex int
var matrixArg bool
for i, a := range e.Args {
_, ok := a.(*MatrixSelector)
if ok {
if _, ok := a.(*MatrixSelector); ok {
matrixArgIndex = i
matrixArg = true
break
}
// SubqueryExpr can be used in place of MatrixSelector.
if subq, ok := a.(*SubqueryExpr); ok {
matrixArgIndex = i
matrixArg = true
// Replacing SubqueryExpr with MatrixSelector.
e.Args[i] = ev.evalSubquery(subq)
break
}
}
......@@ -1077,11 +1136,43 @@ func (ev *evaluator) eval(expr Expr) Value {
panic(fmt.Errorf("cannot do range evaluation of matrix selector"))
}
return ev.matrixSelector(e)
case *SubqueryExpr:
offsetMillis := durationToInt64Millis(e.Offset)
rangeMillis := durationToInt64Millis(e.Range)
newEv := &evaluator{
endTimestamp: ev.endTimestamp - offsetMillis,
interval: ev.defaultEvalInterval,
ctx: ev.ctx,
currentSamples: ev.currentSamples,
maxSamples: ev.maxSamples,
defaultEvalInterval: ev.defaultEvalInterval,
logger: ev.logger,
}
if e.Step != 0 {
newEv.interval = durationToInt64Millis(e.Step)
}
// Start with the first timestamp at or after (ev.startTimestamp - offset - range)
// that is aligned with the step (multiple of 'newEv.interval').
newEv.startTimestamp = newEv.interval * ((ev.startTimestamp - offsetMillis - rangeMillis) / newEv.interval)
if newEv.startTimestamp < (ev.startTimestamp - offsetMillis - rangeMillis) {
newEv.startTimestamp += newEv.interval
}
res := newEv.eval(e.Expr)
ev.currentSamples = newEv.currentSamples
return res
}
panic(fmt.Errorf("unhandled expression of type: %T", expr))
}
func durationToInt64Millis(d time.Duration) int64 {
return int64(d / time.Millisecond)
}
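A self-contained sketch of the start-timestamp alignment performed for SubqueryExpr above, using inputs (in milliseconds) taken from the test cases added further down:

package main

import "fmt"

// alignedStart mirrors the subquery start computation in eval(): the first
// timestamp at or after (evStart - offset - range) that is a multiple of step.
func alignedStart(evStartMs, offsetMs, rangeMs, stepMs int64) int64 {
    start := stepMs * ((evStartMs - offsetMs - rangeMs) / stepMs)
    if start < evStartMs-offsetMs-rangeMs {
        start += stepMs
    }
    return start
}

func main() {
    // metric[20s:5s] evaluated at t=10s: the inner query starts at t=-10s and
    // steps every 5s up to t=10s, so only the samples at 0s, 5s and 10s exist.
    fmt.Println(alignedStart(10000, 0, 20000, 5000)) // -10000
    // http_requests{...}[30s:10s] evaluated at t=10020s starts at t=9990s.
    fmt.Println(alignedStart(10020000, 0, 30000, 10000)) // 9990000
}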
// vectorSelector evaluates a *VectorSelector expression.
func (ev *evaluator) vectorSelector(node *VectorSelector, ts int64) Vector {
if err := checkForSeriesSetExpansion(node, ev.ctx); err != nil {
......@@ -1825,10 +1916,6 @@ func shouldDropMetricName(op ItemType) bool {
}
}
// LookbackDelta determines the time since the last sample after which a time
// series is considered stale.
var LookbackDelta = 5 * time.Minute
// documentedType returns the internal type to the equivalent
// user facing terminology as defined in the documentation.
func documentedType(t ValueType) string {
......
......@@ -475,6 +475,34 @@ load 10s
},
Start: time.Unix(10, 0),
},
{
Query: "rate(metric[20s])",
MaxSamples: 3,
Result: Result{
nil,
Vector{
Sample{
Point: Point{V: 0.1, T: 10000},
Metric: labels.Labels{},
},
},
nil,
},
Start: time.Unix(10, 0),
},
{
Query: "metric[20s:5s]",
MaxSamples: 3,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 1, T: 0}, {V: 1, T: 5000}, {V: 2, T: 10000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(10, 0),
},
{
Query: "metric[20s]",
MaxSamples: 0,
......@@ -624,3 +652,265 @@ func TestRecoverEvaluatorError(t *testing.T) {
panic(e)
}
func TestSubquerySelector(t *testing.T) {
tests := []struct {
loadString string
cases []struct {
Query string
Result Result
Start time.Time
}
}{
{
loadString: `load 10s
metric 1 2`,
cases: []struct {
Query string
Result Result
Start time.Time
}{
{
Query: "metric[20s:10s]",
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 1, T: 0}, {V: 2, T: 10000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(10, 0),
},
{
Query: "metric[20s:5s]",
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 1, T: 0}, {V: 1, T: 5000}, {V: 2, T: 10000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(10, 0),
},
{
Query: "metric[20s:5s] offset 2s",
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 1, T: 0}, {V: 1, T: 5000}, {V: 2, T: 10000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(12, 0),
},
{
Query: "metric[20s:5s] offset 6s",
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 1, T: 0}, {V: 1, T: 5000}, {V: 2, T: 10000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(20, 0),
},
{
Query: "metric[20s:5s] offset 4s",
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 2, T: 15000}, {V: 2, T: 20000}, {V: 2, T: 25000}, {V: 2, T: 30000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(35, 0),
},
{
Query: "metric[20s:5s] offset 5s",
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 2, T: 10000}, {V: 2, T: 15000}, {V: 2, T: 20000}, {V: 2, T: 25000}, {V: 2, T: 30000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(35, 0),
},
{
Query: "metric[20s:5s] offset 6s",
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 2, T: 10000}, {V: 2, T: 15000}, {V: 2, T: 20000}, {V: 2, T: 25000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(35, 0),
},
{
Query: "metric[20s:5s] offset 7s",
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 2, T: 10000}, {V: 2, T: 15000}, {V: 2, T: 20000}, {V: 2, T: 25000}},
Metric: labels.FromStrings("__name__", "metric")},
},
nil,
},
Start: time.Unix(35, 0),
},
},
},
{
loadString: `load 10s
http_requests{job="api-server", instance="0", group="production"} 0+10x1000 100+30x1000
http_requests{job="api-server", instance="1", group="production"} 0+20x1000 200+30x1000
http_requests{job="api-server", instance="0", group="canary"} 0+30x1000 300+80x1000
http_requests{job="api-server", instance="1", group="canary"} 0+40x2000`,
cases: []struct {
Query string
Result Result
Start time.Time
}{
{ // Normal selector.
Query: `http_requests{group=~"pro.*",instance="0"}[30s:10s]`,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 9990, T: 9990000}, {V: 10000, T: 10000000}, {V: 100, T: 10010000}, {V: 130, T: 10020000}},
Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production")},
},
nil,
},
Start: time.Unix(10020, 0),
},
{ // Default step.
Query: `http_requests{group=~"pro.*",instance="0"}[5m:]`,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 9840, T: 9840000}, {V: 9900, T: 9900000}, {V: 9960, T: 9960000}, {V: 130, T: 10020000}, {V: 310, T: 10080000}},
Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production")},
},
nil,
},
Start: time.Unix(10100, 0),
},
{ // Checking that a large offset (> LookbackDelta) is handled correctly.
Query: `http_requests{group=~"pro.*",instance="0"}[5m:] offset 20m`,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 8640, T: 8640000}, {V: 8700, T: 8700000}, {V: 8760, T: 8760000}, {V: 8820, T: 8820000}, {V: 8880, T: 8880000}},
Metric: labels.FromStrings("__name__", "http_requests", "job", "api-server", "instance", "0", "group", "production")},
},
nil,
},
Start: time.Unix(10100, 0),
},
{
Query: `rate(http_requests[1m])[15s:5s]`,
Result: Result{
nil,
Matrix{
Series{
Points: []Point{{V: 3, T: 7985000}, {V: 3, T: 7990000}, {V: 3, T: 7995000}, {V: 3, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "canary"),
},
Series{
Points: []Point{{V: 4, T: 7985000}, {V: 4, T: 7990000}, {V: 4, T: 7995000}, {V: 4, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "canary"),
},
Series{
Points: []Point{{V: 1, T: 7985000}, {V: 1, T: 7990000}, {V: 1, T: 7995000}, {V: 1, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "0", "group", "production"),
},
Series{
Points: []Point{{V: 2, T: 7985000}, {V: 2, T: 7990000}, {V: 2, T: 7995000}, {V: 2, T: 8000000}},
Metric: labels.FromStrings("job", "api-server", "instance", "1", "group", "production"),
},
},
nil,
},
Start: time.Unix(8000, 0),
},
{
Query: `sum(http_requests{group=~"pro.*"})[30s:10s]`,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 270, T: 90000}, {V: 300, T: 100000}, {V: 330, T: 110000}, {V: 360, T: 120000}},
Metric: labels.Labels{}},
},
nil,
},
Start: time.Unix(120, 0),
},
{
Query: `sum(http_requests)[40s:10s]`,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 800, T: 80000}, {V: 900, T: 90000}, {V: 1000, T: 100000}, {V: 1100, T: 110000}, {V: 1200, T: 120000}},
Metric: labels.Labels{}},
},
nil,
},
Start: time.Unix(120, 0),
},
{
Query: `(sum(http_requests{group=~"p.*"})+sum(http_requests{group=~"c.*"}))[20s:5s]`,
Result: Result{
nil,
Matrix{Series{
Points: []Point{{V: 1000, T: 100000}, {V: 1000, T: 105000}, {V: 1100, T: 110000}, {V: 1100, T: 115000}, {V: 1200, T: 120000}},
Metric: labels.Labels{}},
},
nil,
},
Start: time.Unix(120, 0),
},
},
},
}
SetDefaultEvaluationInterval(1 * time.Minute)
for _, tst := range tests {
test, err := NewTest(t, tst.loadString)
if err != nil {
t.Fatalf("unexpected error creating test: %q", err)
}
defer test.Close()
err = test.Run()
if err != nil {
t.Fatalf("unexpected error initializing test: %q", err)
}
engine := test.QueryEngine()
for _, c := range tst.cases {
var err error
var qry Query
qry, err = engine.NewInstantQuery(test.Queryable(), c.Query, c.Start)
if err != nil {
t.Fatalf("unexpected error creating query: %q", err)
}
res := qry.Exec(test.Context())
if res.Err != nil && res.Err != c.Result.Err {
t.Fatalf("unexpected error running query: %q, expected to get result: %q", res.Err, c.Result.Value)
}
if !reflect.DeepEqual(res.Value, c.Result.Value) {
t.Fatalf("unexpected result for query %q: got %q wanted %q", c.Query, res.Value.String(), c.Result.String())
}
}
}
}
......@@ -137,6 +137,7 @@ const (
itemRightBracket
itemComma
itemAssign
itemColon
itemSemicolon
itemString
itemNumber
......@@ -235,6 +236,7 @@ var itemTypeStr = map[ItemType]string{
itemRightBracket: "]",
itemComma: ",",
itemAssign: "=",
itemColon: ":",
itemSemicolon: ";",
itemBlank: "_",
itemTimes: "x",
......@@ -326,6 +328,7 @@ type lexer struct {
parenDepth int // Nesting depth of ( ) exprs.
braceOpen bool // Whether a { is opened.
bracketOpen bool // Whether a [ is opened.
gotColon bool // Whether we got a ':' after [ was opened.
stringOpen rune // Quote rune of the string currently being read.
// seriesDesc is set when a series description for the testing
......@@ -517,8 +520,15 @@ func lexStatements(l *lexer) stateFn {
l.stringOpen = r
return lexRawString
case isAlpha(r) || r == ':':
l.backup()
return lexKeywordOrIdentifier
if !l.bracketOpen {
l.backup()
return lexKeywordOrIdentifier
}
if l.gotColon {
return l.errorf("unexpected colon %q", r)
}
l.emit(itemColon)
l.gotColon = true
case r == '(':
l.emit(itemLeftParen)
l.parenDepth++
......@@ -538,6 +548,7 @@ func lexStatements(l *lexer) stateFn {
if l.bracketOpen {
return l.errorf("unexpected left bracket %q", r)
}
l.gotColon = false
l.emit(itemLeftBracket)
l.bracketOpen = true
return lexDuration
......
......@@ -429,6 +429,167 @@ var tests = []struct {
},
seriesDesc: true,
},
// Test subquery.
{
input: `test_name{on!~"bar"}[4m:4s]`,
expected: []item{
{itemIdentifier, 0, `test_name`},
{itemLeftBrace, 9, `{`},
{itemIdentifier, 10, `on`},
{itemNEQRegex, 12, `!~`},
{itemString, 14, `"bar"`},
{itemRightBrace, 19, `}`},
{itemLeftBracket, 20, `[`},
{itemDuration, 21, `4m`},
{itemColon, 23, `:`},
{itemDuration, 24, `4s`},
{itemRightBracket, 26, `]`},
},
},
{
input: `test:name{on!~"bar"}[4m:4s]`,
expected: []item{
{itemMetricIdentifier, 0, `test:name`},
{itemLeftBrace, 9, `{`},
{itemIdentifier, 10, `on`},
{itemNEQRegex, 12, `!~`},
{itemString, 14, `"bar"`},
{itemRightBrace, 19, `}`},
{itemLeftBracket, 20, `[`},
{itemDuration, 21, `4m`},
{itemColon, 23, `:`},
{itemDuration, 24, `4s`},
{itemRightBracket, 26, `]`},
},
}, {
input: `test:name{on!~"b:ar"}[4m:4s]`,
expected: []item{
{itemMetricIdentifier, 0, `test:name`},
{itemLeftBrace, 9, `{`},
{itemIdentifier, 10, `on`},
{itemNEQRegex, 12, `!~`},
{itemString, 14, `"b:ar"`},
{itemRightBrace, 20, `}`},
{itemLeftBracket, 21, `[`},
{itemDuration, 22, `4m`},
{itemColon, 24, `:`},
{itemDuration, 25, `4s`},
{itemRightBracket, 27, `]`},
},