Mirror of https://github.com/google/go-jsonnet.git, synced 2025-09-28 17:01:02 +02:00
Merge pull request #8 from jbeda/parser
Finish writing unit tests for parser
commit c0060affd2
ast.go (33 lines changed)
@@ -20,40 +20,7 @@ import (
     "fmt"
 )
 
-// type astKind int
-
-// const (
-// astApply astKind = iota
-// astArray
-// astArrayComprehension
-// astArrayComprehensionSimple
-// astAssert
-// astBinary
-// astBuiltinFunction
-// astConditional
-// astDollar
-// astError
-// astFunction
-// astImport
-// astImportstr
-// astIndex
-// astLocal
-// astLiteralBoolean
-// astLiteralNull
-// astLiteralNumber
-// astLiteralString
-// astObject
-// astDesugaredObject
-// astObjectComprehension
-// astObjectComprehensionSimple
-// astSelf
-// astSuperIndex
-// astUnary
-// astVar
-// )
-
 // identifier represents a variable / parameter / field name.
-
 //+gen set
 type identifier string
 type identifiers []identifier
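The lines that survive this cleanup are just the name types: identifier is a string-backed name and identifiers is a slice of them, and the //+gen set directive presumably generates the identifierSet that parseObjectRemainder uses further down. A minimal sketch, with a hypothetical helper name that is not part of this commit, of how such names are built:

// paramNames is a hypothetical helper; it only illustrates that identifier and
// identifiers are plain string-backed types.
func paramNames(names ...string) identifiers {
    ids := make(identifiers, 0, len(names))
    for _, n := range names {
        ids = append(ids, identifier(n))
    }
    return ids
}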
@@ -43,7 +43,7 @@ var lexTests = []lexTest{
     {"colon3", ":::", tokens{{kind: tokenOperator, data: ":::"}}, ""},
     {"arrow right", "->", tokens{{kind: tokenOperator, data: "->"}}, ""},
     {"less than minus", "<-", tokens{{kind: tokenOperator, data: "<"},
         {kind: tokenOperator, data: "-"}}, ""},
     {"comma", ",", tokens{{kind: tokenComma, data: ","}}, ""},
     {"dollar", "$", tokens{{kind: tokenDollar, data: "$"}}, ""},
     {"dot", ".", tokens{{kind: tokenDot, data: "."}}, ""},
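This hunk touches the lexer's table-driven tests: each lexTest entry lists a case name, an input, the token stream the lexer should produce, and what looks like an expected-error string (empty when lexing should succeed). Note that "->" is expected to lex as a single operator token while "<-" lexes as "<" followed by "-". A hypothetical extra entry in the same positional style, purely for illustration and not part of this commit:

var extraLexTests = []lexTest{
    // A lone "+" should come back as one operator token; "" means no lex error.
    {"plus", "+", tokens{{kind: tokenOperator, data: "+"}}, ""},
}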
parser.go (82 lines changed)
@@ -24,10 +24,9 @@ import (
 type precedence int
 
 const (
     applyPrecedence precedence = 2 // Function calls and indexing.
     unaryPrecedence precedence = 4 // Logical and bitwise negation, unary + -
-    beforeElsePrecedence precedence = 15 // True branch of an if.
-    maxPrecedence        precedence = 16 // Local, If, Import, Function, Error
+    maxPrecedence precedence = 16 // Local, If, Import, Function, Error
 )
 
 var bopPrecedence = map[binaryOp]precedence{
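These constants order a precedence-climbing parser: smaller values bind tighter (applyPrecedence = 2 for calls and indexing, unaryPrecedence = 4, maxPrecedence = 16 for the loosest forms), and the now-unused beforeElsePrecedence is dropped. The sketch below is a self-contained toy, not go-jsonnet's parser: its toyPrec map and types are invented, but it shows the shape these constants support, with the right-hand side of a binary operator parsed at a tighter precedence, mirroring the p.parse(prec - 1) call that appears in the last hunk of this file.

package main

import (
    "fmt"
    "strings"
)

// toyPrec plays the role of bopPrecedence: smaller numbers bind tighter.
var toyPrec = map[string]int{"*": 10, "+": 12}

type toyParser struct{ toks []string }

func (p *toyParser) peek() string {
    if len(p.toks) == 0 {
        return ""
    }
    return p.toks[0]
}

func (p *toyParser) pop() string {
    t := p.peek()
    if t != "" {
        p.toks = p.toks[1:]
    }
    return t
}

// parse consumes operators whose precedence is <= prec and returns a fully
// parenthesized rendering of the expression it built.
func (p *toyParser) parse(prec int) string {
    lhs := p.pop() // a terminal (identifier) in this toy grammar
    for {
        opPrec, ok := toyPrec[p.peek()]
        if !ok || opPrec > prec {
            return lhs
        }
        op := p.pop()
        rhs := p.parse(opPrec - 1) // tighter precedence on the right gives left associativity
        lhs = fmt.Sprintf("(%s %s %s)", lhs, op, rhs)
    }
}

func main() {
    p := &toyParser{toks: strings.Fields("a + b * c + d")}
    fmt.Println(p.parse(16)) // prints ((a + (b * c)) + d)
}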
@@ -116,7 +115,7 @@ func (p *parser) parseIdentifierList(elementKind string) (identifiers, bool, err
     for _, n := range exprs {
         v, ok := n.(*astVar)
         if !ok {
-            return identifiers{}, false, makeStaticError(fmt.Sprintf("Not an identifier: %v", n), *n.Loc())
+            return identifiers{}, false, makeStaticError(fmt.Sprintf("Expected simple identifier but got a complex expression."), *n.Loc())
         }
         ids = append(ids, v.id)
     }
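The only change in this hunk is the message: a parameter that is not a bare variable (anything that fails the *astVar assertion) is now reported as "Expected simple identifier but got a complex expression." instead of echoing the node, which gives the error tests below a stable string to match; function(a, 1) in errorTests exercises exactly this path. A condensed restatement of the loop as a standalone sketch, with a hypothetical helper name and simplified return values:

// toIdentifiers mirrors the loop above: each expression must be an *astVar,
// whose id is collected; anything else (for example the literal 1 in
// `function(a, 1)`) is rejected.
func toIdentifiers(exprs []astNode) (identifiers, bool) {
    var ids identifiers
    for _, n := range exprs {
        v, ok := n.(*astVar)
        if !ok {
            return nil, false
        }
        ids = append(ids, v.id)
    }
    return ids, true
}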
@@ -189,6 +188,9 @@ func (p *parser) parseBind(binds *astLocalBinds) error {
         })
     } else {
         _, err = p.popExpectOp("=")
+        if err != nil {
+            return err
+        }
         body, err := p.parse(maxPrecedence)
         if err != nil {
             return err
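The three added lines close a small error-propagation gap: the error returned by popExpectOp("=") used to be discarded, because the next statement overwrites err before it is ever checked, so a malformed bind was reported later, if at all. With the check in place, an input like local foo: 1; true fails right at the colon with "Expected operator = but got \":\"", which is what the new errorTests below expect. The same expect-then-return idiom, isolated as a hedged sketch with a hypothetical helper name:

// expectEquals isolates the idiom the hunk adds (not in the commit itself):
// pop the expected "=" and propagate the error before parsing the bind's body.
func expectEquals(p *parser) error {
    _, err := p.popExpectOp("=")
    return err
}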
@@ -248,7 +250,6 @@ func (p *parser) parseObjectRemainder(tok *token) (astNode, *token, error) {
     literalFields := make(literalFieldSet)
     binds := make(identifierSet)
 
-    _ = "breakpoint"
     gotComma := false
     first := true
 
@@ -914,6 +915,7 @@ func (p *parser) parse(prec precedence) (astNode, error) {
             // the operator.
             switch p.peek().kind {
             case tokenOperator:
+                _ = "breakpoint"
                 if p.peek().data == ":" {
                     // Special case for the colons in assert. Since COLON is no-longer a
                     // special token, we have to make sure it does not trip the
@@ -939,8 +941,74 @@ func (p *parser) parse(prec precedence) (astNode, error) {
             default:
                 return lhs, nil
             }
-        }
 
+            op := p.pop()
+            switch op.kind {
+            case tokenBracketL:
+                index, err := p.parse(maxPrecedence)
+                if err != nil {
+                    return nil, err
+                }
+                end, err := p.popExpect(tokenBracketR)
+                if err != nil {
+                    return nil, err
+                }
+                lhs = &astIndex{
+                    astNodeBase: astNodeBase{loc: locFromTokens(begin, end)},
+                    target:      lhs,
+                    index:       index,
+                }
+            case tokenDot:
+                fieldID, err := p.popExpect(tokenIdentifier)
+                if err != nil {
+                    return nil, err
+                }
+                id := identifier(fieldID.data)
+                lhs = &astIndex{
+                    astNodeBase: astNodeBase{loc: locFromTokens(begin, fieldID)},
+                    target:      lhs,
+                    id:          &id,
+                }
+            case tokenParenL:
+                end, args, gotComma, err := p.parseCommaList(tokenParenR, "function argument")
+                if err != nil {
+                    return nil, err
+                }
+                tailStrict := false
+                if p.peek().kind == tokenTailStrict {
+                    p.pop()
+                    tailStrict = true
+                }
+                lhs = &astApply{
+                    astNodeBase:   astNodeBase{loc: locFromTokens(begin, end)},
+                    target:        lhs,
+                    arguments:     args,
+                    trailingComma: gotComma,
+                    tailStrict:    tailStrict,
+                }
+            case tokenBraceL:
+                obj, end, err := p.parseObjectRemainder(op)
+                if err != nil {
+                    return nil, err
+                }
+                lhs = &astApplyBrace{
+                    astNodeBase: astNodeBase{loc: locFromTokens(begin, end)},
+                    left:        lhs,
+                    right:       obj,
+                }
+            default:
+                rhs, err := p.parse(prec - 1)
+                if err != nil {
+                    return nil, err
+                }
+                lhs = &astBinary{
+                    astNodeBase: astNodeBase{loc: locFromTokenAST(begin, rhs)},
+                    left:        lhs,
+                    op:          bop,
+                    right:       rhs,
+                }
+            }
+        }
     }
 }
 
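This is the parser's suffix loop: once a terminal has been parsed into lhs, each trailing token extends it left-to-right into an astIndex ([expr] or .field), an astApply (a call, optionally marked tailstrict), an astApplyBrace (an object literal applied to the value on its left), or, in the default case, an astBinary for an ordinary binary operator. The hypothetical test below is not part of this commit; it feeds one input per branch, and every input is taken from the tests table added in parser_test.go further down.

// TestSuffixForms is an illustrative sketch only: it reuses the package's lex
// and parse helpers and checks that each suffix form is accepted.
func TestSuffixForms(t *testing.T) {
    suffixForms := []string{
        `foo[bar]`,            // tokenBracketL -> astIndex with a computed index
        `foo.bar`,             // tokenDot      -> astIndex with an identifier field
        `foo(bar) tailstrict`, // tokenParenL   -> astApply with tailStrict set
        `{a: b}{c: d}`,        // tokenBraceL   -> astApplyBrace
        `true || false`,       // default       -> astBinary
    }
    for _, s := range suffixForms {
        tokens, err := lex("test", s)
        if err != nil {
            t.Fatalf("lex(%q): %v", s, err)
        }
        if _, err := parse(tokens); err != nil {
            t.Errorf("parse(%q): %v", s, err)
        }
    }
}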
parser_test.go (215 lines changed)
@@ -15,21 +15,210 @@ limitations under the License.
 */
 package jsonnet
 
-import (
-    "fmt"
-    "testing"
-
-    "github.com/kr/pretty"
-)
+import "testing"
+
+var tests = []string{
+    `true`,
+    `1`,
+    `1.2e3`,
+    `!true`,
+    `null`,
+
+    `$.foo.bar`,
+    `self.foo.bar`,
+    `super.foo.bar`,
+    `super[1]`,
+    `error "Error!"`,
+
+    `"world"`,
+    `'world'`,
+    `|||
+   world
+|||`,
+
+    `foo(bar)`,
+    `foo(bar) tailstrict`,
+    `foo.bar`,
+    `foo[bar]`,
+
+    `true || false`,
+    `0 && 1 || 0`,
+    `0 && (1 || 0)`,
+
+    `local foo = "bar"; foo`,
+    `local foo(bar) = bar; foo(1)`,
+    `{ local foo = "bar", baz: 1}`,
+    `{ local foo(bar) = bar, baz: foo(1)}`,
+
+    `{ foo(bar, baz): bar+baz }`,
+
+    `{ ["foo" + "bar"]: 3 }`,
+    `{ ["field" + x]: x for x in [1, 2, 3] }`,
+    `{ local y = x, ["field" + x]: x for x in [1, 2, 3] }`,
+    `{ ["field" + x]: x for x in [1, 2, 3] if x <= 2 }`,
+    `{ ["field" + x + y]: x + y for x in [1, 2, 3] if x <= 2 for y in [4, 5, 6]}`,
+
+    `[]`,
+    `[a, b, c]`,
+    `[x for x in [1,2,3] ]`,
+    `[x for x in [1,2,3] if x <= 2]`,
+    `[x+y for x in [1,2,3] if x <= 2 for y in [4, 5, 6]]`,
+
+    `{}`,
+    `{ hello: "world" }`,
+    `{ hello +: "world" }`,
+    `{
+      hello: "world",
+      "name":: joe,
+      'mood'::: "happy",
+      |||
+        key type
+      |||: "block",
+    }`,
+
+    `assert true: 'woah!'; true`,
+    `{ assert true: 'woah!', foo: bar }`,
+
+    `if n > 1 then 'foos' else 'foo'`,
+
+    `local foo = function(x) x + 1; true`,
+
+    `import 'foo.jsonnet'`,
+    `importstr 'foo.text'`,
+
+    `{a: b} + {c: d}`,
+    `{a: b}{c: d}`,
+}
 
 func TestParser(t *testing.T) {
-    tokens, err := lex("test", `{hello: "world"}`)
-    if err != nil {
-        t.Errorf("Unexpected lex error: %v", err)
+    for _, s := range tests {
+        tokens, err := lex("test", s)
+        if err != nil {
+            t.Errorf("Unexpected lex error\n input: %v\n error: %v", s, err)
+            continue
+        }
+        _, err = parse(tokens)
+        if err != nil {
+            t.Errorf("Unexpected parse error\n input: %v\n error: %v", s, err)
+        }
     }
-    ast, err := parse(tokens)
-    if err != nil {
-        t.Errorf("Unexpected parse error: %v", err)
-    }
-    fmt.Printf("%# v", pretty.Formatter(ast))
 }
+
+type testError struct {
+    input string
+    err   string
+}
+
+var errorTests = []testError{
+    {`function(a, b c)`, `test:1:15-16 Expected a comma before next function parameter.`},
+    {`function(a, 1)`, `test:1:13-14 Expected simple identifier but got a complex expression.`},
+    {`a b`, `test:1:3-4 Did not expect: (IDENTIFIER, "b")`},
+    {`foo(a, bar(a b))`, `test:1:14-15 Expected a comma before next function argument.`},
+
+    {`local`, `test:1:6 Expected token IDENTIFIER but got end of file`},
+    {`local foo = 1, foo = 2; true`, `test:1:16-19 Duplicate local var: foo`},
+    {`local foo(a b) = a; true`, `test:1:13-14 Expected a comma before next function parameter.`},
+    {`local foo(a): a; true`, `test:1:13-14 Expected operator = but got ":"`},
+    {`local foo(a) = bar(a b); true`, `test:1:22-23 Expected a comma before next function argument.`},
+    {`local foo: 1; true`, `test:1:10-11 Expected operator = but got ":"`},
+    {`local foo = bar(a b); true`, `test:1:19-20 Expected a comma before next function argument.`},
+
+    {`{a b}`, `test:1:4-5 Expected token OPERATOR but got (IDENTIFIER, "b")`},
+    {`{a = b}`, `test:1:4-5 Expected one of :, ::, :::, +:, +::, +:::, got: =`},
+    {`{a :::: b}`, `test:1:4-8 Expected one of :, ::, :::, +:, +::, +:::, got: ::::`},
+
+    {`{assert x for x in [1, 2, 3]}`, `test:1:11-14 Object comprehension cannot have asserts.`},
+    {`{['foo' + x]: true, [x]: x for x in [1, 2, 3]}`, `test:1:28-31 Object comprehension can only have one field.`},
+    {`{foo: x for x in [1, 2, 3]}`, `test:1:9-12 Object comprehensions can only have [e] fields.`},
+    {`{[x]:: true for x in [1, 2, 3]}`, `test:1:13-16 Object comprehensions cannot have hidden fields.`},
+    {`{[x]: true for 1 in [1, 2, 3]}`, `test:1:16-17 Expected token IDENTIFIER but got (NUMBER, "1")`},
+    {`{[x]: true for x at [1, 2, 3]}`, `test:1:18-20 Expected token in but got (IDENTIFIER, "at")`},
+    {`{[x]: true for x in [1, 2 3]}`, `test:1:27-28 Expected a comma before next array element.`},
+    {`{[x]: true for x in [1, 2, 3] if (a b)}`, `test:1:37-38 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`{[x]: true for x in [1, 2, 3] if a b}`, `test:1:36-37 Expected for, if or "}" after for clause, got: (IDENTIFIER, "b")`},
+
+    {`{a: b c:d}`, `test:1:7-8 Expected a comma before next field.`},
+
+    {`{[(x y)]: z}`, `test:1:6-7 Expected token ")" but got (IDENTIFIER, "y")`},
+    {`{[x y]: z}`, `test:1:5-6 Expected token "]" but got (IDENTIFIER, "y")`},
+
+    {`{foo(x y): z}`, `test:1:8-9 Expected a comma before next method parameter.`},
+    {`{foo(x)+: z}`, `test:1:2-5 Cannot use +: syntax sugar in a method: foo`},
+    {`{foo: 1, foo: 2}`, `test:1:10-13 Duplicate field: foo`},
+    {`{foo: (1 2)}`, `test:1:10-11 Expected token ")" but got (NUMBER, "2")`},
+
+    {`{local 1 = 3, true}`, `test:1:8-9 Expected token IDENTIFIER but got (NUMBER, "1")`},
+    {`{local foo = 1, local foo = 2, true}`, `test:1:23-26 Duplicate local var: foo`},
+    {`{local foo(a b) = 1, a: true}`, `test:1:14-15 Expected a comma before next function parameter.`},
+    {`{local foo(a): 1, a: true}`, `test:1:14-15 Expected operator = but got ":"`},
+    {`{local foo(a) = (a b), a: true}`, `test:1:20-21 Expected token ")" but got (IDENTIFIER, "b")`},
+
+    {`{assert (a b), a: true}`, `test:1:12-13 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`{assert a: (a b), a: true}`, `test:1:15-16 Expected token ")" but got (IDENTIFIER, "b")`},
+
+    {`{function(a, b) a+b: true}`, `test:1:2-10 Unexpected: (function, "function") while parsing field definition`},
+
+    {`[(a b), 2, 3]`, `test:1:5-6 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`[1, (a b), 2, 3]`, `test:1:8-9 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`[a for b in [1 2 3]]`, `test:1:16-17 Expected a comma before next array element.`},
+
+    {`for`, `test:1:1-4 Unexpected: (for, "for") while parsing terminal`},
+    {``, `test:1:1 Unexpected end of file.`},
+    {`((a b))`, `test:1:5-6 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`a.1`, `test:1:3-4 Expected token IDENTIFIER but got (NUMBER, "1")`},
+    {`super.1`, `test:1:7-8 Expected token IDENTIFIER but got (NUMBER, "1")`},
+    {`super[(a b)]`, `test:1:10-11 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`super[a b]`, `test:1:9-10 Expected token "]" but got (IDENTIFIER, "b")`},
+    {`super`, `test:1:1-6 Expected . or [ after super.`},
+
+    {`assert (a b); true`, `test:1:11-12 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`assert a: (a b); true`, `test:1:14-15 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`assert a: 'foo', true`, `test:1:16-17 Expected token ";" but got (",", ",")`},
+    {`assert a: 'foo'; (a b)`, `test:1:21-22 Expected token ")" but got (IDENTIFIER, "b")`},
+
+    {`error (a b)`, `test:1:10-11 Expected token ")" but got (IDENTIFIER, "b")`},
+
+    {`if (a b) then c`, `test:1:7-8 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`if a b c`, `test:1:6-7 Expected token then but got (IDENTIFIER, "b")`},
+    {`if a then (b c)`, `test:1:14-15 Expected token ")" but got (IDENTIFIER, "c")`},
+    {`if a then b else (c d)`, `test:1:21-22 Expected token ")" but got (IDENTIFIER, "d")`},
+
+    {`function(a) (a b)`, `test:1:16-17 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`function a a`, `test:1:10-11 Expected ( but got (IDENTIFIER, "a")`},
+
+    {`import (a b)`, `test:1:11-12 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`import (a+b)`, `test:1:9-12 Computed imports are not allowed`},
+    {`importstr (a b)`, `test:1:14-15 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`importstr (a+b)`, `test:1:12-15 Computed imports are not allowed`},
+
+    {`local a = b ()`, `test:1:15 Expected , or ; but got end of file`},
+    {`local a = b; (a b)`, `test:1:17-18 Expected token ")" but got (IDENTIFIER, "b")`},
+
+    {`1+ <<`, `test:1:4-6 Not a unary operator: <<`},
+    {`-(a b)`, `test:1:5-6 Expected token ")" but got (IDENTIFIER, "b")`},
+    {`1~2`, `test:1:2-3 Not a binary operator: ~`},
+
+    {`a[(b c)]`, `test:1:6-7 Expected token ")" but got (IDENTIFIER, "c")`},
+    {`a[b c]`, `test:1:5-6 Expected token "]" but got (IDENTIFIER, "c")`},
+
+    {`a{b c}`, `test:1:5-6 Expected token OPERATOR but got (IDENTIFIER, "c")`},
+}
+
+func TestParserErrors(t *testing.T) {
+    for _, s := range errorTests {
+        tokens, err := lex("test", s.input)
+        if err != nil {
+            t.Errorf("Unexpected lex error\n input: %v\n error: %v", s.input, err)
+            continue
+        }
+        _, err = parse(tokens)
+        if err == nil {
+            t.Errorf("Expected parse error but got success\n input: %v", s.input)
+            continue
+        }
+        if err.Error() != s.err {
+            t.Errorf("Error string not as expected\n input: %v\n expected error: %v\n actual error: %v", s.input, s.err, err.Error())
+        }
+    }
+}
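Both test functions iterate plain slices and embed the offending input in the failure message, which keeps the tables easy to extend. One possible refinement, sketched here and not part of this commit, is to run each input as a named subtest via t.Run, so a single case can be selected with go test -run:

// TestParserSubtests is an illustrative variant only; it reuses the tests
// table, lex and parse exactly as TestParser does above.
func TestParserSubtests(t *testing.T) {
    for _, s := range tests {
        s := s // capture the loop variable for the closure
        t.Run(s, func(t *testing.T) {
            tokens, err := lex("test", s)
            if err != nil {
                t.Fatalf("lex error: %v", err)
            }
            if _, err := parse(tokens); err != nil {
                t.Errorf("parse error: %v", err)
            }
        })
    }
}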