Finish up blocking out the parser.

There is only the most minimal of tests, so this probably doesn't work well yet. Adding tests is the next job.
Joe Beda 2016-02-26 19:48:44 -08:00
parent 3b5fc97c33
commit 142b77c41f
13 changed files with 1249 additions and 20 deletions

README.md

@ -12,3 +12,13 @@
This is a port of [jsonnet](http://jsonnet.org/) to go. It is very much a work in progress.
This implementation is largely based on the [jsonnet C++ implementation](https://github.com/google/jsonnet).
## Implementation Notes
We are generating some helper classes on types by using http://clipperhouse.github.io/gen/. Do the following to regenerate these if necessary:
```
go get github.com/clipperhouse/gen
go get github.com/clipperhouse/set
go generate
```
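To make the workflow concrete, here is a minimal sketch of how the directives drive gen: a `//go:generate gen` line (added to doc.go in this commit) plus `+gen` annotations directly above the types. The `widget` type below is purely illustrative.

```
// doc.go wires gen into the standard toolchain:
//go:generate gen

// Annotating a type tells gen which typewriters to run for it; running
// `go generate` would then emit widget_set.go (and similar files) into the package.
// +gen set
type widget string
```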

_gen.go (new file, 7 lines)

@ -0,0 +1,7 @@
package main
import (
_ "github.com/clipperhouse/set"
_ "github.com/clipperhouse/slice"
_ "github.com/clipperhouse/stringer"
)

ast.go (66 lines changed)

@ -53,6 +53,8 @@ import (
// )
// identifier represents a variable / parameter / field name.
//+gen set
type identifier string
type identifiers []identifier
@ -78,17 +80,18 @@ func (n *astNodeBase) Loc() *LocationRange {
// ---------------------------------------------------------------------------
// +gen stringer
type astCompKind int
const (
astCompFor = iota
astCompFor astCompKind = iota
astCompIf
)
type astCompSpec struct {
kind astCompKind
varName identifier // nil when kind != compSpecFor
expr *astNode
varName *identifier // nil when kind != compSpecFor
expr astNode
}
type astCompSpecs []astCompSpec
@ -203,9 +206,36 @@ var bopStrings = []string{
bopOr: "||",
}
var bopMap = map[string]binaryOp{
"*": bopMult,
"/": bopDiv,
"%": bopPercent,
"+": bopPlus,
"-": bopMinus,
"<<": bopShiftL,
">>": bopShiftR,
">": bopGreater,
">=": bopGreaterEq,
"<": bopLess,
"<=": bopLessEq,
"==": bopManifestEqual,
"!=": bopManifestUnequal,
"&": bopBitwiseAnd,
"^": bopBitwiseXor,
"|": bopBitwiseOr,
"&&": bopAnd,
"||": bopOr,
}
func (b binaryOp) String() string {
if b < 0 || int(b) >= len(bopStrings) {
panic(fmt.Sprintf("INTERNAL ERROR: Unrecognised binary operator: %v", b))
panic(fmt.Sprintf("INTERNAL ERROR: Unrecognised binary operator: %d", b))
}
return bopStrings[b]
}
@ -299,11 +329,11 @@ type astIndex struct {
// astLocalBind is a helper struct for astLocal
type astLocalBind struct {
variable identifier
body astNode
functionSugar bool
params identifiers // if functionSugar is true
trailingComman bool
variable identifier
body astNode
functionSugar bool
params identifiers // if functionSugar is true
trailingComma bool
}
type astLocalBinds []astLocalBind
@ -332,11 +362,13 @@ type astLiteralNull struct{ astNodeBase }
// astLiteralNumber represents a JSON number
type astLiteralNumber struct {
astNodeBase
value float64
value float64
originalString string
}
// ---------------------------------------------------------------------------
// +gen stringer
type astLiteralStringKind int
const (
@ -355,6 +387,7 @@ type astLiteralString struct {
// ---------------------------------------------------------------------------
// +gen stringer
type astObjectFieldKind int
const (
@ -365,6 +398,7 @@ const (
astObjectLocal // local id = expr2
)
// +gen stringer
type astObjectFieldHide int
const (
@ -379,7 +413,7 @@ type astObjectField struct {
superSugar bool // +: (ignore if kind != astObjectField*)
methodSugar bool // f(x, y, z): ... (ignore if kind == astObjectAssert)
expr1 astNode // Not in scope of the object
id identifier
id *identifier
ids identifiers // If methodSugar == true then holds the params.
trailingComma bool // If methodSugar == true then remembers the trailing comma
expr2, expr3 astNode // In scope of the object (can see self).
@ -453,6 +487,7 @@ type astSelf struct{ astNodeBase }
// Either index or identifier will be set before desugaring. After desugaring, id will be
// nil.
type astSuperIndex struct {
astNodeBase
index astNode
id *identifier
}
@ -475,9 +510,16 @@ var uopStrings = []string{
uopMinus: "-",
}
var uopMap = map[string]unaryOp{
"!": uopNot,
"~": uopBitwiseNot,
"+": uopPlus,
"-": uopMinus,
}
func (u unaryOp) String() string {
if u < 0 || int(u) >= len(uopStrings) {
panic(fmt.Sprintf("INTERNAL ERROR: Unrecognised unary operator: %v", u))
panic(fmt.Sprintf("INTERNAL ERROR: Unrecognised unary operator: %d", u))
}
return uopStrings[u]
}
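As a rough illustration (not part of the commit) of how these structs are meant to be populated, a `local` binding with function sugar such as `local add(x, y) = ...;` would presumably land in an astLocalBind roughly like this:

```
bind := astLocalBind{
	variable:      identifier("add"),
	body:          nil, // would hold the parsed node for the function body
	functionSugar: true,
	params:        identifiers{"x", "y"},
	trailingComma: false,
}
```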

astcompkind_stringer.go (new file, 20 lines)

@ -0,0 +1,20 @@
// Generated by: main
// TypeWriter: stringer
// Directive: +gen on astCompKind
package jsonnet
import (
"fmt"
)
const _astCompKind_name = "astCompForastCompIf"
var _astCompKind_index = [...]uint8{0, 10, 19}
func (i astCompKind) String() string {
if i < 0 || i+1 >= astCompKind(len(_astCompKind_index)) {
return fmt.Sprintf("astCompKind(%d)", i)
}
return _astCompKind_name[_astCompKind_index[i]:_astCompKind_index[i+1]]
}
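A quick sketch of what the generated stringer provides, assuming it is called from inside the package:

```
fmt.Println(astCompFor)     // prints "astCompFor"
fmt.Println(astCompIf)      // prints "astCompIf"
fmt.Println(astCompKind(7)) // out of range, falls back to "astCompKind(7)"
```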

astliteralstringkind_stringer.go (new file, 20 lines)

@ -0,0 +1,20 @@
// Generated by: main
// TypeWriter: stringer
// Directive: +gen on astLiteralStringKind
package jsonnet
import (
"fmt"
)
const _astLiteralStringKind_name = "astStringSingleastStringDoubleastStringBlock"
var _astLiteralStringKind_index = [...]uint8{0, 15, 30, 44}
func (i astLiteralStringKind) String() string {
if i < 0 || i+1 >= astLiteralStringKind(len(_astLiteralStringKind_index)) {
return fmt.Sprintf("astLiteralStringKind(%d)", i)
}
return _astLiteralStringKind_name[_astLiteralStringKind_index[i]:_astLiteralStringKind_index[i+1]]
}

astobjectfieldhide_stringer.go (new file, 20 lines)

@ -0,0 +1,20 @@
// Generated by: main
// TypeWriter: stringer
// Directive: +gen on astObjectFieldHide
package jsonnet
import (
"fmt"
)
const _astObjectFieldHide_name = "astObjectFieldHiddenastObjectFieldInheritastObjectFieldVisible"
var _astObjectFieldHide_index = [...]uint8{0, 20, 41, 62}
func (i astObjectFieldHide) String() string {
if i < 0 || i+1 >= astObjectFieldHide(len(_astObjectFieldHide_index)) {
return fmt.Sprintf("astObjectFieldHide(%d)", i)
}
return _astObjectFieldHide_name[_astObjectFieldHide_index[i]:_astObjectFieldHide_index[i+1]]
}

astobjectfieldkind_stringer.go (new file, 20 lines)

@ -0,0 +1,20 @@
// Generated by: main
// TypeWriter: stringer
// Directive: +gen on astObjectFieldKind
package jsonnet
import (
"fmt"
)
const _astObjectFieldKind_name = "astObjectAssertastObjectFieldIDastObjectFieldExprastObjectFieldStrastObjectLocal"
var _astObjectFieldKind_index = [...]uint8{0, 15, 31, 49, 66, 80}
func (i astObjectFieldKind) String() string {
if i < 0 || i+1 >= astObjectFieldKind(len(_astObjectFieldKind_index)) {
return fmt.Sprintf("astObjectFieldKind(%d)", i)
}
return _astObjectFieldKind_name[_astObjectFieldKind_index[i]:_astObjectFieldKind_index[i+1]]
}

doc.go (2 lines changed)

@ -25,3 +25,5 @@ text.
See http://jsonnet.org/ for a full language description and tutorial.
*/
package jsonnet
//go:generate gen

identifier_set.go (new file, 172 lines)

@ -0,0 +1,172 @@
// Generated by: main
// TypeWriter: set
// Directive: +gen on identifier
package jsonnet
// Set is a modification of https://github.com/deckarep/golang-set
// The MIT License (MIT)
// Copyright (c) 2013 Ralph Caraveo (deckarep@gmail.com)
// identifierSet is the primary type that represents a set
type identifierSet map[identifier]struct{}
// NewidentifierSet creates and returns a reference to an empty set.
func NewidentifierSet(a ...identifier) identifierSet {
s := make(identifierSet)
for _, i := range a {
s.Add(i)
}
return s
}
// ToSlice returns the elements of the current set as a slice
func (set identifierSet) ToSlice() []identifier {
var s []identifier
for v := range set {
s = append(s, v)
}
return s
}
// Add adds an item to the current set if it doesn't already exist in the set.
func (set identifierSet) Add(i identifier) bool {
_, found := set[i]
set[i] = struct{}{}
return !found //False if it existed already
}
// Contains determines if a given item is already in the set.
func (set identifierSet) Contains(i identifier) bool {
_, found := set[i]
return found
}
// ContainsAll determines if the given items are all in the set
func (set identifierSet) ContainsAll(i ...identifier) bool {
for _, v := range i {
if !set.Contains(v) {
return false
}
}
return true
}
// IsSubset determines if every item in the other set is in this set.
func (set identifierSet) IsSubset(other identifierSet) bool {
for elem := range set {
if !other.Contains(elem) {
return false
}
}
return true
}
// IsSuperset determines if every item of this set is in the other set.
func (set identifierSet) IsSuperset(other identifierSet) bool {
return other.IsSubset(set)
}
// Union returns a new set with all items in both sets.
func (set identifierSet) Union(other identifierSet) identifierSet {
unionedSet := NewidentifierSet()
for elem := range set {
unionedSet.Add(elem)
}
for elem := range other {
unionedSet.Add(elem)
}
return unionedSet
}
// Intersect returns a new set with items that exist only in both sets.
func (set identifierSet) Intersect(other identifierSet) identifierSet {
intersection := NewidentifierSet()
// loop over smaller set
if set.Cardinality() < other.Cardinality() {
for elem := range set {
if other.Contains(elem) {
intersection.Add(elem)
}
}
} else {
for elem := range other {
if set.Contains(elem) {
intersection.Add(elem)
}
}
}
return intersection
}
// Difference returns a new set with items in the current set but not in the other set
func (set identifierSet) Difference(other identifierSet) identifierSet {
differencedSet := NewidentifierSet()
for elem := range set {
if !other.Contains(elem) {
differencedSet.Add(elem)
}
}
return differencedSet
}
// SymmetricDifference returns a new set with items in the current set or the other set but not in both.
func (set identifierSet) SymmetricDifference(other identifierSet) identifierSet {
aDiff := set.Difference(other)
bDiff := other.Difference(set)
return aDiff.Union(bDiff)
}
// Clear clears the entire set to be the empty set.
func (set *identifierSet) Clear() {
*set = make(identifierSet)
}
// Remove allows the removal of a single item in the set.
func (set identifierSet) Remove(i identifier) {
delete(set, i)
}
// Cardinality returns how many items are currently in the set.
func (set identifierSet) Cardinality() int {
return len(set)
}
// Iter returns a channel of type identifier that you can range over.
func (set identifierSet) Iter() <-chan identifier {
ch := make(chan identifier)
go func() {
for elem := range set {
ch <- elem
}
close(ch)
}()
return ch
}
// Equal determines if two sets are equal to each other.
// If they both are the same size and have the same items they are considered equal.
// Order of items is not relevent for sets to be equal.
func (set identifierSet) Equal(other identifierSet) bool {
if set.Cardinality() != other.Cardinality() {
return false
}
for elem := range set {
if !other.Contains(elem) {
return false
}
}
return true
}
// Clone returns a clone of the set.
// Does NOT clone the underlying elements.
func (set identifierSet) Clone() identifierSet {
clonedSet := NewidentifierSet()
for elem := range set {
clonedSet.Add(elem)
}
return clonedSet
}
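A sketch of how the parser uses this generated set for duplicate detection (the object parser below keeps a `binds` set of local variable names):

```
binds := NewidentifierSet()
fmt.Println(binds.Add("x"))      // true: "x" was not yet in the set
fmt.Println(binds.Add("x"))      // false: already present, i.e. a duplicate local
fmt.Println(binds.Contains("x")) // true
fmt.Println(binds.Cardinality()) // 1
```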

lexer.go

@ -142,7 +142,7 @@ var tokenKindStrings = []string{
func (tk tokenKind) String() string {
if tk < 0 || int(tk) >= len(tokenKindStrings) {
panic(fmt.Sprintf("INTERNAL ERROR: Unknown token kind:: %v", tk))
panic(fmt.Sprintf("INTERNAL ERROR: Unknown token kind:: %d", tk))
}
return tokenKindStrings[tk]
}
@ -649,9 +649,9 @@ func (l *lexer) lexSymbol() error {
// So, wind it back if we need to, but stop at the first rune.
// This relies on the hack that all operator symbols are ASCII and thus there is
// no need to treat this substring as general UTF-8.
for r = rune(l.input[l.pos - 1]); l.pos > l.tokenStart + 1; l.pos-- {
for r = rune(l.input[l.pos-1]); l.pos > l.tokenStart+1; l.pos-- {
switch r {
case '+', '-', '~', '!':
case '+', '-', '~', '!':
continue
}
break

literalfield_set.go (new file, 172 lines)

@ -0,0 +1,172 @@
// Generated by: main
// TypeWriter: set
// Directive: +gen on literalField
package jsonnet
// Set is a modification of https://github.com/deckarep/golang-set
// The MIT License (MIT)
// Copyright (c) 2013 Ralph Caraveo (deckarep@gmail.com)
// literalFieldSet is the primary type that represents a set
type literalFieldSet map[literalField]struct{}
// NewliteralFieldSet creates and returns a reference to an empty set.
func NewliteralFieldSet(a ...literalField) literalFieldSet {
s := make(literalFieldSet)
for _, i := range a {
s.Add(i)
}
return s
}
// ToSlice returns the elements of the current set as a slice
func (set literalFieldSet) ToSlice() []literalField {
var s []literalField
for v := range set {
s = append(s, v)
}
return s
}
// Add adds an item to the current set if it doesn't already exist in the set.
func (set literalFieldSet) Add(i literalField) bool {
_, found := set[i]
set[i] = struct{}{}
return !found //False if it existed already
}
// Contains determines if a given item is already in the set.
func (set literalFieldSet) Contains(i literalField) bool {
_, found := set[i]
return found
}
// ContainsAll determines if the given items are all in the set
func (set literalFieldSet) ContainsAll(i ...literalField) bool {
for _, v := range i {
if !set.Contains(v) {
return false
}
}
return true
}
// IsSubset determines if every item in the other set is in this set.
func (set literalFieldSet) IsSubset(other literalFieldSet) bool {
for elem := range set {
if !other.Contains(elem) {
return false
}
}
return true
}
// IsSuperset determines if every item of this set is in the other set.
func (set literalFieldSet) IsSuperset(other literalFieldSet) bool {
return other.IsSubset(set)
}
// Union returns a new set with all items in both sets.
func (set literalFieldSet) Union(other literalFieldSet) literalFieldSet {
unionedSet := NewliteralFieldSet()
for elem := range set {
unionedSet.Add(elem)
}
for elem := range other {
unionedSet.Add(elem)
}
return unionedSet
}
// Intersect returns a new set with items that exist only in both sets.
func (set literalFieldSet) Intersect(other literalFieldSet) literalFieldSet {
intersection := NewliteralFieldSet()
// loop over smaller set
if set.Cardinality() < other.Cardinality() {
for elem := range set {
if other.Contains(elem) {
intersection.Add(elem)
}
}
} else {
for elem := range other {
if set.Contains(elem) {
intersection.Add(elem)
}
}
}
return intersection
}
// Difference returns a new set with items in the current set but not in the other set
func (set literalFieldSet) Difference(other literalFieldSet) literalFieldSet {
differencedSet := NewliteralFieldSet()
for elem := range set {
if !other.Contains(elem) {
differencedSet.Add(elem)
}
}
return differencedSet
}
// SymmetricDifference returns a new set with items in the current set or the other set but not in both.
func (set literalFieldSet) SymmetricDifference(other literalFieldSet) literalFieldSet {
aDiff := set.Difference(other)
bDiff := other.Difference(set)
return aDiff.Union(bDiff)
}
// Clear clears the entire set to be the empty set.
func (set *literalFieldSet) Clear() {
*set = make(literalFieldSet)
}
// Remove allows the removal of a single item in the set.
func (set literalFieldSet) Remove(i literalField) {
delete(set, i)
}
// Cardinality returns how many items are currently in the set.
func (set literalFieldSet) Cardinality() int {
return len(set)
}
// Iter returns a channel of type literalField that you can range over.
func (set literalFieldSet) Iter() <-chan literalField {
ch := make(chan literalField)
go func() {
for elem := range set {
ch <- elem
}
close(ch)
}()
return ch
}
// Equal determines if two sets are equal to each other.
// If they both are the same size and have the same items they are considered equal.
// Order of items is not relevent for sets to be equal.
func (set literalFieldSet) Equal(other literalFieldSet) bool {
if set.Cardinality() != other.Cardinality() {
return false
}
for elem := range set {
if !other.Contains(elem) {
return false
}
}
return true
}
// Clone returns a clone of the set.
// Does NOT clone the underlying elements.
func (set literalFieldSet) Clone() literalFieldSet {
clonedSet := NewliteralFieldSet()
for elem := range set {
clonedSet.Add(elem)
}
return clonedSet
}

parser.go (719 lines changed)

@ -18,6 +18,7 @@ package jsonnet
import (
"fmt"
"strconv"
)
type precedence int
@ -29,9 +30,30 @@ const (
maxPrecedence precedence = 16 // Local, If, Import, Function, Error
)
var bopPrecedence = map[binaryOp]precedence{
bopMult: 5,
bopDiv: 5,
bopPercent: 5,
bopPlus: 6,
bopMinus: 6,
bopShiftL: 7,
bopShiftR: 7,
bopGreater: 8,
bopGreaterEq: 8,
bopLess: 8,
bopLessEq: 8,
bopManifestEqual: 9,
bopManifestUnequal: 9,
bopBitwiseAnd: 10,
bopBitwiseXor: 11,
bopBitwiseOr: 12,
bopAnd: 13,
bopOr: 14,
}
// ---------------------------------------------------------------------------
func makeUnexpectedError(t token, while string) error {
func makeUnexpectedError(t *token, while string) error {
return makeStaticError(
fmt.Sprintf("Unexpected: %v while %v", t, while), t.loc)
}
@ -115,9 +137,10 @@ func (p *parser) parseCommaList(end tokenKind, elementKind string) (*token, astN
}
}
if next.kind == end {
// got_comma can be true or false here.
// gotComma can be true or false here.
return p.pop(), exprs, gotComma, nil
}
if !first && !gotComma {
return nil, nil, false, makeStaticError(fmt.Sprintf("Expected a comma before next %s.", elementKind), next.loc)
}
@ -132,6 +155,572 @@ func (p *parser) parseCommaList(end tokenKind, elementKind string) (*token, astN
}
}
func (p *parser) parseBind(binds *astLocalBinds) error {
varID, err := p.popExpect(tokenIdentifier)
if err != nil {
return err
}
for _, b := range *binds {
if b.variable == identifier(varID.data) {
return makeStaticError(fmt.Sprintf("Duplicate local var: %v", varID.data), varID.loc)
}
}
if p.peek().kind == tokenParenL {
p.pop()
params, gotComma, err := p.parseIdentifierList("function parameter")
if err != nil {
return err
}
_, err = p.popExpectOp("=")
if err != nil {
return err
}
body, err := p.parse(maxPrecedence)
if err != nil {
return err
}
*binds = append(*binds, astLocalBind{
variable: identifier(varID.data),
body: body,
functionSugar: true,
params: params,
trailingComma: gotComma,
})
} else {
_, err = p.popExpectOp("=")
body, err := p.parse(maxPrecedence)
if err != nil {
return err
}
*binds = append(*binds, astLocalBind{
variable: identifier(varID.data),
body: body,
})
}
return nil
}
func (p *parser) parseObjectAssignmentOp() (plusSugar bool, hide astObjectFieldHide, err error) {
op, err := p.popExpect(tokenOperator)
if err != nil {
return
}
opStr := op.data
if opStr[0] == '+' {
plusSugar = true
opStr = opStr[1:]
}
numColons := 0
for len(opStr) > 0 {
if opStr[0] != ':' {
err = makeStaticError(
fmt.Sprintf("Expected one of :, ::, :::, +:, +::, +:::, got: %v", op.data), op.loc)
return
}
opStr = opStr[1:]
numColons++
}
switch numColons {
case 1:
hide = astObjectFieldInherit
case 2:
hide = astObjectFieldHidden
case 3:
hide = astObjectFieldVisible
default:
err = makeStaticError(
fmt.Sprintf("Expected one of :, ::, :::, +:, +::, +:::, got: %v", op.data), op.loc)
return
}
return
}
// +gen set
type literalField string
func (p *parser) parseObjectRemainder(tok *token) (astNode, *token, error) {
var fields astObjectFields
literalFields := make(literalFieldSet)
binds := make(identifierSet)
_ = "breakpoint"
gotComma := false
first := true
for {
next := p.pop()
if !gotComma && !first {
if next.kind == tokenComma {
next = p.pop()
gotComma = true
}
}
if next.kind == tokenBraceR {
return &astObject{
astNodeBase: astNodeBase{loc: locFromTokens(tok, next)},
fields: fields,
trailingComma: gotComma,
}, next, nil
}
if next.kind == tokenFor {
// It's a comprehension
numFields := 0
numAsserts := 0
var field astObjectField
for _, field = range fields {
if field.kind == astObjectLocal {
continue
}
if field.kind == astObjectAssert {
numAsserts++
continue
}
numFields++
}
if numAsserts > 0 {
return nil, nil, makeStaticError("Object comprehension cannot have asserts.", next.loc)
}
if numFields != 1 {
return nil, nil, makeStaticError("Object comprehension can only have one field.", next.loc)
}
if field.hide != astObjectFieldInherit {
return nil, nil, makeStaticError("Object comprehensions cannot have hidden fields.", next.loc)
}
if field.kind != astObjectFieldExpr {
return nil, nil, makeStaticError("Object comprehensions can only have [e] fields.", next.loc)
}
specs, last, err := p.parseComprehensionSpecs(tokenBraceR)
if err != nil {
return nil, nil, err
}
return &astObjectComp{
astNodeBase: astNodeBase{loc: locFromTokens(tok, last)},
fields: fields,
trailingComma: gotComma,
specs: *specs,
}, last, nil
}
if !gotComma && !first {
return nil, nil, makeStaticError("Expected a comma before next field.", next.loc)
}
first = false
switch next.kind {
case tokenBracketL, tokenIdentifier, tokenStringDouble, tokenStringSingle, tokenStringBlock:
var kind astObjectFieldKind
var expr1 astNode
var id *identifier
switch next.kind {
case tokenIdentifier:
kind = astObjectFieldID
id = (*identifier)(&next.data)
case tokenStringDouble:
kind = astObjectFieldStr
expr1 = &astLiteralString{
astNodeBase: astNodeBase{loc: next.loc},
value: next.data,
kind: astStringDouble,
}
case tokenStringSingle:
kind = astObjectFieldStr
expr1 = &astLiteralString{
astNodeBase: astNodeBase{loc: next.loc},
value: next.data,
kind: astStringSingle,
}
case tokenStringBlock:
kind = astObjectFieldStr
expr1 = &astLiteralString{
astNodeBase: astNodeBase{loc: next.loc},
value: next.data,
kind: astStringBlock,
blockIndent: next.stringBlockIndent,
}
default:
kind = astObjectFieldExpr
var err error
expr1, err = p.parse(maxPrecedence)
if err != nil {
return nil, nil, err
}
_, err = p.popExpect(tokenBracketR)
if err != nil {
return nil, nil, err
}
}
isMethod := false
methComma := false
var params identifiers
if p.peek().kind == tokenParenL {
p.pop()
var err error
params, methComma, err = p.parseIdentifierList("method parameter")
if err != nil {
return nil, nil, err
}
isMethod = true
}
plusSugar, hide, err := p.parseObjectAssignmentOp()
if err != nil {
return nil, nil, err
}
if plusSugar && isMethod {
return nil, nil, makeStaticError(
fmt.Sprintf("Cannot use +: syntax sugar in a method: %v", next.data), next.loc)
}
if kind != astObjectFieldExpr {
if !literalFields.Add(literalField(next.data)) {
return nil, nil, makeStaticError(
fmt.Sprintf("Duplicate field: %v", next.data), next.loc)
}
}
body, err := p.parse(maxPrecedence)
if err != nil {
return nil, nil, err
}
fields = append(fields, astObjectField{
kind: kind,
hide: hide,
superSugar: plusSugar,
methodSugar: isMethod,
expr1: expr1,
id: id,
ids: params,
trailingComma: methComma,
expr2: body,
})
case tokenLocal:
varID, err := p.popExpect(tokenIdentifier)
if err != nil {
return nil, nil, err
}
id := identifier(varID.data)
if binds.Contains(id) {
return nil, nil, makeStaticError(fmt.Sprintf("Duplicate local var: %v", id), varID.loc)
}
isMethod := false
funcComma := false
var params identifiers
if p.peek().kind == tokenParenL {
p.pop()
isMethod = true
params, funcComma, err = p.parseIdentifierList("function parameter")
if err != nil {
return nil, nil, err
}
}
_, err = p.popExpectOp("=")
if err != nil {
return nil, nil, err
}
body, err := p.parse(maxPrecedence)
if err != nil {
return nil, nil, err
}
binds.Add(id)
fields = append(fields, astObjectField{
kind: astObjectLocal,
hide: astObjectFieldVisible,
superSugar: false,
methodSugar: isMethod,
id: &id,
ids: params,
trailingComma: funcComma,
expr2: body,
})
case tokenAssert:
cond, err := p.parse(maxPrecedence)
if err != nil {
return nil, nil, err
}
var msg astNode
if p.peek().kind == tokenOperator && p.peek().data == ":" {
p.pop()
msg, err = p.parse(maxPrecedence)
if err != nil {
return nil, nil, err
}
}
fields = append(fields, astObjectField{
kind: astObjectAssert,
hide: astObjectFieldVisible,
expr2: cond,
expr3: msg,
})
default:
return nil, nil, makeUnexpectedError(next, "parsing field definition")
}
gotComma = false
}
}
/* parses for x in expr for y in expr if expr for z in expr ... */
func (p *parser) parseComprehensionSpecs(end tokenKind) (*astCompSpecs, *token, error) {
var specs astCompSpecs
for {
varID, err := p.popExpect(tokenIdentifier)
if err != nil {
return nil, nil, err
}
id := identifier(varID.data)
_, err = p.popExpect(tokenIn)
if err != nil {
return nil, nil, err
}
arr, err := p.parse(maxPrecedence)
if err != nil {
return nil, nil, err
}
specs = append(specs, astCompSpec{
kind: astCompFor,
varName: &id,
expr: arr,
})
maybeIf := p.pop()
for ; maybeIf.kind == tokenIf; maybeIf = p.pop() {
cond, err := p.parse(maxPrecedence)
if err != nil {
return nil, nil, err
}
specs = append(specs, astCompSpec{
kind: astCompIf,
varName: nil,
expr: cond,
})
}
if maybeIf.kind == end {
return &specs, maybeIf, nil
}
if maybeIf.kind != tokenFor {
return nil, nil, makeStaticError(
fmt.Sprintf("Expected for, if or %v after for clause, got: %v", end, maybeIf), maybeIf.loc)
}
}
}
// Assumes that the leading '[' has already been consumed and passed as tok.
// Should read up to and consume the trailing ']'
func (p *parser) parseArray(tok *token) (astNode, error) {
next := p.peek()
if next.kind == tokenBracketR {
p.pop()
return &astArray{
astNodeBase: astNodeBase{loc: locFromTokens(tok, next)},
}, nil
}
first, err := p.parse(maxPrecedence)
if err != nil {
return nil, err
}
var gotComma bool
next = p.peek()
if next.kind == tokenComma {
p.pop()
next = p.peek()
gotComma = true
}
if next.kind == tokenFor {
// It's a comprehension
p.pop()
specs, last, err := p.parseComprehensionSpecs(tokenBracketR)
if err != nil {
return nil, err
}
return &astArrayComp{
astNodeBase: astNodeBase{loc: locFromTokens(tok, last)},
body: first,
trailingComma: gotComma,
specs: *specs,
}, nil
}
// Not a comprehension: It can have more elements.
elements := astNodes{first}
for {
if next.kind == tokenBracketR {
// TODO(dcunnin): SYNTAX SUGAR HERE (preserve comma)
p.pop()
break
}
if !gotComma {
return nil, makeStaticError("Expected a comma before next array element.", next.loc)
}
nextElem, err := p.parse(maxPrecedence)
if err != nil {
return nil, err
}
elements = append(elements, nextElem)
next = p.peek()
if next.kind == tokenComma {
p.pop()
next = p.peek()
gotComma = true
} else {
gotComma = false
}
}
return &astArray{
astNodeBase: astNodeBase{loc: locFromTokens(tok, next)},
elements: elements,
trailingComma: gotComma,
}, nil
}
func (p *parser) parseTerminal() (astNode, error) {
tok := p.pop()
switch tok.kind {
case tokenAssert, tokenBraceR, tokenBracketR, tokenComma, tokenDot, tokenElse,
tokenError, tokenFor, tokenFunction, tokenIf, tokenIn, tokenImport, tokenImportStr,
tokenLocal, tokenOperator, tokenParenR, tokenSemicolon, tokenTailStrict, tokenThen:
return nil, makeUnexpectedError(tok, "parsing terminal")
case tokenEndOfFile:
return nil, makeStaticError("Unexpected end of file.", tok.loc)
case tokenBraceL:
obj, _, err := p.parseObjectRemainder(tok)
return obj, err
case tokenBracketL:
return p.parseArray(tok)
case tokenParenL:
inner, err := p.parse(maxPrecedence)
if err != nil {
return nil, err
}
_, err = p.popExpect(tokenParenR)
if err != nil {
return nil, err
}
return inner, nil
// Literals
case tokenNumber:
// This shouldn't fail as the lexer should make sure we have good input but
// we handle the error regardless.
num, err := strconv.ParseFloat(tok.data, 64)
if err != nil {
return nil, makeStaticError("Could not parse floating point number.", tok.loc)
}
return &astLiteralNumber{
astNodeBase: astNodeBase{loc: tok.loc},
value: num,
originalString: tok.data,
}, nil
case tokenStringSingle:
return &astLiteralString{
astNodeBase: astNodeBase{loc: tok.loc},
value: tok.data,
kind: astStringSingle,
}, nil
case tokenStringDouble:
return &astLiteralString{
astNodeBase: astNodeBase{loc: tok.loc},
value: tok.data,
kind: astStringDouble,
}, nil
case tokenStringBlock:
return &astLiteralString{
astNodeBase: astNodeBase{loc: tok.loc},
value: tok.data,
kind: astStringDouble,
blockIndent: tok.stringBlockIndent,
}, nil
case tokenFalse:
return &astLiteralBoolean{
astNodeBase: astNodeBase{loc: tok.loc},
value: false,
}, nil
case tokenTrue:
return &astLiteralBoolean{
astNodeBase: astNodeBase{loc: tok.loc},
value: true,
}, nil
case tokenNullLit:
return &astLiteralNull{
astNodeBase: astNodeBase{loc: tok.loc},
}, nil
// Variables
case tokenDollar:
return &astDollar{
astNodeBase: astNodeBase{loc: tok.loc},
}, nil
case tokenIdentifier:
return &astVar{
astNodeBase: astNodeBase{loc: tok.loc},
id: identifier(tok.data),
original: identifier(tok.data),
}, nil
case tokenSelf:
return &astSelf{
astNodeBase: astNodeBase{loc: tok.loc},
}, nil
case tokenSuper:
next := p.pop()
var index astNode
var id *identifier
switch next.kind {
case tokenDot:
fieldID, err := p.popExpect(tokenIdentifier)
if err != nil {
return nil, err
}
id = (*identifier)(&fieldID.data)
case tokenBracketL:
var err error
index, err = p.parse(maxPrecedence)
if err != nil {
return nil, err
}
_, err = p.popExpect(tokenBracketR)
if err != nil {
return nil, err
}
default:
return nil, makeStaticError("Expected . or [ after super.", tok.loc)
}
return &astSuperIndex{
astNodeBase: astNodeBase{loc: tok.loc},
index: index,
id: id,
}, nil
}
return nil, makeStaticError(fmt.Sprintf("INTERNAL ERROR: Unknown tok kind: %v", tok.kind), tok.loc)
}
func (p *parser) parse(prec precedence) (astNode, error) {
begin := p.peek()
@ -228,11 +817,131 @@ func (p *parser) parse(prec precedence) (astNode, error) {
body: body,
}, nil
}
return nil, makeStaticError(fmt.Sprintf("Expected ( but got %v", next), next.loc)
}
return nil, nil
case tokenImport:
p.pop()
body, err := p.parse(maxPrecedence)
if err != nil {
return nil, err
}
if lit, ok := body.(*astLiteralString); ok {
return &astImport{
astNodeBase: astNodeBase{locFromTokenAST(begin, body)},
file: lit.value,
}, nil
}
return nil, makeStaticError("Computed imports are not allowed", *body.Loc())
case tokenImportStr:
p.pop()
body, err := p.parse(maxPrecedence)
if err != nil {
return nil, err
}
if lit, ok := body.(*astLiteralString); ok {
return &astImportStr{
astNodeBase: astNodeBase{locFromTokenAST(begin, body)},
file: lit.value,
}, nil
}
return nil, makeStaticError("Computed imports are not allowed", *body.Loc())
case tokenLocal:
p.pop()
var binds astLocalBinds
for {
err := p.parseBind(&binds)
if err != nil {
return nil, err
}
delim := p.pop()
if delim.kind != tokenSemicolon && delim.kind != tokenComma {
return nil, makeStaticError(fmt.Sprintf("Expected , or ; but got %v", delim), delim.loc)
}
if delim.kind == tokenSemicolon {
break
}
}
body, err := p.parse(maxPrecedence)
if err != nil {
return nil, err
}
return &astLocal{
astNodeBase: astNodeBase{locFromTokenAST(begin, body)},
binds: binds,
body: body,
}, nil
default:
// Unary operator
if begin.kind == tokenOperator {
uop, ok := uopMap[begin.data]
if !ok {
return nil, makeStaticError(fmt.Sprintf("Not a unary operator: %v", begin.data), begin.loc)
}
if prec == unaryPrecedence {
op := p.pop()
expr, err := p.parse(prec)
if err != nil {
return nil, err
}
return &astUnary{
astNodeBase: astNodeBase{locFromTokenAST(op, expr)},
op: uop,
expr: expr,
}, nil
}
}
// Base case
if prec == 0 {
return p.parseTerminal()
}
lhs, err := p.parse(prec - 1)
if err != nil {
return nil, err
}
for {
// Then next token must be a binary operator.
var bop binaryOp
// Check precedence is correct for this level. If we're parsing operators
// with higher precedence, then return lhs and let lower levels deal with
// the operator.
switch p.peek().kind {
case tokenOperator:
if p.peek().data == ":" {
// Special case for the colons in assert. Since COLON is no-longer a
// special token, we have to make sure it does not trip the
// op_is_binary test below. It should terminate parsing of the
// expression here, returning control to the parsing of the actual
// assert AST.
return lhs, nil
}
var ok bool
bop, ok = bopMap[p.peek().data]
if !ok {
return nil, makeStaticError(fmt.Sprintf("Not a binary operator: %v", p.peek().data), p.peek().loc)
}
if bopPrecedence[bop] != prec {
return lhs, nil
}
case tokenDot, tokenBracketL, tokenParenL, tokenBraceL:
if applyPrecedence != prec {
return lhs, nil
}
default:
return lhs, nil
}
}
}
}
// ---------------------------------------------------------------------------
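To make the precedence climbing concrete: `parse(prec)` parses its left-hand side at `prec - 1` and only consumes operators whose entry in bopPrecedence equals `prec`, so operators with smaller table values bind tighter. A tiny illustrative check (a sketch, not from the commit):

```
// For `1 + 2 * 3`, bopMult (5) is handled by a deeper recursive call than
// bopPlus (6), so `2 * 3` groups first.
fmt.Println(bopPrecedence[bopMult] < bopPrecedence[bopPlus]) // true
```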

parser_test.go (new file, 35 lines)

@ -0,0 +1,35 @@
/*
Copyright 2016 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package jsonnet
import (
"fmt"
"testing"
"github.com/kr/pretty"
)
func TestParser(t *testing.T) {
tokens, err := lex("test", `{hello: "world"}`)
if err != nil {
t.Errorf("Unexpected lex error: %v", err)
}
ast, err := parse(tokens)
if err != nil {
t.Errorf("Unexpected parse error: %v", err)
}
fmt.Printf("%# v", pretty.Formatter(ast))
}
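The test only pretty-prints the parsed AST for now; running it should be the usual Go workflow (the pretty dependency has to be fetched first):

```
go get github.com/kr/pretty
go test -v
```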