Mirror of https://github.com/google/go-jsonnet.git (synced 2025-12-20 16:41:06 +01:00)

Commit ad56a074aa: Move AST to its own package
Parent: c610dec2ef
@@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */

-package jsonnet
+package ast

 import (
 	"fmt"
@@ -33,26 +33,40 @@ type Identifiers []Identifier
 type Node interface {
 	Loc() *LocationRange
 	FreeVariables() Identifiers
-	setFreeVariables(Identifiers)
+	SetFreeVariables(Identifiers)
 }
 type Nodes []Node

 // ---------------------------------------------------------------------------

-type nodeBase struct {
+type NodeBase struct {
 	loc           LocationRange
 	freeVariables Identifiers
 }

-func (n *nodeBase) Loc() *LocationRange {
+func NewNodeBase(loc LocationRange, freeVariables Identifiers) NodeBase {
+	return NodeBase{
+		loc:           loc,
+		freeVariables: freeVariables,
+	}
+}
+
+func NewNodeBaseLoc(loc LocationRange) NodeBase {
+	return NodeBase{
+		loc:           loc,
+		freeVariables: []Identifier{},
+	}
+}
+
+func (n *NodeBase) Loc() *LocationRange {
 	return &n.loc
 }

-func (n *nodeBase) FreeVariables() Identifiers {
+func (n *NodeBase) FreeVariables() Identifiers {
 	return n.freeVariables
 }

-func (n *nodeBase) setFreeVariables(idents Identifiers) {
+func (n *NodeBase) SetFreeVariables(idents Identifiers) {
 	n.freeVariables = idents
 }
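For context on the new API: concrete node types embed NodeBase, and the exported constructors and accessors introduced here are what code outside the ast package now uses. Below is a minimal sketch, assuming the import path shown later in this diff; DummyNode and the location values are made-up illustrations, not part of the commit.

```go
package main

import (
	"fmt"

	"github.com/google/go-jsonnet/ast"
)

// DummyNode is a hypothetical node type; real nodes such as ast.Apply
// or ast.Var embed ast.NodeBase in exactly the same way.
type DummyNode struct {
	ast.NodeBase
	Name string
}

func main() {
	loc := ast.MakeLocationRange("example.jsonnet",
		ast.Location{Line: 1, Column: 1},
		ast.Location{Line: 1, Column: 5})

	// NewNodeBaseLoc sets the location and an empty free-variable list.
	n := &DummyNode{NodeBase: ast.NewNodeBaseLoc(loc), Name: "x"}

	// The exported methods replace the old unexported setFreeVariables.
	n.SetFreeVariables(ast.Identifiers{"x"})
	fmt.Println(n.Loc(), n.FreeVariables())
}
```

Because Loc, FreeVariables and SetFreeVariables are promoted from the embedded NodeBase, *DummyNode automatically satisfies the Node interface shown above.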
@ -79,7 +93,7 @@ type CompSpecs []CompSpec
|
||||
|
||||
// Apply represents a function call
|
||||
type Apply struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Target Node
|
||||
Arguments Nodes
|
||||
TrailingComma bool
|
||||
@ -91,7 +105,7 @@ type Apply struct {
|
||||
|
||||
// ApplyBrace represents e { }. Desugared to e + { }.
|
||||
type ApplyBrace struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Left Node
|
||||
Right Node
|
||||
}
|
||||
@ -100,7 +114,7 @@ type ApplyBrace struct {
|
||||
|
||||
// Array represents array constructors [1, 2, 3].
|
||||
type Array struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Elements Nodes
|
||||
TrailingComma bool
|
||||
}
|
||||
@ -110,7 +124,7 @@ type Array struct {
|
||||
// ArrayComp represents array comprehensions (which are like Python list
|
||||
// comprehensions)
|
||||
type ArrayComp struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Body Node
|
||||
TrailingComma bool
|
||||
Specs CompSpecs
|
||||
@ -123,7 +137,7 @@ type ArrayComp struct {
|
||||
// After parsing, message can be nil indicating that no message was
|
||||
// specified. This AST is eliminated by desugaring.
|
||||
type Assert struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Cond Node
|
||||
Message Node
|
||||
Rest Node
|
||||
@ -187,7 +201,7 @@ var bopStrings = []string{
|
||||
BopOr: "||",
|
||||
}
|
||||
|
||||
var bopMap = map[string]BinaryOp{
|
||||
var BopMap = map[string]BinaryOp{
|
||||
"*": BopMult,
|
||||
"/": BopDiv,
|
||||
"%": BopPercent,
|
||||
@ -223,7 +237,7 @@ func (b BinaryOp) String() string {
|
||||
|
||||
// Binary represents binary operators.
|
||||
type Binary struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Left Node
|
||||
Op BinaryOp
|
||||
Right Node
|
||||
@ -236,7 +250,7 @@ type Binary struct {
|
||||
// After parsing, branchFalse can be nil indicating that no else branch
|
||||
// was specified. The desugarer fills this in with a LiteralNull
|
||||
type Conditional struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Cond Node
|
||||
BranchTrue Node
|
||||
BranchFalse Node
|
||||
@ -245,13 +259,13 @@ type Conditional struct {
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Dollar represents the $ keyword
|
||||
type Dollar struct{ nodeBase }
|
||||
type Dollar struct{ NodeBase }
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Error represents the error e.
|
||||
type Error struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Expr Node
|
||||
}
|
||||
|
||||
@ -259,7 +273,7 @@ type Error struct {
|
||||
|
||||
// Function represents a function definition
|
||||
type Function struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Parameters Identifiers // TODO(sbarzowski) support default arguments
|
||||
TrailingComma bool
|
||||
Body Node
|
||||
@ -269,7 +283,7 @@ type Function struct {
|
||||
|
||||
// Import represents import "file".
|
||||
type Import struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
File string
|
||||
}
|
||||
|
||||
@ -277,7 +291,7 @@ type Import struct {
|
||||
|
||||
// ImportStr represents importstr "file".
|
||||
type ImportStr struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
File string
|
||||
}
|
||||
|
||||
@ -288,14 +302,14 @@ type ImportStr struct {
|
||||
// One of index and id will be nil before desugaring. After desugaring id
|
||||
// will be nil.
|
||||
type Index struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Target Node
|
||||
Index Node
|
||||
Id *Identifier
|
||||
}
|
||||
|
||||
type Slice struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Target Node
|
||||
|
||||
// Each of these can be nil
|
||||
@ -318,7 +332,7 @@ type LocalBinds []LocalBind
|
||||
|
||||
// Local represents local x = e; e. After desugaring, functionSugar is false.
|
||||
type Local struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Binds LocalBinds
|
||||
Body Node
|
||||
}
|
||||
@ -327,20 +341,20 @@ type Local struct {
|
||||
|
||||
// LiteralBoolean represents true and false
|
||||
type LiteralBoolean struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Value bool
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// LiteralNull represents the null keyword
|
||||
type LiteralNull struct{ nodeBase }
|
||||
type LiteralNull struct{ NodeBase }
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// LiteralNumber represents a JSON number
|
||||
type LiteralNumber struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Value float64
|
||||
OriginalString string
|
||||
}
|
||||
@ -360,7 +374,7 @@ const (
|
||||
|
||||
// LiteralString represents a JSON string
|
||||
type LiteralString struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Value string
|
||||
Kind LiteralStringKind
|
||||
BlockIndent string
|
||||
@ -418,7 +432,7 @@ type ObjectFields []ObjectField
|
||||
// The trailing comma is only allowed if len(fields) > 0. Converted to
|
||||
// DesugaredObject during desugaring.
|
||||
type Object struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Fields ObjectFields
|
||||
TrailingComma bool
|
||||
}
|
||||
@ -437,7 +451,7 @@ type DesugaredObjectFields []DesugaredObjectField
|
||||
//
|
||||
// The assertions either return true or raise an error.
|
||||
type DesugaredObject struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Asserts Nodes
|
||||
Fields DesugaredObjectFields
|
||||
}
|
||||
@ -447,7 +461,7 @@ type DesugaredObject struct {
|
||||
// ObjectComp represents object comprehension
|
||||
// { [e]: e for x in e for.. if... }.
|
||||
type ObjectComp struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Fields ObjectFields
|
||||
TrailingComma bool
|
||||
Specs CompSpecs
|
||||
@ -458,7 +472,7 @@ type ObjectComp struct {
|
||||
// ObjectComprehensionSimple represents post-desugaring object
|
||||
// comprehension { [e]: e for x in e }.
|
||||
type ObjectComprehensionSimple struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Field Node
|
||||
Value Node
|
||||
Id Identifier
|
||||
@ -468,7 +482,7 @@ type ObjectComprehensionSimple struct {
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
// Self represents the self keyword.
|
||||
type Self struct{ nodeBase }
|
||||
type Self struct{ NodeBase }
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
@ -477,7 +491,7 @@ type Self struct{ nodeBase }
|
||||
// Either index or identifier will be set before desugaring. After desugaring, id will be
|
||||
// nil.
|
||||
type SuperIndex struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Index Node
|
||||
Id *Identifier
|
||||
}
|
||||
@ -500,7 +514,7 @@ var uopStrings = []string{
|
||||
UopMinus: "-",
|
||||
}
|
||||
|
||||
var uopMap = map[string]UnaryOp{
|
||||
var UopMap = map[string]UnaryOp{
|
||||
"!": UopNot,
|
||||
"~": UopBitwiseNot,
|
||||
"+": UopPlus,
|
||||
@ -516,7 +530,7 @@ func (u UnaryOp) String() string {
|
||||
|
||||
// Unary represents unary operators.
|
||||
type Unary struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Op UnaryOp
|
||||
Expr Node
|
||||
}
|
||||
@ -525,7 +539,7 @@ type Unary struct {
|
||||
|
||||
// Var represents variables.
|
||||
type Var struct {
|
||||
nodeBase
|
||||
NodeBase
|
||||
Id Identifier
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
 // TypeWriter: stringer
 // Directive: +gen on astCompKind

-package jsonnet
+package ast

 import (
 	"fmt"
@@ -2,7 +2,7 @@
 // TypeWriter: set
 // Directive: +gen on identifier

-package jsonnet
+package ast

 // Set is a modification of https://github.com/deckarep/golang-set
 // The MIT License (MIT)
@@ -2,7 +2,7 @@
 // TypeWriter: stringer
 // Directive: +gen on astLiteralStringKind

-package jsonnet
+package ast

 import (
 	"fmt"
@@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */

-package jsonnet
+package ast

 import "fmt"

@@ -71,10 +71,10 @@ func (lr *LocationRange) String() string {
 }

 // This is useful for special locations, e.g. manifestation entry point.
-func makeLocationRangeMessage(msg string) LocationRange {
+func MakeLocationRangeMessage(msg string) LocationRange {
 	return LocationRange{FileName: msg}
 }

-func makeLocationRange(fn string, begin Location, end Location) LocationRange {
+func MakeLocationRange(fn string, begin Location, end Location) LocationRange {
 	return LocationRange{FileName: fn, Begin: begin, End: end}
 }
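MakeLocationRangeMessage and MakeLocationRange are exported here because callers elsewhere in this commit (builtins.go, the interpreter) now build locations from outside the package. A small sketch of both, assuming the ast import path used later in the diff; the file name and coordinates are placeholders.

```go
package main

import (
	"fmt"

	"github.com/google/go-jsonnet/ast"
)

func main() {
	// A synthetic "location" that only carries a message, as the
	// evaluator does for traces like "During evaluation".
	msgLoc := ast.MakeLocationRangeMessage("During evaluation")

	// A real source range: file name plus begin/end positions.
	srcLoc := ast.MakeLocationRange("example.jsonnet",
		ast.Location{Line: 2, Column: 1},
		ast.Location{Line: 2, Column: 10})

	fmt.Println(msgLoc.FileName, srcLoc.Begin.Line, srcLoc.End.Column)
}
```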
@@ -2,7 +2,7 @@
 // TypeWriter: stringer
 // Directive: +gen on astObjectFieldHide

-package jsonnet
+package ast

 import (
 	"fmt"
@@ -2,7 +2,7 @@
 // TypeWriter: stringer
 // Directive: +gen on astObjectFieldKind

-package jsonnet
+package ast

 import (
 	"fmt"
ast/util.go (new file, 7 lines)
@@ -0,0 +1,7 @@
+package ast
+
+func (i *IdentifierSet) Append(idents Identifiers) {
+	for _, ident := range idents {
+		i.Add(ident)
+	}
+}
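The new Append helper just folds a slice of identifiers into an IdentifierSet via Add. A rough usage sketch, assuming IdentifierSet is the map-backed set constructed with make(ast.IdentifierSet) as parser.go does later in this diff; the identifier values are arbitrary.

```go
package main

import (
	"fmt"

	"github.com/google/go-jsonnet/ast"
)

func main() {
	// Deduplicate free variables gathered from several nodes.
	free := make(ast.IdentifierSet)
	free.Append(ast.Identifiers{"x", "y"})
	free.Append(ast.Identifiers{"y", "std"}) // the duplicate "y" is absorbed

	fmt.Println(len(free)) // 3, assuming a map-backed set
}
```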
builtins.go (78)
@ -16,6 +16,8 @@ limitations under the License.
|
||||
|
||||
package jsonnet
|
||||
|
||||
import "github.com/google/go-jsonnet/ast"
|
||||
|
||||
// TODO(sbarzowski) Is this the best option? It's the first one that worked for me...
|
||||
//go:generate esc -o std.go -pkg=jsonnet std/std.jsonnet
|
||||
|
||||
@ -250,13 +252,13 @@ type unaryBuiltin func(*evaluator, potentialValue) (value, error)
|
||||
type binaryBuiltin func(*evaluator, potentialValue, potentialValue) (value, error)
|
||||
|
||||
type UnaryBuiltin struct {
|
||||
name Identifier
|
||||
name ast.Identifier
|
||||
function unaryBuiltin
|
||||
parameters Identifiers
|
||||
parameters ast.Identifiers
|
||||
}
|
||||
|
||||
func getBuiltinEvaluator(e *evaluator, name Identifier) *evaluator {
|
||||
loc := makeLocationRangeMessage("<builtin>")
|
||||
func getBuiltinEvaluator(e *evaluator, name ast.Identifier) *evaluator {
|
||||
loc := ast.MakeLocationRangeMessage("<builtin>")
|
||||
context := TraceContext{Name: "builtin function <" + string(name) + ">"}
|
||||
trace := TraceElement{loc: &loc, context: &context}
|
||||
return &evaluator{i: e.i, trace: &trace}
|
||||
@ -268,14 +270,14 @@ func (b *UnaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error)
|
||||
return b.function(getBuiltinEvaluator(e, b.name), args.positional[0])
|
||||
}
|
||||
|
||||
func (b *UnaryBuiltin) Parameters() Identifiers {
|
||||
func (b *UnaryBuiltin) Parameters() ast.Identifiers {
|
||||
return b.parameters
|
||||
}
|
||||
|
||||
type BinaryBuiltin struct {
|
||||
name Identifier
|
||||
name ast.Identifier
|
||||
function binaryBuiltin
|
||||
parameters Identifiers
|
||||
parameters ast.Identifiers
|
||||
}
|
||||
|
||||
func (b *BinaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) {
|
||||
@ -283,7 +285,7 @@ func (b *BinaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error
|
||||
return b.function(getBuiltinEvaluator(e, b.name), args.positional[0], args.positional[1])
|
||||
}
|
||||
|
||||
func (b *BinaryBuiltin) Parameters() Identifiers {
|
||||
func (b *BinaryBuiltin) Parameters() ast.Identifiers {
|
||||
return b.parameters
|
||||
}
|
||||
|
||||
@ -292,52 +294,52 @@ func todoFunc(e *evaluator, x, y potentialValue) (value, error) {
|
||||
}
|
||||
|
||||
// so that we don't get segfaults
|
||||
var todo = &BinaryBuiltin{function: todoFunc, parameters: Identifiers{"x", "y"}}
|
||||
var todo = &BinaryBuiltin{function: todoFunc, parameters: ast.Identifiers{"x", "y"}}
|
||||
|
||||
var desugaredBop = map[BinaryOp]Identifier{
|
||||
var desugaredBop = map[ast.BinaryOp]ast.Identifier{
|
||||
//bopPercent,
|
||||
BopManifestEqual: "equals",
|
||||
BopManifestUnequal: "notEquals", // Special case
|
||||
ast.BopManifestEqual: "equals",
|
||||
ast.BopManifestUnequal: "notEquals", // Special case
|
||||
}
|
||||
|
||||
var bopBuiltins = []*BinaryBuiltin{
|
||||
BopMult: todo,
|
||||
BopDiv: todo,
|
||||
BopPercent: todo,
|
||||
ast.BopMult: todo,
|
||||
ast.BopDiv: todo,
|
||||
ast.BopPercent: todo,
|
||||
|
||||
BopPlus: &BinaryBuiltin{name: "operator+", function: builtinPlus, parameters: Identifiers{"x", "y"}},
|
||||
BopMinus: &BinaryBuiltin{name: "operator-", function: builtinMinus, parameters: Identifiers{"x", "y"}},
|
||||
ast.BopPlus: &BinaryBuiltin{name: "operator+", function: builtinPlus, parameters: ast.Identifiers{"x", "y"}},
|
||||
ast.BopMinus: &BinaryBuiltin{name: "operator-", function: builtinMinus, parameters: ast.Identifiers{"x", "y"}},
|
||||
|
||||
BopShiftL: todo,
|
||||
BopShiftR: todo,
|
||||
ast.BopShiftL: todo,
|
||||
ast.BopShiftR: todo,
|
||||
|
||||
BopGreater: &BinaryBuiltin{name: "operator>", function: builtinGreater, parameters: Identifiers{"x", "y"}},
|
||||
BopGreaterEq: &BinaryBuiltin{name: "operator>=", function: builtinGreaterEq, parameters: Identifiers{"x", "y"}},
|
||||
BopLess: &BinaryBuiltin{name: "operator<,", function: builtinLess, parameters: Identifiers{"x", "y"}},
|
||||
BopLessEq: &BinaryBuiltin{name: "operator<=", function: builtinLessEq, parameters: Identifiers{"x", "y"}},
|
||||
ast.BopGreater: &BinaryBuiltin{name: "operator>", function: builtinGreater, parameters: ast.Identifiers{"x", "y"}},
|
||||
ast.BopGreaterEq: &BinaryBuiltin{name: "operator>=", function: builtinGreaterEq, parameters: ast.Identifiers{"x", "y"}},
|
||||
ast.BopLess: &BinaryBuiltin{name: "operator<,", function: builtinLess, parameters: ast.Identifiers{"x", "y"}},
|
||||
ast.BopLessEq: &BinaryBuiltin{name: "operator<=", function: builtinLessEq, parameters: ast.Identifiers{"x", "y"}},
|
||||
|
||||
BopManifestEqual: todo,
|
||||
BopManifestUnequal: todo,
|
||||
ast.BopManifestEqual: todo,
|
||||
ast.BopManifestUnequal: todo,
|
||||
|
||||
BopBitwiseAnd: todo,
|
||||
BopBitwiseXor: todo,
|
||||
BopBitwiseOr: todo,
|
||||
ast.BopBitwiseAnd: todo,
|
||||
ast.BopBitwiseXor: todo,
|
||||
ast.BopBitwiseOr: todo,
|
||||
|
||||
BopAnd: &BinaryBuiltin{name: "operator&&", function: builtinAnd, parameters: Identifiers{"x", "y"}},
|
||||
BopOr: todo,
|
||||
ast.BopAnd: &BinaryBuiltin{name: "operator&&", function: builtinAnd, parameters: ast.Identifiers{"x", "y"}},
|
||||
ast.BopOr: todo,
|
||||
}
|
||||
|
||||
var uopBuiltins = []*UnaryBuiltin{
|
||||
UopNot: &UnaryBuiltin{name: "operator!", function: builtinNegation, parameters: Identifiers{"x"}},
|
||||
UopBitwiseNot: &UnaryBuiltin{name: "operator~", function: builtinBitNeg, parameters: Identifiers{"x"}},
|
||||
UopPlus: &UnaryBuiltin{name: "operator+ (unary)", function: builtinIdentity, parameters: Identifiers{"x"}},
|
||||
UopMinus: &UnaryBuiltin{name: "operator- (unary)", function: builtinUnaryMinus, parameters: Identifiers{"x"}},
|
||||
ast.UopNot: &UnaryBuiltin{name: "operator!", function: builtinNegation, parameters: ast.Identifiers{"x"}},
|
||||
ast.UopBitwiseNot: &UnaryBuiltin{name: "operator~", function: builtinBitNeg, parameters: ast.Identifiers{"x"}},
|
||||
ast.UopPlus: &UnaryBuiltin{name: "operator+ (unary)", function: builtinIdentity, parameters: ast.Identifiers{"x"}},
|
||||
ast.UopMinus: &UnaryBuiltin{name: "operator- (unary)", function: builtinUnaryMinus, parameters: ast.Identifiers{"x"}},
|
||||
}
|
||||
|
||||
// TODO(sbarzowski) eliminate duplication in function names (e.g. build map from array or constants)
|
||||
var funcBuiltins = map[string]evalCallable{
|
||||
"length": &UnaryBuiltin{name: "length", function: builtinLength, parameters: Identifiers{"x"}},
|
||||
"makeArray": &BinaryBuiltin{name: "makeArray", function: builtinMakeArray, parameters: Identifiers{"sz", "func"}},
|
||||
"primitiveEquals": &BinaryBuiltin{name: "primitiveEquals", function: primitiveEquals, parameters: Identifiers{"sz", "func"}},
|
||||
"type": &UnaryBuiltin{name: "type", function: builtinType, parameters: Identifiers{"x"}},
|
||||
"length": &UnaryBuiltin{name: "length", function: builtinLength, parameters: ast.Identifiers{"x"}},
|
||||
"makeArray": &BinaryBuiltin{name: "makeArray", function: builtinMakeArray, parameters: ast.Identifiers{"sz", "func"}},
|
||||
"primitiveEquals": &BinaryBuiltin{name: "primitiveEquals", function: primitiveEquals, parameters: ast.Identifiers{"sz", "func"}},
|
||||
"type": &UnaryBuiltin{name: "type", function: builtinType, parameters: ast.Identifiers{"x"}},
|
||||
}
|
||||
|
||||
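The operator tables above (bopBuiltins, uopBuiltins) are slices indexed directly by the ast operator constants, so dispatching a builtin is a plain index lookup. The sketch below shows the same pattern using only the exported ast types; the table contents are illustrative, not the real builtin set.

```go
package main

import (
	"fmt"

	"github.com/google/go-jsonnet/ast"
)

func main() {
	// A slice indexed by ast.BinaryOp, mirroring how bopBuiltins is laid out.
	names := make([]string, ast.BopOr+1)
	names[ast.BopPlus] = "operator+"
	names[ast.BopAnd] = "operator&&"

	op := ast.BopPlus
	fmt.Printf("%v dispatches to %s\n", op, names[op]) // BinaryOp has a String() method
}
```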
desugarer.go (286)
@ -22,13 +22,15 @@ import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/google/go-jsonnet/ast"
|
||||
)
|
||||
|
||||
func makeStr(s string) *LiteralString {
|
||||
return &LiteralString{nodeBase{loc: LocationRange{}}, s, StringDouble, ""}
|
||||
func makeStr(s string) *ast.LiteralString {
|
||||
return &ast.LiteralString{ast.NodeBase{}, s, ast.StringDouble, ""}
|
||||
}
|
||||
|
||||
func stringUnescape(loc *LocationRange, s string) (string, error) {
|
||||
func stringUnescape(loc *ast.LocationRange, s string) (string, error) {
|
||||
var buf bytes.Buffer
|
||||
// read one rune at a time
|
||||
for i := 0; i < len(s); {
|
||||
@ -82,7 +84,7 @@ func stringUnescape(loc *LocationRange, s string) (string, error) {
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
func desugarFields(location LocationRange, fields *ObjectFields, objLevel int) error {
|
||||
func desugarFields(location ast.LocationRange, fields *ast.ObjectFields, objLevel int) error {
|
||||
|
||||
// Desugar children
|
||||
for i := range *fields {
|
||||
@ -108,7 +110,7 @@ func desugarFields(location LocationRange, fields *ObjectFields, objLevel int) e
|
||||
// Simplify asserts
|
||||
// TODO(dcunnin): this
|
||||
for _, field := range *fields {
|
||||
if field.Kind != ObjectAssert {
|
||||
if field.Kind != ast.ObjectAssert {
|
||||
continue
|
||||
}
|
||||
/*
|
||||
@ -116,15 +118,15 @@ func desugarFields(location LocationRange, fields *ObjectFields, objLevel int) e
|
||||
field.expr3 = nil
|
||||
if (msg == nil) {
|
||||
auto msg_str = U"Object assertion failed."
|
||||
msg = alloc->make<LiteralString>(field.expr2->location, msg_str,
|
||||
LiteralString::DOUBLE, "")
|
||||
msg = alloc->make<ast.LiteralString>(field.expr2->location, msg_str,
|
||||
ast.LiteralString::DOUBLE, "")
|
||||
}
|
||||
|
||||
// if expr2 then true else error msg
|
||||
field.expr2 = alloc->make<Conditional>(
|
||||
field.expr2 = alloc->make<ast.Conditional>(
|
||||
ast->location,
|
||||
field.expr2,
|
||||
alloc->make<LiteralBoolean>(E, true),
|
||||
alloc->make<ast.LiteralBoolean>(E, true),
|
||||
alloc->make<Error>(msg->location, msg))
|
||||
*/
|
||||
}
|
||||
@ -137,9 +139,9 @@ func desugarFields(location LocationRange, fields *ObjectFields, objLevel int) e
|
||||
continue
|
||||
}
|
||||
origBody := field.Expr2
|
||||
function := &Function{
|
||||
function := &ast.Function{
|
||||
// TODO(sbarzowski) better location
|
||||
nodeBase: nodeBase{loc: *origBody.Loc()},
|
||||
NodeBase: ast.NewNodeBaseLoc(*origBody.Loc()),
|
||||
Parameters: field.Ids,
|
||||
Body: origBody,
|
||||
}
|
||||
@ -149,20 +151,20 @@ func desugarFields(location LocationRange, fields *ObjectFields, objLevel int) e
|
||||
}
|
||||
|
||||
// Remove object-level locals
|
||||
newFields := []ObjectField{}
|
||||
var binds LocalBinds
|
||||
newFields := []ast.ObjectField{}
|
||||
var binds ast.LocalBinds
|
||||
for _, local := range *fields {
|
||||
if local.Kind != ObjectLocal {
|
||||
if local.Kind != ast.ObjectLocal {
|
||||
continue
|
||||
}
|
||||
binds = append(binds, LocalBind{Variable: *local.Id, Body: local.Expr2})
|
||||
binds = append(binds, ast.LocalBind{Variable: *local.Id, Body: local.Expr2})
|
||||
}
|
||||
for _, field := range *fields {
|
||||
if field.Kind == ObjectLocal {
|
||||
if field.Kind == ast.ObjectLocal {
|
||||
continue
|
||||
}
|
||||
if len(binds) > 0 {
|
||||
field.Expr2 = &Local{nodeBase{loc: *field.Expr2.Loc()}, binds, field.Expr2}
|
||||
field.Expr2 = &ast.Local{ast.NewNodeBaseLoc(*field.Expr2.Loc()), binds, field.Expr2}
|
||||
}
|
||||
newFields = append(newFields, field)
|
||||
}
|
||||
@ -172,21 +174,21 @@ func desugarFields(location LocationRange, fields *ObjectFields, objLevel int) e
|
||||
for i := range *fields {
|
||||
field := &(*fields)[i]
|
||||
switch field.Kind {
|
||||
case ObjectAssert:
|
||||
case ast.ObjectAssert:
|
||||
// Nothing to do.
|
||||
|
||||
case ObjectFieldID:
|
||||
case ast.ObjectFieldID:
|
||||
field.Expr1 = makeStr(string(*field.Id))
|
||||
field.Kind = ObjectFieldExpr
|
||||
field.Kind = ast.ObjectFieldExpr
|
||||
|
||||
case ObjectFieldExpr:
|
||||
case ast.ObjectFieldExpr:
|
||||
// Nothing to do.
|
||||
|
||||
case ObjectFieldStr:
|
||||
case ast.ObjectFieldStr:
|
||||
// Just set the flag.
|
||||
field.Kind = ObjectFieldExpr
|
||||
field.Kind = ast.ObjectFieldExpr
|
||||
|
||||
case ObjectLocal:
|
||||
case ast.ObjectLocal:
|
||||
return fmt.Errorf("INTERNAL ERROR: Locals should be removed by now")
|
||||
}
|
||||
}
|
||||
@ -199,7 +201,7 @@ func desugarFields(location LocationRange, fields *ObjectFields, objLevel int) e
|
||||
}
|
||||
/*
|
||||
AST *super_f = alloc->make<SuperIndex>(field.expr1->location, field.expr1, nil)
|
||||
field.expr2 = alloc->make<Binary>(ast->location, super_f, BOP_PLUS, field.expr2)
|
||||
field.expr2 = alloc->make<ast.Binary>(ast->location, super_f, BOP_PLUS, field.expr2)
|
||||
field.superSugar = false
|
||||
*/
|
||||
}
|
||||
@ -207,35 +209,35 @@ func desugarFields(location LocationRange, fields *ObjectFields, objLevel int) e
|
||||
return nil
|
||||
}
|
||||
|
||||
func desugarArrayComp(astComp *ArrayComp, objLevel int) (Node, error) {
|
||||
return &LiteralNull{}, nil
|
||||
func desugarArrayComp(astComp *ast.ArrayComp, objLevel int) (ast.Node, error) {
|
||||
return &ast.LiteralNull{}, nil
|
||||
// TODO(sbarzowski) this
|
||||
switch astComp.Specs[0].Kind {
|
||||
case CompFor:
|
||||
case ast.CompFor:
|
||||
panic("TODO")
|
||||
case CompIf:
|
||||
case ast.CompIf:
|
||||
panic("TODO")
|
||||
default:
|
||||
panic("TODO")
|
||||
}
|
||||
}
|
||||
|
||||
func desugarObjectComp(astComp *ObjectComp, objLevel int) (Node, error) {
|
||||
return &LiteralNull{}, nil
|
||||
func desugarObjectComp(astComp *ast.ObjectComp, objLevel int) (ast.Node, error) {
|
||||
return &ast.LiteralNull{}, nil
|
||||
// TODO(sbarzowski) this
|
||||
}
|
||||
|
||||
func buildSimpleIndex(obj Node, member Identifier) Node {
|
||||
return &Index{
|
||||
func buildSimpleIndex(obj ast.Node, member ast.Identifier) ast.Node {
|
||||
return &ast.Index{
|
||||
Target: obj,
|
||||
Id: &member,
|
||||
}
|
||||
}
|
||||
|
||||
func buildStdCall(builtinName Identifier, args ...Node) Node {
|
||||
std := &Var{Id: "std"}
|
||||
func buildStdCall(builtinName ast.Identifier, args ...ast.Node) ast.Node {
|
||||
std := &ast.Var{Id: "std"}
|
||||
builtin := buildSimpleIndex(std, builtinName)
|
||||
return &Apply{
|
||||
return &ast.Apply{
|
||||
Target: builtin,
|
||||
Arguments: args,
|
||||
}
|
||||
@ -249,267 +251,267 @@ func buildStdCall(builtinName Identifier, args ...Node) Node {
|
||||
// variables used in user code.
|
||||
// TODO(sbarzowski) Actually we may want to do some static analysis before desugaring, e.g.
|
||||
// warning user about dangerous use of constructs that we desugar.
|
||||
func desugar(astPtr *Node, objLevel int) (err error) {
|
||||
ast := *astPtr
|
||||
func desugar(astPtr *ast.Node, objLevel int) (err error) {
|
||||
node := *astPtr
|
||||
|
||||
if ast == nil {
|
||||
if node == nil {
|
||||
return
|
||||
}
|
||||
|
||||
switch ast := ast.(type) {
|
||||
case *Apply:
|
||||
desugar(&ast.Target, objLevel)
|
||||
for i := range ast.Arguments {
|
||||
err = desugar(&ast.Arguments[i], objLevel)
|
||||
switch node := node.(type) {
|
||||
case *ast.Apply:
|
||||
desugar(&node.Target, objLevel)
|
||||
for i := range node.Arguments {
|
||||
err = desugar(&node.Arguments[i], objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
case *ApplyBrace:
|
||||
err = desugar(&ast.Left, objLevel)
|
||||
case *ast.ApplyBrace:
|
||||
err = desugar(&node.Left, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
err = desugar(&ast.Right, objLevel)
|
||||
err = desugar(&node.Right, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
*astPtr = &Binary{
|
||||
nodeBase: ast.nodeBase,
|
||||
Left: ast.Left,
|
||||
Op: BopPlus,
|
||||
Right: ast.Right,
|
||||
*astPtr = &ast.Binary{
|
||||
NodeBase: node.NodeBase,
|
||||
Left: node.Left,
|
||||
Op: ast.BopPlus,
|
||||
Right: node.Right,
|
||||
}
|
||||
|
||||
case *Array:
|
||||
for i := range ast.Elements {
|
||||
err = desugar(&ast.Elements[i], objLevel)
|
||||
case *ast.Array:
|
||||
for i := range node.Elements {
|
||||
err = desugar(&node.Elements[i], objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
case *ArrayComp:
|
||||
comp, err := desugarArrayComp(ast, objLevel)
|
||||
case *ast.ArrayComp:
|
||||
comp, err := desugarArrayComp(node, objLevel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*astPtr = comp
|
||||
|
||||
case *Assert:
|
||||
case *ast.Assert:
|
||||
// TODO(sbarzowski) this
|
||||
*astPtr = &LiteralNull{}
|
||||
*astPtr = &ast.LiteralNull{}
|
||||
|
||||
case *Binary:
|
||||
case *ast.Binary:
|
||||
// some operators get replaced by stdlib functions
|
||||
if funcname, replaced := desugaredBop[ast.Op]; replaced {
|
||||
if funcname, replaced := desugaredBop[node.Op]; replaced {
|
||||
if funcname == "notEquals" {
|
||||
// TODO(sbarzowski) maybe we can handle it in more regular way
|
||||
// but let's be consistent with the spec
|
||||
*astPtr = &Unary{
|
||||
Op: UopNot,
|
||||
Expr: buildStdCall(desugaredBop[BopManifestEqual], ast.Left, ast.Right),
|
||||
*astPtr = &ast.Unary{
|
||||
Op: ast.UopNot,
|
||||
Expr: buildStdCall(desugaredBop[ast.BopManifestEqual], node.Left, node.Right),
|
||||
}
|
||||
} else {
|
||||
*astPtr = buildStdCall(funcname, ast.Left, ast.Right)
|
||||
*astPtr = buildStdCall(funcname, node.Left, node.Right)
|
||||
}
|
||||
return desugar(astPtr, objLevel)
|
||||
}
|
||||
|
||||
err = desugar(&ast.Left, objLevel)
|
||||
err = desugar(&node.Left, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
err = desugar(&ast.Right, objLevel)
|
||||
err = desugar(&node.Right, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
// TODO(dcunnin): Need to handle bopPercent, bopManifestUnequal, bopManifestEqual
|
||||
|
||||
case *Conditional:
|
||||
err = desugar(&ast.Cond, objLevel)
|
||||
case *ast.Conditional:
|
||||
err = desugar(&node.Cond, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
err = desugar(&ast.BranchTrue, objLevel)
|
||||
err = desugar(&node.BranchTrue, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if ast.BranchFalse == nil {
|
||||
ast.BranchFalse = &LiteralNull{}
|
||||
if node.BranchFalse == nil {
|
||||
node.BranchFalse = &ast.LiteralNull{}
|
||||
}
|
||||
err = desugar(&ast.BranchFalse, objLevel)
|
||||
err = desugar(&node.BranchFalse, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
case *Dollar:
|
||||
case *ast.Dollar:
|
||||
if objLevel == 0 {
|
||||
return makeStaticError("No top-level object found.", *ast.Loc())
|
||||
return makeStaticError("No top-level object found.", *node.Loc())
|
||||
}
|
||||
*astPtr = &Var{nodeBase: ast.nodeBase, Id: Identifier("$")}
|
||||
*astPtr = &ast.Var{NodeBase: node.NodeBase, Id: ast.Identifier("$")}
|
||||
|
||||
case *Error:
|
||||
err = desugar(&ast.Expr, objLevel)
|
||||
case *ast.Error:
|
||||
err = desugar(&node.Expr, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
case *Function:
|
||||
err = desugar(&ast.Body, objLevel)
|
||||
case *ast.Function:
|
||||
err = desugar(&node.Body, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
case *Import:
|
||||
case *ast.Import:
|
||||
// Nothing to do.
|
||||
|
||||
case *ImportStr:
|
||||
case *ast.ImportStr:
|
||||
// Nothing to do.
|
||||
|
||||
case *Index:
|
||||
err = desugar(&ast.Target, objLevel)
|
||||
case *ast.Index:
|
||||
err = desugar(&node.Target, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
if ast.Id != nil {
|
||||
if ast.Index != nil {
|
||||
if node.Id != nil {
|
||||
if node.Index != nil {
|
||||
panic("TODO")
|
||||
}
|
||||
ast.Index = makeStr(string(*ast.Id))
|
||||
ast.Id = nil
|
||||
node.Index = makeStr(string(*node.Id))
|
||||
node.Id = nil
|
||||
}
|
||||
err = desugar(&ast.Index, objLevel)
|
||||
err = desugar(&node.Index, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
case *Slice:
|
||||
if ast.BeginIndex == nil {
|
||||
ast.BeginIndex = &LiteralNull{}
|
||||
case *ast.Slice:
|
||||
if node.BeginIndex == nil {
|
||||
node.BeginIndex = &ast.LiteralNull{}
|
||||
}
|
||||
if ast.EndIndex == nil {
|
||||
ast.EndIndex = &LiteralNull{}
|
||||
if node.EndIndex == nil {
|
||||
node.EndIndex = &ast.LiteralNull{}
|
||||
}
|
||||
if ast.Step == nil {
|
||||
ast.Step = &LiteralNull{}
|
||||
if node.Step == nil {
|
||||
node.Step = &ast.LiteralNull{}
|
||||
}
|
||||
*astPtr = buildStdCall("std.slice", ast.Target, ast.BeginIndex, ast.EndIndex, ast.Step)
|
||||
*astPtr = buildStdCall("std.slice", node.Target, node.BeginIndex, node.EndIndex, node.Step)
|
||||
desugar(astPtr, objLevel)
|
||||
|
||||
case *Local:
|
||||
for i := range ast.Binds {
|
||||
if ast.Binds[i].FunctionSugar {
|
||||
origBody := ast.Binds[i].Body
|
||||
function := &Function{
|
||||
case *ast.Local:
|
||||
for i := range node.Binds {
|
||||
if node.Binds[i].FunctionSugar {
|
||||
origBody := node.Binds[i].Body
|
||||
function := &ast.Function{
|
||||
// TODO(sbarzowski) better location
|
||||
nodeBase: nodeBase{loc: *origBody.Loc()},
|
||||
Parameters: ast.Binds[i].Params,
|
||||
NodeBase: ast.NewNodeBaseLoc(*origBody.Loc()),
|
||||
Parameters: node.Binds[i].Params,
|
||||
Body: origBody,
|
||||
}
|
||||
ast.Binds[i] = LocalBind{
|
||||
Variable: ast.Binds[i].Variable,
|
||||
node.Binds[i] = ast.LocalBind{
|
||||
Variable: node.Binds[i].Variable,
|
||||
Body: function,
|
||||
FunctionSugar: false,
|
||||
Params: nil,
|
||||
}
|
||||
}
|
||||
err = desugar(&ast.Binds[i].Body, objLevel)
|
||||
err = desugar(&node.Binds[i].Body, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
err = desugar(&ast.Body, objLevel)
|
||||
err = desugar(&node.Body, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
case *LiteralBoolean:
|
||||
case *ast.LiteralBoolean:
|
||||
// Nothing to do.
|
||||
|
||||
case *LiteralNull:
|
||||
case *ast.LiteralNull:
|
||||
// Nothing to do.
|
||||
|
||||
case *LiteralNumber:
|
||||
case *ast.LiteralNumber:
|
||||
// Nothing to do.
|
||||
|
||||
case *LiteralString:
|
||||
if ast.Kind != VerbatimStringDouble && ast.Kind != VerbatimStringSingle {
|
||||
unescaped, err := stringUnescape(ast.Loc(), ast.Value)
|
||||
case *ast.LiteralString:
|
||||
if node.Kind != ast.VerbatimStringDouble && node.Kind != ast.VerbatimStringSingle {
|
||||
unescaped, err := stringUnescape(node.Loc(), node.Value)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
ast.Value = unescaped
|
||||
ast.Kind = StringDouble
|
||||
ast.BlockIndent = ""
|
||||
node.Value = unescaped
|
||||
node.Kind = ast.StringDouble
|
||||
node.BlockIndent = ""
|
||||
}
|
||||
case *Object:
|
||||
case *ast.Object:
|
||||
// Hidden variable to allow $ binding.
|
||||
if objLevel == 0 {
|
||||
dollar := Identifier("$")
|
||||
ast.Fields = append(ast.Fields, ObjectFieldLocalNoMethod(&dollar, &Self{}))
|
||||
dollar := ast.Identifier("$")
|
||||
node.Fields = append(node.Fields, ast.ObjectFieldLocalNoMethod(&dollar, &ast.Self{}))
|
||||
}
|
||||
|
||||
err = desugarFields(*ast.Loc(), &ast.Fields, objLevel)
|
||||
err = desugarFields(*node.Loc(), &node.Fields, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
var newFields DesugaredObjectFields
|
||||
var newAsserts Nodes
|
||||
var newFields ast.DesugaredObjectFields
|
||||
var newAsserts ast.Nodes
|
||||
|
||||
for _, field := range ast.Fields {
|
||||
if field.Kind == ObjectAssert {
|
||||
for _, field := range node.Fields {
|
||||
if field.Kind == ast.ObjectAssert {
|
||||
newAsserts = append(newAsserts, field.Expr2)
|
||||
} else if field.Kind == ObjectFieldExpr {
|
||||
newFields = append(newFields, DesugaredObjectField{field.Hide, field.Expr1, field.Expr2})
|
||||
} else if field.Kind == ast.ObjectFieldExpr {
|
||||
newFields = append(newFields, ast.DesugaredObjectField{field.Hide, field.Expr1, field.Expr2})
|
||||
} else {
|
||||
panic(fmt.Sprintf("INTERNAL ERROR: field should have been desugared: %s", field.Kind))
|
||||
}
|
||||
}
|
||||
|
||||
*astPtr = &DesugaredObject{ast.nodeBase, newAsserts, newFields}
|
||||
*astPtr = &ast.DesugaredObject{node.NodeBase, newAsserts, newFields}
|
||||
|
||||
case *DesugaredObject:
|
||||
case *ast.DesugaredObject:
|
||||
panic("Desugaring desugared object")
|
||||
|
||||
case *ObjectComp:
|
||||
comp, err := desugarObjectComp(ast, objLevel)
|
||||
case *ast.ObjectComp:
|
||||
comp, err := desugarObjectComp(node, objLevel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*astPtr = comp
|
||||
|
||||
case *ObjectComprehensionSimple:
|
||||
case *ast.ObjectComprehensionSimple:
|
||||
panic("Desugaring desugared object comprehension")
|
||||
|
||||
case *Self:
|
||||
case *ast.Self:
|
||||
// Nothing to do.
|
||||
|
||||
case *SuperIndex:
|
||||
if ast.Id != nil {
|
||||
ast.Index = &LiteralString{Value: string(*ast.Id)}
|
||||
ast.Id = nil
|
||||
case *ast.SuperIndex:
|
||||
if node.Id != nil {
|
||||
node.Index = &ast.LiteralString{Value: string(*node.Id)}
|
||||
node.Id = nil
|
||||
}
|
||||
|
||||
case *Unary:
|
||||
err = desugar(&ast.Expr, objLevel)
|
||||
case *ast.Unary:
|
||||
err = desugar(&node.Expr, objLevel)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
case *Var:
|
||||
case *ast.Var:
|
||||
// Nothing to do.
|
||||
|
||||
default:
|
||||
panic(fmt.Sprintf("Desugarer does not recognize ast: %s", reflect.TypeOf(ast)))
|
||||
panic(fmt.Sprintf("Desugarer does not recognize ast: %s", reflect.TypeOf(node)))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func desugarFile(ast *Node) error {
|
||||
func desugarFile(ast *ast.Node) error {
|
||||
err := desugar(ast, 0)
|
||||
if err != nil {
|
||||
return err
|
||||
|
||||
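One concrete desugaring shown above is ApplyBrace: `e { ... }` becomes `e + { ... }` by wrapping the two sides in an ast.Binary with BopPlus. A standalone sketch of that rewrite using only the exported ast types; the left/right literals are placeholders, and desugarApplyBrace is a made-up helper that mirrors the shape the hunk above builds.

```go
package main

import (
	"fmt"
	"reflect"

	"github.com/google/go-jsonnet/ast"
)

// desugarApplyBrace rewrites e { ... } (ast.ApplyBrace) into e + { ... },
// the same node shape the desugarer constructs in the diff above.
func desugarApplyBrace(node *ast.ApplyBrace) ast.Node {
	return &ast.Binary{
		NodeBase: node.NodeBase,
		Left:     node.Left,
		Op:       ast.BopPlus,
		Right:    node.Right,
	}
}

func main() {
	brace := &ast.ApplyBrace{
		Left:  &ast.Var{Id: "e"},
		Right: &ast.Object{},
	}
	out := desugarApplyBrace(brace)
	fmt.Println(reflect.TypeOf(out)) // *ast.Binary
}
```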
@@ -18,6 +18,8 @@ package jsonnet
 import (
 	"bytes"
 	"fmt"
+
+	"github.com/google/go-jsonnet/ast"
 )

 type ErrorFormatter struct {
@@ -72,5 +74,5 @@ func (ef *ErrorFormatter) buildStackTrace(frames []TraceFrame) string {
 type SourceProvider interface {
 	// TODO(sbarzowski) problem: locationRange.FileName may not necessarily
 	// uniquely identify a file. But this is the interface we want to have here.
-	getCode(LocationRange) string
+	getCode(ast.LocationRange) string
 }
evaluator.go (12)
@ -16,7 +16,11 @@ limitations under the License.
|
||||
|
||||
package jsonnet
|
||||
|
||||
import "fmt"
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/google/go-jsonnet/ast"
|
||||
)
|
||||
|
||||
// evaluator is a convenience wrapper for interpreter
|
||||
// Most importantly it keeps the context for traces and handles details
|
||||
@ -140,15 +144,15 @@ func (e *evaluator) evaluateObject(pv potentialValue) (valueObject, error) {
|
||||
return e.getObject(v)
|
||||
}
|
||||
|
||||
func (e *evaluator) evalInCurrentContext(a Node) (value, error) {
|
||||
func (e *evaluator) evalInCurrentContext(a ast.Node) (value, error) {
|
||||
return e.i.evaluate(a, e.trace.context)
|
||||
}
|
||||
|
||||
func (e *evaluator) evalInCleanEnv(newContext *TraceContext, env *environment, ast Node) (value, error) {
|
||||
func (e *evaluator) evalInCleanEnv(newContext *TraceContext, env *environment, ast ast.Node) (value, error) {
|
||||
return e.i.EvalInCleanEnv(e.trace, newContext, env, ast)
|
||||
}
|
||||
|
||||
func (e *evaluator) lookUpVar(ident Identifier) potentialValue {
|
||||
func (e *evaluator) lookUpVar(ident ast.Identifier) potentialValue {
|
||||
th := e.i.stack.lookUpVar(ident)
|
||||
if th == nil {
|
||||
panic(fmt.Sprintf("RUNTIME: Unknown variable: %v (we should have caught this statically)", ident))
|
||||
|
||||
@ -83,7 +83,7 @@ func (cache *ImportCache) ImportCode(codeDir, importedPath string, e *evaluator)
|
||||
return nil, cached.data.err
|
||||
}
|
||||
if cached.asCode == nil {
|
||||
ast, err := snippetToAST(cached.data.foundHere, cached.data.content)
|
||||
node, err := snippetToAST(cached.data.foundHere, cached.data.content)
|
||||
if err != nil {
|
||||
// TODO(sbarzowski) perhaps we should wrap (static) error here
|
||||
// within a RuntimeError? Because whether we get this error or not
|
||||
@ -100,7 +100,7 @@ func (cache *ImportCache) ImportCode(codeDir, importedPath string, e *evaluator)
|
||||
// The same thinking applies to external variables.
|
||||
cached.asCode = makeErrorThunk(err)
|
||||
} else {
|
||||
cached.asCode = makeThunk("import", e.i.initialEnv, ast)
|
||||
cached.asCode = makeThunk("import", e.i.initialEnv, node)
|
||||
}
|
||||
}
|
||||
return e.evaluate(cached.asCode)
|
||||
|
||||
@ -23,6 +23,8 @@ import (
|
||||
"path"
|
||||
"reflect"
|
||||
"sort"
|
||||
|
||||
"github.com/google/go-jsonnet/ast"
|
||||
)
|
||||
|
||||
// TODO(sbarzowski) use it as a pointer in most places b/c it can sometimes be shared
|
||||
@ -166,7 +168,7 @@ func (s *callStack) getSelfBinding() selfBinding {
|
||||
}
|
||||
|
||||
// lookUpVar finds the closest variable in scope that matches the given name.
|
||||
func (s *callStack) lookUpVar(id Identifier) potentialValue {
|
||||
func (s *callStack) lookUpVar(id ast.Identifier) potentialValue {
|
||||
for i := len(s.stack) - 1; i >= 0; i-- {
|
||||
bind := s.stack[i].env.upValues[id]
|
||||
if bind != nil {
|
||||
@ -192,17 +194,17 @@ func makeCallStack(limit int) callStack {
|
||||
|
||||
// Keeps current execution context and evaluates things
|
||||
type interpreter struct {
|
||||
stack callStack // TODO what is it?
|
||||
idArrayElement Identifier // TODO what is it?
|
||||
idInvariant Identifier // TODO what is it?
|
||||
externalVars vmExtMap // TODO what is it?
|
||||
stack callStack // TODO what is it?
|
||||
idArrayElement ast.Identifier // TODO what is it?
|
||||
idInvariant ast.Identifier // TODO what is it?
|
||||
externalVars vmExtMap // TODO what is it?
|
||||
|
||||
initialEnv environment
|
||||
importCache *ImportCache
|
||||
}
|
||||
|
||||
// Build a binding frame containing specified variables.
|
||||
func (i *interpreter) capture(freeVars Identifiers) bindingFrame {
|
||||
func (i *interpreter) capture(freeVars ast.Identifiers) bindingFrame {
|
||||
env := make(bindingFrame)
|
||||
for _, fv := range freeVars {
|
||||
env[fv] = i.stack.lookUpVar(fv)
|
||||
@ -227,14 +229,14 @@ func addBindings(a, b bindingFrame) bindingFrame {
|
||||
return result
|
||||
}
|
||||
|
||||
func (i *interpreter) getCurrentEnv(ast Node) environment {
|
||||
func (i *interpreter) getCurrentEnv(ast ast.Node) environment {
|
||||
return makeEnvironment(
|
||||
i.capture(ast.FreeVariables()),
|
||||
i.stack.getSelfBinding(),
|
||||
)
|
||||
}
|
||||
|
||||
func (i *interpreter) evaluate(a Node, context *TraceContext) (value, error) {
|
||||
func (i *interpreter) evaluate(a ast.Node, context *TraceContext) (value, error) {
|
||||
// TODO(dcunnin): All the other cases...
|
||||
|
||||
e := &evaluator{
|
||||
@ -246,7 +248,7 @@ func (i *interpreter) evaluate(a Node, context *TraceContext) (value, error) {
|
||||
}
|
||||
|
||||
switch ast := a.(type) {
|
||||
case *Array:
|
||||
case *ast.Array:
|
||||
sb := i.stack.getSelfBinding()
|
||||
var elements []potentialValue
|
||||
for _, el := range ast.Elements {
|
||||
@ -256,7 +258,7 @@ func (i *interpreter) evaluate(a Node, context *TraceContext) (value, error) {
|
||||
}
|
||||
return makeValueArray(elements), nil
|
||||
|
||||
case *Binary:
|
||||
case *ast.Binary:
|
||||
// Some binary operators are lazy, so thunks are needed in general
|
||||
env := i.getCurrentEnv(ast)
|
||||
// TODO(sbarzowski) make sure it displays nicely in stack trace (thunk names etc.)
|
||||
@ -274,7 +276,7 @@ func (i *interpreter) evaluate(a Node, context *TraceContext) (value, error) {
|
||||
}
|
||||
return result, nil
|
||||
|
||||
case *Unary:
|
||||
case *ast.Unary:
|
||||
env := i.getCurrentEnv(ast)
|
||||
arg := makeThunk("x", env, ast.Expr)
|
||||
|
||||
@ -286,7 +288,7 @@ func (i *interpreter) evaluate(a Node, context *TraceContext) (value, error) {
|
||||
}
|
||||
return result, nil
|
||||
|
||||
case *Conditional:
|
||||
case *ast.Conditional:
|
||||
cond, err := e.evalInCurrentContext(ast.Cond)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@ -300,7 +302,7 @@ func (i *interpreter) evaluate(a Node, context *TraceContext) (value, error) {
|
||||
}
|
||||
return e.evalInCurrentContext(ast.BranchFalse)
|
||||
|
||||
case *DesugaredObject:
|
||||
case *ast.DesugaredObject:
|
||||
// Evaluate all the field names. Check for null, dups, etc.
|
||||
fields := make(valueSimpleObjectFieldMap)
|
||||
for _, field := range ast.Fields {
|
||||
@ -327,7 +329,7 @@ func (i *interpreter) evaluate(a Node, context *TraceContext) (value, error) {
|
||||
upValues := i.capture(ast.FreeVariables())
|
||||
return makeValueSimpleObject(upValues, fields, ast.Asserts), nil
|
||||
|
||||
case *Error:
|
||||
case *ast.Error:
|
||||
msgVal, err := e.evalInCurrentContext(ast.Expr)
|
||||
if err != nil {
|
||||
// error when evaluating error message
|
||||
@ -339,7 +341,7 @@ func (i *interpreter) evaluate(a Node, context *TraceContext) (value, error) {
|
||||
}
|
||||
return nil, e.Error(msg.value)
|
||||
|
||||
case *Index:
|
||||
case *ast.Index:
|
||||
targetValue, err := e.evalInCurrentContext(ast.Target)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@ -360,29 +362,29 @@ func (i *interpreter) evaluate(a Node, context *TraceContext) (value, error) {
|
||||
|
||||
return nil, e.Error(fmt.Sprintf("Value non indexable: %v", reflect.TypeOf(targetValue)))
|
||||
|
||||
case *Import:
|
||||
case *ast.Import:
|
||||
// TODO(sbarzowski) put this information in AST instead of getting it out of tracing data...
|
||||
codeDir := path.Dir(e.trace.loc.FileName)
|
||||
return i.importCache.ImportCode(codeDir, ast.File, e)
|
||||
|
||||
case *ImportStr:
|
||||
case *ast.ImportStr:
|
||||
// TODO(sbarzowski) put this information in AST instead of getting it out of tracing data...
|
||||
codeDir := path.Dir(e.trace.loc.FileName)
|
||||
return i.importCache.ImportString(codeDir, ast.File)
|
||||
|
||||
case *LiteralBoolean:
|
||||
case *ast.LiteralBoolean:
|
||||
return makeValueBoolean(ast.Value), nil
|
||||
|
||||
case *LiteralNull:
|
||||
case *ast.LiteralNull:
|
||||
return makeValueNull(), nil
|
||||
|
||||
case *LiteralNumber:
|
||||
case *ast.LiteralNumber:
|
||||
return makeValueNumber(ast.Value), nil
|
||||
|
||||
case *LiteralString:
|
||||
case *ast.LiteralString:
|
||||
return makeValueString(ast.Value), nil
|
||||
|
||||
case *Local:
|
||||
case *ast.Local:
|
||||
vars := make(bindingFrame)
|
||||
bindEnv := i.getCurrentEnv(a)
|
||||
for _, bind := range ast.Binds {
|
||||
@ -399,14 +401,14 @@ func (i *interpreter) evaluate(a Node, context *TraceContext) (value, error) {
|
||||
i.stack.pop()
|
||||
return v, err
|
||||
|
||||
case *Self:
|
||||
case *ast.Self:
|
||||
sb := i.stack.getSelfBinding()
|
||||
return sb.self, nil
|
||||
|
||||
case *Var:
|
||||
case *ast.Var:
|
||||
return e.evaluate(e.lookUpVar(ast.Id))
|
||||
|
||||
case *SuperIndex:
|
||||
case *ast.SuperIndex:
|
||||
index, err := e.evalInCurrentContext(ast.Index)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@ -417,12 +419,12 @@ func (i *interpreter) evaluate(a Node, context *TraceContext) (value, error) {
|
||||
}
|
||||
return superIndex(e, i.stack.getSelfBinding(), indexStr.value)
|
||||
|
||||
case *Function:
|
||||
case *ast.Function:
|
||||
return &valueFunction{
|
||||
ec: makeClosure(i.getCurrentEnv(a), ast),
|
||||
}, nil
|
||||
|
||||
case *Apply:
|
||||
case *ast.Apply:
|
||||
// Eval target
|
||||
target, err := e.evalInCurrentContext(ast.Target)
|
||||
if err != nil {
|
||||
@ -625,7 +627,7 @@ func (i *interpreter) manifestJSON(trace *TraceElement, v value, multiline bool,
|
||||
}
|
||||
|
||||
func (i *interpreter) EvalInCleanEnv(fromWhere *TraceElement, newContext *TraceContext,
|
||||
env *environment, ast Node) (value, error) {
|
||||
env *environment, ast ast.Node) (value, error) {
|
||||
err := i.newCall(fromWhere, *env)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@ -648,7 +650,7 @@ func buildStdObject(i *interpreter) (value, error) {
|
||||
}
|
||||
|
||||
for name, value := range builtinFields {
|
||||
obj.fields[name] = valueSimpleObjectField{ObjectFieldHidden, value}
|
||||
obj.fields[name] = valueSimpleObjectField{ast.ObjectFieldHidden, value}
|
||||
}
|
||||
return obj, nil
|
||||
}
|
||||
@ -658,21 +660,21 @@ func evaluateStd(i *interpreter) (value, error) {
|
||||
bindingFrame{},
|
||||
makeUnboundSelfBinding(),
|
||||
)
|
||||
evalLoc := makeLocationRangeMessage("During evaluation of std")
|
||||
evalLoc := ast.MakeLocationRangeMessage("During evaluation of std")
|
||||
evalTrace := &TraceElement{loc: &evalLoc}
|
||||
ast, err := snippetToAST("std.jsonnet", getStdCode())
|
||||
node, err := snippetToAST("std.jsonnet", getStdCode())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
context := TraceContext{Name: "<stdlib>"}
|
||||
return i.EvalInCleanEnv(evalTrace, &context, &beforeStdEnv, ast)
|
||||
return i.EvalInCleanEnv(evalTrace, &context, &beforeStdEnv, node)
|
||||
}
|
||||
|
||||
func buildInterpreter(ext vmExtMap, maxStack int, importer Importer) (*interpreter, error) {
|
||||
i := interpreter{
|
||||
stack: makeCallStack(maxStack),
|
||||
idArrayElement: Identifier("array_element"),
|
||||
idInvariant: Identifier("object_assert"),
|
||||
idArrayElement: ast.Identifier("array_element"),
|
||||
idInvariant: ast.Identifier("object_assert"),
|
||||
externalVars: ext,
|
||||
|
||||
importCache: MakeImportCache(importer),
|
||||
@ -692,22 +694,22 @@ func buildInterpreter(ext vmExtMap, maxStack int, importer Importer) (*interpret
|
||||
return &i, nil
|
||||
}
|
||||
|
||||
func evaluate(ast Node, ext vmExtMap, maxStack int, importer Importer) (string, error) {
|
||||
func evaluate(node ast.Node, ext vmExtMap, maxStack int, importer Importer) (string, error) {
|
||||
i, err := buildInterpreter(ext, maxStack, importer)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
evalLoc := makeLocationRangeMessage("During evaluation")
|
||||
evalLoc := ast.MakeLocationRangeMessage("During evaluation")
|
||||
evalTrace := &TraceElement{
|
||||
loc: &evalLoc,
|
||||
}
|
||||
context := TraceContext{Name: "<main>"}
|
||||
result, err := i.EvalInCleanEnv(evalTrace, &context, &i.initialEnv, ast)
|
||||
result, err := i.EvalInCleanEnv(evalTrace, &context, &i.initialEnv, node)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
var buffer bytes.Buffer
|
||||
manifestationLoc := makeLocationRangeMessage("During manifestation")
|
||||
manifestationLoc := ast.MakeLocationRangeMessage("During manifestation")
|
||||
manifestationTrace := &TraceElement{
|
||||
loc: &manifestationLoc,
|
||||
}
|
||||
|
||||
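The interpreter above is structured as one large type switch over ast.Node. A minimal standalone sketch of the same dispatch shape (this is not the interpreter's code, just the pattern, with a couple of node kinds and a made-up describe helper):

```go
package main

import (
	"fmt"

	"github.com/google/go-jsonnet/ast"
)

// describe dispatches on the concrete node type, the same way the
// interpreter's evaluate method does in the hunks above.
func describe(n ast.Node) string {
	switch n := n.(type) {
	case *ast.LiteralNumber:
		return fmt.Sprintf("number %v", n.Value)
	case *ast.LiteralString:
		return fmt.Sprintf("string %q", n.Value)
	case *ast.Var:
		return fmt.Sprintf("variable %s", n.Id)
	default:
		return fmt.Sprintf("unhandled node %T", n)
	}
}

func main() {
	fmt.Println(describe(&ast.LiteralNumber{Value: 42}))
	fmt.Println(describe(&ast.Var{Id: "std"}))
}
```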
lexer.go (18)
@ -22,6 +22,8 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/google/go-jsonnet/ast"
|
||||
)
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -160,7 +162,7 @@ type token struct {
|
||||
stringBlockIndent string // The sequence of whitespace that indented the block.
|
||||
stringBlockTermIndent string // This is always fewer whitespace characters than in stringBlockIndent.
|
||||
|
||||
loc LocationRange
|
||||
loc ast.LocationRange
|
||||
}
|
||||
|
||||
type tokens []token
|
||||
@ -252,7 +254,7 @@ type lexer struct {
|
||||
// Information about the token we are working on right now
|
||||
fodder fodder
|
||||
tokenStart int
|
||||
tokenStartLoc Location
|
||||
tokenStartLoc ast.Location
|
||||
}
|
||||
|
||||
const lexEOF = -1
|
||||
@ -263,7 +265,7 @@ func makeLexer(fn string, input string) *lexer {
|
||||
input: input,
|
||||
pos: position{byteNo: 0, lineNo: 1, lineStart: 0},
|
||||
prev: position{byteNo: lexEOF, lineNo: 0, lineStart: 0},
|
||||
tokenStartLoc: Location{Line: 1, Column: 1},
|
||||
tokenStartLoc: ast.Location{Line: 1, Column: 1},
|
||||
}
|
||||
}
|
||||
|
||||
@ -305,15 +307,15 @@ func (l *lexer) backup() {
|
||||
l.prev = position{byteNo: lexEOF}
|
||||
}
|
||||
|
||||
func locationFromPosition(pos position) Location {
|
||||
return Location{Line: pos.lineNo, Column: pos.byteNo - pos.lineStart + 1}
|
||||
func locationFromPosition(pos position) ast.Location {
|
||||
return ast.Location{Line: pos.lineNo, Column: pos.byteNo - pos.lineStart + 1}
|
||||
}
|
||||
|
||||
func (l *lexer) location() Location {
|
||||
func (l *lexer) location() ast.Location {
|
||||
return locationFromPosition(l.pos)
|
||||
}
|
||||
|
||||
func (l *lexer) prevLocation() Location {
|
||||
func (l *lexer) prevLocation() ast.Location {
|
||||
if l.prev.byteNo == lexEOF {
|
||||
panic("prevLocation called with no valid previous rune")
|
||||
}
|
||||
@ -335,7 +337,7 @@ func (l *lexer) emitFullToken(kind tokenKind, data, stringBlockIndent, stringBlo
|
||||
data: data,
|
||||
stringBlockIndent: stringBlockIndent,
|
||||
stringBlockTermIndent: stringBlockTermIndent,
|
||||
loc: makeLocationRange(l.fileName, l.tokenStartLoc, l.location()),
|
||||
loc: ast.MakeLocationRange(l.fileName, l.tokenStartLoc, l.location()),
|
||||
})
|
||||
l.fodder = fodder{}
|
||||
}
|
||||
|
||||
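locationFromPosition above turns the lexer's byte-oriented position into a 1-based ast.Location (Column = byteNo - lineStart + 1). A tiny standalone check of that arithmetic, with made-up position values:

```go
package main

import (
	"fmt"

	"github.com/google/go-jsonnet/ast"
)

func main() {
	// Mirrors locationFromPosition: Column = byteNo - lineStart + 1.
	byteNo, lineNo, lineStart := 12, 3, 10
	loc := ast.Location{Line: lineNo, Column: byteNo - lineStart + 1}
	fmt.Println(loc.Line, loc.Column) // 3 3
}
```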
parser.go (358)
@ -19,35 +19,37 @@ package jsonnet
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
"github.com/google/go-jsonnet/ast"
|
||||
)
|
||||
|
||||
type precedence int
|
||||
|
||||
const (
|
||||
applyPrecedence precedence = 2 // Function calls and indexing.
|
||||
applyPrecedence precedence = 2 // ast.Function calls and indexing.
|
||||
unaryPrecedence precedence = 4 // Logical and bitwise negation, unary + -
|
||||
maxPrecedence precedence = 16 // Local, If, Import, Function, Error
|
||||
maxPrecedence precedence = 16 // ast.Local, If, ast.Import, ast.Function, Error
|
||||
)
|
||||
|
||||
var bopPrecedence = map[BinaryOp]precedence{
|
||||
BopMult: 5,
|
||||
BopDiv: 5,
|
||||
BopPercent: 5,
|
||||
BopPlus: 6,
|
||||
BopMinus: 6,
|
||||
BopShiftL: 7,
|
||||
BopShiftR: 7,
|
||||
BopGreater: 8,
|
||||
BopGreaterEq: 8,
|
||||
BopLess: 8,
|
||||
BopLessEq: 8,
|
||||
BopManifestEqual: 9,
|
||||
BopManifestUnequal: 9,
|
||||
BopBitwiseAnd: 10,
|
||||
BopBitwiseXor: 11,
|
||||
BopBitwiseOr: 12,
|
||||
BopAnd: 13,
|
||||
BopOr: 14,
|
||||
var bopPrecedence = map[ast.BinaryOp]precedence{
|
||||
ast.BopMult: 5,
|
||||
ast.BopDiv: 5,
|
||||
ast.BopPercent: 5,
|
||||
ast.BopPlus: 6,
|
||||
ast.BopMinus: 6,
|
||||
ast.BopShiftL: 7,
|
||||
ast.BopShiftR: 7,
|
||||
ast.BopGreater: 8,
|
||||
ast.BopGreaterEq: 8,
|
||||
ast.BopLess: 8,
|
||||
ast.BopLessEq: 8,
|
||||
ast.BopManifestEqual: 9,
|
||||
ast.BopManifestUnequal: 9,
|
||||
ast.BopBitwiseAnd: 10,
|
||||
ast.BopBitwiseXor: 11,
|
||||
ast.BopBitwiseOr: 12,
|
||||
ast.BopAnd: 13,
|
||||
ast.BopOr: 14,
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -57,12 +59,12 @@ func makeUnexpectedError(t *token, while string) error {
|
||||
fmt.Sprintf("Unexpected: %v while %v", t, while), t.loc)
|
||||
}
|
||||
|
||||
func locFromTokens(begin, end *token) LocationRange {
|
||||
return makeLocationRange(begin.loc.FileName, begin.loc.Begin, end.loc.End)
|
||||
func locFromTokens(begin, end *token) ast.LocationRange {
|
||||
return ast.MakeLocationRange(begin.loc.FileName, begin.loc.Begin, end.loc.End)
|
||||
}
|
||||
|
||||
func locFromTokenAST(begin *token, end Node) LocationRange {
|
||||
return makeLocationRange(begin.loc.FileName, begin.loc.Begin, end.Loc().End)
|
||||
func locFromTokenAST(begin *token, end ast.Node) ast.LocationRange {
|
||||
return ast.MakeLocationRange(begin.loc.FileName, begin.loc.Begin, end.Loc().End)
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
@ -112,24 +114,24 @@ func (p *parser) peek() *token {
|
||||
return &p.t[p.currT]
|
||||
}
|
||||
|
||||
func (p *parser) parseIdentifierList(elementKind string) (Identifiers, bool, error) {
|
||||
func (p *parser) parseIdentifierList(elementKind string) (ast.Identifiers, bool, error) {
|
||||
_, exprs, gotComma, err := p.parseCommaList(tokenParenR, elementKind)
|
||||
if err != nil {
|
||||
return Identifiers{}, false, err
|
||||
return ast.Identifiers{}, false, err
|
||||
}
|
||||
var ids Identifiers
|
||||
var ids ast.Identifiers
|
||||
for _, n := range exprs {
|
||||
v, ok := n.(*Var)
|
||||
v, ok := n.(*ast.Var)
|
||||
if !ok {
|
||||
return Identifiers{}, false, makeStaticError(fmt.Sprintf("Expected simple identifier but got a complex expression."), *n.Loc())
|
||||
return ast.Identifiers{}, false, makeStaticError(fmt.Sprintf("Expected simple identifier but got a complex expression."), *n.Loc())
|
||||
}
|
||||
ids = append(ids, v.Id)
|
||||
}
|
||||
return ids, gotComma, nil
|
||||
}
|
||||
|
||||
func (p *parser) parseCommaList(end tokenKind, elementKind string) (*token, Nodes, bool, error) {
|
||||
var exprs Nodes
|
||||
func (p *parser) parseCommaList(end tokenKind, elementKind string) (*token, ast.Nodes, bool, error) {
|
||||
var exprs ast.Nodes
|
||||
gotComma := false
|
||||
first := true
|
||||
for {
|
||||
@ -160,13 +162,13 @@ func (p *parser) parseCommaList(end tokenKind, elementKind string) (*token, Node
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) parseBind(binds *LocalBinds) error {
|
||||
func (p *parser) parseBind(binds *ast.LocalBinds) error {
|
||||
varID, err := p.popExpect(tokenIdentifier)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, b := range *binds {
|
||||
if b.Variable == Identifier(varID.data) {
|
||||
if b.Variable == ast.Identifier(varID.data) {
|
||||
return makeStaticError(fmt.Sprintf("Duplicate local var: %v", varID.data), varID.loc)
|
||||
}
|
||||
}
|
||||
@ -185,8 +187,8 @@ func (p *parser) parseBind(binds *LocalBinds) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*binds = append(*binds, LocalBind{
|
||||
Variable: Identifier(varID.data),
|
||||
*binds = append(*binds, ast.LocalBind{
|
||||
Variable: ast.Identifier(varID.data),
|
||||
Body: body,
|
||||
FunctionSugar: true,
|
||||
Params: params,
|
||||
@ -201,8 +203,8 @@ func (p *parser) parseBind(binds *LocalBinds) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
*binds = append(*binds, LocalBind{
|
||||
Variable: Identifier(varID.data),
|
||||
*binds = append(*binds, ast.LocalBind{
|
||||
Variable: ast.Identifier(varID.data),
|
||||
Body: body,
|
||||
})
|
||||
}
|
||||
@ -210,7 +212,7 @@ func (p *parser) parseBind(binds *LocalBinds) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
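parseBind's first step is a linear scan for a duplicate binding before appending, so `local x = 1, x = 2` is rejected at parse time. A minimal sketch of that check, with localBind standing in for ast.LocalBind:

package main

import "fmt"

// localBind is an illustrative stand-in for ast.LocalBind, keeping the bound
// expression as plain text.
type localBind struct {
	variable string
	body     string
}

// addBind mirrors parseBind's duplicate scan before appending to the slice.
func addBind(binds *[]localBind, name, body string) error {
	for _, b := range *binds {
		if b.variable == name {
			return fmt.Errorf("Duplicate local var: %v", name)
		}
	}
	*binds = append(*binds, localBind{variable: name, body: body})
	return nil
}

func main() {
	var binds []localBind
	fmt.Println(addBind(&binds, "x", "1")) // <nil>
	fmt.Println(addBind(&binds, "x", "2")) // Duplicate local var: x
}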
func (p *parser) parseObjectAssignmentOp() (plusSugar bool, hide ObjectFieldHide, err error) {
|
||||
func (p *parser) parseObjectAssignmentOp() (plusSugar bool, hide ast.ObjectFieldHide, err error) {
|
||||
op, err := p.popExpect(tokenOperator)
|
||||
if err != nil {
|
||||
return
|
||||
@ -234,11 +236,11 @@ func (p *parser) parseObjectAssignmentOp() (plusSugar bool, hide ObjectFieldHide
|
||||
|
||||
switch numColons {
|
||||
case 1:
|
||||
hide = ObjectFieldInherit
|
||||
hide = ast.ObjectFieldInherit
|
||||
case 2:
|
||||
hide = ObjectFieldHidden
|
||||
hide = ast.ObjectFieldHidden
|
||||
case 3:
|
||||
hide = ObjectFieldVisible
|
||||
hide = ast.ObjectFieldVisible
|
||||
default:
|
||||
err = makeStaticError(
|
||||
fmt.Sprintf("Expected one of :, ::, :::, +:, +::, +:::, got: %v", op.data), op.loc)
|
||||
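parseObjectAssignmentOp maps the number of colons to a visibility level (: inherits, :: hides, ::: forces visibility), with an optional leading + for the +: merge sugar. A sketch of the same mapping over a plain string, with fieldHide standing in for ast.ObjectFieldHide:

package main

import (
	"fmt"
	"strings"
)

// fieldHide is an illustrative stand-in for ast.ObjectFieldHide.
type fieldHide int

const (
	fieldInherit fieldHide = iota // ":"   default visibility
	fieldHidden                   // "::"  never manifested
	fieldVisible                  // ":::" forced visible
)

// assignmentOp mirrors parseObjectAssignmentOp: an optional "+" (the +: merge
// sugar) followed by one, two or three colons.
func assignmentOp(op string) (plusSugar bool, hide fieldHide, err error) {
	if strings.HasPrefix(op, "+") {
		plusSugar = true
		op = op[1:]
	}
	switch op {
	case ":":
		hide = fieldInherit
	case "::":
		hide = fieldHidden
	case ":::":
		hide = fieldVisible
	default:
		err = fmt.Errorf("expected one of :, ::, :::, +:, +::, +:::, got %q", op)
	}
	return
}

func main() {
	fmt.Println(assignmentOp("+::")) // true 1 <nil>
	fmt.Println(assignmentOp("="))   // false 0 error
}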
@ -251,10 +253,10 @@ func (p *parser) parseObjectAssignmentOp() (plusSugar bool, hide ObjectFieldHide
|
||||
// +gen set
|
||||
type literalField string
|
||||
|
||||
func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
var fields ObjectFields
|
||||
func (p *parser) parseObjectRemainder(tok *token) (ast.Node, *token, error) {
|
||||
var fields ast.ObjectFields
|
||||
literalFields := make(literalFieldSet)
|
||||
binds := make(IdentifierSet)
|
||||
binds := make(ast.IdentifierSet)
|
||||
|
||||
gotComma := false
|
||||
first := true
|
||||
@ -269,8 +271,8 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
}
|
||||
|
||||
if next.kind == tokenBraceR {
|
||||
return &Object{
|
||||
nodeBase: nodeBase{loc: locFromTokens(tok, next)},
|
||||
return &ast.Object{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokens(tok, next)),
|
||||
Fields: fields,
|
||||
TrailingComma: gotComma,
|
||||
}, next, nil
|
||||
@ -280,12 +282,12 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
// It's a comprehension
|
||||
numFields := 0
|
||||
numAsserts := 0
|
||||
var field ObjectField
|
||||
var field ast.ObjectField
|
||||
for _, f := range fields {
|
||||
if f.Kind == ObjectLocal {
|
||||
if f.Kind == ast.ObjectLocal {
|
||||
continue
|
||||
}
|
||||
if f.Kind == ObjectAssert {
|
||||
if f.Kind == ast.ObjectAssert {
|
||||
numAsserts++
|
||||
continue
|
||||
}
|
||||
@ -299,18 +301,18 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
if numFields != 1 {
|
||||
return nil, nil, makeStaticError("Object comprehension can only have one field.", next.loc)
|
||||
}
|
||||
if field.Hide != ObjectFieldInherit {
|
||||
if field.Hide != ast.ObjectFieldInherit {
|
||||
return nil, nil, makeStaticError("Object comprehensions cannot have hidden fields.", next.loc)
|
||||
}
|
||||
if field.Kind != ObjectFieldExpr {
|
||||
if field.Kind != ast.ObjectFieldExpr {
|
||||
return nil, nil, makeStaticError("Object comprehensions can only have [e] fields.", next.loc)
|
||||
}
|
||||
specs, last, err := p.parseComprehensionSpecs(tokenBraceR)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return &ObjectComp{
|
||||
nodeBase: nodeBase{loc: locFromTokens(tok, last)},
|
||||
return &ast.ObjectComp{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokens(tok, last)),
|
||||
Fields: fields,
|
||||
TrailingComma: gotComma,
|
||||
Specs: *specs,
|
||||
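When the body of parseObjectRemainder turns out to be a comprehension, it checks that (ignoring locals and asserts) there is exactly one field, that it is not hidden, and that its name is computed with [e]. A compact sketch of those three checks using a simplified field type; objField is not the real ast.ObjectField:

package main

import (
	"errors"
	"fmt"
)

// objField is an illustrative, stripped-down view of ast.ObjectField: only
// the properties the comprehension checks look at.
type objField struct {
	kind   string // "local", "assert", "expr" (a [e]: ... field), "id", "str"
	hidden bool
}

// checkComprehension mirrors the validation above for { [k]: v for k in ... }:
// exactly one real field, not hidden, with a computed ([e]) name.
func checkComprehension(fields []objField) error {
	var kept []objField
	for _, f := range fields {
		if f.kind == "local" || f.kind == "assert" {
			continue // locals and asserts may sit alongside the one field
		}
		kept = append(kept, f)
	}
	switch {
	case len(kept) != 1:
		return errors.New("object comprehension can only have one field")
	case kept[0].hidden:
		return errors.New("object comprehensions cannot have hidden fields")
	case kept[0].kind != "expr":
		return errors.New("object comprehensions can only have [e] fields")
	}
	return nil
}

func main() {
	fmt.Println(checkComprehension([]objField{{kind: "expr"}, {kind: "local"}})) // <nil>
	fmt.Println(checkComprehension([]objField{{kind: "id"}}))                    // only [e] fields allowed
}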
@ -324,39 +326,39 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
|
||||
switch next.kind {
|
||||
case tokenBracketL, tokenIdentifier, tokenStringDouble, tokenStringSingle, tokenStringBlock:
|
||||
var kind ObjectFieldKind
|
||||
var expr1 Node
|
||||
var id *Identifier
|
||||
var kind ast.ObjectFieldKind
|
||||
var expr1 ast.Node
|
||||
var id *ast.Identifier
|
||||
switch next.kind {
|
||||
case tokenIdentifier:
|
||||
kind = ObjectFieldID
|
||||
id = (*Identifier)(&next.data)
|
||||
kind = ast.ObjectFieldID
|
||||
id = (*ast.Identifier)(&next.data)
|
||||
case tokenStringDouble:
|
||||
kind = ObjectFieldStr
|
||||
expr1 = &LiteralString{
|
||||
nodeBase: nodeBase{loc: next.loc},
|
||||
kind = ast.ObjectFieldStr
|
||||
expr1 = &ast.LiteralString{
|
||||
NodeBase: ast.NewNodeBaseLoc(next.loc),
|
||||
Value: next.data,
|
||||
Kind: StringDouble,
|
||||
Kind: ast.StringDouble,
|
||||
}
|
||||
case tokenStringSingle:
|
||||
kind = ObjectFieldStr
|
||||
expr1 = &LiteralString{
|
||||
nodeBase: nodeBase{loc: next.loc},
|
||||
kind = ast.ObjectFieldStr
|
||||
expr1 = &ast.LiteralString{
|
||||
NodeBase: ast.NewNodeBaseLoc(next.loc),
|
||||
Value: next.data,
|
||||
Kind: StringSingle,
|
||||
Kind: ast.StringSingle,
|
||||
}
|
||||
case tokenStringBlock:
|
||||
kind = ObjectFieldStr
|
||||
expr1 = &LiteralString{
|
||||
nodeBase: nodeBase{loc: next.loc},
|
||||
kind = ast.ObjectFieldStr
|
||||
expr1 = &ast.LiteralString{
|
||||
NodeBase: ast.NewNodeBaseLoc(next.loc),
|
||||
Value: next.data,
|
||||
Kind: StringBlock,
|
||||
Kind: ast.StringBlock,
|
||||
BlockIndent: next.stringBlockIndent,
|
||||
}
|
||||
// TODO(sbarzowski) are verbatim string literals allowed here?
|
||||
// if so, maybe it's time we extracted string literal creation somewhere...
|
||||
default:
|
||||
kind = ObjectFieldExpr
|
||||
kind = ast.ObjectFieldExpr
|
||||
var err error
|
||||
expr1, err = p.parse(maxPrecedence)
|
||||
if err != nil {
|
||||
@ -370,7 +372,7 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
|
||||
isMethod := false
|
||||
methComma := false
|
||||
var params Identifiers
|
||||
var params ast.Identifiers
|
||||
if p.peek().kind == tokenParenL {
|
||||
p.pop()
|
||||
var err error
|
||||
@ -391,7 +393,7 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
fmt.Sprintf("Cannot use +: syntax sugar in a method: %v", next.data), next.loc)
|
||||
}
|
||||
|
||||
if kind != ObjectFieldExpr {
|
||||
if kind != ast.ObjectFieldExpr {
|
||||
if !literalFields.Add(literalField(next.data)) {
|
||||
return nil, nil, makeStaticError(
|
||||
fmt.Sprintf("Duplicate field: %v", next.data), next.loc)
|
||||
@ -403,7 +405,7 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
fields = append(fields, ObjectField{
|
||||
fields = append(fields, ast.ObjectField{
|
||||
Kind: kind,
|
||||
Hide: hide,
|
||||
SuperSugar: plusSugar,
|
||||
@ -421,7 +423,7 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
id := Identifier(varID.data)
|
||||
id := ast.Identifier(varID.data)
|
||||
|
||||
if binds.Contains(id) {
|
||||
return nil, nil, makeStaticError(fmt.Sprintf("Duplicate local var: %v", id), varID.loc)
|
||||
@ -429,7 +431,7 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
|
||||
isMethod := false
|
||||
funcComma := false
|
||||
var params Identifiers
|
||||
var params ast.Identifiers
|
||||
if p.peek().kind == tokenParenL {
|
||||
p.pop()
|
||||
isMethod = true
|
||||
@ -450,9 +452,9 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
|
||||
binds.Add(id)
|
||||
|
||||
fields = append(fields, ObjectField{
|
||||
Kind: ObjectLocal,
|
||||
Hide: ObjectFieldVisible,
|
||||
fields = append(fields, ast.ObjectField{
|
||||
Kind: ast.ObjectLocal,
|
||||
Hide: ast.ObjectFieldVisible,
|
||||
SuperSugar: false,
|
||||
MethodSugar: isMethod,
|
||||
Id: &id,
|
||||
@ -466,7 +468,7 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
var msg Node
|
||||
var msg ast.Node
|
||||
if p.peek().kind == tokenOperator && p.peek().data == ":" {
|
||||
p.pop()
|
||||
msg, err = p.parse(maxPrecedence)
|
||||
@ -475,9 +477,9 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
}
|
||||
}
|
||||
|
||||
fields = append(fields, ObjectField{
|
||||
Kind: ObjectAssert,
|
||||
Hide: ObjectFieldVisible,
|
||||
fields = append(fields, ast.ObjectField{
|
||||
Kind: ast.ObjectAssert,
|
||||
Hide: ast.ObjectFieldVisible,
|
||||
Expr2: cond,
|
||||
Expr3: msg,
|
||||
})
|
||||
@ -489,14 +491,14 @@ func (p *parser) parseObjectRemainder(tok *token) (Node, *token, error) {
|
||||
}
|
||||
|
||||
/* parses for x in expr for y in expr if expr for z in expr ... */
|
||||
func (p *parser) parseComprehensionSpecs(end tokenKind) (*CompSpecs, *token, error) {
|
||||
var specs CompSpecs
|
||||
func (p *parser) parseComprehensionSpecs(end tokenKind) (*ast.CompSpecs, *token, error) {
|
||||
var specs ast.CompSpecs
|
||||
for {
|
||||
varID, err := p.popExpect(tokenIdentifier)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
id := Identifier(varID.data)
|
||||
id := ast.Identifier(varID.data)
|
||||
_, err = p.popExpect(tokenIn)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
@ -505,8 +507,8 @@ func (p *parser) parseComprehensionSpecs(end tokenKind) (*CompSpecs, *token, err
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
specs = append(specs, CompSpec{
|
||||
Kind: CompFor,
|
||||
specs = append(specs, ast.CompSpec{
|
||||
Kind: ast.CompFor,
|
||||
VarName: &id,
|
||||
Expr: arr,
|
||||
})
|
||||
@ -517,8 +519,8 @@ func (p *parser) parseComprehensionSpecs(end tokenKind) (*CompSpecs, *token, err
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
specs = append(specs, CompSpec{
|
||||
Kind: CompIf,
|
||||
specs = append(specs, ast.CompSpec{
|
||||
Kind: ast.CompIf,
|
||||
VarName: nil,
|
||||
Expr: cond,
|
||||
})
|
||||
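parseComprehensionSpecs reads an alternating chain of `for x in e` and `if e` clauses. Semantically the chain behaves like nested loops and filters; a rough Go analogue of the Jsonnet comprehension [x + y for x in xs if x > 0 for y in ys]:

package main

import "fmt"

func main() {
	xs := []int{-1, 1, 2}
	ys := []int{10, 20}

	// for x in xs if x > 0 for y in ys: collect x + y
	var out []int
	for _, x := range xs {
		if !(x > 0) {
			continue // an `if` spec filters everything nested under it
		}
		for _, y := range ys {
			out = append(out, x+y)
		}
	}
	fmt.Println(out) // [11 21 12 22]
}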
@ -537,12 +539,12 @@ func (p *parser) parseComprehensionSpecs(end tokenKind) (*CompSpecs, *token, err
|
||||
|
||||
// Assumes that the leading '[' has already been consumed and passed as tok.
|
||||
// Should read up to and consume the trailing ']'
|
||||
func (p *parser) parseArray(tok *token) (Node, error) {
|
||||
func (p *parser) parseArray(tok *token) (ast.Node, error) {
|
||||
next := p.peek()
|
||||
if next.kind == tokenBracketR {
|
||||
p.pop()
|
||||
return &Array{
|
||||
nodeBase: nodeBase{loc: locFromTokens(tok, next)},
|
||||
return &ast.Array{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokens(tok, next)),
|
||||
}, nil
|
||||
}
|
||||
|
||||
@ -565,15 +567,15 @@ func (p *parser) parseArray(tok *token) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &ArrayComp{
|
||||
nodeBase: nodeBase{loc: locFromTokens(tok, last)},
|
||||
return &ast.ArrayComp{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokens(tok, last)),
|
||||
Body: first,
|
||||
TrailingComma: gotComma,
|
||||
Specs: *specs,
|
||||
}, nil
|
||||
}
|
||||
// Not a comprehension: It can have more elements.
|
||||
elements := Nodes{first}
|
||||
elements := ast.Nodes{first}
|
||||
|
||||
for {
|
||||
if next.kind == tokenBracketR {
|
||||
@ -599,14 +601,14 @@ func (p *parser) parseArray(tok *token) (Node, error) {
|
||||
}
|
||||
}
|
||||
|
||||
return &Array{
|
||||
nodeBase: nodeBase{loc: locFromTokens(tok, next)},
|
||||
return &ast.Array{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokens(tok, next)),
|
||||
Elements: elements,
|
||||
TrailingComma: gotComma,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (p *parser) parseTerminal() (Node, error) {
|
||||
func (p *parser) parseTerminal() (ast.Node, error) {
|
||||
tok := p.pop()
|
||||
switch tok.kind {
|
||||
case tokenAssert, tokenBraceR, tokenBracketR, tokenComma, tokenDot, tokenElse,
|
||||
@ -643,82 +645,82 @@ func (p *parser) parseTerminal() (Node, error) {
|
||||
if err != nil {
|
||||
return nil, makeStaticError("Could not parse floating point number.", tok.loc)
|
||||
}
|
||||
return &LiteralNumber{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.LiteralNumber{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
Value: num,
|
||||
OriginalString: tok.data,
|
||||
}, nil
|
||||
case tokenStringSingle:
|
||||
return &LiteralString{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.LiteralString{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
Value: tok.data,
|
||||
Kind: StringSingle,
|
||||
Kind: ast.StringSingle,
|
||||
}, nil
|
||||
case tokenStringDouble:
|
||||
return &LiteralString{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.LiteralString{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
Value: tok.data,
|
||||
Kind: StringDouble,
|
||||
Kind: ast.StringDouble,
|
||||
}, nil
|
||||
case tokenStringBlock:
|
||||
return &LiteralString{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.LiteralString{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
Value: tok.data,
|
||||
Kind: StringDouble,
|
||||
Kind: ast.StringDouble,
|
||||
BlockIndent: tok.stringBlockIndent,
|
||||
}, nil
|
||||
case tokenVerbatimStringDouble:
|
||||
return &LiteralString{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.LiteralString{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
Value: tok.data,
|
||||
Kind: VerbatimStringDouble,
|
||||
Kind: ast.VerbatimStringDouble,
|
||||
}, nil
|
||||
case tokenVerbatimStringSingle:
|
||||
return &LiteralString{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.LiteralString{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
Value: tok.data,
|
||||
Kind: VerbatimStringSingle,
|
||||
Kind: ast.VerbatimStringSingle,
|
||||
}, nil
|
||||
case tokenFalse:
|
||||
return &LiteralBoolean{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.LiteralBoolean{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
Value: false,
|
||||
}, nil
|
||||
case tokenTrue:
|
||||
return &LiteralBoolean{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.LiteralBoolean{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
Value: true,
|
||||
}, nil
|
||||
case tokenNullLit:
|
||||
return &LiteralNull{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.LiteralNull{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
}, nil
|
||||
|
||||
// Variables
|
||||
case tokenDollar:
|
||||
return &Dollar{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.Dollar{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
}, nil
|
||||
case tokenIdentifier:
|
||||
return &Var{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
Id: Identifier(tok.data),
|
||||
return &ast.Var{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
Id: ast.Identifier(tok.data),
|
||||
}, nil
|
||||
case tokenSelf:
|
||||
return &Self{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.Self{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
}, nil
|
||||
case tokenSuper:
|
||||
next := p.pop()
|
||||
var index Node
|
||||
var id *Identifier
|
||||
var index ast.Node
|
||||
var id *ast.Identifier
|
||||
switch next.kind {
|
||||
case tokenDot:
|
||||
fieldID, err := p.popExpect(tokenIdentifier)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
id = (*Identifier)(&fieldID.data)
|
||||
id = (*ast.Identifier)(&fieldID.data)
|
||||
case tokenBracketL:
|
||||
var err error
|
||||
index, err = p.parse(maxPrecedence)
|
||||
@ -732,8 +734,8 @@ func (p *parser) parseTerminal() (Node, error) {
|
||||
default:
|
||||
return nil, makeStaticError("Expected . or [ after super.", tok.loc)
|
||||
}
|
||||
return &SuperIndex{
|
||||
nodeBase: nodeBase{loc: tok.loc},
|
||||
return &ast.SuperIndex{
|
||||
NodeBase: ast.NewNodeBaseLoc(tok.loc),
|
||||
Index: index,
|
||||
Id: id,
|
||||
}, nil
|
||||
@ -742,11 +744,11 @@ func (p *parser) parseTerminal() (Node, error) {
|
||||
return nil, makeStaticError(fmt.Sprintf("INTERNAL ERROR: Unknown tok kind: %v", tok.kind), tok.loc)
|
||||
}
|
||||
|
||||
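parseTerminal is essentially one switch from token kind to literal node: numbers go through float parsing, the string forms keep the lexer's already-unquoted data, and true/false/null become their literal nodes. A condensed sketch with a toy literal type rather than the real LiteralNumber/LiteralString/LiteralBoolean/LiteralNull nodes:

package main

import (
	"fmt"
	"strconv"
)

// literal is an illustrative union of the nodes parseTerminal builds.
type literal struct {
	kind string
	val  interface{}
}

// terminal mirrors the shape of the switch above: one token kind, one literal
// node. String data arrives from the lexer with quotes already stripped.
func terminal(kind, data string) (literal, error) {
	switch kind {
	case "number":
		n, err := strconv.ParseFloat(data, 64)
		if err != nil {
			return literal{}, fmt.Errorf("could not parse floating point number %q", data)
		}
		return literal{"number", n}, nil
	case "stringSingle", "stringDouble", "stringBlock":
		return literal{"string", data}, nil
	case "true", "false":
		return literal{"boolean", kind == "true"}, nil
	case "null":
		return literal{"null", nil}, nil
	}
	return literal{}, fmt.Errorf("unexpected token kind %q", kind)
}

func main() {
	fmt.Println(terminal("number", "3.25")) // {number 3.25} <nil>
	fmt.Println(terminal("true", ""))       // {boolean true} <nil>
}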
func (p *parser) parsingFailure(msg string, tok *token) (Node, error) {
|
||||
func (p *parser) parsingFailure(msg string, tok *token) (ast.Node, error) {
|
||||
return nil, makeStaticError(msg, tok.loc)
|
||||
}
|
||||
|
||||
func (p *parser) parse(prec precedence) (Node, error) {
|
||||
func (p *parser) parse(prec precedence) (ast.Node, error) {
|
||||
begin := p.peek()
|
||||
|
||||
switch begin.kind {
|
||||
@ -758,7 +760,7 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var msg Node
|
||||
var msg ast.Node
|
||||
if p.peek().kind == tokenOperator && p.peek().data == ":" {
|
||||
p.pop()
|
||||
msg, err = p.parse(maxPrecedence)
|
||||
@ -774,8 +776,8 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &Assert{
|
||||
nodeBase: nodeBase{loc: locFromTokenAST(begin, rest)},
|
||||
return &ast.Assert{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokenAST(begin, rest)),
|
||||
Cond: cond,
|
||||
Message: msg,
|
||||
Rest: rest,
|
||||
@ -787,8 +789,8 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &Error{
|
||||
nodeBase: nodeBase{loc: locFromTokenAST(begin, expr)},
|
||||
return &ast.Error{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokenAST(begin, expr)),
|
||||
Expr: expr,
|
||||
}, nil
|
||||
|
||||
@ -806,7 +808,7 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var branchFalse Node
|
||||
var branchFalse ast.Node
|
||||
lr := locFromTokenAST(begin, branchTrue)
|
||||
if p.peek().kind == tokenElse {
|
||||
p.pop()
|
||||
@ -816,8 +818,8 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
}
|
||||
lr = locFromTokenAST(begin, branchFalse)
|
||||
}
|
||||
return &Conditional{
|
||||
nodeBase: nodeBase{loc: lr},
|
||||
return &ast.Conditional{
|
||||
NodeBase: ast.NewNodeBaseLoc(lr),
|
||||
Cond: cond,
|
||||
BranchTrue: branchTrue,
|
||||
BranchFalse: branchFalse,
|
||||
@ -835,8 +837,8 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &Function{
|
||||
nodeBase: nodeBase{loc: locFromTokenAST(begin, body)},
|
||||
return &ast.Function{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokenAST(begin, body)),
|
||||
Parameters: params,
|
||||
TrailingComma: gotComma,
|
||||
Body: body,
|
||||
@ -850,9 +852,9 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if lit, ok := body.(*LiteralString); ok {
|
||||
return &Import{
|
||||
nodeBase: nodeBase{loc: locFromTokenAST(begin, body)},
|
||||
if lit, ok := body.(*ast.LiteralString); ok {
|
||||
return &ast.Import{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokenAST(begin, body)),
|
||||
File: lit.Value,
|
||||
}, nil
|
||||
}
|
||||
@ -864,9 +866,9 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if lit, ok := body.(*LiteralString); ok {
|
||||
return &ImportStr{
|
||||
nodeBase: nodeBase{loc: locFromTokenAST(begin, body)},
|
||||
if lit, ok := body.(*ast.LiteralString); ok {
|
||||
return &ast.ImportStr{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokenAST(begin, body)),
|
||||
File: lit.Value,
|
||||
}, nil
|
||||
}
|
||||
@ -874,7 +876,7 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
|
||||
case tokenLocal:
|
||||
p.pop()
|
||||
var binds LocalBinds
|
||||
var binds ast.LocalBinds
|
||||
for {
|
||||
err := p.parseBind(&binds)
|
||||
if err != nil {
|
||||
@ -892,16 +894,16 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &Local{
|
||||
nodeBase: nodeBase{loc: locFromTokenAST(begin, body)},
|
||||
return &ast.Local{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokenAST(begin, body)),
|
||||
Binds: binds,
|
||||
Body: body,
|
||||
}, nil
|
||||
|
||||
default:
|
||||
// Unary operator
|
||||
// ast.Unary operator
|
||||
if begin.kind == tokenOperator {
|
||||
uop, ok := uopMap[begin.data]
|
||||
uop, ok := ast.UopMap[begin.data]
|
||||
if !ok {
|
||||
return nil, makeStaticError(fmt.Sprintf("Not a unary operator: %v", begin.data), begin.loc)
|
||||
}
|
||||
@ -911,8 +913,8 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &Unary{
|
||||
nodeBase: nodeBase{loc: locFromTokenAST(op, expr)},
|
||||
return &ast.Unary{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokenAST(op, expr)),
|
||||
Op: uop,
|
||||
Expr: expr,
|
||||
}, nil
|
||||
@ -932,7 +934,7 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
for {
|
||||
// Then next token must be a binary operator.
|
||||
|
||||
var bop BinaryOp
|
||||
var bop ast.BinaryOp
|
||||
|
||||
// Check precedence is correct for this level. If we're parsing operators
|
||||
// with higher precedence, then return lhs and let lower levels deal with
|
||||
@ -955,7 +957,7 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
return lhs, nil
|
||||
}
|
||||
var ok bool
|
||||
bop, ok = bopMap[p.peek().data]
|
||||
bop, ok = ast.BopMap[p.peek().data]
|
||||
if !ok {
|
||||
return nil, makeStaticError(fmt.Sprintf("Not a binary operator: %v", p.peek().data), p.peek().loc)
|
||||
}
|
||||
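The loop above is classic precedence climbing: parse an operand, then keep folding in binary operators while their precedence matches the current level, recursing for the right-hand side. A tiny self-contained version over arithmetic tokens (numbering precedence upwards rather than counting down from maxPrecedence as the real parser does):

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// prec gives each operator a binding strength; higher binds tighter.
var prec = map[string]int{"+": 1, "*": 2}

type exprParser struct {
	toks []string
	pos  int
}

func (p *exprParser) peek() string {
	if p.pos < len(p.toks) {
		return p.toks[p.pos]
	}
	return ""
}

// parse folds in operators at least as strong as min, recursing with a higher
// threshold for the right-hand side so that * binds tighter than +.
func (p *exprParser) parse(min int) float64 {
	lhs, _ := strconv.ParseFloat(p.toks[p.pos], 64) // error handling omitted
	p.pos++
	for prec[p.peek()] >= min {
		op := p.peek()
		p.pos++
		rhs := p.parse(prec[op] + 1)
		if op == "+" {
			lhs += rhs
		} else {
			lhs *= rhs
		}
	}
	return lhs
}

func main() {
	p := &exprParser{toks: strings.Fields("1 + 2 * 3 + 4")}
	fmt.Println(p.parse(1)) // 11
}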
@ -976,7 +978,7 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
switch op.kind {
|
||||
case tokenBracketL:
|
||||
// handle slice
|
||||
var indexes [3]Node
|
||||
var indexes [3]ast.Node
|
||||
colonsConsumed := 0
|
||||
|
||||
var end *token
|
||||
@ -1009,21 +1011,21 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
}
|
||||
if colonsConsumed == 0 && readyForNextIndex {
|
||||
// example: target[]
|
||||
return p.parsingFailure("Index requires an expression", end)
|
||||
return p.parsingFailure("ast.Index requires an expression", end)
|
||||
}
|
||||
isSlice := colonsConsumed > 0
|
||||
|
||||
if isSlice {
|
||||
lhs = &Slice{
|
||||
nodeBase: nodeBase{loc: locFromTokens(begin, end)},
|
||||
lhs = &ast.Slice{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokens(begin, end)),
|
||||
Target: lhs,
|
||||
BeginIndex: indexes[0],
|
||||
EndIndex: indexes[1],
|
||||
Step: indexes[2],
|
||||
}
|
||||
} else {
|
||||
lhs = &Index{
|
||||
nodeBase: nodeBase{loc: locFromTokens(begin, end)},
|
||||
lhs = &ast.Index{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokens(begin, end)),
|
||||
Target: lhs,
|
||||
Index: indexes[0],
|
||||
}
|
||||
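Inside tokenBracketL the parser gathers up to three expressions separated by colons: zero colons yields an Index node, one or more a Slice node with begin, end and step. A sketch of the resulting semantics applied directly to a Go slice, using -1 for omitted parts (the real AST represents them as nil):

package main

import "fmt"

// sliceOrIndex mirrors the distinction above: a[i] with no colons becomes an
// Index node, while a[b:c:d] (any part may be omitted) becomes a Slice node.
func sliceOrIndex(xs []int, begin, end, step, colons int) []int {
	if colons == 0 {
		return []int{xs[begin]}
	}
	if begin < 0 {
		begin = 0
	}
	if end < 0 {
		end = len(xs)
	}
	if step < 0 {
		step = 1
	}
	var out []int
	for i := begin; i < end; i += step {
		out = append(out, xs[i])
	}
	return out
}

func main() {
	xs := []int{10, 11, 12, 13, 14, 15}
	fmt.Println(sliceOrIndex(xs, 2, -1, -1, 0)) // index a[2]     -> [12]
	fmt.Println(sliceOrIndex(xs, 1, 5, 2, 2))   // slice a[1:5:2] -> [11 13]
}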
@ -1033,9 +1035,9 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
id := Identifier(fieldID.data)
|
||||
lhs = &Index{
|
||||
nodeBase: nodeBase{loc: locFromTokens(begin, fieldID)},
|
||||
id := ast.Identifier(fieldID.data)
|
||||
lhs = &ast.Index{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokens(begin, fieldID)),
|
||||
Target: lhs,
|
||||
Id: &id,
|
||||
}
|
||||
@ -1049,8 +1051,8 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
p.pop()
|
||||
tailStrict = true
|
||||
}
|
||||
lhs = &Apply{
|
||||
nodeBase: nodeBase{loc: locFromTokens(begin, end)},
|
||||
lhs = &ast.Apply{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokens(begin, end)),
|
||||
Target: lhs,
|
||||
Arguments: args,
|
||||
TrailingComma: gotComma,
|
||||
@ -1061,8 +1063,8 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
lhs = &ApplyBrace{
|
||||
nodeBase: nodeBase{loc: locFromTokens(begin, end)},
|
||||
lhs = &ast.ApplyBrace{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokens(begin, end)),
|
||||
Left: lhs,
|
||||
Right: obj,
|
||||
}
|
||||
@ -1071,8 +1073,8 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
lhs = &Binary{
|
||||
nodeBase: nodeBase{loc: locFromTokenAST(begin, rhs)},
|
||||
lhs = &ast.Binary{
|
||||
NodeBase: ast.NewNodeBaseLoc(locFromTokenAST(begin, rhs)),
|
||||
Left: lhs,
|
||||
Op: bop,
|
||||
Right: rhs,
|
||||
@ -1084,7 +1086,7 @@ func (p *parser) parse(prec precedence) (Node, error) {
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
func parse(t tokens) (Node, error) {
|
||||
func parse(t tokens) (ast.Node, error) {
|
||||
p := makeParser(t)
|
||||
expr, err := p.parse(maxPrecedence)
|
||||
if err != nil {
|
||||
|
||||
@ -220,7 +220,7 @@ var errorTests = []testError{
|
||||
|
||||
{`a[(b c)]`, `test:1:6-7 Expected token ")" but got (IDENTIFIER, "c")`},
|
||||
{`a[b c]`, `test:1:5-6 Expected token "]" but got (IDENTIFIER, "c")`},
|
||||
{`a[]`, `test:1:3-4 Index requires an expression`},
|
||||
{`a[]`, `test:1:3-4 ast.Index requires an expression`},
|
||||
{`a[42:42:42:42]`, `test:1:11-12 Invalid slice: too many colons`},
|
||||
{`a[42:42::42]`, `test:1:8-10 Invalid slice: too many colons`},
|
||||
|
||||
|
||||
@ -16,6 +16,8 @@ limitations under the License.
|
||||
|
||||
package jsonnet
|
||||
|
||||
import "github.com/google/go-jsonnet/ast"
|
||||
|
||||
// RuntimeError is an error discovered during evaluation of the program
|
||||
type RuntimeError struct {
|
||||
StackTrace []TraceFrame
|
||||
@ -38,7 +40,7 @@ func (err RuntimeError) Error() string {
|
||||
// TraceFrame is tracing information about a single frame of the call stack.
|
||||
// TODO(sbarzowski) the difference from TraceElement. Do we even need this?
|
||||
type TraceFrame struct {
|
||||
Loc LocationRange
|
||||
Loc ast.LocationRange
|
||||
Name string
|
||||
}
|
||||
|
||||
@ -54,13 +56,12 @@ func traceElementToTraceFrame(trace *TraceElement) TraceFrame {
|
||||
}
|
||||
|
||||
type TraceContext struct {
|
||||
|
||||
// Human readable name - e.g. function <foo>
|
||||
Name string
|
||||
}
|
||||
|
||||
// TODO(sbarzowski) better name
|
||||
type TraceElement struct {
|
||||
loc *LocationRange
|
||||
loc *ast.LocationRange
|
||||
context *TraceContext
|
||||
}
|
||||
|
||||
@ -18,20 +18,16 @@ package jsonnet
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
)
|
||||
|
||||
func (i *IdentifierSet) Append(idents Identifiers) {
|
||||
for _, ident := range idents {
|
||||
i.Add(ident)
|
||||
}
|
||||
}
|
||||
"github.com/google/go-jsonnet/ast"
|
||||
)
|
||||
|
||||
type analysisState struct {
|
||||
err error
|
||||
freeVars IdentifierSet
|
||||
freeVars ast.IdentifierSet
|
||||
}
|
||||
|
||||
func visitNext(a Node, inObject bool, vars IdentifierSet, state *analysisState) {
|
||||
func visitNext(a ast.Node, inObject bool, vars ast.IdentifierSet, state *analysisState) {
|
||||
if state.err != nil {
|
||||
return
|
||||
}
|
||||
@ -39,108 +35,108 @@ func visitNext(a Node, inObject bool, vars IdentifierSet, state *analysisState)
|
||||
state.freeVars.Append(a.FreeVariables())
|
||||
}
|
||||
|
||||
func analyzeVisit(a Node, inObject bool, vars IdentifierSet) error {
|
||||
s := &analysisState{freeVars: NewIdentifierSet()}
|
||||
func analyzeVisit(a ast.Node, inObject bool, vars ast.IdentifierSet) error {
|
||||
s := &analysisState{freeVars: ast.NewIdentifierSet()}
|
||||
|
||||
// TODO(sbarzowski) Test somehow that we're visiting all the nodes
|
||||
switch ast := a.(type) {
|
||||
case *Apply:
|
||||
visitNext(ast.Target, inObject, vars, s)
|
||||
for _, arg := range ast.Arguments {
|
||||
switch a := a.(type) {
|
||||
case *ast.Apply:
|
||||
visitNext(a.Target, inObject, vars, s)
|
||||
for _, arg := range a.Arguments {
|
||||
visitNext(arg, inObject, vars, s)
|
||||
}
|
||||
case *Array:
|
||||
for _, elem := range ast.Elements {
|
||||
case *ast.Array:
|
||||
for _, elem := range a.Elements {
|
||||
visitNext(elem, inObject, vars, s)
|
||||
}
|
||||
case *Binary:
|
||||
visitNext(ast.Left, inObject, vars, s)
|
||||
visitNext(ast.Right, inObject, vars, s)
|
||||
case *Conditional:
|
||||
visitNext(ast.Cond, inObject, vars, s)
|
||||
visitNext(ast.BranchTrue, inObject, vars, s)
|
||||
visitNext(ast.BranchFalse, inObject, vars, s)
|
||||
case *Error:
|
||||
visitNext(ast.Expr, inObject, vars, s)
|
||||
case *Function:
|
||||
case *ast.Binary:
|
||||
visitNext(a.Left, inObject, vars, s)
|
||||
visitNext(a.Right, inObject, vars, s)
|
||||
case *ast.Conditional:
|
||||
visitNext(a.Cond, inObject, vars, s)
|
||||
visitNext(a.BranchTrue, inObject, vars, s)
|
||||
visitNext(a.BranchFalse, inObject, vars, s)
|
||||
case *ast.Error:
|
||||
visitNext(a.Expr, inObject, vars, s)
|
||||
case *ast.Function:
|
||||
// TODO(sbarzowski) check duplicate function parameters
|
||||
// or maybe somewhere else as it doesn't require any context
|
||||
newVars := vars.Clone()
|
||||
for _, param := range ast.Parameters {
|
||||
for _, param := range a.Parameters {
|
||||
newVars.Add(param)
|
||||
}
|
||||
visitNext(ast.Body, inObject, newVars, s)
|
||||
visitNext(a.Body, inObject, newVars, s)
|
||||
// Parameters are free inside the body, but not visible here or outside
|
||||
for _, param := range ast.Parameters {
|
||||
for _, param := range a.Parameters {
|
||||
s.freeVars.Remove(param)
|
||||
}
|
||||
// TODO(sbarzowski) when we have default values of params check them
|
||||
case *Import:
|
||||
case *ast.Import:
|
||||
//nothing to do here
|
||||
case *ImportStr:
|
||||
case *ast.ImportStr:
|
||||
//nothing to do here
|
||||
case *SuperIndex:
|
||||
case *ast.SuperIndex:
|
||||
if !inObject {
|
||||
return makeStaticError("Can't use super outside of an object.", ast.loc)
|
||||
return makeStaticError("Can't use super outside of an object.", *a.Loc())
|
||||
}
|
||||
visitNext(ast.Index, inObject, vars, s)
|
||||
case *Index:
|
||||
visitNext(ast.Target, inObject, vars, s)
|
||||
visitNext(ast.Index, inObject, vars, s)
|
||||
case *Local:
|
||||
visitNext(a.Index, inObject, vars, s)
|
||||
case *ast.Index:
|
||||
visitNext(a.Target, inObject, vars, s)
|
||||
visitNext(a.Index, inObject, vars, s)
|
||||
case *ast.Local:
|
||||
newVars := vars.Clone()
|
||||
for _, bind := range ast.Binds {
|
||||
for _, bind := range a.Binds {
|
||||
newVars.Add(bind.Variable)
|
||||
}
|
||||
// Binds in local can be mutually or even self recursive
|
||||
for _, bind := range ast.Binds {
|
||||
for _, bind := range a.Binds {
|
||||
visitNext(bind.Body, inObject, newVars, s)
|
||||
}
|
||||
visitNext(ast.Body, inObject, newVars, s)
|
||||
visitNext(a.Body, inObject, newVars, s)
|
||||
|
||||
// Any usage of newly created variables inside are considered free
|
||||
// but they are not here or outside
|
||||
for _, bind := range ast.Binds {
|
||||
for _, bind := range a.Binds {
|
||||
s.freeVars.Remove(bind.Variable)
|
||||
}
|
||||
case *LiteralBoolean:
|
||||
case *ast.LiteralBoolean:
|
||||
//nothing to do here
|
||||
case *LiteralNull:
|
||||
case *ast.LiteralNull:
|
||||
//nothing to do here
|
||||
case *LiteralNumber:
|
||||
case *ast.LiteralNumber:
|
||||
//nothing to do here
|
||||
case *LiteralString:
|
||||
case *ast.LiteralString:
|
||||
//nothing to do here
|
||||
case *DesugaredObject:
|
||||
for _, field := range ast.Fields {
|
||||
case *ast.DesugaredObject:
|
||||
for _, field := range a.Fields {
|
||||
// Field names are calculated *outside* of the object
|
||||
visitNext(field.Name, inObject, vars, s)
|
||||
visitNext(field.Body, true, vars, s)
|
||||
}
|
||||
for _, assert := range ast.Asserts {
|
||||
for _, assert := range a.Asserts {
|
||||
visitNext(assert, true, vars, s)
|
||||
}
|
||||
case *ObjectComprehensionSimple:
|
||||
case *ast.ObjectComprehensionSimple:
|
||||
// TODO (sbarzowski) this
|
||||
panic("Comprehensions not supported yet")
|
||||
case *Self:
|
||||
case *ast.Self:
|
||||
if !inObject {
|
||||
return makeStaticError("Can't use self outside of an object.", ast.loc)
|
||||
return makeStaticError("Can't use self outside of an object.", *a.Loc())
|
||||
}
|
||||
case *Unary:
|
||||
visitNext(ast.Expr, inObject, vars, s)
|
||||
case *Var:
|
||||
if !vars.Contains(ast.Id) {
|
||||
return makeStaticError(fmt.Sprintf("Unknown variable: %v", ast.Id), ast.loc)
|
||||
case *ast.Unary:
|
||||
visitNext(a.Expr, inObject, vars, s)
|
||||
case *ast.Var:
|
||||
if !vars.Contains(a.Id) {
|
||||
return makeStaticError(fmt.Sprintf("Unknown variable: %v", a.Id), *a.Loc())
|
||||
}
|
||||
s.freeVars.Add(ast.Id)
|
||||
s.freeVars.Add(a.Id)
|
||||
default:
|
||||
panic(fmt.Sprintf("Unexpected node %#v", a))
|
||||
}
|
||||
a.setFreeVariables(s.freeVars.ToSlice())
|
||||
a.SetFreeVariables(s.freeVars.ToSlice())
|
||||
return s.err
|
||||
}
|
||||
|
||||
func analyze(ast Node) error {
|
||||
return analyzeVisit(ast, false, NewIdentifierSet("std"))
|
||||
func analyze(node ast.Node) error {
|
||||
return analyzeVisit(node, false, ast.NewIdentifierSet("std"))
|
||||
}
|
||||
|
||||
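analyzeVisit computes free variables bottom-up: it walks each node with the set of variables currently in scope, records uses of anything unbound, and strips function parameters and local binds on the way out so they stay free inside but invisible outside. A self-contained sketch over a toy three-node expression language; varE, funcE and localE are illustrative, not the real AST:

package main

import "fmt"

// varE, funcE and localE form a toy expression language standing in for the
// jsonnet AST: variables, single-parameter functions and local bindings.
type expr interface{}

type varE struct{ name string }

type funcE struct {
	param string
	body  expr
}

type localE struct {
	name       string
	bind, body expr
}

// freeVars mirrors analyzeVisit: walk the tree with the set of variables in
// scope and record uses of anything unbound.
func freeVars(e expr, bound map[string]bool) map[string]bool {
	free := map[string]bool{}
	switch e := e.(type) {
	case varE:
		if !bound[e.name] {
			free[e.name] = true
		}
	case funcE:
		inner := clone(bound)
		inner[e.param] = true
		for v := range freeVars(e.body, inner) {
			free[v] = true // e.param cannot appear here: it was bound above
		}
	case localE:
		inner := clone(bound)
		inner[e.name] = true // binds may be self- or mutually recursive
		for v := range freeVars(e.bind, inner) {
			free[v] = true
		}
		for v := range freeVars(e.body, inner) {
			free[v] = true
		}
	}
	return free
}

func clone(m map[string]bool) map[string]bool {
	out := map[string]bool{}
	for k, v := range m {
		out[k] = v
	}
	return out
}

func main() {
	// local x = y; x   -> y is free, x is not
	e := localE{name: "x", bind: varE{"y"}, body: varE{"x"}}
	fmt.Println(freeVars(e, map[string]bool{"std": true})) // map[y:true]
}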
@ -16,14 +16,18 @@ limitations under the License.
|
||||
|
||||
package jsonnet
|
||||
|
||||
import "testing"
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-jsonnet/ast"
|
||||
)
|
||||
|
||||
// func dummyNodeBase() astNodeBase {
|
||||
// return astNode
|
||||
// }
|
||||
|
||||
func TestSimpleNull(t *testing.T) {
|
||||
ast := &LiteralNull{}
|
||||
ast := &ast.LiteralNull{}
|
||||
err := analyze(ast)
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error: %+v", err)
|
||||
@ -33,7 +37,7 @@ func TestSimpleNull(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func hasTheseFreeVars(returned Identifiers, expected Identifiers) bool {
|
||||
func hasTheseFreeVars(returned ast.Identifiers, expected ast.Identifiers) bool {
|
||||
if len(returned) != len(expected) {
|
||||
return false
|
||||
}
|
||||
@ -46,25 +50,25 @@ func hasTheseFreeVars(returned Identifiers, expected Identifiers) bool {
|
||||
}
|
||||
|
||||
func TestSimpleLocal(t *testing.T) {
|
||||
ast := &Local{
|
||||
Binds: LocalBinds{
|
||||
LocalBind{
|
||||
node := &ast.Local{
|
||||
Binds: ast.LocalBinds{
|
||||
ast.LocalBind{
|
||||
Variable: "x",
|
||||
Body: &LiteralNull{},
|
||||
Body: &ast.LiteralNull{},
|
||||
},
|
||||
},
|
||||
Body: &Var{Id: "x"},
|
||||
Body: &ast.Var{Id: "x"},
|
||||
}
|
||||
|
||||
err := analyze(ast)
|
||||
err := analyze(node)
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error: %+v", err)
|
||||
}
|
||||
if ast.FreeVariables() != nil {
|
||||
t.Errorf("Unexpected free variables %+v in root local. Expected none.", ast.FreeVariables())
|
||||
if node.FreeVariables() != nil {
|
||||
t.Errorf("Unexpected free variables %+v in root local. Expected none.", node.FreeVariables())
|
||||
}
|
||||
returned := ast.Body.FreeVariables()
|
||||
expectedVars := Identifiers{"x"}
|
||||
returned := node.Body.FreeVariables()
|
||||
expectedVars := ast.Identifiers{"x"}
|
||||
if !hasTheseFreeVars(returned, expectedVars) {
|
||||
t.Errorf("Unexpected free variables %+v in local body. Expected %+v.", returned, expectedVars)
|
||||
}
|
||||
|
||||
@ -18,6 +18,8 @@ package jsonnet
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/google/go-jsonnet/ast"
|
||||
)
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////
|
||||
@ -26,7 +28,7 @@ import (
|
||||
// StaticError represents an error during parsing/lexing or static analysis.
|
||||
// TODO(sbarzowski) Make it possible to have multiple static errors and warnings
|
||||
type StaticError struct {
|
||||
Loc LocationRange
|
||||
Loc ast.LocationRange
|
||||
Msg string
|
||||
}
|
||||
|
||||
@ -34,11 +36,11 @@ func makeStaticErrorMsg(msg string) StaticError {
|
||||
return StaticError{Msg: msg}
|
||||
}
|
||||
|
||||
func makeStaticErrorPoint(msg string, fn string, l Location) StaticError {
|
||||
return StaticError{Msg: msg, Loc: makeLocationRange(fn, l, l)}
|
||||
func makeStaticErrorPoint(msg string, fn string, l ast.Location) StaticError {
|
||||
return StaticError{Msg: msg, Loc: ast.MakeLocationRange(fn, l, l)}
|
||||
}
|
||||
|
||||
func makeStaticError(msg string, lr LocationRange) StaticError {
|
||||
func makeStaticError(msg string, lr ast.LocationRange) StaticError {
|
||||
return StaticError{Msg: msg, Loc: lr}
|
||||
}
|
||||
|
||||

|
||||
thunks.go
@ -16,6 +16,8 @@ limitations under the License.
|
||||
|
||||
package jsonnet
|
||||
|
||||
import "github.com/google/go-jsonnet/ast"
|
||||
|
||||
// readyValue
|
||||
// -------------------------------------
|
||||
|
||||
@ -41,9 +43,9 @@ func (rv *readyValue) bindToObject(sb selfBinding, origBinding bindingFrame) pot
|
||||
|
||||
// thunk holds code and environment in which the code is supposed to be evaluated
|
||||
type thunk struct {
|
||||
name Identifier
|
||||
name ast.Identifier
|
||||
env environment
|
||||
body Node
|
||||
body ast.Node
|
||||
}
|
||||
|
||||
// TODO(sbarzowski) feedback from dcunnin:
|
||||
@ -51,7 +53,7 @@ type thunk struct {
|
||||
// Maybe call thunk 'exprThunk' (or astThunk but then it looks like an AST node).
|
||||
// Then call cachedThunk just thunk?
|
||||
// Or, call this makeCachedExprThunk because that's what it really is.
|
||||
func makeThunk(name Identifier, env environment, body Node) *cachedThunk {
|
||||
func makeThunk(name ast.Identifier, env environment, body ast.Node) *cachedThunk {
|
||||
return makeCachedThunk(&thunk{
|
||||
name: name,
|
||||
env: env,
|
||||
@ -126,7 +128,7 @@ func makeErrorThunk(err error) *errorThunk {
|
||||
// -------------------------------------
|
||||
|
||||
type codeUnboundField struct {
|
||||
body Node
|
||||
body ast.Node
|
||||
}
|
||||
|
||||
func (f *codeUnboundField) bindToObject(sb selfBinding, origBinding bindingFrame) potentialValue {
|
||||
@ -141,7 +143,7 @@ type closure struct {
|
||||
// base environment of a closure
|
||||
// arguments should be added to it, before executing it
|
||||
env environment
|
||||
function *Function
|
||||
function *ast.Function
|
||||
}
|
||||
|
||||
func (closure *closure) EvalCall(arguments callArguments, e *evaluator) (value, error) {
|
||||
@ -161,11 +163,11 @@ func (closure *closure) EvalCall(arguments callArguments, e *evaluator) (value,
|
||||
return e.evalInCleanEnv(&context, &calledEnvironment, closure.function.Body)
|
||||
}
|
||||
|
||||
func (closure *closure) Parameters() Identifiers {
|
||||
func (closure *closure) Parameters() ast.Identifiers {
|
||||
return closure.function.Parameters
|
||||
}
|
||||
|
||||
func makeClosure(env environment, function *Function) *closure {
|
||||
func makeClosure(env environment, function *ast.Function) *closure {
|
||||
return &closure{
|
||||
env: env,
|
||||
function: function,
|
||||
|
||||
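makeThunk above wraps an unevaluated body and its environment in a cachedThunk so the expression is computed at most once, when first forced. An illustrative reduction of that idea to a closure plus a done flag:

package main

import "fmt"

// cachedThunk is an illustrative reduction of the interpreter's lazy,
// memoised thunks: the wrapped expression is evaluated at most once.
type cachedThunk struct {
	compute func() int
	done    bool
	value   int
}

func (t *cachedThunk) get() int {
	if !t.done {
		t.value = t.compute()
		t.done = true
		t.compute = nil // drop the closure (and its captured environment)
	}
	return t.value
}

func main() {
	calls := 0
	x := &cachedThunk{compute: func() int { calls++; return 6 * 7 }}
	fmt.Println(x.get(), x.get(), calls) // 42 42 1
}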
value.go
@ -15,7 +15,11 @@ limitations under the License.
|
||||
*/
|
||||
package jsonnet
|
||||
|
||||
import "fmt"
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/google/go-jsonnet/ast"
|
||||
)
|
||||
|
||||
// value represents a concrete jsonnet value of a specific type.
|
||||
// Various operations on values are allowed, depending on their type.
|
||||
@ -46,7 +50,7 @@ type potentialValue interface {
|
||||
}
|
||||
|
||||
// A set of variables with associated potentialValues.
|
||||
type bindingFrame map[Identifier]potentialValue
|
||||
type bindingFrame map[ast.Identifier]potentialValue
|
||||
|
||||
type valueBase struct{}
|
||||
|
||||
@ -119,7 +123,7 @@ func (*valueNull) typename() string {
|
||||
return "null"
|
||||
}
|
||||
|
||||
// Array
|
||||
// ast.Array
|
||||
// -------------------------------------
|
||||
|
||||
type valueArray struct {
|
||||
@ -137,7 +141,7 @@ func (*valueArray) typename() string {
|
||||
return "array"
|
||||
}
|
||||
|
||||
// Function
|
||||
// ast.Function
|
||||
// -------------------------------------
|
||||
|
||||
type valueFunction struct {
|
||||
@ -148,14 +152,14 @@ type valueFunction struct {
|
||||
// TODO(sbarzowski) better name?
|
||||
type evalCallable interface {
|
||||
EvalCall(args callArguments, e *evaluator) (value, error)
|
||||
Parameters() Identifiers
|
||||
Parameters() ast.Identifiers
|
||||
}
|
||||
|
||||
func (f *valueFunction) call(args callArguments) potentialValue {
|
||||
return makeCallThunk(f.ec, args)
|
||||
}
|
||||
|
||||
func (f *valueFunction) parameters() Identifiers {
|
||||
func (f *valueFunction) parameters() ast.Identifiers {
|
||||
return f.ec.Parameters()
|
||||
}
|
||||
|
||||
@ -233,7 +237,7 @@ type valueSimpleObject struct {
|
||||
valueObjectBase
|
||||
upValues bindingFrame
|
||||
fields valueSimpleObjectFieldMap
|
||||
asserts []Node
|
||||
asserts []ast.Node
|
||||
}
|
||||
|
||||
func (o *valueSimpleObject) index(e *evaluator, field string) (value, error) {
|
||||
@ -244,7 +248,7 @@ func (*valueSimpleObject) inheritanceSize() int {
|
||||
return 1
|
||||
}
|
||||
|
||||
func makeValueSimpleObject(b bindingFrame, fields valueSimpleObjectFieldMap, asserts Nodes) *valueSimpleObject {
|
||||
func makeValueSimpleObject(b bindingFrame, fields valueSimpleObjectFieldMap, asserts ast.Nodes) *valueSimpleObject {
|
||||
return &valueSimpleObject{
|
||||
upValues: b,
|
||||
fields: fields,
|
||||
@ -257,7 +261,7 @@ type valueSimpleObjectFieldMap map[string]valueSimpleObjectField
|
||||
// TODO(sbarzowski) this is not a value and the name suggests it is...
|
||||
// TODO(sbarzowski) better name? This is basically just a (hide, field) pair.
|
||||
type valueSimpleObjectField struct {
|
||||
hide ObjectFieldHide
|
||||
hide ast.ObjectFieldHide
|
||||
field unboundField
|
||||
}
|
||||
|
||||
@ -350,7 +354,7 @@ func objectIndex(e *evaluator, sb selfBinding, fieldName string) (value, error)
|
||||
return e.evaluate(field.field.bindToObject(fieldSelfBinding, upValues))
|
||||
}
|
||||
|
||||
type fieldHideMap map[string]ObjectFieldHide
|
||||
type fieldHideMap map[string]ast.ObjectFieldHide
|
||||
|
||||
func objectFieldsVisibility(obj valueObject) fieldHideMap {
|
||||
r := make(fieldHideMap)
|
||||
@ -374,7 +378,7 @@ func objectFieldsVisibility(obj valueObject) fieldHideMap {
|
||||
func objectFields(obj valueObject, manifesting bool) []string {
|
||||
var r []string
|
||||
for fieldName, hide := range objectFieldsVisibility(obj) {
|
||||
if !manifesting || hide != ObjectFieldHidden {
|
||||
if !manifesting || hide != ast.ObjectFieldHidden {
|
||||
r = append(r, fieldName)
|
||||
}
|
||||
}
|
||||
|
||||
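objectFields above filters by the per-field hide flag: when manifesting to JSON, "::" fields are dropped and everything else is kept. A small sketch of that filter, with hide standing in for ast.ObjectFieldHide:

package main

import (
	"fmt"
	"sort"
)

// hide is an illustrative counterpart of ast.ObjectFieldHide.
type hide int

const (
	inherit hide = iota // ":"
	hidden              // "::"
	visible             // ":::"
)

// visibleFields mirrors objectFields: when manifesting to JSON, fields
// marked "::" are skipped; everything else is emitted.
func visibleFields(fields map[string]hide, manifesting bool) []string {
	var out []string
	for name, h := range fields {
		if !manifesting || h != hidden {
			out = append(out, name)
		}
	}
	sort.Strings(out)
	return out
}

func main() {
	f := map[string]hide{"a": inherit, "b": hidden, "c": visible}
	fmt.Println(visibleFields(f, true))  // [a c]
	fmt.Println(visibleFields(f, false)) // [a b c]
}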
vm.go
@ -20,6 +20,8 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"runtime/debug"
|
||||
|
||||
"github.com/google/go-jsonnet/ast"
|
||||
)
|
||||
|
||||
// Note: There are no garbage collection params because we're using the native
|
||||
@ -67,11 +69,11 @@ func (vm *VM) ExtCode(key string, val string) {
|
||||
}
|
||||
|
||||
func (vm *VM) evaluateSnippet(filename string, snippet string) (string, error) {
|
||||
ast, err := snippetToAST(filename, snippet)
|
||||
node, err := snippetToAST(filename, snippet)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
output, err := evaluate(ast, vm.ext, vm.MaxStack, &FileImporter{})
|
||||
output, err := evaluate(node, vm.ext, vm.MaxStack, &FileImporter{})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
@ -95,23 +97,23 @@ func (vm *VM) EvaluateSnippet(filename string, snippet string) (json string, for
|
||||
return json, nil
|
||||
}
|
||||
|
||||
func snippetToAST(filename string, snippet string) (Node, error) {
|
||||
func snippetToAST(filename string, snippet string) (ast.Node, error) {
|
||||
tokens, err := lex(filename, snippet)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ast, err := parse(tokens)
|
||||
node, err := parse(tokens)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// fmt.Println(ast.(dumpable).dump())
|
||||
err = desugarFile(&ast)
|
||||
err = desugarFile(&node)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
err = analyze(ast)
|
||||
err = analyze(node)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return ast, nil
|
||||
return node, nil
|
||||
}
|
||||
|
||||
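snippetToAST above is the front half of the pipeline (lex, parse, desugar, analyze) that evaluateSnippet drives before evaluation and manifestation. For library users the whole pipeline sits behind the VM type; a minimal usage sketch, assuming the package's MakeVM constructor and import path, which do not appear in this diff (only ExtCode and EvaluateSnippet do):

package main

import (
	"fmt"
	"log"

	"github.com/google/go-jsonnet"
)

func main() {
	vm := jsonnet.MakeVM()
	vm.ExtCode("n", "3") // external variable, as with ExtCode above

	out, err := vm.EvaluateSnippet("example.jsonnet",
		`{ squares: [x * x for x in std.range(1, std.extVar("n"))] }`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out) // prints the manifested JSON: squares = [1, 4, 9]
}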