Mirror of https://github.com/google/go-jsonnet.git (synced 2025-09-28 17:01:02 +02:00)
parent 5ae4798d8d
commit ed5f280c59
ast/ast.go  (48 changed lines)
@@ -23,6 +23,8 @@ import (
 // Identifier represents a variable / parameter / field name.
 //+gen set
 type Identifier string
+
+// Identifiers represents an Identifier slice.
 type Identifiers []Identifier

 // TODO(jbeda) implement interning of identifiers if necessary. The C++
@@ -30,8 +32,10 @@ type Identifiers []Identifier

 // ---------------------------------------------------------------------------

+// Context represents the surrounding context of a node (e.g. a function it's in)
 type Context *string

+// Node represents a node in the AST.
 type Node interface {
 Context() Context
 Loc() *LocationRange
@@ -39,16 +43,21 @@ type Node interface {
 SetFreeVariables(Identifiers)
 SetContext(Context)
 }
+
+// Nodes represents a Node slice.
 type Nodes []Node

 // ---------------------------------------------------------------------------

+// NodeBase holds fields common to all node types.
 type NodeBase struct {
 loc LocationRange
 context Context
 freeVariables Identifiers
 }

+// NewNodeBase creates a new NodeBase from initial LocationRange and
+// Identifiers.
 func NewNodeBase(loc LocationRange, freeVariables Identifiers) NodeBase {
 return NodeBase{
 loc: loc,
@@ -56,6 +65,7 @@ func NewNodeBase(loc LocationRange, freeVariables Identifiers) NodeBase {
 }
 }

+// NewNodeBaseLoc creates a new NodeBase from an initial LocationRange.
 func NewNodeBaseLoc(loc LocationRange) NodeBase {
 return NodeBase{
 loc: loc,
@@ -63,32 +73,39 @@ func NewNodeBaseLoc(loc LocationRange) NodeBase {
 }
 }

+// Loc returns a NodeBase's loc.
 func (n *NodeBase) Loc() *LocationRange {
 return &n.loc
 }

+// FreeVariables returns a NodeBase's freeVariables.
 func (n *NodeBase) FreeVariables() Identifiers {
 return n.freeVariables
 }

+// SetFreeVariables sets a NodeBase's freeVariables.
 func (n *NodeBase) SetFreeVariables(idents Identifiers) {
 n.freeVariables = idents
 }

+// Context returns a NodeBase's context.
 func (n *NodeBase) Context() Context {
 return n.context
 }

+// SetContext sets a NodeBase's context.
 func (n *NodeBase) SetContext(context Context) {
 n.context = context
 }

 // ---------------------------------------------------------------------------

+// IfSpec represents an if-specification in a comprehension.
 type IfSpec struct {
 Expr Node
 }

+// ForSpec represents a for-specification in a comprehension.
 // Example:
 // expr for x in arr1 for y in arr2 for z in arr3
 // The order is the same as in python, i.e. the leftmost is the outermost.
@@ -125,11 +142,14 @@ type Apply struct {
 TailStrict bool
 }

+// NamedArgument represents a named argument to function call x=1.
 type NamedArgument struct {
 Name Identifier
 Arg Node
 }

+// Arguments represents positional and named arguments to a function call
+// f(x, y, z=1).
 type Arguments struct {
 Positional Nodes
 Named []NamedArgument
@@ -179,8 +199,10 @@ type Assert struct {

 // ---------------------------------------------------------------------------

+// BinaryOp represents a binary operator.
 type BinaryOp int

+// Binary operators
 const (
 BopMult BinaryOp = iota
 BopDiv
@@ -237,6 +259,7 @@ var bopStrings = []string{
 BopOr: "||",
 }

+// BopMap is a map from binary operator token strings to BinaryOp values.
 var BopMap = map[string]BinaryOp{
 "*": BopMult,
 "/": BopDiv,
@@ -316,11 +339,14 @@ type Function struct {
 Body Node
 }

+// NamedParameter represents an optional named parameter of a function.
 type NamedParameter struct {
 Name Identifier
 DefaultArg Node
 }

+// Parameters represents the required positional parameters and optional named
+// parameters to a function definition.
 type Parameters struct {
 Required Identifiers
 Optional []NamedParameter
@@ -355,6 +381,7 @@ type Index struct {
 Id *Identifier
 }

+// Slice represents an array slice a[begin:end:step].
 type Slice struct {
 NodeBase
 Target Node
@@ -373,6 +400,8 @@ type LocalBind struct {
 Body Node
 Fun *Function
 }
+
+// LocalBinds represents a LocalBind slice.
 type LocalBinds []LocalBind

 // Local represents local x = e; e. After desugaring, functionSugar is false.
@@ -406,8 +435,10 @@ type LiteralNumber struct {

 // ---------------------------------------------------------------------------

+// LiteralStringKind represents the kind of a literal string.
 type LiteralStringKind int

+// Literal string kinds
 const (
 StringSingle LiteralStringKind = iota
 StringDouble
@@ -416,6 +447,8 @@ const (
 VerbatimStringSingle
 )

+// FullyEscaped returns true iff the literal string kind may contain escape
+// sequences that require unescaping.
 func (k LiteralStringKind) FullyEscaped() bool {
 switch k {
 case StringSingle, StringDouble:
@@ -436,8 +469,10 @@ type LiteralString struct {

 // ---------------------------------------------------------------------------

+// ObjectFieldKind represents the kind of an object field.
 type ObjectFieldKind int

+// Kinds of object fields
 const (
 ObjectAssert ObjectFieldKind = iota // assert expr2 [: expr3] where expr3 can be nil
 ObjectFieldID // id:[:[:]] expr2
@@ -449,14 +484,17 @@ const (
 ObjectNullStr // null expr1
 )

+// ObjectFieldHide represents the visibility of an object field.
 type ObjectFieldHide int

+// Object field visibilities
 const (
 ObjectFieldHidden ObjectFieldHide = iota // f:: e
 ObjectFieldInherit // f: e
 ObjectFieldVisible // f::: e
 )

+// ObjectField represents a field of an object or object comprehension.
 // TODO(sbarzowski) consider having separate types for various kinds
 type ObjectField struct {
 Kind ObjectFieldKind
@@ -471,10 +509,12 @@ type ObjectField struct {
 Expr2, Expr3 Node // In scope of the object (can see self).
 }

+// ObjectFieldLocalNoMethod creates a non-method local object field.
 func ObjectFieldLocalNoMethod(id *Identifier, body Node) ObjectField {
 return ObjectField{ObjectLocal, ObjectFieldVisible, false, false, nil, nil, id, nil, false, body, nil}
 }

+// ObjectFields represents an ObjectField slice.
 type ObjectFields []ObjectField

 // Object represents object constructors { f: e ... }.
@@ -489,12 +529,15 @@ type Object struct {

 // ---------------------------------------------------------------------------

+// DesugaredObjectField represents a desugared object field.
 type DesugaredObjectField struct {
 Hide ObjectFieldHide
 Name Node
 Body Node
 PlusSuper bool
 }
+
+// DesugaredObjectFields represents a DesugaredObjectField slice.
 type DesugaredObjectFields []DesugaredObjectField

 // DesugaredObject represents object constructors { f: e ... } after
@@ -544,7 +587,7 @@ type SuperIndex struct {
 Id *Identifier
 }

-// Represents the e in super construct.
+// InSuper represents the e in super construct.
 type InSuper struct {
 NodeBase
 Index Node
@@ -552,8 +595,10 @@ type InSuper struct {

 // ---------------------------------------------------------------------------

+// UnaryOp represents a unary operator.
 type UnaryOp int

+// Unary operators
 const (
 UopNot UnaryOp = iota
 UopBitwiseNot
@@ -568,6 +613,7 @@ var uopStrings = []string{
 UopMinus: "-",
 }

+// UopMap is a map from unary operator token strings to UnaryOp values.
 var UopMap = map[string]UnaryOp{
 "!": UopNot,
 "~": UopBitwiseNot,

@@ -21,6 +21,7 @@ import (
 "fmt"
 )

+// Source represents a source file.
 type Source struct {
 lines []string
 }
@@ -62,6 +63,7 @@ type LocationRange struct {
 file *Source
 }

+// LocationRangeBetween returns a LocationRange containing both a and b.
 func LocationRangeBetween(a, b *LocationRange) LocationRange {
 if a.file != b.file {
 panic("Cannot create a LocationRange between different files")
@@ -93,22 +95,30 @@ func (lr *LocationRange) String() string {
 return fmt.Sprintf("%s(%v)-(%v)", filePrefix, lr.Begin.String(), lr.End.String())
 }

-func (l *LocationRange) WithCode() bool {
-return l.Begin.Line != 0
+// WithCode returns true iff the LocationRange is linked to code.
+// TODO: This is identical to lr.IsSet(). Is it required at all?
+func (lr *LocationRange) WithCode() bool {
+return lr.Begin.Line != 0
 }

-// This is useful for special locations, e.g. manifestation entry point.
+// MakeLocationRangeMessage creates a pseudo-LocationRange with a message but no
+// location information. This is useful for special locations, e.g.
+// manifestation entry point.
 func MakeLocationRangeMessage(msg string) LocationRange {
 return LocationRange{FileName: msg}
 }

+// MakeLocationRange creates a LocationRange.
 func MakeLocationRange(fn string, fc *Source, begin Location, end Location) LocationRange {
 return LocationRange{FileName: fn, file: fc, Begin: begin, End: end}
 }

+// SourceProvider represents a source provider.
+// TODO: Need an explanation of why this exists.
 type SourceProvider struct {
 }

+// GetSnippet returns a code snippet corresponding to loc.
 func (sp *SourceProvider) GetSnippet(loc LocationRange) string {
 var result bytes.Buffer
 if loc.Begin.Line == 0 {
@@ -126,6 +136,8 @@ func (sp *SourceProvider) GetSnippet(loc LocationRange) string {
 return result.String()
 }

+// BuildSource transforms a source file string into a Source struct.
+// TODO: This seems like a job for strings.Split() with a final \n touch-up.
 func BuildSource(s string) *Source {
 var result []string
 var lineBuf bytes.Buffer
@@ -160,7 +172,7 @@ func trimToLine(loc LocationRange, line int) LocationRange {
 return loc
 }

-// lineBeginning returns a part of the line directly before LocationRange
+// LineBeginning returns the part of a line directly before LocationRange
 // for example:
 // local x = foo()
 // ^^^^^ <- LocationRange loc
@@ -176,7 +188,7 @@ func LineBeginning(loc *LocationRange) LocationRange {
 }
 }

-// lineEnding returns a part of the line directly after LocationRange
+// LineEnding returns the part of a line directly after LocationRange
 // for example:
 // local x = foo() + test
 // ^^^^^ <- LocationRange loc
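The hunks above all apply one golint convention: every exported identifier gets a doc comment that starts with the identifier's name, and comments that said lineBeginning or lineEnding are updated to match the exported names LineBeginning and LineEnding. As a stand-alone illustration of that convention, here is a minimal sketch; the package and type names are invented for the example and are not taken from the commit.

// Package astdoc illustrates the doc-comment convention applied above:
// a comment on an exported identifier begins with that identifier's name.
package astdoc

// Op represents a binary operator.
type Op int

// Binary operators.
const (
	// OpAdd is the addition operator; golint flags exported names whose
	// comment is missing or does not start with the name itself.
	OpAdd Op = iota
	// OpSub is the subtraction operator.
	OpSub
)

// String returns a human-readable name for the operator.
func (o Op) String() string {
	switch o {
	case OpAdd:
		return "+"
	case OpSub:
		return "-"
	default:
		return "unknown"
	}
}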
builtins.go  (147 changed lines)
@@ -554,9 +554,8 @@ var builtinExp = liftNumeric(func(f float64) float64 {
 res := math.Exp(f)
 if res == 0 && f > 0 {
 return math.Inf(1)
-} else {
-return res
 }
+return res
 })
 var builtinMantissa = liftNumeric(func(f float64) float64 {
 mantissa, _ := math.Frexp(f)
@@ -720,13 +719,13 @@ func builtinNative(e *evaluator, namep potentialValue) (value, error) {

 }

-type unaryBuiltin func(*evaluator, potentialValue) (value, error)
+type unaryBuiltinFunc func(*evaluator, potentialValue) (value, error)
-type binaryBuiltin func(*evaluator, potentialValue, potentialValue) (value, error)
+type binaryBuiltinFunc func(*evaluator, potentialValue, potentialValue) (value, error)
-type ternaryBuiltin func(*evaluator, potentialValue, potentialValue, potentialValue) (value, error)
+type ternaryBuiltinFunc func(*evaluator, potentialValue, potentialValue, potentialValue) (value, error)

-type UnaryBuiltin struct {
+type unaryBuiltin struct {
 name ast.Identifier
-function unaryBuiltin
+function unaryBuiltinFunc
 parameters ast.Identifiers
 }

@@ -737,22 +736,22 @@ func getBuiltinEvaluator(e *evaluator, name ast.Identifier) *evaluator {
 return &evaluator{i: e.i, trace: &trace}
 }

-func (b *UnaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) {
+func (b *unaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) {
 flatArgs := flattenArgs(args, b.Parameters())
 return b.function(getBuiltinEvaluator(e, b.name), flatArgs[0])
 }

-func (b *UnaryBuiltin) Parameters() Parameters {
+func (b *unaryBuiltin) Parameters() Parameters {
 return Parameters{required: b.parameters}
 }

-func (b *UnaryBuiltin) Name() ast.Identifier {
+func (b *unaryBuiltin) Name() ast.Identifier {
 return b.name
 }

-type BinaryBuiltin struct {
+type binaryBuiltin struct {
 name ast.Identifier
-function binaryBuiltin
+function binaryBuiltinFunc
 parameters ast.Identifiers
 }

@@ -781,35 +780,35 @@ func flattenArgs(args callArguments, params Parameters) []potentialValue {
 return flatArgs
 }

-func (b *BinaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) {
+func (b *binaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) {
 flatArgs := flattenArgs(args, b.Parameters())
 return b.function(getBuiltinEvaluator(e, b.name), flatArgs[0], flatArgs[1])
 }

-func (b *BinaryBuiltin) Parameters() Parameters {
+func (b *binaryBuiltin) Parameters() Parameters {
 return Parameters{required: b.parameters}
 }

-func (b *BinaryBuiltin) Name() ast.Identifier {
+func (b *binaryBuiltin) Name() ast.Identifier {
 return b.name
 }

-type TernaryBuiltin struct {
+type ternaryBuiltin struct {
 name ast.Identifier
-function ternaryBuiltin
+function ternaryBuiltinFunc
 parameters ast.Identifiers
 }

-func (b *TernaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) {
+func (b *ternaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) {
 flatArgs := flattenArgs(args, b.Parameters())
 return b.function(getBuiltinEvaluator(e, b.name), flatArgs[0], flatArgs[1], flatArgs[2])
 }

-func (b *TernaryBuiltin) Parameters() Parameters {
+func (b *ternaryBuiltin) Parameters() Parameters {
 return Parameters{required: b.parameters}
 }

-func (b *TernaryBuiltin) Name() ast.Identifier {
+func (b *ternaryBuiltin) Name() ast.Identifier {
 return b.name
 }

@@ -820,38 +819,38 @@ var desugaredBop = map[ast.BinaryOp]ast.Identifier{
 ast.BopIn: "objectHasAll",
 }

-var bopBuiltins = []*BinaryBuiltin{
+var bopBuiltins = []*binaryBuiltin{
-ast.BopMult: &BinaryBuiltin{name: "operator*", function: builtinMult, parameters: ast.Identifiers{"x", "y"}},
+ast.BopMult: &binaryBuiltin{name: "operator*", function: builtinMult, parameters: ast.Identifiers{"x", "y"}},
-ast.BopDiv: &BinaryBuiltin{name: "operator/", function: builtinDiv, parameters: ast.Identifiers{"x", "y"}},
+ast.BopDiv: &binaryBuiltin{name: "operator/", function: builtinDiv, parameters: ast.Identifiers{"x", "y"}},
 // ast.BopPercent: <desugared>,

-ast.BopPlus: &BinaryBuiltin{name: "operator+", function: builtinPlus, parameters: ast.Identifiers{"x", "y"}},
+ast.BopPlus: &binaryBuiltin{name: "operator+", function: builtinPlus, parameters: ast.Identifiers{"x", "y"}},
-ast.BopMinus: &BinaryBuiltin{name: "operator-", function: builtinMinus, parameters: ast.Identifiers{"x", "y"}},
+ast.BopMinus: &binaryBuiltin{name: "operator-", function: builtinMinus, parameters: ast.Identifiers{"x", "y"}},

-ast.BopShiftL: &BinaryBuiltin{name: "operator<<", function: builtinShiftL, parameters: ast.Identifiers{"x", "y"}},
+ast.BopShiftL: &binaryBuiltin{name: "operator<<", function: builtinShiftL, parameters: ast.Identifiers{"x", "y"}},
-ast.BopShiftR: &BinaryBuiltin{name: "operator>>", function: builtinShiftR, parameters: ast.Identifiers{"x", "y"}},
+ast.BopShiftR: &binaryBuiltin{name: "operator>>", function: builtinShiftR, parameters: ast.Identifiers{"x", "y"}},

-ast.BopGreater: &BinaryBuiltin{name: "operator>", function: builtinGreater, parameters: ast.Identifiers{"x", "y"}},
+ast.BopGreater: &binaryBuiltin{name: "operator>", function: builtinGreater, parameters: ast.Identifiers{"x", "y"}},
-ast.BopGreaterEq: &BinaryBuiltin{name: "operator>=", function: builtinGreaterEq, parameters: ast.Identifiers{"x", "y"}},
+ast.BopGreaterEq: &binaryBuiltin{name: "operator>=", function: builtinGreaterEq, parameters: ast.Identifiers{"x", "y"}},
-ast.BopLess: &BinaryBuiltin{name: "operator<,", function: builtinLess, parameters: ast.Identifiers{"x", "y"}},
+ast.BopLess: &binaryBuiltin{name: "operator<,", function: builtinLess, parameters: ast.Identifiers{"x", "y"}},
-ast.BopLessEq: &BinaryBuiltin{name: "operator<=", function: builtinLessEq, parameters: ast.Identifiers{"x", "y"}},
+ast.BopLessEq: &binaryBuiltin{name: "operator<=", function: builtinLessEq, parameters: ast.Identifiers{"x", "y"}},

 // bopManifestEqual: <desugared>,
 // bopManifestUnequal: <desugared>,

-ast.BopBitwiseAnd: &BinaryBuiltin{name: "operator&", function: builtinBitwiseAnd, parameters: ast.Identifiers{"x", "y"}},
+ast.BopBitwiseAnd: &binaryBuiltin{name: "operator&", function: builtinBitwiseAnd, parameters: ast.Identifiers{"x", "y"}},
-ast.BopBitwiseXor: &BinaryBuiltin{name: "operator^", function: builtinBitwiseXor, parameters: ast.Identifiers{"x", "y"}},
+ast.BopBitwiseXor: &binaryBuiltin{name: "operator^", function: builtinBitwiseXor, parameters: ast.Identifiers{"x", "y"}},
-ast.BopBitwiseOr: &BinaryBuiltin{name: "operator|", function: builtinBitwiseOr, parameters: ast.Identifiers{"x", "y"}},
+ast.BopBitwiseOr: &binaryBuiltin{name: "operator|", function: builtinBitwiseOr, parameters: ast.Identifiers{"x", "y"}},

-ast.BopAnd: &BinaryBuiltin{name: "operator&&", function: builtinAnd, parameters: ast.Identifiers{"x", "y"}},
+ast.BopAnd: &binaryBuiltin{name: "operator&&", function: builtinAnd, parameters: ast.Identifiers{"x", "y"}},
-ast.BopOr: &BinaryBuiltin{name: "operator||", function: builtinOr, parameters: ast.Identifiers{"x", "y"}},
+ast.BopOr: &binaryBuiltin{name: "operator||", function: builtinOr, parameters: ast.Identifiers{"x", "y"}},
 }

-var uopBuiltins = []*UnaryBuiltin{
+var uopBuiltins = []*unaryBuiltin{
-ast.UopNot: &UnaryBuiltin{name: "operator!", function: builtinNegation, parameters: ast.Identifiers{"x"}},
+ast.UopNot: &unaryBuiltin{name: "operator!", function: builtinNegation, parameters: ast.Identifiers{"x"}},
-ast.UopBitwiseNot: &UnaryBuiltin{name: "operator~", function: builtinBitNeg, parameters: ast.Identifiers{"x"}},
+ast.UopBitwiseNot: &unaryBuiltin{name: "operator~", function: builtinBitNeg, parameters: ast.Identifiers{"x"}},
-ast.UopPlus: &UnaryBuiltin{name: "operator+ (unary)", function: builtinIdentity, parameters: ast.Identifiers{"x"}},
+ast.UopPlus: &unaryBuiltin{name: "operator+ (unary)", function: builtinIdentity, parameters: ast.Identifiers{"x"}},
-ast.UopMinus: &UnaryBuiltin{name: "operator- (unary)", function: builtinUnaryMinus, parameters: ast.Identifiers{"x"}},
+ast.UopMinus: &unaryBuiltin{name: "operator- (unary)", function: builtinUnaryMinus, parameters: ast.Identifiers{"x"}},
 }

 type builtin interface {
@@ -868,39 +867,39 @@ func buildBuiltinMap(builtins []builtin) map[string]evalCallable {
 }

 var funcBuiltins = buildBuiltinMap([]builtin{
-&UnaryBuiltin{name: "extVar", function: builtinExtVar, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "extVar", function: builtinExtVar, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "length", function: builtinLength, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "length", function: builtinLength, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "toString", function: builtinToString, parameters: ast.Identifiers{"a"}},
+&unaryBuiltin{name: "toString", function: builtinToString, parameters: ast.Identifiers{"a"}},
-&BinaryBuiltin{name: "makeArray", function: builtinMakeArray, parameters: ast.Identifiers{"sz", "func"}},
+&binaryBuiltin{name: "makeArray", function: builtinMakeArray, parameters: ast.Identifiers{"sz", "func"}},
-&BinaryBuiltin{name: "flatMap", function: builtinFlatMap, parameters: ast.Identifiers{"func", "arr"}},
+&binaryBuiltin{name: "flatMap", function: builtinFlatMap, parameters: ast.Identifiers{"func", "arr"}},
-&BinaryBuiltin{name: "join", function: builtinJoin, parameters: ast.Identifiers{"sep", "arr"}},
+&binaryBuiltin{name: "join", function: builtinJoin, parameters: ast.Identifiers{"sep", "arr"}},
-&BinaryBuiltin{name: "filter", function: builtinFilter, parameters: ast.Identifiers{"func", "arr"}},
+&binaryBuiltin{name: "filter", function: builtinFilter, parameters: ast.Identifiers{"func", "arr"}},
-&BinaryBuiltin{name: "range", function: builtinRange, parameters: ast.Identifiers{"from", "to"}},
+&binaryBuiltin{name: "range", function: builtinRange, parameters: ast.Identifiers{"from", "to"}},
-&BinaryBuiltin{name: "primitiveEquals", function: primitiveEquals, parameters: ast.Identifiers{"sz", "func"}},
+&binaryBuiltin{name: "primitiveEquals", function: primitiveEquals, parameters: ast.Identifiers{"sz", "func"}},
-&BinaryBuiltin{name: "objectFieldsEx", function: builtinObjectFieldsEx, parameters: ast.Identifiers{"obj", "hidden"}},
+&binaryBuiltin{name: "objectFieldsEx", function: builtinObjectFieldsEx, parameters: ast.Identifiers{"obj", "hidden"}},
-&TernaryBuiltin{name: "objectHasEx", function: builtinObjectHasEx, parameters: ast.Identifiers{"obj", "fname", "hidden"}},
+&ternaryBuiltin{name: "objectHasEx", function: builtinObjectHasEx, parameters: ast.Identifiers{"obj", "fname", "hidden"}},
-&UnaryBuiltin{name: "type", function: builtinType, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "type", function: builtinType, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "char", function: builtinChar, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "char", function: builtinChar, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "codepoint", function: builtinCodepoint, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "codepoint", function: builtinCodepoint, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "ceil", function: builtinCeil, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "ceil", function: builtinCeil, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "floor", function: builtinFloor, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "floor", function: builtinFloor, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "sqrt", function: builtinSqrt, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "sqrt", function: builtinSqrt, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "sin", function: builtinSin, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "sin", function: builtinSin, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "cos", function: builtinCos, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "cos", function: builtinCos, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "tan", function: builtinTan, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "tan", function: builtinTan, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "asin", function: builtinAsin, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "asin", function: builtinAsin, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "acos", function: builtinAcos, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "acos", function: builtinAcos, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "atan", function: builtinAtan, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "atan", function: builtinAtan, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "log", function: builtinLog, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "log", function: builtinLog, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "exp", function: builtinExp, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "exp", function: builtinExp, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "mantissa", function: builtinMantissa, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "mantissa", function: builtinMantissa, parameters: ast.Identifiers{"x"}},
-&UnaryBuiltin{name: "exponent", function: builtinExponent, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "exponent", function: builtinExponent, parameters: ast.Identifiers{"x"}},
-&BinaryBuiltin{name: "pow", function: builtinPow, parameters: ast.Identifiers{"base", "exp"}},
+&binaryBuiltin{name: "pow", function: builtinPow, parameters: ast.Identifiers{"base", "exp"}},
-&BinaryBuiltin{name: "modulo", function: builtinModulo, parameters: ast.Identifiers{"x", "y"}},
+&binaryBuiltin{name: "modulo", function: builtinModulo, parameters: ast.Identifiers{"x", "y"}},
-&UnaryBuiltin{name: "md5", function: builtinMd5, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "md5", function: builtinMd5, parameters: ast.Identifiers{"x"}},
-&TernaryBuiltin{name: "strReplace", function: builtinStrReplace, parameters: ast.Identifiers{"str", "from", "to"}},
+&ternaryBuiltin{name: "strReplace", function: builtinStrReplace, parameters: ast.Identifiers{"str", "from", "to"}},
-&UnaryBuiltin{name: "native", function: builtinNative, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "native", function: builtinNative, parameters: ast.Identifiers{"x"}},

 // internal
-&UnaryBuiltin{name: "$objectFlatMerge", function: builtinUglyObjectFlatMerge, parameters: ast.Identifiers{"x"}},
+&unaryBuiltin{name: "$objectFlatMerge", function: builtinUglyObjectFlatMerge, parameters: ast.Identifiers{"x"}},
 })
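Two mechanical cleanups run through the builtins.go hunks: the exported helper types UnaryBuiltin, BinaryBuiltin and TernaryBuiltin become unexported by lowercasing their first letter (with the underlying function types renamed to unaryBuiltinFunc and friends so the shorter names stay free), and an else branch that only wrapped a return is dropped. Below is a compilable sketch of both patterns; the names are invented stand-ins, not the real evaluator types from the commit.

package main

import "fmt"

// unaryBuiltinFunc mirrors the renamed function type: unexported, with a
// "Func" suffix so the struct below can reuse the shorter name.
type unaryBuiltinFunc func(x float64) (float64, error)

// unaryBuiltin is unexported: only code inside this package can refer to it,
// which is exactly what lowercasing the first letter of a Go identifier does.
type unaryBuiltin struct {
	name     string
	function unaryBuiltinFunc
}

// call demonstrates the "drop else after return" cleanup: the happy path
// simply falls through instead of living in an else branch.
func (b *unaryBuiltin) call(x float64) (float64, error) {
	if b.function == nil {
		return 0, fmt.Errorf("builtin %q has no implementation", b.name)
	}
	return b.function(x)
}

func main() {
	neg := &unaryBuiltin{
		name:     "operator- (unary)",
		function: func(x float64) (float64, error) { return -x, nil },
	}
	v, err := neg.call(2)
	fmt.Println(v, err) // -2 <nil>
}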
desugarer.go  (26 changed lines)
@@ -28,7 +28,12 @@ import (
 )

 func makeStr(s string) *ast.LiteralString {
-return &ast.LiteralString{ast.NodeBase{}, s, ast.StringDouble, ""}
+return &ast.LiteralString{
+NodeBase: ast.NodeBase{},
+Value: s,
+Kind: ast.StringDouble,
+BlockIndent: "",
+}
 }

 func stringUnescape(loc *ast.LocationRange, s string) (string, error) {
@@ -130,7 +135,11 @@ func desugarFields(location ast.LocationRange, fields *ast.ObjectFields, objLeve
 continue
 }
 if len(binds) > 0 {
-field.Expr2 = &ast.Local{ast.NewNodeBaseLoc(*field.Expr2.Loc()), binds, field.Expr2}
+field.Expr2 = &ast.Local{
+NodeBase: ast.NewNodeBaseLoc(*field.Expr2.Loc()),
+Binds: binds,
+Body: field.Expr2,
+}
 }
 newFields = append(newFields, field)
 }
@@ -265,13 +274,22 @@ func buildDesugaredObject(nodeBase ast.NodeBase, fields ast.ObjectFields) *ast.D
 if field.Kind == ast.ObjectAssert {
 newAsserts = append(newAsserts, field.Expr2)
 } else if field.Kind == ast.ObjectFieldExpr {
-newFields = append(newFields, ast.DesugaredObjectField{field.Hide, field.Expr1, field.Expr2, field.SuperSugar})
+newFields = append(newFields, ast.DesugaredObjectField{
+Hide: field.Hide,
+Name: field.Expr1,
+Body: field.Expr2,
+PlusSuper: field.SuperSugar,
+})
 } else {
 panic(fmt.Sprintf("INTERNAL ERROR: field should have been desugared: %v", field.Kind))
 }
 }

-return &ast.DesugaredObject{nodeBase, newAsserts, newFields}
+return &ast.DesugaredObject{
+NodeBase: nodeBase,
+Asserts: newAsserts,
+Fields: newFields,
+}
 }

 // Desugar Jsonnet expressions to reduce the number of constructs the rest of the implementation

@@ -25,6 +25,7 @@ import (
 "github.com/google/go-jsonnet/parser"
 )

+// An ErrorFormatter formats errors with stacktraces and color.
 type ErrorFormatter interface {
 // Format static, runtime, and unexpected errors prior to printing them.
 Format(err error) string
@@ -36,6 +37,7 @@ type ErrorFormatter interface {
 SetColorFormatter(color ColorFormatter)
 }

+// ColorFormatter represents a function that writes to the terminal using color.
 type ColorFormatter func(w io.Writer, f string, a ...interface{}) (n int, err error)

 var _ ErrorFormatter = &termErrorFormatter{}
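The desugarer.go hunks replace positional composite literals with keyed ones, which is what go vet's composites check asks for when the struct type comes from another package (here ast.LiteralString, ast.Local, ast.DesugaredObjectField and ast.DesugaredObject). A small stand-alone sketch of the difference follows; the struct below is invented for illustration and is not one of the ast types.

package main

import "fmt"

// objectField stands in for an AST field struct; the field names are illustrative.
type objectField struct {
	hide      int
	name      string
	body      string
	plusSuper bool
}

func main() {
	// Positional literal: every field must be given, in declaration order,
	// so adding or reordering struct fields silently changes its meaning.
	positional := objectField{1, "x", "self.y + 1", false}

	// Keyed literal: order-independent, and omitted fields get their zero value.
	keyed := objectField{hide: 1, name: "x", body: "self.y + 1"}

	fmt.Println(positional == keyed) // true
}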
imports.go  (15 changed lines)
@@ -23,15 +23,18 @@ import (
 "path"
 )

+// ImportedData represents imported data and where it came from.
 type ImportedData struct {
 FoundHere string
 Content string
 }

+// An Importer imports data from a path.
 type Importer interface {
 Import(codeDir string, importedPath string) (*ImportedData, error)
 }

+// ImportCacheValue represents a value in an imported-data cache.
 type ImportCacheValue struct {
 // nil if we got an error
 data *ImportedData
@@ -50,11 +53,13 @@ type importCacheKey struct {

 type importCacheMap map[importCacheKey]*ImportCacheValue

+// ImportCache represents a cache of imported data.
 type ImportCache struct {
 cache importCacheMap
 importer Importer
 }

+// MakeImportCache creates and ImportCache using an importer.
 func MakeImportCache(importer Importer) *ImportCache {
 return &ImportCache{importer: importer, cache: make(importCacheMap)}
 }
@@ -72,6 +77,7 @@ func (cache *ImportCache) importData(key importCacheKey) *ImportCacheValue {
 return cached
 }

+// ImportString imports a string, caches it and then returns it.
 func (cache *ImportCache) ImportString(codeDir, importedPath string, e *evaluator) (*valueString, error) {
 cached := cache.importData(importCacheKey{codeDir, importedPath})
 if cached.err != nil {
@@ -93,6 +99,7 @@ func codeToPV(e *evaluator, filename string, code string) potentialValue {
 return makeThunk(makeInitialEnv(filename, e.i.baseStd), node)
 }

+// ImportCode imports code from a path.
 func (cache *ImportCache) ImportCode(codeDir, importedPath string, e *evaluator) (value, error) {
 cached := cache.importData(importCacheKey{codeDir, importedPath})
 if cached.err != nil {
@@ -108,6 +115,7 @@ func (cache *ImportCache) ImportCode(codeDir, importedPath string, e *evaluator)
 // Concrete importers
 // -------------------------------------

+// FileImporter imports data from files.
 type FileImporter struct {
 JPaths []string
 }
@@ -126,6 +134,7 @@ func tryPath(dir, importedPath string) (found bool, content []byte, foundHere st
 return true, content, absPath, err
 }

+// Import imports a file.
 func (importer *FileImporter) Import(dir, importedPath string) (*ImportedData, error) {
 found, content, foundHere, err := tryPath(dir, importedPath)
 if err != nil {
@@ -140,18 +149,20 @@ func (importer *FileImporter) Import(dir, importedPath string) (*ImportedData, e
 }

 if !found {
-return nil, fmt.Errorf("Couldn't open import %#v: No match locally or in the Jsonnet library paths.", importedPath)
+return nil, fmt.Errorf("couldn't open import %#v: no match locally or in the Jsonnet library paths", importedPath)
 }
 return &ImportedData{Content: string(content), FoundHere: foundHere}, nil
 }

+// MemoryImporter "imports" data from an in-memory map.
 type MemoryImporter struct {
 Data map[string]string
 }

+// Import imports a map entry.
 func (importer *MemoryImporter) Import(dir, importedPath string) (*ImportedData, error) {
 if content, ok := importer.Data[importedPath]; ok {
 return &ImportedData{Content: content, FoundHere: importedPath}, nil
 }
-return nil, fmt.Errorf("Import not available %v", importedPath)
+return nil, fmt.Errorf("import not available %v", importedPath)
 }
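The two fmt.Errorf changes above adopt the usual Go error-string style: lowercase first word and no trailing punctuation, because errors are normally embedded in larger messages (the same change appears in the test hunks further down). A minimal sketch with a hypothetical helper, not the FileImporter method itself:

package main

import "fmt"

// openImport is an invented helper that only illustrates the lowercase,
// unpunctuated error-string convention.
func openImport(path string) error {
	return fmt.Errorf("couldn't open import %#v: no match locally or in the Jsonnet library paths", path)
}

func main() {
	if err := openImport("foo.libsonnet"); err != nil {
		// Wrapped into a bigger message, a lowercase error reads naturally
		// and avoids mid-sentence capitals or doubled periods.
		fmt.Println("RUNTIME ERROR:", err)
	}
}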
@@ -758,9 +758,9 @@ func (i *interpreter) manifestAndSerializeMulti(trace *TraceElement, v value) (r
 }
 switch json := json.(type) {
 case map[string]interface{}:
-for filename, fileJson := range json {
+for filename, fileJSON := range json {
 var buf bytes.Buffer
-serializeJSON(fileJson, true, "", &buf)
+serializeJSON(fileJSON, true, "", &buf)
 buf.WriteString("\n")
 r[filename] = buf.String()
 }

@@ -5,10 +5,10 @@ import (
 "github.com/google/go-jsonnet/parser"
 )

-type vScope map[ast.Identifier]*Variable
+type vScope map[ast.Identifier]*variable

 func addVar(name ast.Identifier, node ast.Node, info *LintingInfo, scope vScope, param bool) {
-v := Variable{
+v := variable{
 name: name,
 declNode: node,
 uses: nil,

@@ -7,6 +7,8 @@ import (
 "github.com/google/go-jsonnet/parser"
 )

+// ErrorWriter encapsulates a writer and an error state indicating when at least
+// one error has been written to the writer.
 type ErrorWriter struct {
 ErrorsFound bool
 Writer io.Writer
@@ -17,7 +19,7 @@ func (e *ErrorWriter) writeError(err parser.StaticError) {
 e.Writer.Write([]byte(err.Error() + "\n"))
 }

-type Variable struct {
+type variable struct {
 name ast.Identifier
 declNode ast.Node
 uses []ast.Node
@@ -29,14 +31,15 @@ type Variable struct {
 // It is global, i.e. it holds the same data regardless of scope we're
 // currently analyzing.
 type LintingInfo struct {
-variables []Variable
+variables []variable
 }

+// Lint analyses a node and reports any issues it encounters to an error writer.
 func Lint(node ast.Node, e *ErrorWriter) {
 lintingInfo := LintingInfo{
 variables: nil,
 }
-std := Variable{
+std := variable{
 name: "std",
 declNode: nil,
 uses: nil,

@@ -88,7 +88,7 @@ var nativeError = &NativeFunction{
 Name: "nativeError",
 Params: ast.Identifiers{},
 Func: func(x []interface{}) (interface{}, error) {
-return nil, errors.New("Native function error")
+return nil, errors.New("native function error")
 },
 }

@@ -172,9 +172,8 @@ func runJsonnetCommand(i jsonnetInput) jsonnetResult {
 func runJsonnet(i jsonnetInput) jsonnetResult {
 if jsonnetCmd != nil && *jsonnetCmd != "" {
 return runJsonnetCommand(i)
-} else {
-return runInternalJsonnet(i)
 }
+return runInternalJsonnet(i)
 }

 func runTest(t *testing.T, test *mainTest) {

@@ -278,6 +278,7 @@ func specialChildren(node ast.Node) []ast.Node {
 panic(fmt.Sprintf("specialChildren: Unknown node %#v", node))
 }

+// Children returns all children of a node.
 func Children(node ast.Node) []ast.Node {
 var result []ast.Node
 result = append(result, directChildren(node)...)

@@ -165,7 +165,8 @@ type token struct {
 loc ast.LocationRange
 }

-type tokens []token
+// Tokens is a slice of token structs.
+type Tokens []token

 func (t *token) String() string {
 if t.data == "" {
@@ -250,7 +251,7 @@ type lexer struct {
 pos position // Current position in input
 prev position // Previous position in input

-tokens tokens // The tokens that we've generated so far
+tokens Tokens // The tokens that we've generated so far

 // Information about the token we are working on right now
 fodder fodder
@@ -682,7 +683,8 @@ func (l *lexer) lexSymbol() error {
 return nil
 }

-func Lex(fn string, input string) (tokens, error) {
+// Lex returns a slice of tokens recognised in input.
+func Lex(fn string, input string) (Tokens, error) {
 l := makeLexer(fn, input)

 var err error

@@ -22,7 +22,7 @@ import (
 type lexTest struct {
 name string
 input string
-tokens tokens
+tokens Tokens
 errString string
 }

@@ -31,92 +31,92 @@ var (
 )

 var lexTests = []lexTest{
-{"empty", "", tokens{}, ""},
+{"empty", "", Tokens{}, ""},
-{"whitespace", " \t\n\r\r\n", tokens{}, ""},
+{"whitespace", " \t\n\r\r\n", Tokens{}, ""},

-{"brace L", "{", tokens{{kind: tokenBraceL, data: "{"}}, ""},
+{"brace L", "{", Tokens{{kind: tokenBraceL, data: "{"}}, ""},
-{"brace R", "}", tokens{{kind: tokenBraceR, data: "}"}}, ""},
+{"brace R", "}", Tokens{{kind: tokenBraceR, data: "}"}}, ""},
-{"bracket L", "[", tokens{{kind: tokenBracketL, data: "["}}, ""},
+{"bracket L", "[", Tokens{{kind: tokenBracketL, data: "["}}, ""},
-{"bracket R", "]", tokens{{kind: tokenBracketR, data: "]"}}, ""},
+{"bracket R", "]", Tokens{{kind: tokenBracketR, data: "]"}}, ""},
-{"colon", ":", tokens{{kind: tokenOperator, data: ":"}}, ""},
+{"colon", ":", Tokens{{kind: tokenOperator, data: ":"}}, ""},
-{"colon2", "::", tokens{{kind: tokenOperator, data: "::"}}, ""},
+{"colon2", "::", Tokens{{kind: tokenOperator, data: "::"}}, ""},
-{"colon3", ":::", tokens{{kind: tokenOperator, data: ":::"}}, ""},
+{"colon3", ":::", Tokens{{kind: tokenOperator, data: ":::"}}, ""},
-{"arrow right", "->", tokens{{kind: tokenOperator, data: "->"}}, ""},
+{"arrow right", "->", Tokens{{kind: tokenOperator, data: "->"}}, ""},
-{"less than minus", "<-", tokens{{kind: tokenOperator, data: "<"},
+{"less than minus", "<-", Tokens{{kind: tokenOperator, data: "<"},
 {kind: tokenOperator, data: "-"}}, ""},
-{"comma", ",", tokens{{kind: tokenComma, data: ","}}, ""},
+{"comma", ",", Tokens{{kind: tokenComma, data: ","}}, ""},
-{"dollar", "$", tokens{{kind: tokenDollar, data: "$"}}, ""},
+{"dollar", "$", Tokens{{kind: tokenDollar, data: "$"}}, ""},
-{"dot", ".", tokens{{kind: tokenDot, data: "."}}, ""},
+{"dot", ".", Tokens{{kind: tokenDot, data: "."}}, ""},
-{"paren L", "(", tokens{{kind: tokenParenL, data: "("}}, ""},
+{"paren L", "(", Tokens{{kind: tokenParenL, data: "("}}, ""},
-{"paren R", ")", tokens{{kind: tokenParenR, data: ")"}}, ""},
+{"paren R", ")", Tokens{{kind: tokenParenR, data: ")"}}, ""},
-{"semicolon", ";", tokens{{kind: tokenSemicolon, data: ";"}}, ""},
+{"semicolon", ";", Tokens{{kind: tokenSemicolon, data: ";"}}, ""},

-{"not 1", "!", tokens{{kind: tokenOperator, data: "!"}}, ""},
+{"not 1", "!", Tokens{{kind: tokenOperator, data: "!"}}, ""},
-{"not 2", "! ", tokens{{kind: tokenOperator, data: "!"}}, ""},
+{"not 2", "! ", Tokens{{kind: tokenOperator, data: "!"}}, ""},
-{"not equal", "!=", tokens{{kind: tokenOperator, data: "!="}}, ""},
+{"not equal", "!=", Tokens{{kind: tokenOperator, data: "!="}}, ""},
-{"tilde", "~", tokens{{kind: tokenOperator, data: "~"}}, ""},
+{"tilde", "~", Tokens{{kind: tokenOperator, data: "~"}}, ""},
-{"plus", "+", tokens{{kind: tokenOperator, data: "+"}}, ""},
+{"plus", "+", Tokens{{kind: tokenOperator, data: "+"}}, ""},
-{"minus", "-", tokens{{kind: tokenOperator, data: "-"}}, ""},
+{"minus", "-", Tokens{{kind: tokenOperator, data: "-"}}, ""},

-{"number 0", "0", tokens{{kind: tokenNumber, data: "0"}}, ""},
+{"number 0", "0", Tokens{{kind: tokenNumber, data: "0"}}, ""},
-{"number 1", "1", tokens{{kind: tokenNumber, data: "1"}}, ""},
+{"number 1", "1", Tokens{{kind: tokenNumber, data: "1"}}, ""},
-{"number 1.0", "1.0", tokens{{kind: tokenNumber, data: "1.0"}}, ""},
+{"number 1.0", "1.0", Tokens{{kind: tokenNumber, data: "1.0"}}, ""},
-{"number 0.10", "0.10", tokens{{kind: tokenNumber, data: "0.10"}}, ""},
+{"number 0.10", "0.10", Tokens{{kind: tokenNumber, data: "0.10"}}, ""},
-{"number 0e100", "0e100", tokens{{kind: tokenNumber, data: "0e100"}}, ""},
+{"number 0e100", "0e100", Tokens{{kind: tokenNumber, data: "0e100"}}, ""},
-{"number 1e100", "1e100", tokens{{kind: tokenNumber, data: "1e100"}}, ""},
+{"number 1e100", "1e100", Tokens{{kind: tokenNumber, data: "1e100"}}, ""},
-{"number 1.1e100", "1.1e100", tokens{{kind: tokenNumber, data: "1.1e100"}}, ""},
+{"number 1.1e100", "1.1e100", Tokens{{kind: tokenNumber, data: "1.1e100"}}, ""},
-{"number 1.1e-100", "1.1e-100", tokens{{kind: tokenNumber, data: "1.1e-100"}}, ""},
+{"number 1.1e-100", "1.1e-100", Tokens{{kind: tokenNumber, data: "1.1e-100"}}, ""},
-{"number 1.1e+100", "1.1e+100", tokens{{kind: tokenNumber, data: "1.1e+100"}}, ""},
+{"number 1.1e+100", "1.1e+100", Tokens{{kind: tokenNumber, data: "1.1e+100"}}, ""},
-{"number 0100", "0100", tokens{
+{"number 0100", "0100", Tokens{
 {kind: tokenNumber, data: "0"},
 {kind: tokenNumber, data: "100"},
 }, ""},
-{"number 10+10", "10+10", tokens{
+{"number 10+10", "10+10", Tokens{
 {kind: tokenNumber, data: "10"},
 {kind: tokenOperator, data: "+"},
 {kind: tokenNumber, data: "10"},
 }, ""},
-{"number 1.+3", "1.+3", tokens{}, "number 1.+3:1:3 Couldn't lex number, junk after decimal point: '+'"},
+{"number 1.+3", "1.+3", Tokens{}, "number 1.+3:1:3 Couldn't lex number, junk after decimal point: '+'"},
-{"number 1e!", "1e!", tokens{}, "number 1e!:1:3 Couldn't lex number, junk after 'E': '!'"},
+{"number 1e!", "1e!", Tokens{}, "number 1e!:1:3 Couldn't lex number, junk after 'E': '!'"},
-{"number 1e+!", "1e+!", tokens{}, "number 1e+!:1:4 Couldn't lex number, junk after exponent sign: '!'"},
+{"number 1e+!", "1e+!", Tokens{}, "number 1e+!:1:4 Couldn't lex number, junk after exponent sign: '!'"},

-{"double string \"hi\"", "\"hi\"", tokens{{kind: tokenStringDouble, data: "hi"}}, ""},
+{"double string \"hi\"", "\"hi\"", Tokens{{kind: tokenStringDouble, data: "hi"}}, ""},
-{"double string \"hi nl\"", "\"hi\n\"", tokens{{kind: tokenStringDouble, data: "hi\n"}}, ""},
+{"double string \"hi nl\"", "\"hi\n\"", Tokens{{kind: tokenStringDouble, data: "hi\n"}}, ""},
-{"double string \"hi\\\"\"", "\"hi\\\"\"", tokens{{kind: tokenStringDouble, data: "hi\\\""}}, ""},
+{"double string \"hi\\\"\"", "\"hi\\\"\"", Tokens{{kind: tokenStringDouble, data: "hi\\\""}}, ""},
-{"double string \"hi\\nl\"", "\"hi\\\n\"", tokens{{kind: tokenStringDouble, data: "hi\\\n"}}, ""},
+{"double string \"hi\\nl\"", "\"hi\\\n\"", Tokens{{kind: tokenStringDouble, data: "hi\\\n"}}, ""},
-{"double string \"hi", "\"hi", tokens{}, "double string \"hi:1:1 Unterminated String"},
+{"double string \"hi", "\"hi", Tokens{}, "double string \"hi:1:1 Unterminated String"},

-{"single string 'hi'", "'hi'", tokens{{kind: tokenStringSingle, data: "hi"}}, ""},
+{"single string 'hi'", "'hi'", Tokens{{kind: tokenStringSingle, data: "hi"}}, ""},
-{"single string 'hi nl'", "'hi\n'", tokens{{kind: tokenStringSingle, data: "hi\n"}}, ""},
+{"single string 'hi nl'", "'hi\n'", Tokens{{kind: tokenStringSingle, data: "hi\n"}}, ""},
-{"single string 'hi\\''", "'hi\\''", tokens{{kind: tokenStringSingle, data: "hi\\'"}}, ""},
+{"single string 'hi\\''", "'hi\\''", Tokens{{kind: tokenStringSingle, data: "hi\\'"}}, ""},
-{"single string 'hi\\nl'", "'hi\\\n'", tokens{{kind: tokenStringSingle, data: "hi\\\n"}}, ""},
+{"single string 'hi\\nl'", "'hi\\\n'", Tokens{{kind: tokenStringSingle, data: "hi\\\n"}}, ""},
-{"single string 'hi", "'hi", tokens{}, "single string 'hi:1:1 Unterminated String"},
+{"single string 'hi", "'hi", Tokens{}, "single string 'hi:1:1 Unterminated String"},

-{"assert", "assert", tokens{{kind: tokenAssert, data: "assert"}}, ""},
+{"assert", "assert", Tokens{{kind: tokenAssert, data: "assert"}}, ""},
-{"else", "else", tokens{{kind: tokenElse, data: "else"}}, ""},
+{"else", "else", Tokens{{kind: tokenElse, data: "else"}}, ""},
-{"error", "error", tokens{{kind: tokenError, data: "error"}}, ""},
+{"error", "error", Tokens{{kind: tokenError, data: "error"}}, ""},
-{"false", "false", tokens{{kind: tokenFalse, data: "false"}}, ""},
+{"false", "false", Tokens{{kind: tokenFalse, data: "false"}}, ""},
-{"for", "for", tokens{{kind: tokenFor, data: "for"}}, ""},
+{"for", "for", Tokens{{kind: tokenFor, data: "for"}}, ""},
-{"function", "function", tokens{{kind: tokenFunction, data: "function"}}, ""},
+{"function", "function", Tokens{{kind: tokenFunction, data: "function"}}, ""},
-{"if", "if", tokens{{kind: tokenIf, data: "if"}}, ""},
+{"if", "if", Tokens{{kind: tokenIf, data: "if"}}, ""},
-{"import", "import", tokens{{kind: tokenImport, data: "import"}}, ""},
+{"import", "import", Tokens{{kind: tokenImport, data: "import"}}, ""},
-{"importstr", "importstr", tokens{{kind: tokenImportStr, data: "importstr"}}, ""},
+{"importstr", "importstr", Tokens{{kind: tokenImportStr, data: "importstr"}}, ""},
-{"in", "in", tokens{{kind: tokenIn, data: "in"}}, ""},
+{"in", "in", Tokens{{kind: tokenIn, data: "in"}}, ""},
-{"local", "local", tokens{{kind: tokenLocal, data: "local"}}, ""},
+{"local", "local", Tokens{{kind: tokenLocal, data: "local"}}, ""},
-{"null", "null", tokens{{kind: tokenNullLit, data: "null"}}, ""},
+{"null", "null", Tokens{{kind: tokenNullLit, data: "null"}}, ""},
|
||||||
{"self", "self", tokens{{kind: tokenSelf, data: "self"}}, ""},
|
{"self", "self", Tokens{{kind: tokenSelf, data: "self"}}, ""},
|
||||||
{"super", "super", tokens{{kind: tokenSuper, data: "super"}}, ""},
|
{"super", "super", Tokens{{kind: tokenSuper, data: "super"}}, ""},
|
||||||
{"tailstrict", "tailstrict", tokens{{kind: tokenTailStrict, data: "tailstrict"}}, ""},
|
{"tailstrict", "tailstrict", Tokens{{kind: tokenTailStrict, data: "tailstrict"}}, ""},
|
||||||
{"then", "then", tokens{{kind: tokenThen, data: "then"}}, ""},
|
{"then", "then", Tokens{{kind: tokenThen, data: "then"}}, ""},
|
||||||
{"true", "true", tokens{{kind: tokenTrue, data: "true"}}, ""},
|
{"true", "true", Tokens{{kind: tokenTrue, data: "true"}}, ""},
|
||||||
|
|
||||||
{"identifier", "foobar123", tokens{{kind: tokenIdentifier, data: "foobar123"}}, ""},
|
{"identifier", "foobar123", Tokens{{kind: tokenIdentifier, data: "foobar123"}}, ""},
|
||||||
{"identifier", "foo bar123", tokens{{kind: tokenIdentifier, data: "foo"}, {kind: tokenIdentifier, data: "bar123"}}, ""},
|
{"identifier", "foo bar123", Tokens{{kind: tokenIdentifier, data: "foo"}, {kind: tokenIdentifier, data: "bar123"}}, ""},
|
||||||
|
|
||||||
{"c++ comment", "// hi", tokens{}, ""}, // This test doesn't look at fodder (yet?)
|
{"c++ comment", "// hi", Tokens{}, ""}, // This test doesn't look at fodder (yet?)
|
||||||
{"hash comment", "# hi", tokens{}, ""}, // This test doesn't look at fodder (yet?)
|
{"hash comment", "# hi", Tokens{}, ""}, // This test doesn't look at fodder (yet?)
|
||||||
{"c comment", "/* hi */", tokens{}, ""}, // This test doesn't look at fodder (yet?)
|
{"c comment", "/* hi */", Tokens{}, ""}, // This test doesn't look at fodder (yet?)
|
||||||
{"c comment no term", "/* hi", tokens{}, "c comment no term:1:1 Multi-line comment has no terminating */"}, // This test doesn't look at fodder (yet?)
|
{"c comment no term", "/* hi", Tokens{}, "c comment no term:1:1 Multi-line comment has no terminating */"}, // This test doesn't look at fodder (yet?)
|
||||||
|
|
||||||
{
|
{
|
||||||
"block string spaces",
|
"block string spaces",
|
||||||
@ -126,7 +126,7 @@ var lexTests = []lexTest{
|
|||||||
|||
|
|||
|
||||||
foo
|
foo
|
||||||
|||`,
|
|||`,
|
||||||
tokens{
|
Tokens{
|
||||||
{
|
{
|
||||||
kind: tokenStringBlock,
|
kind: tokenStringBlock,
|
||||||
data: "test\n more\n|||\n foo\n",
|
data: "test\n more\n|||\n foo\n",
|
||||||
@ -144,7 +144,7 @@ var lexTests = []lexTest{
|
|||||||
|||
|
|||
|
||||||
foo
|
foo
|
||||||
|||`,
|
|||`,
|
||||||
tokens{
|
Tokens{
|
||||||
{
|
{
|
||||||
kind: tokenStringBlock,
|
kind: tokenStringBlock,
|
||||||
data: "test\n more\n|||\n foo\n",
|
data: "test\n more\n|||\n foo\n",
|
||||||
@ -162,7 +162,7 @@ var lexTests = []lexTest{
|
|||||||
|||
|
|||
|
||||||
foo
|
foo
|
||||||
|||`,
|
|||`,
|
||||||
tokens{
|
Tokens{
|
||||||
{
|
{
|
||||||
kind: tokenStringBlock,
|
kind: tokenStringBlock,
|
||||||
data: "test\n more\n|||\n foo\n",
|
data: "test\n more\n|||\n foo\n",
|
||||||
@ -183,7 +183,7 @@ var lexTests = []lexTest{
|
|||||||
|||
|
|||
|
||||||
foo
|
foo
|
||||||
|||`,
|
|||`,
|
||||||
tokens{
|
Tokens{
|
||||||
{
|
{
|
||||||
kind: tokenStringBlock,
|
kind: tokenStringBlock,
|
||||||
data: "\ntest\n\n\n more\n|||\n foo\n",
|
data: "\ntest\n\n\n more\n|||\n foo\n",
|
||||||
@ -199,14 +199,14 @@ var lexTests = []lexTest{
|
|||||||
test
|
test
|
||||||
foo
|
foo
|
||||||
|||`,
|
|||`,
|
||||||
tokens{},
|
Tokens{},
|
||||||
"block string bad indent:1:1 Text block not terminated with |||",
|
"block string bad indent:1:1 Text block not terminated with |||",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"block string eof",
|
"block string eof",
|
||||||
`|||
|
`|||
|
||||||
test`,
|
test`,
|
||||||
tokens{},
|
Tokens{},
|
||||||
"block string eof:1:1 Unexpected EOF",
|
"block string eof:1:1 Unexpected EOF",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -214,7 +214,7 @@ var lexTests = []lexTest{
|
|||||||
`|||
|
`|||
|
||||||
test
|
test
|
||||||
`,
|
`,
|
||||||
tokens{},
|
Tokens{},
|
||||||
"block string not term:1:1 Text block not terminated with |||",
|
"block string not term:1:1 Text block not terminated with |||",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -222,35 +222,35 @@ var lexTests = []lexTest{
|
|||||||
`|||
|
`|||
|
||||||
test
|
test
|
||||||
|||`,
|
|||`,
|
||||||
tokens{},
|
Tokens{},
|
||||||
"block string no ws:1:1 Text block's first line must start with whitespace",
|
"block string no ws:1:1 Text block's first line must start with whitespace",
|
||||||
},
|
},
|
||||||
|
|
||||||
{"verbatim_string1", `@""`, tokens{{kind: tokenVerbatimStringDouble, data: ""}}, ""},
|
{"verbatim_string1", `@""`, Tokens{{kind: tokenVerbatimStringDouble, data: ""}}, ""},
|
||||||
{"verbatim_string2", `@''`, tokens{{kind: tokenVerbatimStringSingle, data: ""}}, ""},
|
{"verbatim_string2", `@''`, Tokens{{kind: tokenVerbatimStringSingle, data: ""}}, ""},
|
||||||
{"verbatim_string3", `@""""`, tokens{{kind: tokenVerbatimStringDouble, data: `"`}}, ""},
|
{"verbatim_string3", `@""""`, Tokens{{kind: tokenVerbatimStringDouble, data: `"`}}, ""},
|
||||||
{"verbatim_string4", `@''''`, tokens{{kind: tokenVerbatimStringSingle, data: "'"}}, ""},
|
{"verbatim_string4", `@''''`, Tokens{{kind: tokenVerbatimStringSingle, data: "'"}}, ""},
|
||||||
{"verbatim_string5", `@"\n"`, tokens{{kind: tokenVerbatimStringDouble, data: "\\n"}}, ""},
|
{"verbatim_string5", `@"\n"`, Tokens{{kind: tokenVerbatimStringDouble, data: "\\n"}}, ""},
|
||||||
{"verbatim_string6", `@"''"`, tokens{{kind: tokenVerbatimStringDouble, data: "''"}}, ""},
|
{"verbatim_string6", `@"''"`, Tokens{{kind: tokenVerbatimStringDouble, data: "''"}}, ""},
|
||||||
|
|
||||||
{"verbatim_string_unterminated", `@"blah blah`, tokens{}, "verbatim_string_unterminated:1:1 Unterminated String"},
|
{"verbatim_string_unterminated", `@"blah blah`, Tokens{}, "verbatim_string_unterminated:1:1 Unterminated String"},
|
||||||
{"verbatim_string_junk", `@blah blah`, tokens{}, "verbatim_string_junk:1:1 Couldn't lex verbatim string, junk after '@': 98"},
|
{"verbatim_string_junk", `@blah blah`, Tokens{}, "verbatim_string_junk:1:1 Couldn't lex verbatim string, junk after '@': 98"},
|
||||||
|
|
||||||
{"op *", "*", tokens{{kind: tokenOperator, data: "*"}}, ""},
|
{"op *", "*", Tokens{{kind: tokenOperator, data: "*"}}, ""},
|
||||||
{"op /", "/", tokens{{kind: tokenOperator, data: "/"}}, ""},
|
{"op /", "/", Tokens{{kind: tokenOperator, data: "/"}}, ""},
|
||||||
{"op %", "%", tokens{{kind: tokenOperator, data: "%"}}, ""},
|
{"op %", "%", Tokens{{kind: tokenOperator, data: "%"}}, ""},
|
||||||
{"op &", "&", tokens{{kind: tokenOperator, data: "&"}}, ""},
|
{"op &", "&", Tokens{{kind: tokenOperator, data: "&"}}, ""},
|
||||||
{"op |", "|", tokens{{kind: tokenOperator, data: "|"}}, ""},
|
{"op |", "|", Tokens{{kind: tokenOperator, data: "|"}}, ""},
|
||||||
{"op ^", "^", tokens{{kind: tokenOperator, data: "^"}}, ""},
|
{"op ^", "^", Tokens{{kind: tokenOperator, data: "^"}}, ""},
|
||||||
{"op =", "=", tokens{{kind: tokenOperator, data: "="}}, ""},
|
{"op =", "=", Tokens{{kind: tokenOperator, data: "="}}, ""},
|
||||||
{"op <", "<", tokens{{kind: tokenOperator, data: "<"}}, ""},
|
{"op <", "<", Tokens{{kind: tokenOperator, data: "<"}}, ""},
|
||||||
{"op >", ">", tokens{{kind: tokenOperator, data: ">"}}, ""},
|
{"op >", ">", Tokens{{kind: tokenOperator, data: ">"}}, ""},
|
||||||
{"op >==|", ">==|", tokens{{kind: tokenOperator, data: ">==|"}}, ""},
|
{"op >==|", ">==|", Tokens{{kind: tokenOperator, data: ">==|"}}, ""},
|
||||||
|
|
||||||
{"junk", "💩", tokens{}, "junk:1:1 Could not lex the character '\\U0001f4a9'"},
|
{"junk", "💩", Tokens{}, "junk:1:1 Could not lex the character '\\U0001f4a9'"},
|
||||||
}
|
}
|
||||||
|
|
||||||
-func tokensEqual(ts1, ts2 tokens) bool {
+func tokensEqual(ts1, ts2 Tokens) bool {
if len(ts1) != len(ts2) {
return false
}
@ -275,7 +275,7 @@ func tokensEqual(ts1, ts2 tokens) bool {
func TestLex(t *testing.T) {
for _, test := range lexTests {
// Copy the test tokens and append an EOF token
-testTokens := append(tokens(nil), test.tokens...)
+testTokens := append(Tokens(nil), test.tokens...)
testTokens = append(testTokens, tEOF)
tokens, err := Lex(test.name, test.input)
var errString string
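
Each entry above is a four-field struct literal: a test name, the input handed to Lex, the expected Tokens (without the trailing EOF token, which TestLex appends itself), and the expected error string. A minimal sketch of the shape these entries assume — the layout is inferred from the test body shown here, and the errString field name in particular is an assumption:

// Hypothetical reconstruction, inside the parser package.
type lexTest struct {
	name      string // label used to identify the case
	input     string // Jsonnet source passed to Lex
	tokens    Tokens // expected token stream, minus the EOF token
	errString string // expected error text, "" when lexing should succeed (assumed name)
}

// One entry in the style of the table above.
var examplePlus = lexTest{name: "plus", input: "+", tokens: Tokens{{kind: tokenOperator, data: "+"}}}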
@ -1,6 +1,6 @@
// Generated by: main
// TypeWriter: set
-// Directive: +gen on literalField
+// Directive: +gen on LiteralField

package parser

@ -8,12 +8,12 @@ package parser
// The MIT License (MIT)
// Copyright (c) 2013 Ralph Caraveo (deckarep@gmail.com)

-// literalFieldSet is the primary type that represents a set
+// LiteralFieldSet is the primary type that represents a set
-type literalFieldSet map[LiteralField]struct{}
+type LiteralFieldSet map[LiteralField]struct{}

-// NewliteralFieldSet creates and returns a reference to an empty set.
+// NewLiteralFieldSet creates and returns a reference to an empty set.
-func NewliteralFieldSet(a ...LiteralField) literalFieldSet {
+func NewLiteralFieldSet(a ...LiteralField) LiteralFieldSet {
-s := make(literalFieldSet)
+s := make(LiteralFieldSet)
for _, i := range a {
s.Add(i)
}
@ -21,7 +21,7 @@ func NewliteralFieldSet(a ...LiteralField) literalFieldSet {
}

// ToSlice returns the elements of the current set as a slice
-func (set literalFieldSet) ToSlice() []LiteralField {
+func (set LiteralFieldSet) ToSlice() []LiteralField {
var s []LiteralField
for v := range set {
s = append(s, v)
@ -30,20 +30,20 @@ func (set literalFieldSet) ToSlice() []LiteralField {
}

// Add adds an item to the current set if it doesn't already exist in the set.
-func (set literalFieldSet) Add(i LiteralField) bool {
+func (set LiteralFieldSet) Add(i LiteralField) bool {
_, found := set[i]
set[i] = struct{}{}
return !found //False if it existed already
}

// Contains determines if a given item is already in the set.
-func (set literalFieldSet) Contains(i LiteralField) bool {
+func (set LiteralFieldSet) Contains(i LiteralField) bool {
_, found := set[i]
return found
}

// ContainsAll determines if the given items are all in the set
-func (set literalFieldSet) ContainsAll(i ...LiteralField) bool {
+func (set LiteralFieldSet) ContainsAll(i ...LiteralField) bool {
for _, v := range i {
if !set.Contains(v) {
return false
@ -53,7 +53,7 @@ func (set literalFieldSet) ContainsAll(i ...LiteralField) bool {
}

// IsSubset determines if every item in the other set is in this set.
-func (set literalFieldSet) IsSubset(other literalFieldSet) bool {
+func (set LiteralFieldSet) IsSubset(other LiteralFieldSet) bool {
for elem := range set {
if !other.Contains(elem) {
return false
@ -63,13 +63,13 @@ func (set literalFieldSet) IsSubset(other literalFieldSet) bool {
}

// IsSuperset determines if every item of this set is in the other set.
-func (set literalFieldSet) IsSuperset(other literalFieldSet) bool {
+func (set LiteralFieldSet) IsSuperset(other LiteralFieldSet) bool {
return other.IsSubset(set)
}

// Union returns a new set with all items in both sets.
-func (set literalFieldSet) Union(other literalFieldSet) literalFieldSet {
+func (set LiteralFieldSet) Union(other LiteralFieldSet) LiteralFieldSet {
-unionedSet := NewliteralFieldSet()
+unionedSet := NewLiteralFieldSet()

for elem := range set {
unionedSet.Add(elem)
@ -81,8 +81,8 @@ func (set literalFieldSet) Union(other literalFieldSet) literalFieldSet {
}

// Intersect returns a new set with items that exist only in both sets.
-func (set literalFieldSet) Intersect(other literalFieldSet) literalFieldSet {
+func (set LiteralFieldSet) Intersect(other LiteralFieldSet) LiteralFieldSet {
-intersection := NewliteralFieldSet()
+intersection := NewLiteralFieldSet()
// loop over smaller set
if set.Cardinality() < other.Cardinality() {
for elem := range set {
@ -101,8 +101,8 @@ func (set literalFieldSet) Intersect(other literalFieldSet) literalFieldSet {
}

// Difference returns a new set with items in the current set but not in the other set
-func (set literalFieldSet) Difference(other literalFieldSet) literalFieldSet {
+func (set LiteralFieldSet) Difference(other LiteralFieldSet) LiteralFieldSet {
-differencedSet := NewliteralFieldSet()
+differencedSet := NewLiteralFieldSet()
for elem := range set {
if !other.Contains(elem) {
differencedSet.Add(elem)
@ -112,29 +112,29 @@ func (set literalFieldSet) Difference(other literalFieldSet) literalFieldSet {
}

// SymmetricDifference returns a new set with items in the current set or the other set but not in both.
-func (set literalFieldSet) SymmetricDifference(other literalFieldSet) literalFieldSet {
+func (set LiteralFieldSet) SymmetricDifference(other LiteralFieldSet) LiteralFieldSet {
aDiff := set.Difference(other)
bDiff := other.Difference(set)
return aDiff.Union(bDiff)
}

// Clear clears the entire set to be the empty set.
-func (set *literalFieldSet) Clear() {
+func (set *LiteralFieldSet) Clear() {
-*set = make(literalFieldSet)
+*set = make(LiteralFieldSet)
}

// Remove allows the removal of a single item in the set.
-func (set literalFieldSet) Remove(i LiteralField) {
+func (set LiteralFieldSet) Remove(i LiteralField) {
delete(set, i)
}

// Cardinality returns how many items are currently in the set.
-func (set literalFieldSet) Cardinality() int {
+func (set LiteralFieldSet) Cardinality() int {
return len(set)
}

-// Iter returns a channel of type literalField that you can range over.
+// Iter returns a channel of type LiteralField that you can range over.
-func (set literalFieldSet) Iter() <-chan LiteralField {
+func (set LiteralFieldSet) Iter() <-chan LiteralField {
ch := make(chan LiteralField)
go func() {
for elem := range set {
@ -149,7 +149,7 @@ func (set literalFieldSet) Iter() <-chan LiteralField {
// Equal determines if two sets are equal to each other.
// If they both are the same size and have the same items they are considered equal.
// Order of items is not relevent for sets to be equal.
-func (set literalFieldSet) Equal(other literalFieldSet) bool {
+func (set LiteralFieldSet) Equal(other LiteralFieldSet) bool {
if set.Cardinality() != other.Cardinality() {
return false
}
@ -163,8 +163,8 @@ func (set literalFieldSet) Equal(other literalFieldSet) bool {

// Clone returns a clone of the set.
// Does NOT clone the underlying elements.
-func (set literalFieldSet) Clone() literalFieldSet {
+func (set LiteralFieldSet) Clone() LiteralFieldSet {
-clonedSet := NewliteralFieldSet()
+clonedSet := NewLiteralFieldSet()
for elem := range set {
clonedSet.Add(elem)
}
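
The now-exported LiteralFieldSet is a plain map-backed set keyed by LiteralField. A small usage sketch, limited to the methods visible in the hunks above (illustrative only, not code from the commit):

// Sketch: exercising the LiteralFieldSet API shown above, inside the parser package.
func literalFieldSetExample() {
	fields := NewLiteralFieldSet("a", "b")
	added := fields.Add("c") // true: "c" was not present yet
	_ = added
	if fields.Contains("a") && fields.Cardinality() == 3 {
		// "a", "b" and "c" are all members at this point.
	}
	other := NewLiteralFieldSet("b", "d")
	union := fields.Union(other)      // {"a", "b", "c", "d"}
	common := fields.Intersect(other) // {"b"}
	_ = union.IsSuperset(common)      // true: every common element is in the union
}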
@ -71,11 +71,11 @@ func locFromTokenAST(begin *token, end ast.Node) ast.LocationRange {
// ---------------------------------------------------------------------------

type parser struct {
-t tokens
+t Tokens
currT int
}

-func makeParser(t tokens) *parser {
+func makeParser(t Tokens) *parser {
return &parser{
t: t,
}
@ -298,13 +298,14 @@ func (p *parser) parseObjectAssignmentOp() (plusSugar bool, hide ast.ObjectField
return
}

+// A LiteralField is a field of an object or object comprehension.
// +gen set
type LiteralField string

// Parse object or object comprehension without leading brace
func (p *parser) parseObjectRemainder(tok *token) (ast.Node, *token, error) {
var fields ast.ObjectFields
-literalFields := make(literalFieldSet)
+literalFields := make(LiteralFieldSet)
binds := make(ast.IdentifierSet)

gotComma := false
@ -1173,7 +1174,8 @@ func (p *parser) parse(prec precedence) (ast.Node, error) {

// ---------------------------------------------------------------------------

-func Parse(t tokens) (ast.Node, error) {
+// Parse parses a slice of tokens into a parse tree.
+func Parse(t Tokens) (ast.Node, error) {
p := makeParser(t)
expr, err := p.parse(maxPrecedence)
if err != nil {
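
With the type exported, the lex-then-parse flow in this package simply chains the two functions shown above; a hedged sketch (the snippet text and the helper name are illustrative):

// Sketch: lex a Jsonnet snippet and feed the resulting Tokens to Parse.
func parseSnippet(name, source string) (ast.Node, error) {
	toks, err := Lex(name, source)
	if err != nil {
		return nil, err // static error reported by the lexer
	}
	return Parse(toks) // Parse(t Tokens) builds the parse tree
}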
@ -32,14 +32,17 @@ type StaticError struct {
Msg string
}

+// MakeStaticErrorMsg returns a StaticError with a message.
func MakeStaticErrorMsg(msg string) StaticError {
return StaticError{Msg: msg}
}

+// MakeStaticError returns a StaticError with a message and a LocationRange.
func MakeStaticError(msg string, lr ast.LocationRange) StaticError {
return StaticError{Msg: msg, Loc: lr}
}

+// Error returns the string representation of a StaticError.
func (err StaticError) Error() string {
loc := ""
if err.Loc.IsSet() {
@ -55,6 +55,8 @@ func traceElementToTraceFrame(trace *TraceElement) TraceFrame {
return tf
}

+// TraceElement represents tracing information, including a location range and a
+// surrounding context.
// TODO(sbarzowski) better name
type TraceElement struct {
loc *ast.LocationRange
2 testdata/native_error.golden vendored
@ -1,4 +1,4 @@
-RUNTIME ERROR: Native function error
+RUNTIME ERROR: native function error
-------------------------------------------------
testdata/native_error:1:1-28 $

2 testdata/nonexistent_import.golden vendored
@ -1,4 +1,4 @@
-RUNTIME ERROR: Couldn't open import "no chance a file with this name exists": No match locally or in the Jsonnet library paths.
+RUNTIME ERROR: couldn't open import "no chance a file with this name exists": no match locally or in the Jsonnet library paths
-------------------------------------------------
testdata/nonexistent_import:1:1-51 $

2 testdata/nonexistent_import_crazy.golden vendored
@ -1,4 +1,4 @@
-RUNTIME ERROR: Couldn't open import "ąęółńśćźż \" ' \n\n\t\t": No match locally or in the Jsonnet library paths.
+RUNTIME ERROR: couldn't open import "ąęółńśćźż \" ' \n\n\t\t": no match locally or in the Jsonnet library paths
-------------------------------------------------
testdata/nonexistent_import_crazy:1:1-46 $
@ -181,6 +181,7 @@ func (f *bindingsUnboundField) bindToObject(sb selfBinding, origBindings binding
return f.inner.bindToObject(sb, upValues, fieldName)
}

+// PlusSuperUnboundField represents a `field+: ...` that hasn't been bound to an object.
type PlusSuperUnboundField struct {
inner unboundField
}
@ -287,12 +288,14 @@ func makeClosure(env environment, function *ast.Function) *closure {
}
}

+// NativeFunction represents a function implemented in Go.
type NativeFunction struct {
Func func([]interface{}) (interface{}, error)
Params ast.Identifiers
Name string
}

+// EvalCall evaluates a call to a NativeFunction and returns the result.
func (native *NativeFunction) EvalCall(arguments callArguments, e *evaluator) (value, error) {
flatArgs := flattenArgs(arguments, native.Parameters())
nativeArgs := make([]interface{}, 0, len(flatArgs))
@ -314,6 +317,7 @@ func (native *NativeFunction) EvalCall(arguments callArguments, e *evaluator) (v
return jsonToValue(e, resultJSON)
}

+// Parameters returns a NativeFunction's parameters.
func (native *NativeFunction) Parameters() Parameters {
return Parameters{required: native.Params}
}
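
The exported NativeFunction fields above (Func, Params, Name) are what a caller fills in to describe a Go-implemented function; how the value is wired into a VM is outside this diff. A hedged construction sketch from client code, assuming the root package import path and that arguments reach the Go callback as JSON-style values (numbers as float64):

package main

import (
	"fmt"
	"math"

	jsonnet "github.com/google/go-jsonnet"
	"github.com/google/go-jsonnet/ast"
)

func main() {
	// Describe a pow(base, exp) function backed by Go's math.Pow.
	pow := &jsonnet.NativeFunction{
		Name:   "pow",
		Params: ast.Identifiers{"base", "exp"},
		Func: func(args []interface{}) (interface{}, error) {
			base, okB := args[0].(float64)
			exp, okE := args[1].(float64)
			if !okB || !okE {
				return nil, fmt.Errorf("pow: expected two numbers")
			}
			return math.Pow(base, exp), nil
		},
	}
	fmt.Println(pow.Name, len(pow.Params))
}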
11 value.go
@ -73,6 +73,8 @@ func (v *valueBase) aValue() {}
// Primitive values
// -------------------------------------

+// valueString represents a string value, internally using a []rune for quick
+// indexing.
type valueString struct {
valueBase
// We use rune slices instead of strings for quick indexing
@ -319,6 +321,8 @@ func (f *valueFunction) getType() *valueType {
return functionType
}

+// Parameters represents required position and optional named parameters for a
+// function definition.
type Parameters struct {
required ast.Identifiers
optional []namedParameter
@ -399,8 +403,10 @@ func (sb selfBinding) super() selfBinding {
return selfBinding{self: sb.self, superDepth: sb.superDepth + 1}
}

+// Hidden represents wether to include hidden fields in a lookup.
type Hidden int

+// With/without hidden fields
const (
withHidden Hidden = iota
withoutHidden
@ -409,14 +415,13 @@ const (
func withHiddenFromBool(with bool) Hidden {
if with {
return withHidden
-} else {
-return withoutHidden
}
+return withoutHidden
}

// Hack - we need to distinguish not-checked-yet and no error situations
// so we have a special value for no error and nil means that we don't know yet.
-var errNoErrorInObjectInvariants = errors.New("No error - assertions passed")
+var errNoErrorInObjectInvariants = errors.New("no error - assertions passed")

type valueObjectBase struct {
valueBase
2 vm.go
@ -184,10 +184,12 @@ func snippetToAST(filename string, snippet string) (ast.Node, error) {
return node, nil
}

+// SnippetToAST parses a snippet and returns the resulting AST.
func SnippetToAST(filename string, snippet string) (ast.Node, error) {
return snippetToAST(filename, snippet)
}

+// Version returns the Jsonnet version number.
func Version() string {
return "v0.9.5"
}
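
The two exported entry points documented above can be called directly by client code; a brief hedged sketch (the import path is assumed, not part of the diff):

package main

import (
	"fmt"
	"log"

	jsonnet "github.com/google/go-jsonnet"
)

func main() {
	// Parse a snippet to an AST without evaluating it.
	node, err := jsonnet.SnippetToAST("example.jsonnet", "{ greeting: 'hello ' + 'world' }")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(node.Loc())        // location range covering the root node
	fmt.Println(jsonnet.Version()) // "v0.9.5" at the time of this change
}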