diff --git a/ast/ast.go b/ast/ast.go index fb326b8..57f0dd6 100644 --- a/ast/ast.go +++ b/ast/ast.go @@ -23,6 +23,8 @@ import ( // Identifier represents a variable / parameter / field name. //+gen set type Identifier string + +// Identifiers represents an Identifier slice. type Identifiers []Identifier // TODO(jbeda) implement interning of identifiers if necessary. The C++ @@ -30,8 +32,10 @@ type Identifiers []Identifier // --------------------------------------------------------------------------- +// Context represents the surrounding context of a node (e.g. a function it's in) type Context *string +// Node represents a node in the AST. type Node interface { Context() Context Loc() *LocationRange @@ -39,16 +43,21 @@ type Node interface { SetFreeVariables(Identifiers) SetContext(Context) } + +// Nodes represents a Node slice. type Nodes []Node // --------------------------------------------------------------------------- +// NodeBase holds fields common to all node types. type NodeBase struct { loc LocationRange context Context freeVariables Identifiers } +// NewNodeBase creates a new NodeBase from initial LocationRange and +// Identifiers. func NewNodeBase(loc LocationRange, freeVariables Identifiers) NodeBase { return NodeBase{ loc: loc, @@ -56,6 +65,7 @@ func NewNodeBase(loc LocationRange, freeVariables Identifiers) NodeBase { } } +// NewNodeBaseLoc creates a new NodeBase from an initial LocationRange. func NewNodeBaseLoc(loc LocationRange) NodeBase { return NodeBase{ loc: loc, @@ -63,32 +73,39 @@ func NewNodeBaseLoc(loc LocationRange) NodeBase { } } +// Loc returns a NodeBase's loc. func (n *NodeBase) Loc() *LocationRange { return &n.loc } +// FreeVariables returns a NodeBase's freeVariables. func (n *NodeBase) FreeVariables() Identifiers { return n.freeVariables } +// SetFreeVariables sets a NodeBase's freeVariables. func (n *NodeBase) SetFreeVariables(idents Identifiers) { n.freeVariables = idents } +// Context returns a NodeBase's context. 
func (n *NodeBase) Context() Context { return n.context } +// SetContext sets a NodeBase's context. func (n *NodeBase) SetContext(context Context) { n.context = context } // --------------------------------------------------------------------------- +// IfSpec represents an if-specification in a comprehension. type IfSpec struct { Expr Node } +// ForSpec represents a for-specification in a comprehension. // Example: // expr for x in arr1 for y in arr2 for z in arr3 // The order is the same as in python, i.e. the leftmost is the outermost. @@ -125,11 +142,14 @@ type Apply struct { TailStrict bool } +// NamedArgument represents a named argument to function call x=1. type NamedArgument struct { Name Identifier Arg Node } +// Arguments represents positional and named arguments to a function call +// f(x, y, z=1). type Arguments struct { Positional Nodes Named []NamedArgument @@ -179,8 +199,10 @@ type Assert struct { // --------------------------------------------------------------------------- +// BinaryOp represents a binary operator. type BinaryOp int +// Binary operators const ( BopMult BinaryOp = iota BopDiv @@ -237,6 +259,7 @@ var bopStrings = []string{ BopOr: "||", } +// BopMap is a map from binary operator token strings to BinaryOp values. var BopMap = map[string]BinaryOp{ "*": BopMult, "/": BopDiv, @@ -316,11 +339,14 @@ type Function struct { Body Node } +// NamedParameter represents an optional named parameter of a function. type NamedParameter struct { Name Identifier DefaultArg Node } +// Parameters represents the required positional parameters and optional named +// parameters to a function definition. type Parameters struct { Required Identifiers Optional []NamedParameter @@ -355,6 +381,7 @@ type Index struct { Id *Identifier } +// Slice represents an array slice a[begin:end:step]. type Slice struct { NodeBase Target Node @@ -373,6 +400,8 @@ type LocalBind struct { Body Node Fun *Function } + +// LocalBinds represents a LocalBind slice. 
type LocalBinds []LocalBind // Local represents local x = e; e. After desugaring, functionSugar is false. @@ -406,8 +435,10 @@ type LiteralNumber struct { // --------------------------------------------------------------------------- +// LiteralStringKind represents the kind of a literal string. type LiteralStringKind int +// Literal string kinds const ( StringSingle LiteralStringKind = iota StringDouble @@ -416,6 +447,8 @@ const ( VerbatimStringSingle ) +// FullyEscaped returns true iff the literal string kind may contain escape +// sequences that require unescaping. func (k LiteralStringKind) FullyEscaped() bool { switch k { case StringSingle, StringDouble: @@ -436,8 +469,10 @@ type LiteralString struct { // --------------------------------------------------------------------------- +// ObjectFieldKind represents the kind of an object field. type ObjectFieldKind int +// Kinds of object fields const ( ObjectAssert ObjectFieldKind = iota // assert expr2 [: expr3] where expr3 can be nil ObjectFieldID // id:[:[:]] expr2 @@ -449,14 +484,17 @@ const ( ObjectNullStr // null expr1 ) +// ObjectFieldHide represents the visibility of an object field. type ObjectFieldHide int +// Object field visibilities const ( ObjectFieldHidden ObjectFieldHide = iota // f:: e ObjectFieldInherit // f: e ObjectFieldVisible // f::: e ) +// ObjectField represents a field of an object or object comprehension. // TODO(sbarzowski) consider having separate types for various kinds type ObjectField struct { Kind ObjectFieldKind @@ -471,10 +509,12 @@ type ObjectField struct { Expr2, Expr3 Node // In scope of the object (can see self). } +// ObjectFieldLocalNoMethod creates a non-method local object field. func ObjectFieldLocalNoMethod(id *Identifier, body Node) ObjectField { return ObjectField{ObjectLocal, ObjectFieldVisible, false, false, nil, nil, id, nil, false, body, nil} } +// ObjectFields represents an ObjectField slice. 
type ObjectFields []ObjectField // Object represents object constructors { f: e ... }. @@ -489,12 +529,15 @@ type Object struct { // --------------------------------------------------------------------------- +// DesugaredObjectField represents a desugared object field. type DesugaredObjectField struct { Hide ObjectFieldHide Name Node Body Node PlusSuper bool } + +// DesugaredObjectFields represents a DesugaredObjectField slice. type DesugaredObjectFields []DesugaredObjectField // DesugaredObject represents object constructors { f: e ... } after @@ -524,7 +567,7 @@ type ObjectComp struct { // ( e ) type Parens struct { NodeBase - Inner Node + Inner Node } // --------------------------------------------------------------------------- @@ -544,7 +587,7 @@ type SuperIndex struct { Id *Identifier } -// Represents the e in super construct. +// InSuper represents the e in super construct. type InSuper struct { NodeBase Index Node @@ -552,8 +595,10 @@ type InSuper struct { // --------------------------------------------------------------------------- +// UnaryOp represents a unary operator. type UnaryOp int +// Unary operators const ( UopNot UnaryOp = iota UopBitwiseNot @@ -568,6 +613,7 @@ var uopStrings = []string{ UopMinus: "-", } +// UopMap is a map from unary operator token strings to UnaryOp values. var UopMap = map[string]UnaryOp{ "!": UopNot, "~": UopBitwiseNot, diff --git a/ast/location.go b/ast/location.go index 84eff08..85c7628 100644 --- a/ast/location.go +++ b/ast/location.go @@ -21,6 +21,7 @@ import ( "fmt" ) +// Source represents a source file. type Source struct { lines []string } @@ -62,6 +63,7 @@ type LocationRange struct { file *Source } +// LocationRangeBetween returns a LocationRange containing both a and b. 
func LocationRangeBetween(a, b *LocationRange) LocationRange { if a.file != b.file { panic("Cannot create a LocationRange between different files") @@ -93,22 +95,30 @@ func (lr *LocationRange) String() string { return fmt.Sprintf("%s(%v)-(%v)", filePrefix, lr.Begin.String(), lr.End.String()) } -func (l *LocationRange) WithCode() bool { - return l.Begin.Line != 0 +// WithCode returns true iff the LocationRange is linked to code. +// TODO: This is identical to lr.IsSet(). Is it required at all? +func (lr *LocationRange) WithCode() bool { + return lr.Begin.Line != 0 } -// This is useful for special locations, e.g. manifestation entry point. +// MakeLocationRangeMessage creates a pseudo-LocationRange with a message but no +// location information. This is useful for special locations, e.g. +// manifestation entry point. func MakeLocationRangeMessage(msg string) LocationRange { return LocationRange{FileName: msg} } +// MakeLocationRange creates a LocationRange. func MakeLocationRange(fn string, fc *Source, begin Location, end Location) LocationRange { return LocationRange{FileName: fn, file: fc, Begin: begin, End: end} } +// SourceProvider represents a source provider. +// TODO: Need an explanation of why this exists. type SourceProvider struct { } +// GetSnippet returns a code snippet corresponding to loc. func (sp *SourceProvider) GetSnippet(loc LocationRange) string { var result bytes.Buffer if loc.Begin.Line == 0 { @@ -126,6 +136,8 @@ func (sp *SourceProvider) GetSnippet(loc LocationRange) string { return result.String() } +// BuildSource transforms a source file string into a Source struct. +// TODO: This seems like a job for strings.Split() with a final \n touch-up. 
func BuildSource(s string) *Source { var result []string var lineBuf bytes.Buffer @@ -160,7 +172,7 @@ func trimToLine(loc LocationRange, line int) LocationRange { return loc } -// lineBeginning returns a part of the line directly before LocationRange +// LineBeginning returns the part of a line directly before LocationRange // for example: // local x = foo() // ^^^^^ <- LocationRange loc @@ -176,7 +188,7 @@ func LineBeginning(loc *LocationRange) LocationRange { } } -// lineEnding returns a part of the line directly after LocationRange +// LineEnding returns the part of a line directly after LocationRange // for example: // local x = foo() + test // ^^^^^ <- LocationRange loc diff --git a/builtins.go b/builtins.go index b1af409..6639ec2 100644 --- a/builtins.go +++ b/builtins.go @@ -554,9 +554,8 @@ var builtinExp = liftNumeric(func(f float64) float64 { res := math.Exp(f) if res == 0 && f > 0 { return math.Inf(1) - } else { - return res } + return res }) var builtinMantissa = liftNumeric(func(f float64) float64 { mantissa, _ := math.Frexp(f) @@ -720,13 +719,13 @@ func builtinNative(e *evaluator, namep potentialValue) (value, error) { } -type unaryBuiltin func(*evaluator, potentialValue) (value, error) -type binaryBuiltin func(*evaluator, potentialValue, potentialValue) (value, error) -type ternaryBuiltin func(*evaluator, potentialValue, potentialValue, potentialValue) (value, error) +type unaryBuiltinFunc func(*evaluator, potentialValue) (value, error) +type binaryBuiltinFunc func(*evaluator, potentialValue, potentialValue) (value, error) +type ternaryBuiltinFunc func(*evaluator, potentialValue, potentialValue, potentialValue) (value, error) -type UnaryBuiltin struct { +type unaryBuiltin struct { name ast.Identifier - function unaryBuiltin + function unaryBuiltinFunc parameters ast.Identifiers } @@ -737,22 +736,22 @@ func getBuiltinEvaluator(e *evaluator, name ast.Identifier) *evaluator { return &evaluator{i: e.i, trace: &trace} } -func (b *UnaryBuiltin) 
EvalCall(args callArguments, e *evaluator) (value, error) { +func (b *unaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) { flatArgs := flattenArgs(args, b.Parameters()) return b.function(getBuiltinEvaluator(e, b.name), flatArgs[0]) } -func (b *UnaryBuiltin) Parameters() Parameters { +func (b *unaryBuiltin) Parameters() Parameters { return Parameters{required: b.parameters} } -func (b *UnaryBuiltin) Name() ast.Identifier { +func (b *unaryBuiltin) Name() ast.Identifier { return b.name } -type BinaryBuiltin struct { +type binaryBuiltin struct { name ast.Identifier - function binaryBuiltin + function binaryBuiltinFunc parameters ast.Identifiers } @@ -781,35 +780,35 @@ func flattenArgs(args callArguments, params Parameters) []potentialValue { return flatArgs } -func (b *BinaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) { +func (b *binaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) { flatArgs := flattenArgs(args, b.Parameters()) return b.function(getBuiltinEvaluator(e, b.name), flatArgs[0], flatArgs[1]) } -func (b *BinaryBuiltin) Parameters() Parameters { +func (b *binaryBuiltin) Parameters() Parameters { return Parameters{required: b.parameters} } -func (b *BinaryBuiltin) Name() ast.Identifier { +func (b *binaryBuiltin) Name() ast.Identifier { return b.name } -type TernaryBuiltin struct { +type ternaryBuiltin struct { name ast.Identifier - function ternaryBuiltin + function ternaryBuiltinFunc parameters ast.Identifiers } -func (b *TernaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) { +func (b *ternaryBuiltin) EvalCall(args callArguments, e *evaluator) (value, error) { flatArgs := flattenArgs(args, b.Parameters()) return b.function(getBuiltinEvaluator(e, b.name), flatArgs[0], flatArgs[1], flatArgs[2]) } -func (b *TernaryBuiltin) Parameters() Parameters { +func (b *ternaryBuiltin) Parameters() Parameters { return Parameters{required: b.parameters} } -func (b *TernaryBuiltin) Name() 
ast.Identifier { +func (b *ternaryBuiltin) Name() ast.Identifier { return b.name } @@ -820,38 +819,38 @@ var desugaredBop = map[ast.BinaryOp]ast.Identifier{ ast.BopIn: "objectHasAll", } -var bopBuiltins = []*BinaryBuiltin{ - ast.BopMult: &BinaryBuiltin{name: "operator*", function: builtinMult, parameters: ast.Identifiers{"x", "y"}}, - ast.BopDiv: &BinaryBuiltin{name: "operator/", function: builtinDiv, parameters: ast.Identifiers{"x", "y"}}, +var bopBuiltins = []*binaryBuiltin{ + ast.BopMult: &binaryBuiltin{name: "operator*", function: builtinMult, parameters: ast.Identifiers{"x", "y"}}, + ast.BopDiv: &binaryBuiltin{name: "operator/", function: builtinDiv, parameters: ast.Identifiers{"x", "y"}}, // ast.BopPercent: , - ast.BopPlus: &BinaryBuiltin{name: "operator+", function: builtinPlus, parameters: ast.Identifiers{"x", "y"}}, - ast.BopMinus: &BinaryBuiltin{name: "operator-", function: builtinMinus, parameters: ast.Identifiers{"x", "y"}}, + ast.BopPlus: &binaryBuiltin{name: "operator+", function: builtinPlus, parameters: ast.Identifiers{"x", "y"}}, + ast.BopMinus: &binaryBuiltin{name: "operator-", function: builtinMinus, parameters: ast.Identifiers{"x", "y"}}, - ast.BopShiftL: &BinaryBuiltin{name: "operator<<", function: builtinShiftL, parameters: ast.Identifiers{"x", "y"}}, - ast.BopShiftR: &BinaryBuiltin{name: "operator>>", function: builtinShiftR, parameters: ast.Identifiers{"x", "y"}}, + ast.BopShiftL: &binaryBuiltin{name: "operator<<", function: builtinShiftL, parameters: ast.Identifiers{"x", "y"}}, + ast.BopShiftR: &binaryBuiltin{name: "operator>>", function: builtinShiftR, parameters: ast.Identifiers{"x", "y"}}, - ast.BopGreater: &BinaryBuiltin{name: "operator>", function: builtinGreater, parameters: ast.Identifiers{"x", "y"}}, - ast.BopGreaterEq: &BinaryBuiltin{name: "operator>=", function: builtinGreaterEq, parameters: ast.Identifiers{"x", "y"}}, - ast.BopLess: &BinaryBuiltin{name: "operator<,", function: builtinLess, parameters: ast.Identifiers{"x", "y"}}, 
- ast.BopLessEq: &BinaryBuiltin{name: "operator<=", function: builtinLessEq, parameters: ast.Identifiers{"x", "y"}}, + ast.BopGreater: &binaryBuiltin{name: "operator>", function: builtinGreater, parameters: ast.Identifiers{"x", "y"}}, + ast.BopGreaterEq: &binaryBuiltin{name: "operator>=", function: builtinGreaterEq, parameters: ast.Identifiers{"x", "y"}}, + ast.BopLess: &binaryBuiltin{name: "operator<", function: builtinLess, parameters: ast.Identifiers{"x", "y"}}, + ast.BopLessEq: &binaryBuiltin{name: "operator<=", function: builtinLessEq, parameters: ast.Identifiers{"x", "y"}}, // bopManifestEqual: , // bopManifestUnequal: , - ast.BopBitwiseAnd: &BinaryBuiltin{name: "operator&", function: builtinBitwiseAnd, parameters: ast.Identifiers{"x", "y"}}, - ast.BopBitwiseXor: &BinaryBuiltin{name: "operator^", function: builtinBitwiseXor, parameters: ast.Identifiers{"x", "y"}}, - ast.BopBitwiseOr: &BinaryBuiltin{name: "operator|", function: builtinBitwiseOr, parameters: ast.Identifiers{"x", "y"}}, + ast.BopBitwiseAnd: &binaryBuiltin{name: "operator&", function: builtinBitwiseAnd, parameters: ast.Identifiers{"x", "y"}}, + ast.BopBitwiseXor: &binaryBuiltin{name: "operator^", function: builtinBitwiseXor, parameters: ast.Identifiers{"x", "y"}}, + ast.BopBitwiseOr: &binaryBuiltin{name: "operator|", function: builtinBitwiseOr, parameters: ast.Identifiers{"x", "y"}}, - ast.BopAnd: &BinaryBuiltin{name: "operator&&", function: builtinAnd, parameters: ast.Identifiers{"x", "y"}}, - ast.BopOr: &BinaryBuiltin{name: "operator||", function: builtinOr, parameters: ast.Identifiers{"x", "y"}}, + ast.BopAnd: &binaryBuiltin{name: "operator&&", function: builtinAnd, parameters: ast.Identifiers{"x", "y"}}, + ast.BopOr: &binaryBuiltin{name: "operator||", function: builtinOr, parameters: ast.Identifiers{"x", "y"}}, } -var uopBuiltins = []*UnaryBuiltin{ - ast.UopNot: &UnaryBuiltin{name: "operator!", function: builtinNegation, parameters: ast.Identifiers{"x"}}, - ast.UopBitwiseNot: 
&UnaryBuiltin{name: "operator~", function: builtinBitNeg, parameters: ast.Identifiers{"x"}}, - ast.UopPlus: &UnaryBuiltin{name: "operator+ (unary)", function: builtinIdentity, parameters: ast.Identifiers{"x"}}, - ast.UopMinus: &UnaryBuiltin{name: "operator- (unary)", function: builtinUnaryMinus, parameters: ast.Identifiers{"x"}}, +var uopBuiltins = []*unaryBuiltin{ + ast.UopNot: &unaryBuiltin{name: "operator!", function: builtinNegation, parameters: ast.Identifiers{"x"}}, + ast.UopBitwiseNot: &unaryBuiltin{name: "operator~", function: builtinBitNeg, parameters: ast.Identifiers{"x"}}, + ast.UopPlus: &unaryBuiltin{name: "operator+ (unary)", function: builtinIdentity, parameters: ast.Identifiers{"x"}}, + ast.UopMinus: &unaryBuiltin{name: "operator- (unary)", function: builtinUnaryMinus, parameters: ast.Identifiers{"x"}}, } type builtin interface { @@ -868,39 +867,39 @@ func buildBuiltinMap(builtins []builtin) map[string]evalCallable { } var funcBuiltins = buildBuiltinMap([]builtin{ - &UnaryBuiltin{name: "extVar", function: builtinExtVar, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "length", function: builtinLength, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "toString", function: builtinToString, parameters: ast.Identifiers{"a"}}, - &BinaryBuiltin{name: "makeArray", function: builtinMakeArray, parameters: ast.Identifiers{"sz", "func"}}, - &BinaryBuiltin{name: "flatMap", function: builtinFlatMap, parameters: ast.Identifiers{"func", "arr"}}, - &BinaryBuiltin{name: "join", function: builtinJoin, parameters: ast.Identifiers{"sep", "arr"}}, - &BinaryBuiltin{name: "filter", function: builtinFilter, parameters: ast.Identifiers{"func", "arr"}}, - &BinaryBuiltin{name: "range", function: builtinRange, parameters: ast.Identifiers{"from", "to"}}, - &BinaryBuiltin{name: "primitiveEquals", function: primitiveEquals, parameters: ast.Identifiers{"sz", "func"}}, - &BinaryBuiltin{name: "objectFieldsEx", function: builtinObjectFieldsEx, parameters: 
ast.Identifiers{"obj", "hidden"}}, - &TernaryBuiltin{name: "objectHasEx", function: builtinObjectHasEx, parameters: ast.Identifiers{"obj", "fname", "hidden"}}, - &UnaryBuiltin{name: "type", function: builtinType, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "char", function: builtinChar, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "codepoint", function: builtinCodepoint, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "ceil", function: builtinCeil, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "floor", function: builtinFloor, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "sqrt", function: builtinSqrt, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "sin", function: builtinSin, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "cos", function: builtinCos, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "tan", function: builtinTan, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "asin", function: builtinAsin, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "acos", function: builtinAcos, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "atan", function: builtinAtan, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "log", function: builtinLog, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "exp", function: builtinExp, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "mantissa", function: builtinMantissa, parameters: ast.Identifiers{"x"}}, - &UnaryBuiltin{name: "exponent", function: builtinExponent, parameters: ast.Identifiers{"x"}}, - &BinaryBuiltin{name: "pow", function: builtinPow, parameters: ast.Identifiers{"base", "exp"}}, - &BinaryBuiltin{name: "modulo", function: builtinModulo, parameters: ast.Identifiers{"x", "y"}}, - &UnaryBuiltin{name: "md5", function: builtinMd5, parameters: ast.Identifiers{"x"}}, - &TernaryBuiltin{name: "strReplace", function: builtinStrReplace, parameters: ast.Identifiers{"str", "from", "to"}}, - 
&UnaryBuiltin{name: "native", function: builtinNative, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "extVar", function: builtinExtVar, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "length", function: builtinLength, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "toString", function: builtinToString, parameters: ast.Identifiers{"a"}}, + &binaryBuiltin{name: "makeArray", function: builtinMakeArray, parameters: ast.Identifiers{"sz", "func"}}, + &binaryBuiltin{name: "flatMap", function: builtinFlatMap, parameters: ast.Identifiers{"func", "arr"}}, + &binaryBuiltin{name: "join", function: builtinJoin, parameters: ast.Identifiers{"sep", "arr"}}, + &binaryBuiltin{name: "filter", function: builtinFilter, parameters: ast.Identifiers{"func", "arr"}}, + &binaryBuiltin{name: "range", function: builtinRange, parameters: ast.Identifiers{"from", "to"}}, + &binaryBuiltin{name: "primitiveEquals", function: primitiveEquals, parameters: ast.Identifiers{"sz", "func"}}, + &binaryBuiltin{name: "objectFieldsEx", function: builtinObjectFieldsEx, parameters: ast.Identifiers{"obj", "hidden"}}, + &ternaryBuiltin{name: "objectHasEx", function: builtinObjectHasEx, parameters: ast.Identifiers{"obj", "fname", "hidden"}}, + &unaryBuiltin{name: "type", function: builtinType, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "char", function: builtinChar, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "codepoint", function: builtinCodepoint, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "ceil", function: builtinCeil, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "floor", function: builtinFloor, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "sqrt", function: builtinSqrt, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "sin", function: builtinSin, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "cos", function: builtinCos, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "tan", function: 
builtinTan, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "asin", function: builtinAsin, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "acos", function: builtinAcos, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "atan", function: builtinAtan, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "log", function: builtinLog, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "exp", function: builtinExp, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "mantissa", function: builtinMantissa, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "exponent", function: builtinExponent, parameters: ast.Identifiers{"x"}}, + &binaryBuiltin{name: "pow", function: builtinPow, parameters: ast.Identifiers{"base", "exp"}}, + &binaryBuiltin{name: "modulo", function: builtinModulo, parameters: ast.Identifiers{"x", "y"}}, + &unaryBuiltin{name: "md5", function: builtinMd5, parameters: ast.Identifiers{"x"}}, + &ternaryBuiltin{name: "strReplace", function: builtinStrReplace, parameters: ast.Identifiers{"str", "from", "to"}}, + &unaryBuiltin{name: "native", function: builtinNative, parameters: ast.Identifiers{"x"}}, // internal - &UnaryBuiltin{name: "$objectFlatMerge", function: builtinUglyObjectFlatMerge, parameters: ast.Identifiers{"x"}}, + &unaryBuiltin{name: "$objectFlatMerge", function: builtinUglyObjectFlatMerge, parameters: ast.Identifiers{"x"}}, }) diff --git a/desugarer.go b/desugarer.go index 42e53c3..5bb4a6c 100644 --- a/desugarer.go +++ b/desugarer.go @@ -28,7 +28,12 @@ import ( ) func makeStr(s string) *ast.LiteralString { - return &ast.LiteralString{ast.NodeBase{}, s, ast.StringDouble, ""} + return &ast.LiteralString{ + NodeBase: ast.NodeBase{}, + Value: s, + Kind: ast.StringDouble, + BlockIndent: "", + } } func stringUnescape(loc *ast.LocationRange, s string) (string, error) { @@ -130,7 +135,11 @@ func desugarFields(location ast.LocationRange, fields *ast.ObjectFields, objLeve continue } if len(binds) > 0 { - 
field.Expr2 = &ast.Local{ast.NewNodeBaseLoc(*field.Expr2.Loc()), binds, field.Expr2} + field.Expr2 = &ast.Local{ + NodeBase: ast.NewNodeBaseLoc(*field.Expr2.Loc()), + Binds: binds, + Body: field.Expr2, + } } newFields = append(newFields, field) } @@ -265,13 +274,22 @@ func buildDesugaredObject(nodeBase ast.NodeBase, fields ast.ObjectFields) *ast.D if field.Kind == ast.ObjectAssert { newAsserts = append(newAsserts, field.Expr2) } else if field.Kind == ast.ObjectFieldExpr { - newFields = append(newFields, ast.DesugaredObjectField{field.Hide, field.Expr1, field.Expr2, field.SuperSugar}) + newFields = append(newFields, ast.DesugaredObjectField{ + Hide: field.Hide, + Name: field.Expr1, + Body: field.Expr2, + PlusSuper: field.SuperSugar, + }) } else { panic(fmt.Sprintf("INTERNAL ERROR: field should have been desugared: %v", field.Kind)) } } - return &ast.DesugaredObject{nodeBase, newAsserts, newFields} + return &ast.DesugaredObject{ + NodeBase: nodeBase, + Asserts: newAsserts, + Fields: newFields, + } } // Desugar Jsonnet expressions to reduce the number of constructs the rest of the implementation diff --git a/error_formatter.go b/error_formatter.go index 533f441..3a79a1f 100644 --- a/error_formatter.go +++ b/error_formatter.go @@ -25,6 +25,7 @@ import ( "github.com/google/go-jsonnet/parser" ) +// An ErrorFormatter formats errors with stacktraces and color. type ErrorFormatter interface { // Format static, runtime, and unexpected errors prior to printing them. Format(err error) string @@ -36,6 +37,7 @@ type ErrorFormatter interface { SetColorFormatter(color ColorFormatter) } +// ColorFormatter represents a function that writes to the terminal using color. 
type ColorFormatter func(w io.Writer, f string, a ...interface{}) (n int, err error) var _ ErrorFormatter = &termErrorFormatter{} diff --git a/imports.go b/imports.go index eec7cb2..c717e54 100644 --- a/imports.go +++ b/imports.go @@ -23,15 +23,18 @@ import ( "path" ) +// ImportedData represents imported data and where it came from. type ImportedData struct { FoundHere string Content string } +// An Importer imports data from a path. type Importer interface { Import(codeDir string, importedPath string) (*ImportedData, error) } +// ImportCacheValue represents a value in an imported-data cache. type ImportCacheValue struct { // nil if we got an error data *ImportedData @@ -50,11 +53,13 @@ type importCacheKey struct { type importCacheMap map[importCacheKey]*ImportCacheValue +// ImportCache represents a cache of imported data. type ImportCache struct { cache importCacheMap importer Importer } +// MakeImportCache creates an ImportCache using an importer. func MakeImportCache(importer Importer) *ImportCache { return &ImportCache{importer: importer, cache: make(importCacheMap)} } @@ -72,6 +77,7 @@ func (cache *ImportCache) importData(key importCacheKey) *ImportCacheValue { return cached } +// ImportString imports a string, caches it and then returns it. func (cache *ImportCache) ImportString(codeDir, importedPath string, e *evaluator) (*valueString, error) { cached := cache.importData(importCacheKey{codeDir, importedPath}) if cached.err != nil { @@ -93,6 +99,7 @@ func codeToPV(e *evaluator, filename string, code string) potentialValue { return makeThunk(makeInitialEnv(filename, e.i.baseStd), node) } +// ImportCode imports code from a path. 
func (cache *ImportCache) ImportCode(codeDir, importedPath string, e *evaluator) (value, error) { cached := cache.importData(importCacheKey{codeDir, importedPath}) if cached.err != nil { @@ -108,6 +115,7 @@ func (cache *ImportCache) ImportCode(codeDir, importedPath string, e *evaluator) // Concrete importers // ------------------------------------- +// FileImporter imports data from files. type FileImporter struct { JPaths []string } @@ -126,6 +134,7 @@ func tryPath(dir, importedPath string) (found bool, content []byte, foundHere st return true, content, absPath, err } +// Import imports a file. func (importer *FileImporter) Import(dir, importedPath string) (*ImportedData, error) { found, content, foundHere, err := tryPath(dir, importedPath) if err != nil { @@ -140,18 +149,20 @@ func (importer *FileImporter) Import(dir, importedPath string) (*ImportedData, e } if !found { - return nil, fmt.Errorf("Couldn't open import %#v: No match locally or in the Jsonnet library paths.", importedPath) + return nil, fmt.Errorf("couldn't open import %#v: no match locally or in the Jsonnet library paths", importedPath) } return &ImportedData{Content: string(content), FoundHere: foundHere}, nil } +// MemoryImporter "imports" data from an in-memory map. type MemoryImporter struct { Data map[string]string } +// Import imports a map entry. 
func (importer *MemoryImporter) Import(dir, importedPath string) (*ImportedData, error) { if content, ok := importer.Data[importedPath]; ok { return &ImportedData{Content: content, FoundHere: importedPath}, nil } - return nil, fmt.Errorf("Import not available %v", importedPath) + return nil, fmt.Errorf("import not available %v", importedPath) } diff --git a/interpreter.go b/interpreter.go index b6dfdb4..331e7e8 100644 --- a/interpreter.go +++ b/interpreter.go @@ -758,9 +758,9 @@ func (i *interpreter) manifestAndSerializeMulti(trace *TraceElement, v value) (r } switch json := json.(type) { case map[string]interface{}: - for filename, fileJson := range json { + for filename, fileJSON := range json { var buf bytes.Buffer - serializeJSON(fileJson, true, "", &buf) + serializeJSON(fileJSON, true, "", &buf) buf.WriteString("\n") r[filename] = buf.String() } diff --git a/linter/find_variables.go b/linter/find_variables.go index 8ed1628..403d235 100644 --- a/linter/find_variables.go +++ b/linter/find_variables.go @@ -5,10 +5,10 @@ import ( "github.com/google/go-jsonnet/parser" ) -type vScope map[ast.Identifier]*Variable +type vScope map[ast.Identifier]*variable func addVar(name ast.Identifier, node ast.Node, info *LintingInfo, scope vScope, param bool) { - v := Variable{ + v := variable{ name: name, declNode: node, uses: nil, diff --git a/linter/linter.go b/linter/linter.go index fd9d7b1..1fde5f7 100644 --- a/linter/linter.go +++ b/linter/linter.go @@ -7,6 +7,8 @@ import ( "github.com/google/go-jsonnet/parser" ) +// ErrorWriter encapsulates a writer and an error state indicating when at least +// one error has been written to the writer. 
type ErrorWriter struct { ErrorsFound bool Writer io.Writer @@ -17,7 +19,7 @@ func (e *ErrorWriter) writeError(err parser.StaticError) { e.Writer.Write([]byte(err.Error() + "\n")) } -type Variable struct { +type variable struct { name ast.Identifier declNode ast.Node uses []ast.Node @@ -29,14 +31,15 @@ type Variable struct { // It is global, i.e. it holds the same data regardless of scope we're // currently analyzing. type LintingInfo struct { - variables []Variable + variables []variable } +// Lint analyses a node and reports any issues it encounters to an error writer. func Lint(node ast.Node, e *ErrorWriter) { lintingInfo := LintingInfo{ variables: nil, } - std := Variable{ + std := variable{ name: "std", declNode: nil, uses: nil, diff --git a/main_test.go b/main_test.go index 44542c9..d5f3a53 100644 --- a/main_test.go +++ b/main_test.go @@ -85,10 +85,10 @@ var jsonToString = &NativeFunction{ } var nativeError = &NativeFunction{ - Name: "nativeError", + Name: "nativeError", Params: ast.Identifiers{}, Func: func(x []interface{}) (interface{}, error) { - return nil, errors.New("Native function error") + return nil, errors.New("native function error") }, } @@ -172,9 +172,8 @@ func runJsonnetCommand(i jsonnetInput) jsonnetResult { func runJsonnet(i jsonnetInput) jsonnetResult { if jsonnetCmd != nil && *jsonnetCmd != "" { return runJsonnetCommand(i) - } else { - return runInternalJsonnet(i) } + return runInternalJsonnet(i) } func runTest(t *testing.T, test *mainTest) { diff --git a/parser/context.go b/parser/context.go index ab3a4b5..2fc84f6 100644 --- a/parser/context.go +++ b/parser/context.go @@ -278,6 +278,7 @@ func specialChildren(node ast.Node) []ast.Node { panic(fmt.Sprintf("specialChildren: Unknown node %#v", node)) } +// Children returns all children of a node. func Children(node ast.Node) []ast.Node { var result []ast.Node result = append(result, directChildren(node)...) 
diff --git a/parser/lexer.go b/parser/lexer.go index c87eda9..c63817a 100644 --- a/parser/lexer.go +++ b/parser/lexer.go @@ -165,7 +165,8 @@ type token struct { loc ast.LocationRange } -type tokens []token +// Tokens is a slice of token structs. +type Tokens []token func (t *token) String() string { if t.data == "" { @@ -250,7 +251,7 @@ type lexer struct { pos position // Current position in input prev position // Previous position in input - tokens tokens // The tokens that we've generated so far + tokens Tokens // The tokens that we've generated so far // Information about the token we are working on right now fodder fodder @@ -682,7 +683,8 @@ func (l *lexer) lexSymbol() error { return nil } -func Lex(fn string, input string) (tokens, error) { +// Lex returns a slice of tokens recognised in input. +func Lex(fn string, input string) (Tokens, error) { l := makeLexer(fn, input) var err error diff --git a/parser/lexer_test.go b/parser/lexer_test.go index 14a3250..90ffa61 100644 --- a/parser/lexer_test.go +++ b/parser/lexer_test.go @@ -22,7 +22,7 @@ import ( type lexTest struct { name string input string - tokens tokens + tokens Tokens errString string } @@ -31,92 +31,92 @@ var ( ) var lexTests = []lexTest{ - {"empty", "", tokens{}, ""}, - {"whitespace", " \t\n\r\r\n", tokens{}, ""}, + {"empty", "", Tokens{}, ""}, + {"whitespace", " \t\n\r\r\n", Tokens{}, ""}, - {"brace L", "{", tokens{{kind: tokenBraceL, data: "{"}}, ""}, - {"brace R", "}", tokens{{kind: tokenBraceR, data: "}"}}, ""}, - {"bracket L", "[", tokens{{kind: tokenBracketL, data: "["}}, ""}, - {"bracket R", "]", tokens{{kind: tokenBracketR, data: "]"}}, ""}, - {"colon", ":", tokens{{kind: tokenOperator, data: ":"}}, ""}, - {"colon2", "::", tokens{{kind: tokenOperator, data: "::"}}, ""}, - {"colon3", ":::", tokens{{kind: tokenOperator, data: ":::"}}, ""}, - {"arrow right", "->", tokens{{kind: tokenOperator, data: "->"}}, ""}, - {"less than minus", "<-", tokens{{kind: tokenOperator, data: "<"}, + {"brace L", 
"{", Tokens{{kind: tokenBraceL, data: "{"}}, ""}, + {"brace R", "}", Tokens{{kind: tokenBraceR, data: "}"}}, ""}, + {"bracket L", "[", Tokens{{kind: tokenBracketL, data: "["}}, ""}, + {"bracket R", "]", Tokens{{kind: tokenBracketR, data: "]"}}, ""}, + {"colon", ":", Tokens{{kind: tokenOperator, data: ":"}}, ""}, + {"colon2", "::", Tokens{{kind: tokenOperator, data: "::"}}, ""}, + {"colon3", ":::", Tokens{{kind: tokenOperator, data: ":::"}}, ""}, + {"arrow right", "->", Tokens{{kind: tokenOperator, data: "->"}}, ""}, + {"less than minus", "<-", Tokens{{kind: tokenOperator, data: "<"}, {kind: tokenOperator, data: "-"}}, ""}, - {"comma", ",", tokens{{kind: tokenComma, data: ","}}, ""}, - {"dollar", "$", tokens{{kind: tokenDollar, data: "$"}}, ""}, - {"dot", ".", tokens{{kind: tokenDot, data: "."}}, ""}, - {"paren L", "(", tokens{{kind: tokenParenL, data: "("}}, ""}, - {"paren R", ")", tokens{{kind: tokenParenR, data: ")"}}, ""}, - {"semicolon", ";", tokens{{kind: tokenSemicolon, data: ";"}}, ""}, + {"comma", ",", Tokens{{kind: tokenComma, data: ","}}, ""}, + {"dollar", "$", Tokens{{kind: tokenDollar, data: "$"}}, ""}, + {"dot", ".", Tokens{{kind: tokenDot, data: "."}}, ""}, + {"paren L", "(", Tokens{{kind: tokenParenL, data: "("}}, ""}, + {"paren R", ")", Tokens{{kind: tokenParenR, data: ")"}}, ""}, + {"semicolon", ";", Tokens{{kind: tokenSemicolon, data: ";"}}, ""}, - {"not 1", "!", tokens{{kind: tokenOperator, data: "!"}}, ""}, - {"not 2", "! ", tokens{{kind: tokenOperator, data: "!"}}, ""}, - {"not equal", "!=", tokens{{kind: tokenOperator, data: "!="}}, ""}, - {"tilde", "~", tokens{{kind: tokenOperator, data: "~"}}, ""}, - {"plus", "+", tokens{{kind: tokenOperator, data: "+"}}, ""}, - {"minus", "-", tokens{{kind: tokenOperator, data: "-"}}, ""}, + {"not 1", "!", Tokens{{kind: tokenOperator, data: "!"}}, ""}, + {"not 2", "! 
", Tokens{{kind: tokenOperator, data: "!"}}, ""}, + {"not equal", "!=", Tokens{{kind: tokenOperator, data: "!="}}, ""}, + {"tilde", "~", Tokens{{kind: tokenOperator, data: "~"}}, ""}, + {"plus", "+", Tokens{{kind: tokenOperator, data: "+"}}, ""}, + {"minus", "-", Tokens{{kind: tokenOperator, data: "-"}}, ""}, - {"number 0", "0", tokens{{kind: tokenNumber, data: "0"}}, ""}, - {"number 1", "1", tokens{{kind: tokenNumber, data: "1"}}, ""}, - {"number 1.0", "1.0", tokens{{kind: tokenNumber, data: "1.0"}}, ""}, - {"number 0.10", "0.10", tokens{{kind: tokenNumber, data: "0.10"}}, ""}, - {"number 0e100", "0e100", tokens{{kind: tokenNumber, data: "0e100"}}, ""}, - {"number 1e100", "1e100", tokens{{kind: tokenNumber, data: "1e100"}}, ""}, - {"number 1.1e100", "1.1e100", tokens{{kind: tokenNumber, data: "1.1e100"}}, ""}, - {"number 1.1e-100", "1.1e-100", tokens{{kind: tokenNumber, data: "1.1e-100"}}, ""}, - {"number 1.1e+100", "1.1e+100", tokens{{kind: tokenNumber, data: "1.1e+100"}}, ""}, - {"number 0100", "0100", tokens{ + {"number 0", "0", Tokens{{kind: tokenNumber, data: "0"}}, ""}, + {"number 1", "1", Tokens{{kind: tokenNumber, data: "1"}}, ""}, + {"number 1.0", "1.0", Tokens{{kind: tokenNumber, data: "1.0"}}, ""}, + {"number 0.10", "0.10", Tokens{{kind: tokenNumber, data: "0.10"}}, ""}, + {"number 0e100", "0e100", Tokens{{kind: tokenNumber, data: "0e100"}}, ""}, + {"number 1e100", "1e100", Tokens{{kind: tokenNumber, data: "1e100"}}, ""}, + {"number 1.1e100", "1.1e100", Tokens{{kind: tokenNumber, data: "1.1e100"}}, ""}, + {"number 1.1e-100", "1.1e-100", Tokens{{kind: tokenNumber, data: "1.1e-100"}}, ""}, + {"number 1.1e+100", "1.1e+100", Tokens{{kind: tokenNumber, data: "1.1e+100"}}, ""}, + {"number 0100", "0100", Tokens{ {kind: tokenNumber, data: "0"}, {kind: tokenNumber, data: "100"}, }, ""}, - {"number 10+10", "10+10", tokens{ + {"number 10+10", "10+10", Tokens{ {kind: tokenNumber, data: "10"}, {kind: tokenOperator, data: "+"}, {kind: tokenNumber, data: "10"}, }, 
""}, - {"number 1.+3", "1.+3", tokens{}, "number 1.+3:1:3 Couldn't lex number, junk after decimal point: '+'"}, - {"number 1e!", "1e!", tokens{}, "number 1e!:1:3 Couldn't lex number, junk after 'E': '!'"}, - {"number 1e+!", "1e+!", tokens{}, "number 1e+!:1:4 Couldn't lex number, junk after exponent sign: '!'"}, + {"number 1.+3", "1.+3", Tokens{}, "number 1.+3:1:3 Couldn't lex number, junk after decimal point: '+'"}, + {"number 1e!", "1e!", Tokens{}, "number 1e!:1:3 Couldn't lex number, junk after 'E': '!'"}, + {"number 1e+!", "1e+!", Tokens{}, "number 1e+!:1:4 Couldn't lex number, junk after exponent sign: '!'"}, - {"double string \"hi\"", "\"hi\"", tokens{{kind: tokenStringDouble, data: "hi"}}, ""}, - {"double string \"hi nl\"", "\"hi\n\"", tokens{{kind: tokenStringDouble, data: "hi\n"}}, ""}, - {"double string \"hi\\\"\"", "\"hi\\\"\"", tokens{{kind: tokenStringDouble, data: "hi\\\""}}, ""}, - {"double string \"hi\\nl\"", "\"hi\\\n\"", tokens{{kind: tokenStringDouble, data: "hi\\\n"}}, ""}, - {"double string \"hi", "\"hi", tokens{}, "double string \"hi:1:1 Unterminated String"}, + {"double string \"hi\"", "\"hi\"", Tokens{{kind: tokenStringDouble, data: "hi"}}, ""}, + {"double string \"hi nl\"", "\"hi\n\"", Tokens{{kind: tokenStringDouble, data: "hi\n"}}, ""}, + {"double string \"hi\\\"\"", "\"hi\\\"\"", Tokens{{kind: tokenStringDouble, data: "hi\\\""}}, ""}, + {"double string \"hi\\nl\"", "\"hi\\\n\"", Tokens{{kind: tokenStringDouble, data: "hi\\\n"}}, ""}, + {"double string \"hi", "\"hi", Tokens{}, "double string \"hi:1:1 Unterminated String"}, - {"single string 'hi'", "'hi'", tokens{{kind: tokenStringSingle, data: "hi"}}, ""}, - {"single string 'hi nl'", "'hi\n'", tokens{{kind: tokenStringSingle, data: "hi\n"}}, ""}, - {"single string 'hi\\''", "'hi\\''", tokens{{kind: tokenStringSingle, data: "hi\\'"}}, ""}, - {"single string 'hi\\nl'", "'hi\\\n'", tokens{{kind: tokenStringSingle, data: "hi\\\n"}}, ""}, - {"single string 'hi", "'hi", tokens{}, "single string 
'hi:1:1 Unterminated String"}, + {"single string 'hi'", "'hi'", Tokens{{kind: tokenStringSingle, data: "hi"}}, ""}, + {"single string 'hi nl'", "'hi\n'", Tokens{{kind: tokenStringSingle, data: "hi\n"}}, ""}, + {"single string 'hi\\''", "'hi\\''", Tokens{{kind: tokenStringSingle, data: "hi\\'"}}, ""}, + {"single string 'hi\\nl'", "'hi\\\n'", Tokens{{kind: tokenStringSingle, data: "hi\\\n"}}, ""}, + {"single string 'hi", "'hi", Tokens{}, "single string 'hi:1:1 Unterminated String"}, - {"assert", "assert", tokens{{kind: tokenAssert, data: "assert"}}, ""}, - {"else", "else", tokens{{kind: tokenElse, data: "else"}}, ""}, - {"error", "error", tokens{{kind: tokenError, data: "error"}}, ""}, - {"false", "false", tokens{{kind: tokenFalse, data: "false"}}, ""}, - {"for", "for", tokens{{kind: tokenFor, data: "for"}}, ""}, - {"function", "function", tokens{{kind: tokenFunction, data: "function"}}, ""}, - {"if", "if", tokens{{kind: tokenIf, data: "if"}}, ""}, - {"import", "import", tokens{{kind: tokenImport, data: "import"}}, ""}, - {"importstr", "importstr", tokens{{kind: tokenImportStr, data: "importstr"}}, ""}, - {"in", "in", tokens{{kind: tokenIn, data: "in"}}, ""}, - {"local", "local", tokens{{kind: tokenLocal, data: "local"}}, ""}, - {"null", "null", tokens{{kind: tokenNullLit, data: "null"}}, ""}, - {"self", "self", tokens{{kind: tokenSelf, data: "self"}}, ""}, - {"super", "super", tokens{{kind: tokenSuper, data: "super"}}, ""}, - {"tailstrict", "tailstrict", tokens{{kind: tokenTailStrict, data: "tailstrict"}}, ""}, - {"then", "then", tokens{{kind: tokenThen, data: "then"}}, ""}, - {"true", "true", tokens{{kind: tokenTrue, data: "true"}}, ""}, + {"assert", "assert", Tokens{{kind: tokenAssert, data: "assert"}}, ""}, + {"else", "else", Tokens{{kind: tokenElse, data: "else"}}, ""}, + {"error", "error", Tokens{{kind: tokenError, data: "error"}}, ""}, + {"false", "false", Tokens{{kind: tokenFalse, data: "false"}}, ""}, + {"for", "for", Tokens{{kind: tokenFor, data: "for"}}, 
""}, + {"function", "function", Tokens{{kind: tokenFunction, data: "function"}}, ""}, + {"if", "if", Tokens{{kind: tokenIf, data: "if"}}, ""}, + {"import", "import", Tokens{{kind: tokenImport, data: "import"}}, ""}, + {"importstr", "importstr", Tokens{{kind: tokenImportStr, data: "importstr"}}, ""}, + {"in", "in", Tokens{{kind: tokenIn, data: "in"}}, ""}, + {"local", "local", Tokens{{kind: tokenLocal, data: "local"}}, ""}, + {"null", "null", Tokens{{kind: tokenNullLit, data: "null"}}, ""}, + {"self", "self", Tokens{{kind: tokenSelf, data: "self"}}, ""}, + {"super", "super", Tokens{{kind: tokenSuper, data: "super"}}, ""}, + {"tailstrict", "tailstrict", Tokens{{kind: tokenTailStrict, data: "tailstrict"}}, ""}, + {"then", "then", Tokens{{kind: tokenThen, data: "then"}}, ""}, + {"true", "true", Tokens{{kind: tokenTrue, data: "true"}}, ""}, - {"identifier", "foobar123", tokens{{kind: tokenIdentifier, data: "foobar123"}}, ""}, - {"identifier", "foo bar123", tokens{{kind: tokenIdentifier, data: "foo"}, {kind: tokenIdentifier, data: "bar123"}}, ""}, + {"identifier", "foobar123", Tokens{{kind: tokenIdentifier, data: "foobar123"}}, ""}, + {"identifier", "foo bar123", Tokens{{kind: tokenIdentifier, data: "foo"}, {kind: tokenIdentifier, data: "bar123"}}, ""}, - {"c++ comment", "// hi", tokens{}, ""}, // This test doesn't look at fodder (yet?) - {"hash comment", "# hi", tokens{}, ""}, // This test doesn't look at fodder (yet?) - {"c comment", "/* hi */", tokens{}, ""}, // This test doesn't look at fodder (yet?) - {"c comment no term", "/* hi", tokens{}, "c comment no term:1:1 Multi-line comment has no terminating */"}, // This test doesn't look at fodder (yet?) + {"c++ comment", "// hi", Tokens{}, ""}, // This test doesn't look at fodder (yet?) + {"hash comment", "# hi", Tokens{}, ""}, // This test doesn't look at fodder (yet?) + {"c comment", "/* hi */", Tokens{}, ""}, // This test doesn't look at fodder (yet?) 
+ {"c comment no term", "/* hi", Tokens{}, "c comment no term:1:1 Multi-line comment has no terminating */"}, // This test doesn't look at fodder (yet?) { "block string spaces", @@ -126,7 +126,7 @@ var lexTests = []lexTest{ ||| foo |||`, - tokens{ + Tokens{ { kind: tokenStringBlock, data: "test\n more\n|||\n foo\n", @@ -144,7 +144,7 @@ var lexTests = []lexTest{ ||| foo |||`, - tokens{ + Tokens{ { kind: tokenStringBlock, data: "test\n more\n|||\n foo\n", @@ -162,7 +162,7 @@ var lexTests = []lexTest{ ||| foo |||`, - tokens{ + Tokens{ { kind: tokenStringBlock, data: "test\n more\n|||\n foo\n", @@ -183,7 +183,7 @@ var lexTests = []lexTest{ ||| foo |||`, - tokens{ + Tokens{ { kind: tokenStringBlock, data: "\ntest\n\n\n more\n|||\n foo\n", @@ -199,14 +199,14 @@ var lexTests = []lexTest{ test foo |||`, - tokens{}, + Tokens{}, "block string bad indent:1:1 Text block not terminated with |||", }, { "block string eof", `||| test`, - tokens{}, + Tokens{}, "block string eof:1:1 Unexpected EOF", }, { @@ -214,7 +214,7 @@ var lexTests = []lexTest{ `||| test `, - tokens{}, + Tokens{}, "block string not term:1:1 Text block not terminated with |||", }, { @@ -222,35 +222,35 @@ var lexTests = []lexTest{ `||| test |||`, - tokens{}, + Tokens{}, "block string no ws:1:1 Text block's first line must start with whitespace", }, - {"verbatim_string1", `@""`, tokens{{kind: tokenVerbatimStringDouble, data: ""}}, ""}, - {"verbatim_string2", `@''`, tokens{{kind: tokenVerbatimStringSingle, data: ""}}, ""}, - {"verbatim_string3", `@""""`, tokens{{kind: tokenVerbatimStringDouble, data: `"`}}, ""}, - {"verbatim_string4", `@''''`, tokens{{kind: tokenVerbatimStringSingle, data: "'"}}, ""}, - {"verbatim_string5", `@"\n"`, tokens{{kind: tokenVerbatimStringDouble, data: "\\n"}}, ""}, - {"verbatim_string6", `@"''"`, tokens{{kind: tokenVerbatimStringDouble, data: "''"}}, ""}, + {"verbatim_string1", `@""`, Tokens{{kind: tokenVerbatimStringDouble, data: ""}}, ""}, + {"verbatim_string2", `@''`, Tokens{{kind: 
tokenVerbatimStringSingle, data: ""}}, ""}, + {"verbatim_string3", `@""""`, Tokens{{kind: tokenVerbatimStringDouble, data: `"`}}, ""}, + {"verbatim_string4", `@''''`, Tokens{{kind: tokenVerbatimStringSingle, data: "'"}}, ""}, + {"verbatim_string5", `@"\n"`, Tokens{{kind: tokenVerbatimStringDouble, data: "\\n"}}, ""}, + {"verbatim_string6", `@"''"`, Tokens{{kind: tokenVerbatimStringDouble, data: "''"}}, ""}, - {"verbatim_string_unterminated", `@"blah blah`, tokens{}, "verbatim_string_unterminated:1:1 Unterminated String"}, - {"verbatim_string_junk", `@blah blah`, tokens{}, "verbatim_string_junk:1:1 Couldn't lex verbatim string, junk after '@': 98"}, + {"verbatim_string_unterminated", `@"blah blah`, Tokens{}, "verbatim_string_unterminated:1:1 Unterminated String"}, + {"verbatim_string_junk", `@blah blah`, Tokens{}, "verbatim_string_junk:1:1 Couldn't lex verbatim string, junk after '@': 98"}, - {"op *", "*", tokens{{kind: tokenOperator, data: "*"}}, ""}, - {"op /", "/", tokens{{kind: tokenOperator, data: "/"}}, ""}, - {"op %", "%", tokens{{kind: tokenOperator, data: "%"}}, ""}, - {"op &", "&", tokens{{kind: tokenOperator, data: "&"}}, ""}, - {"op |", "|", tokens{{kind: tokenOperator, data: "|"}}, ""}, - {"op ^", "^", tokens{{kind: tokenOperator, data: "^"}}, ""}, - {"op =", "=", tokens{{kind: tokenOperator, data: "="}}, ""}, - {"op <", "<", tokens{{kind: tokenOperator, data: "<"}}, ""}, - {"op >", ">", tokens{{kind: tokenOperator, data: ">"}}, ""}, - {"op >==|", ">==|", tokens{{kind: tokenOperator, data: ">==|"}}, ""}, + {"op *", "*", Tokens{{kind: tokenOperator, data: "*"}}, ""}, + {"op /", "/", Tokens{{kind: tokenOperator, data: "/"}}, ""}, + {"op %", "%", Tokens{{kind: tokenOperator, data: "%"}}, ""}, + {"op &", "&", Tokens{{kind: tokenOperator, data: "&"}}, ""}, + {"op |", "|", Tokens{{kind: tokenOperator, data: "|"}}, ""}, + {"op ^", "^", Tokens{{kind: tokenOperator, data: "^"}}, ""}, + {"op =", "=", Tokens{{kind: tokenOperator, data: "="}}, ""}, + {"op <", "<", 
Tokens{{kind: tokenOperator, data: "<"}}, ""}, + {"op >", ">", Tokens{{kind: tokenOperator, data: ">"}}, ""}, + {"op >==|", ">==|", Tokens{{kind: tokenOperator, data: ">==|"}}, ""}, - {"junk", "💩", tokens{}, "junk:1:1 Could not lex the character '\\U0001f4a9'"}, + {"junk", "💩", Tokens{}, "junk:1:1 Could not lex the character '\\U0001f4a9'"}, } -func tokensEqual(ts1, ts2 tokens) bool { +func tokensEqual(ts1, ts2 Tokens) bool { if len(ts1) != len(ts2) { return false } @@ -275,7 +275,7 @@ func tokensEqual(ts1, ts2 tokens) bool { func TestLex(t *testing.T) { for _, test := range lexTests { // Copy the test tokens and append an EOF token - testTokens := append(tokens(nil), test.tokens...) + testTokens := append(Tokens(nil), test.tokens...) testTokens = append(testTokens, tEOF) tokens, err := Lex(test.name, test.input) var errString string diff --git a/parser/literalfield_set.go b/parser/literalfield_set.go index 983ed5c..e39b4cd 100644 --- a/parser/literalfield_set.go +++ b/parser/literalfield_set.go @@ -1,6 +1,6 @@ // Generated by: main // TypeWriter: set -// Directive: +gen on literalField +// Directive: +gen on LiteralField package parser @@ -8,12 +8,12 @@ package parser // The MIT License (MIT) // Copyright (c) 2013 Ralph Caraveo (deckarep@gmail.com) -// literalFieldSet is the primary type that represents a set -type literalFieldSet map[LiteralField]struct{} +// LiteralFieldSet is the primary type that represents a set +type LiteralFieldSet map[LiteralField]struct{} -// NewliteralFieldSet creates and returns a reference to an empty set. -func NewliteralFieldSet(a ...LiteralField) literalFieldSet { - s := make(literalFieldSet) +// NewLiteralFieldSet creates and returns a reference to an empty set. 
+func NewLiteralFieldSet(a ...LiteralField) LiteralFieldSet { + s := make(LiteralFieldSet) for _, i := range a { s.Add(i) } @@ -21,7 +21,7 @@ func NewliteralFieldSet(a ...LiteralField) literalFieldSet { } // ToSlice returns the elements of the current set as a slice -func (set literalFieldSet) ToSlice() []LiteralField { +func (set LiteralFieldSet) ToSlice() []LiteralField { var s []LiteralField for v := range set { s = append(s, v) @@ -30,20 +30,20 @@ func (set literalFieldSet) ToSlice() []LiteralField { } // Add adds an item to the current set if it doesn't already exist in the set. -func (set literalFieldSet) Add(i LiteralField) bool { +func (set LiteralFieldSet) Add(i LiteralField) bool { _, found := set[i] set[i] = struct{}{} return !found //False if it existed already } // Contains determines if a given item is already in the set. -func (set literalFieldSet) Contains(i LiteralField) bool { +func (set LiteralFieldSet) Contains(i LiteralField) bool { _, found := set[i] return found } // ContainsAll determines if the given items are all in the set -func (set literalFieldSet) ContainsAll(i ...LiteralField) bool { +func (set LiteralFieldSet) ContainsAll(i ...LiteralField) bool { for _, v := range i { if !set.Contains(v) { return false @@ -53,7 +53,7 @@ func (set literalFieldSet) ContainsAll(i ...LiteralField) bool { } // IsSubset determines if every item in the other set is in this set. -func (set literalFieldSet) IsSubset(other literalFieldSet) bool { +func (set LiteralFieldSet) IsSubset(other LiteralFieldSet) bool { for elem := range set { if !other.Contains(elem) { return false @@ -63,13 +63,13 @@ func (set literalFieldSet) IsSubset(other literalFieldSet) bool { } // IsSuperset determines if every item of this set is in the other set. -func (set literalFieldSet) IsSuperset(other literalFieldSet) bool { +func (set LiteralFieldSet) IsSuperset(other LiteralFieldSet) bool { return other.IsSubset(set) } // Union returns a new set with all items in both sets. 
-func (set literalFieldSet) Union(other literalFieldSet) literalFieldSet { - unionedSet := NewliteralFieldSet() +func (set LiteralFieldSet) Union(other LiteralFieldSet) LiteralFieldSet { + unionedSet := NewLiteralFieldSet() for elem := range set { unionedSet.Add(elem) @@ -81,8 +81,8 @@ func (set literalFieldSet) Union(other literalFieldSet) literalFieldSet { } // Intersect returns a new set with items that exist only in both sets. -func (set literalFieldSet) Intersect(other literalFieldSet) literalFieldSet { - intersection := NewliteralFieldSet() +func (set LiteralFieldSet) Intersect(other LiteralFieldSet) LiteralFieldSet { + intersection := NewLiteralFieldSet() // loop over smaller set if set.Cardinality() < other.Cardinality() { for elem := range set { @@ -101,8 +101,8 @@ func (set literalFieldSet) Intersect(other literalFieldSet) literalFieldSet { } // Difference returns a new set with items in the current set but not in the other set -func (set literalFieldSet) Difference(other literalFieldSet) literalFieldSet { - differencedSet := NewliteralFieldSet() +func (set LiteralFieldSet) Difference(other LiteralFieldSet) LiteralFieldSet { + differencedSet := NewLiteralFieldSet() for elem := range set { if !other.Contains(elem) { differencedSet.Add(elem) @@ -112,29 +112,29 @@ func (set literalFieldSet) Difference(other literalFieldSet) literalFieldSet { } // SymmetricDifference returns a new set with items in the current set or the other set but not in both. -func (set literalFieldSet) SymmetricDifference(other literalFieldSet) literalFieldSet { +func (set LiteralFieldSet) SymmetricDifference(other LiteralFieldSet) LiteralFieldSet { aDiff := set.Difference(other) bDiff := other.Difference(set) return aDiff.Union(bDiff) } // Clear clears the entire set to be the empty set. -func (set *literalFieldSet) Clear() { - *set = make(literalFieldSet) +func (set *LiteralFieldSet) Clear() { + *set = make(LiteralFieldSet) } // Remove allows the removal of a single item in the set. 
-func (set literalFieldSet) Remove(i LiteralField) { +func (set LiteralFieldSet) Remove(i LiteralField) { delete(set, i) } // Cardinality returns how many items are currently in the set. -func (set literalFieldSet) Cardinality() int { +func (set LiteralFieldSet) Cardinality() int { return len(set) } -// Iter returns a channel of type literalField that you can range over. -func (set literalFieldSet) Iter() <-chan LiteralField { +// Iter returns a channel of type LiteralField that you can range over. +func (set LiteralFieldSet) Iter() <-chan LiteralField { ch := make(chan LiteralField) go func() { for elem := range set { @@ -149,7 +149,7 @@ func (set literalFieldSet) Iter() <-chan LiteralField { // Equal determines if two sets are equal to each other. // If they both are the same size and have the same items they are considered equal. // Order of items is not relevent for sets to be equal. -func (set literalFieldSet) Equal(other literalFieldSet) bool { +func (set LiteralFieldSet) Equal(other LiteralFieldSet) bool { if set.Cardinality() != other.Cardinality() { return false } @@ -163,8 +163,8 @@ func (set literalFieldSet) Equal(other literalFieldSet) bool { // Clone returns a clone of the set. // Does NOT clone the underlying elements. 
-func (set literalFieldSet) Clone() literalFieldSet { - clonedSet := NewliteralFieldSet() +func (set LiteralFieldSet) Clone() LiteralFieldSet { + clonedSet := NewLiteralFieldSet() for elem := range set { clonedSet.Add(elem) } diff --git a/parser/parser.go b/parser/parser.go index 54dba33..3436856 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -71,11 +71,11 @@ func locFromTokenAST(begin *token, end ast.Node) ast.LocationRange { // --------------------------------------------------------------------------- type parser struct { - t tokens + t Tokens currT int } -func makeParser(t tokens) *parser { +func makeParser(t Tokens) *parser { return &parser{ t: t, } @@ -298,13 +298,14 @@ func (p *parser) parseObjectAssignmentOp() (plusSugar bool, hide ast.ObjectField return } +// A LiteralField is a field of an object or object comprehension. // +gen set type LiteralField string // Parse object or object comprehension without leading brace func (p *parser) parseObjectRemainder(tok *token) (ast.Node, *token, error) { var fields ast.ObjectFields - literalFields := make(literalFieldSet) + literalFields := make(LiteralFieldSet) binds := make(ast.IdentifierSet) gotComma := false @@ -730,7 +731,7 @@ func (p *parser) parseTerminal() (ast.Node, error) { } return &ast.Parens{ NodeBase: ast.NewNodeBaseLoc(locFromTokens(tok, tokRight)), - Inner: inner, + Inner: inner, }, nil // Literals @@ -1173,7 +1174,8 @@ func (p *parser) parse(prec precedence) (ast.Node, error) { // --------------------------------------------------------------------------- -func Parse(t tokens) (ast.Node, error) { +// Parse parses a slice of tokens into a parse tree. 
+func Parse(t Tokens) (ast.Node, error) { p := makeParser(t) expr, err := p.parse(maxPrecedence) if err != nil { diff --git a/parser/static_error.go b/parser/static_error.go index c74edbb..ac96e65 100644 --- a/parser/static_error.go +++ b/parser/static_error.go @@ -32,14 +32,17 @@ type StaticError struct { Msg string } +// MakeStaticErrorMsg returns a StaticError with a message. func MakeStaticErrorMsg(msg string) StaticError { return StaticError{Msg: msg} } +// MakeStaticError returns a StaticError with a message and a LocationRange. func MakeStaticError(msg string, lr ast.LocationRange) StaticError { return StaticError{Msg: msg, Loc: lr} } +// Error returns the string representation of a StaticError. func (err StaticError) Error() string { loc := "" if err.Loc.IsSet() { diff --git a/runtime_error.go b/runtime_error.go index 1b14b63..0692039 100644 --- a/runtime_error.go +++ b/runtime_error.go @@ -55,6 +55,8 @@ func traceElementToTraceFrame(trace *TraceElement) TraceFrame { return tf } +// TraceElement represents tracing information, including a location range and a +// surrounding context. // TODO(sbarzowski) better name type TraceElement struct { loc *ast.LocationRange diff --git a/testdata/native_error.golden b/testdata/native_error.golden index ce1f8b7..da6c12f 100644 --- a/testdata/native_error.golden +++ b/testdata/native_error.golden @@ -1,4 +1,4 @@ -RUNTIME ERROR: Native function error +RUNTIME ERROR: native function error ------------------------------------------------- testdata/native_error:1:1-28 $ diff --git a/testdata/nonexistent_import.golden b/testdata/nonexistent_import.golden index 2352821..fbe3f0d 100644 --- a/testdata/nonexistent_import.golden +++ b/testdata/nonexistent_import.golden @@ -1,4 +1,4 @@ -RUNTIME ERROR: Couldn't open import "no chance a file with this name exists": No match locally or in the Jsonnet library paths. 
+RUNTIME ERROR: couldn't open import "no chance a file with this name exists": no match locally or in the Jsonnet library paths ------------------------------------------------- testdata/nonexistent_import:1:1-51 $ diff --git a/testdata/nonexistent_import_crazy.golden b/testdata/nonexistent_import_crazy.golden index 05ba464..81ff988 100644 --- a/testdata/nonexistent_import_crazy.golden +++ b/testdata/nonexistent_import_crazy.golden @@ -1,4 +1,4 @@ -RUNTIME ERROR: Couldn't open import "ąęółńśćźż \" ' \n\n\t\t": No match locally or in the Jsonnet library paths. +RUNTIME ERROR: couldn't open import "ąęółńśćźż \" ' \n\n\t\t": no match locally or in the Jsonnet library paths ------------------------------------------------- testdata/nonexistent_import_crazy:1:1-46 $ diff --git a/thunks.go b/thunks.go index 5737506..5cfab6a 100644 --- a/thunks.go +++ b/thunks.go @@ -181,6 +181,7 @@ func (f *bindingsUnboundField) bindToObject(sb selfBinding, origBindings binding return f.inner.bindToObject(sb, upValues, fieldName) } +// PlusSuperUnboundField represents a `field+: ...` that hasn't been bound to an object. type PlusSuperUnboundField struct { inner unboundField } @@ -287,12 +288,14 @@ func makeClosure(env environment, function *ast.Function) *closure { } } +// NativeFunction represents a function implemented in Go. type NativeFunction struct { Func func([]interface{}) (interface{}, error) Params ast.Identifiers Name string } +// EvalCall evaluates a call to a NativeFunction and returns the result. func (native *NativeFunction) EvalCall(arguments callArguments, e *evaluator) (value, error) { flatArgs := flattenArgs(arguments, native.Parameters()) nativeArgs := make([]interface{}, 0, len(flatArgs)) @@ -314,6 +317,7 @@ func (native *NativeFunction) EvalCall(arguments callArguments, e *evaluator) (v return jsonToValue(e, resultJSON) } +// Parameters returns a NativeFunction's parameters. 
func (native *NativeFunction) Parameters() Parameters { return Parameters{required: native.Params} } diff --git a/value.go b/value.go index 661cebb..1cb1074 100644 --- a/value.go +++ b/value.go @@ -73,6 +73,8 @@ func (v *valueBase) aValue() {} // Primitive values // ------------------------------------- +// valueString represents a string value, internally using a []rune for quick +// indexing. type valueString struct { valueBase // We use rune slices instead of strings for quick indexing @@ -319,6 +321,8 @@ func (f *valueFunction) getType() *valueType { return functionType } +// Parameters represents required positional and optional named parameters for a +// function definition. type Parameters struct { required ast.Identifiers optional []namedParameter @@ -399,8 +403,10 @@ func (sb selfBinding) super() selfBinding { return selfBinding{self: sb.self, superDepth: sb.superDepth + 1} } +// Hidden represents whether to include hidden fields in a lookup. type Hidden int +// With/without hidden fields const ( withHidden Hidden = iota withoutHidden @@ -409,14 +415,13 @@ const ( func withHiddenFromBool(with bool) Hidden { if with { return withHidden - } else { - return withoutHidden } + return withoutHidden } // Hack - we need to distinguish not-checked-yet and no error situations // so we have a special value for no error and nil means that we don't know yet. -var errNoErrorInObjectInvariants = errors.New("No error - assertions passed") +var errNoErrorInObjectInvariants = errors.New("no error - assertions passed") type valueObjectBase struct { valueBase diff --git a/vm.go b/vm.go index add1628..2bfe0df 100644 --- a/vm.go +++ b/vm.go @@ -184,10 +184,12 @@ func snippetToAST(filename string, snippet string) (ast.Node, error) { return node, nil } +// SnippetToAST parses a snippet and returns the resulting AST. func SnippetToAST(filename string, snippet string) (ast.Node, error) { return snippetToAST(filename, snippet) } +// Version returns the Jsonnet version number. 
func Version() string { return "v0.9.5" }