Add native golang formatter (#388)

Add native golang formatter
This commit is contained in:
Dave Cunningham 2020-03-10 22:19:49 +00:00 committed by GitHub
parent 234b97cd9c
commit 724650d358
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
47 changed files with 5802 additions and 974 deletions

1
.gitignore vendored
View File

@ -23,6 +23,7 @@ gojsonnet.egg-info/
/jsonnet-old
/jsonnet-old.exe
/jsonnetfmt
/linter/jsonnet-lint/jsonnet-lint
/tests_path.source

View File

@ -35,6 +35,7 @@ go_library(
go_test(
name = "go_default_test",
srcs = [
"builtins_benchmark_test.go",
"interpreter_test.go",
"jsonnet_test.go",
"main_test.go",

View File

@ -25,6 +25,7 @@ go get github.com/google/go-jsonnet/cmd/jsonnet
git clone git@github.com:google/go-jsonnet.git
cd go-jsonnet
go build ./cmd/jsonnet
go build ./cmd/jsonnetfmt
```
To build with [Bazel](https://bazel.build/) instead:
```bash
@ -33,6 +34,7 @@ cd go-jsonnet
git submodule init
git submodule update
bazel build //cmd/jsonnet
bazel build //cmd/jsonnetfmt
```
The resulting _jsonnet_ program will then be available at a platform-specific path, such as _bazel-bin/cmd/jsonnet/darwin_amd64_stripped/jsonnet_ for macOS.

View File

@ -43,6 +43,13 @@ type Node interface {
FreeVariables() Identifiers
SetFreeVariables(Identifiers)
SetContext(Context)
// OpenFodder returns the fodder before the first token of an AST node.
// Since every AST node has opening fodder, it is defined here.
// If the AST node is left recursive (e.g. BinaryOp) then it is ambiguous
// where the fodder should be stored. This is resolved by storing it as
// far inside the tree as possible. OpenFodder returns a pointer to allow
// the caller to modify the fodder.
OpenFodder() *Fodder
}
// Nodes represents a Node slice.
@ -82,8 +89,8 @@ func (n *NodeBase) Loc() *LocationRange {
}
// OpenFodder returns a NodeBase's opening fodder.
func (n *NodeBase) OpenFodder() Fodder {
return n.Fodder
func (n *NodeBase) OpenFodder() *Fodder {
return &n.Fodder
}
// FreeVariables returns a NodeBase's freeVariables.
@ -427,7 +434,7 @@ type Index struct {
LeftBracketFodder Fodder
Index Node
// When Index is being used, this is the fodder before the ']'.
// When Id is being used, this is always empty.
// When Id is being used, this is the fodder before the id.
RightBracketFodder Fodder
//nolint: golint,stylecheck // keeping Id instead of ID for now to avoid breaking 3rd parties
Id *Identifier
@ -523,9 +530,10 @@ func (k LiteralStringKind) FullyEscaped() bool {
// LiteralString represents a JSON string
type LiteralString struct {
NodeBase
Value string
Kind LiteralStringKind
BlockIndent string
Value string
Kind LiteralStringKind
BlockIndent string
BlockTermIndent string
}
// ---------------------------------------------------------------------------
@ -642,10 +650,11 @@ type DesugaredObject struct {
// { [e]: e for x in e for.. if... }.
type ObjectComp struct {
NodeBase
Fields ObjectFields
TrailingComma bool
Spec ForSpec
CloseFodder Fodder
Fields ObjectFields
TrailingCommaFodder Fodder
TrailingComma bool
Spec ForSpec
CloseFodder Fodder
}
// ---------------------------------------------------------------------------

View File

@ -16,7 +16,9 @@ limitations under the License.
package ast
import "sort"
import (
"sort"
)
// AddIdentifiers adds a slice of identifiers to an identifier set.
func (i IdentifierSet) AddIdentifiers(idents Identifiers) {

File diff suppressed because it is too large Load Diff

View File

@ -16,26 +16,26 @@ def jsonnet_go_dependencies():
go_repository(
name = "com_github_davecgh_go_spew",
importpath = "github.com/davecgh/go-spew",
sum = "h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=",
version = "v1.1.0",
sum = "h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=",
version = "v1.1.1",
)
go_repository(
name = "com_github_fatih_color",
importpath = "github.com/fatih/color",
sum = "h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=",
version = "v1.7.0",
sum = "h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=",
version = "v1.9.0",
)
go_repository(
name = "com_github_mattn_go_colorable",
importpath = "github.com/mattn/go-colorable",
sum = "h1:G1f5SKeVxmagw/IyvzvtZE4Gybcc4Tr1tf7I8z0XgOg=",
version = "v0.1.1",
sum = "h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=",
version = "v0.1.4",
)
go_repository(
name = "com_github_mattn_go_isatty",
importpath = "github.com/mattn/go-isatty",
sum = "h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=",
version = "v0.0.7",
sum = "h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM=",
version = "v0.0.11",
)
go_repository(
name = "com_github_pmezard_go_difflib",
@ -46,8 +46,8 @@ def jsonnet_go_dependencies():
go_repository(
name = "com_github_sergi_go_diff",
importpath = "github.com/sergi/go-diff",
sum = "h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=",
version = "v1.0.0",
sum = "h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=",
version = "v1.1.0",
)
go_repository(
name = "com_github_stretchr_objx",
@ -58,12 +58,42 @@ def jsonnet_go_dependencies():
go_repository(
name = "com_github_stretchr_testify",
importpath = "github.com/stretchr/testify",
sum = "h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=",
version = "v1.3.0",
sum = "h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=",
version = "v1.4.0",
)
go_repository(
name = "org_golang_x_sys",
importpath = "golang.org/x/sys",
sum = "h1:T5DasATyLQfmbTpfEXx/IOL9vfjzW6up+ZDkmHvIf2s=",
version = "v0.0.0-20190531175056-4c3a928424d2",
sum = "h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=",
version = "v0.0.0-20191026070338-33540a1f6037",
)
go_repository(
name = "com_github_kr_pretty",
importpath = "github.com/kr/pretty",
sum = "h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=",
version = "v0.1.0",
)
go_repository(
name = "com_github_kr_pty",
importpath = "github.com/kr/pty",
sum = "h1:VkoXIwSboBpnk99O/KFauAEILuNHv5DVFKZMBN/gUgw=",
version = "v1.1.1",
)
go_repository(
name = "com_github_kr_text",
importpath = "github.com/kr/text",
sum = "h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=",
version = "v0.1.0",
)
go_repository(
name = "in_gopkg_check_v1",
importpath = "gopkg.in/check.v1",
sum = "h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=",
version = "v1.0.0-20190902080502-41f04d3bba15",
)
go_repository(
name = "in_gopkg_yaml_v2",
importpath = "gopkg.in/yaml.v2",
sum = "h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=",
version = "v2.2.4",
)

View File

@ -12,11 +12,9 @@ load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library")
go_library(
name = "go_default_library",
srcs = [
"handles.go",
"c-bindings.go",
"handles.go",
"internal.h",
"json.cpp",
"json.h",
"libjsonnet.cpp",
],
cdeps = [
@ -27,7 +25,10 @@ go_library(
cxxopts = ["-std=c++11"],
importpath = "github.com/google/go-jsonnet/c-bindings",
visibility = ["//visibility:private"],
deps = ["//:go_default_library"],
deps = [
"//:go_default_library",
"//ast:go_default_library",
],
)
go_binary(

0
cmd/BUILD.bazel Normal file
View File

0
cmd/internal/BUILD.bazel Normal file
View File

View File

@ -0,0 +1,14 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
go_library(
name = "go_default_library",
srcs = ["utils.go"],
importpath = "github.com/google/go-jsonnet/cmd/internal/cmd",
visibility = ["//visibility:public"],
)
go_test(
name = "go_default_test",
srcs = ["utils_test.go"],
embed = [":go_default_library"],
)

187
cmd/internal/cmd/utils.go Normal file
View File

@ -0,0 +1,187 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cmd
import (
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"runtime"
"runtime/pprof"
"strconv"
)
// NextArg returns the argument after position *i, advancing *i past it.
// If the arguments are exhausted it prints a message to stderr and exits
// the process with status 1.
func NextArg(i *int, args []string) string {
	*i = *i + 1
	if *i < len(args) {
		return args[*i]
	}
	fmt.Fprintln(os.Stderr, "Expected another commandline argument.")
	os.Exit(1)
	return "" // unreachable: os.Exit does not return
}
// SimplifyArgs expands clustered single-dash options so that any -abc arg
// before the first -- (if any) becomes -a -b -c. The -- marker and every
// argument after it are copied through verbatim.
func SimplifyArgs(args []string) (r []string) {
	r = make([]string, 0, len(args)*2)
	for i, arg := range args {
		if arg == "--" {
			// Option processing stops here; keep the remainder untouched.
			r = append(r, args[i:]...)
			return
		}
		isCluster := len(arg) > 2 && arg[0] == '-' && arg[1] != '-'
		if !isCluster {
			r = append(r, arg)
			continue
		}
		// Split e.g. "-abc" into "-a", "-b", "-c" (byte-wise, as before).
		for j := 1; j < len(arg); j++ {
			r = append(r, "-"+string(arg[j]))
		}
	}
	return
}
// SafeStrToInt parses str as a base-10 integer. On failure it prints a
// message to stderr and exits the process with status 1.
func SafeStrToInt(str string) (i int) {
	parsed, err := strconv.Atoi(str)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Invalid integer \"%s\"\n", str)
		os.Exit(1)
	}
	return parsed
}
// ReadInput gets Jsonnet code from the given place (file, commandline,
// stdin). It also rewrites *filename to <stdin> or <cmdline> when the
// input did not come from a real file, so later error messages make sense.
func ReadInput(filenameIsCode bool, filename *string) (input string, err error) {
	switch {
	case filenameIsCode:
		// The "filename" IS the program text (-e / --exec).
		input = *filename
		*filename = "<cmdline>"
	case *filename == "-":
		var data []byte
		data, err = ioutil.ReadAll(os.Stdin)
		input = string(data)
		*filename = "<stdin>"
	default:
		var data []byte
		data, err = ioutil.ReadFile(*filename)
		input = string(data)
	}
	return
}
// SafeReadInput runs ReadInput, exiting the process if there was a problem.
// File-system errors are unwrapped so the user sees a friendly
// "Opening/Reading input file" message instead of a raw *os.PathError.
func SafeReadInput(filenameIsCode bool, filename *string) string {
	output, err := ReadInput(filenameIsCode, filename)
	if err == nil {
		return output
	}
	op := ""
	if pathErr, ok := err.(*os.PathError); ok {
		op = pathErr.Op
		err = pathErr.Err
	}
	switch op {
	case "open":
		fmt.Fprintf(os.Stderr, "Opening input file: %s: %s\n", *filename, err.Error())
	case "read":
		fmt.Fprintf(os.Stderr, "Reading input file: %s: %s\n", *filename, err.Error())
	default:
		fmt.Fprintln(os.Stderr, err.Error())
	}
	os.Exit(1)
	return "" // unreachable: os.Exit does not return
}
// WriteOutputFile writes the output to the given file, creating directories
// if requested, and printing to stdout instead if the outputFile is "".
func WriteOutputFile(output string, outputFile string, createDirs bool) (err error) {
	if outputFile == "" {
		fmt.Print(output)
		return nil
	}
	if createDirs {
		if err := os.MkdirAll(filepath.Dir(outputFile), 0755); err != nil {
			return err
		}
	}
	f, createErr := os.Create(outputFile)
	if createErr != nil {
		return createErr
	}
	defer func() {
		// Report a Close failure, but never let it mask an earlier
		// write error (the write error is the more useful diagnosis).
		if ferr := f.Close(); ferr != nil && err == nil {
			err = ferr
		}
	}()
	_, err = f.WriteString(output)
	return err
}
// StartCPUProfile starts CPU profiling into the file named by the
// JSONNET_CPU_PROFILE environment variable; it is a no-op when the
// variable is unset. Any failure is fatal.
func StartCPUProfile() {
	// https://blog.golang.org/profiling-go-programs
	cpuprofile := os.Getenv("JSONNET_CPU_PROFILE")
	if cpuprofile == "" {
		return
	}
	// The file is deliberately not closed here: the profiler keeps
	// writing to it until StopCPUProfile is called.
	f, err := os.Create(cpuprofile)
	if err != nil {
		log.Fatal(err)
	}
	if err := pprof.StartCPUProfile(f); err != nil {
		log.Fatal(err)
	}
}
// StopCPUProfile ensures any running CPU profile is stopped. It only acts
// when JSONNET_CPU_PROFILE is set, mirroring StartCPUProfile.
func StopCPUProfile() {
	if cpuprofile := os.Getenv("JSONNET_CPU_PROFILE"); cpuprofile != "" {
		pprof.StopCPUProfile()
	}
}
// MemProfile writes a heap profile to the file named by the
// JSONNET_MEM_PROFILE environment variable; it is a no-op when the
// variable is unset. Any failure is fatal.
func MemProfile() {
	memprofile := os.Getenv("JSONNET_MEM_PROFILE")
	if memprofile == "" {
		return
	}
	f, err := os.Create(memprofile)
	if err != nil {
		log.Fatal("could not create memory profile: ", err)
	}
	// Register the close as soon as the file exists.
	defer func() {
		if cerr := f.Close(); cerr != nil {
			log.Fatal("Failed to close the memprofile: ", cerr)
		}
	}()
	runtime.GC() // get up-to-date statistics
	if err := pprof.WriteHeapProfile(f); err != nil {
		log.Fatal("could not write memory profile: ", err)
	}
}

View File

@ -14,7 +14,7 @@ See the License for the specific language governing permissions and
limitations under the License.
*/
package main
package cmd
import (
"testing"
@ -40,7 +40,7 @@ func testEq(a, b []string) bool {
func testSimplifyAux(t *testing.T, name string, input, expected []string) {
t.Run(name, func(t *testing.T) {
got := simplifyArgs(input)
got := SimplifyArgs(input)
if !testEq(got, expected) {
t.Fail()
t.Errorf("Got %v, expected %v\n", got, expected)

View File

@ -1,4 +1,4 @@
load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library", "go_test")
load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library")
go_library(
name = "go_default_library",
@ -7,6 +7,7 @@ go_library(
visibility = ["//visibility:private"],
deps = [
"//:go_default_library",
"//cmd/internal/cmd:go_default_library",
"@com_github_fatih_color//:go_default_library",
],
)
@ -16,9 +17,3 @@ go_binary(
embed = [":go_default_library"],
visibility = ["//visibility:public"],
)
go_test(
name = "go_default_test",
srcs = ["cmd_test.go"],
embed = [":go_default_library"],
)

View File

@ -20,11 +20,8 @@ import (
"fmt"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"runtime"
"runtime/pprof"
"sort"
"strconv"
"strings"
@ -32,40 +29,9 @@ import (
"github.com/fatih/color"
"github.com/google/go-jsonnet"
"github.com/google/go-jsonnet/cmd/internal/cmd"
)
func nextArg(i *int, args []string) string {
(*i)++
if (*i) >= len(args) {
fmt.Fprintln(os.Stderr, "Expected another commandline argument.")
os.Exit(1)
}
return args[*i]
}
// simplifyArgs transforms an array of commandline arguments so that
// any -abc arg before the first -- (if any) are expanded into
// -a -b -c.
func simplifyArgs(args []string) (r []string) {
r = make([]string, 0, len(args)*2)
for i, arg := range args {
if arg == "--" {
for j := i; j < len(args); j++ {
r = append(r, args[j])
}
break
}
if len(arg) > 2 && arg[0] == '-' && arg[1] != '-' {
for j := 1; j < len(arg); j++ {
r = append(r, "-"+string(arg[j]))
}
} else {
r = append(r, arg)
}
}
return
}
func version(o io.Writer) {
fmt.Fprintf(o, "Jsonnet commandline interpreter %s\n", jsonnet.Version())
}
@ -179,12 +145,12 @@ const (
)
func processArgs(givenArgs []string, config *config, vm *jsonnet.VM) (processArgsStatus, error) {
args := simplifyArgs(givenArgs)
args := cmd.SimplifyArgs(givenArgs)
remainingArgs := make([]string, 0, len(args))
i := 0
handleVarVal := func(handle func(key string, val string)) error {
next := nextArg(&i, args)
next := cmd.NextArg(&i, args)
name, content, err := getVarVal(next)
if err != nil {
return err
@ -194,7 +160,7 @@ func processArgs(givenArgs []string, config *config, vm *jsonnet.VM) (processArg
}
handleVarFile := func(handle func(key string, val string), imp string) error {
next := nextArg(&i, args)
next := cmd.NextArg(&i, args)
name, content, err := getVarFile(next, imp)
if err != nil {
return err
@ -213,7 +179,7 @@ func processArgs(givenArgs []string, config *config, vm *jsonnet.VM) (processArg
} else if arg == "-e" || arg == "--exec" {
config.filenameIsCode = true
} else if arg == "-o" || arg == "--output-file" {
outputFile := nextArg(&i, args)
outputFile := cmd.NextArg(&i, args)
if len(outputFile) == 0 {
return processArgsStatusFailure, fmt.Errorf("-o argument was empty string")
}
@ -226,13 +192,13 @@ func processArgs(givenArgs []string, config *config, vm *jsonnet.VM) (processArg
}
break
} else if arg == "-s" || arg == "--max-stack" {
l := safeStrToInt(nextArg(&i, args))
l := safeStrToInt(cmd.NextArg(&i, args))
if l < 1 {
return processArgsStatusFailure, fmt.Errorf("invalid --max-stack value: %d", l)
}
vm.MaxStack = l
} else if arg == "-J" || arg == "--jpath" {
dir := nextArg(&i, args)
dir := cmd.NextArg(&i, args)
if len(dir) == 0 {
return processArgsStatusFailure, fmt.Errorf("-J argument was empty string")
}
@ -273,14 +239,14 @@ func processArgs(givenArgs []string, config *config, vm *jsonnet.VM) (processArg
return processArgsStatusFailure, err
}
} else if arg == "-t" || arg == "--max-trace" {
l := safeStrToInt(nextArg(&i, args))
l := safeStrToInt(cmd.NextArg(&i, args))
if l < 0 {
return processArgsStatusFailure, fmt.Errorf("invalid --max-trace value: %d", l)
}
vm.ErrorFormatter.SetMaxStackTraceSize(l)
} else if arg == "-m" || arg == "--multi" {
config.evalMulti = true
outputDir := nextArg(&i, args)
outputDir := cmd.NextArg(&i, args)
if len(outputDir) == 0 {
return processArgsStatusFailure, fmt.Errorf("-m argument was empty string")
}
@ -322,25 +288,6 @@ func processArgs(givenArgs []string, config *config, vm *jsonnet.VM) (processArg
return processArgsStatusContinue, nil
}
// readInput gets Jsonnet code from the given place (file, commandline, stdin).
// It also updates the given filename to <stdin> or <cmdline> if it wasn't a real filename.
func readInput(config config, filename *string) (input string, err error) {
if config.filenameIsCode {
input, err = *filename, nil
*filename = "<cmdline>"
} else if *filename == "-" {
var bytes []byte
bytes, err = ioutil.ReadAll(os.Stdin)
input = string(bytes)
*filename = "<stdin>"
} else {
var bytes []byte
bytes, err = ioutil.ReadFile(*filename)
input = string(bytes)
}
return
}
func writeMultiOutputFiles(output map[string]string, outputDir, outputFile string, createDirs bool) (err error) {
// If multiple file output is used, then iterate over each string from
// the sequence of strings returned by jsonnet_evaluate_snippet_multi,
@ -458,46 +405,9 @@ func writeOutputStream(output []string, outputFile string) (err error) {
return nil
}
func writeOutputFile(output string, outputFile string, createDirs bool) (err error) {
if outputFile == "" {
fmt.Print(output)
return nil
}
if createDirs {
if err := os.MkdirAll(filepath.Dir(outputFile), 0755); err != nil {
return err
}
}
f, createErr := os.Create(outputFile)
if createErr != nil {
return err
}
defer func() {
if ferr := f.Close(); err != nil {
err = ferr
}
}()
_, err = f.WriteString(output)
return err
}
func main() {
// https://blog.golang.org/profiling-go-programs
var cpuprofile = os.Getenv("JSONNET_CPU_PROFILE")
if cpuprofile != "" {
f, err := os.Create(cpuprofile)
if err != nil {
log.Fatal(err)
}
err = pprof.StartCPUProfile(f)
if err != nil {
log.Fatal(err)
}
defer pprof.StopCPUProfile()
}
cmd.StartCPUProfile()
defer cmd.StopCPUProfile()
vm := jsonnet.MakeVM()
vm.ErrorFormatter.SetColorFormatter(color.New(color.FgRed).Fprintf)
@ -539,23 +449,7 @@ func main() {
panic("Internal error: expected a single input file.")
}
filename := config.inputFiles[0]
input, err := readInput(config, &filename)
if err != nil {
var op string
switch typedErr := err.(type) {
case *os.PathError:
op = typedErr.Op
err = typedErr.Err
}
if op == "open" {
fmt.Fprintf(os.Stderr, "Opening input file: %s: %s\n", filename, err.Error())
} else if op == "read" {
fmt.Fprintf(os.Stderr, "Reading input file: %s: %s\n", filename, err.Error())
} else {
fmt.Fprintln(os.Stderr, err.Error())
}
os.Exit(1)
}
input := cmd.SafeReadInput(config.filenameIsCode, &filename)
var output string
var outputArray []string
var outputDict map[string]string
@ -567,22 +461,7 @@ func main() {
output, err = vm.EvaluateSnippet(filename, input)
}
var memprofile = os.Getenv("JSONNET_MEM_PROFILE")
if memprofile != "" {
f, err := os.Create(memprofile)
if err != nil {
log.Fatal("could not create memory profile: ", err)
}
runtime.GC() // get up-to-date statistics
if err := pprof.WriteHeapProfile(f); err != nil {
log.Fatal("could not write memory profile: ", err)
}
defer func() {
if err := f.Close(); err != nil {
log.Fatal("Failed to close the memprofile: ", err)
}
}()
}
cmd.MemProfile()
if err != nil {
fmt.Fprintln(os.Stderr, err.Error())
@ -603,7 +482,7 @@ func main() {
os.Exit(1)
}
} else {
err := writeOutputFile(output, config.outputFile, config.evalCreateOutputDirs)
err := cmd.WriteOutputFile(output, config.outputFile, config.evalCreateOutputDirs)
if err != nil {
fmt.Fprintln(os.Stderr, err.Error())
os.Exit(1)

View File

@ -0,0 +1,20 @@
load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library")
go_library(
name = "go_default_library",
srcs = ["cmd.go"],
importpath = "github.com/google/go-jsonnet/cmd/jsonnetfmt",
visibility = ["//visibility:private"],
deps = [
"//:go_default_library",
"//cmd:go_default_library",
"//internal/formatter:go_default_library",
"@com_github_fatih_color//:go_default_library",
],
)
go_binary(
name = "jsonnetfmt",
embed = [":go_default_library"],
visibility = ["//visibility:public"],
)

291
cmd/jsonnetfmt/cmd.go Normal file
View File

@ -0,0 +1,291 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package main
import (
"fmt"
"io"
"os"
"github.com/fatih/color"
"github.com/google/go-jsonnet"
"github.com/google/go-jsonnet/cmd/internal/cmd"
"github.com/google/go-jsonnet/internal/formatter"
)
// version writes the jsonnetfmt version banner to o. The version string
// is shared with the go-jsonnet library (jsonnet.Version()).
func version(o io.Writer) {
	fmt.Fprintf(o, "Jsonnet reformatter %s\n", jsonnet.Version())
}
// usage writes the version banner followed by the full commandline help
// text to o.
func usage(o io.Writer) {
	version(o)
	// One entry per output line; "" produces a blank line.
	for _, line := range []string{
		"",
		"jsonnetfmt {<option>} { <filename> }",
		"",
		"Available options:",
		" -h / --help This message",
		" -e / --exec Treat filename as code",
		" -o / --output-file <file> Write to the output file rather than stdout",
		" -c / --create-output-dirs Automatically creates all parent directories for files",
		" -i / --in-place Update the Jsonnet file(s) in place.",
		" --test Exit with failure if reformatting changed the file(s).",
		" -n / --indent <n> Number of spaces to indent by (default 2, 0 means no change)",
		" --max-blank-lines <n> Max vertical spacing, 0 means no change (default 2)",
		" --string-style <d|s|l> Enforce double, single (default) quotes or 'leave'",
		" --comment-style <h|s|l> # (h), // (s)(default), or 'leave'; never changes she-bang",
		" --[no-]pretty-field-names Use syntax sugar for fields and indexing (on by default)",
		" --[no-]pad-arrays [ 1, 2, 3 ] instead of [1, 2, 3]",
		" --[no-]pad-objects { x: 1, y: 2 } instead of {x: 1, y: 2} (on by default)",
		" --[no-]sort-imports Sorting of imports (on by default)",
		" --version Print version",
		"",
		"In all cases:",
		"<filename> can be - (stdin)",
		"Multichar options are expanded e.g. -abc becomes -a -b -c.",
		"The -- option suppresses option processing for subsequent arguments.",
		"Note that since filenames and jsonnet programs can begin with -, it is advised to",
		"use -- if the argument is unknown, e.g. jsonnet -- \"$FILENAME\".",
	} {
		fmt.Fprintln(o, line)
	}
}
// config holds the jsonnetfmt commandline settings accumulated by
// processArgs.
type config struct {
	evalCreateOutputDirs bool              // -c: create parent directories for the output file
	filenameIsCode       bool              // -e: treat the "filename" argument as Jsonnet source text
	inPlace              bool              // -i: rewrite the input file(s) with the formatted output
	inputFiles           []string          // positional arguments (filenames, or code when -e is given)
	options              formatter.Options // formatting knobs (indent, string/comment style, padding, ...)
	outputFile           string            // -o: destination path; "" means stdout
	test                 bool              // --test: succeed only if formatting would change nothing
}
// makeConfig builds the initial configuration, with formatter options set
// to their defaults; all other fields start at their zero values.
func makeConfig() config {
	var c config
	c.options = formatter.DefaultOptions()
	return c
}
// processArgsStatus tells main how to proceed after argument parsing.
type processArgsStatus int

// Only the first constant needs "= iota"; the rest continue the sequence.
const (
	processArgsStatusContinue     = iota // proceed with formatting
	processArgsStatusSuccessUsage        // print usage to stdout, exit 0
	processArgsStatusFailureUsage        // print usage to stderr, exit 1
	processArgsStatusSuccess             // exit 0 (e.g. --version)
	processArgsStatusFailure             // exit 1
)
// processArgs parses the commandline arguments into config, returning a
// status that tells main whether to continue formatting, print usage, or
// exit. Non-option arguments are collected as input files (or as code when
// -e is given). The vm parameter is unused here — presumably kept for
// symmetry with the jsonnet CLI's processArgs; verify before removing.
func processArgs(givenArgs []string, config *config, vm *jsonnet.VM) (processArgsStatus, error) {
	// Expand clustered short options (-abc -> -a -b -c) up to the first "--".
	args := cmd.SimplifyArgs(givenArgs)
	remainingArgs := make([]string, 0, len(args))
	i := 0
	for ; i < len(args); i++ {
		arg := args[i]
		if arg == "-h" || arg == "--help" {
			return processArgsStatusSuccessUsage, nil
		} else if arg == "-v" || arg == "--version" {
			version(os.Stdout)
			return processArgsStatusSuccess, nil
		} else if arg == "-e" || arg == "--exec" {
			config.filenameIsCode = true
		} else if arg == "-o" || arg == "--output-file" {
			outputFile := cmd.NextArg(&i, args)
			if len(outputFile) == 0 {
				return processArgsStatusFailure, fmt.Errorf("-o argument was empty string")
			}
			config.outputFile = outputFile
		} else if arg == "--" {
			// All subsequent args are not options.
			i++
			for ; i < len(args); i++ {
				remainingArgs = append(remainingArgs, args[i])
			}
			break
		} else if arg == "-i" || arg == "--in-place" {
			config.inPlace = true
		} else if arg == "--test" {
			config.test = true
		} else if arg == "-n" || arg == "--indent" {
			// cmd.SafeStrToInt exits the process on a non-integer argument.
			n := cmd.SafeStrToInt(cmd.NextArg(&i, args))
			if n < 0 {
				return processArgsStatusFailure, fmt.Errorf("invalid --indent value: %d", n)
			}
			config.options.Indent = n
		} else if arg == "--max-blank-lines" {
			n := cmd.SafeStrToInt(cmd.NextArg(&i, args))
			if n < 0 {
				return processArgsStatusFailure, fmt.Errorf("invalid --max-blank-lines value: %d", n)
			}
			config.options.MaxBlankLines = n
		} else if arg == "--string-style" {
			// d = double quotes, s = single quotes, l = leave as-is.
			str := cmd.NextArg(&i, args)
			switch str {
			case "d":
				config.options.StringStyle = formatter.StringStyleDouble
			case "s":
				config.options.StringStyle = formatter.StringStyleSingle
			case "l":
				config.options.StringStyle = formatter.StringStyleLeave
			default:
				return processArgsStatusFailure, fmt.Errorf("invalid --string-style value: %s", str)
			}
		} else if arg == "--comment-style" {
			// h = "#", s = "//", l = leave as-is.
			str := cmd.NextArg(&i, args)
			switch str {
			case "h":
				config.options.CommentStyle = formatter.CommentStyleHash
			case "s":
				config.options.CommentStyle = formatter.CommentStyleSlash
			case "l":
				config.options.CommentStyle = formatter.CommentStyleLeave
			default:
				return processArgsStatusFailure, fmt.Errorf("invalid --comment-style value: %s", str)
			}
		} else if arg == "--pretty-field-names" {
			config.options.PrettyFieldNames = true
		} else if arg == "--no-pretty-field-names" {
			config.options.PrettyFieldNames = false
		} else if arg == "--pad-arrays" {
			config.options.PadArrays = true
		} else if arg == "--no-pad-arrays" {
			config.options.PadArrays = false
		} else if arg == "--pad-objects" {
			config.options.PadObjects = true
		} else if arg == "--no-pad-objects" {
			config.options.PadObjects = false
		} else if arg == "--sort-imports" {
			config.options.SortImports = true
		} else if arg == "--no-sort-imports" {
			config.options.SortImports = false
		} else if arg == "-c" || arg == "--create-output-dirs" {
			config.evalCreateOutputDirs = true
		} else if len(arg) > 1 && arg[0] == '-' {
			// Anything else starting with '-' is an unknown option.
			return processArgsStatusFailure, fmt.Errorf("unrecognized argument: %s", arg)
		} else {
			remainingArgs = append(remainingArgs, arg)
		}
	}
	want := "filename"
	if config.filenameIsCode {
		want = "code"
	}
	if len(remainingArgs) == 0 {
		return processArgsStatusFailureUsage, fmt.Errorf("must give %s", want)
	}
	// Multiple inputs are only meaningful with --test or --in-place;
	// otherwise there would be no way to distinguish the outputs.
	if !config.test && !config.inPlace {
		if len(remainingArgs) > 1 {
			return processArgsStatusFailure, fmt.Errorf("only one %s is allowed", want)
		}
	}
	config.inputFiles = remainingArgs
	return processArgsStatusContinue, nil
}
// main is the jsonnetfmt entry point. Exit codes: 0 on success, 1 on any
// error, and 2 when --test finds a file that reformatting would change.
func main() {
	// Optional profiling, controlled by JSONNET_CPU_PROFILE /
	// JSONNET_MEM_PROFILE environment variables.
	cmd.StartCPUProfile()
	defer cmd.StopCPUProfile()
	// A VM is created for processArgs' signature; formatting itself goes
	// through formatter.Format below.
	vm := jsonnet.MakeVM()
	vm.ErrorFormatter.SetColorFormatter(color.New(color.FgRed).Fprintf)
	config := makeConfig()
	status, err := processArgs(os.Args[1:], &config, vm)
	if err != nil {
		fmt.Fprintln(os.Stderr, "ERROR: "+err.Error())
	}
	switch status {
	case processArgsStatusContinue:
		break
	case processArgsStatusSuccessUsage:
		usage(os.Stdout)
		os.Exit(0)
	case processArgsStatusFailureUsage:
		if err != nil {
			// Blank line separating the ERROR line from the usage text.
			fmt.Fprintln(os.Stderr, "")
		}
		usage(os.Stderr)
		os.Exit(1)
	case processArgsStatusSuccess:
		os.Exit(0)
	case processArgsStatusFailure:
		os.Exit(1)
	}
	if config.inPlace || config.test {
		// --in-place / --test mode: process each input file in turn.
		if len(config.inputFiles) == 0 {
			// Should already have been caught by processArgs.
			panic("Internal error: expected at least one input file.")
		}
		for _, inputFile := range config.inputFiles {
			outputFile := inputFile
			if config.inPlace {
				// In-place rewriting needs a real file to write back to.
				if inputFile == "-" {
					fmt.Fprintf(os.Stderr, "ERROR: cannot use --in-place with stdin\n")
					os.Exit(1)
				}
				if config.filenameIsCode {
					fmt.Fprintf(os.Stderr, "ERROR: cannot use --in-place with --exec\n")
					os.Exit(1)
				}
			}
			input := cmd.SafeReadInput(config.filenameIsCode, &inputFile)
			output, err := formatter.Format(inputFile, input, config.options)
			cmd.MemProfile()
			if err != nil {
				fmt.Fprintln(os.Stderr, err.Error())
				os.Exit(1)
			}
			// Only touch the file (or fail the --test) when formatting
			// actually changed something.
			if output != input {
				if config.inPlace {
					err := cmd.WriteOutputFile(output, outputFile, false)
					if err != nil {
						fmt.Fprintln(os.Stderr, err.Error())
						os.Exit(1)
					}
				} else {
					// --test mode: exit 2 signals "file would change".
					os.Exit(2)
				}
			}
		}
	} else {
		// Normal mode: format a single input to stdout or -o <file>.
		if len(config.inputFiles) != 1 {
			// Should already have been caught by processArgs.
			panic("Internal error: expected a single input file.")
		}
		inputFile := config.inputFiles[0]
		input := cmd.SafeReadInput(config.filenameIsCode, &inputFile)
		output, err := formatter.Format(inputFile, input, config.options)
		cmd.MemProfile()
		if err != nil {
			fmt.Fprintln(os.Stderr, err.Error())
			os.Exit(1)
		}
		err = cmd.WriteOutputFile(output, config.outputFile, true)
		if err != nil {
			fmt.Fprintln(os.Stderr, err.Error())
			os.Exit(1)
		}
	}
}

@ -1 +1 @@
Subproject commit 76d6ecd32e253a5429ad9568538df7ec07f470fc
Subproject commit 1753f44619d347ac5fa72cd6d4df2a2d1a42ad8d

14
go.mod
View File

@ -1,12 +1,8 @@
module github.com/google/go-jsonnet
require (
github.com/fatih/color v1.7.0
github.com/mattn/go-colorable v0.1.1 // indirect
github.com/mattn/go-isatty v0.0.7 // indirect
github.com/sergi/go-diff v1.0.0
github.com/stretchr/testify v1.3.0 // indirect
golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2 // indirect
)
go 1.13
require (
github.com/fatih/color v1.9.0
github.com/sergi/go-diff v1.1.0
)

41
go.sum
View File

@ -1,20 +1,31 @@
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/mattn/go-colorable v0.1.1 h1:G1f5SKeVxmagw/IyvzvtZE4Gybcc4Tr1tf7I8z0XgOg=
github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM=
github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2 h1:T5DasATyLQfmbTpfEXx/IOL9vfjzW6up+ZDkmHvIf2s=
golang.org/x/sys v0.0.0-20190531175056-4c3a928424d2/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

View File

@ -0,0 +1,27 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

# Library rule for the native Go jsonnet formatter passes.
# Depends only on the AST package and the internal parser
# (for string escape/unescape helpers).
go_library(
    name = "go_default_library",
    srcs = [
        "enforce_comment_style.go",
        "enforce_max_blank_lines.go",
        "enforce_string_style.go",
        "fix_indentation.go",
        "fix_newlines.go",
        "fix_parens.go",
        "fix_plus_object.go",
        "fix_trailing_commas.go",
        "jsonnetfmt.go",
        "no_redundant_slice_colon.go",
        "pretty_field_names.go",
        "sort_imports.go",
        "strip.go",
        "unparser.go",
    ],
    importpath = "github.com/google/go-jsonnet/internal/formatter",
    visibility = ["//visibility:public"],
    deps = [
        "//ast:go_default_library",
        "//internal/parser:go_default_library",
    ],
)

View File

@ -0,0 +1,49 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/pass"
)
// EnforceCommentStyle is a formatter pass that ensures the comments are styled
// according to the configuration in Options.
type EnforceCommentStyle struct {
	pass.Base
	// Options provides CommentStyle, the comment style to enforce.
	Options Options
	// seenFirstFodder records whether a non-interstitial fodder element has
	// already been processed.  The very first one may be a "#!" shebang
	// line, which must never be rewritten.
	seenFirstFodder bool
}
// FodderElement implements this pass.  It rewrites the prefix of a
// single-line comment when it does not match the configured style.  A
// leading shebang line ("#!...") is always left untouched.
func (c *EnforceCommentStyle) FodderElement(p pass.ASTPass, element *ast.FodderElement, ctx pass.Context) {
	if element.Kind == ast.FodderInterstitial {
		return
	}
	if len(element.Comment) == 1 {
		comment := &element.Comment[0]
		switch {
		case c.Options.CommentStyle == CommentStyleHash && (*comment)[0] == '/':
			// "//..." -> "#..."
			*comment = "#" + (*comment)[2:]
		case c.Options.CommentStyle == CommentStyleSlash && (*comment)[0] == '#':
			if !c.seenFirstFodder && (*comment)[1] == '!' {
				// Shebang line: keep as-is and do not mark fodder as seen.
				return
			}
			// "#..." -> "//..."
			*comment = "//" + (*comment)[1:]
		}
	}
	c.seenFirstFodder = true
}

View File

@ -0,0 +1,38 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/pass"
)
// EnforceMaxBlankLines is a formatter pass that ensures there are not
// too many blank lines in the code.
type EnforceMaxBlankLines struct {
	pass.Base
	// Options provides MaxBlankLines, the maximum number of consecutive
	// blank lines permitted.
	Options Options
}
// FodderElement implements this pass by clamping the number of blank lines
// carried by a fodder element to the configured maximum.
func (c *EnforceMaxBlankLines) FodderElement(p pass.ASTPass, element *ast.FodderElement, ctx pass.Context) {
	if element.Kind == ast.FodderInterstitial {
		return
	}
	if element.Blanks > c.Options.MaxBlankLines {
		element.Blanks = c.Options.MaxBlankLines
	}
}

View File

@ -0,0 +1,76 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/parser"
"github.com/google/go-jsonnet/internal/pass"
)
// EnforceStringStyle is a formatter pass that manages string literals,
// rewriting plain (non-block, non-verbatim) strings to use the quote
// character configured in Options.StringStyle where that does not require
// extra escaping.
type EnforceStringStyle struct {
	pass.Base
	// Options provides StringStyle (single or double quotes).
	Options Options
}
// LiteralString implements this pass.  Block strings and verbatim strings
// are never rewritten; other strings are re-escaped with the preferred
// quote character, unless switching quotes would force extra escaping.
func (c *EnforceStringStyle) LiteralString(p pass.ASTPass, lit *ast.LiteralString, ctx pass.Context) {
	switch lit.Kind {
	case ast.StringBlock, ast.VerbatimStringDouble, ast.VerbatimStringSingle:
		return
	}
	canonical, err := parser.StringUnescape(lit.Loc(), lit.Value)
	if err != nil {
		panic("Badly formatted string, should have been caught in lexer.")
	}
	// Count each kind of quote appearing in the unescaped content.
	var singles, doubles int
	for _, r := range canonical {
		switch r {
		case '\'':
			singles++
		case '"':
			doubles++
		}
	}
	if singles > 0 && doubles > 0 {
		return // Both quote kinds present: leave the literal alone.
	}
	// Prefer the configured style, but switch to whichever quote avoids
	// escaping the quotes that actually occur in the content.
	useSingle := c.Options.StringStyle == StringStyleSingle
	if singles > 0 {
		useSingle = false
	} else if doubles > 0 {
		useSingle = true
	}
	lit.Value = parser.StringEscape(canonical, useSingle)
	if useSingle {
		lit.Kind = ast.StringSingle
	} else {
		lit.Kind = ast.StringDouble
	}
}

View File

@ -0,0 +1,807 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"strings"
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/pass"
)
// FixIndentation is a formatter pass that changes the indentation of new line
// fodder so that it follows the nested structure of the code.
type FixIndentation struct {
	pass.Base

	// column tracks the current output column while the pass simulates
	// printing the program (see fillLast).
	column  int
	Options Options
}
// indent is the representation of the indentation level. The field lineUp is
// what is generally used to indent after a new line. The field base is used to
// help derive a new Indent struct when the indentation level increases. lineUp
// is generally > base.
//
// In the following case (where spaces are replaced with underscores):
// ____foobar(1,
// ___________2)
//
// At the AST representing the 2, the indent has base == 4 and lineUp == 11.
type indent struct {
	// base is the indentation of the construct itself.
	base int
	// lineUp is the column at which continuation lines are aligned.
	lineUp int
}
// setIndents sets the indentation values within the fodder elements.
// The final non-interstitial element receives lastIndent; every earlier one
// receives allButLastIndent.
func (c *FixIndentation) setIndents(
	fodder ast.Fodder, allButLastIndent int, lastIndent int) {
	// Count the elements that take an indent at all.
	count := 0
	for _, f := range fodder {
		if f.Kind != ast.FodderInterstitial {
			count++
		}
	}
	// Assign the indents.
	seen := 0
	for index := range fodder {
		f := &fodder[index]
		if f.Kind == ast.FodderInterstitial {
			continue
		}
		seen++
		if seen < count {
			f.Indent = allButLastIndent
		} else {
			// seen == count: this is the final non-interstitial element.
			f.Indent = lastIndent
		}
	}
}
// fillLast sets the indentation on the fodder elements and adjusts the
// c.column counter as if it was printed.
// To understand fodder, crowded, separateToken, see the documentation of
// unparser.fill.
// allButLastIndent is the new indentation value for all but the final fodder
// element.
// lastIndent is the new indentation value for the final fodder element.
func (c *FixIndentation) fillLast(
	fodder ast.Fodder, crowded bool, separateToken bool,
	allButLastIndent int, lastIndent int) {
	c.setIndents(fodder, allButLastIndent, lastIndent)

	// A model of unparser.fill that just keeps track of the
	// c.column counter.
	for _, fod := range fodder {
		switch fod.Kind {
		case ast.FodderParagraph:
			c.column = fod.Indent
			crowded = false
		case ast.FodderLineEnd:
			c.column = fod.Indent
			crowded = false
		case ast.FodderInterstitial:
			if crowded {
				// Space before the inline comment.
				c.column++
			}
			c.column += len(fod.Comment[0])
			crowded = true
		}
	}
	if separateToken && crowded {
		// Space before the token that follows this fodder.
		c.column++
	}
}
// fill is like fillLast but where the final and prior fodder get the same
// indentation value.
func (c *FixIndentation) fill(
	fodder ast.Fodder, crowded bool, separateToken bool, indent int) {
	c.fillLast(fodder, crowded, separateToken, indent, indent)
}
// newIndent calculates the indentation of sub-expressions.
// If the first sub-expression is on the same line as the current node (its
// open fodder has no newline), subsequent ones are lined up at lineUp;
// otherwise they go on the next line, indented one level beyond old.base.
func (c *FixIndentation) newIndent(firstFodder ast.Fodder, old indent, lineUp int) indent {
	if len(firstFodder) > 0 && firstFodder[0].Kind != ast.FodderInterstitial {
		// Sub-expression starts on a new line: indent one level deeper.
		next := old.base + c.Options.Indent
		return indent{base: next, lineUp: next}
	}
	return indent{base: old.base, lineUp: lineUp}
}
// newIndentStrong calculates the indentation of sub-expressions.
// Like newIndent, but when the first sub-expression shares the current line,
// further indentation inside the sub-expressions is also based from the
// lined-up column.
func (c *FixIndentation) newIndentStrong(firstFodder ast.Fodder, old indent, lineUp int) indent {
	if len(firstFodder) > 0 && firstFodder[0].Kind != ast.FodderInterstitial {
		// Sub-expression starts on a new line: indent one level deeper.
		next := old.base + c.Options.Indent
		return indent{base: next, lineUp: next}
	}
	return indent{base: lineUp, lineUp: lineUp}
}
// align calculates the indentation of sub-expressions.
// If the first sub-expression is on the same line as the current node,
// subsequent ones are lined up at lineUp; otherwise subsequent ones go on
// the next line with no additional indent.
func (c *FixIndentation) align(firstFodder ast.Fodder, old indent, lineUp int) indent {
	onSameLine := len(firstFodder) == 0 || firstFodder[0].Kind == ast.FodderInterstitial
	if !onSameLine {
		return old
	}
	return indent{base: old.base, lineUp: lineUp}
}
// alignStrong calculates the indentation of sub-expressions.
// Like align, but when the first sub-expression shares the current line,
// further indentation inside the sub-expressions is also based from the
// lined-up column.
func (c *FixIndentation) alignStrong(firstFodder ast.Fodder, old indent, lineUp int) indent {
	onSameLine := len(firstFodder) == 0 || firstFodder[0].Kind == ast.FodderInterstitial
	if !onSameLine {
		return old
	}
	return indent{base: lineUp, lineUp: lineUp}
}
// hasNewLines reports whether the given fodder contains at least one new
// line (i.e. any non-interstitial element).
func (c *FixIndentation) hasNewLines(fodder ast.Fodder) bool {
	for _, f := range fodder {
		if f.Kind != ast.FodderInterstitial {
			return true
		}
	}
	return false
}
// specs indents comprehension for/if specs, processing outer specs first so
// they are handled in source order, and advances c.column as if the tokens
// were printed.
func (c *FixIndentation) specs(spec *ast.ForSpec, currIndent indent) {
	if spec.Outer != nil {
		c.specs(spec.Outer, currIndent)
	}
	c.fill(spec.ForFodder, true, true, currIndent.lineUp)
	c.column += 3 // for
	c.fill(spec.VarFodder, true, true, currIndent.lineUp)
	c.column += len(spec.VarName)
	c.fill(spec.InFodder, true, true, currIndent.lineUp)
	c.column += 2 // in
	newIndent := c.newIndent(*openFodder(spec.Expr), currIndent, c.column)
	c.Visit(spec.Expr, newIndent, true)
	for _, cond := range spec.Conditions {
		c.fill(cond.IfFodder, true, true, currIndent.lineUp)
		c.column += 2 // if
		// Fix: indent and visit the condition's own expression (cond.Expr).
		// Previously spec.Expr was visited again here, which left if-clause
		// expressions unindented and re-processed the for-expression's
		// fodder once per condition, corrupting the column model.
		condIndent := c.newIndent(*openFodder(cond.Expr), currIndent, c.column)
		c.Visit(cond.Expr, condIndent, true)
	}
}
// params sets the indentation inside a parameter list, including the
// surrounding parentheses, and advances c.column as if it were printed.
func (c *FixIndentation) params(fodderL ast.Fodder, params []ast.Parameter,
	trailingComma bool, fodderR ast.Fodder, currIndent indent) {
	c.fill(fodderL, false, false, currIndent.lineUp)
	c.column++ // (

	// Derive the inner indent from the fodder that follows the opening
	// paren: the first parameter's name fodder, or the closing paren's
	// fodder when the list is empty.
	firstInside := fodderR
	if len(params) > 0 {
		firstInside = params[0].NameFodder
	}
	newIndent := c.newIndent(firstInside, currIndent, c.column)

	for i, param := range params {
		if i > 0 {
			c.column++ // ','
		}
		c.fill(param.NameFodder, i > 0, true, newIndent.lineUp)
		c.column += len(param.Name)
		if param.DefaultArg != nil {
			c.fill(param.EqFodder, false, false, newIndent.lineUp)
			c.column++ // '=' -- default args have no spacing: x=e
			c.Visit(param.DefaultArg, newIndent, false)
		}
		c.fill(param.CommaFodder, false, false, newIndent.lineUp)
	}
	if trailingComma {
		c.column++ // ','
	}
	c.fillLast(fodderR, false, false, newIndent.lineUp, currIndent.lineUp)
	c.column++ // )
}
// fieldParams indents the parameter list of a field, if the field is a
// method; plain fields are left untouched.
func (c *FixIndentation) fieldParams(field ast.ObjectField, currIndent indent) {
	if m := field.Method; m != nil {
		c.params(m.ParenLeftFodder, m.Parameters, m.TrailingComma,
			m.ParenRightFodder, currIndent)
	}
}
// fields indents fields within an object.
// currIndent is the indent of the first field.
// crowded is whether the first field is crowded (see unparser.fill).
func (c *FixIndentation) fields(fields ast.ObjectFields, currIndent indent, crowded bool) {
	newIndent := currIndent.lineUp
	for i, field := range fields {
		if i > 0 {
			c.column++ // ','
		}

		// An aux function so we don't repeat ourselves for the 3 kinds of
		// basic field.
		unparseFieldRemainder := func(field ast.ObjectField) {
			c.fieldParams(field, currIndent)
			c.fill(field.OpFodder, false, false, newIndent)
			if field.SuperSugar {
				c.column++ // '+'
			}
			// Width of the ':' / '::' / ':::' token.
			switch field.Hide {
			case ast.ObjectFieldInherit:
				c.column++
			case ast.ObjectFieldHidden:
				c.column += 2
			case ast.ObjectFieldVisible:
				c.column += 3
			}
			c.Visit(field.Expr2,
				c.newIndent(*openFodder(field.Expr2), currIndent, c.column),
				true)
		}
		switch field.Kind {
		case ast.ObjectLocal:
			c.fill(field.Fodder1, i > 0 || crowded, true, currIndent.lineUp)
			c.column += 5 // local
			c.fill(field.Fodder2, true, true, currIndent.lineUp)
			c.column += len(*field.Id)
			c.fieldParams(field, currIndent)
			c.fill(field.OpFodder, true, true, currIndent.lineUp)
			c.column++ // =
			newIndent2 := c.newIndent(*openFodder(field.Expr2), currIndent, c.column)
			c.Visit(field.Expr2, newIndent2, true)
		case ast.ObjectFieldID:
			c.fill(field.Fodder1, i > 0 || crowded, true, newIndent)
			c.column += len(*field.Id)
			unparseFieldRemainder(field)
		case ast.ObjectFieldStr:
			// The field name is a string literal; its own fodder serves as
			// the field's opening fodder.
			c.Visit(field.Expr1, currIndent, i > 0 || crowded)
			unparseFieldRemainder(field)
		case ast.ObjectFieldExpr:
			c.fill(field.Fodder1, i > 0 || crowded, true, newIndent)
			c.column++ // [
			c.Visit(field.Expr1, currIndent, false)
			c.fill(field.Fodder2, false, false, newIndent)
			c.column++ // ]
			unparseFieldRemainder(field)
		case ast.ObjectAssert:
			c.fill(field.Fodder1, i > 0 || crowded, true, newIndent)
			c.column += 6 // assert
			// + 1 for the space after the assert
			newIndent2 := c.newIndent(*openFodder(field.Expr2), currIndent, c.column+1)
			c.Visit(field.Expr2, currIndent, true)
			if field.Expr3 != nil {
				c.fill(field.OpFodder, true, true, newIndent2.lineUp)
				c.column++ // ":"
				c.Visit(field.Expr3, newIndent2, true)
			}
		}
		c.fill(field.CommaFodder, false, false, newIndent)
	}
}
// Visit has logic common to all nodes: it sets the indentation of the node's
// open fodder, then dispatches on the concrete node type to indent every
// piece of interior fodder and every sub-expression, advancing c.column as
// if the program were being printed.
// currIndent is the indentation context for this node; crowded indicates
// whether a token immediately precedes it (see unparser.fill).
func (c *FixIndentation) Visit(expr ast.Node, currIndent indent, crowded bool) {
	// Left-recursive nodes (binary ops, index, apply, ...) keep their open
	// fodder deep inside the tree, so no separating space is needed here.
	separateToken := leftRecursive(expr) == nil
	c.fill(*expr.OpenFodder(), crowded, separateToken, currIndent.lineUp)
	switch node := expr.(type) {
	case *ast.Apply:
		initFodder := *openFodder(node.Target)
		newColumn := c.column
		if crowded {
			newColumn++
		}
		newIndent := c.align(initFodder, currIndent, newColumn)
		c.Visit(node.Target, newIndent, crowded)
		c.fill(node.FodderLeft, false, false, newIndent.lineUp)
		c.column++ // (
		// The fodder of the first thing inside the parens (an argument, or
		// the closing paren if there are none).
		firstFodder := node.FodderRight
		for _, arg := range node.Arguments.Named {
			firstFodder = arg.NameFodder
			break
		}
		for _, arg := range node.Arguments.Positional {
			firstFodder = *openFodder(arg.Expr)
			break
		}
		strongIndent := false
		// Need to use strong indent if any of the
		// arguments (except the first) are preceded by newlines.
		first := true
		for _, arg := range node.Arguments.Positional {
			if first {
				// Skip first element.
				first = false
				continue
			}
			if c.hasNewLines(*openFodder(arg.Expr)) {
				strongIndent = true
			}
		}
		for _, arg := range node.Arguments.Named {
			if first {
				// Skip first element.
				first = false
				continue
			}
			if c.hasNewLines(arg.NameFodder) {
				strongIndent = true
			}
		}
		var argIndent indent
		if strongIndent {
			argIndent = c.newIndentStrong(firstFodder, currIndent, c.column)
		} else {
			argIndent = c.newIndent(firstFodder, currIndent, c.column)
		}
		first = true
		for _, arg := range node.Arguments.Positional {
			if !first {
				c.column++ // ","
			}
			space := !first
			c.Visit(arg.Expr, argIndent, space)
			c.fill(arg.CommaFodder, false, false, argIndent.lineUp)
			first = false
		}
		for _, arg := range node.Arguments.Named {
			if !first {
				c.column++ // ","
			}
			space := !first
			c.fill(arg.NameFodder, space, false, argIndent.lineUp)
			c.column += len(arg.Name)
			c.column++ // "="
			c.Visit(arg.Arg, argIndent, false)
			c.fill(arg.CommaFodder, false, false, argIndent.lineUp)
			first = false
		}
		if node.TrailingComma {
			c.column++ // ","
		}
		c.fillLast(node.FodderRight, false, false, argIndent.lineUp, currIndent.base)
		c.column++ // )
		if node.TailStrict {
			c.fill(node.TailStrictFodder, true, true, currIndent.base)
			c.column += 10 // tailstrict
		}
	case *ast.ApplyBrace:
		// e.g. "foo { bar: 1 }" -- sugar for foo + { bar: 1 }.
		initFodder := *openFodder(node.Left)
		newColumn := c.column
		if crowded {
			newColumn++
		}
		newIndent := c.align(initFodder, currIndent, newColumn)
		c.Visit(node.Left, newIndent, crowded)
		c.Visit(node.Right, newIndent, true)
	case *ast.Array:
		c.column++ // '['
		// First fodder element exists and is a newline
		var firstFodder ast.Fodder
		if len(node.Elements) > 0 {
			firstFodder = *openFodder(node.Elements[0].Expr)
		} else {
			firstFodder = node.CloseFodder
		}
		newColumn := c.column
		if c.Options.PadArrays {
			newColumn++
		}
		strongIndent := false
		// Need to use strong indent if there are not newlines before any of the sub-expressions
		for i, el := range node.Elements {
			if i == 0 {
				continue
			}
			if c.hasNewLines(*openFodder(el.Expr)) {
				strongIndent = true
			}
		}
		var newIndent indent
		if strongIndent {
			newIndent = c.newIndentStrong(firstFodder, currIndent, newColumn)
		} else {
			newIndent = c.newIndent(firstFodder, currIndent, newColumn)
		}
		for i, el := range node.Elements {
			if i > 0 {
				c.column++ // ','
			}
			c.Visit(el.Expr, newIndent, i > 0 || c.Options.PadArrays)
			c.fill(el.CommaFodder, false, false, newIndent.lineUp)
		}
		if node.TrailingComma {
			c.column++ // ','
		}
		// Handle penultimate newlines from expr.CloseFodder if there are any.
		c.fillLast(node.CloseFodder,
			len(node.Elements) > 0,
			c.Options.PadArrays,
			newIndent.lineUp,
			currIndent.base)
		c.column++ // ']'
	case *ast.ArrayComp:
		c.column++ // [
		newColumn := c.column
		if c.Options.PadArrays {
			newColumn++
		}
		newIndent :=
			c.newIndent(*openFodder(node.Body), currIndent, newColumn)
		c.Visit(node.Body, newIndent, c.Options.PadArrays)
		c.fill(node.TrailingCommaFodder, false, false, newIndent.lineUp)
		if node.TrailingComma {
			c.column++ // ','
		}
		c.specs(&node.Spec, newIndent)
		c.fillLast(node.CloseFodder, true, c.Options.PadArrays,
			newIndent.lineUp, currIndent.base)
		c.column++ // ]
	case *ast.Assert:
		c.column += 6 // assert
		// + 1 for the space after the assert
		newIndent := c.newIndent(*openFodder(node.Cond), currIndent, c.column+1)
		c.Visit(node.Cond, newIndent, true)
		if node.Message != nil {
			c.fill(node.ColonFodder, true, true, newIndent.lineUp)
			c.column++ // ":"
			c.Visit(node.Message, newIndent, true)
		}
		c.fill(node.SemicolonFodder, false, false, newIndent.lineUp)
		c.column++ // ";"
		c.Visit(node.Rest, currIndent, true)
	case *ast.Binary:
		firstFodder := *openFodder(node.Left)
		// Need to use strong indent in the case of
		/*
		   A
		   + B
		   or
		   A +
		   B
		*/
		innerColumn := c.column
		if crowded {
			innerColumn++
		}
		var newIndent indent
		if c.hasNewLines(node.OpFodder) || c.hasNewLines(*openFodder(node.Right)) {
			newIndent = c.alignStrong(firstFodder, currIndent, innerColumn)
		} else {
			newIndent = c.align(firstFodder, currIndent, innerColumn)
		}
		c.Visit(node.Left, newIndent, crowded)
		c.fill(node.OpFodder, true, true, newIndent.lineUp)
		c.column += len(node.Op.String())
		// Don't calculate a new indent for here, because we like being able to do:
		// true &&
		// true &&
		// true
		c.Visit(node.Right, newIndent, true)
	case *ast.Conditional:
		c.column += 2 // if
		condIndent := c.newIndent(*openFodder(node.Cond), currIndent, c.column+1)
		c.Visit(node.Cond, condIndent, true)
		c.fill(node.ThenFodder, true, true, currIndent.base)
		c.column += 4 // then
		trueIndent := c.newIndent(*openFodder(node.BranchTrue), currIndent, c.column+1)
		c.Visit(node.BranchTrue, trueIndent, true)
		if node.BranchFalse != nil {
			c.fill(node.ElseFodder, true, true, currIndent.base)
			c.column += 4 // else
			falseIndent := c.newIndent(*openFodder(node.BranchFalse), currIndent, c.column+1)
			c.Visit(node.BranchFalse, falseIndent, true)
		}
	case *ast.Dollar:
		c.column++ // $
	case *ast.Error:
		c.column += 5 // error
		newIndent := c.newIndent(*openFodder(node.Expr), currIndent, c.column+1)
		c.Visit(node.Expr, newIndent, true)
	case *ast.Function:
		c.column += 8 // function
		c.params(node.ParenLeftFodder, node.Parameters,
			node.TrailingComma, node.ParenRightFodder, currIndent)
		newIndent := c.newIndent(*openFodder(node.Body), currIndent, c.column+1)
		c.Visit(node.Body, newIndent, true)
	case *ast.Import:
		c.column += 6 // import
		newIndent := c.newIndent(*openFodder(node.File), currIndent, c.column+1)
		c.Visit(node.File, newIndent, true)
	case *ast.ImportStr:
		c.column += 9 // importstr
		newIndent := c.newIndent(*openFodder(node.File), currIndent, c.column+1)
		c.Visit(node.File, newIndent, true)
	case *ast.InSuper:
		c.Visit(node.Index, currIndent, crowded)
		c.fill(node.InFodder, true, true, currIndent.lineUp)
		c.column += 2 // in
		c.fill(node.SuperFodder, true, true, currIndent.lineUp)
		c.column += 5 // super
	case *ast.Index:
		c.Visit(node.Target, currIndent, crowded)
		c.fill(node.LeftBracketFodder, false, false, currIndent.lineUp) // Can also be DotFodder
		if node.Id != nil {
			c.column++ // "."
			newIndent := c.newIndent(node.RightBracketFodder, currIndent, c.column)
			c.fill(node.RightBracketFodder, false, false, newIndent.lineUp) // Can also be IdFodder
			c.column += len(*node.Id)
		} else {
			c.column++ // "["
			newIndent := c.newIndent(*openFodder(node.Index), currIndent, c.column)
			c.Visit(node.Index, newIndent, false)
			c.fillLast(node.RightBracketFodder, false, false, newIndent.lineUp, currIndent.base)
			c.column++ // "]"
		}
	case *ast.Slice:
		c.Visit(node.Target, currIndent, crowded)
		c.fill(node.LeftBracketFodder, false, false, currIndent.lineUp)
		c.column++ // "["
		var newIndent indent
		if node.BeginIndex != nil {
			newIndent = c.newIndent(*openFodder(node.BeginIndex), currIndent, c.column)
			c.Visit(node.BeginIndex, newIndent, false)
		}
		if node.EndIndex != nil {
			newIndent = c.newIndent(node.EndColonFodder, currIndent, c.column)
			c.fill(node.EndColonFodder, false, false, newIndent.lineUp)
			c.column++ // ":"
			c.Visit(node.EndIndex, newIndent, false)
		}
		if node.Step != nil {
			if node.EndIndex == nil {
				newIndent = c.newIndent(node.EndColonFodder, currIndent, c.column)
				c.fill(node.EndColonFodder, false, false, newIndent.lineUp)
				c.column++ // ":"
			}
			c.fill(node.StepColonFodder, false, false, newIndent.lineUp)
			c.column++ // ":"
			c.Visit(node.Step, newIndent, false)
		}
		if node.BeginIndex == nil && node.EndIndex == nil && node.Step == nil {
			// Degenerate slice x[:] -- still has one colon token.
			newIndent = c.newIndent(node.EndColonFodder, currIndent, c.column)
			c.fill(node.EndColonFodder, false, false, newIndent.lineUp)
			c.column++ // ":"
		}
		c.column++ // "]"
	case *ast.Local:
		c.column += 5 // local
		if len(node.Binds) == 0 {
			panic("Not enough binds in local")
		}
		first := true
		newIndent := c.newIndent(node.Binds[0].VarFodder, currIndent, c.column+1)
		for _, bind := range node.Binds {
			if !first {
				c.column++ // ','
			}
			first = false
			c.fill(bind.VarFodder, true, true, newIndent.lineUp)
			c.column += len(bind.Variable)
			if bind.Fun != nil {
				c.params(bind.Fun.ParenLeftFodder,
					bind.Fun.Parameters,
					bind.Fun.TrailingComma,
					bind.Fun.ParenRightFodder,
					newIndent)
			}
			c.fill(bind.EqFodder, true, true, newIndent.lineUp)
			c.column++ // '='
			newIndent2 := c.newIndent(*openFodder(bind.Body), newIndent, c.column+1)
			c.Visit(bind.Body, newIndent2, true)
			c.fillLast(bind.CloseFodder, false, false, newIndent2.lineUp,
				currIndent.base)
		}
		c.column++ // ';'
		c.Visit(node.Body, currIndent, true)
	case *ast.LiteralBoolean:
		if node.Value {
			c.column += 4 // true
		} else {
			c.column += 5 // false
		}
	case *ast.LiteralNumber:
		c.column += len(node.OriginalString)
	case *ast.LiteralString:
		switch node.Kind {
		case ast.StringDouble:
			c.column += 2 + len(node.Value) // Include quotes
		case ast.StringSingle:
			c.column += 2 + len(node.Value) // Include quotes
		case ast.StringBlock:
			// Rewrite the block string's own indent strings to match the
			// surrounding indentation.
			node.BlockIndent = strings.Repeat(" ", currIndent.base+c.Options.Indent)
			node.BlockTermIndent = strings.Repeat(" ", currIndent.base)
			c.column = currIndent.base // blockTermIndent
			c.column += 3              // "|||"
		case ast.VerbatimStringSingle:
			c.column += 3 // Include @, start and end quotes
			for _, r := range node.Value {
				if r == '\'' {
					c.column += 2 // Quotes are doubled in verbatim strings.
				} else {
					c.column++
				}
			}
		case ast.VerbatimStringDouble:
			c.column += 3 // Include @, start and end quotes
			for _, r := range node.Value {
				if r == '"' {
					c.column += 2 // Quotes are doubled in verbatim strings.
				} else {
					c.column++
				}
			}
		}
	case *ast.LiteralNull:
		c.column += 4 // null
	case *ast.Object:
		c.column++ // '{'
		var firstFodder ast.Fodder
		if len(node.Fields) == 0 {
			firstFodder = node.CloseFodder
		} else {
			if node.Fields[0].Kind == ast.ObjectFieldStr {
				firstFodder = *openFodder(node.Fields[0].Expr1)
			} else {
				firstFodder = node.Fields[0].Fodder1
			}
		}
		newColumn := c.column
		if c.Options.PadObjects {
			newColumn++
		}
		newIndent := c.newIndent(firstFodder, currIndent, newColumn)
		c.fields(node.Fields, newIndent, c.Options.PadObjects)
		if node.TrailingComma {
			c.column++ // ','
		}
		c.fillLast(node.CloseFodder,
			len(node.Fields) > 0,
			c.Options.PadObjects,
			newIndent.lineUp,
			currIndent.base)
		c.column++ // '}'
	case *ast.ObjectComp:
		c.column++ // '{'
		var firstFodder ast.Fodder
		if len(node.Fields) == 0 {
			firstFodder = node.CloseFodder
		} else {
			if node.Fields[0].Kind == ast.ObjectFieldStr {
				firstFodder = *openFodder(node.Fields[0].Expr1)
			} else {
				firstFodder = node.Fields[0].Fodder1
			}
		}
		newColumn := c.column
		if c.Options.PadObjects {
			newColumn++
		}
		newIndent := c.newIndent(firstFodder, currIndent, newColumn)
		c.fields(node.Fields, newIndent, c.Options.PadObjects)
		if node.TrailingComma {
			c.column++ // ','
		}
		c.specs(&node.Spec, newIndent)
		c.fillLast(node.CloseFodder,
			true,
			c.Options.PadObjects,
			newIndent.lineUp,
			currIndent.base)
		c.column++ // '}'
	case *ast.Parens:
		c.column++ // (
		newIndent := c.newIndentStrong(*openFodder(node.Inner), currIndent, c.column)
		c.Visit(node.Inner, newIndent, false)
		c.fillLast(node.CloseFodder, false, false, newIndent.lineUp, currIndent.base)
		c.column++ // )
	case *ast.Self:
		c.column += 4 // self
	case *ast.SuperIndex:
		c.column += 5 // super
		c.fill(node.DotFodder, false, false, currIndent.lineUp)
		if node.Id != nil {
			c.column++ // ".";
			newIndent := c.newIndent(node.IDFodder, currIndent, c.column)
			c.fill(node.IDFodder, false, false, newIndent.lineUp)
			c.column += len(*node.Id)
		} else {
			c.column++ // "[";
			newIndent := c.newIndent(*openFodder(node.Index), currIndent, c.column)
			c.Visit(node.Index, newIndent, false)
			c.fillLast(node.IDFodder, false, false, newIndent.lineUp, currIndent.base)
			c.column++ // "]";
		}
	case *ast.Unary:
		c.column += len(node.Op.String())
		newIndent := c.newIndent(*openFodder(node.Expr), currIndent, c.column)
		// A space is needed after the operator if the operand starts with a
		// '$' (e.g. "- $x"), otherwise the tokens would merge.
		_, leftIsDollar := leftRecursiveDeep(node.Expr).(*ast.Dollar)
		c.Visit(node.Expr, newIndent, leftIsDollar)
	case *ast.Var:
		c.column += len(node.Id)
	}
}
// VisitFile corrects the whole file, including the final fodder that trails
// the last token of the program.
func (c *FixIndentation) VisitFile(body ast.Node, finalFodder ast.Fodder) {
	c.Visit(body, indent{0, 0}, false)
	c.setIndents(finalFodder, 0, 0)
}

View File

@ -0,0 +1,305 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/pass"
)
// FixNewlines is a formatter pass that adds newlines inside complex structures
// (arrays, objects etc.).
//
// The main principle is that a structure can either be:
// * expanded and contain newlines in all the designated places
// * unexpanded and contain newlines in none of the designated places
//
// It only looks shallowly at the AST nodes, so there may be some newlines deeper that
// don't affect expanding. For example:
// [{
//    'a': 'b',
//    'c': 'd',
// }]
// The outer array can stay unexpanded, because there are no newlines between
// the square brackets and the braces.
type FixNewlines struct {
	pass.Base
}
// Array handles this type of node.  If any element (or the closing bracket)
// is preceded by a newline, every element and the closing bracket are put on
// their own clean lines.
func (c *FixNewlines) Array(p pass.ASTPass, array *ast.Array, ctx pass.Context) {
	expand := ast.FodderCountNewlines(array.CloseFodder) > 0
	for _, element := range array.Elements {
		if ast.FodderCountNewlines(*openFodder(element.Expr)) > 0 {
			expand = true
		}
	}
	if expand {
		for i := range array.Elements {
			ast.FodderEnsureCleanNewline(openFodder(array.Elements[i].Expr))
		}
		ast.FodderEnsureCleanNewline(&array.CloseFodder)
	}
	c.Base.Array(p, array, ctx)
}
// objectFieldOpenFodder returns the fodder that precedes the first token of
// an object field.
func objectFieldOpenFodder(field *ast.ObjectField) *ast.Fodder {
	if field.Kind != ast.ObjectFieldStr {
		return &field.Fodder1
	}
	// A string-named field starts with its string literal, so that
	// literal's open fodder is the field's open fodder (openFodder does
	// not recurse for a string literal).
	return openFodder(field.Expr1)
}
// Object handles this type of node.  If any field (or the closing brace) is
// preceded by a newline, every field and the closing brace are put on their
// own clean lines.
func (c *FixNewlines) Object(p pass.ASTPass, object *ast.Object, ctx pass.Context) {
	expand := ast.FodderCountNewlines(object.CloseFodder) > 0
	for _, field := range object.Fields {
		if ast.FodderCountNewlines(*objectFieldOpenFodder(&field)) > 0 {
			expand = true
		}
	}
	if expand {
		for i := range object.Fields {
			ast.FodderEnsureCleanNewline(
				objectFieldOpenFodder(&object.Fields[i]))
		}
		ast.FodderEnsureCleanNewline(&object.CloseFodder)
	}
	c.Base.Object(p, object, ctx)
}
// Local handles this type of node.  If any bind is preceded by a newline,
// every bind after the first is put on its own clean line; the first bind
// stays on the "local" line.
func (c *FixNewlines) Local(p pass.ASTPass, local *ast.Local, ctx pass.Context) {
	expand := false
	for _, bind := range local.Binds {
		if ast.FodderCountNewlines(bind.VarFodder) > 0 {
			expand = true
		}
	}
	if expand {
		for i := range local.Binds {
			if i == 0 {
				continue
			}
			ast.FodderEnsureCleanNewline(&local.Binds[i].VarFodder)
		}
	}
	c.Base.Local(p, local, ctx)
}
// shouldExpandSpec reports whether any for/if clause of the given
// comprehension spec chain (including outer specs) is preceded by a newline.
func shouldExpandSpec(spec ast.ForSpec) bool {
	if spec.Outer != nil && shouldExpandSpec(*spec.Outer) {
		return true
	}
	if ast.FodderCountNewlines(spec.ForFodder) > 0 {
		return true
	}
	for _, ifSpec := range spec.Conditions {
		if ast.FodderCountNewlines(ifSpec.IfFodder) > 0 {
			return true
		}
	}
	return false
}
// ensureSpecExpanded puts every for/if clause of the comprehension spec
// chain (outermost first) on its own clean new line.
func ensureSpecExpanded(spec *ast.ForSpec) {
	if spec.Outer != nil {
		ensureSpecExpanded(spec.Outer)
	}
	ast.FodderEnsureCleanNewline(&spec.ForFodder)
	for i := range spec.Conditions {
		ast.FodderEnsureCleanNewline(&spec.Conditions[i].IfFodder)
	}
}
// ArrayComp handles this type of node.  If the body, any spec clause, or the
// closing bracket is preceded by a newline, all of them are put on their own
// clean lines.
func (c *FixNewlines) ArrayComp(p pass.ASTPass, arrayComp *ast.ArrayComp, ctx pass.Context) {
	expand := ast.FodderCountNewlines(*openFodder(arrayComp.Body)) > 0 ||
		shouldExpandSpec(arrayComp.Spec) ||
		ast.FodderCountNewlines(arrayComp.CloseFodder) > 0
	if expand {
		ast.FodderEnsureCleanNewline(openFodder(arrayComp.Body))
		ensureSpecExpanded(&arrayComp.Spec)
		ast.FodderEnsureCleanNewline(&arrayComp.CloseFodder)
	}
	c.Base.ArrayComp(p, arrayComp, ctx)
}
// ObjectComp handles this type of node.  If any field, any spec clause, or
// the closing brace is preceded by a newline, all of them are put on their
// own clean lines.
func (c *FixNewlines) ObjectComp(p pass.ASTPass, objectComp *ast.ObjectComp, ctx pass.Context) {
	expand := shouldExpandSpec(objectComp.Spec) ||
		ast.FodderCountNewlines(objectComp.CloseFodder) > 0
	for _, field := range objectComp.Fields {
		if ast.FodderCountNewlines(*objectFieldOpenFodder(&field)) > 0 {
			expand = true
		}
	}
	if expand {
		for i := range objectComp.Fields {
			ast.FodderEnsureCleanNewline(
				objectFieldOpenFodder(&objectComp.Fields[i]))
		}
		ensureSpecExpanded(&objectComp.Spec)
		ast.FodderEnsureCleanNewline(&objectComp.CloseFodder)
	}
	c.Base.ObjectComp(p, objectComp, ctx)
}
// Parens handles this type of node
func (c *FixNewlines) Parens(p pass.ASTPass, parens *ast.Parens, ctx pass.Context) {
	// If either the inner expression or the closing paren begins on a new
	// line, put both on their own lines.
	multiline := ast.FodderCountNewlines(*openFodder(parens.Inner)) > 0 ||
		ast.FodderCountNewlines(parens.CloseFodder) > 0
	if multiline {
		ast.FodderEnsureCleanNewline(openFodder(parens.Inner))
		ast.FodderEnsureCleanNewline(&parens.CloseFodder)
	}
	c.Base.Parens(p, parens, ctx)
}
// Parameters handles parameters.
//
// Example:
//
//	f(1, 2,
//	  3)
//
// should be expanded to:
//
//	f(1,
//	  2,
//	  3)
//
// And:
//
//	foo(
//	  1, 2, 3)
//
// should be expanded to:
//
//	foo(
//	  1, 2, 3
//	)
func (c *FixNewlines) Parameters(p pass.ASTPass, l *ast.Fodder, params *[]ast.Parameter, r *ast.Fodder, ctx pass.Context) {
	// A newline before the first parameter (or the closing paren) means the
	// parens go on their own lines; a newline before any later parameter
	// means every parameter gets its own line.
	expandBetween := false
	expandNearParens := false
	for i := range *params {
		if ast.FodderCountNewlines((*params)[i].NameFodder) > 0 {
			if i == 0 {
				expandNearParens = true
			} else {
				expandBetween = true
			}
		}
	}
	if ast.FodderCountNewlines(*r) > 0 {
		expandNearParens = true
	}
	for i := range *params {
		param := &(*params)[i]
		if (i == 0 && expandNearParens) || (i > 0 && expandBetween) {
			ast.FodderEnsureCleanNewline(&param.NameFodder)
		}
	}
	if expandNearParens {
		ast.FodderEnsureCleanNewline(r)
	}
	c.Base.Parameters(p, l, params, r, ctx)
}
// Arguments handles call arguments.
//
// Example:
//
//	f(1, 2,
//	  3)
//
// should be expanded to:
//
//	f(1,
//	  2,
//	  3)
//
// And:
//
//	foo(
//	  1, 2, 3)
//
// should be expanded to:
//
//	foo(
//	  1, 2, 3
//	)
func (c *FixNewlines) Arguments(p pass.ASTPass, l *ast.Fodder, args *ast.Arguments, r *ast.Fodder, ctx pass.Context) {
	expandBetween := false
	expandNearParens := false
	// classify records what a newline before the idx-th argument implies:
	// before the first argument it means "parens on their own lines",
	// before any later one it means "one argument per line".
	classify := func(fodder ast.Fodder, isFirst bool) {
		if ast.FodderCountNewlines(fodder) > 0 {
			if isFirst {
				expandNearParens = true
			} else {
				expandBetween = true
			}
		}
	}
	idx := 0
	for _, arg := range args.Positional {
		classify(*openFodder(arg.Expr), idx == 0)
		idx++
	}
	for _, arg := range args.Named {
		classify(arg.NameFodder, idx == 0)
		idx++
	}
	if ast.FodderCountNewlines(*r) > 0 {
		expandNearParens = true
	}
	// Apply the decision, walking positional then named arguments again.
	idx = 0
	for i := range args.Positional {
		if (idx == 0 && expandNearParens) || (idx > 0 && expandBetween) {
			ast.FodderEnsureCleanNewline(openFodder(args.Positional[i].Expr))
		}
		idx++
	}
	for i := range args.Named {
		if (idx == 0 && expandNearParens) || (idx > 0 && expandBetween) {
			ast.FodderEnsureCleanNewline(&args.Named[i].NameFodder)
		}
		idx++
	}
	if expandNearParens {
		ast.FodderEnsureCleanNewline(r)
	}
	c.Base.Arguments(p, l, args, r, ctx)
}

View File

@ -0,0 +1,38 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/pass"
)
// FixParens is a formatter pass that replaces ((e)) with (e), i.e. it
// removes parentheses that are directly nested inside other parentheses.
type FixParens struct {
	pass.Base
}
// Parens handles that type of node
func (c *FixParens) Parens(p pass.ASTPass, node *ast.Parens, ctx pass.Context) {
	// Collapse a directly nested Parens into this one, preserving the
	// inner node's fodder by moving it onto the outer node.
	if inner, ok := node.Inner.(*ast.Parens); ok {
		node.Inner = inner.Inner
		ast.FodderMoveFront(openFodder(node), &inner.Fodder)
		ast.FodderMoveFront(&node.CloseFodder, &inner.CloseFodder)
	}
	c.Base.Parens(p, node, ctx)
}

View File

@ -0,0 +1,49 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/pass"
)
// FixPlusObject is a formatter pass that replaces e + { ... } with the
// sugared form e { ... } (an ApplyBrace) where it is safe to do so.
// (The original comment was copy-pasted from FixParens and was incorrect.)
type FixPlusObject struct {
	pass.Base
}
// Visit replaces e + { ... } with an ApplyBrace in some situations.
func (c *FixPlusObject) Visit(p pass.ASTPass, node *ast.Node, ctx pass.Context) {
	if binary, ok := (*node).(*ast.Binary); ok {
		// Could relax this to allow more ASTs on the LHS but this seems
		// OK for now: only variables and index expressions qualify.
		lhsSimple := false
		switch binary.Left.(type) {
		case *ast.Var, *ast.Index:
			lhsSimple = true
		}
		if lhsSimple {
			if rhs, ok := binary.Right.(*ast.Object); ok && binary.Op == ast.BopPlus {
				// The `+` disappears, so its fodder moves to the object.
				ast.FodderMoveFront(&rhs.Fodder, &binary.OpFodder)
				*node = &ast.ApplyBrace{
					NodeBase: binary.NodeBase,
					Left:     binary.Left,
					Right:    rhs,
				}
			}
		}
	}
	c.Base.Visit(p, node, ctx)
}

View File

@ -0,0 +1,96 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/pass"
)
// containsNewline reports whether the fodder spans more than one line,
// i.e. contains any element other than an interstitial comment.
func containsNewline(fodder ast.Fodder) bool {
	for _, elem := range fodder {
		if elem.Kind == ast.FodderInterstitial {
			continue
		}
		return true
	}
	return false
}
// FixTrailingCommas is a formatter pass that ensures trailing commas are
// present when a list is split over several lines (and removed when the
// list fits on one line).
type FixTrailingCommas struct {
	pass.Base
}
// fixComma adds or removes a trailing comma so that multi-line lists end
// with one and single-line lists do not, preserving any attached fodder.
func (c *FixTrailingCommas) fixComma(lastCommaFodder *ast.Fodder, trailingComma *bool, closeFodder *ast.Fodder) {
	needComma := containsNewline(*closeFodder) || containsNewline(*lastCommaFodder)
	switch {
	case *trailingComma && !needComma:
		// Remove the comma but keep its fodder.
		*trailingComma = false
		ast.FodderMoveFront(closeFodder, lastCommaFodder)
	case *trailingComma && containsNewline(*lastCommaFodder):
		// The comma is needed but currently is separated by a newline,
		// so pull the fodder past it.
		ast.FodderMoveFront(closeFodder, lastCommaFodder)
	case !*trailingComma && needComma:
		// There was no comma, but there was a newline before the closing
		// token, so add a comma.
		*trailingComma = true
	}
}
// removeComma unconditionally strips a trailing comma (used where a comma
// is never wanted, e.g. before the `for` of a comprehension), keeping the
// fodder that was attached to it.
func (c *FixTrailingCommas) removeComma(lastCommaFodder *ast.Fodder, trailingComma *bool, closeFodder *ast.Fodder) {
	if !*trailingComma {
		return
	}
	// Remove it but keep fodder.
	*trailingComma = false
	ast.FodderMoveFront(closeFodder, lastCommaFodder)
}
// Array handles that type of node
func (c *FixTrailingCommas) Array(p pass.ASTPass, node *ast.Array, ctx pass.Context) {
	if len(node.Elements) == 0 {
		// No comma present and none can be added.
		return
	}
	last := &node.Elements[len(node.Elements)-1]
	c.fixComma(&last.CommaFodder, &node.TrailingComma, &node.CloseFodder)
	c.Base.Array(p, node, ctx)
}
// ArrayComp handles that type of node
func (c *FixTrailingCommas) ArrayComp(p pass.ASTPass, node *ast.ArrayComp, ctx pass.Context) {
	// A comma is never wanted before the `for` keyword.
	c.removeComma(&node.TrailingCommaFodder, &node.TrailingComma, &node.Spec.ForFodder)
	c.Base.ArrayComp(p, node, ctx)
}
// Object handles that type of node
func (c *FixTrailingCommas) Object(p pass.ASTPass, node *ast.Object, ctx pass.Context) {
	if len(node.Fields) == 0 {
		// No comma present and none can be added.
		return
	}
	last := &node.Fields[len(node.Fields)-1]
	c.fixComma(&last.CommaFodder, &node.TrailingComma, &node.CloseFodder)
	c.Base.Object(p, node, ctx)
}
// ObjectComp handles that type of node
func (c *FixTrailingCommas) ObjectComp(p pass.ASTPass, node *ast.ObjectComp, ctx pass.Context) {
	// A comma is never wanted before the `for` keyword.
	last := &node.Fields[len(node.Fields)-1]
	c.removeComma(&last.CommaFodder, &node.TrailingComma, &node.Spec.ForFodder)
	c.Base.ObjectComp(p, node, ctx)
}

View File

@ -0,0 +1,185 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/parser"
"github.com/google/go-jsonnet/internal/pass"
)
// StringStyle controls how the reformatter rewrites string literals.
// Strings that contain a ' or a " use the optimal syntax to avoid escaping
// those characters.
type StringStyle int

const (
	// StringStyleDouble means "this".
	StringStyleDouble StringStyle = iota
	// StringStyleSingle means 'this'.
	StringStyleSingle
	// StringStyleLeave means strings are left how they were found.
	StringStyleLeave
)
// CommentStyle controls how the reformatter rewrites comments.
// Comments that look like a #! hashbang are always left alone.
type CommentStyle int

const (
	// CommentStyleHash means #.
	CommentStyleHash CommentStyle = iota
	// CommentStyleSlash means //.
	CommentStyleSlash
	// CommentStyleLeave means comments are left as they are found.
	CommentStyleLeave
)
// Options is a set of parameters that control the reformatter's behaviour.
type Options struct {
	// Indent is the number of spaces for each level of indentation.
	Indent int
	// MaxBlankLines is the max allowed number of consecutive blank lines.
	MaxBlankLines int
	// StringStyle controls how string literals are quoted.
	StringStyle StringStyle
	// CommentStyle controls whether comments are written with # or //.
	CommentStyle CommentStyle
	// PrettyFieldNames causes fields to only be wrapped in '' when needed.
	PrettyFieldNames bool
	// PadArrays causes arrays to be written like [ this ] instead of [this].
	PadArrays bool
	// PadObjects causes objects to be written like { this } instead of {this}.
	PadObjects bool
	// SortImports causes imports at the top of the file to be sorted in groups
	// by filename.
	SortImports bool
	// StripEverything removes all comments and newlines.
	StripEverything bool
	// StripComments removes all comments.
	StripComments bool
	// StripAllButComments removes everything except comments.
	StripAllButComments bool
}
// DefaultOptions returns the recommended formatter behaviour.
func DefaultOptions() Options {
	var opts Options
	opts.Indent = 2
	opts.MaxBlankLines = 2
	opts.StringStyle = StringStyleSingle
	opts.CommentStyle = CommentStyleSlash
	opts.PrettyFieldNames = true
	opts.PadArrays = false
	opts.PadObjects = true
	opts.SortImports = true
	return opts
}
// leftRecursive returns the left-hand side of a left-recursive AST node
// (the sub-expression printed before the node's own first token), or nil
// if the node is not left recursive.
func leftRecursive(expr ast.Node) ast.Node {
	switch n := expr.(type) {
	case *ast.Apply:
		return n.Target
	case *ast.ApplyBrace:
		return n.Left
	case *ast.Binary:
		return n.Left
	case *ast.Index:
		return n.Target
	case *ast.InSuper:
		return n.Index
	case *ast.Slice:
		return n.Target
	}
	return nil
}
// leftRecursiveDeep is the transitive closure of leftRecursive.
// It only returns nil when called with nil.
func leftRecursiveDeep(expr ast.Node) ast.Node {
	for {
		next := leftRecursive(expr)
		if next == nil {
			return expr
		}
		expr = next
	}
}
// openFodder returns a pointer to the fodder that lexically precedes the
// first token of the given node, descending through left-recursive
// constructs (where the opening fodder is stored as far inside the tree
// as possible — see Node.OpenFodder).
func openFodder(node ast.Node) *ast.Fodder {
	return leftRecursiveDeep(node).OpenFodder()
}
// removeInitialNewlines strips any leading line-end fodder from the first
// token of the AST, so the output does not begin with blank lines.
func removeInitialNewlines(node ast.Node) {
	fodder := openFodder(node)
	trimmed := *fodder
	for len(trimmed) > 0 && trimmed[0].Kind == ast.FodderLineEnd {
		trimmed = trimmed[1:]
	}
	*fodder = trimmed
}
// visitFile runs the given pass over the whole file, including the fodder
// after the final token.
func visitFile(p pass.ASTPass, node *ast.Node, finalFodder *ast.Fodder) {
	p.File(p, node, finalFodder)
}
// Format returns code that is equivalent to its input but better formatted
// according to the given options.
func Format(filename string, input string, options Options) (string, error) {
	// Parse to a raw AST, retaining fodder (comments and whitespace) and
	// the fodder after the final token.
	node, finalFodder, err := parser.SnippetToRawAST(filename, input)
	if err != nil {
		return "", err
	}
	// Passes to enforce style on the AST.
	if options.SortImports {
		SortImports(&node)
	}
	removeInitialNewlines(node)
	if options.MaxBlankLines > 0 {
		visitFile(&EnforceMaxBlankLines{Options: options}, &node, &finalFodder)
	}
	visitFile(&FixNewlines{}, &node, &finalFodder)
	visitFile(&FixTrailingCommas{}, &node, &finalFodder)
	visitFile(&FixParens{}, &node, &finalFodder)
	visitFile(&FixPlusObject{}, &node, &finalFodder)
	visitFile(&NoRedundantSliceColon{}, &node, &finalFodder)
	// The strip options are applied mutually exclusively, in this priority.
	if options.StripComments {
		visitFile(&StripComments{}, &node, &finalFodder)
	} else if options.StripAllButComments {
		visitFile(&StripAllButComments{}, &node, &finalFodder)
	} else if options.StripEverything {
		visitFile(&StripEverything{}, &node, &finalFodder)
	}
	if options.PrettyFieldNames {
		visitFile(&PrettyFieldNames{}, &node, &finalFodder)
	}
	if options.StringStyle != StringStyleLeave {
		visitFile(&EnforceStringStyle{Options: options}, &node, &finalFodder)
	}
	if options.CommentStyle != CommentStyleLeave {
		visitFile(&EnforceCommentStyle{Options: options}, &node, &finalFodder)
	}
	// NOTE(review): indentation runs after the passes above, which may move
	// tokens onto new lines.
	if options.Indent > 0 {
		visitor := FixIndentation{Options: options}
		visitor.VisitFile(node, finalFodder)
	}
	// Unparse the restyled AST back to text.
	u := &unparser{options: options}
	u.unparse(node, false)
	u.fill(finalFodder, true, false)
	// Final whitespace is stripped at lexing time. Add a single new line
	// as files ought to end with a new line.
	u.write("\n")
	return u.string(), nil
}

View File

@ -0,0 +1,38 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/pass"
)
// NoRedundantSliceColon is a formatter pass that preserves fodder in the case
// of arr[1::] being formatted as arr[1:], i.e. it discards the redundant
// second colon when no step expression follows it.
type NoRedundantSliceColon struct {
	pass.Base
}
// Slice implements this pass.
func (c *NoRedundantSliceColon) Slice(p pass.ASTPass, slice *ast.Slice, ctx pass.Context) {
	// When there is no step expression, the second colon is redundant;
	// move any fodder attached to it onto the closing bracket instead.
	if slice.Step == nil && len(slice.StepColonFodder) > 0 {
		ast.FodderMoveFront(&slice.RightBracketFodder, &slice.StepColonFodder)
	}
	c.Base.Slice(p, slice, ctx)
}

View File

@ -0,0 +1,76 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/parser"
"github.com/google/go-jsonnet/internal/pass"
)
// PrettyFieldNames forces minimal syntax with field lookups and definitions,
// e.g. rewriting e["foo"] as e.foo and '...' field names as bare identifiers
// where the string is a valid identifier.
type PrettyFieldNames struct {
	pass.Base
}
// Index prettifies the definitions.
func (c *PrettyFieldNames) Index(p pass.ASTPass, index *ast.Index, ctx pass.Context) {
	// Maybe we can use an id instead: e["foo"] -> e.foo.
	if lit, ok := index.Index.(*ast.LiteralString); ok && parser.IsValidIdentifier(lit.Value) {
		index.Index = nil
		id := ast.Identifier(lit.Value)
		index.Id = &id
		index.RightBracketFodder = lit.Fodder
	}
	c.Base.Index(p, index, ctx)
}
// ObjectField prettifies the definitions.
func (c *PrettyFieldNames) ObjectField(p pass.ASTPass, field *ast.ObjectField, ctx pass.Context) {
	// First try ["foo"] -> "foo".
	if field.Kind == ast.ObjectFieldExpr {
		if lit, ok := field.Expr1.(*ast.LiteralString); ok {
			field.Kind = ast.ObjectFieldStr
			// The brackets disappear, so their fodder moves elsewhere.
			ast.FodderMoveFront(&lit.Fodder, &field.Fodder1)
			if field.Method != nil {
				ast.FodderMoveFront(&field.Method.ParenLeftFodder, &field.Fodder2)
			} else {
				ast.FodderMoveFront(&field.OpFodder, &field.Fodder2)
			}
		}
	}
	// Then try "foo" -> foo (possibly continuing from the step above).
	if field.Kind == ast.ObjectFieldStr {
		if lit, ok := field.Expr1.(*ast.LiteralString); ok && parser.IsValidIdentifier(lit.Value) {
			field.Kind = ast.ObjectFieldID
			id := ast.Identifier(lit.Value)
			field.Id = &id
			field.Fodder1 = lit.Fodder
			field.Expr1 = nil
		}
	}
	c.Base.ObjectField(p, field, ctx)
}

View File

@ -0,0 +1,229 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"sort"
"github.com/google/go-jsonnet/ast"
)
// importElem is one `local x = import '...'` binding together with the
// fodder that should travel with it when the group is reordered.
type importElem struct {
	key            string        // the imported file path, used as the sort key
	adjacentFodder ast.Fodder    // fodder that follows the binding and stays with it
	bind           ast.LocalBind // the binding itself
}
// sortGroup sorts a group of imports by path, unless two bindings share a
// variable name, in which case reordering could change the meaning and the
// group is left alone.
func sortGroup(imports []importElem) {
	if duplicatedVariables(imports) {
		return
	}
	sort.Slice(imports, func(a, b int) bool {
		return imports[a].key < imports[b].key
	})
}
// isGoodLocal checks whether a `local` expression is used purely for
// importing, i.e. every bind is a plain (non-function) import.
func isGoodLocal(local *ast.Local) bool {
	for _, bind := range local.Binds {
		_, isImport := bind.Body.(*ast.Import)
		if bind.Fun != nil || !isImport {
			return false
		}
	}
	return true
}
// goodLocalOrNull returns node as *ast.Local if it is a local used purely
// for importing, and nil otherwise.
func goodLocalOrNull(node ast.Node) *ast.Local {
	if local, ok := node.(*ast.Local); ok && isGoodLocal(local) {
		return local
	}
	return nil
}
// splitFodder splits fodder after the first new line / paragraph fodder,
// leaving blank lines after the newline in the second half.
//
// The two returned fodders can be concatenated using ast.FodderConcat to get
// the original fodder.
//
// It's a heuristic that, given two consecutive tokens `prevToken` and
// `nextToken` with some fodder between them, decides which part of the fodder
// logically belongs to `prevToken` and which part belongs to `nextToken`.
//
// Example:
//
//	prevToken  // prevToken is awesome!
//
//	// blah blah
//	nextToken
//
// In such a case the "// prevToken is awesome!\n" part of the fodder belongs
// to prevToken and "\n// blah blah\n" to nextToken.
func splitFodder(fodder ast.Fodder) (ast.Fodder, ast.Fodder) {
	var afterPrev, beforeNext ast.Fodder
	inSecondPart := false
	for _, fodderElem := range fodder {
		if inSecondPart {
			ast.FodderAppend(&beforeNext, fodderElem)
		} else {
			afterPrev = append(afterPrev, fodderElem)
		}
		// The first non-interstitial element ends the first half; it stays
		// in afterPrev, but any blank lines it carried move to beforeNext.
		if fodderElem.Kind != ast.FodderInterstitial && !inSecondPart {
			inSecondPart = true
			if fodderElem.Blanks > 0 {
				// If there are any blank lines at the end of afterPrev, move them
				// to beforeNext.
				afterPrev[len(afterPrev)-1].Blanks = 0
				if len(beforeNext) != 0 {
					panic("beforeNext should still be empty.")
				}
				beforeNext = append(beforeNext, ast.FodderElement{
					Kind:   ast.FodderLineEnd,
					Blanks: fodderElem.Blanks,
					Indent: fodderElem.Indent,
				})
			}
		}
	}
	return afterPrev, beforeNext
}
// extractImportElems converts the binds of a single import `local` into
// importElems, attaching to each bind the fodder that should travel with it
// when the group is sorted. `after` is the fodder following the last bind.
// Requires len(binds) > 0 and every bind body to be an *ast.Import.
func extractImportElems(binds ast.LocalBinds, after ast.Fodder) []importElem {
	var result []importElem
	// before is the fodder preceding the current bind's variable.
	before := binds[0].VarFodder
	for i, bind := range binds {
		last := i == len(binds)-1
		var adjacent ast.Fodder
		var beforeNext ast.Fodder
		if !last {
			// Split the next bind's fodder: the first part stays adjacent
			// to this bind, the remainder precedes the next one.
			next := &binds[i+1]
			adjacent, beforeNext = splitFodder(next.VarFodder)
		} else {
			adjacent = after
		}
		ast.FodderEnsureCleanNewline(&adjacent)
		newBind := bind
		newBind.VarFodder = before
		theImport := bind.Body.(*ast.Import)
		result = append(result,
			importElem{theImport.File.Value, adjacent, newBind})
		before = beforeNext
	}
	return result
}
// buildGroupAST rebuilds a chain of `local` nodes (one per import) around
// body, re-attaching each import's adjacent fodder, and returns the new root.
// groupOpenFodder precedes the first import of the group.
func buildGroupAST(imports []importElem, body ast.Node, groupOpenFodder ast.Fodder) ast.Node {
	// Wrap from the innermost (last) import outwards.
	for i := len(imports) - 1; i >= 0; i-- {
		elem := &imports[i]
		fodder := groupOpenFodder
		if i > 0 {
			fodder = imports[i-1].adjacentFodder
		}
		body = &ast.Local{
			NodeBase: ast.NodeBase{Fodder: fodder},
			Binds:    []ast.LocalBind{elem.bind},
			Body:     body,
		}
	}
	return body
}
// duplicatedVariables reports whether two of the elems bind the same
// variable name.
func duplicatedVariables(elems []importElem) bool {
	seen := make(map[string]bool, len(elems))
	for _, elem := range elems {
		name := string(elem.bind.Variable)
		if seen[name] {
			return true
		}
		seen[name] = true
	}
	return false
}
// groupEndsAfter reports whether the import group ends after this local:
// either the next node is not an import local, or it is separated from
// this one by a blank line or by a line containing a comment.
func groupEndsAfter(local *ast.Local) bool {
	next := goodLocalOrNull(local.Body)
	if next == nil {
		return true
	}
	sawNewline := false
	for _, elem := range *openFodder(next) {
		// Anything after the first newline, or any blank line, breaks
		// the group.
		if sawNewline || elem.Blanks > 0 {
			return true
		}
		if elem.Kind != ast.FodderInterstitial {
			sawNewline = true
		}
	}
	return false
}
// topLevelImport recursively gathers the current top-level import group
// starting at local, appending its binds to *imports. When the group ends
// it sorts the group, rebuilds its AST, and processes whatever follows
// (possibly another group). groupOpenFodder is the fodder that should
// precede the first import of the current group. Returns the new AST root
// for this region of the file.
func topLevelImport(local *ast.Local, imports *[]importElem, groupOpenFodder ast.Fodder) ast.Node {
	if !isGoodLocal(local) {
		panic("topLevelImport called with bad local.")
	}
	// Split the fodder before the next node: the first part stays adjacent
	// to this local's last bind, the remainder belongs to what follows.
	adjacentCommentFodder, beforeNextFodder :=
		splitFodder(*openFodder(local.Body))
	ast.FodderEnsureCleanNewline(&adjacentCommentFodder)
	newImports := extractImportElems(local.Binds, adjacentCommentFodder)
	*imports = append(*imports, newImports...)
	if groupEndsAfter(local) {
		sortGroup(*imports)
		afterGroup := (*imports)[len(*imports)-1].adjacentFodder
		ast.FodderEnsureCleanNewline(&beforeNextFodder)
		nextOpenFodder := ast.FodderConcat(afterGroup, beforeNextFodder)
		var bodyAfterGroup ast.Node
		// Process the code after the current group:
		next := goodLocalOrNull(local.Body)
		if next != nil {
			// Another group of imports
			nextImports := make([]importElem, 0)
			bodyAfterGroup = topLevelImport(next, &nextImports, nextOpenFodder)
		} else {
			// Something else
			bodyAfterGroup = local.Body
			*openFodder(bodyAfterGroup) = nextOpenFodder
		}
		return buildGroupAST(*imports, bodyAfterGroup, groupOpenFodder)
	}
	// The group continues into the next local; keep accumulating.
	if len(beforeNextFodder) > 0 {
		panic("Expected beforeNextFodder to be empty")
	}
	return topLevelImport(local.Body.(*ast.Local), imports, groupOpenFodder)
}
// SortImports sorts imports at the top of the file into alphabetical order
// by path.
//
// Top-level imports are `local x = import 'xxx.jsonnet'` expressions
// that go before anything else in the file (more precisely all such imports
// that are either the root of the AST or a direct child (body) of a top-level
// import). Top-level imports are therefore more top-level than top-level
// functions.
//
// Grouping of imports is preserved. Groups of imports are separated by blank
// lines or lines containing comments.
func SortImports(file *ast.Node) {
	if local := goodLocalOrNull(*file); local != nil {
		imports := make([]importElem, 0)
		*file = topLevelImport(local, &imports, *openFodder(local))
	}
}

View File

@ -0,0 +1,86 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/pass"
)
// StripComments removes all comments, while keeping the line structure of
// the file intact.
type StripComments struct {
	pass.Base
}
// Fodder implements this pass.
func (c *StripComments) Fodder(p pass.ASTPass, fodder *ast.Fodder, ctx pass.Context) {
	// Keep only line-end elements (preserving line structure), dropping
	// the comment text they carry; all other fodder kinds are discarded.
	stripped := make(ast.Fodder, 0)
	for _, elem := range *fodder {
		if elem.Kind != ast.FodderLineEnd {
			continue
		}
		elem.Comment = nil
		stripped = append(stripped, elem)
	}
	*fodder = stripped
}
// StripEverything removes all comments and newlines.
type StripEverything struct {
	pass.Base
}
// Fodder implements this pass.
func (c *StripEverything) Fodder(p pass.ASTPass, fodder *ast.Fodder, ctx pass.Context) {
	// Drop the fodder entirely: no comments, no blank lines, no newlines.
	*fodder = nil
}
// StripAllButComments removes all code, keeping only the comments. The
// comments are accumulated in c.comments and then emitted as the fodder of
// a single null literal that replaces the whole file (see the File method).
// (The original comment was copy-pasted from StripEverything and was
// incorrect.)
type StripAllButComments struct {
	pass.Base
	comments ast.Fodder // comments collected so far, in traversal order
}
// Fodder remembers all the fodder in c.comments
func (c *StripAllButComments) Fodder(p pass.ASTPass, fodder *ast.Fodder, ctx pass.Context) {
	for _, elem := range *fodder {
		switch elem.Kind {
		case ast.FodderParagraph:
			// Keep the paragraph's comment, dropping blanks/indent.
			c.comments = append(c.comments, ast.FodderElement{
				Kind:    ast.FodderParagraph,
				Comment: elem.Comment,
			})
		case ast.FodderInterstitial:
			// Keep the comment and terminate its line.
			c.comments = append(c.comments, elem, ast.FodderElement{
				Kind: ast.FodderLineEnd,
			})
		}
	}
	*fodder = nil
}
// File replaces the entire file with the remembered comments.
func (c *StripAllButComments) File(p pass.ASTPass, node *ast.Node, finalFodder *ast.Fodder) {
	// Walk the file first so that c.comments gets populated.
	c.Base.File(p, node, finalFodder)
	null := &ast.LiteralNull{
		NodeBase: ast.NodeBase{
			LocRange: *(*node).Loc(),
			Fodder:   c.comments,
		},
	}
	*node = null
	*finalFodder = nil
}

View File

@ -0,0 +1,550 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package formatter
import (
"bytes"
"fmt"
"github.com/google/go-jsonnet/ast"
)
// unparser accumulates the textual representation of an AST in buf,
// honouring the padding options.
type unparser struct {
	buf     bytes.Buffer // output accumulated so far
	options Options      // formatting options (padding etc.)
}
// write appends str verbatim to the output buffer.
func (u *unparser) write(str string) {
	u.buf.WriteString(str)
}
// fill Pretty-prints fodder.
// The crowded and separateToken params control whether single whitespace
// characters are added to keep tokens from joining together in the output.
// The intuition of crowded is that the caller passes true for crowded if the
// last thing printed would crowd whatever we're printing here. For example, if
// we just printed a ',' then crowded would be true. If we just printed a '('
// then crowded would be false because we don't want the space after the '('.
//
// If crowded is true, a space is printed after any fodder, unless
// separateToken is false or the fodder ended with a newline.
// If crowded is true and separateToken is false and the fodder begins with
// an interstitial, then the interstitial is prefixed with a single space, but
// there is no space after the interstitial.
// If crowded is false and separateToken is true then a space character
// is only printed when the fodder ended with an interstitial comment (which
// creates a crowded situation where there was not one before).
// If crowded is false and separateToken is false then no space is printed
// after or before the fodder, even if the last fodder was an interstitial.
func (u *unparser) fill(fodder ast.Fodder, crowded bool, separateToken bool) {
	var lastIndent int
	for _, fod := range fodder {
		switch fod.Kind {
		case ast.FodderParagraph:
			// A block of comment lines, then blank lines, then indentation.
			for i, l := range fod.Comment {
				// Do not indent empty lines (note: first line is never empty).
				if len(l) > 0 {
					// First line is already indented by previous fod.
					if i > 0 {
						for i := 0; i < lastIndent; i++ {
							u.write(" ")
						}
					}
					u.write(l)
				}
				u.write("\n")
			}
			for i := 0; i < fod.Blanks; i++ {
				u.write("\n")
			}
			for i := 0; i < fod.Indent; i++ {
				u.write(" ")
			}
			lastIndent = fod.Indent
			crowded = false
		case ast.FodderLineEnd:
			// An optional comment at the end of the line, then the newline
			// (plus any blank lines) and the next line's indentation.
			if len(fod.Comment) > 0 {
				u.write(" ")
				u.write(fod.Comment[0])
			}
			for i := 0; i <= fod.Blanks; i++ {
				u.write("\n")
			}
			for i := 0; i < fod.Indent; i++ {
				u.write(" ")
			}
			lastIndent = fod.Indent
			crowded = false
		case ast.FodderInterstitial:
			// A comment in the middle of a line.
			if crowded {
				u.write(" ")
			}
			u.write(fod.Comment[0])
			crowded = true
		}
	}
	if separateToken && crowded {
		u.write(" ")
	}
}
// unparseSpecs prints a comprehension's for/in/if clauses, outermost first.
func (u *unparser) unparseSpecs(spec *ast.ForSpec) {
	// Outer specs appear earlier in the source, so print them first.
	if outer := spec.Outer; outer != nil {
		u.unparseSpecs(outer)
	}
	u.fill(spec.ForFodder, true, true)
	u.write("for")
	u.fill(spec.VarFodder, true, true)
	u.write(string(spec.VarName))
	u.fill(spec.InFodder, true, true)
	u.write("in")
	u.unparse(spec.Expr, true)
	for _, cond := range spec.Conditions {
		u.fill(cond.IfFodder, true, true)
		u.write("if")
		u.unparse(cond.Expr, true)
	}
}
// unparseParams prints a parenthesised parameter list, including default
// arguments and an optional trailing comma.
func (u *unparser) unparseParams(fodderL ast.Fodder, params []ast.Parameter, trailingComma bool, fodderR ast.Fodder) {
	u.fill(fodderL, false, false)
	u.write("(")
	for i, param := range params {
		if i > 0 {
			u.write(",")
		}
		// Crowd against the comma, but not against the open paren.
		u.fill(param.NameFodder, i > 0, true)
		u.unparseID(param.Name)
		if param.DefaultArg != nil {
			u.fill(param.EqFodder, false, false)
			u.write("=")
			u.unparse(param.DefaultArg, false)
		}
		u.fill(param.CommaFodder, false, false)
	}
	if trailingComma {
		u.write(",")
	}
	u.fill(fodderR, false, false)
	u.write(")")
}
// unparseFieldParams prints the parameter list of a field that is a method,
// or nothing for a plain field.
func (u *unparser) unparseFieldParams(field ast.ObjectField) {
	if m := field.Method; m != nil {
		u.unparseParams(m.ParenLeftFodder, m.Parameters, m.TrailingComma,
			m.ParenRightFodder)
	}
}
// unparseFields prints the comma-separated fields of an object.
// crowded says whether the previously printed token would crowd the first
// field.
func (u *unparser) unparseFields(fields ast.ObjectFields, crowded bool) {
	first := true
	for _, field := range fields {
		if !first {
			u.write(",")
		}

		// An aux function so we don't repeat ourselves for the 3 kinds of
		// basic field.
		unparseFieldRemainder := func(field ast.ObjectField) {
			u.unparseFieldParams(field)
			u.fill(field.OpFodder, false, false)
			if field.SuperSugar {
				u.write("+")
			}
			switch field.Hide {
			case ast.ObjectFieldInherit:
				u.write(":")
			case ast.ObjectFieldHidden:
				u.write("::")
			case ast.ObjectFieldVisible:
				u.write(":::")
			}
			u.unparse(field.Expr2, true)
		}

		switch field.Kind {
		case ast.ObjectLocal:
			// local id(params) = expr
			u.fill(field.Fodder1, !first || crowded, true)
			u.write("local")
			u.fill(field.Fodder2, true, true)
			u.unparseID(*field.Id)
			u.unparseFieldParams(field)
			u.fill(field.OpFodder, true, true)
			u.write("=")
			u.unparse(field.Expr2, true)
		case ast.ObjectFieldID:
			// id: expr2
			u.fill(field.Fodder1, !first || crowded, true)
			u.unparseID(*field.Id)
			unparseFieldRemainder(field)
		case ast.ObjectFieldStr:
			// 'str': expr2
			u.unparse(field.Expr1, !first || crowded)
			unparseFieldRemainder(field)
		case ast.ObjectFieldExpr:
			// [expr1]: expr2
			u.fill(field.Fodder1, !first || crowded, true)
			u.write("[")
			u.unparse(field.Expr1, false)
			u.fill(field.Fodder2, false, false)
			u.write("]")
			unparseFieldRemainder(field)
		case ast.ObjectAssert:
			// assert expr2 [: expr3]
			u.fill(field.Fodder1, !first || crowded, true)
			u.write("assert")
			u.unparse(field.Expr2, true)
			if field.Expr3 != nil {
				u.fill(field.OpFodder, true, true)
				u.write(":")
				u.unparse(field.Expr3, true)
			}
		}
		first = false
		u.fill(field.CommaFodder, false, false)
	}
}
// unparseID writes an identifier verbatim.
func (u *unparser) unparseID(id ast.Identifier) {
	u.write(string(id))
}
// unparse writes the given AST node to the buffer, reproducing source
// layout from the fodder stored in the tree.  crowded indicates that the
// previous token ends immediately before this node, so a separating space
// may be required before the node's first token.
func (u *unparser) unparse(expr ast.Node, crowded bool) {
	// Left-recursive nodes (e.g. Binary, Index, Apply) keep their opening
	// fodder deeper in the tree (see ast.Node.OpenFodder), so only emit it
	// here for non-left-recursive nodes.
	if leftRecursive(expr) == nil {
		u.fill(*expr.OpenFodder(), crowded, true)
	}
	switch node := expr.(type) {
	case *ast.Apply:
		// target(args...) [tailstrict]
		u.unparse(node.Target, crowded)
		u.fill(node.FodderLeft, false, false)
		u.write("(")
		first := true
		for _, arg := range node.Arguments.Positional {
			if !first {
				u.write(",")
			}
			space := !first
			u.unparse(arg.Expr, space)
			u.fill(arg.CommaFodder, false, false)
			first = false
		}
		for _, arg := range node.Arguments.Named {
			if !first {
				u.write(",")
			}
			space := !first
			u.fill(arg.NameFodder, space, true)
			u.unparseID(arg.Name)
			// No space between the '=' and the argument value.
			space = false
			u.write("=")
			u.unparse(arg.Arg, space)
			u.fill(arg.CommaFodder, false, false)
			first = false
		}
		if node.TrailingComma {
			u.write(",")
		}
		u.fill(node.FodderRight, false, false)
		u.write(")")
		if node.TailStrict {
			u.fill(node.TailStrictFodder, true, true)
			u.write("tailstrict")
		}
	case *ast.ApplyBrace:
		// left { right } sugar: the two sub-expressions are simply adjacent.
		u.unparse(node.Left, crowded)
		u.unparse(node.Right, true)
	case *ast.Array:
		u.write("[")
		first := true
		for _, element := range node.Elements {
			if !first {
				u.write(",")
			}
			// PadArrays controls the space after '[' before the first element.
			u.unparse(element.Expr, !first || u.options.PadArrays)
			u.fill(element.CommaFodder, false, false)
			first = false
		}
		if node.TrailingComma {
			u.write(",")
		}
		u.fill(node.CloseFodder, len(node.Elements) > 0, u.options.PadArrays)
		u.write("]")
	case *ast.ArrayComp:
		// [ body [,] for ... ]
		u.write("[")
		u.unparse(node.Body, u.options.PadArrays)
		u.fill(node.TrailingCommaFodder, false, false)
		if node.TrailingComma {
			u.write(",")
		}
		u.unparseSpecs(&node.Spec)
		u.fill(node.CloseFodder, true, u.options.PadArrays)
		u.write("]")
	case *ast.Assert:
		// assert cond [: message]; rest
		u.write("assert")
		u.unparse(node.Cond, true)
		if node.Message != nil {
			u.fill(node.ColonFodder, true, true)
			u.write(":")
			u.unparse(node.Message, true)
		}
		u.fill(node.SemicolonFodder, false, false)
		u.write(";")
		u.unparse(node.Rest, true)
	case *ast.Binary:
		u.unparse(node.Left, crowded)
		u.fill(node.OpFodder, true, true)
		u.write(node.Op.String())
		u.unparse(node.Right, true)
	case *ast.Conditional:
		// if cond then e1 [else e2]
		u.write("if")
		u.unparse(node.Cond, true)
		u.fill(node.ThenFodder, true, true)
		u.write("then")
		u.unparse(node.BranchTrue, true)
		if node.BranchFalse != nil {
			u.fill(node.ElseFodder, true, true)
			u.write("else")
			u.unparse(node.BranchFalse, true)
		}
	case *ast.Dollar:
		u.write("$")
	case *ast.Error:
		u.write("error")
		u.unparse(node.Expr, true)
	case *ast.Function:
		u.write("function")
		u.unparseParams(node.ParenLeftFodder, node.Parameters, node.TrailingComma, node.ParenRightFodder)
		u.unparse(node.Body, true)
	case *ast.Import:
		u.write("import")
		u.unparse(node.File, true)
	case *ast.ImportStr:
		u.write("importstr")
		u.unparse(node.File, true)
	case *ast.Index:
		// target.id or target[index]
		u.unparse(node.Target, crowded)
		u.fill(node.LeftBracketFodder, false, false) // Can also be DotFodder
		if node.Id != nil {
			u.write(".")
			u.fill(node.RightBracketFodder, false, false) // IdFodder
			u.unparseID(*node.Id)
		} else {
			u.write("[")
			u.unparse(node.Index, false)
			u.fill(node.RightBracketFodder, false, false)
			u.write("]")
		}
	case *ast.Slice:
		// target[begin:end[:step]] — each of the three parts is optional.
		u.unparse(node.Target, crowded)
		u.fill(node.LeftBracketFodder, false, false)
		u.write("[")
		if node.BeginIndex != nil {
			u.unparse(node.BeginIndex, false)
		}
		u.fill(node.EndColonFodder, false, false)
		u.write(":")
		if node.EndIndex != nil {
			u.unparse(node.EndIndex, false)
		}
		// Emit the second ':' if there is a step, or if the original source
		// had one (its fodder survives even with no step expression).
		if node.Step != nil || len(node.StepColonFodder) > 0 {
			u.fill(node.StepColonFodder, false, false)
			u.write(":")
			if node.Step != nil {
				u.unparse(node.Step, false)
			}
		}
		u.fill(node.RightBracketFodder, false, false)
		u.write("]")
	case *ast.InSuper:
		// index in super
		u.unparse(node.Index, true)
		u.fill(node.InFodder, true, true)
		u.write("in")
		u.fill(node.SuperFodder, true, true)
		u.write("super")
	case *ast.Local:
		// local b1, b2, ...; body
		u.write("local")
		if len(node.Binds) == 0 {
			panic("INTERNAL ERROR: local with no binds")
		}
		first := true
		for _, bind := range node.Binds {
			if !first {
				u.write(",")
			}
			first = false
			u.fill(bind.VarFodder, true, true)
			u.unparseID(bind.Variable)
			if bind.Fun != nil {
				u.unparseParams(bind.Fun.ParenLeftFodder,
					bind.Fun.Parameters,
					bind.Fun.TrailingComma,
					bind.Fun.ParenRightFodder)
			}
			u.fill(bind.EqFodder, true, true)
			u.write("=")
			u.unparse(bind.Body, true)
			u.fill(bind.CloseFodder, false, false)
		}
		u.write(";")
		u.unparse(node.Body, true)
	case *ast.LiteralBoolean:
		if node.Value {
			u.write("true")
		} else {
			u.write("false")
		}
	case *ast.LiteralNumber:
		// Emit the number exactly as written, preserving the original form.
		u.write(node.OriginalString)
	case *ast.LiteralString:
		switch node.Kind {
		case ast.StringDouble:
			u.write("\"")
			// The original escape codes are still in the string.
			u.write(node.Value)
			u.write("\"")
		case ast.StringSingle:
			u.write("'")
			// The original escape codes are still in the string.
			u.write(node.Value)
			u.write("'")
		case ast.StringBlock:
			// |||-delimited text block; re-indent each line with BlockIndent.
			u.write("|||\n")
			if node.Value[0] != '\n' {
				u.write(node.BlockIndent)
			}
			for i, r := range node.Value {
				// Formatter always outputs in unix mode.
				if r == '\r' {
					continue
				}
				u.write(string(r))
				if r == '\n' && (i+1 < len(node.Value)) && node.Value[i+1] != '\n' {
					u.write(node.BlockIndent)
				}
			}
			u.write(node.BlockTermIndent)
			u.write("|||")
		case ast.VerbatimStringDouble:
			u.write("@\"")
			// Escapes were processed by the parser, so put them back in.
			for _, r := range node.Value {
				if r == '"' {
					u.write("\"\"")
				} else {
					u.write(string(r))
				}
			}
			u.write("\"")
		case ast.VerbatimStringSingle:
			u.write("@'")
			// Escapes were processed by the parser, so put them back in.
			for _, r := range node.Value {
				if r == '\'' {
					u.write("''")
				} else {
					u.write(string(r))
				}
			}
			u.write("'")
		}
	case *ast.LiteralNull:
		u.write("null")
	case *ast.Object:
		u.write("{")
		u.unparseFields(node.Fields, u.options.PadObjects)
		if node.TrailingComma {
			u.write(",")
		}
		u.fill(node.CloseFodder, len(node.Fields) > 0, u.options.PadObjects)
		u.write("}")
	case *ast.ObjectComp:
		// { fields [,] for ... }
		u.write("{")
		u.unparseFields(node.Fields, u.options.PadObjects)
		if node.TrailingComma {
			u.write(",")
		}
		u.unparseSpecs(&node.Spec)
		u.fill(node.CloseFodder, true, u.options.PadObjects)
		u.write("}")
	case *ast.Parens:
		u.write("(")
		u.unparse(node.Inner, false)
		u.fill(node.CloseFodder, false, false)
		u.write(")")
	case *ast.Self:
		u.write("self")
	case *ast.SuperIndex:
		// super.id or super[index]
		u.write("super")
		u.fill(node.DotFodder, false, false)
		if node.Id != nil {
			u.write(".")
			u.fill(node.IDFodder, false, false)
			u.unparseID(*node.Id)
		} else {
			u.write("[")
			u.unparse(node.Index, false)
			u.fill(node.IDFodder, false, false)
			u.write("]")
		}
	case *ast.Var:
		u.unparseID(node.Id)
	case *ast.Unary:
		u.write(node.Op.String())
		u.unparse(node.Expr, false)
	default:
		panic(fmt.Sprintf("INTERNAL ERROR: Unknown AST: %T", expr))
	}
}
// string returns everything unparsed into the buffer so far.
func (u *unparser) string() string {
	out := u.buf.String()
	return out
}

View File

@ -7,6 +7,7 @@ go_library(
"lexer.go",
"literalfield_set.go",
"parser.go",
"string_util.go",
],
importpath = "github.com/google/go-jsonnet/internal/parser",
visibility = ["//:__subpackages__"],

View File

@ -280,8 +280,7 @@ type lexer struct {
input string // The input string
source *ast.Source
pos position // Current position in input
prev position // Previous position in input
pos position // Current position in input
tokens Tokens // The tokens that we've generated so far
@ -302,7 +301,6 @@ func makeLexer(fn string, input string) *lexer {
input: input,
source: ast.BuildSource(input),
pos: position{byteNo: 0, lineNo: 1, lineStart: 0},
prev: position{byteNo: lexEOF, lineNo: 0, lineStart: 0},
tokenStartLoc: ast.Location{Line: 1, Column: 1},
freshLine: true,
}
@ -311,11 +309,9 @@ func makeLexer(fn string, input string) *lexer {
// next returns the next rune in the input.
func (l *lexer) next() rune {
if int(l.pos.byteNo) >= len(l.input) {
l.prev = l.pos
return lexEOF
}
r, w := utf8.DecodeRuneInString(l.input[l.pos.byteNo:])
l.prev = l.pos
l.pos.byteNo += w
if r == '\n' {
l.pos.lineStart = l.pos.byteNo
@ -337,19 +333,11 @@ func (l *lexer) acceptN(n int) {
// peek returns but does not consume the next rune in the input.
func (l *lexer) peek() rune {
r := l.next()
l.backup()
return r
}
// backup steps back one rune. Can only be called once per call of next.
// It also does not recover the previous value of freshLine.
func (l *lexer) backup() {
if l.prev.byteNo == lexEOF {
panic("backup called with no valid previous rune")
if int(l.pos.byteNo) >= len(l.input) {
return lexEOF
}
l.pos = l.prev
l.prev = position{byteNo: lexEOF}
r, _ := utf8.DecodeRuneInString(l.input[l.pos.byteNo:])
return r
}
func locationFromPosition(pos position) ast.Location {
@ -360,13 +348,6 @@ func (l *lexer) location() ast.Location {
return locationFromPosition(l.pos)
}
func (l *lexer) prevLocation() ast.Location {
if l.prev.byteNo == lexEOF {
panic("prevLocation called with no valid previous rune")
}
return locationFromPosition(l.prev)
}
// Reset the current working token start to the current cursor position. This
// may throw away some characters. This does not throw away any accumulated
// fodder.
@ -397,6 +378,11 @@ func (l *lexer) addFodder(kind ast.FodderKind, blanks int, indent int, comment [
l.fodder = append(l.fodder, elem)
}
func (l *lexer) addFodderSafe(kind ast.FodderKind, blanks int, indent int, comment []string) {
elem := ast.MakeFodderElement(kind, blanks, indent, comment)
ast.FodderAppend(&l.fodder, elem)
}
func (l *lexer) makeStaticErrorPoint(msg string, loc ast.Location) errors.StaticError {
return errors.MakeStaticError(msg, ast.MakeLocationRange(l.fileName, l.source, loc, loc))
}
@ -405,10 +391,11 @@ func (l *lexer) makeStaticErrorPoint(msg string, loc ast.Location) errors.Static
// spaces after last \n. It also converts \t to spaces.
// The parameter 'r' is the rune that begins the whitespace.
func (l *lexer) lexWhitespace() (int, int) {
r := l.next()
r := l.peek()
indent := 0
newLines := 0
for ; isWhitespace(r); r = l.next() {
for ; isWhitespace(r); r = l.peek() {
l.next()
switch r {
case '\r':
// Ignore.
@ -428,7 +415,6 @@ func (l *lexer) lexWhitespace() (int, int) {
indent += 8
}
}
l.backup()
return newLines, indent
}
@ -438,13 +424,13 @@ func (l *lexer) lexUntilNewline() (string, int, int) {
// Compute 'text'.
var buf bytes.Buffer
lastNonSpace := 0
for r := l.next(); r != lexEOF && r != '\n'; r = l.next() {
for r := l.peek(); r != lexEOF && r != '\n'; r = l.peek() {
l.next()
buf.WriteRune(r)
if !isHorizontalWhitespace(r) {
lastNonSpace = buf.Len()
}
}
l.backup()
// Trim whitespace off the end.
buf.Truncate(lastNonSpace)
text := buf.String()
@ -486,7 +472,7 @@ func (l *lexer) lexNumber() error {
outerLoop:
for {
r := l.next()
r := l.peek()
switch state {
case numBegin:
switch {
@ -525,7 +511,7 @@ outerLoop:
default:
return l.makeStaticErrorPoint(
fmt.Sprintf("Couldn't lex number, junk after decimal point: %v", strconv.QuoteRuneToASCII(r)),
l.prevLocation())
l.location())
}
case numAfterDigit:
switch {
@ -545,7 +531,7 @@ outerLoop:
default:
return l.makeStaticErrorPoint(
fmt.Sprintf("Couldn't lex number, junk after 'E': %v", strconv.QuoteRuneToASCII(r)),
l.prevLocation())
l.location())
}
case numAfterExpSign:
if r >= '0' && r <= '9' {
@ -553,7 +539,7 @@ outerLoop:
} else {
return l.makeStaticErrorPoint(
fmt.Sprintf("Couldn't lex number, junk after exponent sign: %v", strconv.QuoteRuneToASCII(r)),
l.prevLocation())
l.location())
}
case numAfterExpDigit:
@ -563,80 +549,106 @@ outerLoop:
break outerLoop
}
}
l.next()
}
l.backup()
l.emitToken(tokenNumber)
return nil
}
// lexIdentifier will consume a identifer and emit a token. It is assumed
// that the next rune to be served by the lexer will be a leading digit. This
// may emit a keyword or an identifier.
// getTokenKindFromID will return a keyword if the identifier string is
// recognised as one, otherwise it will return tokenIdentifier.
// This is the single mapping from keyword spellings to token kinds;
// IsValidIdentifier also relies on it to reject keywords.
func getTokenKindFromID(str string) tokenKind {
	switch str {
	case "assert":
		return tokenAssert
	case "else":
		return tokenElse
	case "error":
		return tokenError
	case "false":
		return tokenFalse
	case "for":
		return tokenFor
	case "function":
		return tokenFunction
	case "if":
		return tokenIf
	case "import":
		return tokenImport
	case "importstr":
		return tokenImportStr
	case "in":
		return tokenIn
	case "local":
		return tokenLocal
	case "null":
		return tokenNullLit
	case "self":
		return tokenSelf
	case "super":
		return tokenSuper
	case "tailstrict":
		return tokenTailStrict
	case "then":
		return tokenThen
	case "true":
		return tokenTrue
	default:
		// Not a keyword, assume it is an identifier
		return tokenIdentifier
	}
}
// IsValidIdentifier is true if the string could be a valid identifier:
// it must be non-empty, start with an identifier-first rune, continue with
// identifier runes, and not be a reserved keyword.
func IsValidIdentifier(str string) bool {
	if str == "" {
		return false
	}
	for i, r := range str {
		var ok bool
		if i == 0 {
			ok = isIdentifierFirst(r)
		} else {
			ok = isIdentifier(r)
		}
		if !ok {
			return false
		}
	}
	// Keywords lex as their own token kinds, so they are not identifiers.
	return getTokenKindFromID(str) == tokenIdentifier
}
// lexIdentifier will consume an identifier and emit a token. It is assumed
// that the next rune to be served by the lexer will not be a leading digit.
// This may emit a keyword or an identifier.
func (l *lexer) lexIdentifier() {
r := l.next()
r := l.peek()
if !isIdentifierFirst(r) {
panic("Unexpected character in lexIdentifier")
}
for ; r != lexEOF; r = l.next() {
for ; r != lexEOF; r = l.peek() {
if !isIdentifier(r) {
break
}
l.next()
}
l.backup()
switch l.input[l.tokenStart:l.pos.byteNo] {
case "assert":
l.emitToken(tokenAssert)
case "else":
l.emitToken(tokenElse)
case "error":
l.emitToken(tokenError)
case "false":
l.emitToken(tokenFalse)
case "for":
l.emitToken(tokenFor)
case "function":
l.emitToken(tokenFunction)
case "if":
l.emitToken(tokenIf)
case "import":
l.emitToken(tokenImport)
case "importstr":
l.emitToken(tokenImportStr)
case "in":
l.emitToken(tokenIn)
case "local":
l.emitToken(tokenLocal)
case "null":
l.emitToken(tokenNullLit)
case "self":
l.emitToken(tokenSelf)
case "super":
l.emitToken(tokenSuper)
case "tailstrict":
l.emitToken(tokenTailStrict)
case "then":
l.emitToken(tokenThen)
case "true":
l.emitToken(tokenTrue)
default:
// Not a keyword, assume it is an identifier
l.emitToken(tokenIdentifier)
}
l.emitToken(getTokenKindFromID(l.input[l.tokenStart:l.pos.byteNo]))
}
// lexSymbol will lex a token that starts with a symbol. This could be a
// C or C++ comment, block quote or an operator. This function assumes that the next
// rune to be served by the lexer will be the first rune of the new token.
func (l *lexer) lexSymbol() error {
// freshLine is reset by next() so cache it here.
freshLine := l.freshLine
r := l.next()
// Single line C++ style comment
if r == '#' || (r == '/' && l.peek() == '/') {
comment, blanks, indent := l.lexUntilNewline()
var k ast.FodderKind
if l.freshLine {
if freshLine {
k = ast.FodderParagraph
} else {
k = ast.FodderLineEnd
@ -647,7 +659,7 @@ func (l *lexer) lexSymbol() error {
// C style comment (could be interstitial or paragraph comment)
if r == '/' && l.peek() == '*' {
margin := l.pos.byteNo - l.pos.lineStart
margin := l.pos.byteNo - l.pos.lineStart - 1
commentStartLoc := l.tokenStartLoc
//nolint:ineffassign,staticcheck
@ -695,7 +707,7 @@ func (l *lexer) lexSymbol() error {
newLinesAfter = 1
indentAfter = 0
}
l.addFodder(ast.FodderParagraph, newLinesAfter-1, indentAfter, lines)
l.addFodderSafe(ast.FodderParagraph, newLinesAfter-1, indentAfter, lines)
}
return nil
}
@ -716,10 +728,10 @@ func (l *lexer) lexSymbol() error {
}
// Process leading blank lines before calculating stringBlockIndent
for r = l.next(); r == '\n'; r = l.next() {
for r = l.peek(); r == '\n'; r = l.peek() {
l.next()
cb.WriteRune(r)
}
l.backup()
numWhiteSpace := checkWhitespace(l.input[l.pos.byteNo:], l.input[l.pos.byteNo:])
stringBlockIndent := l.input[l.pos.byteNo : l.pos.byteNo+numWhiteSpace]
if numWhiteSpace == 0 {
@ -741,20 +753,20 @@ func (l *lexer) lexSymbol() error {
cb.WriteRune('\n')
// Skip any blank lines
for r = l.next(); r == '\n'; r = l.next() {
for r = l.peek(); r == '\n'; r = l.peek() {
l.next()
cb.WriteRune(r)
}
l.backup()
// Look at the next line
numWhiteSpace = checkWhitespace(stringBlockIndent, l.input[l.pos.byteNo:])
if numWhiteSpace == 0 {
// End of the text block
var stringBlockTermIndent string
for r = l.next(); r == ' ' || r == '\t'; r = l.next() {
for r = l.peek(); r == ' ' || r == '\t'; r = l.peek() {
l.next()
stringBlockTermIndent += string(r)
}
l.backup()
if !strings.HasPrefix(l.input[l.pos.byteNo:], "|||") {
return l.makeStaticErrorPoint("Text block not terminated with |||", commentStartLoc)
}
@ -768,7 +780,7 @@ func (l *lexer) lexSymbol() error {
}
// Assume any string of symbols is a single operator.
for r = l.next(); isSymbol(r); r = l.next() {
for r = l.peek(); isSymbol(r); r = l.peek() {
// Not allowed // in operators
if r == '/' && strings.HasPrefix(l.input[l.pos.byteNo:], "/") {
break
@ -781,10 +793,9 @@ func (l *lexer) lexSymbol() error {
if r == '|' && strings.HasPrefix(l.input[l.pos.byteNo:], "||") {
break
}
l.next()
}
l.backup()
// Operators are not allowed to end with + - ~ ! unless they are one rune long.
// So, wind it back if we need to, but stop at the first rune.
// This relies on the hack that all operator symbols are ASCII and thus there is
@ -824,29 +835,37 @@ func Lex(fn string, input string) (Tokens, error) {
l.addFodder(ast.FodderLineEnd, blanks, indent, []string{})
}
l.resetTokenStart() // Don't include whitespace in actual token.
r := l.next()
r := l.peek()
switch r {
case '{':
l.next()
l.emitToken(tokenBraceL)
case '}':
l.next()
l.emitToken(tokenBraceR)
case '[':
l.next()
l.emitToken(tokenBracketL)
case ']':
l.next()
l.emitToken(tokenBracketR)
case ',':
l.next()
l.emitToken(tokenComma)
case '.':
l.next()
l.emitToken(tokenDot)
case '(':
l.next()
l.emitToken(tokenParenL)
case ')':
l.next()
l.emitToken(tokenParenR)
case ';':
l.next()
l.emitToken(tokenSemicolon)
case '0', '1', '2', '3', '4', '5', '6', '7', '8', '9':
l.backup()
err = l.lexNumber()
if err != nil {
return nil, err
@ -855,7 +874,8 @@ func Lex(fn string, input string) (Tokens, error) {
// String literals
case '"':
stringStartLoc := l.prevLocation()
stringStartLoc := l.location()
l.next()
for r = l.next(); ; r = l.next() {
if r == lexEOF {
return nil, l.makeStaticErrorPoint("Unterminated String", stringStartLoc)
@ -872,7 +892,8 @@ func Lex(fn string, input string) (Tokens, error) {
}
}
case '\'':
stringStartLoc := l.prevLocation()
stringStartLoc := l.location()
l.next()
for r = l.next(); ; r = l.next() {
if r == lexEOF {
return nil, l.makeStaticErrorPoint("Unterminated String", stringStartLoc)
@ -889,6 +910,8 @@ func Lex(fn string, input string) (Tokens, error) {
}
}
case '@':
stringStartLoc := l.location()
l.next()
// Verbatim string literals.
// ' and " quoting is interpreted here, unlike non-verbatim strings
// where it is done later by jsonnet_string_unescape. This is OK
@ -896,7 +919,6 @@ func Lex(fn string, input string) (Tokens, error) {
// repeated quote into a single quote, so we can go back to the
// original form in the formatter.
var data []rune
stringStartLoc := l.prevLocation()
quot := l.next()
var kind tokenKind
if quot == '"' {
@ -928,10 +950,8 @@ func Lex(fn string, input string) (Tokens, error) {
default:
if isIdentifierFirst(r) {
l.backup()
l.lexIdentifier()
} else if isSymbol(r) || r == '#' {
l.backup()
err = l.lexSymbol()
if err != nil {
return nil, err
@ -939,7 +959,7 @@ func Lex(fn string, input string) (Tokens, error) {
} else {
return nil, l.makeStaticErrorPoint(
fmt.Sprintf("Could not lex the character %s", strconv.QuoteRuneToASCII(r)),
l.prevLocation())
l.location())
}
}

View File

@ -487,13 +487,13 @@ func TestIdentifiers(t *testing.T) {
func TestCppComment(t *testing.T) {
SingleTest(t, "// hi", "", Tokens{
{kind: tokenEndOfFile, fodder: ast.Fodder{{Kind: ast.FodderLineEnd, Comment: []string{"// hi"}}}},
{kind: tokenEndOfFile, fodder: ast.Fodder{{Kind: ast.FodderParagraph, Comment: []string{"// hi"}}}},
})
}
func TestHashComment(t *testing.T) {
SingleTest(t, "# hi", "", Tokens{
{kind: tokenEndOfFile, fodder: ast.Fodder{{Kind: ast.FodderLineEnd, Comment: []string{"# hi"}}}},
{kind: tokenEndOfFile, fodder: ast.Fodder{{Kind: ast.FodderParagraph, Comment: []string{"# hi"}}}},
})
}
@ -526,7 +526,9 @@ func TestCCommentSpaceSlash(t *testing.T) {
func TestCCommentManyLines(t *testing.T) {
SingleTest(t, "/*\n\n*/", "", Tokens{
{kind: tokenEndOfFile, fodder: ast.Fodder{{Kind: ast.FodderParagraph, Comment: []string{"/*", "", "*/"}}}},
{kind: tokenEndOfFile, fodder: ast.Fodder{
{Kind: ast.FodderLineEnd},
{Kind: ast.FodderParagraph, Comment: []string{"/*", "", "*/"}}}},
})
}

View File

@ -834,10 +834,11 @@ func tokenStringToAst(tok *token) *ast.LiteralString {
}
case tokenStringBlock:
return &ast.LiteralString{
NodeBase: ast.NewNodeBaseLoc(tok.loc, tok.fodder),
Value: tok.data,
Kind: ast.StringBlock,
BlockIndent: tok.stringBlockIndent,
NodeBase: ast.NewNodeBaseLoc(tok.loc, tok.fodder),
Value: tok.data,
Kind: ast.StringBlock,
BlockIndent: tok.stringBlockIndent,
BlockTermIndent: tok.stringBlockTermIndent,
}
case tokenVerbatimStringDouble:
return &ast.LiteralString{
@ -1286,10 +1287,11 @@ func (p *parser) parse(prec precedence) (ast.Node, errors.StaticError) {
}
id := ast.Identifier(fieldID.data)
lhs = &ast.Index{
NodeBase: ast.NewNodeBaseLoc(locFromTokens(begin, fieldID), ast.Fodder{}),
Target: lhs,
LeftBracketFodder: op.fodder,
Id: &id,
NodeBase: ast.NewNodeBaseLoc(locFromTokens(begin, fieldID), ast.Fodder{}),
Target: lhs,
LeftBracketFodder: op.fodder,
Id: &id,
RightBracketFodder: fieldID.fodder,
}
case tokenParenL:
end, args, gotComma, err := p.parseArguments("function argument")
@ -1352,28 +1354,31 @@ func (p *parser) parse(prec precedence) (ast.Node, errors.StaticError) {
// ---------------------------------------------------------------------------
// Parse parses a slice of tokens into a parse tree.
func Parse(t Tokens) (ast.Node, errors.StaticError) {
// Parse parses a slice of tokens into a parse tree. Any fodder after the final token is
// returned as well.
func Parse(t Tokens) (ast.Node, ast.Fodder, errors.StaticError) {
p := makeParser(t)
expr, err := p.parse(maxPrecedence)
if err != nil {
return nil, err
return nil, nil, err
}
eof := p.peek()
if p.peek().kind != tokenEndOfFile {
return nil, errors.MakeStaticError(fmt.Sprintf("Did not expect: %v", p.peek()), p.peek().loc)
if eof.kind != tokenEndOfFile {
return nil, nil, errors.MakeStaticError(fmt.Sprintf("Did not expect: %v", eof), eof.loc)
}
addContext(expr, &topLevelContext, anonymous)
return expr, nil
return expr, eof.fodder, nil
}
// SnippetToRawAST converts a Jsonnet code snippet to an AST (without any transformations).
func SnippetToRawAST(filename string, snippet string) (ast.Node, error) {
// Any fodder after the final token is returned as well.
func SnippetToRawAST(filename string, snippet string) (ast.Node, ast.Fodder, error) {
tokens, err := Lex(filename, snippet)
if err != nil {
return nil, err
return nil, nil, err
}
return Parse(tokens)
}

View File

@ -131,7 +131,7 @@ func TestParser(t *testing.T) {
t.Errorf("Unexpected lex error\n input: %v\n error: %v", s, err)
return
}
_, err = Parse(tokens)
_, _, err = Parse(tokens)
if err != nil {
t.Errorf("Unexpected parse error\n input: %v\n error: %v", s, err)
}
@ -255,7 +255,7 @@ func TestParserErrors(t *testing.T) {
t.Errorf("Unexpected lex error\n input: %v\n error: %v", s.input, err)
return
}
_, err = Parse(tokens)
_, _, err = Parse(tokens)
if err == nil {
t.Errorf("Expected parse error but got success\n input: %v", s.input)
return

View File

@ -0,0 +1,134 @@
/*
Copyright 2016 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package parser
import (
"bytes"
"encoding/hex"
"fmt"
"unicode/utf8"
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/errors"
)
// StringUnescape compiles out the escape codes in the string, returning the
// raw text they denote.  s is the body of a non-verbatim string literal
// (without the surrounding quotes); loc is the literal's source location,
// used only for error reporting.
func StringUnescape(loc *ast.LocationRange, s string) (string, error) {
	var buf bytes.Buffer
	// read one rune at a time
	for i := 0; i < len(s); {
		r, w := utf8.DecodeRuneInString(s[i:])
		i += w
		switch r {
		case '\\':
			if i >= len(s) {
				return "", errors.MakeStaticError("Truncated escape sequence in string literal.", *loc)
			}
			r2, w := utf8.DecodeRuneInString(s[i:])
			i += w
			switch r2 {
			case '"':
				buf.WriteRune('"')
			case '\'':
				buf.WriteRune('\'')
			case '\\':
				buf.WriteRune('\\')
			case '/':
				buf.WriteRune('/') // See json.org, \/ is a valid escape.
			case 'b':
				buf.WriteRune('\b')
			case 'f':
				buf.WriteRune('\f')
			case 'n':
				buf.WriteRune('\n')
			case 'r':
				buf.WriteRune('\r')
			case 't':
				buf.WriteRune('\t')
			case 'u':
				// \uXXXX: exactly four hex digits encoding a code point.
				// NOTE(review): surrogate pairs are not combined into a
				// single rune here — confirm whether that is intended.
				if i+4 > len(s) {
					return "", errors.MakeStaticError("Truncated unicode escape sequence in string literal.", *loc)
				}
				codeBytes, err := hex.DecodeString(s[i : i+4])
				if err != nil {
					// Fixed: report the offending four characters (s[i:i+4]);
					// previously this printed s[0:4], the start of the whole
					// literal, which is unrelated to the malformed escape.
					return "", errors.MakeStaticError(fmt.Sprintf("Unicode escape sequence was malformed: %s", s[i:i+4]), *loc)
				}
				code := int(codeBytes[0])*256 + int(codeBytes[1])
				buf.WriteRune(rune(code))
				i += 4
			default:
				return "", errors.MakeStaticError(fmt.Sprintf("Unknown escape sequence in string literal: \\%c", r2), *loc)
			}
		default:
			buf.WriteRune(r)
		}
	}
	return buf.String(), nil
}
// StringEscape does the opposite of StringUnescape: it inserts escape
// sequences so that the result, wrapped in quotes, lexes back to s.
// When single is true the output is destined for single quotes (so ' is
// escaped and " is left alone); otherwise the reverse.  Control characters
// are emitted as \uXXXX escapes.
func StringEscape(s string, single bool) string {
	var out bytes.Buffer
	for i := 0; i < len(s); {
		r, n := utf8.DecodeRuneInString(s[i:])
		i += n
		switch r {
		case '"':
			if single {
				out.WriteRune(r)
			} else {
				out.WriteString(`\"`)
			}
		case '\'':
			if single {
				out.WriteString(`\'`)
			} else {
				out.WriteRune(r)
			}
		case '\\':
			out.WriteString(`\\`)
		case '\b':
			out.WriteString(`\b`)
		case '\f':
			out.WriteString(`\f`)
		case '\n':
			out.WriteString(`\n`)
		case '\r':
			out.WriteString(`\r`)
		case '\t':
			out.WriteString(`\t`)
		case '\u0000':
			out.WriteString(`\u0000`)
		default:
			// Remaining C0 controls and the C1 range (0x7f-0x9f) are not
			// printable; everything else passes through unchanged.
			if r < 0x20 || (r >= 0x7f && r <= 0x9f) {
				fmt.Fprintf(&out, `\u%04x`, int(r))
			} else {
				out.WriteRune(r)
			}
		}
	}
	return out.String()
}

11
internal/pass/BUILD.bazel Normal file
View File

@ -0,0 +1,11 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
go_library(
name = "go_default_library",
srcs = ["pass.go"],
importpath = "github.com/google/go-jsonnet/internal/pass",
visibility = ["//:__subpackages__"],
deps = [
"//ast:go_default_library",
],
)

463
internal/pass/pass.go Normal file
View File

@ -0,0 +1,463 @@
/*
Copyright 2019 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package pass
import (
"github.com/google/go-jsonnet/ast"
)
// Context can be used to provide context when visiting child expressions.
// It is an empty interface so that each pass can define its own context type.
type Context interface{}
// ASTPass is an interface for a pass that transforms the AST in some way.
// There is one method per construct / node type.  The first argument of
// every method is the top-level pass itself; Base's default traversal calls
// back through it, so a pass that embeds Base has its overridden methods
// invoked during recursion.
type ASTPass interface {
	// Auxiliary constructs shared by several node types.
	FodderElement(ASTPass, *ast.FodderElement, Context)
	Fodder(ASTPass, *ast.Fodder, Context)
	ForSpec(ASTPass, *ast.ForSpec, Context)
	Parameters(ASTPass, *ast.Fodder, *[]ast.Parameter, *ast.Fodder, Context)
	Arguments(ASTPass, *ast.Fodder, *ast.Arguments, *ast.Fodder, Context)
	FieldParams(ASTPass, *ast.ObjectField, Context)
	ObjectField(ASTPass, *ast.ObjectField, Context)
	ObjectFields(ASTPass, *ast.ObjectFields, Context)

	// One method per AST node type.
	Apply(ASTPass, *ast.Apply, Context)
	ApplyBrace(ASTPass, *ast.ApplyBrace, Context)
	Array(ASTPass, *ast.Array, Context)
	ArrayComp(ASTPass, *ast.ArrayComp, Context)
	Assert(ASTPass, *ast.Assert, Context)
	Binary(ASTPass, *ast.Binary, Context)
	Conditional(ASTPass, *ast.Conditional, Context)
	Dollar(ASTPass, *ast.Dollar, Context)
	Error(ASTPass, *ast.Error, Context)
	Function(ASTPass, *ast.Function, Context)
	Import(ASTPass, *ast.Import, Context)
	ImportStr(ASTPass, *ast.ImportStr, Context)
	Index(ASTPass, *ast.Index, Context)
	Slice(ASTPass, *ast.Slice, Context)
	Local(ASTPass, *ast.Local, Context)
	LiteralBoolean(ASTPass, *ast.LiteralBoolean, Context)
	LiteralNull(ASTPass, *ast.LiteralNull, Context)
	LiteralNumber(ASTPass, *ast.LiteralNumber, Context)
	LiteralString(ASTPass, *ast.LiteralString, Context)
	Object(ASTPass, *ast.Object, Context)
	ObjectComp(ASTPass, *ast.ObjectComp, Context)
	Parens(ASTPass, *ast.Parens, Context)
	Self(ASTPass, *ast.Self, Context)
	SuperIndex(ASTPass, *ast.SuperIndex, Context)
	InSuper(ASTPass, *ast.InSuper, Context)
	Unary(ASTPass, *ast.Unary, Context)
	Var(ASTPass, *ast.Var, Context)

	// Entry points: Visit dispatches on node type; File processes a whole
	// parsed file (root node plus trailing fodder).
	Visit(ASTPass, *ast.Node, Context)
	BaseContext(ASTPass) Context
	File(ASTPass, *ast.Node, *ast.Fodder)
}
// Base implements basic traversal so other passes can extend it.
// Its methods recurse via the ASTPass argument p rather than via the
// receiver, so a pass embedding Base has its overrides used throughout
// the traversal.
type Base struct {
}
// FodderElement cannot descend any further; it is a leaf of the traversal
// and the default implementation does nothing.
func (*Base) FodderElement(p ASTPass, element *ast.FodderElement, ctx Context) {
}
// Fodder visits every element of the fodder, in order.
func (*Base) Fodder(p ASTPass, fodder *ast.Fodder, ctx Context) {
	elems := *fodder
	for i := range elems {
		p.FodderElement(p, &elems[i], ctx)
	}
}
// ForSpec traverses a ForSpec: any outer spec first (recursively), then the
// fodder around the for/var/in tokens, the iterated expression, and finally
// each if-condition in order.
func (*Base) ForSpec(p ASTPass, forSpec *ast.ForSpec, ctx Context) {
	if forSpec.Outer != nil {
		p.ForSpec(p, forSpec.Outer, ctx)
	}
	p.Fodder(p, &forSpec.ForFodder, ctx)
	p.Fodder(p, &forSpec.VarFodder, ctx)
	p.Fodder(p, &forSpec.InFodder, ctx)
	p.Visit(p, &forSpec.Expr, ctx)
	for i := range forSpec.Conditions {
		cond := &forSpec.Conditions[i]
		p.Fodder(p, &cond.IfFodder, ctx)
		p.Visit(p, &cond.Expr, ctx)
	}
}
// Parameters traverses a parameter list: the left paren fodder, each
// parameter (name, optional default argument, trailing comma fodder), and
// the right paren fodder.
func (*Base) Parameters(p ASTPass, l *ast.Fodder, params *[]ast.Parameter, r *ast.Fodder, ctx Context) {
	p.Fodder(p, l, ctx)
	list := *params
	for i := range list {
		prm := &list[i]
		p.Fodder(p, &prm.NameFodder, ctx)
		if prm.DefaultArg != nil {
			p.Fodder(p, &prm.EqFodder, ctx)
			p.Visit(p, &prm.DefaultArg, ctx)
		}
		p.Fodder(p, &prm.CommaFodder, ctx)
	}
	p.Fodder(p, r, ctx)
}
// Arguments traverses an argument list: the left paren fodder, positional
// arguments, then named arguments, then the right paren fodder.
func (*Base) Arguments(p ASTPass, l *ast.Fodder, args *ast.Arguments, r *ast.Fodder, ctx Context) {
	p.Fodder(p, l, ctx)
	for i := range args.Positional {
		pos := &args.Positional[i]
		p.Visit(p, &pos.Expr, ctx)
		p.Fodder(p, &pos.CommaFodder, ctx)
	}
	for i := range args.Named {
		named := &args.Named[i]
		p.Fodder(p, &named.NameFodder, ctx)
		p.Fodder(p, &named.EqFodder, ctx)
		p.Visit(p, &named.Arg, ctx)
		p.Fodder(p, &named.CommaFodder, ctx)
	}
	p.Fodder(p, r, ctx)
}
// FieldParams is factored out of ObjectField: it traverses the parameter
// list of a field if (and only if) the field is a method.
func (*Base) FieldParams(p ASTPass, field *ast.ObjectField, ctx Context) {
	m := field.Method
	if m == nil {
		return
	}
	p.Parameters(p, &m.ParenLeftFodder, &m.Parameters, &m.ParenRightFodder, ctx)
}
// ObjectField traverses a single field.  The sub-nodes that exist depend on
// the field kind, so each case lists its own traversal order; the trailing
// comma fodder is shared by every kind.
func (*Base) ObjectField(p ASTPass, field *ast.ObjectField, ctx Context) {
	switch field.Kind {
	case ast.ObjectLocal:
		// local id[(params)] = expr2
		p.Fodder(p, &field.Fodder1, ctx)
		p.Fodder(p, &field.Fodder2, ctx)
		p.FieldParams(p, field, ctx)
		p.Fodder(p, &field.OpFodder, ctx)
		p.Visit(p, &field.Expr2, ctx)
	case ast.ObjectFieldID:
		// id[(params)]: expr2
		p.Fodder(p, &field.Fodder1, ctx)
		p.FieldParams(p, field, ctx)
		p.Fodder(p, &field.OpFodder, ctx)
		p.Visit(p, &field.Expr2, ctx)
	case ast.ObjectFieldStr:
		// "str"[(params)]: expr2 — the name is itself an expression.
		p.Visit(p, &field.Expr1, ctx)
		p.FieldParams(p, field, ctx)
		p.Fodder(p, &field.OpFodder, ctx)
		p.Visit(p, &field.Expr2, ctx)
	case ast.ObjectFieldExpr:
		// [expr1][(params)]: expr2
		p.Fodder(p, &field.Fodder1, ctx)
		p.Visit(p, &field.Expr1, ctx)
		p.Fodder(p, &field.Fodder2, ctx)
		p.FieldParams(p, field, ctx)
		p.Fodder(p, &field.OpFodder, ctx)
		p.Visit(p, &field.Expr2, ctx)
	case ast.ObjectAssert:
		// assert expr2 [: expr3]
		p.Fodder(p, &field.Fodder1, ctx)
		p.Visit(p, &field.Expr2, ctx)
		if field.Expr3 != nil {
			p.Fodder(p, &field.OpFodder, ctx)
			p.Visit(p, &field.Expr3, ctx)
		}
	}
	p.Fodder(p, &field.CommaFodder, ctx)
}
// ObjectFields visits each field of an object, in order.
func (*Base) ObjectFields(p ASTPass, fields *ast.ObjectFields, ctx Context) {
	list := *fields
	for i := range list {
		p.ObjectField(p, &list[i], ctx)
	}
}
// Apply traverses a function application: target, argument list, and
// the optional trailing "tailstrict" keyword's fodder.
func (*Base) Apply(p ASTPass, node *ast.Apply, ctx Context) {
	p.Visit(p, &node.Target, ctx)
	p.Arguments(p, &node.FodderLeft, &node.Arguments, &node.FodderRight, ctx)
	if node.TailStrict {
		// TailStrictFodder is only meaningful when the keyword is present.
		p.Fodder(p, &node.TailStrictFodder, ctx)
	}
}
// ApplyBrace traverses an `expr { ... }` application: the left expression
// followed by the right (brace) expression; it has no fodder of its own.
func (*Base) ApplyBrace(p ASTPass, node *ast.ApplyBrace, ctx Context) {
	p.Visit(p, &node.Left, ctx)
	p.Visit(p, &node.Right, ctx)
}
// Array traverses an array literal: each element expression with its
// trailing comma fodder, then the closing bracket's fodder.
func (*Base) Array(p ASTPass, node *ast.Array, ctx Context) {
	for i := range node.Elements {
		elem := &node.Elements[i]
		p.Visit(p, &elem.Expr, ctx)
		p.Fodder(p, &elem.CommaFodder, ctx)
	}
	p.Fodder(p, &node.CloseFodder, ctx)
}
// ArrayComp traverses an array comprehension: body expression, optional
// trailing comma fodder, the for/if spec, then the closing bracket fodder.
func (*Base) ArrayComp(p ASTPass, node *ast.ArrayComp, ctx Context) {
	p.Visit(p, &node.Body, ctx)
	p.Fodder(p, &node.TrailingCommaFodder, ctx)
	p.ForSpec(p, &node.Spec, ctx)
	p.Fodder(p, &node.CloseFodder, ctx)
}
// Assert traverses an assert expression: condition, optional `: message`,
// the semicolon fodder, and the expression that follows the assert.
func (*Base) Assert(p ASTPass, node *ast.Assert, ctx Context) {
	p.Visit(p, &node.Cond, ctx)
	if node.Message != nil {
		// ColonFodder only exists when a message clause is present.
		p.Fodder(p, &node.ColonFodder, ctx)
		p.Visit(p, &node.Message, ctx)
	}
	p.Fodder(p, &node.SemicolonFodder, ctx)
	p.Visit(p, &node.Rest, ctx)
}
// Binary traverses a binary operation: left operand, operator fodder,
// right operand. (The node's opening fodder lives inside Left — see
// ast.Node.OpenFodder for the left-recursion convention.)
func (*Base) Binary(p ASTPass, node *ast.Binary, ctx Context) {
	p.Visit(p, &node.Left, ctx)
	p.Fodder(p, &node.OpFodder, ctx)
	p.Visit(p, &node.Right, ctx)
}
// Conditional traverses an if/then[/else] expression in token order;
// the else branch and its fodder are only visited when present.
func (*Base) Conditional(p ASTPass, node *ast.Conditional, ctx Context) {
	p.Visit(p, &node.Cond, ctx)
	p.Fodder(p, &node.ThenFodder, ctx)
	p.Visit(p, &node.BranchTrue, ctx)
	if node.BranchFalse != nil {
		p.Fodder(p, &node.ElseFodder, ctx)
		p.Visit(p, &node.BranchFalse, ctx)
	}
}
// Dollar cannot descend any further: `$` is a leaf node whose only
// fodder is the opening fodder handled by Visit.
func (*Base) Dollar(p ASTPass, node *ast.Dollar, ctx Context) {
}
// Error traverses an `error expr` node by visiting its message expression.
func (*Base) Error(p ASTPass, node *ast.Error, ctx Context) {
	p.Visit(p, &node.Expr, ctx)
}
// Function traverses a function literal: the parenthesized parameter
// list, then the body expression.
func (*Base) Function(p ASTPass, node *ast.Function, ctx Context) {
	p.Parameters(p, &node.ParenLeftFodder, &node.Parameters, &node.ParenRightFodder, ctx)
	p.Visit(p, &node.Body, ctx)
}
// Import traverses an import node. The filename is a LiteralString that
// is not reached via Visit, so its fodder must be processed explicitly here.
func (*Base) Import(p ASTPass, node *ast.Import, ctx Context) {
	p.Fodder(p, &node.File.Fodder, ctx)
	p.LiteralString(p, node.File, ctx)
}
// ImportStr traverses an importstr node; like Import, the filename
// literal's fodder is handled directly rather than through Visit.
func (*Base) ImportStr(p ASTPass, node *ast.ImportStr, ctx Context) {
	p.Fodder(p, &node.File.Fodder, ctx)
	p.LiteralString(p, node.File, ctx)
}
// Index traverses an index node (`target[index]` or `target.id`).
// When Id is set (dot form) there is no index expression to visit;
// LeftBracketFodder then precedes the identifier.
func (*Base) Index(p ASTPass, node *ast.Index, ctx Context) {
	p.Visit(p, &node.Target, ctx)
	p.Fodder(p, &node.LeftBracketFodder, ctx)
	if node.Id == nil {
		p.Visit(p, &node.Index, ctx)
		p.Fodder(p, &node.RightBracketFodder, ctx)
	}
}
// InSuper traverses an `e in super` node by visiting the index expression.
func (*Base) InSuper(p ASTPass, node *ast.InSuper, ctx Context) {
	p.Visit(p, &node.Index, ctx)
}
// LiteralBoolean cannot descend any further: true/false is a leaf node.
func (*Base) LiteralBoolean(p ASTPass, node *ast.LiteralBoolean, ctx Context) {
}
// LiteralNull cannot descend any further: null is a leaf node.
func (*Base) LiteralNull(p ASTPass, node *ast.LiteralNull, ctx Context) {
}
// LiteralNumber cannot descend any further: a number is a leaf node.
func (*Base) LiteralNumber(p ASTPass, node *ast.LiteralNumber, ctx Context) {
}
// LiteralString cannot descend any further: a string is a leaf node.
// (Import/ImportStr call this directly for their filename literals.)
func (*Base) LiteralString(p ASTPass, node *ast.LiteralString, ctx Context) {
}
// Local traverses a local expression: each bind's tokens in source order
// (variable, optional sugared-function params, `=`, body, trailing
// fodder), then the expression the binds are scoped over.
func (*Base) Local(p ASTPass, node *ast.Local, ctx Context) {
	for i := range node.Binds {
		b := &node.Binds[i]
		p.Fodder(p, &b.VarFodder, ctx)
		if b.Fun != nil {
			// Sugared form: local f(x) = ... carries a parameter list.
			p.Parameters(p, &b.Fun.ParenLeftFodder, &b.Fun.Parameters, &b.Fun.ParenRightFodder, ctx)
		}
		p.Fodder(p, &b.EqFodder, ctx)
		p.Visit(p, &b.Body, ctx)
		p.Fodder(p, &b.CloseFodder, ctx)
	}
	p.Visit(p, &node.Body, ctx)
}
// Object traverses an object literal: its fields, then the closing
// brace's fodder.
func (*Base) Object(p ASTPass, node *ast.Object, ctx Context) {
	p.ObjectFields(p, &node.Fields, ctx)
	p.Fodder(p, &node.CloseFodder, ctx)
}
// ObjectComp traverses an object comprehension: the field template(s),
// the for/if spec, then the closing brace's fodder.
func (*Base) ObjectComp(p ASTPass, node *ast.ObjectComp, ctx Context) {
	p.ObjectFields(p, &node.Fields, ctx)
	p.ForSpec(p, &node.Spec, ctx)
	p.Fodder(p, &node.CloseFodder, ctx)
}
// Parens traverses a parenthesized expression: the inner expression,
// then the closing paren's fodder.
func (*Base) Parens(p ASTPass, node *ast.Parens, ctx Context) {
	p.Visit(p, &node.Inner, ctx)
	p.Fodder(p, &node.CloseFodder, ctx)
}
// Self cannot descend any further: `self` is a leaf node.
func (*Base) Self(p ASTPass, node *ast.Self, ctx Context) {
}
// Slice traverses target[begin:end:step]. Every colon/bracket fodder is
// processed even when the corresponding index expression is omitted,
// matching the concrete syntax where the separators may still appear.
func (*Base) Slice(p ASTPass, node *ast.Slice, ctx Context) {
	p.Visit(p, &node.Target, ctx)
	p.Fodder(p, &node.LeftBracketFodder, ctx)
	if node.BeginIndex != nil {
		p.Visit(p, &node.BeginIndex, ctx)
	}
	p.Fodder(p, &node.EndColonFodder, ctx)
	if node.EndIndex != nil {
		p.Visit(p, &node.EndIndex, ctx)
	}
	p.Fodder(p, &node.StepColonFodder, ctx)
	if node.Step != nil {
		p.Visit(p, &node.Step, ctx)
	}
	p.Fodder(p, &node.RightBracketFodder, ctx)
}
// SuperIndex traverses super[index] / super.id. The Index expression is
// only visited for the bracket form (Id == nil); IDFodder is processed
// unconditionally.
func (*Base) SuperIndex(p ASTPass, node *ast.SuperIndex, ctx Context) {
	p.Fodder(p, &node.DotFodder, ctx)
	if node.Id == nil {
		p.Visit(p, &node.Index, ctx)
	}
	p.Fodder(p, &node.IDFodder, ctx)
}
// Unary traverses a unary operation by visiting its operand.
func (*Base) Unary(p ASTPass, node *ast.Unary, ctx Context) {
	p.Visit(p, &node.Expr, ctx)
}
// Var cannot descend any further: a variable reference is a leaf node.
func (*Base) Var(p ASTPass, node *ast.Var, ctx Context) {
}
// Visit traverses into an arbitrary node type: it processes the node's
// opening fodder, then dispatches to the kind-specific method on p.
func (*Base) Visit(p ASTPass, node *ast.Node, ctx Context) {
	// Copy the opening fodder out, let the pass rewrite the copy, and
	// write it back through OpenFodder. Derived passes receive a pointer
	// to the copy, so the write-back is what makes their edits stick.
	f := *(*node).OpenFodder()
	p.Fodder(p, &f, ctx)
	*(*node).OpenFodder() = f
	// Dispatch on the concrete node type. Unknown types fall through
	// silently (no default case).
	switch node := (*node).(type) {
	case *ast.Apply:
		p.Apply(p, node, ctx)
	case *ast.ApplyBrace:
		p.ApplyBrace(p, node, ctx)
	case *ast.Array:
		p.Array(p, node, ctx)
	case *ast.ArrayComp:
		p.ArrayComp(p, node, ctx)
	case *ast.Assert:
		p.Assert(p, node, ctx)
	case *ast.Binary:
		p.Binary(p, node, ctx)
	case *ast.Conditional:
		p.Conditional(p, node, ctx)
	case *ast.Dollar:
		p.Dollar(p, node, ctx)
	case *ast.Error:
		p.Error(p, node, ctx)
	case *ast.Function:
		p.Function(p, node, ctx)
	case *ast.Import:
		p.Import(p, node, ctx)
	case *ast.ImportStr:
		p.ImportStr(p, node, ctx)
	case *ast.Index:
		p.Index(p, node, ctx)
	case *ast.InSuper:
		p.InSuper(p, node, ctx)
	case *ast.LiteralBoolean:
		p.LiteralBoolean(p, node, ctx)
	case *ast.LiteralNull:
		p.LiteralNull(p, node, ctx)
	case *ast.LiteralNumber:
		p.LiteralNumber(p, node, ctx)
	case *ast.LiteralString:
		p.LiteralString(p, node, ctx)
	case *ast.Local:
		p.Local(p, node, ctx)
	case *ast.Object:
		p.Object(p, node, ctx)
	case *ast.ObjectComp:
		p.ObjectComp(p, node, ctx)
	case *ast.Parens:
		p.Parens(p, node, ctx)
	case *ast.Self:
		p.Self(p, node, ctx)
	case *ast.Slice:
		p.Slice(p, node, ctx)
	case *ast.SuperIndex:
		p.SuperIndex(p, node, ctx)
	case *ast.Unary:
		p.Unary(p, node, ctx)
	case *ast.Var:
		p.Var(p, node, ctx)
	}
}
// BaseContext just returns nil; passes that need state override this to
// supply their own root Context.
func (*Base) BaseContext(ASTPass) Context {
	return nil
}
// File processes a whole Jsonnet file: it builds the base context,
// visits the root node, then processes the fodder after the final token.
func (*Base) File(p ASTPass, node *ast.Node, finalFodder *ast.Fodder) {
	ctx := p.BaseContext(p)
	p.Visit(p, node, ctx)
	p.Fodder(p, finalFodder, ctx)
}

View File

@ -17,14 +17,12 @@ limitations under the License.
package program
import (
"bytes"
"encoding/hex"
"fmt"
"reflect"
"unicode/utf8"
"github.com/google/go-jsonnet/ast"
"github.com/google/go-jsonnet/internal/errors"
"github.com/google/go-jsonnet/internal/parser"
)
var desugaredBop = map[ast.BinaryOp]ast.Identifier{
@ -41,60 +39,6 @@ func makeStr(s string) *ast.LiteralString {
}
}
// stringUnescape interprets backslash escape sequences in the body of a
// string literal s, returning the unescaped text. loc is used only for
// error locations. Supported escapes: \" \' \\ \/ \b \f \n \r \t and
// \uXXXX (exactly four hex digits, decoded as a single code unit —
// surrogate pairs are not combined here).
func stringUnescape(loc *ast.LocationRange, s string) (string, error) {
	var buf bytes.Buffer
	// read one rune at a time
	for i := 0; i < len(s); {
		r, w := utf8.DecodeRuneInString(s[i:])
		i += w
		switch r {
		case '\\':
			if i >= len(s) {
				return "", errors.MakeStaticError("Truncated escape sequence in string literal.", *loc)
			}
			r2, w := utf8.DecodeRuneInString(s[i:])
			i += w
			switch r2 {
			case '"':
				buf.WriteRune('"')
			case '\'':
				buf.WriteRune('\'')
			case '\\':
				buf.WriteRune('\\')
			case '/':
				buf.WriteRune('/') // See json.org, \/ is a valid escape.
			case 'b':
				buf.WriteRune('\b')
			case 'f':
				buf.WriteRune('\f')
			case 'n':
				buf.WriteRune('\n')
			case 'r':
				buf.WriteRune('\r')
			case 't':
				buf.WriteRune('\t')
			case 'u':
				// i now points just past the 'u'; the next 4 bytes must
				// be hex digits.
				if i+4 > len(s) {
					return "", errors.MakeStaticError("Truncated unicode escape sequence in string literal.", *loc)
				}
				codeBytes, err := hex.DecodeString(s[i : i+4])
				if err != nil {
					// Fix: report the offending hex digits s[i:i+4], not
					// s[0:4] (the first four characters of the literal).
					return "", errors.MakeStaticError(fmt.Sprintf("Unicode escape sequence was malformed: %s", s[i:i+4]), *loc)
				}
				code := int(codeBytes[0])*256 + int(codeBytes[1])
				buf.WriteRune(rune(code))
				i += 4
			default:
				return "", errors.MakeStaticError(fmt.Sprintf("Unknown escape sequence in string literal: \\%c", r2), *loc)
			}
		default:
			buf.WriteRune(r)
		}
	}
	return buf.String(), nil
}
func desugarFields(nodeBase ast.NodeBase, fields *ast.ObjectFields, objLevel int) (*ast.DesugaredObject, error) {
for i := range *fields {
field := &((*fields)[i])
@ -518,7 +462,7 @@ func desugar(astPtr *ast.Node, objLevel int) (err error) {
case *ast.LiteralString:
if node.Kind.FullyEscaped() {
unescaped, err := stringUnescape(node.Loc(), node.Value)
unescaped, err := parser.StringUnescape(node.Loc(), node.Value)
if err != nil {
return err
}

View File

@ -7,7 +7,7 @@ import (
// SnippetToAST converts a Jsonnet code snippet to a desugared and analyzed AST.
func SnippetToAST(filename string, snippet string) (ast.Node, error) {
node, err := parser.SnippetToRawAST(filename, snippet)
node, _, err := parser.SnippetToRawAST(filename, snippet)
if err != nil {
return nil, err
}

View File

@ -137,7 +137,7 @@ func runInternalJsonnet(i jsonnetInput) jsonnetResult {
vm.NativeFunction(jsonToString)
vm.NativeFunction(nativeError)
rawAST, staticErr := parser.SnippetToRawAST(i.name, string(i.input))
rawAST, _, staticErr := parser.SnippetToRawAST(i.name, string(i.input))
if staticErr != nil {
return jsonnetResult{
output: errFormatter.Format(staticErr) + "\n",

View File

@ -21,10 +21,11 @@ fi
export IMPLEMENTATION=golang
go build ./cmd/jsonnet
go build ./cmd/jsonnetfmt
export DISABLE_LIB_TESTS=true
export DISABLE_FMT_TESTS=true
export DISABLE_ERROR_TESTS=true
export JSONNETFMT_BIN="$PWD/jsonnetfmt"
export JSONNET_BIN="$PWD/jsonnet"
git submodule update --recursive cpp-jsonnet