author    Christopher Speller <crspeller@gmail.com>  2018-04-16 05:37:14 -0700
committer Joram Wilander <jwawilander@gmail.com>  2018-04-16 08:37:14 -0400
commit    6e2cb00008cbf09e556b00f87603797fcaa47e09 (patch)
tree      3c0eb55ff4226a3f024aad373140d1fb860a6404 /vendor/github.com/pelletier
parent    bf24f51c4e1cc6286885460672f7f449e8c6f5ef (diff)
download  chat-6e2cb00008cbf09e556b00f87603797fcaa47e09.tar.gz
          chat-6e2cb00008cbf09e556b00f87603797fcaa47e09.tar.bz2
          chat-6e2cb00008cbf09e556b00f87603797fcaa47e09.zip
Dependency upgrades and moving to dep. (#8630)
Diffstat (limited to 'vendor/github.com/pelletier')
-rw-r--r--  vendor/github.com/pelletier/go-toml/benchmark_test.go        192
-rw-r--r--  vendor/github.com/pelletier/go-toml/cmd/test_program.go       91
-rw-r--r--  vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go      72
-rw-r--r--  vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go  82
-rw-r--r--  vendor/github.com/pelletier/go-toml/cmd/tomll/main.go         66
-rw-r--r--  vendor/github.com/pelletier/go-toml/doc_test.go              105
-rw-r--r--  vendor/github.com/pelletier/go-toml/keysparsing_test.go       63
-rw-r--r--  vendor/github.com/pelletier/go-toml/lexer_test.go            750
-rw-r--r--  vendor/github.com/pelletier/go-toml/marshal_test.go          806
-rw-r--r--  vendor/github.com/pelletier/go-toml/parser_test.go           899
-rw-r--r--  vendor/github.com/pelletier/go-toml/position_test.go          29
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/doc.go             175
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/lexer.go           357
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/lexer_test.go      179
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/match.go           232
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/match_test.go      202
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/parser.go          275
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/parser_test.go     482
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/query.go           158
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/query_test.go      157
-rw-r--r--  vendor/github.com/pelletier/go-toml/query/tokens.go          106
-rw-r--r--  vendor/github.com/pelletier/go-toml/token_test.go             67
-rw-r--r--  vendor/github.com/pelletier/go-toml/toml_test.go             106
-rw-r--r--  vendor/github.com/pelletier/go-toml/tomltree_create_test.go  126
-rw-r--r--  vendor/github.com/pelletier/go-toml/tomltree_write_test.go   376
25 files changed, 0 insertions, 6153 deletions
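The deleted files below are vendored go-toml test files, commands, and the unused query subpackage, dropped as part of the move to dep (#8630): dep can prune test files and unreferenced packages from vendor/ when it populates it. The Gopkg.toml used by this commit is not shown on this page, so the prune block below is only an illustrative sketch of that mechanism, not the repository's actual configuration.

    # Illustrative Gopkg.toml prune settings (hypothetical for this repo).
    # With these set, "dep ensure" strips the listed categories of files
    # from vendor/, producing deletions like the ones in this diff.
    [prune]
      go-tests = true         # remove *_test.go files from vendored packages
      unused-packages = true  # remove vendored packages that nothing imports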
diff --git a/vendor/github.com/pelletier/go-toml/benchmark_test.go b/vendor/github.com/pelletier/go-toml/benchmark_test.go
deleted file mode 100644
index e1f209dfa..000000000
--- a/vendor/github.com/pelletier/go-toml/benchmark_test.go
+++ /dev/null
@@ -1,192 +0,0 @@
-package toml
-
-import (
- "bytes"
- "encoding/json"
- "io/ioutil"
- "testing"
- "time"
-
- burntsushi "github.com/BurntSushi/toml"
- yaml "gopkg.in/yaml.v2"
-)
-
-type benchmarkDoc struct {
- Table struct {
- Key string
- Subtable struct {
- Key string
- }
- Inline struct {
- Name struct {
- First string
- Last string
- }
- Point struct {
- X int64
- U int64
- }
- }
- }
- String struct {
- Basic struct {
- Basic string
- }
- Multiline struct {
- Key1 string
- Key2 string
- Key3 string
- Continued struct {
- Key1 string
- Key2 string
- Key3 string
- }
- }
- Literal struct {
- Winpath string
- Winpath2 string
- Quoted string
- Regex string
- Multiline struct {
- Regex2 string
- Lines string
- }
- }
- }
- Integer struct {
- Key1 int64
- Key2 int64
- Key3 int64
- Key4 int64
- Underscores struct {
- Key1 int64
- Key2 int64
- Key3 int64
- }
- }
- Float struct {
- Fractional struct {
- Key1 float64
- Key2 float64
- Key3 float64
- }
- Exponent struct {
- Key1 float64
- Key2 float64
- Key3 float64
- }
- Both struct {
- Key float64
- }
- Underscores struct {
- Key1 float64
- Key2 float64
- }
- }
- Boolean struct {
- True bool
- False bool
- }
- Datetime struct {
- Key1 time.Time
- Key2 time.Time
- Key3 time.Time
- }
- Array struct {
- Key1 []int64
- Key2 []string
- Key3 [][]int64
- // TODO: Key4 not supported by go-toml's Unmarshal
- Key5 []int64
- Key6 []int64
- }
- Products []struct {
- Name string
- Sku int64
- Color string
- }
- Fruit []struct {
- Name string
- Physical struct {
- Color string
- Shape string
- Variety []struct {
- Name string
- }
- }
- }
-}
-
-func BenchmarkParseToml(b *testing.B) {
- fileBytes, err := ioutil.ReadFile("benchmark.toml")
- if err != nil {
- b.Fatal(err)
- }
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
- _, err := LoadReader(bytes.NewReader(fileBytes))
- if err != nil {
- b.Fatal(err)
- }
- }
-}
-
-func BenchmarkUnmarshalToml(b *testing.B) {
- bytes, err := ioutil.ReadFile("benchmark.toml")
- if err != nil {
- b.Fatal(err)
- }
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
- target := benchmarkDoc{}
- err := Unmarshal(bytes, &target)
- if err != nil {
- b.Fatal(err)
- }
- }
-}
-
-func BenchmarkUnmarshalBurntSushiToml(b *testing.B) {
- bytes, err := ioutil.ReadFile("benchmark.toml")
- if err != nil {
- b.Fatal(err)
- }
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
- target := benchmarkDoc{}
- err := burntsushi.Unmarshal(bytes, &target)
- if err != nil {
- b.Fatal(err)
- }
- }
-}
-
-func BenchmarkUnmarshalJson(b *testing.B) {
- bytes, err := ioutil.ReadFile("benchmark.json")
- if err != nil {
- b.Fatal(err)
- }
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
- target := benchmarkDoc{}
- err := json.Unmarshal(bytes, &target)
- if err != nil {
- b.Fatal(err)
- }
- }
-}
-
-func BenchmarkUnmarshalYaml(b *testing.B) {
- bytes, err := ioutil.ReadFile("benchmark.yml")
- if err != nil {
- b.Fatal(err)
- }
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
- target := benchmarkDoc{}
- err := yaml.Unmarshal(bytes, &target)
- if err != nil {
- b.Fatal(err)
- }
- }
-}
diff --git a/vendor/github.com/pelletier/go-toml/cmd/test_program.go b/vendor/github.com/pelletier/go-toml/cmd/test_program.go
deleted file mode 100644
index 73077f614..000000000
--- a/vendor/github.com/pelletier/go-toml/cmd/test_program.go
+++ /dev/null
@@ -1,91 +0,0 @@
-package main
-
-import (
- "encoding/json"
- "fmt"
- "io/ioutil"
- "log"
- "os"
- "time"
-
- "github.com/pelletier/go-toml"
-)
-
-func main() {
- bytes, err := ioutil.ReadAll(os.Stdin)
- if err != nil {
- log.Fatalf("Error during TOML read: %s", err)
- os.Exit(2)
- }
- tree, err := toml.Load(string(bytes))
- if err != nil {
- log.Fatalf("Error during TOML load: %s", err)
- os.Exit(1)
- }
-
- typedTree := translate(*tree)
-
- if err := json.NewEncoder(os.Stdout).Encode(typedTree); err != nil {
- log.Fatalf("Error encoding JSON: %s", err)
- os.Exit(3)
- }
-
- os.Exit(0)
-}
-
-func translate(tomlData interface{}) interface{} {
- switch orig := tomlData.(type) {
- case map[string]interface{}:
- typed := make(map[string]interface{}, len(orig))
- for k, v := range orig {
- typed[k] = translate(v)
- }
- return typed
- case *toml.Tree:
- return translate(*orig)
- case toml.Tree:
- keys := orig.Keys()
- typed := make(map[string]interface{}, len(keys))
- for _, k := range keys {
- typed[k] = translate(orig.GetPath([]string{k}))
- }
- return typed
- case []*toml.Tree:
- typed := make([]map[string]interface{}, len(orig))
- for i, v := range orig {
- typed[i] = translate(v).(map[string]interface{})
- }
- return typed
- case []map[string]interface{}:
- typed := make([]map[string]interface{}, len(orig))
- for i, v := range orig {
- typed[i] = translate(v).(map[string]interface{})
- }
- return typed
- case []interface{}:
- typed := make([]interface{}, len(orig))
- for i, v := range orig {
- typed[i] = translate(v)
- }
- return tag("array", typed)
- case time.Time:
- return tag("datetime", orig.Format("2006-01-02T15:04:05Z"))
- case bool:
- return tag("bool", fmt.Sprintf("%v", orig))
- case int64:
- return tag("integer", fmt.Sprintf("%d", orig))
- case float64:
- return tag("float", fmt.Sprintf("%v", orig))
- case string:
- return tag("string", orig)
- }
-
- panic(fmt.Sprintf("Unknown type: %T", tomlData))
-}
-
-func tag(typeName string, data interface{}) map[string]interface{} {
- return map[string]interface{}{
- "type": typeName,
- "value": data,
- }
-}
diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go
deleted file mode 100644
index b2d6fc673..000000000
--- a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go
+++ /dev/null
@@ -1,72 +0,0 @@
-// Tomljson reads TOML and converts to JSON.
-//
-// Usage:
-// cat file.toml | tomljson > file.json
-// tomljson file1.toml > file.json
-package main
-
-import (
- "encoding/json"
- "flag"
- "fmt"
- "io"
- "os"
-
- "github.com/pelletier/go-toml"
-)
-
-func main() {
- flag.Usage = func() {
- fmt.Fprintln(os.Stderr, `tomljson can be used in two ways:
-Writing to STDIN and reading from STDOUT:
- cat file.toml | tomljson > file.json
-
-Reading from a file name:
- tomljson file.toml
-`)
- }
- flag.Parse()
- os.Exit(processMain(flag.Args(), os.Stdin, os.Stdout, os.Stderr))
-}
-
-func processMain(files []string, defaultInput io.Reader, output io.Writer, errorOutput io.Writer) int {
- // read from stdin and print to stdout
- inputReader := defaultInput
-
- if len(files) > 0 {
- var err error
- inputReader, err = os.Open(files[0])
- if err != nil {
- printError(err, errorOutput)
- return -1
- }
- }
- s, err := reader(inputReader)
- if err != nil {
- printError(err, errorOutput)
- return -1
- }
- io.WriteString(output, s+"\n")
- return 0
-}
-
-func printError(err error, output io.Writer) {
- io.WriteString(output, err.Error()+"\n")
-}
-
-func reader(r io.Reader) (string, error) {
- tree, err := toml.LoadReader(r)
- if err != nil {
- return "", err
- }
- return mapToJSON(tree)
-}
-
-func mapToJSON(tree *toml.Tree) (string, error) {
- treeMap := tree.ToMap()
- bytes, err := json.MarshalIndent(treeMap, "", " ")
- if err != nil {
- return "", err
- }
- return string(bytes[:]), nil
-}
diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go
deleted file mode 100644
index 0b4bdbb11..000000000
--- a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go
+++ /dev/null
@@ -1,82 +0,0 @@
-package main
-
-import (
- "bytes"
- "io/ioutil"
- "os"
- "strings"
- "testing"
-)
-
-func expectBufferEquality(t *testing.T, name string, buffer *bytes.Buffer, expected string) {
- output := buffer.String()
- if output != expected {
- t.Errorf("incorrect %s:\n%s\n\nexpected %s:\n%s", name, output, name, expected)
- t.Log([]rune(output))
- t.Log([]rune(expected))
- }
-}
-
-func expectProcessMainResults(t *testing.T, input string, args []string, exitCode int, expectedOutput string, expectedError string) {
- inputReader := strings.NewReader(input)
- outputBuffer := new(bytes.Buffer)
- errorBuffer := new(bytes.Buffer)
-
- returnCode := processMain(args, inputReader, outputBuffer, errorBuffer)
-
- expectBufferEquality(t, "output", outputBuffer, expectedOutput)
- expectBufferEquality(t, "error", errorBuffer, expectedError)
-
- if returnCode != exitCode {
- t.Error("incorrect return code:", returnCode, "expected", exitCode)
- }
-}
-
-func TestProcessMainReadFromStdin(t *testing.T) {
- input := `
- [mytoml]
- a = 42`
- expectedOutput := `{
- "mytoml": {
- "a": 42
- }
-}
-`
- expectedError := ``
- expectedExitCode := 0
-
- expectProcessMainResults(t, input, []string{}, expectedExitCode, expectedOutput, expectedError)
-}
-
-func TestProcessMainReadFromFile(t *testing.T) {
- input := `
- [mytoml]
- a = 42`
-
- tmpfile, err := ioutil.TempFile("", "example.toml")
- if err != nil {
- t.Fatal(err)
- }
- if _, err := tmpfile.Write([]byte(input)); err != nil {
- t.Fatal(err)
- }
-
- defer os.Remove(tmpfile.Name())
-
- expectedOutput := `{
- "mytoml": {
- "a": 42
- }
-}
-`
- expectedError := ``
- expectedExitCode := 0
-
- expectProcessMainResults(t, ``, []string{tmpfile.Name()}, expectedExitCode, expectedOutput, expectedError)
-}
-
-func TestProcessMainReadFromMissingFile(t *testing.T) {
- expectedError := `open /this/file/does/not/exist: no such file or directory
-`
- expectProcessMainResults(t, ``, []string{"/this/file/does/not/exist"}, -1, ``, expectedError)
-}
diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go b/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go
deleted file mode 100644
index 36c7e3759..000000000
--- a/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go
+++ /dev/null
@@ -1,66 +0,0 @@
-// Tomll is a linter for TOML
-//
-// Usage:
-// cat file.toml | tomll > file_linted.toml
-// tomll file1.toml file2.toml # lint the two files in place
-package main
-
-import (
- "flag"
- "fmt"
- "io"
- "io/ioutil"
- "os"
-
- "github.com/pelletier/go-toml"
-)
-
-func main() {
- flag.Usage = func() {
- fmt.Fprintln(os.Stderr, `tomll can be used in two ways:
-Writing to STDIN and reading from STDOUT:
- cat file.toml | tomll > file.toml
-
-Reading and updating a list of files:
- tomll a.toml b.toml c.toml
-
-When given a list of files, tomll will modify all files in place without asking.
-`)
- }
- flag.Parse()
- // read from stdin and print to stdout
- if flag.NArg() == 0 {
- s, err := lintReader(os.Stdin)
- if err != nil {
- io.WriteString(os.Stderr, err.Error())
- os.Exit(-1)
- }
- io.WriteString(os.Stdout, s)
- } else {
- // otherwise modify a list of files
- for _, filename := range flag.Args() {
- s, err := lintFile(filename)
- if err != nil {
- io.WriteString(os.Stderr, err.Error())
- os.Exit(-1)
- }
- ioutil.WriteFile(filename, []byte(s), 0644)
- }
- }
-}
-
-func lintFile(filename string) (string, error) {
- tree, err := toml.LoadFile(filename)
- if err != nil {
- return "", err
- }
- return tree.String(), nil
-}
-
-func lintReader(r io.Reader) (string, error) {
- tree, err := toml.LoadReader(r)
- if err != nil {
- return "", err
- }
- return tree.String(), nil
-}
diff --git a/vendor/github.com/pelletier/go-toml/doc_test.go b/vendor/github.com/pelletier/go-toml/doc_test.go
deleted file mode 100644
index 3b8171b22..000000000
--- a/vendor/github.com/pelletier/go-toml/doc_test.go
+++ /dev/null
@@ -1,105 +0,0 @@
-// code examples for godoc
-
-package toml_test
-
-import (
- "fmt"
- "log"
-
- toml "github.com/pelletier/go-toml"
-)
-
-func Example_tree() {
- config, err := toml.LoadFile("config.toml")
-
- if err != nil {
- fmt.Println("Error ", err.Error())
- } else {
- // retrieve data directly
- user := config.Get("postgres.user").(string)
- password := config.Get("postgres.password").(string)
-
- // or using an intermediate object
- configTree := config.Get("postgres").(*toml.Tree)
- user = configTree.Get("user").(string)
- password = configTree.Get("password").(string)
- fmt.Println("User is", user, " and password is", password)
-
- // show where elements are in the file
- fmt.Printf("User position: %v\n", configTree.GetPosition("user"))
- fmt.Printf("Password position: %v\n", configTree.GetPosition("password"))
- }
-}
-
-func Example_unmarshal() {
- type Employer struct {
- Name string
- Phone string
- }
- type Person struct {
- Name string
- Age int64
- Employer Employer
- }
-
- document := []byte(`
- name = "John"
- age = 30
- [employer]
- name = "Company Inc."
- phone = "+1 234 567 89012"
- `)
-
- person := Person{}
- toml.Unmarshal(document, &person)
- fmt.Println(person.Name, "is", person.Age, "and works at", person.Employer.Name)
- // Output:
- // John is 30 and works at Company Inc.
-}
-
-func ExampleMarshal() {
- type Postgres struct {
- User string `toml:"user"`
- Password string `toml:"password"`
- Database string `toml:"db" commented:"true" comment:"not used anymore"`
- }
- type Config struct {
- Postgres Postgres `toml:"postgres" comment:"Postgres configuration"`
- }
-
- config := Config{Postgres{User: "pelletier", Password: "mypassword", Database: "old_database"}}
- b, err := toml.Marshal(config)
- if err != nil {
- log.Fatal(err)
- }
- fmt.Println(string(b))
- // Output:
- // # Postgres configuration
- // [postgres]
- //
- // # not used anymore
- // # db = "old_database"
- // password = "mypassword"
- // user = "pelletier"
-}
-
-func ExampleUnmarshal() {
- type Postgres struct {
- User string
- Password string
- }
- type Config struct {
- Postgres Postgres
- }
-
- doc := []byte(`
- [postgres]
- user = "pelletier"
- password = "mypassword"`)
-
- config := Config{}
- toml.Unmarshal(doc, &config)
- fmt.Println("user=", config.Postgres.User)
- // Output:
- // user= pelletier
-}
diff --git a/vendor/github.com/pelletier/go-toml/keysparsing_test.go b/vendor/github.com/pelletier/go-toml/keysparsing_test.go
deleted file mode 100644
index 84cb82604..000000000
--- a/vendor/github.com/pelletier/go-toml/keysparsing_test.go
+++ /dev/null
@@ -1,63 +0,0 @@
-package toml
-
-import (
- "fmt"
- "testing"
-)
-
-func testResult(t *testing.T, key string, expected []string) {
- parsed, err := parseKey(key)
- t.Logf("key=%s expected=%s parsed=%s", key, expected, parsed)
- if err != nil {
- t.Fatal("Unexpected error:", err)
- }
- if len(expected) != len(parsed) {
- t.Fatal("Expected length", len(expected), "but", len(parsed), "parsed")
- }
- for index, expectedKey := range expected {
- if expectedKey != parsed[index] {
- t.Fatal("Expected", expectedKey, "at index", index, "but found", parsed[index])
- }
- }
-}
-
-func testError(t *testing.T, key string, expectedError string) {
- res, err := parseKey(key)
- if err == nil {
- t.Fatalf("Expected error, but succesfully parsed key %s", res)
- }
- if fmt.Sprintf("%s", err) != expectedError {
- t.Fatalf("Expected error \"%s\", but got \"%s\".", expectedError, err)
- }
-}
-
-func TestBareKeyBasic(t *testing.T) {
- testResult(t, "test", []string{"test"})
-}
-
-func TestBareKeyDotted(t *testing.T) {
- testResult(t, "this.is.a.key", []string{"this", "is", "a", "key"})
-}
-
-func TestDottedKeyBasic(t *testing.T) {
- testResult(t, "\"a.dotted.key\"", []string{"a.dotted.key"})
-}
-
-func TestBaseKeyPound(t *testing.T) {
- testError(t, "hello#world", "invalid bare character: #")
-}
-
-func TestQuotedKeys(t *testing.T) {
- testResult(t, `hello."foo".bar`, []string{"hello", "foo", "bar"})
- testResult(t, `"hello!"`, []string{"hello!"})
- testResult(t, `foo."ba.r".baz`, []string{"foo", "ba.r", "baz"})
-
- // escape sequences must not be converted
- testResult(t, `"hello\tworld"`, []string{`hello\tworld`})
-}
-
-func TestEmptyKey(t *testing.T) {
- testError(t, "", "empty key")
- testError(t, " ", "empty key")
- testResult(t, `""`, []string{""})
-}
diff --git a/vendor/github.com/pelletier/go-toml/lexer_test.go b/vendor/github.com/pelletier/go-toml/lexer_test.go
deleted file mode 100644
index cb4913031..000000000
--- a/vendor/github.com/pelletier/go-toml/lexer_test.go
+++ /dev/null
@@ -1,750 +0,0 @@
-package toml
-
-import (
- "reflect"
- "testing"
-)
-
-func testFlow(t *testing.T, input string, expectedFlow []token) {
- tokens := lexToml([]byte(input))
- if !reflect.DeepEqual(tokens, expectedFlow) {
- t.Fatal("Different flows. Expected\n", expectedFlow, "\nGot:\n", tokens)
- }
-}
-
-func TestValidKeyGroup(t *testing.T) {
- testFlow(t, "[hello world]", []token{
- {Position{1, 1}, tokenLeftBracket, "["},
- {Position{1, 2}, tokenKeyGroup, "hello world"},
- {Position{1, 13}, tokenRightBracket, "]"},
- {Position{1, 14}, tokenEOF, ""},
- })
-}
-
-func TestNestedQuotedUnicodeKeyGroup(t *testing.T) {
- testFlow(t, `[ j . "ʞ" . l ]`, []token{
- {Position{1, 1}, tokenLeftBracket, "["},
- {Position{1, 2}, tokenKeyGroup, ` j . "ʞ" . l `},
- {Position{1, 15}, tokenRightBracket, "]"},
- {Position{1, 16}, tokenEOF, ""},
- })
-}
-
-func TestUnclosedKeyGroup(t *testing.T) {
- testFlow(t, "[hello world", []token{
- {Position{1, 1}, tokenLeftBracket, "["},
- {Position{1, 2}, tokenError, "unclosed table key"},
- })
-}
-
-func TestComment(t *testing.T) {
- testFlow(t, "# blahblah", []token{
- {Position{1, 11}, tokenEOF, ""},
- })
-}
-
-func TestKeyGroupComment(t *testing.T) {
- testFlow(t, "[hello world] # blahblah", []token{
- {Position{1, 1}, tokenLeftBracket, "["},
- {Position{1, 2}, tokenKeyGroup, "hello world"},
- {Position{1, 13}, tokenRightBracket, "]"},
- {Position{1, 25}, tokenEOF, ""},
- })
-}
-
-func TestMultipleKeyGroupsComment(t *testing.T) {
- testFlow(t, "[hello world] # blahblah\n[test]", []token{
- {Position{1, 1}, tokenLeftBracket, "["},
- {Position{1, 2}, tokenKeyGroup, "hello world"},
- {Position{1, 13}, tokenRightBracket, "]"},
- {Position{2, 1}, tokenLeftBracket, "["},
- {Position{2, 2}, tokenKeyGroup, "test"},
- {Position{2, 6}, tokenRightBracket, "]"},
- {Position{2, 7}, tokenEOF, ""},
- })
-}
-
-func TestSimpleWindowsCRLF(t *testing.T) {
- testFlow(t, "a=4\r\nb=2", []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 2}, tokenEqual, "="},
- {Position{1, 3}, tokenInteger, "4"},
- {Position{2, 1}, tokenKey, "b"},
- {Position{2, 2}, tokenEqual, "="},
- {Position{2, 3}, tokenInteger, "2"},
- {Position{2, 4}, tokenEOF, ""},
- })
-}
-
-func TestBasicKey(t *testing.T) {
- testFlow(t, "hello", []token{
- {Position{1, 1}, tokenKey, "hello"},
- {Position{1, 6}, tokenEOF, ""},
- })
-}
-
-func TestBasicKeyWithUnderscore(t *testing.T) {
- testFlow(t, "hello_hello", []token{
- {Position{1, 1}, tokenKey, "hello_hello"},
- {Position{1, 12}, tokenEOF, ""},
- })
-}
-
-func TestBasicKeyWithDash(t *testing.T) {
- testFlow(t, "hello-world", []token{
- {Position{1, 1}, tokenKey, "hello-world"},
- {Position{1, 12}, tokenEOF, ""},
- })
-}
-
-func TestBasicKeyWithUppercaseMix(t *testing.T) {
- testFlow(t, "helloHELLOHello", []token{
- {Position{1, 1}, tokenKey, "helloHELLOHello"},
- {Position{1, 16}, tokenEOF, ""},
- })
-}
-
-func TestBasicKeyWithInternationalCharacters(t *testing.T) {
- testFlow(t, "héllÖ", []token{
- {Position{1, 1}, tokenKey, "héllÖ"},
- {Position{1, 6}, tokenEOF, ""},
- })
-}
-
-func TestBasicKeyAndEqual(t *testing.T) {
- testFlow(t, "hello =", []token{
- {Position{1, 1}, tokenKey, "hello"},
- {Position{1, 7}, tokenEqual, "="},
- {Position{1, 8}, tokenEOF, ""},
- })
-}
-
-func TestKeyWithSharpAndEqual(t *testing.T) {
- testFlow(t, "key#name = 5", []token{
- {Position{1, 1}, tokenError, "keys cannot contain # character"},
- })
-}
-
-func TestKeyWithSymbolsAndEqual(t *testing.T) {
- testFlow(t, "~!@$^&*()_+-`1234567890[]\\|/?><.,;:' = 5", []token{
- {Position{1, 1}, tokenError, "keys cannot contain ~ character"},
- })
-}
-
-func TestKeyEqualStringEscape(t *testing.T) {
- testFlow(t, `foo = "hello\""`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, "hello\""},
- {Position{1, 16}, tokenEOF, ""},
- })
-}
-
-func TestKeyEqualStringUnfinished(t *testing.T) {
- testFlow(t, `foo = "bar`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenError, "unclosed string"},
- })
-}
-
-func TestKeyEqualString(t *testing.T) {
- testFlow(t, `foo = "bar"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, "bar"},
- {Position{1, 12}, tokenEOF, ""},
- })
-}
-
-func TestKeyEqualTrue(t *testing.T) {
- testFlow(t, "foo = true", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenTrue, "true"},
- {Position{1, 11}, tokenEOF, ""},
- })
-}
-
-func TestKeyEqualFalse(t *testing.T) {
- testFlow(t, "foo = false", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenFalse, "false"},
- {Position{1, 12}, tokenEOF, ""},
- })
-}
-
-func TestArrayNestedString(t *testing.T) {
- testFlow(t, `a = [ ["hello", "world"] ]`, []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 3}, tokenEqual, "="},
- {Position{1, 5}, tokenLeftBracket, "["},
- {Position{1, 7}, tokenLeftBracket, "["},
- {Position{1, 9}, tokenString, "hello"},
- {Position{1, 15}, tokenComma, ","},
- {Position{1, 18}, tokenString, "world"},
- {Position{1, 24}, tokenRightBracket, "]"},
- {Position{1, 26}, tokenRightBracket, "]"},
- {Position{1, 27}, tokenEOF, ""},
- })
-}
-
-func TestArrayNestedInts(t *testing.T) {
- testFlow(t, "a = [ [42, 21], [10] ]", []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 3}, tokenEqual, "="},
- {Position{1, 5}, tokenLeftBracket, "["},
- {Position{1, 7}, tokenLeftBracket, "["},
- {Position{1, 8}, tokenInteger, "42"},
- {Position{1, 10}, tokenComma, ","},
- {Position{1, 12}, tokenInteger, "21"},
- {Position{1, 14}, tokenRightBracket, "]"},
- {Position{1, 15}, tokenComma, ","},
- {Position{1, 17}, tokenLeftBracket, "["},
- {Position{1, 18}, tokenInteger, "10"},
- {Position{1, 20}, tokenRightBracket, "]"},
- {Position{1, 22}, tokenRightBracket, "]"},
- {Position{1, 23}, tokenEOF, ""},
- })
-}
-
-func TestArrayInts(t *testing.T) {
- testFlow(t, "a = [ 42, 21, 10, ]", []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 3}, tokenEqual, "="},
- {Position{1, 5}, tokenLeftBracket, "["},
- {Position{1, 7}, tokenInteger, "42"},
- {Position{1, 9}, tokenComma, ","},
- {Position{1, 11}, tokenInteger, "21"},
- {Position{1, 13}, tokenComma, ","},
- {Position{1, 15}, tokenInteger, "10"},
- {Position{1, 17}, tokenComma, ","},
- {Position{1, 19}, tokenRightBracket, "]"},
- {Position{1, 20}, tokenEOF, ""},
- })
-}
-
-func TestMultilineArrayComments(t *testing.T) {
- testFlow(t, "a = [1, # wow\n2, # such items\n3, # so array\n]", []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 3}, tokenEqual, "="},
- {Position{1, 5}, tokenLeftBracket, "["},
- {Position{1, 6}, tokenInteger, "1"},
- {Position{1, 7}, tokenComma, ","},
- {Position{2, 1}, tokenInteger, "2"},
- {Position{2, 2}, tokenComma, ","},
- {Position{3, 1}, tokenInteger, "3"},
- {Position{3, 2}, tokenComma, ","},
- {Position{4, 1}, tokenRightBracket, "]"},
- {Position{4, 2}, tokenEOF, ""},
- })
-}
-
-func TestNestedArraysComment(t *testing.T) {
- toml := `
-someArray = [
-# does not work
-["entry1"]
-]`
- testFlow(t, toml, []token{
- {Position{2, 1}, tokenKey, "someArray"},
- {Position{2, 11}, tokenEqual, "="},
- {Position{2, 13}, tokenLeftBracket, "["},
- {Position{4, 1}, tokenLeftBracket, "["},
- {Position{4, 3}, tokenString, "entry1"},
- {Position{4, 10}, tokenRightBracket, "]"},
- {Position{5, 1}, tokenRightBracket, "]"},
- {Position{5, 2}, tokenEOF, ""},
- })
-}
-
-func TestKeyEqualArrayBools(t *testing.T) {
- testFlow(t, "foo = [true, false, true]", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenLeftBracket, "["},
- {Position{1, 8}, tokenTrue, "true"},
- {Position{1, 12}, tokenComma, ","},
- {Position{1, 14}, tokenFalse, "false"},
- {Position{1, 19}, tokenComma, ","},
- {Position{1, 21}, tokenTrue, "true"},
- {Position{1, 25}, tokenRightBracket, "]"},
- {Position{1, 26}, tokenEOF, ""},
- })
-}
-
-func TestKeyEqualArrayBoolsWithComments(t *testing.T) {
- testFlow(t, "foo = [true, false, true] # YEAH", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenLeftBracket, "["},
- {Position{1, 8}, tokenTrue, "true"},
- {Position{1, 12}, tokenComma, ","},
- {Position{1, 14}, tokenFalse, "false"},
- {Position{1, 19}, tokenComma, ","},
- {Position{1, 21}, tokenTrue, "true"},
- {Position{1, 25}, tokenRightBracket, "]"},
- {Position{1, 33}, tokenEOF, ""},
- })
-}
-
-func TestDateRegexp(t *testing.T) {
- if dateRegexp.FindString("1979-05-27T07:32:00Z") == "" {
- t.Error("basic lexing")
- }
- if dateRegexp.FindString("1979-05-27T00:32:00-07:00") == "" {
- t.Error("offset lexing")
- }
- if dateRegexp.FindString("1979-05-27T00:32:00.999999-07:00") == "" {
- t.Error("nano precision lexing")
- }
-}
-
-func TestKeyEqualDate(t *testing.T) {
- testFlow(t, "foo = 1979-05-27T07:32:00Z", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenDate, "1979-05-27T07:32:00Z"},
- {Position{1, 27}, tokenEOF, ""},
- })
- testFlow(t, "foo = 1979-05-27T00:32:00-07:00", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenDate, "1979-05-27T00:32:00-07:00"},
- {Position{1, 32}, tokenEOF, ""},
- })
- testFlow(t, "foo = 1979-05-27T00:32:00.999999-07:00", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenDate, "1979-05-27T00:32:00.999999-07:00"},
- {Position{1, 39}, tokenEOF, ""},
- })
-}
-
-func TestFloatEndingWithDot(t *testing.T) {
- testFlow(t, "foo = 42.", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenError, "float cannot end with a dot"},
- })
-}
-
-func TestFloatWithTwoDots(t *testing.T) {
- testFlow(t, "foo = 4.2.", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenError, "cannot have two dots in one float"},
- })
-}
-
-func TestFloatWithExponent1(t *testing.T) {
- testFlow(t, "a = 5e+22", []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 3}, tokenEqual, "="},
- {Position{1, 5}, tokenFloat, "5e+22"},
- {Position{1, 10}, tokenEOF, ""},
- })
-}
-
-func TestFloatWithExponent2(t *testing.T) {
- testFlow(t, "a = 5E+22", []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 3}, tokenEqual, "="},
- {Position{1, 5}, tokenFloat, "5E+22"},
- {Position{1, 10}, tokenEOF, ""},
- })
-}
-
-func TestFloatWithExponent3(t *testing.T) {
- testFlow(t, "a = -5e+22", []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 3}, tokenEqual, "="},
- {Position{1, 5}, tokenFloat, "-5e+22"},
- {Position{1, 11}, tokenEOF, ""},
- })
-}
-
-func TestFloatWithExponent4(t *testing.T) {
- testFlow(t, "a = -5e-22", []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 3}, tokenEqual, "="},
- {Position{1, 5}, tokenFloat, "-5e-22"},
- {Position{1, 11}, tokenEOF, ""},
- })
-}
-
-func TestFloatWithExponent5(t *testing.T) {
- testFlow(t, "a = 6.626e-34", []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 3}, tokenEqual, "="},
- {Position{1, 5}, tokenFloat, "6.626e-34"},
- {Position{1, 14}, tokenEOF, ""},
- })
-}
-
-func TestInvalidEsquapeSequence(t *testing.T) {
- testFlow(t, `foo = "\x"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenError, "invalid escape sequence: \\x"},
- })
-}
-
-func TestNestedArrays(t *testing.T) {
- testFlow(t, "foo = [[[]]]", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenLeftBracket, "["},
- {Position{1, 8}, tokenLeftBracket, "["},
- {Position{1, 9}, tokenLeftBracket, "["},
- {Position{1, 10}, tokenRightBracket, "]"},
- {Position{1, 11}, tokenRightBracket, "]"},
- {Position{1, 12}, tokenRightBracket, "]"},
- {Position{1, 13}, tokenEOF, ""},
- })
-}
-
-func TestKeyEqualNumber(t *testing.T) {
- testFlow(t, "foo = 42", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenInteger, "42"},
- {Position{1, 9}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = +42", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenInteger, "+42"},
- {Position{1, 10}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = -42", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenInteger, "-42"},
- {Position{1, 10}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = 4.2", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenFloat, "4.2"},
- {Position{1, 10}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = +4.2", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenFloat, "+4.2"},
- {Position{1, 11}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = -4.2", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenFloat, "-4.2"},
- {Position{1, 11}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = 1_000", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenInteger, "1_000"},
- {Position{1, 12}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = 5_349_221", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenInteger, "5_349_221"},
- {Position{1, 16}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = 1_2_3_4_5", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenInteger, "1_2_3_4_5"},
- {Position{1, 16}, tokenEOF, ""},
- })
-
- testFlow(t, "flt8 = 9_224_617.445_991_228_313", []token{
- {Position{1, 1}, tokenKey, "flt8"},
- {Position{1, 6}, tokenEqual, "="},
- {Position{1, 8}, tokenFloat, "9_224_617.445_991_228_313"},
- {Position{1, 33}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = +", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenError, "no digit in that number"},
- })
-}
-
-func TestMultiline(t *testing.T) {
- testFlow(t, "foo = 42\nbar=21", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 7}, tokenInteger, "42"},
- {Position{2, 1}, tokenKey, "bar"},
- {Position{2, 4}, tokenEqual, "="},
- {Position{2, 5}, tokenInteger, "21"},
- {Position{2, 7}, tokenEOF, ""},
- })
-}
-
-func TestKeyEqualStringUnicodeEscape(t *testing.T) {
- testFlow(t, `foo = "hello \u2665"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, "hello ♥"},
- {Position{1, 21}, tokenEOF, ""},
- })
- testFlow(t, `foo = "hello \U000003B4"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, "hello δ"},
- {Position{1, 25}, tokenEOF, ""},
- })
- testFlow(t, `foo = "\uabcd"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, "\uabcd"},
- {Position{1, 15}, tokenEOF, ""},
- })
- testFlow(t, `foo = "\uABCD"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, "\uABCD"},
- {Position{1, 15}, tokenEOF, ""},
- })
- testFlow(t, `foo = "\U000bcdef"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, "\U000bcdef"},
- {Position{1, 19}, tokenEOF, ""},
- })
- testFlow(t, `foo = "\U000BCDEF"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, "\U000BCDEF"},
- {Position{1, 19}, tokenEOF, ""},
- })
- testFlow(t, `foo = "\u2"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenError, "unfinished unicode escape"},
- })
- testFlow(t, `foo = "\U2"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenError, "unfinished unicode escape"},
- })
-}
-
-func TestKeyEqualStringNoEscape(t *testing.T) {
- testFlow(t, "foo = \"hello \u0002\"", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenError, "unescaped control character U+0002"},
- })
- testFlow(t, "foo = \"hello \u001F\"", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenError, "unescaped control character U+001F"},
- })
-}
-
-func TestLiteralString(t *testing.T) {
- testFlow(t, `foo = 'C:\Users\nodejs\templates'`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, `C:\Users\nodejs\templates`},
- {Position{1, 34}, tokenEOF, ""},
- })
- testFlow(t, `foo = '\\ServerX\admin$\system32\'`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, `\\ServerX\admin$\system32\`},
- {Position{1, 35}, tokenEOF, ""},
- })
- testFlow(t, `foo = 'Tom "Dubs" Preston-Werner'`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, `Tom "Dubs" Preston-Werner`},
- {Position{1, 34}, tokenEOF, ""},
- })
- testFlow(t, `foo = '<\i\c*\s*>'`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, `<\i\c*\s*>`},
- {Position{1, 19}, tokenEOF, ""},
- })
- testFlow(t, `foo = 'C:\Users\nodejs\unfinis`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenError, "unclosed string"},
- })
-}
-
-func TestMultilineLiteralString(t *testing.T) {
- testFlow(t, `foo = '''hello 'literal' world'''`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 10}, tokenString, `hello 'literal' world`},
- {Position{1, 34}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = '''\nhello\n'literal'\nworld'''", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{2, 1}, tokenString, "hello\n'literal'\nworld"},
- {Position{4, 9}, tokenEOF, ""},
- })
- testFlow(t, "foo = '''\r\nhello\r\n'literal'\r\nworld'''", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{2, 1}, tokenString, "hello\r\n'literal'\r\nworld"},
- {Position{4, 9}, tokenEOF, ""},
- })
-}
-
-func TestMultilineString(t *testing.T) {
- testFlow(t, `foo = """hello "literal" world"""`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 10}, tokenString, `hello "literal" world`},
- {Position{1, 34}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = \"\"\"\r\nhello\\\r\n\"literal\"\\\nworld\"\"\"", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{2, 1}, tokenString, "hello\"literal\"world"},
- {Position{4, 9}, tokenEOF, ""},
- })
-
- testFlow(t, "foo = \"\"\"\\\n \\\n \\\n hello\\\nmultiline\\\nworld\"\"\"", []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 10}, tokenString, "hellomultilineworld"},
- {Position{6, 9}, tokenEOF, ""},
- })
-
- testFlow(t, "key2 = \"\"\"\nThe quick brown \\\n\n\n fox jumps over \\\n the lazy dog.\"\"\"", []token{
- {Position{1, 1}, tokenKey, "key2"},
- {Position{1, 6}, tokenEqual, "="},
- {Position{2, 1}, tokenString, "The quick brown fox jumps over the lazy dog."},
- {Position{6, 21}, tokenEOF, ""},
- })
-
- testFlow(t, "key2 = \"\"\"\\\n The quick brown \\\n fox jumps over \\\n the lazy dog.\\\n \"\"\"", []token{
- {Position{1, 1}, tokenKey, "key2"},
- {Position{1, 6}, tokenEqual, "="},
- {Position{1, 11}, tokenString, "The quick brown fox jumps over the lazy dog."},
- {Position{5, 11}, tokenEOF, ""},
- })
-
- testFlow(t, `key2 = "Roses are red\nViolets are blue"`, []token{
- {Position{1, 1}, tokenKey, "key2"},
- {Position{1, 6}, tokenEqual, "="},
- {Position{1, 9}, tokenString, "Roses are red\nViolets are blue"},
- {Position{1, 41}, tokenEOF, ""},
- })
-
- testFlow(t, "key2 = \"\"\"\nRoses are red\nViolets are blue\"\"\"", []token{
- {Position{1, 1}, tokenKey, "key2"},
- {Position{1, 6}, tokenEqual, "="},
- {Position{2, 1}, tokenString, "Roses are red\nViolets are blue"},
- {Position{3, 20}, tokenEOF, ""},
- })
-}
-
-func TestUnicodeString(t *testing.T) {
- testFlow(t, `foo = "hello ♥ world"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, "hello ♥ world"},
- {Position{1, 22}, tokenEOF, ""},
- })
-}
-func TestEscapeInString(t *testing.T) {
- testFlow(t, `foo = "\b\f\/"`, []token{
- {Position{1, 1}, tokenKey, "foo"},
- {Position{1, 5}, tokenEqual, "="},
- {Position{1, 8}, tokenString, "\b\f/"},
- {Position{1, 15}, tokenEOF, ""},
- })
-}
-
-func TestKeyGroupArray(t *testing.T) {
- testFlow(t, "[[foo]]", []token{
- {Position{1, 1}, tokenDoubleLeftBracket, "[["},
- {Position{1, 3}, tokenKeyGroupArray, "foo"},
- {Position{1, 6}, tokenDoubleRightBracket, "]]"},
- {Position{1, 8}, tokenEOF, ""},
- })
-}
-
-func TestQuotedKey(t *testing.T) {
- testFlow(t, "\"a b\" = 42", []token{
- {Position{1, 1}, tokenKey, "a b"},
- {Position{1, 7}, tokenEqual, "="},
- {Position{1, 9}, tokenInteger, "42"},
- {Position{1, 11}, tokenEOF, ""},
- })
-}
-
-func TestKeyNewline(t *testing.T) {
- testFlow(t, "a\n= 4", []token{
- {Position{1, 1}, tokenError, "keys cannot contain new lines"},
- })
-}
-
-func TestInvalidFloat(t *testing.T) {
- testFlow(t, "a=7e1_", []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 2}, tokenEqual, "="},
- {Position{1, 3}, tokenFloat, "7e1_"},
- {Position{1, 7}, tokenEOF, ""},
- })
-}
-
-func TestLexUnknownRvalue(t *testing.T) {
- testFlow(t, `a = !b`, []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 3}, tokenEqual, "="},
- {Position{1, 5}, tokenError, "no value can start with !"},
- })
-
- testFlow(t, `a = \b`, []token{
- {Position{1, 1}, tokenKey, "a"},
- {Position{1, 3}, tokenEqual, "="},
- {Position{1, 5}, tokenError, `no value can start with \`},
- })
-}
-
-func BenchmarkLexer(b *testing.B) {
- sample := `title = "Hugo: A Fast and Flexible Website Generator"
-baseurl = "http://gohugo.io/"
-MetaDataFormat = "yaml"
-pluralizeListTitles = false
-
-[params]
- description = "Documentation of Hugo, a fast and flexible static site generator built with love by spf13, bep and friends in Go"
- author = "Steve Francia (spf13) and friends"
- release = "0.22-DEV"
-
-[[menu.main]]
- name = "Download Hugo"
- pre = "<i class='fa fa-download'></i>"
- url = "https://github.com/spf13/hugo/releases"
- weight = -200
-`
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
- lexToml([]byte(sample))
- }
-}
diff --git a/vendor/github.com/pelletier/go-toml/marshal_test.go b/vendor/github.com/pelletier/go-toml/marshal_test.go
deleted file mode 100644
index 291a80d2a..000000000
--- a/vendor/github.com/pelletier/go-toml/marshal_test.go
+++ /dev/null
@@ -1,806 +0,0 @@
-package toml
-
-import (
- "bytes"
- "encoding/json"
- "fmt"
- "io/ioutil"
- "reflect"
- "strings"
- "testing"
- "time"
-)
-
-type basicMarshalTestStruct struct {
- String string `toml:"string"`
- StringList []string `toml:"strlist"`
- Sub basicMarshalTestSubStruct `toml:"subdoc"`
- SubList []basicMarshalTestSubStruct `toml:"sublist"`
-}
-
-type basicMarshalTestSubStruct struct {
- String2 string
-}
-
-var basicTestData = basicMarshalTestStruct{
- String: "Hello",
- StringList: []string{"Howdy", "Hey There"},
- Sub: basicMarshalTestSubStruct{"One"},
- SubList: []basicMarshalTestSubStruct{{"Two"}, {"Three"}},
-}
-
-var basicTestToml = []byte(`string = "Hello"
-strlist = ["Howdy","Hey There"]
-
-[subdoc]
- String2 = "One"
-
-[[sublist]]
- String2 = "Two"
-
-[[sublist]]
- String2 = "Three"
-`)
-
-func TestBasicMarshal(t *testing.T) {
- result, err := Marshal(basicTestData)
- if err != nil {
- t.Fatal(err)
- }
- expected := basicTestToml
- if !bytes.Equal(result, expected) {
- t.Errorf("Bad marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result)
- }
-}
-
-func TestBasicUnmarshal(t *testing.T) {
- result := basicMarshalTestStruct{}
- err := Unmarshal(basicTestToml, &result)
- expected := basicTestData
- if err != nil {
- t.Fatal(err)
- }
- if !reflect.DeepEqual(result, expected) {
- t.Errorf("Bad unmarshal: expected %v, got %v", expected, result)
- }
-}
-
-type testDoc struct {
- Title string `toml:"title"`
- Basics testDocBasics `toml:"basic"`
- BasicLists testDocBasicLists `toml:"basic_lists"`
- BasicMap map[string]string `toml:"basic_map"`
- Subdocs testDocSubs `toml:"subdoc"`
- SubDocList []testSubDoc `toml:"subdoclist"`
- SubDocPtrs []*testSubDoc `toml:"subdocptrs"`
- err int `toml:"shouldntBeHere"`
- unexported int `toml:"shouldntBeHere"`
- Unexported2 int `toml:"-"`
-}
-
-type testDocBasics struct {
- Bool bool `toml:"bool"`
- Date time.Time `toml:"date"`
- Float float32 `toml:"float"`
- Int int `toml:"int"`
- Uint uint `toml:"uint"`
- String *string `toml:"string"`
- unexported int `toml:"shouldntBeHere"`
-}
-
-type testDocBasicLists struct {
- Bools []bool `toml:"bools"`
- Dates []time.Time `toml:"dates"`
- Floats []*float32 `toml:"floats"`
- Ints []int `toml:"ints"`
- Strings []string `toml:"strings"`
- UInts []uint `toml:"uints"`
-}
-
-type testDocSubs struct {
- First testSubDoc `toml:"first"`
- Second *testSubDoc `toml:"second"`
-}
-
-type testSubDoc struct {
- Name string `toml:"name"`
- unexported int `toml:"shouldntBeHere"`
-}
-
-var biteMe = "Bite me"
-var float1 float32 = 12.3
-var float2 float32 = 45.6
-var float3 float32 = 78.9
-var subdoc = testSubDoc{"Second", 0}
-
-var docData = testDoc{
- Title: "TOML Marshal Testing",
- unexported: 0,
- Unexported2: 0,
- Basics: testDocBasics{
- Bool: true,
- Date: time.Date(1979, 5, 27, 7, 32, 0, 0, time.UTC),
- Float: 123.4,
- Int: 5000,
- Uint: 5001,
- String: &biteMe,
- unexported: 0,
- },
- BasicLists: testDocBasicLists{
- Bools: []bool{true, false, true},
- Dates: []time.Time{
- time.Date(1979, 5, 27, 7, 32, 0, 0, time.UTC),
- time.Date(1980, 5, 27, 7, 32, 0, 0, time.UTC),
- },
- Floats: []*float32{&float1, &float2, &float3},
- Ints: []int{8001, 8001, 8002},
- Strings: []string{"One", "Two", "Three"},
- UInts: []uint{5002, 5003},
- },
- BasicMap: map[string]string{
- "one": "one",
- "two": "two",
- },
- Subdocs: testDocSubs{
- First: testSubDoc{"First", 0},
- Second: &subdoc,
- },
- SubDocList: []testSubDoc{
- {"List.First", 0},
- {"List.Second", 0},
- },
- SubDocPtrs: []*testSubDoc{&subdoc},
-}
-
-func TestDocMarshal(t *testing.T) {
- result, err := Marshal(docData)
- if err != nil {
- t.Fatal(err)
- }
- expected, _ := ioutil.ReadFile("marshal_test.toml")
- if !bytes.Equal(result, expected) {
- t.Errorf("Bad marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result)
- }
-}
-
-func TestDocUnmarshal(t *testing.T) {
- result := testDoc{}
- tomlData, _ := ioutil.ReadFile("marshal_test.toml")
- err := Unmarshal(tomlData, &result)
- expected := docData
- if err != nil {
- t.Fatal(err)
- }
- if !reflect.DeepEqual(result, expected) {
- resStr, _ := json.MarshalIndent(result, "", " ")
- expStr, _ := json.MarshalIndent(expected, "", " ")
- t.Errorf("Bad unmarshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expStr, resStr)
- }
-}
-
-func TestDocPartialUnmarshal(t *testing.T) {
- result := testDocSubs{}
-
- tree, _ := LoadFile("marshal_test.toml")
- subTree := tree.Get("subdoc").(*Tree)
- err := subTree.Unmarshal(&result)
- expected := docData.Subdocs
- if err != nil {
- t.Fatal(err)
- }
- if !reflect.DeepEqual(result, expected) {
- resStr, _ := json.MarshalIndent(result, "", " ")
- expStr, _ := json.MarshalIndent(expected, "", " ")
- t.Errorf("Bad partial unmartial: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expStr, resStr)
- }
-}
-
-type tomlTypeCheckTest struct {
- name string
- item interface{}
- typ int //0=primitive, 1=otherslice, 2=treeslice, 3=tree
-}
-
-func TestTypeChecks(t *testing.T) {
- tests := []tomlTypeCheckTest{
- {"integer", 2, 0},
- {"time", time.Date(2015, 1, 1, 0, 0, 0, 0, time.UTC), 0},
- {"stringlist", []string{"hello", "hi"}, 1},
- {"timelist", []time.Time{time.Date(2015, 1, 1, 0, 0, 0, 0, time.UTC)}, 1},
- {"objectlist", []tomlTypeCheckTest{}, 2},
- {"object", tomlTypeCheckTest{}, 3},
- }
-
- for _, test := range tests {
- expected := []bool{false, false, false, false}
- expected[test.typ] = true
- result := []bool{
- isPrimitive(reflect.TypeOf(test.item)),
- isOtherSlice(reflect.TypeOf(test.item)),
- isTreeSlice(reflect.TypeOf(test.item)),
- isTree(reflect.TypeOf(test.item)),
- }
- if !reflect.DeepEqual(expected, result) {
- t.Errorf("Bad type check on %q: expected %v, got %v", test.name, expected, result)
- }
- }
-}
-
-type unexportedMarshalTestStruct struct {
- String string `toml:"string"`
- StringList []string `toml:"strlist"`
- Sub basicMarshalTestSubStruct `toml:"subdoc"`
- SubList []basicMarshalTestSubStruct `toml:"sublist"`
- unexported int `toml:"shouldntBeHere"`
- Unexported2 int `toml:"-"`
-}
-
-var unexportedTestData = unexportedMarshalTestStruct{
- String: "Hello",
- StringList: []string{"Howdy", "Hey There"},
- Sub: basicMarshalTestSubStruct{"One"},
- SubList: []basicMarshalTestSubStruct{{"Two"}, {"Three"}},
- unexported: 0,
- Unexported2: 0,
-}
-
-var unexportedTestToml = []byte(`string = "Hello"
-strlist = ["Howdy","Hey There"]
-unexported = 1
-shouldntBeHere = 2
-
-[subdoc]
- String2 = "One"
-
-[[sublist]]
- String2 = "Two"
-
-[[sublist]]
- String2 = "Three"
-`)
-
-func TestUnexportedUnmarshal(t *testing.T) {
- result := unexportedMarshalTestStruct{}
- err := Unmarshal(unexportedTestToml, &result)
- expected := unexportedTestData
- if err != nil {
- t.Fatal(err)
- }
- if !reflect.DeepEqual(result, expected) {
- t.Errorf("Bad unexported unmarshal: expected %v, got %v", expected, result)
- }
-}
-
-type errStruct struct {
- Bool bool `toml:"bool"`
- Date time.Time `toml:"date"`
- Float float64 `toml:"float"`
- Int int16 `toml:"int"`
- String *string `toml:"string"`
-}
-
-var errTomls = []string{
- "bool = truly\ndate = 1979-05-27T07:32:00Z\nfloat = 123.4\nint = 5000\nstring = \"Bite me\"",
- "bool = true\ndate = 1979-05-27T07:3200Z\nfloat = 123.4\nint = 5000\nstring = \"Bite me\"",
- "bool = true\ndate = 1979-05-27T07:32:00Z\nfloat = 123a4\nint = 5000\nstring = \"Bite me\"",
- "bool = true\ndate = 1979-05-27T07:32:00Z\nfloat = 123.4\nint = j000\nstring = \"Bite me\"",
- "bool = true\ndate = 1979-05-27T07:32:00Z\nfloat = 123.4\nint = 5000\nstring = Bite me",
- "bool = true\ndate = 1979-05-27T07:32:00Z\nfloat = 123.4\nint = 5000\nstring = Bite me",
- "bool = 1\ndate = 1979-05-27T07:32:00Z\nfloat = 123.4\nint = 5000\nstring = \"Bite me\"",
- "bool = true\ndate = 1\nfloat = 123.4\nint = 5000\nstring = \"Bite me\"",
- "bool = true\ndate = 1979-05-27T07:32:00Z\n\"sorry\"\nint = 5000\nstring = \"Bite me\"",
- "bool = true\ndate = 1979-05-27T07:32:00Z\nfloat = 123.4\nint = \"sorry\"\nstring = \"Bite me\"",
- "bool = true\ndate = 1979-05-27T07:32:00Z\nfloat = 123.4\nint = 5000\nstring = 1",
-}
-
-type mapErr struct {
- Vals map[string]float64
-}
-
-type intErr struct {
- Int1 int
- Int2 int8
- Int3 int16
- Int4 int32
- Int5 int64
- UInt1 uint
- UInt2 uint8
- UInt3 uint16
- UInt4 uint32
- UInt5 uint64
- Flt1 float32
- Flt2 float64
-}
-
-var intErrTomls = []string{
- "Int1 = []\nInt2 = 2\nInt3 = 3\nInt4 = 4\nInt5 = 5\nUInt1 = 1\nUInt2 = 2\nUInt3 = 3\nUInt4 = 4\nUInt5 = 5\nFlt1 = 1.0\nFlt2 = 2.0",
- "Int1 = 1\nInt2 = []\nInt3 = 3\nInt4 = 4\nInt5 = 5\nUInt1 = 1\nUInt2 = 2\nUInt3 = 3\nUInt4 = 4\nUInt5 = 5\nFlt1 = 1.0\nFlt2 = 2.0",
- "Int1 = 1\nInt2 = 2\nInt3 = []\nInt4 = 4\nInt5 = 5\nUInt1 = 1\nUInt2 = 2\nUInt3 = 3\nUInt4 = 4\nUInt5 = 5\nFlt1 = 1.0\nFlt2 = 2.0",
- "Int1 = 1\nInt2 = 2\nInt3 = 3\nInt4 = []\nInt5 = 5\nUInt1 = 1\nUInt2 = 2\nUInt3 = 3\nUInt4 = 4\nUInt5 = 5\nFlt1 = 1.0\nFlt2 = 2.0",
- "Int1 = 1\nInt2 = 2\nInt3 = 3\nInt4 = 4\nInt5 = []\nUInt1 = 1\nUInt2 = 2\nUInt3 = 3\nUInt4 = 4\nUInt5 = 5\nFlt1 = 1.0\nFlt2 = 2.0",
- "Int1 = 1\nInt2 = 2\nInt3 = 3\nInt4 = 4\nInt5 = 5\nUInt1 = []\nUInt2 = 2\nUInt3 = 3\nUInt4 = 4\nUInt5 = 5\nFlt1 = 1.0\nFlt2 = 2.0",
- "Int1 = 1\nInt2 = 2\nInt3 = 3\nInt4 = 4\nInt5 = 5\nUInt1 = 1\nUInt2 = []\nUInt3 = 3\nUInt4 = 4\nUInt5 = 5\nFlt1 = 1.0\nFlt2 = 2.0",
- "Int1 = 1\nInt2 = 2\nInt3 = 3\nInt4 = 4\nInt5 = 5\nUInt1 = 1\nUInt2 = 2\nUInt3 = []\nUInt4 = 4\nUInt5 = 5\nFlt1 = 1.0\nFlt2 = 2.0",
- "Int1 = 1\nInt2 = 2\nInt3 = 3\nInt4 = 4\nInt5 = 5\nUInt1 = 1\nUInt2 = 2\nUInt3 = 3\nUInt4 = []\nUInt5 = 5\nFlt1 = 1.0\nFlt2 = 2.0",
- "Int1 = 1\nInt2 = 2\nInt3 = 3\nInt4 = 4\nInt5 = 5\nUInt1 = 1\nUInt2 = 2\nUInt3 = 3\nUInt4 = 4\nUInt5 = []\nFlt1 = 1.0\nFlt2 = 2.0",
- "Int1 = 1\nInt2 = 2\nInt3 = 3\nInt4 = 4\nInt5 = 5\nUInt1 = 1\nUInt2 = 2\nUInt3 = 3\nUInt4 = 4\nUInt5 = 5\nFlt1 = []\nFlt2 = 2.0",
- "Int1 = 1\nInt2 = 2\nInt3 = 3\nInt4 = 4\nInt5 = 5\nUInt1 = 1\nUInt2 = 2\nUInt3 = 3\nUInt4 = 4\nUInt5 = 5\nFlt1 = 1.0\nFlt2 = []",
-}
-
-func TestErrUnmarshal(t *testing.T) {
- for ind, toml := range errTomls {
- result := errStruct{}
- err := Unmarshal([]byte(toml), &result)
- if err == nil {
- t.Errorf("Expected err from case %d\n", ind)
- }
- }
- result2 := mapErr{}
- err := Unmarshal([]byte("[Vals]\nfred=\"1.2\""), &result2)
- if err == nil {
- t.Errorf("Expected err from map")
- }
- for ind, toml := range intErrTomls {
- result3 := intErr{}
- err := Unmarshal([]byte(toml), &result3)
- if err == nil {
- t.Errorf("Expected int err from case %d\n", ind)
- }
- }
-}
-
-type emptyMarshalTestStruct struct {
- Title string `toml:"title"`
- Bool bool `toml:"bool"`
- Int int `toml:"int"`
- String string `toml:"string"`
- StringList []string `toml:"stringlist"`
- Ptr *basicMarshalTestStruct `toml:"ptr"`
- Map map[string]string `toml:"map"`
-}
-
-var emptyTestData = emptyMarshalTestStruct{
- Title: "Placeholder",
- Bool: false,
- Int: 0,
- String: "",
- StringList: []string{},
- Ptr: nil,
- Map: map[string]string{},
-}
-
-var emptyTestToml = []byte(`bool = false
-int = 0
-string = ""
-stringlist = []
-title = "Placeholder"
-
-[map]
-`)
-
-type emptyMarshalTestStruct2 struct {
- Title string `toml:"title"`
- Bool bool `toml:"bool,omitempty"`
- Int int `toml:"int, omitempty"`
- String string `toml:"string,omitempty "`
- StringList []string `toml:"stringlist,omitempty"`
- Ptr *basicMarshalTestStruct `toml:"ptr,omitempty"`
- Map map[string]string `toml:"map,omitempty"`
-}
-
-var emptyTestData2 = emptyMarshalTestStruct2{
- Title: "Placeholder",
- Bool: false,
- Int: 0,
- String: "",
- StringList: []string{},
- Ptr: nil,
- Map: map[string]string{},
-}
-
-var emptyTestToml2 = []byte(`title = "Placeholder"
-`)
-
-func TestEmptyMarshal(t *testing.T) {
- result, err := Marshal(emptyTestData)
- if err != nil {
- t.Fatal(err)
- }
- expected := emptyTestToml
- if !bytes.Equal(result, expected) {
- t.Errorf("Bad empty marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result)
- }
-}
-
-func TestEmptyMarshalOmit(t *testing.T) {
- result, err := Marshal(emptyTestData2)
- if err != nil {
- t.Fatal(err)
- }
- expected := emptyTestToml2
- if !bytes.Equal(result, expected) {
- t.Errorf("Bad empty omit marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result)
- }
-}
-
-func TestEmptyUnmarshal(t *testing.T) {
- result := emptyMarshalTestStruct{}
- err := Unmarshal(emptyTestToml, &result)
- expected := emptyTestData
- if err != nil {
- t.Fatal(err)
- }
- if !reflect.DeepEqual(result, expected) {
- t.Errorf("Bad empty unmarshal: expected %v, got %v", expected, result)
- }
-}
-
-func TestEmptyUnmarshalOmit(t *testing.T) {
- result := emptyMarshalTestStruct2{}
- err := Unmarshal(emptyTestToml, &result)
- expected := emptyTestData2
- if err != nil {
- t.Fatal(err)
- }
- if !reflect.DeepEqual(result, expected) {
- t.Errorf("Bad empty omit unmarshal: expected %v, got %v", expected, result)
- }
-}
-
-type pointerMarshalTestStruct struct {
- Str *string
- List *[]string
- ListPtr *[]*string
- Map *map[string]string
- MapPtr *map[string]*string
- EmptyStr *string
- EmptyList *[]string
- EmptyMap *map[string]string
- DblPtr *[]*[]*string
-}
-
-var pointerStr = "Hello"
-var pointerList = []string{"Hello back"}
-var pointerListPtr = []*string{&pointerStr}
-var pointerMap = map[string]string{"response": "Goodbye"}
-var pointerMapPtr = map[string]*string{"alternate": &pointerStr}
-var pointerTestData = pointerMarshalTestStruct{
- Str: &pointerStr,
- List: &pointerList,
- ListPtr: &pointerListPtr,
- Map: &pointerMap,
- MapPtr: &pointerMapPtr,
- EmptyStr: nil,
- EmptyList: nil,
- EmptyMap: nil,
-}
-
-var pointerTestToml = []byte(`List = ["Hello back"]
-ListPtr = ["Hello"]
-Str = "Hello"
-
-[Map]
- response = "Goodbye"
-
-[MapPtr]
- alternate = "Hello"
-`)
-
-func TestPointerMarshal(t *testing.T) {
- result, err := Marshal(pointerTestData)
- if err != nil {
- t.Fatal(err)
- }
- expected := pointerTestToml
- if !bytes.Equal(result, expected) {
- t.Errorf("Bad pointer marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result)
- }
-}
-
-func TestPointerUnmarshal(t *testing.T) {
- result := pointerMarshalTestStruct{}
- err := Unmarshal(pointerTestToml, &result)
- expected := pointerTestData
- if err != nil {
- t.Fatal(err)
- }
- if !reflect.DeepEqual(result, expected) {
- t.Errorf("Bad pointer unmarshal: expected %v, got %v", expected, result)
- }
-}
-
-func TestUnmarshalTypeMismatch(t *testing.T) {
- result := pointerMarshalTestStruct{}
- err := Unmarshal([]byte("List = 123"), &result)
- if !strings.HasPrefix(err.Error(), "(1, 1): Can't convert 123(int64) to []string(slice)") {
- t.Errorf("Type mismatch must be reported: got %v", err.Error())
- }
-}
-
-type nestedMarshalTestStruct struct {
- String [][]string
- //Struct [][]basicMarshalTestSubStruct
- StringPtr *[]*[]*string
- // StructPtr *[]*[]*basicMarshalTestSubStruct
-}
-
-var str1 = "Three"
-var str2 = "Four"
-var strPtr = []*string{&str1, &str2}
-var strPtr2 = []*[]*string{&strPtr}
-
-var nestedTestData = nestedMarshalTestStruct{
- String: [][]string{{"Five", "Six"}, {"One", "Two"}},
- StringPtr: &strPtr2,
-}
-
-var nestedTestToml = []byte(`String = [["Five","Six"],["One","Two"]]
-StringPtr = [["Three","Four"]]
-`)
-
-func TestNestedMarshal(t *testing.T) {
- result, err := Marshal(nestedTestData)
- if err != nil {
- t.Fatal(err)
- }
- expected := nestedTestToml
- if !bytes.Equal(result, expected) {
- t.Errorf("Bad nested marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result)
- }
-}
-
-func TestNestedUnmarshal(t *testing.T) {
- result := nestedMarshalTestStruct{}
- err := Unmarshal(nestedTestToml, &result)
- expected := nestedTestData
- if err != nil {
- t.Fatal(err)
- }
- if !reflect.DeepEqual(result, expected) {
- t.Errorf("Bad nested unmarshal: expected %v, got %v", expected, result)
- }
-}
-
-type customMarshalerParent struct {
- Self customMarshaler `toml:"me"`
- Friends []customMarshaler `toml:"friends"`
-}
-
-type customMarshaler struct {
-	FirstName string
- LastName string
-}
-
-func (c customMarshaler) MarshalTOML() ([]byte, error) {
-	fullName := fmt.Sprintf("%s %s", c.FirstName, c.LastName)
- return []byte(fullName), nil
-}
-
-var customMarshalerData = customMarshaler{FirstName: "Sally", LastName: "Fields"}
-var customMarshalerToml = []byte(`Sally Fields`)
-var nestedCustomMarshalerData = customMarshalerParent{
-	Self:    customMarshaler{FirstName: "Maiku", LastName: "Suteda"},
- Friends: []customMarshaler{customMarshalerData},
-}
-var nestedCustomMarshalerToml = []byte(`friends = ["Sally Fields"]
-me = "Maiku Suteda"
-`)
-
-func TestCustomMarshaler(t *testing.T) {
- result, err := Marshal(customMarshalerData)
- if err != nil {
- t.Fatal(err)
- }
- expected := customMarshalerToml
- if !bytes.Equal(result, expected) {
- t.Errorf("Bad custom marshaler: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result)
- }
-}
-
-func TestNestedCustomMarshaler(t *testing.T) {
- result, err := Marshal(nestedCustomMarshalerData)
- if err != nil {
- t.Fatal(err)
- }
- expected := nestedCustomMarshalerToml
- if !bytes.Equal(result, expected) {
- t.Errorf("Bad nested custom marshaler: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result)
- }
-}
-
-var commentTestToml = []byte(`
-# it's a comment on type
-[postgres]
- # isCommented = "dvalue"
- noComment = "cvalue"
-
- # A comment on AttrB with a
- # break line
- password = "bvalue"
-
- # A comment on AttrA
- user = "avalue"
-
- [[postgres.My]]
-
- # a comment on my on typeC
- My = "Foo"
-
- [[postgres.My]]
-
- # a comment on my on typeC
- My = "Baar"
-`)
-
-func TestMarshalComment(t *testing.T) {
- type TypeC struct {
- My string `comment:"a comment on my on typeC"`
- }
- type TypeB struct {
- AttrA string `toml:"user" comment:"A comment on AttrA"`
- AttrB string `toml:"password" comment:"A comment on AttrB with a\n break line"`
- AttrC string `toml:"noComment"`
- AttrD string `toml:"isCommented" commented:"true"`
- My []TypeC
- }
- type TypeA struct {
- TypeB TypeB `toml:"postgres" comment:"it's a comment on type"`
- }
-
- ta := []TypeC{{My: "Foo"}, {My: "Baar"}}
- config := TypeA{TypeB{AttrA: "avalue", AttrB: "bvalue", AttrC: "cvalue", AttrD: "dvalue", My: ta}}
- result, err := Marshal(config)
- if err != nil {
- t.Fatal(err)
- }
- expected := commentTestToml
- if !bytes.Equal(result, expected) {
- t.Errorf("Bad marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result)
- }
-}
-
-type mapsTestStruct struct {
- Simple map[string]string
- Paths map[string]string
- Other map[string]float64
- X struct {
- Y struct {
- Z map[string]bool
- }
- }
-}
-
-var mapsTestData = mapsTestStruct{
- Simple: map[string]string{
- "one plus one": "two",
- "next": "three",
- },
- Paths: map[string]string{
- "/this/is/a/path": "/this/is/also/a/path",
- "/heloo.txt": "/tmp/lololo.txt",
- },
- Other: map[string]float64{
- "testing": 3.9999,
- },
- X: struct{ Y struct{ Z map[string]bool } }{
- Y: struct{ Z map[string]bool }{
- Z: map[string]bool{
- "is.Nested": true,
- },
- },
- },
-}
-var mapsTestToml = []byte(`
-[Other]
- "testing" = 3.9999
-
-[Paths]
- "/heloo.txt" = "/tmp/lololo.txt"
- "/this/is/a/path" = "/this/is/also/a/path"
-
-[Simple]
- "next" = "three"
- "one plus one" = "two"
-
-[X]
-
- [X.Y]
-
- [X.Y.Z]
- "is.Nested" = true
-`)
-
-func TestEncodeQuotedMapKeys(t *testing.T) {
- var buf bytes.Buffer
- if err := NewEncoder(&buf).QuoteMapKeys(true).Encode(mapsTestData); err != nil {
- t.Fatal(err)
- }
- result := buf.Bytes()
- expected := mapsTestToml
- if !bytes.Equal(result, expected) {
- t.Errorf("Bad maps marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, result)
- }
-}
-
-func TestDecodeQuotedMapKeys(t *testing.T) {
- result := mapsTestStruct{}
- err := NewDecoder(bytes.NewBuffer(mapsTestToml)).Decode(&result)
- expected := mapsTestData
- if err != nil {
- t.Fatal(err)
- }
- if !reflect.DeepEqual(result, expected) {
- t.Errorf("Bad maps unmarshal: expected %v, got %v", expected, result)
- }
-}
-
-type structArrayNoTag struct {
- A struct {
- B []int64
- C []int64
- }
-}
-
-func TestMarshalArray(t *testing.T) {
- expected := []byte(`
-[A]
- B = [1,2,3]
- C = [1]
-`)
-
- m := structArrayNoTag{
- A: struct {
- B []int64
- C []int64
- }{
- B: []int64{1, 2, 3},
- C: []int64{1},
- },
- }
-
- b, err := Marshal(m)
-
- if err != nil {
- t.Fatal(err)
- }
-
- if !bytes.Equal(b, expected) {
- t.Errorf("Bad arrays marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, b)
- }
-}
-
-func TestMarshalArrayOnePerLine(t *testing.T) {
- expected := []byte(`
-[A]
- B = [
- 1,
- 2,
- 3
- ]
- C = [1]
-`)
-
- m := structArrayNoTag{
- A: struct {
- B []int64
- C []int64
- }{
- B: []int64{1, 2, 3},
- C: []int64{1},
- },
- }
-
- var buf bytes.Buffer
- encoder := NewEncoder(&buf).ArraysWithOneElementPerLine(true)
- err := encoder.Encode(m)
-
- if err != nil {
- t.Fatal(err)
- }
-
- b := buf.Bytes()
-
- if !bytes.Equal(b, expected) {
- t.Errorf("Bad arrays marshal: expected\n-----\n%s\n-----\ngot\n-----\n%s\n-----\n", expected, b)
- }
-}
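
For context, the encoder options exercised by TestEncodeQuotedMapKeys and TestMarshalArrayOnePerLine above chain on a single Encoder. A minimal, self-contained sketch (the config struct and values here are illustrative, not part of the deleted test file):

package main

import (
	"bytes"
	"fmt"

	"github.com/pelletier/go-toml"
)

// config mirrors the shapes used by the tests above: a map whose keys need
// quoting and a slice that can be printed one element per line.
type config struct {
	Paths map[string]string
	Ports []int64
}

func main() {
	cfg := config{
		Paths: map[string]string{"/heloo.txt": "/tmp/lololo.txt"},
		Ports: []int64{8001, 8002, 8003},
	}
	var buf bytes.Buffer
	enc := toml.NewEncoder(&buf).
		QuoteMapKeys(true).               // emit "/heloo.txt" = "..." rather than a bare key
		ArraysWithOneElementPerLine(true) // break Ports across several lines
	if err := enc.Encode(cfg); err != nil {
		fmt.Println("encode error:", err)
		return
	}
	fmt.Print(buf.String())
}
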
diff --git a/vendor/github.com/pelletier/go-toml/parser_test.go b/vendor/github.com/pelletier/go-toml/parser_test.go
deleted file mode 100644
index ca29c442e..000000000
--- a/vendor/github.com/pelletier/go-toml/parser_test.go
+++ /dev/null
@@ -1,899 +0,0 @@
-package toml
-
-import (
- "fmt"
- "math"
- "reflect"
- "testing"
- "time"
-
- "github.com/davecgh/go-spew/spew"
-)
-
-func assertSubTree(t *testing.T, path []string, tree *Tree, err error, ref map[string]interface{}) {
- if err != nil {
- t.Error("Non-nil error:", err.Error())
- return
- }
- for k, v := range ref {
- nextPath := append(path, k)
- t.Log("asserting path", nextPath)
-		// NOTE: directly access the key instead of resolving it by path
- // NOTE: see TestSpecialKV
- switch node := tree.GetPath([]string{k}).(type) {
- case []*Tree:
- t.Log("\tcomparing key", nextPath, "by array iteration")
- for idx, item := range node {
- assertSubTree(t, nextPath, item, err, v.([]map[string]interface{})[idx])
- }
- case *Tree:
-			t.Log("\tcomparing key", nextPath, "by subtree assertion")
- assertSubTree(t, nextPath, node, err, v.(map[string]interface{}))
- default:
- t.Log("\tcomparing key", nextPath, "by string representation because it's of type", reflect.TypeOf(node))
- if fmt.Sprintf("%v", node) != fmt.Sprintf("%v", v) {
- t.Errorf("was expecting %v at %v but got %v", v, k, node)
- }
- }
- }
-}
-
-func assertTree(t *testing.T, tree *Tree, err error, ref map[string]interface{}) {
- t.Log("Asserting tree:\n", spew.Sdump(tree))
- assertSubTree(t, []string{}, tree, err, ref)
- t.Log("Finished tree assertion.")
-}
-
-func TestCreateSubTree(t *testing.T) {
- tree := newTree()
- tree.createSubTree([]string{"a", "b", "c"}, Position{})
- tree.Set("a.b.c", 42)
- if tree.Get("a.b.c") != 42 {
- t.Fail()
- }
-}
-
-func TestSimpleKV(t *testing.T) {
- tree, err := Load("a = 42")
- assertTree(t, tree, err, map[string]interface{}{
- "a": int64(42),
- })
-
- tree, _ = Load("a = 42\nb = 21")
- assertTree(t, tree, err, map[string]interface{}{
- "a": int64(42),
- "b": int64(21),
- })
-}
-
-func TestNumberInKey(t *testing.T) {
- tree, err := Load("hello2 = 42")
- assertTree(t, tree, err, map[string]interface{}{
- "hello2": int64(42),
- })
-}
-
-func TestIncorrectKeyExtraSquareBracket(t *testing.T) {
- _, err := Load(`[a]b]
-zyx = 42`)
- if err == nil {
- t.Error("Error should have been returned.")
- }
- if err.Error() != "(1, 4): unexpected token" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestSimpleNumbers(t *testing.T) {
- tree, err := Load("a = +42\nb = -21\nc = +4.2\nd = -2.1")
- assertTree(t, tree, err, map[string]interface{}{
- "a": int64(42),
- "b": int64(-21),
- "c": float64(4.2),
- "d": float64(-2.1),
- })
-}
-
-func TestSpecialFloats(t *testing.T) {
- tree, err := Load(`
-normalinf = inf
-plusinf = +inf
-minusinf = -inf
-normalnan = nan
-plusnan = +nan
-minusnan = -nan
-`)
- assertTree(t, tree, err, map[string]interface{}{
- "normalinf": math.Inf(1),
- "plusinf": math.Inf(1),
- "minusinf": math.Inf(-1),
- "normalnan": math.NaN(),
- "plusnan": math.NaN(),
- "minusnan": math.NaN(),
- })
-}
-
-func TestHexIntegers(t *testing.T) {
- tree, err := Load(`a = 0xDEADBEEF`)
- assertTree(t, tree, err, map[string]interface{}{"a": int64(3735928559)})
-
- tree, err = Load(`a = 0xdeadbeef`)
- assertTree(t, tree, err, map[string]interface{}{"a": int64(3735928559)})
-
- tree, err = Load(`a = 0xdead_beef`)
- assertTree(t, tree, err, map[string]interface{}{"a": int64(3735928559)})
-
- _, err = Load(`a = 0x_1`)
- if err.Error() != "(1, 5): invalid use of _ in hex number" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestOctIntegers(t *testing.T) {
- tree, err := Load(`a = 0o01234567`)
- assertTree(t, tree, err, map[string]interface{}{"a": int64(342391)})
-
- tree, err = Load(`a = 0o755`)
- assertTree(t, tree, err, map[string]interface{}{"a": int64(493)})
-
- _, err = Load(`a = 0o_1`)
- if err.Error() != "(1, 5): invalid use of _ in number" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestBinIntegers(t *testing.T) {
- tree, err := Load(`a = 0b11010110`)
- assertTree(t, tree, err, map[string]interface{}{"a": int64(214)})
-
- _, err = Load(`a = 0b_1`)
- if err.Error() != "(1, 5): invalid use of _ in number" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestBadIntegerBase(t *testing.T) {
- _, err := Load(`a = 0k1`)
- if err.Error() != "(1, 5): unknown number base: k. possible options are x (hex) o (octal) b (binary)" {
-		t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestIntegerNoDigit(t *testing.T) {
- _, err := Load(`a = 0b`)
- if err.Error() != "(1, 5): number needs at least one digit" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestNumbersWithUnderscores(t *testing.T) {
- tree, err := Load("a = 1_000")
- assertTree(t, tree, err, map[string]interface{}{
- "a": int64(1000),
- })
-
- tree, err = Load("a = 5_349_221")
- assertTree(t, tree, err, map[string]interface{}{
- "a": int64(5349221),
- })
-
- tree, err = Load("a = 1_2_3_4_5")
- assertTree(t, tree, err, map[string]interface{}{
- "a": int64(12345),
- })
-
- tree, err = Load("flt8 = 9_224_617.445_991_228_313")
- assertTree(t, tree, err, map[string]interface{}{
- "flt8": float64(9224617.445991228313),
- })
-
- tree, err = Load("flt9 = 1e1_00")
- assertTree(t, tree, err, map[string]interface{}{
- "flt9": float64(1e100),
- })
-}
-
-func TestFloatsWithExponents(t *testing.T) {
- tree, err := Load("a = 5e+22\nb = 5E+22\nc = -5e+22\nd = -5e-22\ne = 6.626e-34")
- assertTree(t, tree, err, map[string]interface{}{
- "a": float64(5e+22),
- "b": float64(5E+22),
- "c": float64(-5e+22),
- "d": float64(-5e-22),
- "e": float64(6.626e-34),
- })
-}
-
-func TestSimpleDate(t *testing.T) {
- tree, err := Load("a = 1979-05-27T07:32:00Z")
- assertTree(t, tree, err, map[string]interface{}{
- "a": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
- })
-}
-
-func TestDateOffset(t *testing.T) {
- tree, err := Load("a = 1979-05-27T00:32:00-07:00")
- assertTree(t, tree, err, map[string]interface{}{
- "a": time.Date(1979, time.May, 27, 0, 32, 0, 0, time.FixedZone("", -7*60*60)),
- })
-}
-
-func TestDateNano(t *testing.T) {
- tree, err := Load("a = 1979-05-27T00:32:00.999999999-07:00")
- assertTree(t, tree, err, map[string]interface{}{
- "a": time.Date(1979, time.May, 27, 0, 32, 0, 999999999, time.FixedZone("", -7*60*60)),
- })
-}
-
-func TestSimpleString(t *testing.T) {
- tree, err := Load("a = \"hello world\"")
- assertTree(t, tree, err, map[string]interface{}{
- "a": "hello world",
- })
-}
-
-func TestSpaceKey(t *testing.T) {
- tree, err := Load("\"a b\" = \"hello world\"")
- assertTree(t, tree, err, map[string]interface{}{
- "a b": "hello world",
- })
-}
-
-func TestDoubleQuotedKey(t *testing.T) {
- tree, err := Load(`
- "key" = "a"
- "\t" = "b"
- "\U0001F914" = "c"
- "\u2764" = "d"
- `)
- assertTree(t, tree, err, map[string]interface{}{
- "key": "a",
- "\t": "b",
- "\U0001F914": "c",
- "\u2764": "d",
- })
-}
-
-func TestSingleQuotedKey(t *testing.T) {
- tree, err := Load(`
- 'key' = "a"
- '\t' = "b"
- '\U0001F914' = "c"
- '\u2764' = "d"
- `)
- assertTree(t, tree, err, map[string]interface{}{
- `key`: "a",
- `\t`: "b",
- `\U0001F914`: "c",
- `\u2764`: "d",
- })
-}
-
-func TestStringEscapables(t *testing.T) {
- tree, err := Load("a = \"a \\n b\"")
- assertTree(t, tree, err, map[string]interface{}{
- "a": "a \n b",
- })
-
- tree, err = Load("a = \"a \\t b\"")
- assertTree(t, tree, err, map[string]interface{}{
- "a": "a \t b",
- })
-
- tree, err = Load("a = \"a \\r b\"")
- assertTree(t, tree, err, map[string]interface{}{
- "a": "a \r b",
- })
-
- tree, err = Load("a = \"a \\\\ b\"")
- assertTree(t, tree, err, map[string]interface{}{
- "a": "a \\ b",
- })
-}
-
-func TestEmptyQuotedString(t *testing.T) {
- tree, err := Load(`[""]
-"" = 1`)
- assertTree(t, tree, err, map[string]interface{}{
- "": map[string]interface{}{
- "": int64(1),
- },
- })
-}
-
-func TestBools(t *testing.T) {
- tree, err := Load("a = true\nb = false")
- assertTree(t, tree, err, map[string]interface{}{
- "a": true,
- "b": false,
- })
-}
-
-func TestNestedKeys(t *testing.T) {
- tree, err := Load("[a.b.c]\nd = 42")
- assertTree(t, tree, err, map[string]interface{}{
- "a": map[string]interface{}{
- "b": map[string]interface{}{
- "c": map[string]interface{}{
- "d": int64(42),
- },
- },
- },
- })
-}
-
-func TestNestedQuotedUnicodeKeys(t *testing.T) {
- tree, err := Load("[ j . \"ʞ\" . l ]\nd = 42")
- assertTree(t, tree, err, map[string]interface{}{
- "j": map[string]interface{}{
- "ʞ": map[string]interface{}{
- "l": map[string]interface{}{
- "d": int64(42),
- },
- },
- },
- })
-
- tree, err = Load("[ g . h . i ]\nd = 42")
- assertTree(t, tree, err, map[string]interface{}{
- "g": map[string]interface{}{
- "h": map[string]interface{}{
- "i": map[string]interface{}{
- "d": int64(42),
- },
- },
- },
- })
-
- tree, err = Load("[ d.e.f ]\nk = 42")
- assertTree(t, tree, err, map[string]interface{}{
- "d": map[string]interface{}{
- "e": map[string]interface{}{
- "f": map[string]interface{}{
- "k": int64(42),
- },
- },
- },
- })
-}
-
-func TestArrayOne(t *testing.T) {
- tree, err := Load("a = [1]")
- assertTree(t, tree, err, map[string]interface{}{
- "a": []int64{int64(1)},
- })
-}
-
-func TestArrayZero(t *testing.T) {
- tree, err := Load("a = []")
- assertTree(t, tree, err, map[string]interface{}{
- "a": []interface{}{},
- })
-}
-
-func TestArraySimple(t *testing.T) {
- tree, err := Load("a = [42, 21, 10]")
- assertTree(t, tree, err, map[string]interface{}{
- "a": []int64{int64(42), int64(21), int64(10)},
- })
-
- tree, _ = Load("a = [42, 21, 10,]")
- assertTree(t, tree, err, map[string]interface{}{
- "a": []int64{int64(42), int64(21), int64(10)},
- })
-}
-
-func TestArrayMultiline(t *testing.T) {
- tree, err := Load("a = [42,\n21, 10,]")
- assertTree(t, tree, err, map[string]interface{}{
- "a": []int64{int64(42), int64(21), int64(10)},
- })
-}
-
-func TestArrayNested(t *testing.T) {
- tree, err := Load("a = [[42, 21], [10]]")
- assertTree(t, tree, err, map[string]interface{}{
- "a": [][]int64{{int64(42), int64(21)}, {int64(10)}},
- })
-}
-
-func TestNestedArrayComment(t *testing.T) {
- tree, err := Load(`
-someArray = [
-# does not work
-["entry1"]
-]`)
- assertTree(t, tree, err, map[string]interface{}{
- "someArray": [][]string{{"entry1"}},
- })
-}
-
-func TestNestedEmptyArrays(t *testing.T) {
- tree, err := Load("a = [[[]]]")
- assertTree(t, tree, err, map[string]interface{}{
- "a": [][][]interface{}{{{}}},
- })
-}
-
-func TestArrayMixedTypes(t *testing.T) {
- _, err := Load("a = [42, 16.0]")
- if err.Error() != "(1, 10): mixed types in array" {
- t.Error("Bad error message:", err.Error())
- }
-
- _, err = Load("a = [42, \"hello\"]")
- if err.Error() != "(1, 11): mixed types in array" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestArrayNestedStrings(t *testing.T) {
- tree, err := Load("data = [ [\"gamma\", \"delta\"], [\"Foo\"] ]")
- assertTree(t, tree, err, map[string]interface{}{
- "data": [][]string{{"gamma", "delta"}, {"Foo"}},
- })
-}
-
-func TestParseUnknownRvalue(t *testing.T) {
- _, err := Load("a = !bssss")
- if err == nil {
- t.Error("Expecting a parse error")
- }
-
- _, err = Load("a = /b")
- if err == nil {
- t.Error("Expecting a parse error")
- }
-}
-
-func TestMissingValue(t *testing.T) {
- _, err := Load("a = ")
- if err.Error() != "(1, 5): expecting a value" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestUnterminatedArray(t *testing.T) {
- _, err := Load("a = [1,")
- if err.Error() != "(1, 8): unterminated array" {
- t.Error("Bad error message:", err.Error())
- }
-
- _, err = Load("a = [1")
- if err.Error() != "(1, 7): unterminated array" {
- t.Error("Bad error message:", err.Error())
- }
-
- _, err = Load("a = [1 2")
- if err.Error() != "(1, 8): missing comma" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestNewlinesInArrays(t *testing.T) {
- tree, err := Load("a = [1,\n2,\n3]")
- assertTree(t, tree, err, map[string]interface{}{
- "a": []int64{int64(1), int64(2), int64(3)},
- })
-}
-
-func TestArrayWithExtraComma(t *testing.T) {
- tree, err := Load("a = [1,\n2,\n3,\n]")
- assertTree(t, tree, err, map[string]interface{}{
- "a": []int64{int64(1), int64(2), int64(3)},
- })
-}
-
-func TestArrayWithExtraCommaComment(t *testing.T) {
- tree, err := Load("a = [1, # wow\n2, # such items\n3, # so array\n]")
- assertTree(t, tree, err, map[string]interface{}{
- "a": []int64{int64(1), int64(2), int64(3)},
- })
-}
-
-func TestSimpleInlineGroup(t *testing.T) {
- tree, err := Load("key = {a = 42}")
- assertTree(t, tree, err, map[string]interface{}{
- "key": map[string]interface{}{
- "a": int64(42),
- },
- })
-}
-
-func TestDoubleInlineGroup(t *testing.T) {
- tree, err := Load("key = {a = 42, b = \"foo\"}")
- assertTree(t, tree, err, map[string]interface{}{
- "key": map[string]interface{}{
- "a": int64(42),
- "b": "foo",
- },
- })
-}
-
-func TestExampleInlineGroup(t *testing.T) {
- tree, err := Load(`name = { first = "Tom", last = "Preston-Werner" }
-point = { x = 1, y = 2 }`)
- assertTree(t, tree, err, map[string]interface{}{
- "name": map[string]interface{}{
- "first": "Tom",
- "last": "Preston-Werner",
- },
- "point": map[string]interface{}{
- "x": int64(1),
- "y": int64(2),
- },
- })
-}
-
-func TestExampleInlineGroupInArray(t *testing.T) {
- tree, err := Load(`points = [{ x = 1, y = 2 }]`)
- assertTree(t, tree, err, map[string]interface{}{
- "points": []map[string]interface{}{
- {
- "x": int64(1),
- "y": int64(2),
- },
- },
- })
-}
-
-func TestInlineTableUnterminated(t *testing.T) {
- _, err := Load("foo = {")
- if err.Error() != "(1, 8): unterminated inline table" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestInlineTableCommaExpected(t *testing.T) {
- _, err := Load("foo = {hello = 53 test = foo}")
- if err.Error() != "(1, 19): comma expected between fields in inline table" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestInlineTableCommaStart(t *testing.T) {
- _, err := Load("foo = {, hello = 53}")
- if err.Error() != "(1, 8): inline table cannot start with a comma" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestInlineTableDoubleComma(t *testing.T) {
- _, err := Load("foo = {hello = 53,, foo = 17}")
- if err.Error() != "(1, 19): need field between two commas in inline table" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestDuplicateGroups(t *testing.T) {
- _, err := Load("[foo]\na=2\n[foo]b=3")
- if err.Error() != "(3, 2): duplicated tables" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestDuplicateKeys(t *testing.T) {
- _, err := Load("foo = 2\nfoo = 3")
- if err.Error() != "(2, 1): The following key was defined twice: foo" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestEmptyIntermediateTable(t *testing.T) {
- _, err := Load("[foo..bar]")
- if err.Error() != "(1, 2): invalid table array key: empty table key" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestImplicitDeclarationBefore(t *testing.T) {
- tree, err := Load("[a.b.c]\nanswer = 42\n[a]\nbetter = 43")
- assertTree(t, tree, err, map[string]interface{}{
- "a": map[string]interface{}{
- "b": map[string]interface{}{
- "c": map[string]interface{}{
- "answer": int64(42),
- },
- },
- "better": int64(43),
- },
- })
-}
-
-func TestFloatsWithoutLeadingZeros(t *testing.T) {
- _, err := Load("a = .42")
- if err.Error() != "(1, 5): cannot start float with a dot" {
- t.Error("Bad error message:", err.Error())
- }
-
- _, err = Load("a = -.42")
- if err.Error() != "(1, 5): cannot start float with a dot" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestMissingFile(t *testing.T) {
- _, err := LoadFile("foo.toml")
- if err.Error() != "open foo.toml: no such file or directory" &&
- err.Error() != "open foo.toml: The system cannot find the file specified." {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestParseFile(t *testing.T) {
- tree, err := LoadFile("example.toml")
-
- assertTree(t, tree, err, map[string]interface{}{
- "title": "TOML Example",
- "owner": map[string]interface{}{
- "name": "Tom Preston-Werner",
- "organization": "GitHub",
- "bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
- "dob": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
- },
- "database": map[string]interface{}{
- "server": "192.168.1.1",
- "ports": []int64{8001, 8001, 8002},
- "connection_max": 5000,
- "enabled": true,
- },
- "servers": map[string]interface{}{
- "alpha": map[string]interface{}{
- "ip": "10.0.0.1",
- "dc": "eqdc10",
- },
- "beta": map[string]interface{}{
- "ip": "10.0.0.2",
- "dc": "eqdc10",
- },
- },
- "clients": map[string]interface{}{
- "data": []interface{}{
- []string{"gamma", "delta"},
- []int64{1, 2},
- },
- },
- })
-}
-
-func TestParseFileCRLF(t *testing.T) {
- tree, err := LoadFile("example-crlf.toml")
-
- assertTree(t, tree, err, map[string]interface{}{
- "title": "TOML Example",
- "owner": map[string]interface{}{
- "name": "Tom Preston-Werner",
- "organization": "GitHub",
- "bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
- "dob": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
- },
- "database": map[string]interface{}{
- "server": "192.168.1.1",
- "ports": []int64{8001, 8001, 8002},
- "connection_max": 5000,
- "enabled": true,
- },
- "servers": map[string]interface{}{
- "alpha": map[string]interface{}{
- "ip": "10.0.0.1",
- "dc": "eqdc10",
- },
- "beta": map[string]interface{}{
- "ip": "10.0.0.2",
- "dc": "eqdc10",
- },
- },
- "clients": map[string]interface{}{
- "data": []interface{}{
- []string{"gamma", "delta"},
- []int64{1, 2},
- },
- },
- })
-}
-
-func TestParseKeyGroupArray(t *testing.T) {
- tree, err := Load("[[foo.bar]] a = 42\n[[foo.bar]] a = 69")
- assertTree(t, tree, err, map[string]interface{}{
- "foo": map[string]interface{}{
- "bar": []map[string]interface{}{
- {"a": int64(42)},
- {"a": int64(69)},
- },
- },
- })
-}
-
-func TestParseKeyGroupArrayUnfinished(t *testing.T) {
- _, err := Load("[[foo.bar]\na = 42")
- if err.Error() != "(1, 10): was expecting token [[, but got unclosed table array key instead" {
- t.Error("Bad error message:", err.Error())
- }
-
- _, err = Load("[[foo.[bar]\na = 42")
- if err.Error() != "(1, 3): unexpected token table array key cannot contain ']', was expecting a table array key" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestParseKeyGroupArrayQueryExample(t *testing.T) {
- tree, err := Load(`
- [[book]]
- title = "The Stand"
- author = "Stephen King"
- [[book]]
- title = "For Whom the Bell Tolls"
-	author = "Ernest Hemingway"
- [[book]]
- title = "Neuromancer"
- author = "William Gibson"
- `)
-
- assertTree(t, tree, err, map[string]interface{}{
- "book": []map[string]interface{}{
- {"title": "The Stand", "author": "Stephen King"},
-		{"title": "For Whom the Bell Tolls", "author": "Ernest Hemingway"},
- {"title": "Neuromancer", "author": "William Gibson"},
- },
- })
-}
-
-func TestParseKeyGroupArraySpec(t *testing.T) {
- tree, err := Load("[[fruit]]\n name=\"apple\"\n [fruit.physical]\n color=\"red\"\n shape=\"round\"\n [[fruit]]\n name=\"banana\"")
- assertTree(t, tree, err, map[string]interface{}{
- "fruit": []map[string]interface{}{
- {"name": "apple", "physical": map[string]interface{}{"color": "red", "shape": "round"}},
- {"name": "banana"},
- },
- })
-}
-
-func TestTomlValueStringRepresentation(t *testing.T) {
- for idx, item := range []struct {
- Value interface{}
- Expect string
- }{
- {int64(12345), "12345"},
- {uint64(50), "50"},
- {float64(123.45), "123.45"},
- {true, "true"},
- {"hello world", "\"hello world\""},
- {"\b\t\n\f\r\"\\", "\"\\b\\t\\n\\f\\r\\\"\\\\\""},
- {"\x05", "\"\\u0005\""},
- {time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
- "1979-05-27T07:32:00Z"},
- {[]interface{}{"gamma", "delta"},
- "[\"gamma\",\"delta\"]"},
- {nil, ""},
- } {
- result, err := tomlValueStringRepresentation(item.Value, "", false)
- if err != nil {
- t.Errorf("Test %d - unexpected error: %s", idx, err)
- }
- if result != item.Expect {
- t.Errorf("Test %d - got '%s', expected '%s'", idx, result, item.Expect)
- }
- }
-}
-
-func TestToStringMapStringString(t *testing.T) {
- tree, err := TreeFromMap(map[string]interface{}{"m": map[string]interface{}{"v": "abc"}})
- if err != nil {
- t.Fatalf("unexpected error: %s", err)
- }
- want := "\n[m]\n v = \"abc\"\n"
- got := tree.String()
-
- if got != want {
- t.Errorf("want:\n%q\ngot:\n%q", want, got)
- }
-}
-
-func assertPosition(t *testing.T, text string, ref map[string]Position) {
- tree, err := Load(text)
- if err != nil {
- t.Errorf("Error loading document text: `%v`", text)
- t.Errorf("Error: %v", err)
- }
- for path, pos := range ref {
- testPos := tree.GetPosition(path)
- if testPos.Invalid() {
- t.Errorf("Failed to query tree path or path has invalid position: %s", path)
- } else if pos != testPos {
- t.Errorf("Expected position %v, got %v instead", pos, testPos)
- }
- }
-}
-
-func TestDocumentPositions(t *testing.T) {
- assertPosition(t,
- "[foo]\nbar=42\nbaz=69",
- map[string]Position{
- "": {1, 1},
- "foo": {1, 1},
- "foo.bar": {2, 1},
- "foo.baz": {3, 1},
- })
-}
-
-func TestDocumentPositionsWithSpaces(t *testing.T) {
- assertPosition(t,
- " [foo]\n bar=42\n baz=69",
- map[string]Position{
- "": {1, 1},
- "foo": {1, 3},
- "foo.bar": {2, 3},
- "foo.baz": {3, 3},
- })
-}
-
-func TestDocumentPositionsWithGroupArray(t *testing.T) {
- assertPosition(t,
- "[[foo]]\nbar=42\nbaz=69",
- map[string]Position{
- "": {1, 1},
- "foo": {1, 1},
- "foo.bar": {2, 1},
- "foo.baz": {3, 1},
- })
-}
-
-func TestNestedTreePosition(t *testing.T) {
- assertPosition(t,
- "[foo.bar]\na=42\nb=69",
- map[string]Position{
- "": {1, 1},
- "foo": {1, 1},
- "foo.bar": {1, 1},
- "foo.bar.a": {2, 1},
- "foo.bar.b": {3, 1},
- })
-}
-
-func TestInvalidGroupArray(t *testing.T) {
- _, err := Load("[table#key]\nanswer = 42")
- if err == nil {
- t.Error("Should error")
- }
-
- _, err = Load("[foo.[bar]\na = 42")
- if err.Error() != "(1, 2): unexpected token table key cannot contain ']', was expecting a table key" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestDoubleEqual(t *testing.T) {
- _, err := Load("foo= = 2")
- if err.Error() != "(1, 6): cannot have multiple equals for the same key" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestGroupArrayReassign(t *testing.T) {
- _, err := Load("[hello]\n[[hello]]")
- if err.Error() != "(2, 3): key \"hello\" is already assigned and not of type table array" {
- t.Error("Bad error message:", err.Error())
- }
-}
-
-func TestInvalidFloatParsing(t *testing.T) {
- _, err := Load("a=1e_2")
- if err.Error() != "(1, 3): invalid use of _ in number" {
- t.Error("Bad error message:", err.Error())
- }
-
- _, err = Load("a=1e2_")
- if err.Error() != "(1, 3): invalid use of _ in number" {
- t.Error("Bad error message:", err.Error())
- }
-
- _, err = Load("a=1__2")
- if err.Error() != "(1, 3): invalid use of _ in number" {
- t.Error("Bad error message:", err.Error())
- }
-
- _, err = Load("a=_1_2")
- if err.Error() != "(1, 3): cannot start number with underscore" {
- t.Error("Bad error message:", err.Error())
- }
-}
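
The position bookkeeping covered by assertPosition and the TestDocumentPositions* cases above is available to callers through Tree.GetPosition. A minimal sketch (the document literal is chosen for illustration; error handling abbreviated):

package main

import (
	"fmt"

	"github.com/pelletier/go-toml"
)

func main() {
	tree, err := toml.Load("[postgres]\nuser = \"avalue\"\npassword = \"bvalue\"")
	if err != nil {
		// Parse errors carry a "(line, column): message" prefix, which the
		// error-message assertions above rely on.
		fmt.Println("parse error:", err)
		return
	}
	// Get resolves dotted key paths; GetPosition reports where a key was defined.
	fmt.Println(tree.Get("postgres.user"))         // avalue
	fmt.Println(tree.GetPosition("postgres.user")) // (2, 1)
}
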
diff --git a/vendor/github.com/pelletier/go-toml/position_test.go b/vendor/github.com/pelletier/go-toml/position_test.go
deleted file mode 100644
index 63ad1afc8..000000000
--- a/vendor/github.com/pelletier/go-toml/position_test.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Testing support for go-toml
-
-package toml
-
-import (
- "testing"
-)
-
-func TestPositionString(t *testing.T) {
- p := Position{123, 456}
- expected := "(123, 456)"
- value := p.String()
-
- if value != expected {
- t.Errorf("Expected %v, got %v instead", expected, value)
- }
-}
-
-func TestInvalid(t *testing.T) {
- for i, v := range []Position{
- {0, 1234},
- {1234, 0},
- {0, 0},
- } {
- if !v.Invalid() {
- t.Errorf("Position at %v is valid: %v", i, v)
- }
- }
-}
diff --git a/vendor/github.com/pelletier/go-toml/query/doc.go b/vendor/github.com/pelletier/go-toml/query/doc.go
deleted file mode 100644
index ed63c1109..000000000
--- a/vendor/github.com/pelletier/go-toml/query/doc.go
+++ /dev/null
@@ -1,175 +0,0 @@
-// Package query performs JSONPath-like queries on a TOML document.
-//
-// The query path implementation is based loosely on the JSONPath specification:
-// http://goessner.net/articles/JsonPath/.
-//
-// The idea behind a query path is to allow quick access to any element, or set
-// of elements within a TOML document, with a single expression.
-//
-// result, err := query.CompileAndExecute("$.foo.bar.baz", tree)
-//
-// This is roughly equivalent to:
-//
-// next := tree.Get("foo")
-// if next != nil {
-// next = next.Get("bar")
-// if next != nil {
-// next = next.Get("baz")
-// }
-// }
-// result := next
-//
-// err is non-nil if a parsing error occurs.
-//
-// If no node in the tree matches the query, result will simply contain an empty list of
-// items.
-//
-// As illustrated above, the query path is much more efficient, especially since
-// the structure of the TOML file can vary. Rather than making assumptions about
-// a document's structure, a query allows the programmer to make structured
-// requests into the document, and get zero or more values as a result.
-//
-// Query syntax
-//
-// The syntax of a query begins with a root token, followed by any number of
-// sub-expressions:
-//
-// $
-// Root of the TOML tree. This must always come first.
-// .name
-// Selects child of this node, where 'name' is a TOML key
-// name.
-// ['name']
-// Selects child of this node, where 'name' is a string
-// containing a TOML key name.
-// [index]
-//      Selects child array element at 'index'.
-// ..expr
-//      Recursively selects all children, filtered by a union,
-// index, or slice expression.
-// ..*
-// Recursive selection of all nodes at this point in the
-// tree.
-// .*
-// Selects all children of the current node.
-// [expr,expr]
-// Union operator - a logical 'or' grouping of two or more
-// sub-expressions: index, key name, or filter.
-// [start:end:step]
-// Slice operator - selects array elements from start to
-// end-1, at the given step. All three arguments are
-// optional.
-// [?(filter)]
-// Named filter expression - the function 'filter' is
-// used to filter children at this node.
-//
-// Query Indexes And Slices
-//
-// Index expressions perform no bounds checking, and will contribute no
-// values to the result set if the provided index or index range is invalid.
-// Negative indexes represent values from the end of the array, counting backwards.
-//
-// // select the last index of the array named 'foo'
-// query.CompileAndExecute("$.foo[-1]", tree)
-//
-// Slice expressions are supported, by using ':' to separate a start/end index pair.
-//
-// // select up to the first five elements in the array
-// query.CompileAndExecute("$.foo[0:5]", tree)
-//
-// Slice expressions also allow negative indexes for the start and stop
-// arguments.
-//
-// // select all array elements.
-// query.CompileAndExecute("$.foo[0:-1]", tree)
-//
-// Slice expressions may have an optional stride/step parameter:
-//
-// // select every other element
-// query.CompileAndExecute("$.foo[0:-1:2]", tree)
-//
-// Slice start and end parameters are also optional:
-//
-// // these are all equivalent and select all the values in the array
-// query.CompileAndExecute("$.foo[:]", tree)
-// query.CompileAndExecute("$.foo[0:]", tree)
-// query.CompileAndExecute("$.foo[:-1]", tree)
-// query.CompileAndExecute("$.foo[0:-1:]", tree)
-// query.CompileAndExecute("$.foo[::1]", tree)
-// query.CompileAndExecute("$.foo[0::1]", tree)
-// query.CompileAndExecute("$.foo[:-1:1]", tree)
-// query.CompileAndExecute("$.foo[0:-1:1]", tree)
-//
-// Query Filters
-//
-// Query filters are used within a Union [,] or single Filter [] expression.
-// A filter only allows nodes that qualify through to the next expression,
-// and/or into the result set.
-//
-// // returns children of foo that are permitted by the 'bar' filter.
-// query.CompileAndExecute("$.foo[?(bar)]", tree)
-//
-// There are several filters provided with the library:
-//
-// tree
-// Allows nodes of type Tree.
-// int
-// Allows nodes of type int64.
-// float
-// Allows nodes of type float64.
-// string
-// Allows nodes of type string.
-// time
-// Allows nodes of type time.Time.
-// bool
-// Allows nodes of type bool.
-//
-// Query Results
-//
-// An executed query returns a Result object. This contains the nodes
-// in the TOML tree that satisfy the query expression. Position information
-// is also available for each value in the set.
-//
-// // display the results of a query
-//    results, _ := query.CompileAndExecute("$.foo.bar.baz", tree)
-//    for idx, value := range results.Values() {
-//      fmt.Printf("%v: %v\n", results.Positions()[idx], value)
-// }
-//
-// Compiled Queries
-//
-// Queries may be executed directly on a Tree object, or compiled ahead
-// of time and executed discretely. The former is more convenient, but has the
-// penalty of having to recompile the query expression each time.
-//
-// // basic query
-//    results, _ := query.CompileAndExecute("$.foo.bar.baz", tree)
-//
-// // compiled query
-//    query, err := query.Compile("$.foo.bar.baz")
-// results := query.Execute(tree)
-//
-// // run the compiled query again on a different tree
-// moreResults := query.Execute(anotherTree)
-//
-// User Defined Query Filters
-//
-// Filter expressions may also be user defined by using the SetFilter()
-// function on the Query object. The function must return true/false, which
-// signifies if the passed node is kept or discarded, respectively.
-//
-// // create a query that references a user-defined filter
-// query, _ := query.Compile("$[?(bazOnly)]")
-//
-// // define the filter, and assign it to the query
-// query.SetFilter("bazOnly", func(node interface{}) bool{
-// if tree, ok := node.(*Tree); ok {
-// return tree.Has("baz")
-// }
-// return false // reject all other node types
-// })
-//
-// // run the query
-// query.Execute(tree)
-//
-package query
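
Pulling the pieces of the package documentation above together: a query can be compiled once and executed against any number of trees, with Values and Positions exposing the matched nodes. A minimal sketch (the document and path are illustrative):

package main

import (
	"fmt"

	"github.com/pelletier/go-toml"
	"github.com/pelletier/go-toml/query"
)

func main() {
	tree, err := toml.Load(`
[[book]]
title = "The Stand"
[[book]]
title = "Neuromancer"
`)
	if err != nil {
		fmt.Println("load error:", err)
		return
	}
	// Compile once, execute as often as needed.
	q, err := query.Compile("$.book.title")
	if err != nil {
		fmt.Println("compile error:", err)
		return
	}
	results := q.Execute(tree)
	for idx, value := range results.Values() {
		fmt.Printf("%v: %v\n", results.Positions()[idx], value)
	}
}
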
diff --git a/vendor/github.com/pelletier/go-toml/query/lexer.go b/vendor/github.com/pelletier/go-toml/query/lexer.go
deleted file mode 100644
index 2dc319408..000000000
--- a/vendor/github.com/pelletier/go-toml/query/lexer.go
+++ /dev/null
@@ -1,357 +0,0 @@
-// TOML JSONPath lexer.
-//
-// Written using the principles developed by Rob Pike in
-// http://www.youtube.com/watch?v=HxaD_trXwRE
-
-package query
-
-import (
- "fmt"
- "github.com/pelletier/go-toml"
- "strconv"
- "strings"
- "unicode/utf8"
-)
-
-// Lexer state function
-type queryLexStateFn func() queryLexStateFn
-
-// Lexer definition
-type queryLexer struct {
- input string
- start int
- pos int
- width int
- tokens chan token
- depth int
- line int
- col int
- stringTerm string
-}
-
-func (l *queryLexer) run() {
- for state := l.lexVoid; state != nil; {
- state = state()
- }
- close(l.tokens)
-}
-
-func (l *queryLexer) nextStart() {
- // iterate by runes (utf8 characters)
- // search for newlines and advance line/col counts
- for i := l.start; i < l.pos; {
- r, width := utf8.DecodeRuneInString(l.input[i:])
- if r == '\n' {
- l.line++
- l.col = 1
- } else {
- l.col++
- }
- i += width
- }
- // advance start position to next token
- l.start = l.pos
-}
-
-func (l *queryLexer) emit(t tokenType) {
- l.tokens <- token{
- Position: toml.Position{Line: l.line, Col: l.col},
- typ: t,
- val: l.input[l.start:l.pos],
- }
- l.nextStart()
-}
-
-func (l *queryLexer) emitWithValue(t tokenType, value string) {
- l.tokens <- token{
- Position: toml.Position{Line: l.line, Col: l.col},
- typ: t,
- val: value,
- }
- l.nextStart()
-}
-
-func (l *queryLexer) next() rune {
- if l.pos >= len(l.input) {
- l.width = 0
- return eof
- }
- var r rune
- r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
- l.pos += l.width
- return r
-}
-
-func (l *queryLexer) ignore() {
- l.nextStart()
-}
-
-func (l *queryLexer) backup() {
- l.pos -= l.width
-}
-
-func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn {
- l.tokens <- token{
- Position: toml.Position{Line: l.line, Col: l.col},
- typ: tokenError,
- val: fmt.Sprintf(format, args...),
- }
- return nil
-}
-
-func (l *queryLexer) peek() rune {
- r := l.next()
- l.backup()
- return r
-}
-
-func (l *queryLexer) accept(valid string) bool {
- if strings.ContainsRune(valid, l.next()) {
- return true
- }
- l.backup()
- return false
-}
-
-func (l *queryLexer) follow(next string) bool {
- return strings.HasPrefix(l.input[l.pos:], next)
-}
-
-func (l *queryLexer) lexVoid() queryLexStateFn {
- for {
- next := l.peek()
- switch next {
- case '$':
- l.pos++
- l.emit(tokenDollar)
- continue
- case '.':
- if l.follow("..") {
- l.pos += 2
- l.emit(tokenDotDot)
- } else {
- l.pos++
- l.emit(tokenDot)
- }
- continue
- case '[':
- l.pos++
- l.emit(tokenLeftBracket)
- continue
- case ']':
- l.pos++
- l.emit(tokenRightBracket)
- continue
- case ',':
- l.pos++
- l.emit(tokenComma)
- continue
- case '*':
- l.pos++
- l.emit(tokenStar)
- continue
- case '(':
- l.pos++
- l.emit(tokenLeftParen)
- continue
- case ')':
- l.pos++
- l.emit(tokenRightParen)
- continue
- case '?':
- l.pos++
- l.emit(tokenQuestion)
- continue
- case ':':
- l.pos++
- l.emit(tokenColon)
- continue
- case '\'':
- l.ignore()
- l.stringTerm = string(next)
- return l.lexString
- case '"':
- l.ignore()
- l.stringTerm = string(next)
- return l.lexString
- }
-
- if isSpace(next) {
- l.next()
- l.ignore()
- continue
- }
-
- if isAlphanumeric(next) {
- return l.lexKey
- }
-
- if next == '+' || next == '-' || isDigit(next) {
- return l.lexNumber
- }
-
- if l.next() == eof {
- break
- }
-
- return l.errorf("unexpected char: '%v'", next)
- }
- l.emit(tokenEOF)
- return nil
-}
-
-func (l *queryLexer) lexKey() queryLexStateFn {
- for {
- next := l.peek()
- if !isAlphanumeric(next) {
- l.emit(tokenKey)
- return l.lexVoid
- }
-
- if l.next() == eof {
- break
- }
- }
- l.emit(tokenEOF)
- return nil
-}
-
-func (l *queryLexer) lexString() queryLexStateFn {
- l.pos++
- l.ignore()
- growingString := ""
-
- for {
- if l.follow(l.stringTerm) {
- l.emitWithValue(tokenString, growingString)
- l.pos++
- l.ignore()
- return l.lexVoid
- }
-
- if l.follow("\\\"") {
- l.pos++
- growingString += "\""
- } else if l.follow("\\'") {
- l.pos++
- growingString += "'"
- } else if l.follow("\\n") {
- l.pos++
- growingString += "\n"
- } else if l.follow("\\b") {
- l.pos++
- growingString += "\b"
- } else if l.follow("\\f") {
- l.pos++
- growingString += "\f"
- } else if l.follow("\\/") {
- l.pos++
- growingString += "/"
- } else if l.follow("\\t") {
- l.pos++
- growingString += "\t"
- } else if l.follow("\\r") {
- l.pos++
- growingString += "\r"
- } else if l.follow("\\\\") {
- l.pos++
- growingString += "\\"
- } else if l.follow("\\u") {
- l.pos += 2
- code := ""
- for i := 0; i < 4; i++ {
- c := l.peek()
- l.pos++
- if !isHexDigit(c) {
- return l.errorf("unfinished unicode escape")
- }
- code = code + string(c)
- }
- l.pos--
- intcode, err := strconv.ParseInt(code, 16, 32)
- if err != nil {
- return l.errorf("invalid unicode escape: \\u" + code)
- }
- growingString += string(rune(intcode))
- } else if l.follow("\\U") {
- l.pos += 2
- code := ""
- for i := 0; i < 8; i++ {
- c := l.peek()
- l.pos++
- if !isHexDigit(c) {
- return l.errorf("unfinished unicode escape")
- }
- code = code + string(c)
- }
- l.pos--
- intcode, err := strconv.ParseInt(code, 16, 32)
- if err != nil {
-				return l.errorf("invalid unicode escape: \\U" + code)
- }
- growingString += string(rune(intcode))
- } else if l.follow("\\") {
- l.pos++
- return l.errorf("invalid escape sequence: \\" + string(l.peek()))
- } else {
- growingString += string(l.peek())
- }
-
- if l.next() == eof {
- break
- }
- }
-
- return l.errorf("unclosed string")
-}
-
-func (l *queryLexer) lexNumber() queryLexStateFn {
- l.ignore()
- if !l.accept("+") {
- l.accept("-")
- }
- pointSeen := false
- digitSeen := false
- for {
- next := l.next()
- if next == '.' {
- if pointSeen {
- return l.errorf("cannot have two dots in one float")
- }
- if !isDigit(l.peek()) {
- return l.errorf("float cannot end with a dot")
- }
- pointSeen = true
- } else if isDigit(next) {
- digitSeen = true
- } else {
- l.backup()
- break
- }
- if pointSeen && !digitSeen {
- return l.errorf("cannot start float with a dot")
- }
- }
-
- if !digitSeen {
- return l.errorf("no digit in that number")
- }
- if pointSeen {
- l.emit(tokenFloat)
- } else {
- l.emit(tokenInteger)
- }
- return l.lexVoid
-}
-
-// Entry point
-func lexQuery(input string) chan token {
- l := &queryLexer{
- input: input,
- tokens: make(chan token),
- line: 1,
- col: 1,
- }
- go l.run()
- return l.tokens
-}
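
The lexer above follows the state-function pattern referenced in its header comment: run() repeatedly calls the current state, each state returns the next one, and a nil state ends the loop, after which the token channel is closed. A stripped-down, self-contained sketch of the same pattern (unrelated to the real token types above):

package main

import "fmt"

// stateFn mirrors queryLexStateFn: a state returns the next state,
// and a nil state terminates the loop in run().
type stateFn func(*miniLexer) stateFn

type miniLexer struct {
	input string
	pos   int
	out   chan byte
}

func (l *miniLexer) run() {
	for state := lexAll; state != nil; {
		state = state(l)
	}
	close(l.out)
}

// lexAll emits one byte per step and stops at end of input.
func lexAll(l *miniLexer) stateFn {
	if l.pos >= len(l.input) {
		return nil
	}
	l.out <- l.input[l.pos]
	l.pos++
	return lexAll
}

func main() {
	l := &miniLexer{input: "abc", out: make(chan byte)}
	go l.run()
	for b := range l.out {
		fmt.Printf("%c ", b)
	}
	fmt.Println()
}
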
diff --git a/vendor/github.com/pelletier/go-toml/query/lexer_test.go b/vendor/github.com/pelletier/go-toml/query/lexer_test.go
deleted file mode 100644
index 8ce0501fe..000000000
--- a/vendor/github.com/pelletier/go-toml/query/lexer_test.go
+++ /dev/null
@@ -1,179 +0,0 @@
-package query
-
-import (
- "github.com/pelletier/go-toml"
- "testing"
-)
-
-func testQLFlow(t *testing.T, input string, expectedFlow []token) {
- ch := lexQuery(input)
- for idx, expected := range expectedFlow {
- token := <-ch
- if token != expected {
- t.Log("While testing #", idx, ":", input)
- t.Log("compared (got)", token, "to (expected)", expected)
- t.Log("\tvalue:", token.val, "<->", expected.val)
- t.Log("\tvalue as bytes:", []byte(token.val), "<->", []byte(expected.val))
- t.Log("\ttype:", token.typ.String(), "<->", expected.typ.String())
- t.Log("\tline:", token.Line, "<->", expected.Line)
- t.Log("\tcolumn:", token.Col, "<->", expected.Col)
- t.Log("compared", token, "to", expected)
- t.FailNow()
- }
- }
-
- tok, ok := <-ch
- if ok {
- t.Log("channel is not closed!")
- t.Log(len(ch)+1, "tokens remaining:")
-
- t.Log("token ->", tok)
- for token := range ch {
- t.Log("token ->", token)
- }
- t.FailNow()
- }
-}
-
-func TestLexSpecialChars(t *testing.T) {
- testQLFlow(t, " .$[]..()?*", []token{
- {toml.Position{1, 2}, tokenDot, "."},
- {toml.Position{1, 3}, tokenDollar, "$"},
- {toml.Position{1, 4}, tokenLeftBracket, "["},
- {toml.Position{1, 5}, tokenRightBracket, "]"},
- {toml.Position{1, 6}, tokenDotDot, ".."},
- {toml.Position{1, 8}, tokenLeftParen, "("},
- {toml.Position{1, 9}, tokenRightParen, ")"},
- {toml.Position{1, 10}, tokenQuestion, "?"},
- {toml.Position{1, 11}, tokenStar, "*"},
- {toml.Position{1, 12}, tokenEOF, ""},
- })
-}
-
-func TestLexString(t *testing.T) {
- testQLFlow(t, "'foo\n'", []token{
- {toml.Position{1, 2}, tokenString, "foo\n"},
- {toml.Position{2, 2}, tokenEOF, ""},
- })
-}
-
-func TestLexDoubleString(t *testing.T) {
- testQLFlow(t, `"bar"`, []token{
- {toml.Position{1, 2}, tokenString, "bar"},
- {toml.Position{1, 6}, tokenEOF, ""},
- })
-}
-
-func TestLexStringEscapes(t *testing.T) {
- testQLFlow(t, `"foo \" \' \b \f \/ \t \r \\ \u03A9 \U00012345 \n bar"`, []token{
- {toml.Position{1, 2}, tokenString, "foo \" ' \b \f / \t \r \\ \u03A9 \U00012345 \n bar"},
- {toml.Position{1, 55}, tokenEOF, ""},
- })
-}
-
-func TestLexStringUnfinishedUnicode4(t *testing.T) {
- testQLFlow(t, `"\u000"`, []token{
- {toml.Position{1, 2}, tokenError, "unfinished unicode escape"},
- })
-}
-
-func TestLexStringUnfinishedUnicode8(t *testing.T) {
- testQLFlow(t, `"\U0000"`, []token{
- {toml.Position{1, 2}, tokenError, "unfinished unicode escape"},
- })
-}
-
-func TestLexStringInvalidEscape(t *testing.T) {
- testQLFlow(t, `"\x"`, []token{
- {toml.Position{1, 2}, tokenError, "invalid escape sequence: \\x"},
- })
-}
-
-func TestLexStringUnfinished(t *testing.T) {
- testQLFlow(t, `"bar`, []token{
- {toml.Position{1, 2}, tokenError, "unclosed string"},
- })
-}
-
-func TestLexKey(t *testing.T) {
- testQLFlow(t, "foo", []token{
- {toml.Position{1, 1}, tokenKey, "foo"},
- {toml.Position{1, 4}, tokenEOF, ""},
- })
-}
-
-func TestLexRecurse(t *testing.T) {
- testQLFlow(t, "$..*", []token{
- {toml.Position{1, 1}, tokenDollar, "$"},
- {toml.Position{1, 2}, tokenDotDot, ".."},
- {toml.Position{1, 4}, tokenStar, "*"},
- {toml.Position{1, 5}, tokenEOF, ""},
- })
-}
-
-func TestLexBracketKey(t *testing.T) {
- testQLFlow(t, "$[foo]", []token{
- {toml.Position{1, 1}, tokenDollar, "$"},
- {toml.Position{1, 2}, tokenLeftBracket, "["},
- {toml.Position{1, 3}, tokenKey, "foo"},
- {toml.Position{1, 6}, tokenRightBracket, "]"},
- {toml.Position{1, 7}, tokenEOF, ""},
- })
-}
-
-func TestLexSpace(t *testing.T) {
- testQLFlow(t, "foo bar baz", []token{
- {toml.Position{1, 1}, tokenKey, "foo"},
- {toml.Position{1, 5}, tokenKey, "bar"},
- {toml.Position{1, 9}, tokenKey, "baz"},
- {toml.Position{1, 12}, tokenEOF, ""},
- })
-}
-
-func TestLexInteger(t *testing.T) {
- testQLFlow(t, "100 +200 -300", []token{
- {toml.Position{1, 1}, tokenInteger, "100"},
- {toml.Position{1, 5}, tokenInteger, "+200"},
- {toml.Position{1, 10}, tokenInteger, "-300"},
- {toml.Position{1, 14}, tokenEOF, ""},
- })
-}
-
-func TestLexFloat(t *testing.T) {
- testQLFlow(t, "100.0 +200.0 -300.0", []token{
- {toml.Position{1, 1}, tokenFloat, "100.0"},
- {toml.Position{1, 7}, tokenFloat, "+200.0"},
- {toml.Position{1, 14}, tokenFloat, "-300.0"},
- {toml.Position{1, 20}, tokenEOF, ""},
- })
-}
-
-func TestLexFloatWithMultipleDots(t *testing.T) {
- testQLFlow(t, "4.2.", []token{
- {toml.Position{1, 1}, tokenError, "cannot have two dots in one float"},
- })
-}
-
-func TestLexFloatLeadingDot(t *testing.T) {
- testQLFlow(t, "+.1", []token{
- {toml.Position{1, 1}, tokenError, "cannot start float with a dot"},
- })
-}
-
-func TestLexFloatWithTrailingDot(t *testing.T) {
- testQLFlow(t, "42.", []token{
- {toml.Position{1, 1}, tokenError, "float cannot end with a dot"},
- })
-}
-
-func TestLexNumberWithoutDigit(t *testing.T) {
- testQLFlow(t, "+", []token{
- {toml.Position{1, 1}, tokenError, "no digit in that number"},
- })
-}
-
-func TestLexUnknown(t *testing.T) {
- testQLFlow(t, "^", []token{
- {toml.Position{1, 1}, tokenError, "unexpected char: '94'"},
- })
-}
diff --git a/vendor/github.com/pelletier/go-toml/query/match.go b/vendor/github.com/pelletier/go-toml/query/match.go
deleted file mode 100644
index d7bb15a45..000000000
--- a/vendor/github.com/pelletier/go-toml/query/match.go
+++ /dev/null
@@ -1,232 +0,0 @@
-package query
-
-import (
- "fmt"
- "github.com/pelletier/go-toml"
-)
-
-// base match
-type matchBase struct {
- next pathFn
-}
-
-func (f *matchBase) setNext(next pathFn) {
- f.next = next
-}
-
-// terminating functor - gathers results
-type terminatingFn struct {
- // empty
-}
-
-func newTerminatingFn() *terminatingFn {
- return &terminatingFn{}
-}
-
-func (f *terminatingFn) setNext(next pathFn) {
- // do nothing
-}
-
-func (f *terminatingFn) call(node interface{}, ctx *queryContext) {
- ctx.result.appendResult(node, ctx.lastPosition)
-}
-
-// match single key
-type matchKeyFn struct {
- matchBase
- Name string
-}
-
-func newMatchKeyFn(name string) *matchKeyFn {
- return &matchKeyFn{Name: name}
-}
-
-func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
- if array, ok := node.([]*toml.Tree); ok {
- for _, tree := range array {
- item := tree.Get(f.Name)
- if item != nil {
- ctx.lastPosition = tree.GetPosition(f.Name)
- f.next.call(item, ctx)
- }
- }
- } else if tree, ok := node.(*toml.Tree); ok {
- item := tree.Get(f.Name)
- if item != nil {
- ctx.lastPosition = tree.GetPosition(f.Name)
- f.next.call(item, ctx)
- }
- }
-}
-
-// match single index
-type matchIndexFn struct {
- matchBase
- Idx int
-}
-
-func newMatchIndexFn(idx int) *matchIndexFn {
- return &matchIndexFn{Idx: idx}
-}
-
-func (f *matchIndexFn) call(node interface{}, ctx *queryContext) {
- if arr, ok := node.([]interface{}); ok {
- if f.Idx < len(arr) && f.Idx >= 0 {
- if treesArray, ok := node.([]*toml.Tree); ok {
- if len(treesArray) > 0 {
- ctx.lastPosition = treesArray[0].Position()
- }
- }
- f.next.call(arr[f.Idx], ctx)
- }
- }
-}
-
-// filter by slicing
-type matchSliceFn struct {
- matchBase
- Start, End, Step int
-}
-
-func newMatchSliceFn(start, end, step int) *matchSliceFn {
- return &matchSliceFn{Start: start, End: end, Step: step}
-}
-
-func (f *matchSliceFn) call(node interface{}, ctx *queryContext) {
- if arr, ok := node.([]interface{}); ok {
- // adjust indexes for negative values, reverse ordering
- realStart, realEnd := f.Start, f.End
- if realStart < 0 {
- realStart = len(arr) + realStart
- }
- if realEnd < 0 {
- realEnd = len(arr) + realEnd
- }
- if realEnd < realStart {
- realEnd, realStart = realStart, realEnd // swap
- }
- // loop and gather
- for idx := realStart; idx < realEnd; idx += f.Step {
- if treesArray, ok := node.([]*toml.Tree); ok {
- if len(treesArray) > 0 {
- ctx.lastPosition = treesArray[0].Position()
- }
- }
- f.next.call(arr[idx], ctx)
- }
- }
-}
-
-// match anything
-type matchAnyFn struct {
- matchBase
-}
-
-func newMatchAnyFn() *matchAnyFn {
- return &matchAnyFn{}
-}
-
-func (f *matchAnyFn) call(node interface{}, ctx *queryContext) {
- if tree, ok := node.(*toml.Tree); ok {
- for _, k := range tree.Keys() {
- v := tree.Get(k)
- ctx.lastPosition = tree.GetPosition(k)
- f.next.call(v, ctx)
- }
- }
-}
-
-// filter through union
-type matchUnionFn struct {
- Union []pathFn
-}
-
-func (f *matchUnionFn) setNext(next pathFn) {
- for _, fn := range f.Union {
- fn.setNext(next)
- }
-}
-
-func (f *matchUnionFn) call(node interface{}, ctx *queryContext) {
- for _, fn := range f.Union {
- fn.call(node, ctx)
- }
-}
-
-// match every single last node in the tree
-type matchRecursiveFn struct {
- matchBase
-}
-
-func newMatchRecursiveFn() *matchRecursiveFn {
- return &matchRecursiveFn{}
-}
-
-func (f *matchRecursiveFn) call(node interface{}, ctx *queryContext) {
- originalPosition := ctx.lastPosition
- if tree, ok := node.(*toml.Tree); ok {
- var visit func(tree *toml.Tree)
- visit = func(tree *toml.Tree) {
- for _, k := range tree.Keys() {
- v := tree.Get(k)
- ctx.lastPosition = tree.GetPosition(k)
- f.next.call(v, ctx)
- switch node := v.(type) {
- case *toml.Tree:
- visit(node)
- case []*toml.Tree:
- for _, subtree := range node {
- visit(subtree)
- }
- }
- }
- }
- ctx.lastPosition = originalPosition
- f.next.call(tree, ctx)
- visit(tree)
- }
-}
-
-// match based on an externally provided functional filter
-type matchFilterFn struct {
- matchBase
- Pos toml.Position
- Name string
-}
-
-func newMatchFilterFn(name string, pos toml.Position) *matchFilterFn {
- return &matchFilterFn{Name: name, Pos: pos}
-}
-
-func (f *matchFilterFn) call(node interface{}, ctx *queryContext) {
- fn, ok := (*ctx.filters)[f.Name]
- if !ok {
- panic(fmt.Sprintf("%s: query context does not have filter '%s'",
- f.Pos.String(), f.Name))
- }
- switch castNode := node.(type) {
- case *toml.Tree:
- for _, k := range castNode.Keys() {
- v := castNode.Get(k)
- if fn(v) {
- ctx.lastPosition = castNode.GetPosition(k)
- f.next.call(v, ctx)
- }
- }
- case []*toml.Tree:
- for _, v := range castNode {
- if fn(v) {
- if len(castNode) > 0 {
- ctx.lastPosition = castNode[0].Position()
- }
- f.next.call(v, ctx)
- }
- }
- case []interface{}:
- for _, v := range castNode {
- if fn(v) {
- f.next.call(v, ctx)
- }
- }
- }
-}
diff --git a/vendor/github.com/pelletier/go-toml/query/match_test.go b/vendor/github.com/pelletier/go-toml/query/match_test.go
deleted file mode 100644
index 429b8f6b9..000000000
--- a/vendor/github.com/pelletier/go-toml/query/match_test.go
+++ /dev/null
@@ -1,202 +0,0 @@
-package query
-
-import (
- "fmt"
- "github.com/pelletier/go-toml"
- "testing"
-)
-
-// dump path tree to a string
-func pathString(root pathFn) string {
- result := fmt.Sprintf("%T:", root)
- switch fn := root.(type) {
- case *terminatingFn:
- result += "{}"
- case *matchKeyFn:
- result += fmt.Sprintf("{%s}", fn.Name)
- result += pathString(fn.next)
- case *matchIndexFn:
- result += fmt.Sprintf("{%d}", fn.Idx)
- result += pathString(fn.next)
- case *matchSliceFn:
- result += fmt.Sprintf("{%d:%d:%d}",
- fn.Start, fn.End, fn.Step)
- result += pathString(fn.next)
- case *matchAnyFn:
- result += "{}"
- result += pathString(fn.next)
- case *matchUnionFn:
- result += "{["
- for _, v := range fn.Union {
- result += pathString(v) + ", "
- }
- result += "]}"
- case *matchRecursiveFn:
- result += "{}"
- result += pathString(fn.next)
- case *matchFilterFn:
- result += fmt.Sprintf("{%s}", fn.Name)
- result += pathString(fn.next)
- }
- return result
-}
-
-func assertPathMatch(t *testing.T, path, ref *Query) bool {
- pathStr := pathString(path.root)
- refStr := pathString(ref.root)
- if pathStr != refStr {
- t.Errorf("paths do not match")
- t.Log("test:", pathStr)
- t.Log("ref: ", refStr)
- return false
- }
- return true
-}
-
-func assertPath(t *testing.T, query string, ref *Query) {
- path, _ := parseQuery(lexQuery(query))
- assertPathMatch(t, path, ref)
-}
-
-func buildPath(parts ...pathFn) *Query {
- query := newQuery()
- for _, v := range parts {
- query.appendPath(v)
- }
- return query
-}
-
-func TestPathRoot(t *testing.T) {
- assertPath(t,
- "$",
- buildPath(
- // empty
- ))
-}
-
-func TestPathKey(t *testing.T) {
- assertPath(t,
- "$.foo",
- buildPath(
- newMatchKeyFn("foo"),
- ))
-}
-
-func TestPathBracketKey(t *testing.T) {
- assertPath(t,
- "$[foo]",
- buildPath(
- newMatchKeyFn("foo"),
- ))
-}
-
-func TestPathBracketStringKey(t *testing.T) {
- assertPath(t,
- "$['foo']",
- buildPath(
- newMatchKeyFn("foo"),
- ))
-}
-
-func TestPathIndex(t *testing.T) {
- assertPath(t,
- "$[123]",
- buildPath(
- newMatchIndexFn(123),
- ))
-}
-
-func TestPathSliceStart(t *testing.T) {
- assertPath(t,
- "$[123:]",
- buildPath(
- newMatchSliceFn(123, maxInt, 1),
- ))
-}
-
-func TestPathSliceStartEnd(t *testing.T) {
- assertPath(t,
- "$[123:456]",
- buildPath(
- newMatchSliceFn(123, 456, 1),
- ))
-}
-
-func TestPathSliceStartEndColon(t *testing.T) {
- assertPath(t,
- "$[123:456:]",
- buildPath(
- newMatchSliceFn(123, 456, 1),
- ))
-}
-
-func TestPathSliceStartStep(t *testing.T) {
- assertPath(t,
- "$[123::7]",
- buildPath(
- newMatchSliceFn(123, maxInt, 7),
- ))
-}
-
-func TestPathSliceEndStep(t *testing.T) {
- assertPath(t,
- "$[:456:7]",
- buildPath(
- newMatchSliceFn(0, 456, 7),
- ))
-}
-
-func TestPathSliceStep(t *testing.T) {
- assertPath(t,
- "$[::7]",
- buildPath(
- newMatchSliceFn(0, maxInt, 7),
- ))
-}
-
-func TestPathSliceAll(t *testing.T) {
- assertPath(t,
- "$[123:456:7]",
- buildPath(
- newMatchSliceFn(123, 456, 7),
- ))
-}
-
-func TestPathAny(t *testing.T) {
- assertPath(t,
- "$.*",
- buildPath(
- newMatchAnyFn(),
- ))
-}
-
-func TestPathUnion(t *testing.T) {
- assertPath(t,
- "$[foo, bar, baz]",
- buildPath(
- &matchUnionFn{[]pathFn{
- newMatchKeyFn("foo"),
- newMatchKeyFn("bar"),
- newMatchKeyFn("baz"),
- }},
- ))
-}
-
-func TestPathRecurse(t *testing.T) {
- assertPath(t,
- "$..*",
- buildPath(
- newMatchRecursiveFn(),
- ))
-}
-
-func TestPathFilterExpr(t *testing.T) {
- assertPath(t,
- "$[?('foo'),?(bar)]",
- buildPath(
- &matchUnionFn{[]pathFn{
- newMatchFilterFn("foo", toml.Position{}),
- newMatchFilterFn("bar", toml.Position{}),
- }},
- ))
-}
diff --git a/vendor/github.com/pelletier/go-toml/query/parser.go b/vendor/github.com/pelletier/go-toml/query/parser.go
deleted file mode 100644
index 5f69b70d4..000000000
--- a/vendor/github.com/pelletier/go-toml/query/parser.go
+++ /dev/null
@@ -1,275 +0,0 @@
-/*
- Based on the "jsonpath" spec/concept.
-
- http://goessner.net/articles/JsonPath/
- https://code.google.com/p/json-path/
-*/
-
-package query
-
-import (
- "fmt"
-)
-
-const maxInt = int(^uint(0) >> 1)
-
-type queryParser struct {
- flow chan token
- tokensBuffer []token
- query *Query
- union []pathFn
- err error
-}
-
-type queryParserStateFn func() queryParserStateFn
-
-// Formats an error message based on a token and ends the parse by setting p.err
-func (p *queryParser) parseError(tok *token, msg string, args ...interface{}) queryParserStateFn {
- p.err = fmt.Errorf(tok.Position.String()+": "+msg, args...)
- return nil // trigger parse to end
-}
-
-func (p *queryParser) run() {
- for state := p.parseStart; state != nil; {
- state = state()
- }
-}
-
-func (p *queryParser) backup(tok *token) {
- p.tokensBuffer = append(p.tokensBuffer, *tok)
-}
-
-func (p *queryParser) peek() *token {
- if len(p.tokensBuffer) != 0 {
- return &(p.tokensBuffer[0])
- }
-
- tok, ok := <-p.flow
- if !ok {
- return nil
- }
- p.backup(&tok)
- return &tok
-}
-
-func (p *queryParser) lookahead(types ...tokenType) bool {
- result := true
- buffer := []token{}
-
- for _, typ := range types {
- tok := p.getToken()
- if tok == nil {
- result = false
- break
- }
- buffer = append(buffer, *tok)
- if tok.typ != typ {
- result = false
- break
- }
- }
- // add the tokens back to the buffer, and return
- p.tokensBuffer = append(p.tokensBuffer, buffer...)
- return result
-}
-
-func (p *queryParser) getToken() *token {
- if len(p.tokensBuffer) != 0 {
- tok := p.tokensBuffer[0]
- p.tokensBuffer = p.tokensBuffer[1:]
- return &tok
- }
- tok, ok := <-p.flow
- if !ok {
- return nil
- }
- return &tok
-}
-
-func (p *queryParser) parseStart() queryParserStateFn {
- tok := p.getToken()
-
- if tok == nil || tok.typ == tokenEOF {
- return nil
- }
-
- if tok.typ != tokenDollar {
- return p.parseError(tok, "Expected '$' at start of expression")
- }
-
- return p.parseMatchExpr
-}
-
-// handle '.' prefix, '[]', and '..'
-func (p *queryParser) parseMatchExpr() queryParserStateFn {
- tok := p.getToken()
- switch tok.typ {
- case tokenDotDot:
- p.query.appendPath(&matchRecursiveFn{})
- // nested parse for '..'
- tok := p.getToken()
- switch tok.typ {
- case tokenKey:
- p.query.appendPath(newMatchKeyFn(tok.val))
- return p.parseMatchExpr
- case tokenLeftBracket:
- return p.parseBracketExpr
- case tokenStar:
- // do nothing - the recursive predicate is enough
- return p.parseMatchExpr
- }
-
- case tokenDot:
- // nested parse for '.'
- tok := p.getToken()
- switch tok.typ {
- case tokenKey:
- p.query.appendPath(newMatchKeyFn(tok.val))
- return p.parseMatchExpr
- case tokenStar:
- p.query.appendPath(&matchAnyFn{})
- return p.parseMatchExpr
- }
-
- case tokenLeftBracket:
- return p.parseBracketExpr
-
- case tokenEOF:
- return nil // allow EOF at this stage
- }
- return p.parseError(tok, "expected match expression")
-}
-
-func (p *queryParser) parseBracketExpr() queryParserStateFn {
- if p.lookahead(tokenInteger, tokenColon) {
- return p.parseSliceExpr
- }
- if p.peek().typ == tokenColon {
- return p.parseSliceExpr
- }
- return p.parseUnionExpr
-}
-
-func (p *queryParser) parseUnionExpr() queryParserStateFn {
- var tok *token
-
- // this state can be traversed after some sub-expressions
- // so be careful when setting up state in the parser
- if p.union == nil {
- p.union = []pathFn{}
- }
-
-loop: // labeled loop for easy breaking
- for {
- if len(p.union) > 0 {
- // parse delimiter or terminator
- tok = p.getToken()
- switch tok.typ {
- case tokenComma:
- // do nothing
- case tokenRightBracket:
- break loop
- default:
- return p.parseError(tok, "expected ',' or ']', not '%s'", tok.val)
- }
- }
-
- // parse sub expression
- tok = p.getToken()
- switch tok.typ {
- case tokenInteger:
- p.union = append(p.union, newMatchIndexFn(tok.Int()))
- case tokenKey:
- p.union = append(p.union, newMatchKeyFn(tok.val))
- case tokenString:
- p.union = append(p.union, newMatchKeyFn(tok.val))
- case tokenQuestion:
- return p.parseFilterExpr
- default:
- return p.parseError(tok, "expected union sub expression, not '%s', %d", tok.val, len(p.union))
- }
- }
-
- // if there is only one sub-expression, use that instead
- if len(p.union) == 1 {
- p.query.appendPath(p.union[0])
- } else {
- p.query.appendPath(&matchUnionFn{p.union})
- }
-
- p.union = nil // clear out state
- return p.parseMatchExpr
-}
-
-func (p *queryParser) parseSliceExpr() queryParserStateFn {
- // init slice to grab all elements
- start, end, step := 0, maxInt, 1
-
- // parse optional start
- tok := p.getToken()
- if tok.typ == tokenInteger {
- start = tok.Int()
- tok = p.getToken()
- }
- if tok.typ != tokenColon {
- return p.parseError(tok, "expected ':'")
- }
-
- // parse optional end
- tok = p.getToken()
- if tok.typ == tokenInteger {
- end = tok.Int()
- tok = p.getToken()
- }
- if tok.typ == tokenRightBracket {
- p.query.appendPath(newMatchSliceFn(start, end, step))
- return p.parseMatchExpr
- }
- if tok.typ != tokenColon {
- return p.parseError(tok, "expected ']' or ':'")
- }
-
- // parse optional step
- tok = p.getToken()
- if tok.typ == tokenInteger {
- step = tok.Int()
- if step < 0 {
- return p.parseError(tok, "step must be a positive value")
- }
- tok = p.getToken()
- }
- if tok.typ != tokenRightBracket {
- return p.parseError(tok, "expected ']'")
- }
-
- p.query.appendPath(newMatchSliceFn(start, end, step))
- return p.parseMatchExpr
-}
-
-func (p *queryParser) parseFilterExpr() queryParserStateFn {
- tok := p.getToken()
- if tok.typ != tokenLeftParen {
- return p.parseError(tok, "expected left-parenthesis for filter expression")
- }
- tok = p.getToken()
- if tok.typ != tokenKey && tok.typ != tokenString {
- return p.parseError(tok, "expected key or string for filter function name")
- }
- name := tok.val
- tok = p.getToken()
- if tok.typ != tokenRightParen {
- return p.parseError(tok, "expected right-parenthesis for filter expression")
- }
- p.union = append(p.union, newMatchFilterFn(name, tok.Position))
- return p.parseUnionExpr
-}
-
-func parseQuery(flow chan token) (*Query, error) {
- parser := &queryParser{
- flow: flow,
- tokensBuffer: []token{},
- query: newQuery(),
- }
- parser.run()
- return parser.query, parser.err
-}
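
Note on the slice handling above: parseSliceExpr starts from the defaults start=0, end=maxInt, step=1 and only overwrites the parts that are present, which is what the TestPathSlice* cases earlier in this diff assert. A minimal usage sketch through the exported API of this package (illustrative only; the document string and numbers are invented, not taken from the removed files):

    package main

    import (
        "fmt"

        "github.com/pelletier/go-toml"
        "github.com/pelletier/go-toml/query"
    )

    func main() {
        tree, _ := toml.Load("[foo]\na = [10, 20, 30, 40]")

        // "$.foo.a[1:3]" compiles to matchKeyFn("foo") -> matchKeyFn("a") -> matchSliceFn(1, 3, 1).
        q, err := query.Compile("$.foo.a[1:3]")
        if err != nil {
            panic(err)
        }
        fmt.Println(q.Execute(tree).Values()) // expected: [20 30]
    }
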
diff --git a/vendor/github.com/pelletier/go-toml/query/parser_test.go b/vendor/github.com/pelletier/go-toml/query/parser_test.go
deleted file mode 100644
index 473896a02..000000000
--- a/vendor/github.com/pelletier/go-toml/query/parser_test.go
+++ /dev/null
@@ -1,482 +0,0 @@
-package query
-
-import (
- "fmt"
- "github.com/pelletier/go-toml"
- "io/ioutil"
- "sort"
- "strings"
- "testing"
- "time"
-)
-
-type queryTestNode struct {
- value interface{}
- position toml.Position
-}
-
-func valueString(root interface{}) string {
- result := "" //fmt.Sprintf("%T:", root)
- switch node := root.(type) {
- case *Result:
- items := []string{}
- for i, v := range node.Values() {
- items = append(items, fmt.Sprintf("%s:%s",
- node.Positions()[i].String(), valueString(v)))
- }
- sort.Strings(items)
- result = "[" + strings.Join(items, ", ") + "]"
- case queryTestNode:
- result = fmt.Sprintf("%s:%s",
- node.position.String(), valueString(node.value))
- case []interface{}:
- items := []string{}
- for _, v := range node {
- items = append(items, valueString(v))
- }
- sort.Strings(items)
- result = "[" + strings.Join(items, ", ") + "]"
- case *toml.Tree:
- // workaround for unreliable map key ordering
- items := []string{}
- for _, k := range node.Keys() {
- v := node.GetPath([]string{k})
- items = append(items, k+":"+valueString(v))
- }
- sort.Strings(items)
- result = "{" + strings.Join(items, ", ") + "}"
- case map[string]interface{}:
- // workaround for unreliable map key ordering
- items := []string{}
- for k, v := range node {
- items = append(items, k+":"+valueString(v))
- }
- sort.Strings(items)
- result = "{" + strings.Join(items, ", ") + "}"
- case int64:
- result += fmt.Sprintf("%d", node)
- case string:
- result += "'" + node + "'"
- case float64:
- result += fmt.Sprintf("%f", node)
- case bool:
- result += fmt.Sprintf("%t", node)
- case time.Time:
- result += fmt.Sprintf("'%v'", node)
- }
- return result
-}
-
-func assertValue(t *testing.T, result, ref interface{}) {
- pathStr := valueString(result)
- refStr := valueString(ref)
- if pathStr != refStr {
- t.Errorf("values do not match")
- t.Log("test:", pathStr)
- t.Log("ref: ", refStr)
- }
-}
-
-func assertQueryPositions(t *testing.T, tomlDoc string, query string, ref []interface{}) {
- tree, err := toml.Load(tomlDoc)
- if err != nil {
- t.Errorf("Non-nil toml parse error: %v", err)
- return
- }
- q, err := Compile(query)
- if err != nil {
- t.Error(err)
- return
- }
- results := q.Execute(tree)
- assertValue(t, results, ref)
-}
-
-func TestQueryRoot(t *testing.T) {
- assertQueryPositions(t,
- "a = 42",
- "$",
- []interface{}{
- queryTestNode{
- map[string]interface{}{
- "a": int64(42),
- }, toml.Position{1, 1},
- },
- })
-}
-
-func TestQueryKey(t *testing.T) {
- assertQueryPositions(t,
- "[foo]\na = 42",
- "$.foo.a",
- []interface{}{
- queryTestNode{
- int64(42), toml.Position{2, 1},
- },
- })
-}
-
-func TestQueryKeyString(t *testing.T) {
- assertQueryPositions(t,
- "[foo]\na = 42",
- "$.foo['a']",
- []interface{}{
- queryTestNode{
- int64(42), toml.Position{2, 1},
- },
- })
-}
-
-func TestQueryIndex(t *testing.T) {
- assertQueryPositions(t,
- "[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
- "$.foo.a[5]",
- []interface{}{
- queryTestNode{
- int64(6), toml.Position{2, 1},
- },
- })
-}
-
-func TestQuerySliceRange(t *testing.T) {
- assertQueryPositions(t,
- "[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
- "$.foo.a[0:5]",
- []interface{}{
- queryTestNode{
- int64(1), toml.Position{2, 1},
- },
- queryTestNode{
- int64(2), toml.Position{2, 1},
- },
- queryTestNode{
- int64(3), toml.Position{2, 1},
- },
- queryTestNode{
- int64(4), toml.Position{2, 1},
- },
- queryTestNode{
- int64(5), toml.Position{2, 1},
- },
- })
-}
-
-func TestQuerySliceStep(t *testing.T) {
- assertQueryPositions(t,
- "[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
- "$.foo.a[0:5:2]",
- []interface{}{
- queryTestNode{
- int64(1), toml.Position{2, 1},
- },
- queryTestNode{
- int64(3), toml.Position{2, 1},
- },
- queryTestNode{
- int64(5), toml.Position{2, 1},
- },
- })
-}
-
-func TestQueryAny(t *testing.T) {
- assertQueryPositions(t,
- "[foo.bar]\na=1\nb=2\n[foo.baz]\na=3\nb=4",
- "$.foo.*",
- []interface{}{
- queryTestNode{
- map[string]interface{}{
- "a": int64(1),
- "b": int64(2),
- }, toml.Position{1, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "a": int64(3),
- "b": int64(4),
- }, toml.Position{4, 1},
- },
- })
-}
-func TestQueryUnionSimple(t *testing.T) {
- assertQueryPositions(t,
- "[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
- "$.*[bar,foo]",
- []interface{}{
- queryTestNode{
- map[string]interface{}{
- "a": int64(1),
- "b": int64(2),
- }, toml.Position{1, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "a": int64(3),
- "b": int64(4),
- }, toml.Position{4, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "a": int64(5),
- "b": int64(6),
- }, toml.Position{7, 1},
- },
- })
-}
-
-func TestQueryRecursionAll(t *testing.T) {
- assertQueryPositions(t,
- "[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
- "$..*",
- []interface{}{
- queryTestNode{
- map[string]interface{}{
- "foo": map[string]interface{}{
- "bar": map[string]interface{}{
- "a": int64(1),
- "b": int64(2),
- },
- },
- "baz": map[string]interface{}{
- "foo": map[string]interface{}{
- "a": int64(3),
- "b": int64(4),
- },
- },
- "gorf": map[string]interface{}{
- "foo": map[string]interface{}{
- "a": int64(5),
- "b": int64(6),
- },
- },
- }, toml.Position{1, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "bar": map[string]interface{}{
- "a": int64(1),
- "b": int64(2),
- },
- }, toml.Position{1, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "a": int64(1),
- "b": int64(2),
- }, toml.Position{1, 1},
- },
- queryTestNode{
- int64(1), toml.Position{2, 1},
- },
- queryTestNode{
- int64(2), toml.Position{3, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "foo": map[string]interface{}{
- "a": int64(3),
- "b": int64(4),
- },
- }, toml.Position{4, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "a": int64(3),
- "b": int64(4),
- }, toml.Position{4, 1},
- },
- queryTestNode{
- int64(3), toml.Position{5, 1},
- },
- queryTestNode{
- int64(4), toml.Position{6, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "foo": map[string]interface{}{
- "a": int64(5),
- "b": int64(6),
- },
- }, toml.Position{7, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "a": int64(5),
- "b": int64(6),
- }, toml.Position{7, 1},
- },
- queryTestNode{
- int64(5), toml.Position{8, 1},
- },
- queryTestNode{
- int64(6), toml.Position{9, 1},
- },
- })
-}
-
-func TestQueryRecursionUnionSimple(t *testing.T) {
- assertQueryPositions(t,
- "[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
- "$..['foo','bar']",
- []interface{}{
- queryTestNode{
- map[string]interface{}{
- "bar": map[string]interface{}{
- "a": int64(1),
- "b": int64(2),
- },
- }, toml.Position{1, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "a": int64(3),
- "b": int64(4),
- }, toml.Position{4, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "a": int64(1),
- "b": int64(2),
- }, toml.Position{1, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "a": int64(5),
- "b": int64(6),
- }, toml.Position{7, 1},
- },
- })
-}
-
-func TestQueryFilterFn(t *testing.T) {
- buff, err := ioutil.ReadFile("../example.toml")
- if err != nil {
- t.Error(err)
- return
- }
-
- assertQueryPositions(t, string(buff),
- "$..[?(int)]",
- []interface{}{
- queryTestNode{
- int64(8001), toml.Position{13, 1},
- },
- queryTestNode{
- int64(8001), toml.Position{13, 1},
- },
- queryTestNode{
- int64(8002), toml.Position{13, 1},
- },
- queryTestNode{
- int64(5000), toml.Position{14, 1},
- },
- })
-
- assertQueryPositions(t, string(buff),
- "$..[?(string)]",
- []interface{}{
- queryTestNode{
- "TOML Example", toml.Position{3, 1},
- },
- queryTestNode{
- "Tom Preston-Werner", toml.Position{6, 1},
- },
- queryTestNode{
- "GitHub", toml.Position{7, 1},
- },
- queryTestNode{
- "GitHub Cofounder & CEO\nLikes tater tots and beer.",
- toml.Position{8, 1},
- },
- queryTestNode{
- "192.168.1.1", toml.Position{12, 1},
- },
- queryTestNode{
- "10.0.0.1", toml.Position{21, 3},
- },
- queryTestNode{
- "eqdc10", toml.Position{22, 3},
- },
- queryTestNode{
- "10.0.0.2", toml.Position{25, 3},
- },
- queryTestNode{
- "eqdc10", toml.Position{26, 3},
- },
- })
-
- assertQueryPositions(t, string(buff),
- "$..[?(float)]",
- []interface{}{
- // no float values in document
- })
-
- tv, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z")
- assertQueryPositions(t, string(buff),
- "$..[?(tree)]",
- []interface{}{
- queryTestNode{
- map[string]interface{}{
- "name": "Tom Preston-Werner",
- "organization": "GitHub",
- "bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
- "dob": tv,
- }, toml.Position{5, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "server": "192.168.1.1",
- "ports": []interface{}{int64(8001), int64(8001), int64(8002)},
- "connection_max": int64(5000),
- "enabled": true,
- }, toml.Position{11, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "alpha": map[string]interface{}{
- "ip": "10.0.0.1",
- "dc": "eqdc10",
- },
- "beta": map[string]interface{}{
- "ip": "10.0.0.2",
- "dc": "eqdc10",
- },
- }, toml.Position{17, 1},
- },
- queryTestNode{
- map[string]interface{}{
- "ip": "10.0.0.1",
- "dc": "eqdc10",
- }, toml.Position{20, 3},
- },
- queryTestNode{
- map[string]interface{}{
- "ip": "10.0.0.2",
- "dc": "eqdc10",
- }, toml.Position{24, 3},
- },
- queryTestNode{
- map[string]interface{}{
- "data": []interface{}{
- []interface{}{"gamma", "delta"},
- []interface{}{int64(1), int64(2)},
- },
- }, toml.Position{28, 1},
- },
- })
-
- assertQueryPositions(t, string(buff),
- "$..[?(time)]",
- []interface{}{
- queryTestNode{
- tv, toml.Position{9, 1},
- },
- })
-
- assertQueryPositions(t, string(buff),
- "$..[?(bool)]",
- []interface{}{
- queryTestNode{
- true, toml.Position{15, 1},
- },
- })
-}
diff --git a/vendor/github.com/pelletier/go-toml/query/query.go b/vendor/github.com/pelletier/go-toml/query/query.go
deleted file mode 100644
index 1c6cd8014..000000000
--- a/vendor/github.com/pelletier/go-toml/query/query.go
+++ /dev/null
@@ -1,158 +0,0 @@
-package query
-
-import (
- "time"
-
- "github.com/pelletier/go-toml"
-)
-
-// NodeFilterFn represents a user-defined filter function, for use with
-// Query.SetFilter().
-//
-// The return value of the function must indicate if 'node' is to be included
-// at this stage of the TOML path. Returning true will include the node, and
-// returning false will exclude it.
-//
-// NOTE: Care should be taken to write filter callbacks such that they are safe
-// to use from multiple goroutines.
-type NodeFilterFn func(node interface{}) bool
-
-// Result is the result of Executing a Query.
-type Result struct {
- items []interface{}
- positions []toml.Position
-}
-
-// appends a value/position pair to the result set.
-func (r *Result) appendResult(node interface{}, pos toml.Position) {
- r.items = append(r.items, node)
- r.positions = append(r.positions, pos)
-}
-
-// Values is a set of values within a Result. The order of values is not
-// guaranteed to be in document order, and may be different each time a query is
-// executed.
-func (r Result) Values() []interface{} {
- return r.items
-}
-
-// Positions is a set of positions for values within a Result. Each index
-// in Positions() corresponds to the entry in Values() of the same index.
-func (r Result) Positions() []toml.Position {
- return r.positions
-}
-
-// runtime context for executing query paths
-type queryContext struct {
- result *Result
- filters *map[string]NodeFilterFn
- lastPosition toml.Position
-}
-
-// generic path functor interface
-type pathFn interface {
- setNext(next pathFn)
- // it is the caller's responsibility to set the ctx.lastPosition before invoking call()
- // node can be one of: *toml.Tree, []*toml.Tree, or a scalar
- call(node interface{}, ctx *queryContext)
-}
-
-// A Query is the representation of a compiled TOML path. A Query is safe
-// for concurrent use by multiple goroutines.
-type Query struct {
- root pathFn
- tail pathFn
- filters *map[string]NodeFilterFn
-}
-
-func newQuery() *Query {
- return &Query{
- root: nil,
- tail: nil,
- filters: &defaultFilterFunctions,
- }
-}
-
-func (q *Query) appendPath(next pathFn) {
- if q.root == nil {
- q.root = next
- } else {
- q.tail.setNext(next)
- }
- q.tail = next
- next.setNext(newTerminatingFn()) // init the next functor
-}
-
-// Compile compiles a TOML path expression. The returned Query can be used
-// to match elements within a Tree and its descendants. See Execute.
-func Compile(path string) (*Query, error) {
- return parseQuery(lexQuery(path))
-}
-
-// Execute executes a query against a Tree, and returns the result of the query.
-func (q *Query) Execute(tree *toml.Tree) *Result {
- result := &Result{
- items: []interface{}{},
- positions: []toml.Position{},
- }
- if q.root == nil {
- result.appendResult(tree, tree.GetPosition(""))
- } else {
- ctx := &queryContext{
- result: result,
- filters: q.filters,
- }
- ctx.lastPosition = tree.Position()
- q.root.call(tree, ctx)
- }
- return result
-}
-
-// CompileAndExecute is a shorthand for Compile(path) followed by Execute(tree).
-func CompileAndExecute(path string, tree *toml.Tree) (*Result, error) {
- query, err := Compile(path)
- if err != nil {
- return nil, err
- }
- return query.Execute(tree), nil
-}
-
-// SetFilter sets a user-defined filter function. These may be used inside
-// "?(..)" query expressions to filter TOML document elements within a query.
-func (q *Query) SetFilter(name string, fn NodeFilterFn) {
- if q.filters == &defaultFilterFunctions {
- // clone the static table
- q.filters = &map[string]NodeFilterFn{}
- for k, v := range defaultFilterFunctions {
- (*q.filters)[k] = v
- }
- }
- (*q.filters)[name] = fn
-}
-
-var defaultFilterFunctions = map[string]NodeFilterFn{
- "tree": func(node interface{}) bool {
- _, ok := node.(*toml.Tree)
- return ok
- },
- "int": func(node interface{}) bool {
- _, ok := node.(int64)
- return ok
- },
- "float": func(node interface{}) bool {
- _, ok := node.(float64)
- return ok
- },
- "string": func(node interface{}) bool {
- _, ok := node.(string)
- return ok
- },
- "time": func(node interface{}) bool {
- _, ok := node.(time.Time)
- return ok
- },
- "bool": func(node interface{}) bool {
- _, ok := node.(bool)
- return ok
- },
-}
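
As the SetFilter code above shows, the shared defaultFilterFunctions table is cloned on first use, so attaching a custom filter to one Query leaves the built-in tree/int/float/string/time/bool filters of every other Query untouched. A short sketch of the intended usage (illustrative only; the filter name bigInt and the sample document are invented, not taken from the removed files):

    package main

    import (
        "fmt"

        "github.com/pelletier/go-toml"
        "github.com/pelletier/go-toml/query"
    )

    func main() {
        tree, _ := toml.Load("a = 1\nb = \"two\"\nc = 3")

        // "?(name)" refers to a filter by name; the function itself is attached afterwards.
        q, _ := query.Compile("$[?(bigInt)]")
        q.SetFilter("bigInt", func(node interface{}) bool {
            n, ok := node.(int64)
            return ok && n > 2
        })

        fmt.Println(q.Execute(tree).Values()) // expected: [3]
    }
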
diff --git a/vendor/github.com/pelletier/go-toml/query/query_test.go b/vendor/github.com/pelletier/go-toml/query/query_test.go
deleted file mode 100644
index 903a8dc73..000000000
--- a/vendor/github.com/pelletier/go-toml/query/query_test.go
+++ /dev/null
@@ -1,157 +0,0 @@
-package query
-
-import (
- "fmt"
- "testing"
-
- "github.com/pelletier/go-toml"
-)
-
-func assertArrayContainsInAnyOrder(t *testing.T, array []interface{}, objects ...interface{}) {
- if len(array) != len(objects) {
- t.Fatalf("array contains %d objects but %d are expected", len(array), len(objects))
- }
-
- for _, o := range objects {
- found := false
- for _, a := range array {
- if a == o {
- found = true
- break
- }
- }
- if !found {
- t.Fatal(o, "not found in array", array)
- }
- }
-}
-
-func TestQueryExample(t *testing.T) {
- config, _ := toml.Load(`
- [[book]]
- title = "The Stand"
- author = "Stephen King"
- [[book]]
- title = "For Whom the Bell Tolls"
- author = "Ernest Hemmingway"
- [[book]]
- title = "Neuromancer"
- author = "William Gibson"
- `)
- authors, err := CompileAndExecute("$.book.author", config)
- if err != nil {
- t.Fatal("unexpected error:", err)
- }
- names := authors.Values()
- if len(names) != 3 {
- t.Fatalf("query should return 3 names but returned %d", len(names))
- }
- assertArrayContainsInAnyOrder(t, names, "Stephen King", "Ernest Hemmingway", "William Gibson")
-}
-
-func TestQueryReadmeExample(t *testing.T) {
- config, _ := toml.Load(`
-[postgres]
-user = "pelletier"
-password = "mypassword"
-`)
-
- query, err := Compile("$..[user,password]")
- if err != nil {
- t.Fatal("unexpected error:", err)
- }
- results := query.Execute(config)
- values := results.Values()
- if len(values) != 2 {
- t.Fatalf("query should return 2 values but returned %d", len(values))
- }
- assertArrayContainsInAnyOrder(t, values, "pelletier", "mypassword")
-}
-
-func TestQueryPathNotPresent(t *testing.T) {
- config, _ := toml.Load(`a = "hello"`)
- query, err := Compile("$.foo.bar")
- if err != nil {
- t.Fatal("unexpected error:", err)
- }
- results := query.Execute(config)
- if err != nil {
- t.Fatalf("err should be nil. got %s instead", err)
- }
- if len(results.items) != 0 {
- t.Fatalf("no items should be matched. %d matched instead", len(results.items))
- }
-}
-
-func ExampleNodeFilterFn_filterExample() {
- tree, _ := toml.Load(`
- [struct_one]
- foo = "foo"
- bar = "bar"
-
- [struct_two]
- baz = "baz"
- gorf = "gorf"
- `)
-
- // create a query that references a user-defined-filter
- query, _ := Compile("$[?(bazOnly)]")
-
- // define the filter, and assign it to the query
- query.SetFilter("bazOnly", func(node interface{}) bool {
- if tree, ok := node.(*toml.Tree); ok {
- return tree.Has("baz")
- }
- return false // reject all other node types
- })
-
- // results contain only the 'struct_two' Tree
- query.Execute(tree)
-}
-
-func ExampleQuery_queryExample() {
- config, _ := toml.Load(`
- [[book]]
- title = "The Stand"
- author = "Stephen King"
- [[book]]
- title = "For Whom the Bell Tolls"
- author = "Ernest Hemmingway"
- [[book]]
- title = "Neuromancer"
- author = "William Gibson"
- `)
-
- // find and print all the authors in the document
- query, _ := Compile("$.book.author")
- authors := query.Execute(config)
- for _, name := range authors.Values() {
- fmt.Println(name)
- }
-}
-
-func TestTomlQuery(t *testing.T) {
- tree, err := toml.Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
- if err != nil {
- t.Error(err)
- return
- }
- query, err := Compile("$.foo.bar")
- if err != nil {
- t.Error(err)
- return
- }
- result := query.Execute(tree)
- values := result.Values()
- if len(values) != 1 {
- t.Errorf("Expected resultset of 1, got %d instead: %v", len(values), values)
- }
-
- if tt, ok := values[0].(*toml.Tree); !ok {
-  t.Errorf("Expected value of type *toml.Tree, got %T instead", values[0])
- } else if tt.Get("a") != int64(1) {
- t.Errorf("Expected 'a' with a value 1: %v", tt.Get("a"))
- } else if tt.Get("b") != int64(2) {
- t.Errorf("Expected 'b' with a value 2: %v", tt.Get("b"))
- }
-}
diff --git a/vendor/github.com/pelletier/go-toml/query/tokens.go b/vendor/github.com/pelletier/go-toml/query/tokens.go
deleted file mode 100644
index 9ae579de2..000000000
--- a/vendor/github.com/pelletier/go-toml/query/tokens.go
+++ /dev/null
@@ -1,106 +0,0 @@
-package query
-
-import (
- "fmt"
- "github.com/pelletier/go-toml"
- "strconv"
- "unicode"
-)
-
-// Define tokens
-type tokenType int
-
-const (
- eof = -(iota + 1)
-)
-
-const (
- tokenError tokenType = iota
- tokenEOF
- tokenKey
- tokenString
- tokenInteger
- tokenFloat
- tokenLeftBracket
- tokenRightBracket
- tokenLeftParen
- tokenRightParen
- tokenComma
- tokenColon
- tokenDollar
- tokenStar
- tokenQuestion
- tokenDot
- tokenDotDot
-)
-
-var tokenTypeNames = []string{
- "Error",
- "EOF",
- "Key",
- "String",
- "Integer",
- "Float",
- "[",
- "]",
- "(",
- ")",
- ",",
- ":",
- "$",
- "*",
- "?",
- ".",
- "..",
-}
-
-type token struct {
- toml.Position
- typ tokenType
- val string
-}
-
-func (tt tokenType) String() string {
- idx := int(tt)
- if idx < len(tokenTypeNames) {
- return tokenTypeNames[idx]
- }
- return "Unknown"
-}
-
-func (t token) Int() int {
- if result, err := strconv.Atoi(t.val); err != nil {
- panic(err)
- } else {
- return result
- }
-}
-
-func (t token) String() string {
- switch t.typ {
- case tokenEOF:
- return "EOF"
- case tokenError:
- return t.val
- }
-
- return fmt.Sprintf("%q", t.val)
-}
-
-func isSpace(r rune) bool {
- return r == ' ' || r == '\t'
-}
-
-func isAlphanumeric(r rune) bool {
- return unicode.IsLetter(r) || r == '_'
-}
-
-func isDigit(r rune) bool {
- return unicode.IsNumber(r)
-}
-
-func isHexDigit(r rune) bool {
- return isDigit(r) ||
- (r >= 'a' && r <= 'f') ||
- (r >= 'A' && r <= 'F')
-}
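
Two behaviours of the helpers above are easy to miss: tokenType.String falls back to "Unknown" once the value runs past tokenTypeNames, and token.Int panics on a non-numeric val, which is why the parser only calls it on tokenInteger tokens. A hypothetical in-package sketch (not taken from the removed files; the test name is invented):

    package query

    import "testing"

    func TestTokenHelpersSketch(t *testing.T) {
        // The last declared token type still has a name; one past it does not.
        if got := tokenDotDot.String(); got != ".." {
            t.Errorf("expected \"..\", got %q", got)
        }
        if got := (tokenDotDot + 1).String(); got != "Unknown" {
            t.Errorf("expected \"Unknown\", got %q", got)
        }

        // Int is only safe on integer tokens: a non-numeric val makes strconv.Atoi fail and Int panic.
        defer func() {
            if recover() == nil {
                t.Error("Int() should panic on a non-numeric token value")
            }
        }()
        token{typ: tokenKey, val: "foo"}.Int()
    }
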
diff --git a/vendor/github.com/pelletier/go-toml/token_test.go b/vendor/github.com/pelletier/go-toml/token_test.go
deleted file mode 100644
index 20b560d51..000000000
--- a/vendor/github.com/pelletier/go-toml/token_test.go
+++ /dev/null
@@ -1,67 +0,0 @@
-package toml
-
-import "testing"
-
-func TestTokenStringer(t *testing.T) {
- var tests = []struct {
- tt tokenType
- expect string
- }{
- {tokenError, "Error"},
- {tokenEOF, "EOF"},
- {tokenComment, "Comment"},
- {tokenKey, "Key"},
- {tokenString, "String"},
- {tokenInteger, "Integer"},
- {tokenTrue, "True"},
- {tokenFalse, "False"},
- {tokenFloat, "Float"},
- {tokenEqual, "="},
- {tokenLeftBracket, "["},
- {tokenRightBracket, "]"},
- {tokenLeftCurlyBrace, "{"},
- {tokenRightCurlyBrace, "}"},
- {tokenLeftParen, "("},
- {tokenRightParen, ")"},
- {tokenDoubleLeftBracket, "]]"},
- {tokenDoubleRightBracket, "[["},
- {tokenDate, "Date"},
- {tokenKeyGroup, "KeyGroup"},
- {tokenKeyGroupArray, "KeyGroupArray"},
- {tokenComma, ","},
- {tokenColon, ":"},
- {tokenDollar, "$"},
- {tokenStar, "*"},
- {tokenQuestion, "?"},
- {tokenDot, "."},
- {tokenDotDot, ".."},
- {tokenEOL, "EOL"},
- {tokenEOL + 1, "Unknown"},
- }
-
- for i, test := range tests {
- got := test.tt.String()
- if got != test.expect {
- t.Errorf("[%d] invalid string of token type; got %q, expected %q", i, got, test.expect)
- }
- }
-}
-
-func TestTokenString(t *testing.T) {
- var tests = []struct {
- tok token
- expect string
- }{
- {token{Position{1, 1}, tokenEOF, ""}, "EOF"},
- {token{Position{1, 1}, tokenError, "Δt"}, "Δt"},
- {token{Position{1, 1}, tokenString, "bar"}, `"bar"`},
- {token{Position{1, 1}, tokenString, "123456789012345"}, `"123456789012345"`},
- }
-
- for i, test := range tests {
- got := test.tok.String()
- if got != test.expect {
-   t.Errorf("[%d] invalid string of token; got %q, expected %q", i, got, test.expect)
- }
- }
-}
diff --git a/vendor/github.com/pelletier/go-toml/toml_test.go b/vendor/github.com/pelletier/go-toml/toml_test.go
deleted file mode 100644
index ab9c24253..000000000
--- a/vendor/github.com/pelletier/go-toml/toml_test.go
+++ /dev/null
@@ -1,106 +0,0 @@
-// Testing support for go-toml
-
-package toml
-
-import (
- "testing"
-)
-
-func TestTomlHas(t *testing.T) {
- tree, _ := Load(`
- [test]
- key = "value"
- `)
-
- if !tree.Has("test.key") {
-  t.Errorf("Has - expected test.key to exist")
- }
-
- if tree.Has("") {
- t.Errorf("Should return false if the key is not provided")
- }
-}
-
-func TestTomlGet(t *testing.T) {
- tree, _ := Load(`
- [test]
- key = "value"
- `)
-
- if tree.Get("") != tree {
- t.Errorf("Get should return the tree itself when given an empty path")
- }
-
- if tree.Get("test.key") != "value" {
- t.Errorf("Get should return the value")
- }
- if tree.Get(`\`) != nil {
- t.Errorf("should return nil when the key is malformed")
- }
-}
-
-func TestTomlGetDefault(t *testing.T) {
- tree, _ := Load(`
- [test]
- key = "value"
- `)
-
- if tree.GetDefault("", "hello") != tree {
- t.Error("GetDefault should return the tree itself when given an empty path")
- }
-
- if tree.GetDefault("test.key", "hello") != "value" {
- t.Error("Get should return the value")
- }
-
- if tree.GetDefault("whatever", "hello") != "hello" {
- t.Error("GetDefault should return the default value if the key does not exist")
- }
-}
-
-func TestTomlHasPath(t *testing.T) {
- tree, _ := Load(`
- [test]
- key = "value"
- `)
-
- if !tree.HasPath([]string{"test", "key"}) {
-  t.Errorf("HasPath - expected test.key to exist")
- }
-}
-
-func TestTomlGetPath(t *testing.T) {
- node := newTree()
- //TODO: set other node data
-
- for idx, item := range []struct {
- Path []string
- Expected *Tree
- }{
- { // empty path test
- []string{},
- node,
- },
- } {
- result := node.GetPath(item.Path)
- if result != item.Expected {
- t.Errorf("GetPath[%d] %v - expected %v, got %v instead.", idx, item.Path, item.Expected, result)
- }
- }
-
- tree, _ := Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
- if tree.GetPath([]string{"whatever"}) != nil {
- t.Error("GetPath should return nil when the key does not exist")
- }
-}
-
-func TestTomlFromMap(t *testing.T) {
- simpleMap := map[string]interface{}{"hello": 42}
- tree, err := TreeFromMap(simpleMap)
- if err != nil {
- t.Fatal("unexpected error:", err)
- }
- if tree.Get("hello") != int64(42) {
- t.Fatal("hello should be 42, not", tree.Get("hello"))
- }
-}
diff --git a/vendor/github.com/pelletier/go-toml/tomltree_create_test.go b/vendor/github.com/pelletier/go-toml/tomltree_create_test.go
deleted file mode 100644
index 3465a1066..000000000
--- a/vendor/github.com/pelletier/go-toml/tomltree_create_test.go
+++ /dev/null
@@ -1,126 +0,0 @@
-package toml
-
-import (
- "strconv"
- "testing"
- "time"
-)
-
-type customString string
-
-type stringer struct{}
-
-func (s stringer) String() string {
- return "stringer"
-}
-
-func validate(t *testing.T, path string, object interface{}) {
- switch o := object.(type) {
- case *Tree:
- for key, tree := range o.values {
- validate(t, path+"."+key, tree)
- }
- case []*Tree:
- for index, tree := range o {
- validate(t, path+"."+strconv.Itoa(index), tree)
- }
- case *tomlValue:
- switch o.value.(type) {
- case int64, uint64, bool, string, float64, time.Time,
- []int64, []uint64, []bool, []string, []float64, []time.Time:
- default:
-   t.Fatalf("tomlValue at key %s contains incorrect type %T", path, o.value)
- }
- default:
- t.Fatalf("value at key %s is of incorrect type %T", path, object)
- }
- t.Logf("validation ok %s as %T", path, object)
-}
-
-func validateTree(t *testing.T, tree *Tree) {
- validate(t, "", tree)
-}
-
-func TestTreeCreateToTree(t *testing.T) {
- data := map[string]interface{}{
- "a_string": "bar",
- "an_int": 42,
- "time": time.Now(),
- "int8": int8(2),
- "int16": int16(2),
- "int32": int32(2),
- "uint8": uint8(2),
- "uint16": uint16(2),
- "uint32": uint32(2),
- "float32": float32(2),
- "a_bool": false,
- "stringer": stringer{},
- "nested": map[string]interface{}{
- "foo": "bar",
- },
- "array": []string{"a", "b", "c"},
- "array_uint": []uint{uint(1), uint(2)},
- "array_table": []map[string]interface{}{{"sub_map": 52}},
- "array_times": []time.Time{time.Now(), time.Now()},
- "map_times": map[string]time.Time{"now": time.Now()},
- "custom_string_map_key": map[customString]interface{}{customString("custom"): "custom"},
- }
- tree, err := TreeFromMap(data)
- if err != nil {
- t.Fatal("unexpected error:", err)
- }
- validateTree(t, tree)
-}
-
-func TestTreeCreateToTreeInvalidLeafType(t *testing.T) {
- _, err := TreeFromMap(map[string]interface{}{"foo": t})
- expected := "cannot convert type *testing.T to Tree"
- if err.Error() != expected {
- t.Fatalf("expected error %s, got %s", expected, err.Error())
- }
-}
-
-func TestTreeCreateToTreeInvalidMapKeyType(t *testing.T) {
- _, err := TreeFromMap(map[string]interface{}{"foo": map[int]interface{}{2: 1}})
- expected := "map key needs to be a string, not int (int)"
- if err.Error() != expected {
- t.Fatalf("expected error %s, got %s", expected, err.Error())
- }
-}
-
-func TestTreeCreateToTreeInvalidArrayMemberType(t *testing.T) {
- _, err := TreeFromMap(map[string]interface{}{"foo": []*testing.T{t}})
- expected := "cannot convert type *testing.T to Tree"
- if err.Error() != expected {
- t.Fatalf("expected error %s, got %s", expected, err.Error())
- }
-}
-
-func TestTreeCreateToTreeInvalidTableGroupType(t *testing.T) {
- _, err := TreeFromMap(map[string]interface{}{"foo": []map[string]interface{}{{"hello": t}}})
- expected := "cannot convert type *testing.T to Tree"
- if err.Error() != expected {
- t.Fatalf("expected error %s, got %s", expected, err.Error())
- }
-}
-
-func TestRoundTripArrayOfTables(t *testing.T) {
- orig := "\n[[stuff]]\n name = \"foo\"\n things = [\"a\",\"b\"]\n"
- tree, err := Load(orig)
- if err != nil {
- t.Fatalf("unexpected error: %s", err)
- }
-
- m := tree.ToMap()
-
- tree, err = TreeFromMap(m)
- if err != nil {
- t.Fatalf("unexpected error: %s", err)
- }
- want := orig
- got := tree.String()
-
- if got != want {
- t.Errorf("want:\n%s\ngot:\n%s", want, got)
- }
-}
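
One detail implied by the validate() whitelist above: TreeFromMap normalizes leaf values to the allowed types, so smaller integer kinds come back as int64 and float32 comes back as float64 (as TestTreeCreateToTree and TestTomlFromMap rely on). A small sketch against the exported API (illustrative only; the keys are invented):

    package main

    import (
        "fmt"

        "github.com/pelletier/go-toml"
    )

    func main() {
        tree, err := toml.TreeFromMap(map[string]interface{}{
            "an_int":    42,
            "a_float32": float32(1.5),
        })
        if err != nil {
            panic(err)
        }
        fmt.Println(tree.Get("an_int") == int64(42))       // true: int widened to int64
        fmt.Println(tree.Get("a_float32") == float64(1.5)) // true: float32 widened to float64
    }
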
diff --git a/vendor/github.com/pelletier/go-toml/tomltree_write_test.go b/vendor/github.com/pelletier/go-toml/tomltree_write_test.go
deleted file mode 100644
index 206203b88..000000000
--- a/vendor/github.com/pelletier/go-toml/tomltree_write_test.go
+++ /dev/null
@@ -1,376 +0,0 @@
-package toml
-
-import (
- "bytes"
- "errors"
- "fmt"
- "reflect"
- "strings"
- "testing"
- "time"
-)
-
-type failingWriter struct {
- failAt int
- written int
- buffer bytes.Buffer
-}
-
-func (f *failingWriter) Write(p []byte) (n int, err error) {
- count := len(p)
- toWrite := f.failAt - (count + f.written)
- if toWrite < 0 {
- toWrite = 0
- }
- if toWrite > count {
- f.written += count
- f.buffer.Write(p)
- return count, nil
- }
-
- f.buffer.Write(p[:toWrite])
- f.written = f.failAt
- return toWrite, fmt.Errorf("failingWriter failed after writing %d bytes", f.written)
-}
-
-func assertErrorString(t *testing.T, expected string, err error) {
- expectedErr := errors.New(expected)
- if err == nil || err.Error() != expectedErr.Error() {
- t.Errorf("expecting error %s, but got %s instead", expected, err)
- }
-}
-
-func TestTreeWriteToEmptyTable(t *testing.T) {
- doc := `[[empty-tables]]
-[[empty-tables]]`
-
- toml, err := Load(doc)
- if err != nil {
- t.Fatal("Unexpected Load error:", err)
- }
- tomlString, err := toml.ToTomlString()
- if err != nil {
- t.Fatal("Unexpected ToTomlString error:", err)
- }
-
- expected := `
-[[empty-tables]]
-
-[[empty-tables]]
-`
-
- if tomlString != expected {
- t.Fatalf("Expected:\n%s\nGot:\n%s", expected, tomlString)
- }
-}
-
-func TestTreeWriteToTomlString(t *testing.T) {
- toml, err := Load(`name = { first = "Tom", last = "Preston-Werner" }
-points = { x = 1, y = 2 }`)
-
- if err != nil {
- t.Fatal("Unexpected error:", err)
- }
-
- tomlString, _ := toml.ToTomlString()
- reparsedTree, err := Load(tomlString)
-
- assertTree(t, reparsedTree, err, map[string]interface{}{
- "name": map[string]interface{}{
- "first": "Tom",
- "last": "Preston-Werner",
- },
- "points": map[string]interface{}{
- "x": int64(1),
- "y": int64(2),
- },
- })
-}
-
-func TestTreeWriteToTomlStringSimple(t *testing.T) {
- tree, err := Load("[foo]\n\n[[foo.bar]]\na = 42\n\n[[foo.bar]]\na = 69\n")
- if err != nil {
- t.Errorf("Test failed to parse: %v", err)
- return
- }
- result, err := tree.ToTomlString()
- if err != nil {
- t.Errorf("Unexpected error: %s", err)
- }
- expected := "\n[foo]\n\n [[foo.bar]]\n a = 42\n\n [[foo.bar]]\n a = 69\n"
- if result != expected {
-  t.Errorf("Got '%s', expected '%s'", result, expected)
- }
-}
-
-func TestTreeWriteToTomlStringKeysOrders(t *testing.T) {
- for i := 0; i < 100; i++ {
- tree, _ := Load(`
- foobar = true
- bar = "baz"
- foo = 1
- [qux]
- foo = 1
- bar = "baz2"`)
-
- stringRepr, _ := tree.ToTomlString()
-
- t.Log("Intermediate string representation:")
- t.Log(stringRepr)
-
- r := strings.NewReader(stringRepr)
- toml, err := LoadReader(r)
-
- if err != nil {
- t.Fatal("Unexpected error:", err)
- }
-
- assertTree(t, toml, err, map[string]interface{}{
- "foobar": true,
- "bar": "baz",
- "foo": 1,
- "qux": map[string]interface{}{
- "foo": 1,
- "bar": "baz2",
- },
- })
- }
-}
-
-func testMaps(t *testing.T, actual, expected map[string]interface{}) {
- if !reflect.DeepEqual(actual, expected) {
- t.Fatal("trees aren't equal.\n", "Expected:\n", expected, "\nActual:\n", actual)
- }
-}
-
-func TestTreeWriteToMapSimple(t *testing.T) {
- tree, _ := Load("a = 42\nb = 17")
-
- expected := map[string]interface{}{
- "a": int64(42),
- "b": int64(17),
- }
-
- testMaps(t, tree.ToMap(), expected)
-}
-
-func TestTreeWriteToInvalidTreeSimpleValue(t *testing.T) {
- tree := Tree{values: map[string]interface{}{"foo": int8(1)}}
- _, err := tree.ToTomlString()
- assertErrorString(t, "invalid value type at foo: int8", err)
-}
-
-func TestTreeWriteToInvalidTreeTomlValue(t *testing.T) {
- tree := Tree{values: map[string]interface{}{"foo": &tomlValue{value: int8(1), comment: "", position: Position{}}}}
- _, err := tree.ToTomlString()
- assertErrorString(t, "unsupported value type int8: 1", err)
-}
-
-func TestTreeWriteToInvalidTreeTomlValueArray(t *testing.T) {
- tree := Tree{values: map[string]interface{}{"foo": &tomlValue{value: int8(1), comment: "", position: Position{}}}}
- _, err := tree.ToTomlString()
- assertErrorString(t, "unsupported value type int8: 1", err)
-}
-
-func TestTreeWriteToFailingWriterInSimpleValue(t *testing.T) {
- toml, _ := Load(`a = 2`)
- writer := failingWriter{failAt: 0, written: 0}
- _, err := toml.WriteTo(&writer)
- assertErrorString(t, "failingWriter failed after writing 0 bytes", err)
-}
-
-func TestTreeWriteToFailingWriterInTable(t *testing.T) {
- toml, _ := Load(`
-[b]
-a = 2`)
- writer := failingWriter{failAt: 2, written: 0}
- _, err := toml.WriteTo(&writer)
- assertErrorString(t, "failingWriter failed after writing 2 bytes", err)
-
- writer = failingWriter{failAt: 13, written: 0}
- _, err = toml.WriteTo(&writer)
- assertErrorString(t, "failingWriter failed after writing 13 bytes", err)
-}
-
-func TestTreeWriteToFailingWriterInArray(t *testing.T) {
- toml, _ := Load(`
-[[b]]
-a = 2`)
- writer := failingWriter{failAt: 2, written: 0}
- _, err := toml.WriteTo(&writer)
- assertErrorString(t, "failingWriter failed after writing 2 bytes", err)
-
- writer = failingWriter{failAt: 15, written: 0}
- _, err = toml.WriteTo(&writer)
- assertErrorString(t, "failingWriter failed after writing 15 bytes", err)
-}
-
-func TestTreeWriteToMapExampleFile(t *testing.T) {
- tree, _ := LoadFile("example.toml")
- expected := map[string]interface{}{
- "title": "TOML Example",
- "owner": map[string]interface{}{
- "name": "Tom Preston-Werner",
- "organization": "GitHub",
- "bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
- "dob": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
- },
- "database": map[string]interface{}{
- "server": "192.168.1.1",
- "ports": []interface{}{int64(8001), int64(8001), int64(8002)},
- "connection_max": int64(5000),
- "enabled": true,
- },
- "servers": map[string]interface{}{
- "alpha": map[string]interface{}{
- "ip": "10.0.0.1",
- "dc": "eqdc10",
- },
- "beta": map[string]interface{}{
- "ip": "10.0.0.2",
- "dc": "eqdc10",
- },
- },
- "clients": map[string]interface{}{
- "data": []interface{}{
- []interface{}{"gamma", "delta"},
- []interface{}{int64(1), int64(2)},
- },
- },
- }
- testMaps(t, tree.ToMap(), expected)
-}
-
-func TestTreeWriteToMapWithTablesInMultipleChunks(t *testing.T) {
- tree, _ := Load(`
- [[menu.main]]
- a = "menu 1"
- b = "menu 2"
- [[menu.main]]
- c = "menu 3"
- d = "menu 4"`)
- expected := map[string]interface{}{
- "menu": map[string]interface{}{
- "main": []interface{}{
- map[string]interface{}{"a": "menu 1", "b": "menu 2"},
- map[string]interface{}{"c": "menu 3", "d": "menu 4"},
- },
- },
- }
- treeMap := tree.ToMap()
-
- testMaps(t, treeMap, expected)
-}
-
-func TestTreeWriteToMapWithArrayOfInlineTables(t *testing.T) {
- tree, _ := Load(`
- [params]
- language_tabs = [
- { key = "shell", name = "Shell" },
- { key = "ruby", name = "Ruby" },
- { key = "python", name = "Python" }
- ]`)
-
- expected := map[string]interface{}{
- "params": map[string]interface{}{
- "language_tabs": []interface{}{
- map[string]interface{}{
- "key": "shell",
- "name": "Shell",
- },
- map[string]interface{}{
- "key": "ruby",
- "name": "Ruby",
- },
- map[string]interface{}{
- "key": "python",
- "name": "Python",
- },
- },
- },
- }
-
- treeMap := tree.ToMap()
- testMaps(t, treeMap, expected)
-}
-
-func TestTreeWriteToFloat(t *testing.T) {
- tree, err := Load(`a = 3.0`)
- if err != nil {
- t.Fatal(err)
- }
- str, err := tree.ToTomlString()
- if err != nil {
- t.Fatal(err)
- }
- expected := `a = 3.0`
- if strings.TrimSpace(str) != strings.TrimSpace(expected) {
- t.Fatalf("Expected:\n%s\nGot:\n%s", expected, str)
- }
-}
-
-func TestTreeWriteToSpecialFloat(t *testing.T) {
- expected := `a = +inf
-b = -inf
-c = nan`
-
- tree, err := Load(expected)
- if err != nil {
- t.Fatal(err)
- }
- str, err := tree.ToTomlString()
- if err != nil {
- t.Fatal(err)
- }
- if strings.TrimSpace(str) != strings.TrimSpace(expected) {
- t.Fatalf("Expected:\n%s\nGot:\n%s", expected, str)
- }
-}
-
-func BenchmarkTreeToTomlString(b *testing.B) {
- toml, err := Load(sampleHard)
- if err != nil {
- b.Fatal("Unexpected error:", err)
- }
-
- for i := 0; i < b.N; i++ {
- _, err := toml.ToTomlString()
- if err != nil {
- b.Fatal(err)
- }
- }
-}
-
-var sampleHard = `# Test file for TOML
-# Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate
-# This part you'll really hate
-
-[the]
-test_string = "You'll hate me after this - #" # " Annoying, isn't it?
-
- [the.hard]
- test_array = [ "] ", " # "] # ] There you go, parse this!
- test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ]
- # You didn't think it'd as easy as chucking out the last #, did you?
- another_test_string = " Same thing, but with a string #"
- harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too"
- # Things will get harder
-
- [the.hard."bit#"]
- "what?" = "You don't think some user won't do that?"
- multi_line_array = [
- "]",
- # ] Oh yes I did
- ]
-
-# Each of the following keygroups/key value pairs should produce an error. Uncomment to them to test
-
-#[error] if you didn't catch this, your parser is broken
-#string = "Anything other than tabs, spaces and newline after a keygroup or key value pair has ended should produce an error unless it is a comment" like this
-#array = [
-# "This might most likely happen in multiline arrays",
-# Like here,
-# "or here,
-# and here"
-# ] End of array comment, forgot the #
-#number = 3.14 pi <--again forgot the # `