summaryrefslogtreecommitdiffstats
path: root/vendor/github.com/pelletier/go-toml
diff options
context:
space:
mode:
authorCorey Hulen <corey@hulen.com>2017-03-24 23:31:34 -0700
committerenahum <nahumhbl@gmail.com>2017-03-25 03:31:34 -0300
commit54d3d47daf9190275bbdaf8703b84969a4593451 (patch)
tree05899b296d0186c1a0da8a540bc486e34ad8eec9 /vendor/github.com/pelletier/go-toml
parent7460302dec7796e01c98264e84bece8169cb6ed9 (diff)
downloadchat-54d3d47daf9190275bbdaf8703b84969a4593451.tar.gz
chat-54d3d47daf9190275bbdaf8703b84969a4593451.tar.bz2
chat-54d3d47daf9190275bbdaf8703b84969a4593451.zip
PLT-6076 Adding viper libs for config file changes (#5871)
* Adding viper libs for config file changes * Removing the old fsnotify lib * updating some missing libs
Diffstat (limited to 'vendor/github.com/pelletier/go-toml')
-rw-r--r--vendor/github.com/pelletier/go-toml/.gitignore1
-rw-r--r--vendor/github.com/pelletier/go-toml/.travis.yml21
-rw-r--r--vendor/github.com/pelletier/go-toml/LICENSE21
-rw-r--r--vendor/github.com/pelletier/go-toml/README.md120
-rwxr-xr-xvendor/github.com/pelletier/go-toml/clean.sh6
-rw-r--r--vendor/github.com/pelletier/go-toml/cmd/test_program.go91
-rw-r--r--vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go67
-rw-r--r--vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go82
-rw-r--r--vendor/github.com/pelletier/go-toml/cmd/tomll/main.go61
-rw-r--r--vendor/github.com/pelletier/go-toml/doc.go250
-rw-r--r--vendor/github.com/pelletier/go-toml/doc_test.go81
-rw-r--r--vendor/github.com/pelletier/go-toml/example-crlf.toml29
-rw-r--r--vendor/github.com/pelletier/go-toml/example.toml29
-rw-r--r--vendor/github.com/pelletier/go-toml/keysparsing.go94
-rw-r--r--vendor/github.com/pelletier/go-toml/keysparsing_test.go56
-rw-r--r--vendor/github.com/pelletier/go-toml/lexer.go657
-rw-r--r--vendor/github.com/pelletier/go-toml/lexer_test.go750
-rw-r--r--vendor/github.com/pelletier/go-toml/match.go234
-rw-r--r--vendor/github.com/pelletier/go-toml/match_test.go201
-rw-r--r--vendor/github.com/pelletier/go-toml/parser.go393
-rw-r--r--vendor/github.com/pelletier/go-toml/parser_test.go785
-rw-r--r--vendor/github.com/pelletier/go-toml/position.go29
-rw-r--r--vendor/github.com/pelletier/go-toml/position_test.go29
-rw-r--r--vendor/github.com/pelletier/go-toml/query.go153
-rw-r--r--vendor/github.com/pelletier/go-toml/query_test.go70
-rw-r--r--vendor/github.com/pelletier/go-toml/querylexer.go356
-rw-r--r--vendor/github.com/pelletier/go-toml/querylexer_test.go178
-rw-r--r--vendor/github.com/pelletier/go-toml/queryparser.go275
-rw-r--r--vendor/github.com/pelletier/go-toml/queryparser_test.go483
-rwxr-xr-xvendor/github.com/pelletier/go-toml/test.sh82
-rw-r--r--vendor/github.com/pelletier/go-toml/token.go140
-rw-r--r--vendor/github.com/pelletier/go-toml/token_test.go67
-rw-r--r--vendor/github.com/pelletier/go-toml/toml.go284
-rw-r--r--vendor/github.com/pelletier/go-toml/toml_test.go131
-rw-r--r--vendor/github.com/pelletier/go-toml/tomltree_create.go135
-rw-r--r--vendor/github.com/pelletier/go-toml/tomltree_create_test.go126
-rw-r--r--vendor/github.com/pelletier/go-toml/tomltree_write.go214
-rw-r--r--vendor/github.com/pelletier/go-toml/tomltree_write_test.go271
38 files changed, 7052 insertions, 0 deletions
diff --git a/vendor/github.com/pelletier/go-toml/.gitignore b/vendor/github.com/pelletier/go-toml/.gitignore
new file mode 100644
index 000000000..f1b619018
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/.gitignore
@@ -0,0 +1 @@
+test_program/test_program_bin
diff --git a/vendor/github.com/pelletier/go-toml/.travis.yml b/vendor/github.com/pelletier/go-toml/.travis.yml
new file mode 100644
index 000000000..64f03809a
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/.travis.yml
@@ -0,0 +1,21 @@
+sudo: false
+language: go
+go:
+ - 1.6.4
+ - 1.7.5
+ - 1.8
+ - tip
+matrix:
+ allow_failures:
+ - go: tip
+ fast_finish: true
+script:
+ - ./test.sh
+before_install:
+ - go get github.com/axw/gocov/gocov
+ - go get github.com/mattn/goveralls
+ - if ! go get code.google.com/p/go.tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
+branches:
+ only: [master]
+after_success:
+ - $HOME/gopath/bin/goveralls -service=travis-ci -coverprofile=coverage.out -repotoken $COVERALLS_TOKEN
diff --git a/vendor/github.com/pelletier/go-toml/LICENSE b/vendor/github.com/pelletier/go-toml/LICENSE
new file mode 100644
index 000000000..583bdae62
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2013 - 2017 Thomas Pelletier, Eric Anderton
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/pelletier/go-toml/README.md b/vendor/github.com/pelletier/go-toml/README.md
new file mode 100644
index 000000000..b8137e022
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/README.md
@@ -0,0 +1,120 @@
+# go-toml
+
+Go library for the [TOML](https://github.com/mojombo/toml) format.
+
+This library supports TOML version
+[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md)
+
+[![GoDoc](https://godoc.org/github.com/pelletier/go-toml?status.svg)](http://godoc.org/github.com/pelletier/go-toml)
+[![license](https://img.shields.io/github/license/pelletier/go-toml.svg)](https://github.com/pelletier/go-toml/blob/master/LICENSE)
+[![Build Status](https://travis-ci.org/pelletier/go-toml.svg?branch=master)](https://travis-ci.org/pelletier/go-toml)
+[![Coverage Status](https://coveralls.io/repos/github/pelletier/go-toml/badge.svg?branch=master)](https://coveralls.io/github/pelletier/go-toml?branch=master)
+[![Go Report Card](https://goreportcard.com/badge/github.com/pelletier/go-toml)](https://goreportcard.com/report/github.com/pelletier/go-toml)
+
+## Features
+
+Go-toml provides the following features for using data parsed from TOML documents:
+
+* Load TOML documents from files and string data
+* Easily navigate TOML structure using TomlTree
+* Line & column position data for all parsed elements
+* Query support similar to JSON-Path
+* Syntax errors contain line and column numbers
+
+Go-toml is designed to help cover use-cases not covered by reflection-based TOML parsing:
+
+* Semantic evaluation of parsed TOML
+* Informing a user of mistakes in the source document, after it has been parsed
+* Programatic handling of default values on a case-by-case basis
+* Using a TOML document as a flexible data-store
+
+## Import
+
+ import "github.com/pelletier/go-toml"
+
+## Usage
+
+### Example
+
+Say you have a TOML file that looks like this:
+
+```toml
+[postgres]
+user = "pelletier"
+password = "mypassword"
+```
+
+Read the username and password like this:
+
+```go
+import (
+ "fmt"
+ "github.com/pelletier/go-toml"
+)
+
+config, err := toml.LoadFile("config.toml")
+if err != nil {
+ fmt.Println("Error ", err.Error())
+} else {
+ // retrieve data directly
+ user := config.Get("postgres.user").(string)
+ password := config.Get("postgres.password").(string)
+
+ // or using an intermediate object
+ configTree := config.Get("postgres").(*toml.TomlTree)
+ user = configTree.Get("user").(string)
+ password = configTree.Get("password").(string)
+ fmt.Println("User is ", user, ". Password is ", password)
+
+ // show where elements are in the file
+ fmt.Println("User position: %v", configTree.GetPosition("user"))
+ fmt.Println("Password position: %v", configTree.GetPosition("password"))
+
+ // use a query to gather elements without walking the tree
+ results, _ := config.Query("$..[user,password]")
+ for ii, item := range results.Values() {
+ fmt.Println("Query result %d: %v", ii, item)
+ }
+}
+```
+
+## Documentation
+
+The documentation and additional examples are available at
+[godoc.org](http://godoc.org/github.com/pelletier/go-toml).
+
+## Tools
+
+Go-toml provides two handy command line tools:
+
+* `tomll`: Reads TOML files and lint them.
+
+ ```
+ go install github.com/pelletier/go-toml/cmd/tomll
+ tomll --help
+ ```
+* `tomljson`: Reads a TOML file and outputs its JSON representation.
+
+ ```
+ go install github.com/pelletier/go-toml/cmd/tomljson
+ tomljson --help
+ ```
+
+## Contribute
+
+Feel free to report bugs and patches using GitHub's pull requests system on
+[pelletier/go-toml](https://github.com/pelletier/go-toml). Any feedback would be
+much appreciated!
+
+### Run tests
+
+You have to make sure two kind of tests run:
+
+1. The Go unit tests
+2. The TOML examples base
+
+You can run both of them using `./test.sh`.
+
+## License
+
+The MIT License (MIT). Read [LICENSE](LICENSE).
diff --git a/vendor/github.com/pelletier/go-toml/clean.sh b/vendor/github.com/pelletier/go-toml/clean.sh
new file mode 100755
index 000000000..44d49d936
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/clean.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+# fail out of the script if anything here fails
+set -e
+
+# clear out stuff generated by test.sh
+rm -rf src test_program_bin toml-test
diff --git a/vendor/github.com/pelletier/go-toml/cmd/test_program.go b/vendor/github.com/pelletier/go-toml/cmd/test_program.go
new file mode 100644
index 000000000..58293446b
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/cmd/test_program.go
@@ -0,0 +1,91 @@
+package main
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "log"
+ "os"
+ "time"
+
+ "github.com/pelletier/go-toml"
+)
+
+func main() {
+ bytes, err := ioutil.ReadAll(os.Stdin)
+ if err != nil {
+ log.Fatalf("Error during TOML read: %s", err)
+ os.Exit(2)
+ }
+ tree, err := toml.Load(string(bytes))
+ if err != nil {
+ log.Fatalf("Error during TOML load: %s", err)
+ os.Exit(1)
+ }
+
+ typedTree := translate(*tree)
+
+ if err := json.NewEncoder(os.Stdout).Encode(typedTree); err != nil {
+ log.Fatalf("Error encoding JSON: %s", err)
+ os.Exit(3)
+ }
+
+ os.Exit(0)
+}
+
+func translate(tomlData interface{}) interface{} {
+ switch orig := tomlData.(type) {
+ case map[string]interface{}:
+ typed := make(map[string]interface{}, len(orig))
+ for k, v := range orig {
+ typed[k] = translate(v)
+ }
+ return typed
+ case *toml.TomlTree:
+ return translate(*orig)
+ case toml.TomlTree:
+ keys := orig.Keys()
+ typed := make(map[string]interface{}, len(keys))
+ for _, k := range keys {
+ typed[k] = translate(orig.GetPath([]string{k}))
+ }
+ return typed
+ case []*toml.TomlTree:
+ typed := make([]map[string]interface{}, len(orig))
+ for i, v := range orig {
+ typed[i] = translate(v).(map[string]interface{})
+ }
+ return typed
+ case []map[string]interface{}:
+ typed := make([]map[string]interface{}, len(orig))
+ for i, v := range orig {
+ typed[i] = translate(v).(map[string]interface{})
+ }
+ return typed
+ case []interface{}:
+ typed := make([]interface{}, len(orig))
+ for i, v := range orig {
+ typed[i] = translate(v)
+ }
+ return tag("array", typed)
+ case time.Time:
+ return tag("datetime", orig.Format("2006-01-02T15:04:05Z"))
+ case bool:
+ return tag("bool", fmt.Sprintf("%v", orig))
+ case int64:
+ return tag("integer", fmt.Sprintf("%d", orig))
+ case float64:
+ return tag("float", fmt.Sprintf("%v", orig))
+ case string:
+ return tag("string", orig)
+ }
+
+ panic(fmt.Sprintf("Unknown type: %T", tomlData))
+}
+
+func tag(typeName string, data interface{}) map[string]interface{} {
+ return map[string]interface{}{
+ "type": typeName,
+ "value": data,
+ }
+}
diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go
new file mode 100644
index 000000000..7e9dc467e
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main.go
@@ -0,0 +1,67 @@
+package main
+
+import (
+ "encoding/json"
+ "flag"
+ "fmt"
+ "io"
+ "os"
+
+ "github.com/pelletier/go-toml"
+)
+
+func main() {
+ flag.Usage = func() {
+ fmt.Fprintln(os.Stderr, `tomljson can be used in two ways:
+Writing to STDIN and reading from STDOUT:
+ cat file.toml | tomljson > file.json
+
+Reading from a file name:
+ tomljson file.toml
+`)
+ }
+ flag.Parse()
+ os.Exit(processMain(flag.Args(), os.Stdin, os.Stdout, os.Stderr))
+}
+
+func processMain(files []string, defaultInput io.Reader, output io.Writer, errorOutput io.Writer) int {
+ // read from stdin and print to stdout
+ inputReader := defaultInput
+
+ if len(files) > 0 {
+ var err error
+ inputReader, err = os.Open(files[0])
+ if err != nil {
+ printError(err, errorOutput)
+ return -1
+ }
+ }
+ s, err := reader(inputReader)
+ if err != nil {
+ printError(err, errorOutput)
+ return -1
+ }
+ io.WriteString(output, s+"\n")
+ return 0
+}
+
+func printError(err error, output io.Writer) {
+ io.WriteString(output, err.Error()+"\n")
+}
+
+func reader(r io.Reader) (string, error) {
+ tree, err := toml.LoadReader(r)
+ if err != nil {
+ return "", err
+ }
+ return mapToJSON(tree)
+}
+
+func mapToJSON(tree *toml.TomlTree) (string, error) {
+ treeMap := tree.ToMap()
+ bytes, err := json.MarshalIndent(treeMap, "", " ")
+ if err != nil {
+ return "", err
+ }
+ return string(bytes[:]), nil
+}
diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go
new file mode 100644
index 000000000..0b4bdbb11
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/cmd/tomljson/main_test.go
@@ -0,0 +1,82 @@
+package main
+
+import (
+ "bytes"
+ "io/ioutil"
+ "os"
+ "strings"
+ "testing"
+)
+
+func expectBufferEquality(t *testing.T, name string, buffer *bytes.Buffer, expected string) {
+ output := buffer.String()
+ if output != expected {
+ t.Errorf("incorrect %s:\n%s\n\nexpected %s:\n%s", name, output, name, expected)
+ t.Log([]rune(output))
+ t.Log([]rune(expected))
+ }
+}
+
+func expectProcessMainResults(t *testing.T, input string, args []string, exitCode int, expectedOutput string, expectedError string) {
+ inputReader := strings.NewReader(input)
+ outputBuffer := new(bytes.Buffer)
+ errorBuffer := new(bytes.Buffer)
+
+ returnCode := processMain(args, inputReader, outputBuffer, errorBuffer)
+
+ expectBufferEquality(t, "output", outputBuffer, expectedOutput)
+ expectBufferEquality(t, "error", errorBuffer, expectedError)
+
+ if returnCode != exitCode {
+ t.Error("incorrect return code:", returnCode, "expected", exitCode)
+ }
+}
+
+func TestProcessMainReadFromStdin(t *testing.T) {
+ input := `
+ [mytoml]
+ a = 42`
+ expectedOutput := `{
+ "mytoml": {
+ "a": 42
+ }
+}
+`
+ expectedError := ``
+ expectedExitCode := 0
+
+ expectProcessMainResults(t, input, []string{}, expectedExitCode, expectedOutput, expectedError)
+}
+
+func TestProcessMainReadFromFile(t *testing.T) {
+ input := `
+ [mytoml]
+ a = 42`
+
+ tmpfile, err := ioutil.TempFile("", "example.toml")
+ if err != nil {
+ t.Fatal(err)
+ }
+ if _, err := tmpfile.Write([]byte(input)); err != nil {
+ t.Fatal(err)
+ }
+
+ defer os.Remove(tmpfile.Name())
+
+ expectedOutput := `{
+ "mytoml": {
+ "a": 42
+ }
+}
+`
+ expectedError := ``
+ expectedExitCode := 0
+
+ expectProcessMainResults(t, ``, []string{tmpfile.Name()}, expectedExitCode, expectedOutput, expectedError)
+}
+
+func TestProcessMainReadFromMissingFile(t *testing.T) {
+ expectedError := `open /this/file/does/not/exist: no such file or directory
+`
+ expectProcessMainResults(t, ``, []string{"/this/file/does/not/exist"}, -1, ``, expectedError)
+}
diff --git a/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go b/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go
new file mode 100644
index 000000000..f185c56b9
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/cmd/tomll/main.go
@@ -0,0 +1,61 @@
+package main
+
+import (
+ "flag"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "os"
+
+ "github.com/pelletier/go-toml"
+)
+
+func main() {
+ flag.Usage = func() {
+ fmt.Fprintln(os.Stderr, `tomll can be used in two ways:
+Writing to STDIN and reading from STDOUT:
+ cat file.toml | tomll > file.toml
+
+Reading and updating a list of files:
+ tomll a.toml b.toml c.toml
+
+When given a list of files, tomll will modify all files in place without asking.
+`)
+ }
+ flag.Parse()
+ // read from stdin and print to stdout
+ if flag.NArg() == 0 {
+ s, err := lintReader(os.Stdin)
+ if err != nil {
+ io.WriteString(os.Stderr, err.Error())
+ os.Exit(-1)
+ }
+ io.WriteString(os.Stdout, s)
+ } else {
+ // otherwise modify a list of files
+ for _, filename := range flag.Args() {
+ s, err := lintFile(filename)
+ if err != nil {
+ io.WriteString(os.Stderr, err.Error())
+ os.Exit(-1)
+ }
+ ioutil.WriteFile(filename, []byte(s), 0644)
+ }
+ }
+}
+
+func lintFile(filename string) (string, error) {
+ tree, err := toml.LoadFile(filename)
+ if err != nil {
+ return "", err
+ }
+ return tree.String(), nil
+}
+
+func lintReader(r io.Reader) (string, error) {
+ tree, err := toml.LoadReader(r)
+ if err != nil {
+ return "", err
+ }
+ return tree.String(), nil
+}
diff --git a/vendor/github.com/pelletier/go-toml/doc.go b/vendor/github.com/pelletier/go-toml/doc.go
new file mode 100644
index 000000000..9156b736f
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/doc.go
@@ -0,0 +1,250 @@
+// Package toml is a TOML markup language parser.
+//
+// This version supports the specification as described in
+// https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md
+//
+// TOML Parsing
+//
+// TOML data may be parsed in two ways: by file, or by string.
+//
+// // load TOML data by filename
+// tree, err := toml.LoadFile("filename.toml")
+//
+// // load TOML data stored in a string
+// tree, err := toml.Load(stringContainingTomlData)
+//
+// Either way, the result is a TomlTree object that can be used to navigate the
+// structure and data within the original document.
+//
+//
+// Getting data from the TomlTree
+//
+// After parsing TOML data with Load() or LoadFile(), use the Has() and Get()
+// methods on the returned TomlTree, to find your way through the document data.
+//
+// if tree.Has("foo") {
+// fmt.Println("foo is:", tree.Get("foo"))
+// }
+//
+// Working with Paths
+//
+// Go-toml has support for basic dot-separated key paths on the Has(), Get(), Set()
+// and GetDefault() methods. These are the same kind of key paths used within the
+// TOML specification for struct tames.
+//
+// // looks for a key named 'baz', within struct 'bar', within struct 'foo'
+// tree.Has("foo.bar.baz")
+//
+// // returns the key at this path, if it is there
+// tree.Get("foo.bar.baz")
+//
+// TOML allows keys to contain '.', which can cause this syntax to be problematic
+// for some documents. In such cases, use the GetPath(), HasPath(), and SetPath(),
+// methods to explicitly define the path. This form is also faster, since
+// it avoids having to parse the passed key for '.' delimiters.
+//
+// // looks for a key named 'baz', within struct 'bar', within struct 'foo'
+// tree.HasPath([]string{"foo","bar","baz"})
+//
+// // returns the key at this path, if it is there
+// tree.GetPath([]string{"foo","bar","baz"})
+//
+// Note that this is distinct from the heavyweight query syntax supported by
+// TomlTree.Query() and the Query() struct (see below).
+//
+// Position Support
+//
+// Each element within the TomlTree is stored with position metadata, which is
+// invaluable for providing semantic feedback to a user. This helps in
+// situations where the TOML file parses correctly, but contains data that is
+// not correct for the application. In such cases, an error message can be
+// generated that indicates the problem line and column number in the source
+// TOML document.
+//
+// // load TOML data
+// tree, _ := toml.Load("filename.toml")
+//
+// // get an entry and report an error if it's the wrong type
+// element := tree.Get("foo")
+// if value, ok := element.(int64); !ok {
+// return fmt.Errorf("%v: Element 'foo' must be an integer", tree.GetPosition("foo"))
+// }
+//
+// // report an error if an expected element is missing
+// if !tree.Has("bar") {
+// return fmt.Errorf("%v: Expected 'bar' element", tree.GetPosition(""))
+// }
+//
+// Query Support
+//
+// The TOML query path implementation is based loosely on the JSONPath specification:
+// http://goessner.net/articles/JsonPath/
+//
+// The idea behind a query path is to allow quick access to any element, or set
+// of elements within TOML document, with a single expression.
+//
+// result, err := tree.Query("$.foo.bar.baz")
+//
+// This is roughly equivalent to:
+//
+// next := tree.Get("foo")
+// if next != nil {
+// next = next.Get("bar")
+// if next != nil {
+// next = next.Get("baz")
+// }
+// }
+// result := next
+//
+// err is nil if any parsing exception occurs.
+//
+// If no node in the tree matches the query, result will simply contain an empty list of
+// items.
+//
+// As illustrated above, the query path is much more efficient, especially since
+// the structure of the TOML file can vary. Rather than making assumptions about
+// a document's structure, a query allows the programmer to make structured
+// requests into the document, and get zero or more values as a result.
+//
+// The syntax of a query begins with a root token, followed by any number
+// sub-expressions:
+//
+// $
+// Root of the TOML tree. This must always come first.
+// .name
+// Selects child of this node, where 'name' is a TOML key
+// name.
+// ['name']
+// Selects child of this node, where 'name' is a string
+// containing a TOML key name.
+// [index]
+// Selcts child array element at 'index'.
+// ..expr
+// Recursively selects all children, filtered by an a union,
+// index, or slice expression.
+// ..*
+// Recursive selection of all nodes at this point in the
+// tree.
+// .*
+// Selects all children of the current node.
+// [expr,expr]
+// Union operator - a logical 'or' grouping of two or more
+// sub-expressions: index, key name, or filter.
+// [start:end:step]
+// Slice operator - selects array elements from start to
+// end-1, at the given step. All three arguments are
+// optional.
+// [?(filter)]
+// Named filter expression - the function 'filter' is
+// used to filter children at this node.
+//
+// Query Indexes And Slices
+//
+// Index expressions perform no bounds checking, and will contribute no
+// values to the result set if the provided index or index range is invalid.
+// Negative indexes represent values from the end of the array, counting backwards.
+//
+// // select the last index of the array named 'foo'
+// tree.Query("$.foo[-1]")
+//
+// Slice expressions are supported, by using ':' to separate a start/end index pair.
+//
+// // select up to the first five elements in the array
+// tree.Query("$.foo[0:5]")
+//
+// Slice expressions also allow negative indexes for the start and stop
+// arguments.
+//
+// // select all array elements.
+// tree.Query("$.foo[0:-1]")
+//
+// Slice expressions may have an optional stride/step parameter:
+//
+// // select every other element
+// tree.Query("$.foo[0:-1:2]")
+//
+// Slice start and end parameters are also optional:
+//
+// // these are all equivalent and select all the values in the array
+// tree.Query("$.foo[:]")
+// tree.Query("$.foo[0:]")
+// tree.Query("$.foo[:-1]")
+// tree.Query("$.foo[0:-1:]")
+// tree.Query("$.foo[::1]")
+// tree.Query("$.foo[0::1]")
+// tree.Query("$.foo[:-1:1]")
+// tree.Query("$.foo[0:-1:1]")
+//
+// Query Filters
+//
+// Query filters are used within a Union [,] or single Filter [] expression.
+// A filter only allows nodes that qualify through to the next expression,
+// and/or into the result set.
+//
+// // returns children of foo that are permitted by the 'bar' filter.
+// tree.Query("$.foo[?(bar)]")
+//
+// There are several filters provided with the library:
+//
+// tree
+// Allows nodes of type TomlTree.
+// int
+// Allows nodes of type int64.
+// float
+// Allows nodes of type float64.
+// string
+// Allows nodes of type string.
+// time
+// Allows nodes of type time.Time.
+// bool
+// Allows nodes of type bool.
+//
+// Query Results
+//
+// An executed query returns a QueryResult object. This contains the nodes
+// in the TOML tree that qualify the query expression. Position information
+// is also available for each value in the set.
+//
+// // display the results of a query
+// results := tree.Query("$.foo.bar.baz")
+// for idx, value := results.Values() {
+// fmt.Println("%v: %v", results.Positions()[idx], value)
+// }
+//
+// Compiled Queries
+//
+// Queries may be executed directly on a TomlTree object, or compiled ahead
+// of time and executed discretely. The former is more convienent, but has the
+// penalty of having to recompile the query expression each time.
+//
+// // basic query
+// results := tree.Query("$.foo.bar.baz")
+//
+// // compiled query
+// query := toml.CompileQuery("$.foo.bar.baz")
+// results := query.Execute(tree)
+//
+// // run the compiled query again on a different tree
+// moreResults := query.Execute(anotherTree)
+//
+// User Defined Query Filters
+//
+// Filter expressions may also be user defined by using the SetFilter()
+// function on the Query object. The function must return true/false, which
+// signifies if the passed node is kept or discarded, respectively.
+//
+// // create a query that references a user-defined filter
+// query, _ := CompileQuery("$[?(bazOnly)]")
+//
+// // define the filter, and assign it to the query
+// query.SetFilter("bazOnly", func(node interface{}) bool{
+// if tree, ok := node.(*TomlTree); ok {
+// return tree.Has("baz")
+// }
+// return false // reject all other node types
+// })
+//
+// // run the query
+// query.Execute(tree)
+//
+package toml
diff --git a/vendor/github.com/pelletier/go-toml/doc_test.go b/vendor/github.com/pelletier/go-toml/doc_test.go
new file mode 100644
index 000000000..69452415a
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/doc_test.go
@@ -0,0 +1,81 @@
+// code examples for godoc
+
+package toml
+
+import (
+ "fmt"
+)
+
+func ExampleNodeFilterFn_filterExample() {
+ tree, _ := Load(`
+ [struct_one]
+ foo = "foo"
+ bar = "bar"
+
+ [struct_two]
+ baz = "baz"
+ gorf = "gorf"
+ `)
+
+ // create a query that references a user-defined-filter
+ query, _ := CompileQuery("$[?(bazOnly)]")
+
+ // define the filter, and assign it to the query
+ query.SetFilter("bazOnly", func(node interface{}) bool {
+ if tree, ok := node.(*TomlTree); ok {
+ return tree.Has("baz")
+ }
+ return false // reject all other node types
+ })
+
+ // results contain only the 'struct_two' TomlTree
+ query.Execute(tree)
+}
+
+func ExampleQuery_queryExample() {
+ config, _ := Load(`
+ [[book]]
+ title = "The Stand"
+ author = "Stephen King"
+ [[book]]
+ title = "For Whom the Bell Tolls"
+ author = "Ernest Hemmingway"
+ [[book]]
+ title = "Neuromancer"
+ author = "William Gibson"
+ `)
+
+ // find and print all the authors in the document
+ authors, _ := config.Query("$.book.author")
+ for _, name := range authors.Values() {
+ fmt.Println(name)
+ }
+}
+
+func Example_comprehensiveExample() {
+ config, err := LoadFile("config.toml")
+
+ if err != nil {
+ fmt.Println("Error ", err.Error())
+ } else {
+ // retrieve data directly
+ user := config.Get("postgres.user").(string)
+ password := config.Get("postgres.password").(string)
+
+ // or using an intermediate object
+ configTree := config.Get("postgres").(*TomlTree)
+ user = configTree.Get("user").(string)
+ password = configTree.Get("password").(string)
+ fmt.Println("User is ", user, ". Password is ", password)
+
+ // show where elements are in the file
+ fmt.Printf("User position: %v\n", configTree.GetPosition("user"))
+ fmt.Printf("Password position: %v\n", configTree.GetPosition("password"))
+
+ // use a query to gather elements without walking the tree
+ results, _ := config.Query("$..[user,password]")
+ for ii, item := range results.Values() {
+ fmt.Printf("Query result %d: %v\n", ii, item)
+ }
+ }
+}
diff --git a/vendor/github.com/pelletier/go-toml/example-crlf.toml b/vendor/github.com/pelletier/go-toml/example-crlf.toml
new file mode 100644
index 000000000..12950a163
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/example-crlf.toml
@@ -0,0 +1,29 @@
+# This is a TOML document. Boom.
+
+title = "TOML Example"
+
+[owner]
+name = "Tom Preston-Werner"
+organization = "GitHub"
+bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
+dob = 1979-05-27T07:32:00Z # First class dates? Why not?
+
+[database]
+server = "192.168.1.1"
+ports = [ 8001, 8001, 8002 ]
+connection_max = 5000
+enabled = true
+
+[servers]
+
+ # You can indent as you please. Tabs or spaces. TOML don't care.
+ [servers.alpha]
+ ip = "10.0.0.1"
+ dc = "eqdc10"
+
+ [servers.beta]
+ ip = "10.0.0.2"
+ dc = "eqdc10"
+
+[clients]
+data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
diff --git a/vendor/github.com/pelletier/go-toml/example.toml b/vendor/github.com/pelletier/go-toml/example.toml
new file mode 100644
index 000000000..3d902f282
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/example.toml
@@ -0,0 +1,29 @@
+# This is a TOML document. Boom.
+
+title = "TOML Example"
+
+[owner]
+name = "Tom Preston-Werner"
+organization = "GitHub"
+bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
+dob = 1979-05-27T07:32:00Z # First class dates? Why not?
+
+[database]
+server = "192.168.1.1"
+ports = [ 8001, 8001, 8002 ]
+connection_max = 5000
+enabled = true
+
+[servers]
+
+ # You can indent as you please. Tabs or spaces. TOML don't care.
+ [servers.alpha]
+ ip = "10.0.0.1"
+ dc = "eqdc10"
+
+ [servers.beta]
+ ip = "10.0.0.2"
+ dc = "eqdc10"
+
+[clients]
+data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
diff --git a/vendor/github.com/pelletier/go-toml/keysparsing.go b/vendor/github.com/pelletier/go-toml/keysparsing.go
new file mode 100644
index 000000000..d62ca5fd1
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/keysparsing.go
@@ -0,0 +1,94 @@
+// Parsing keys handling both bare and quoted keys.
+
+package toml
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "unicode"
+)
+
+func parseKey(key string) ([]string, error) {
+ groups := []string{}
+ var buffer bytes.Buffer
+ inQuotes := false
+ wasInQuotes := false
+ escapeNext := false
+ ignoreSpace := true
+ expectDot := false
+
+ for _, char := range key {
+ if ignoreSpace {
+ if char == ' ' {
+ continue
+ }
+ ignoreSpace = false
+ }
+ if escapeNext {
+ buffer.WriteRune(char)
+ escapeNext = false
+ continue
+ }
+ switch char {
+ case '\\':
+ escapeNext = true
+ continue
+ case '"':
+ if inQuotes {
+ groups = append(groups, buffer.String())
+ buffer.Reset()
+ wasInQuotes = true
+ }
+ inQuotes = !inQuotes
+ expectDot = false
+ case '.':
+ if inQuotes {
+ buffer.WriteRune(char)
+ } else {
+ if !wasInQuotes {
+ if buffer.Len() == 0 {
+ return nil, errors.New("empty table key")
+ }
+ groups = append(groups, buffer.String())
+ buffer.Reset()
+ }
+ ignoreSpace = true
+ expectDot = false
+ wasInQuotes = false
+ }
+ case ' ':
+ if inQuotes {
+ buffer.WriteRune(char)
+ } else {
+ expectDot = true
+ }
+ default:
+ if !inQuotes && !isValidBareChar(char) {
+ return nil, fmt.Errorf("invalid bare character: %c", char)
+ }
+ if !inQuotes && expectDot {
+ return nil, errors.New("what?")
+ }
+ buffer.WriteRune(char)
+ expectDot = false
+ }
+ }
+ if inQuotes {
+ return nil, errors.New("mismatched quotes")
+ }
+ if escapeNext {
+ return nil, errors.New("unfinished escape sequence")
+ }
+ if buffer.Len() > 0 {
+ groups = append(groups, buffer.String())
+ }
+ if len(groups) == 0 {
+ return nil, errors.New("empty key")
+ }
+ return groups, nil
+}
+
+func isValidBareChar(r rune) bool {
+ return isAlphanumeric(r) || r == '-' || unicode.IsNumber(r)
+}
diff --git a/vendor/github.com/pelletier/go-toml/keysparsing_test.go b/vendor/github.com/pelletier/go-toml/keysparsing_test.go
new file mode 100644
index 000000000..1a9ecccaa
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/keysparsing_test.go
@@ -0,0 +1,56 @@
+package toml
+
+import (
+ "fmt"
+ "testing"
+)
+
+func testResult(t *testing.T, key string, expected []string) {
+ parsed, err := parseKey(key)
+ t.Logf("key=%s expected=%s parsed=%s", key, expected, parsed)
+ if err != nil {
+ t.Fatal("Unexpected error:", err)
+ }
+ if len(expected) != len(parsed) {
+ t.Fatal("Expected length", len(expected), "but", len(parsed), "parsed")
+ }
+ for index, expectedKey := range expected {
+ if expectedKey != parsed[index] {
+ t.Fatal("Expected", expectedKey, "at index", index, "but found", parsed[index])
+ }
+ }
+}
+
+func testError(t *testing.T, key string, expectedError string) {
+ _, err := parseKey(key)
+ if fmt.Sprintf("%s", err) != expectedError {
+ t.Fatalf("Expected error \"%s\", but got \"%s\".", expectedError, err)
+ }
+}
+
// Bare, dotted and quoted key components, plus invalid/empty-key errors.
func TestBareKeyBasic(t *testing.T) {
	testResult(t, "test", []string{"test"})
}

func TestBareKeyDotted(t *testing.T) {
	testResult(t, "this.is.a.key", []string{"this", "is", "a", "key"})
}

func TestDottedKeyBasic(t *testing.T) {
	testResult(t, "\"a.dotted.key\"", []string{"a.dotted.key"})
}

func TestBaseKeyPound(t *testing.T) {
	testError(t, "hello#world", "invalid bare character: #")
}

func TestQuotedKeys(t *testing.T) {
	testResult(t, `hello."foo".bar`, []string{"hello", "foo", "bar"})
	testResult(t, `"hello!"`, []string{"hello!"})
}

func TestEmptyKey(t *testing.T) {
	testError(t, "", "empty key")
	testError(t, " ", "empty key")
	testResult(t, `""`, []string{""})
}
diff --git a/vendor/github.com/pelletier/go-toml/lexer.go b/vendor/github.com/pelletier/go-toml/lexer.go
new file mode 100644
index 000000000..104f3b1f4
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/lexer.go
@@ -0,0 +1,657 @@
+// TOML lexer.
+//
+// Written using the principles developed by Rob Pike in
+// http://www.youtube.com/watch?v=HxaD_trXwRE
+
+package toml
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/pelletier/go-buffruneio"
+)
+
// dateRegexp matches an RFC 3339-style datetime prefix; compiled in init.
var dateRegexp *regexp.Regexp

// Define state functions
type tomlLexStateFn func() tomlLexStateFn

// Define lexer
type tomlLexer struct {
	input         *buffruneio.Reader // Textual source
	buffer        []rune             // Runes composing the current token
	tokens        chan token         // Emitted tokens, read by the caller of lexToml
	depth         int                // Bracket nesting depth of the current rvalue ('[' .. ']')
	line          int                // Line of the start of the current token
	col           int                // Column of the start of the current token
	endbufferLine int                // Line just past the buffered runes
	endbufferCol  int                // Column just past the buffered runes
}
+
+// Basic read operations on input
+
+func (l *tomlLexer) read() rune {
+ r, _, err := l.input.ReadRune()
+ if err != nil {
+ panic(err)
+ }
+ if r == '\n' {
+ l.endbufferLine++
+ l.endbufferCol = 1
+ } else {
+ l.endbufferCol++
+ }
+ return r
+}
+
+func (l *tomlLexer) next() rune {
+ r := l.read()
+
+ if r != eof {
+ l.buffer = append(l.buffer, r)
+ }
+ return r
+}
+
// ignore discards the buffered runes and moves the token start position
// forward to the current read position.
func (l *tomlLexer) ignore() {
	l.buffer = make([]rune, 0)
	l.line = l.endbufferLine
	l.col = l.endbufferCol
}
+
// skip consumes the next rune and immediately discards it.
func (l *tomlLexer) skip() {
	l.next()
	l.ignore()
}
+
+func (l *tomlLexer) fastForward(n int) {
+ for i := 0; i < n; i++ {
+ l.next()
+ }
+}
+
// emitWithValue sends a token of type t with the given value, positioned at
// the start of the current token, then discards the buffer.
func (l *tomlLexer) emitWithValue(t tokenType, value string) {
	l.tokens <- token{
		Position: Position{l.line, l.col},
		typ:      t,
		val:      value,
	}
	l.ignore()
}
+
// emit sends the buffered runes as a token of type t.
func (l *tomlLexer) emit(t tokenType) {
	l.emitWithValue(t, string(l.buffer))
}
+
// peek returns the next rune without consuming it. Panics on read errors.
func (l *tomlLexer) peek() rune {
	r, _, err := l.input.ReadRune()
	if err != nil {
		panic(err)
	}
	l.input.UnreadRune()
	return r
}
+
// follow reports whether the upcoming input matches next exactly, without
// consuming it: every rune read here is unread again when the function
// returns (the deferred UnreadRune calls run in LIFO order, restoring the
// original read position).
func (l *tomlLexer) follow(next string) bool {
	for _, expectedRune := range next {
		r, _, err := l.input.ReadRune()
		defer l.input.UnreadRune()
		if err != nil {
			panic(err)
		}
		if expectedRune != r {
			return false
		}
	}
	return true
}
+
+// Error management
+
// errorf emits a tokenError carrying the formatted message at the current
// token position, and terminates the state machine by returning nil.
func (l *tomlLexer) errorf(format string, args ...interface{}) tomlLexStateFn {
	l.tokens <- token{
		Position: Position{l.line, l.col},
		typ:      tokenError,
		val:      fmt.Sprintf(format, args...),
	}
	return nil
}
+
+// State functions
+
// lexVoid is the top-level state between values: it dispatches on the next
// rune to table headers, comments, '=' and keys, skipping spaces and
// newlines, until EOF.
func (l *tomlLexer) lexVoid() tomlLexStateFn {
	for {
		next := l.peek()
		switch next {
		case '[':
			return l.lexTableKey
		case '#':
			return l.lexComment(l.lexVoid)
		case '=':
			return l.lexEqual
		case '\r':
			fallthrough
		case '\n':
			l.skip()
			continue
		}

		if isSpace(next) {
			l.skip()
		}

		// still inside an open bracketed rvalue: keep lexing values
		if l.depth > 0 {
			return l.lexRvalue
		}

		if isKeyStartChar(next) {
			return l.lexKey
		}

		if next == eof {
			l.next()
			break
		}
	}

	l.emit(tokenEOF)
	return nil
}
+
// lexRvalue lexes the right-hand side of an assignment: brackets, braces,
// strings, booleans, dates, numbers, commas and comments. depth tracks
// '['/']' nesting so newlines inside arrays do not end the value.
func (l *tomlLexer) lexRvalue() tomlLexStateFn {
	for {
		next := l.peek()
		switch next {
		case '.':
			return l.errorf("cannot start float with a dot")
		case '=':
			return l.lexEqual
		case '[':
			l.depth++
			return l.lexLeftBracket
		case ']':
			l.depth--
			return l.lexRightBracket
		case '{':
			return l.lexLeftCurlyBrace
		case '}':
			return l.lexRightCurlyBrace
		case '#':
			return l.lexComment(l.lexRvalue)
		case '"':
			return l.lexString
		case '\'':
			return l.lexLiteralString
		case ',':
			return l.lexComma
		case '\r':
			fallthrough
		case '\n':
			l.skip()
			if l.depth == 0 {
				// newline outside any bracket ends the value
				return l.lexVoid
			}
			return l.lexRvalue
		case '_':
			return l.errorf("cannot start number with underscore")
		}

		if l.follow("true") {
			return l.lexTrue
		}

		if l.follow("false") {
			return l.lexFalse
		}

		if isSpace(next) {
			l.skip()
			continue
		}

		if next == eof {
			l.next()
			break
		}

		// 35 runes is enough to cover the longest datetime form matched
		// by dateRegexp; try a date before falling back to a number.
		possibleDate := string(l.input.PeekRunes(35))
		dateMatch := dateRegexp.FindString(possibleDate)
		if dateMatch != "" {
			l.fastForward(len(dateMatch))
			return l.lexDate
		}

		if next == '+' || next == '-' || isDigit(next) {
			return l.lexNumber
		}

		if isAlphanumeric(next) {
			return l.lexKey
		}

		return l.errorf("no value can start with %c", next)
	}

	l.emit(tokenEOF)
	return nil
}
+
// lexLeftCurlyBrace emits '{' (start of an inline table).
func (l *tomlLexer) lexLeftCurlyBrace() tomlLexStateFn {
	l.next()
	l.emit(tokenLeftCurlyBrace)
	return l.lexRvalue
}
+
// lexRightCurlyBrace emits '}' (end of an inline table).
func (l *tomlLexer) lexRightCurlyBrace() tomlLexStateFn {
	l.next()
	l.emit(tokenRightCurlyBrace)
	return l.lexRvalue
}
+
// lexDate emits the date runes that lexRvalue already consumed into the
// buffer via fastForward after a dateRegexp match.
func (l *tomlLexer) lexDate() tomlLexStateFn {
	l.emit(tokenDate)
	return l.lexRvalue
}
+
// lexTrue consumes the 4 runes of "true" and emits tokenTrue.
func (l *tomlLexer) lexTrue() tomlLexStateFn {
	l.fastForward(4)
	l.emit(tokenTrue)
	return l.lexRvalue
}
+
// lexFalse consumes the 5 runes of "false" and emits tokenFalse.
func (l *tomlLexer) lexFalse() tomlLexStateFn {
	l.fastForward(5)
	l.emit(tokenFalse)
	return l.lexRvalue
}
+
// lexEqual emits '=' and switches to lexing the right-hand value.
func (l *tomlLexer) lexEqual() tomlLexStateFn {
	l.next()
	l.emit(tokenEqual)
	return l.lexRvalue
}
+
// lexComma emits ',' (array / inline-table element separator).
func (l *tomlLexer) lexComma() tomlLexStateFn {
	l.next()
	l.emit(tokenComma)
	return l.lexRvalue
}
+
+func (l *tomlLexer) lexKey() tomlLexStateFn {
+ growingString := ""
+
+ for r := l.peek(); isKeyChar(r) || r == '\n' || r == '\r'; r = l.peek() {
+ if r == '"' {
+ l.next()
+ str, err := l.lexStringAsString(`"`, false, true)
+ if err != nil {
+ return l.errorf(err.Error())
+ }
+ growingString += `"` + str + `"`
+ l.next()
+ continue
+ } else if r == '\n' {
+ return l.errorf("keys cannot contain new lines")
+ } else if isSpace(r) {
+ break
+ } else if !isValidBareChar(r) {
+ return l.errorf("keys cannot contain %c character", r)
+ }
+ growingString += string(r)
+ l.next()
+ }
+ l.emitWithValue(tokenKey, growingString)
+ return l.lexVoid
+}
+
// lexComment returns a state that discards input up to (not including) the
// next newline or EOF, then resumes previousState.
func (l *tomlLexer) lexComment(previousState tomlLexStateFn) tomlLexStateFn {
	return func() tomlLexStateFn {
		for next := l.peek(); next != '\n' && next != eof; next = l.peek() {
			// leave a CRLF pair for the caller to handle as a newline
			if next == '\r' && l.follow("\r\n") {
				break
			}
			l.next()
		}
		l.ignore()
		return previousState
	}
}
+
// lexLeftBracket emits '[' (start of an array value).
func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
	l.next()
	l.emit(tokenLeftBracket)
	return l.lexRvalue
}
+
// lexLiteralStringAsString reads a literal string (no escape processing) up
// to terminator and returns its contents, terminator excluded. When
// discardLeadingNewLine is set, an immediate LF or CRLF after the opening
// quotes is dropped (multi-line literal form).
func (l *tomlLexer) lexLiteralStringAsString(terminator string, discardLeadingNewLine bool) (string, error) {
	growingString := ""

	if discardLeadingNewLine {
		if l.follow("\r\n") {
			l.skip()
			l.skip()
		} else if l.peek() == '\n' {
			l.skip()
		}
	}

	// find end of string
	for {
		if l.follow(terminator) {
			return growingString, nil
		}

		next := l.peek()
		if next == eof {
			break
		}
		growingString += string(l.next())
	}

	return "", errors.New("unclosed string")
}
+
+func (l *tomlLexer) lexLiteralString() tomlLexStateFn {
+ l.skip()
+
+ // handle special case for triple-quote
+ terminator := "'"
+ discardLeadingNewLine := false
+ if l.follow("''") {
+ l.skip()
+ l.skip()
+ terminator = "'''"
+ discardLeadingNewLine = true
+ }
+
+ str, err := l.lexLiteralStringAsString(terminator, discardLeadingNewLine)
+ if err != nil {
+ return l.errorf(err.Error())
+ }
+
+ l.emitWithValue(tokenString, str)
+ l.fastForward(len(terminator))
+ l.ignore()
+ return l.lexRvalue
+}
+
// Lex a string and return the results as a string.
// Terminator is the substring indicating the end of the token.
// The resulting string does not include the terminator.
//
// Escape sequences (\", \n, \b, \f, \/, \t, \r, \\, \uXXXX, \UXXXXXXXX)
// are decoded; a backslash followed by whitespace swallows all following
// whitespace (line-ending backslash). acceptNewLines permits raw CR/LF in
// the body; all other unescaped control characters are rejected.
func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine, acceptNewLines bool) (string, error) {
	growingString := ""

	if discardLeadingNewLine {
		// a newline right after the opening delimiter is trimmed
		if l.follow("\r\n") {
			l.skip()
			l.skip()
		} else if l.peek() == '\n' {
			l.skip()
		}
	}

	for {
		if l.follow(terminator) {
			return growingString, nil
		}

		if l.follow("\\") {
			l.next()
			switch l.peek() {
			case '\r':
				fallthrough
			case '\n':
				fallthrough
			case '\t':
				fallthrough
			case ' ':
				// skip all whitespace chars following backslash
				for strings.ContainsRune("\r\n\t ", l.peek()) {
					l.next()
				}
			case '"':
				growingString += "\""
				l.next()
			case 'n':
				growingString += "\n"
				l.next()
			case 'b':
				growingString += "\b"
				l.next()
			case 'f':
				growingString += "\f"
				l.next()
			case '/':
				growingString += "/"
				l.next()
			case 't':
				growingString += "\t"
				l.next()
			case 'r':
				growingString += "\r"
				l.next()
			case '\\':
				growingString += "\\"
				l.next()
			case 'u':
				// \uXXXX: exactly four hex digits
				l.next()
				code := ""
				for i := 0; i < 4; i++ {
					c := l.peek()
					if !isHexDigit(c) {
						return "", errors.New("unfinished unicode escape")
					}
					l.next()
					code = code + string(c)
				}
				intcode, err := strconv.ParseInt(code, 16, 32)
				if err != nil {
					return "", errors.New("invalid unicode escape: \\u" + code)
				}
				growingString += string(rune(intcode))
			case 'U':
				// \UXXXXXXXX: exactly eight hex digits
				l.next()
				code := ""
				for i := 0; i < 8; i++ {
					c := l.peek()
					if !isHexDigit(c) {
						return "", errors.New("unfinished unicode escape")
					}
					l.next()
					code = code + string(c)
				}
				intcode, err := strconv.ParseInt(code, 16, 64)
				if err != nil {
					return "", errors.New("invalid unicode escape: \\U" + code)
				}
				growingString += string(rune(intcode))
			default:
				return "", errors.New("invalid escape sequence: \\" + string(l.peek()))
			}
		} else {
			r := l.peek()

			if 0x00 <= r && r <= 0x1F && !(acceptNewLines && (r == '\n' || r == '\r')) {
				return "", fmt.Errorf("unescaped control character %U", r)
			}
			l.next()
			growingString += string(r)
		}

		if l.peek() == eof {
			break
		}
	}

	return "", errors.New("unclosed string")
}
+
+func (l *tomlLexer) lexString() tomlLexStateFn {
+ l.skip()
+
+ // handle special case for triple-quote
+ terminator := `"`
+ discardLeadingNewLine := false
+ acceptNewLines := false
+ if l.follow(`""`) {
+ l.skip()
+ l.skip()
+ terminator = `"""`
+ discardLeadingNewLine = true
+ acceptNewLines = true
+ }
+
+ str, err := l.lexStringAsString(terminator, discardLeadingNewLine, acceptNewLines)
+
+ if err != nil {
+ return l.errorf(err.Error())
+ }
+
+ l.emitWithValue(tokenString, str)
+ l.fastForward(len(terminator))
+ l.ignore()
+ return l.lexRvalue
+}
+
// lexTableKey emits '[' or '[[' and routes to the matching header state.
func (l *tomlLexer) lexTableKey() tomlLexStateFn {
	l.next()

	if l.peek() == '[' {
		// token '[[' signifies an array of tables
		l.next()
		l.emit(tokenDoubleLeftBracket)
		return l.lexInsideTableArrayKey
	}
	// vanilla table key
	l.emit(tokenLeftBracket)
	return l.lexInsideTableKey
}
+
+func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn {
+ for r := l.peek(); r != eof; r = l.peek() {
+ switch r {
+ case ']':
+ if len(l.buffer) > 0 {
+ l.emit(tokenKeyGroupArray)
+ }
+ l.next()
+ if l.peek() != ']' {
+ break
+ }
+ l.next()
+ l.emit(tokenDoubleRightBracket)
+ return l.lexVoid
+ case '[':
+ return l.errorf("table array key cannot contain ']'")
+ default:
+ l.next()
+ }
+ }
+ return l.errorf("unclosed table array key")
+}
+
+func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn {
+ for r := l.peek(); r != eof; r = l.peek() {
+ switch r {
+ case ']':
+ if len(l.buffer) > 0 {
+ l.emit(tokenKeyGroup)
+ }
+ l.next()
+ l.emit(tokenRightBracket)
+ return l.lexVoid
+ case '[':
+ return l.errorf("table key cannot contain ']'")
+ default:
+ l.next()
+ }
+ }
+ return l.errorf("unclosed table key")
+}
+
// lexRightBracket emits ']' (end of an array value).
func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
	l.next()
	l.emit(tokenRightBracket)
	return l.lexRvalue
}
+
// lexNumber lexes an integer or float: optional sign, '_' digit
// separators, optional fraction and optional exponent. It emits
// tokenFloat when a dot or exponent was seen, tokenInteger otherwise.
func (l *tomlLexer) lexNumber() tomlLexStateFn {
	r := l.peek()
	if r == '+' || r == '-' {
		l.next()
	}
	pointSeen := false
	expSeen := false
	digitSeen := false
	for {
		next := l.peek()
		if next == '.' {
			if pointSeen {
				return l.errorf("cannot have two dots in one float")
			}
			l.next()
			if !isDigit(l.peek()) {
				return l.errorf("float cannot end with a dot")
			}
			pointSeen = true
		} else if next == 'e' || next == 'E' {
			expSeen = true
			l.next()
			r := l.peek()
			if r == '+' || r == '-' {
				l.next()
			}
		} else if isDigit(next) {
			digitSeen = true
			l.next()
		} else if next == '_' {
			l.next()
		} else {
			break
		}
		// a dot before any digit means the number started with '.'
		if pointSeen && !digitSeen {
			return l.errorf("cannot start float with a dot")
		}
	}

	if !digitSeen {
		return l.errorf("no digit in that number")
	}
	if pointSeen || expSeen {
		l.emit(tokenFloat)
	} else {
		l.emit(tokenInteger)
	}
	return l.lexRvalue
}
+
// run drives the state machine from lexVoid until a state returns nil,
// then closes the token channel to signal completion.
func (l *tomlLexer) run() {
	for state := l.lexVoid; state != nil; {
		state = state()
	}
	close(l.tokens)
}
+
// init compiles the RFC 3339-style datetime matcher used by lexRvalue.
func init() {
	dateRegexp = regexp.MustCompile(`^\d{1,4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})`)
}
+
// Entry point
//
// lexToml starts a lexer goroutine over input and returns the channel on
// which tokens are delivered; the channel is closed when lexing ends.
func lexToml(input io.Reader) chan token {
	bufferedInput := buffruneio.NewReader(input)
	l := &tomlLexer{
		input:         bufferedInput,
		tokens:        make(chan token),
		line:          1,
		col:           1,
		endbufferLine: 1,
		endbufferCol:  1,
	}
	go l.run()
	return l.tokens
}
diff --git a/vendor/github.com/pelletier/go-toml/lexer_test.go b/vendor/github.com/pelletier/go-toml/lexer_test.go
new file mode 100644
index 000000000..6b324ea0e
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/lexer_test.go
@@ -0,0 +1,750 @@
+package toml
+
+import (
+ "strings"
+ "testing"
+)
+
// testFlow lexes input and asserts the emitted tokens match expectedFlow
// exactly (type, value and position), and that the channel then closes.
func testFlow(t *testing.T, input string, expectedFlow []token) {
	ch := lexToml(strings.NewReader(input))
	for _, expected := range expectedFlow {
		token := <-ch
		if token != expected {
			t.Log("While testing: ", input)
			t.Log("compared (got)", token, "to (expected)", expected)
			t.Log("\tvalue:", token.val, "<->", expected.val)
			t.Log("\tvalue as bytes:", []byte(token.val), "<->", []byte(expected.val))
			t.Log("\ttype:", token.typ.String(), "<->", expected.typ.String())
			t.Log("\tline:", token.Line, "<->", expected.Line)
			t.Log("\tcolumn:", token.Col, "<->", expected.Col)
			t.Log("compared", token, "to", expected)
			t.FailNow()
		}
	}

	tok, ok := <-ch
	if ok {
		t.Log("channel is not closed!")
		t.Log(len(ch)+1, "tokens remaining:")

		t.Log("token ->", tok)
		for token := range ch {
			t.Log("token ->", token)
		}
		t.FailNow()
	}
}
+
// Table headers: plain, quoted-unicode, unclosed, and comment interaction.
func TestValidKeyGroup(t *testing.T) {
	testFlow(t, "[hello world]", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, "hello world"},
		{Position{1, 13}, tokenRightBracket, "]"},
		{Position{1, 14}, tokenEOF, ""},
	})
}

func TestNestedQuotedUnicodeKeyGroup(t *testing.T) {
	testFlow(t, `[ j . "ʞ" . l ]`, []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, ` j . "ʞ" . l `},
		{Position{1, 15}, tokenRightBracket, "]"},
		{Position{1, 16}, tokenEOF, ""},
	})
}

func TestUnclosedKeyGroup(t *testing.T) {
	testFlow(t, "[hello world", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenError, "unclosed table key"},
	})
}

func TestComment(t *testing.T) {
	testFlow(t, "# blahblah", []token{
		{Position{1, 11}, tokenEOF, ""},
	})
}

func TestKeyGroupComment(t *testing.T) {
	testFlow(t, "[hello world] # blahblah", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, "hello world"},
		{Position{1, 13}, tokenRightBracket, "]"},
		{Position{1, 25}, tokenEOF, ""},
	})
}

func TestMultipleKeyGroupsComment(t *testing.T) {
	testFlow(t, "[hello world] # blahblah\n[test]", []token{
		{Position{1, 1}, tokenLeftBracket, "["},
		{Position{1, 2}, tokenKeyGroup, "hello world"},
		{Position{1, 13}, tokenRightBracket, "]"},
		{Position{2, 1}, tokenLeftBracket, "["},
		{Position{2, 2}, tokenKeyGroup, "test"},
		{Position{2, 6}, tokenRightBracket, "]"},
		{Position{2, 7}, tokenEOF, ""},
	})
}
+
// CRLF handling and the accepted bare-key character classes.
func TestSimpleWindowsCRLF(t *testing.T) {
	testFlow(t, "a=4\r\nb=2", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 2}, tokenEqual, "="},
		{Position{1, 3}, tokenInteger, "4"},
		{Position{2, 1}, tokenKey, "b"},
		{Position{2, 2}, tokenEqual, "="},
		{Position{2, 3}, tokenInteger, "2"},
		{Position{2, 4}, tokenEOF, ""},
	})
}

func TestBasicKey(t *testing.T) {
	testFlow(t, "hello", []token{
		{Position{1, 1}, tokenKey, "hello"},
		{Position{1, 6}, tokenEOF, ""},
	})
}

func TestBasicKeyWithUnderscore(t *testing.T) {
	testFlow(t, "hello_hello", []token{
		{Position{1, 1}, tokenKey, "hello_hello"},
		{Position{1, 12}, tokenEOF, ""},
	})
}

func TestBasicKeyWithDash(t *testing.T) {
	testFlow(t, "hello-world", []token{
		{Position{1, 1}, tokenKey, "hello-world"},
		{Position{1, 12}, tokenEOF, ""},
	})
}

func TestBasicKeyWithUppercaseMix(t *testing.T) {
	testFlow(t, "helloHELLOHello", []token{
		{Position{1, 1}, tokenKey, "helloHELLOHello"},
		{Position{1, 16}, tokenEOF, ""},
	})
}

func TestBasicKeyWithInternationalCharacters(t *testing.T) {
	testFlow(t, "héllÖ", []token{
		{Position{1, 1}, tokenKey, "héllÖ"},
		{Position{1, 6}, tokenEOF, ""},
	})
}
+
// key = value assignments: invalid key characters, strings and booleans.
func TestBasicKeyAndEqual(t *testing.T) {
	testFlow(t, "hello =", []token{
		{Position{1, 1}, tokenKey, "hello"},
		{Position{1, 7}, tokenEqual, "="},
		{Position{1, 8}, tokenEOF, ""},
	})
}

func TestKeyWithSharpAndEqual(t *testing.T) {
	testFlow(t, "key#name = 5", []token{
		{Position{1, 1}, tokenError, "keys cannot contain # character"},
	})
}

func TestKeyWithSymbolsAndEqual(t *testing.T) {
	testFlow(t, "~!@$^&*()_+-`1234567890[]\\|/?><.,;:' = 5", []token{
		{Position{1, 1}, tokenError, "keys cannot contain ~ character"},
	})
}

func TestKeyEqualStringEscape(t *testing.T) {
	testFlow(t, `foo = "hello\""`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "hello\""},
		{Position{1, 16}, tokenEOF, ""},
	})
}

func TestKeyEqualStringUnfinished(t *testing.T) {
	testFlow(t, `foo = "bar`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unclosed string"},
	})
}

func TestKeyEqualString(t *testing.T) {
	testFlow(t, `foo = "bar"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "bar"},
		{Position{1, 12}, tokenEOF, ""},
	})
}

func TestKeyEqualTrue(t *testing.T) {
	testFlow(t, "foo = true", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenTrue, "true"},
		{Position{1, 11}, tokenEOF, ""},
	})
}

func TestKeyEqualFalse(t *testing.T) {
	testFlow(t, "foo = false", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFalse, "false"},
		{Position{1, 12}, tokenEOF, ""},
	})
}
+
// Arrays: nesting, trailing commas, and comments inside multi-line arrays.
func TestArrayNestedString(t *testing.T) {
	testFlow(t, `a = [ ["hello", "world"] ]`, []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 9}, tokenString, "hello"},
		{Position{1, 15}, tokenComma, ","},
		{Position{1, 18}, tokenString, "world"},
		{Position{1, 24}, tokenRightBracket, "]"},
		{Position{1, 26}, tokenRightBracket, "]"},
		{Position{1, 27}, tokenEOF, ""},
	})
}

func TestArrayNestedInts(t *testing.T) {
	testFlow(t, "a = [ [42, 21], [10] ]", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenInteger, "42"},
		{Position{1, 10}, tokenComma, ","},
		{Position{1, 12}, tokenInteger, "21"},
		{Position{1, 14}, tokenRightBracket, "]"},
		{Position{1, 15}, tokenComma, ","},
		{Position{1, 17}, tokenLeftBracket, "["},
		{Position{1, 18}, tokenInteger, "10"},
		{Position{1, 20}, tokenRightBracket, "]"},
		{Position{1, 22}, tokenRightBracket, "]"},
		{Position{1, 23}, tokenEOF, ""},
	})
}

func TestArrayInts(t *testing.T) {
	testFlow(t, "a = [ 42, 21, 10, ]", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 7}, tokenInteger, "42"},
		{Position{1, 9}, tokenComma, ","},
		{Position{1, 11}, tokenInteger, "21"},
		{Position{1, 13}, tokenComma, ","},
		{Position{1, 15}, tokenInteger, "10"},
		{Position{1, 17}, tokenComma, ","},
		{Position{1, 19}, tokenRightBracket, "]"},
		{Position{1, 20}, tokenEOF, ""},
	})
}

func TestMultilineArrayComments(t *testing.T) {
	testFlow(t, "a = [1, # wow\n2, # such items\n3, # so array\n]", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenLeftBracket, "["},
		{Position{1, 6}, tokenInteger, "1"},
		{Position{1, 7}, tokenComma, ","},
		{Position{2, 1}, tokenInteger, "2"},
		{Position{2, 2}, tokenComma, ","},
		{Position{3, 1}, tokenInteger, "3"},
		{Position{3, 2}, tokenComma, ","},
		{Position{4, 1}, tokenRightBracket, "]"},
		{Position{4, 2}, tokenEOF, ""},
	})
}

func TestNestedArraysComment(t *testing.T) {
	toml := `
someArray = [
# does not work
["entry1"]
]`
	testFlow(t, toml, []token{
		{Position{2, 1}, tokenKey, "someArray"},
		{Position{2, 11}, tokenEqual, "="},
		{Position{2, 13}, tokenLeftBracket, "["},
		{Position{4, 1}, tokenLeftBracket, "["},
		{Position{4, 3}, tokenString, "entry1"},
		{Position{4, 10}, tokenRightBracket, "]"},
		{Position{5, 1}, tokenRightBracket, "]"},
		{Position{5, 2}, tokenEOF, ""},
	})
}
+
// Boolean arrays (with and without trailing comment) and the date regexp.
func TestKeyEqualArrayBools(t *testing.T) {
	testFlow(t, "foo = [true, false, true]", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenTrue, "true"},
		{Position{1, 12}, tokenComma, ","},
		{Position{1, 14}, tokenFalse, "false"},
		{Position{1, 19}, tokenComma, ","},
		{Position{1, 21}, tokenTrue, "true"},
		{Position{1, 25}, tokenRightBracket, "]"},
		{Position{1, 26}, tokenEOF, ""},
	})
}

func TestKeyEqualArrayBoolsWithComments(t *testing.T) {
	testFlow(t, "foo = [true, false, true] # YEAH", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenTrue, "true"},
		{Position{1, 12}, tokenComma, ","},
		{Position{1, 14}, tokenFalse, "false"},
		{Position{1, 19}, tokenComma, ","},
		{Position{1, 21}, tokenTrue, "true"},
		{Position{1, 25}, tokenRightBracket, "]"},
		{Position{1, 33}, tokenEOF, ""},
	})
}

func TestDateRegexp(t *testing.T) {
	if dateRegexp.FindString("1979-05-27T07:32:00Z") == "" {
		t.Error("basic lexing")
	}
	if dateRegexp.FindString("1979-05-27T00:32:00-07:00") == "" {
		t.Error("offset lexing")
	}
	if dateRegexp.FindString("1979-05-27T00:32:00.999999-07:00") == "" {
		t.Error("nano precision lexing")
	}
}
+
// Datetime values and float error/exponent forms.
func TestKeyEqualDate(t *testing.T) {
	testFlow(t, "foo = 1979-05-27T07:32:00Z", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenDate, "1979-05-27T07:32:00Z"},
		{Position{1, 27}, tokenEOF, ""},
	})
	testFlow(t, "foo = 1979-05-27T00:32:00-07:00", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenDate, "1979-05-27T00:32:00-07:00"},
		{Position{1, 32}, tokenEOF, ""},
	})
	testFlow(t, "foo = 1979-05-27T00:32:00.999999-07:00", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenDate, "1979-05-27T00:32:00.999999-07:00"},
		{Position{1, 39}, tokenEOF, ""},
	})
}

func TestFloatEndingWithDot(t *testing.T) {
	testFlow(t, "foo = 42.", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenError, "float cannot end with a dot"},
	})
}

func TestFloatWithTwoDots(t *testing.T) {
	testFlow(t, "foo = 4.2.", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenError, "cannot have two dots in one float"},
	})
}

func TestFloatWithExponent1(t *testing.T) {
	testFlow(t, "a = 5e+22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "5e+22"},
		{Position{1, 10}, tokenEOF, ""},
	})
}

func TestFloatWithExponent2(t *testing.T) {
	testFlow(t, "a = 5E+22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "5E+22"},
		{Position{1, 10}, tokenEOF, ""},
	})
}

func TestFloatWithExponent3(t *testing.T) {
	testFlow(t, "a = -5e+22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "-5e+22"},
		{Position{1, 11}, tokenEOF, ""},
	})
}

func TestFloatWithExponent4(t *testing.T) {
	testFlow(t, "a = -5e-22", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "-5e-22"},
		{Position{1, 11}, tokenEOF, ""},
	})
}

func TestFloatWithExponent5(t *testing.T) {
	testFlow(t, "a = 6.626e-34", []token{
		{Position{1, 1}, tokenKey, "a"},
		{Position{1, 3}, tokenEqual, "="},
		{Position{1, 5}, tokenFloat, "6.626e-34"},
		{Position{1, 14}, tokenEOF, ""},
	})
}
+
// Invalid escapes, deeply nested arrays, and integer/float number forms
// (signs and '_' separators).
func TestInvalidEsquapeSequence(t *testing.T) {
	testFlow(t, `foo = "\x"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "invalid escape sequence: \\x"},
	})
}

func TestNestedArrays(t *testing.T) {
	testFlow(t, "foo = [[[]]]", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenLeftBracket, "["},
		{Position{1, 8}, tokenLeftBracket, "["},
		{Position{1, 9}, tokenLeftBracket, "["},
		{Position{1, 10}, tokenRightBracket, "]"},
		{Position{1, 11}, tokenRightBracket, "]"},
		{Position{1, 12}, tokenRightBracket, "]"},
		{Position{1, 13}, tokenEOF, ""},
	})
}

func TestKeyEqualNumber(t *testing.T) {
	testFlow(t, "foo = 42", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "42"},
		{Position{1, 9}, tokenEOF, ""},
	})

	testFlow(t, "foo = +42", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "+42"},
		{Position{1, 10}, tokenEOF, ""},
	})

	testFlow(t, "foo = -42", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "-42"},
		{Position{1, 10}, tokenEOF, ""},
	})

	testFlow(t, "foo = 4.2", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFloat, "4.2"},
		{Position{1, 10}, tokenEOF, ""},
	})

	testFlow(t, "foo = +4.2", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFloat, "+4.2"},
		{Position{1, 11}, tokenEOF, ""},
	})

	testFlow(t, "foo = -4.2", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenFloat, "-4.2"},
		{Position{1, 11}, tokenEOF, ""},
	})

	testFlow(t, "foo = 1_000", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "1_000"},
		{Position{1, 12}, tokenEOF, ""},
	})

	testFlow(t, "foo = 5_349_221", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "5_349_221"},
		{Position{1, 16}, tokenEOF, ""},
	})

	testFlow(t, "foo = 1_2_3_4_5", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "1_2_3_4_5"},
		{Position{1, 16}, tokenEOF, ""},
	})

	testFlow(t, "flt8 = 9_224_617.445_991_228_313", []token{
		{Position{1, 1}, tokenKey, "flt8"},
		{Position{1, 6}, tokenEqual, "="},
		{Position{1, 8}, tokenFloat, "9_224_617.445_991_228_313"},
		{Position{1, 33}, tokenEOF, ""},
	})

	testFlow(t, "foo = +", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenError, "no digit in that number"},
	})
}
+
// Multi-line documents, \u/\U unicode escapes, and rejected raw control
// characters inside basic strings.
func TestMultiline(t *testing.T) {
	testFlow(t, "foo = 42\nbar=21", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 7}, tokenInteger, "42"},
		{Position{2, 1}, tokenKey, "bar"},
		{Position{2, 4}, tokenEqual, "="},
		{Position{2, 5}, tokenInteger, "21"},
		{Position{2, 7}, tokenEOF, ""},
	})
}

func TestKeyEqualStringUnicodeEscape(t *testing.T) {
	testFlow(t, `foo = "hello \u2665"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "hello ♥"},
		{Position{1, 21}, tokenEOF, ""},
	})
	testFlow(t, `foo = "hello \U000003B4"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "hello δ"},
		{Position{1, 25}, tokenEOF, ""},
	})
	testFlow(t, `foo = "\uabcd"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "\uabcd"},
		{Position{1, 15}, tokenEOF, ""},
	})
	testFlow(t, `foo = "\uABCD"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "\uABCD"},
		{Position{1, 15}, tokenEOF, ""},
	})
	testFlow(t, `foo = "\U000bcdef"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "\U000bcdef"},
		{Position{1, 19}, tokenEOF, ""},
	})
	testFlow(t, `foo = "\U000BCDEF"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, "\U000BCDEF"},
		{Position{1, 19}, tokenEOF, ""},
	})
	testFlow(t, `foo = "\u2"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unfinished unicode escape"},
	})
	testFlow(t, `foo = "\U2"`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unfinished unicode escape"},
	})
}

func TestKeyEqualStringNoEscape(t *testing.T) {
	testFlow(t, "foo = \"hello \u0002\"", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unescaped control character U+0002"},
	})
	testFlow(t, "foo = \"hello \u001F\"", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unescaped control character U+001F"},
	})
}
+
// Literal strings: backslashes kept verbatim, and ''' multi-line form.
func TestLiteralString(t *testing.T) {
	testFlow(t, `foo = 'C:\Users\nodejs\templates'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `C:\Users\nodejs\templates`},
		{Position{1, 34}, tokenEOF, ""},
	})
	testFlow(t, `foo = '\\ServerX\admin$\system32\'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `\\ServerX\admin$\system32\`},
		{Position{1, 35}, tokenEOF, ""},
	})
	testFlow(t, `foo = 'Tom "Dubs" Preston-Werner'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `Tom "Dubs" Preston-Werner`},
		{Position{1, 34}, tokenEOF, ""},
	})
	testFlow(t, `foo = '<\i\c*\s*>'`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenString, `<\i\c*\s*>`},
		{Position{1, 19}, tokenEOF, ""},
	})
	testFlow(t, `foo = 'C:\Users\nodejs\unfinis`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 8}, tokenError, "unclosed string"},
	})
}

func TestMultilineLiteralString(t *testing.T) {
	testFlow(t, `foo = '''hello 'literal' world'''`, []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{1, 10}, tokenString, `hello 'literal' world`},
		{Position{1, 34}, tokenEOF, ""},
	})

	testFlow(t, "foo = '''\nhello\n'literal'\nworld'''", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "hello\n'literal'\nworld"},
		{Position{4, 9}, tokenEOF, ""},
	})
	testFlow(t, "foo = '''\r\nhello\r\n'literal'\r\nworld'''", []token{
		{Position{1, 1}, tokenKey, "foo"},
		{Position{1, 5}, tokenEqual, "="},
		{Position{2, 1}, tokenString, "hello\r\n'literal'\r\nworld"},
		{Position{4, 9}, tokenEOF, ""},
	})
}
+
+func TestMultilineString(t *testing.T) {
+ testFlow(t, `foo = """hello "literal" world"""`, []token{
+ {Position{1, 1}, tokenKey, "foo"},
+ {Position{1, 5}, tokenEqual, "="},
+ {Position{1, 10}, tokenString, `hello "literal" world`},
+ {Position{1, 34}, tokenEOF, ""},
+ })
+
+ testFlow(t, "foo = \"\"\"\r\nhello\\\r\n\"literal\"\\\nworld\"\"\"", []token{
+ {Position{1, 1}, tokenKey, "foo"},
+ {Position{1, 5}, tokenEqual, "="},
+ {Position{2, 1}, tokenString, "hello\"literal\"world"},
+ {Position{4, 9}, tokenEOF, ""},
+ })
+
+ testFlow(t, "foo = \"\"\"\\\n \\\n \\\n hello\\\nmultiline\\\nworld\"\"\"", []token{
+ {Position{1, 1}, tokenKey, "foo"},
+ {Position{1, 5}, tokenEqual, "="},
+ {Position{1, 10}, tokenString, "hellomultilineworld"},
+ {Position{6, 9}, tokenEOF, ""},
+ })
+
+ testFlow(t, "key2 = \"\"\"\nThe quick brown \\\n\n\n fox jumps over \\\n the lazy dog.\"\"\"", []token{
+ {Position{1, 1}, tokenKey, "key2"},
+ {Position{1, 6}, tokenEqual, "="},
+ {Position{2, 1}, tokenString, "The quick brown fox jumps over the lazy dog."},
+ {Position{6, 21}, tokenEOF, ""},
+ })
+
+ testFlow(t, "key2 = \"\"\"\\\n The quick brown \\\n fox jumps over \\\n the lazy dog.\\\n \"\"\"", []token{
+ {Position{1, 1}, tokenKey, "key2"},
+ {Position{1, 6}, tokenEqual, "="},
+ {Position{1, 11}, tokenString, "The quick brown fox jumps over the lazy dog."},
+ {Position{5, 11}, tokenEOF, ""},
+ })
+
+ testFlow(t, `key2 = "Roses are red\nViolets are blue"`, []token{
+ {Position{1, 1}, tokenKey, "key2"},
+ {Position{1, 6}, tokenEqual, "="},
+ {Position{1, 9}, tokenString, "Roses are red\nViolets are blue"},
+ {Position{1, 41}, tokenEOF, ""},
+ })
+
+ testFlow(t, "key2 = \"\"\"\nRoses are red\nViolets are blue\"\"\"", []token{
+ {Position{1, 1}, tokenKey, "key2"},
+ {Position{1, 6}, tokenEqual, "="},
+ {Position{2, 1}, tokenString, "Roses are red\nViolets are blue"},
+ {Position{3, 20}, tokenEOF, ""},
+ })
+}
+
+func TestUnicodeString(t *testing.T) {
+ testFlow(t, `foo = "hello ♥ world"`, []token{
+ {Position{1, 1}, tokenKey, "foo"},
+ {Position{1, 5}, tokenEqual, "="},
+ {Position{1, 8}, tokenString, "hello ♥ world"},
+ {Position{1, 22}, tokenEOF, ""},
+ })
+}
+func TestEscapeInString(t *testing.T) {
+ testFlow(t, `foo = "\b\f\/"`, []token{
+ {Position{1, 1}, tokenKey, "foo"},
+ {Position{1, 5}, tokenEqual, "="},
+ {Position{1, 8}, tokenString, "\b\f/"},
+ {Position{1, 15}, tokenEOF, ""},
+ })
+}
+
+func TestKeyGroupArray(t *testing.T) {
+ testFlow(t, "[[foo]]", []token{
+ {Position{1, 1}, tokenDoubleLeftBracket, "[["},
+ {Position{1, 3}, tokenKeyGroupArray, "foo"},
+ {Position{1, 6}, tokenDoubleRightBracket, "]]"},
+ {Position{1, 8}, tokenEOF, ""},
+ })
+}
+
+func TestQuotedKey(t *testing.T) {
+ testFlow(t, "\"a b\" = 42", []token{
+ {Position{1, 1}, tokenKey, "\"a b\""},
+ {Position{1, 7}, tokenEqual, "="},
+ {Position{1, 9}, tokenInteger, "42"},
+ {Position{1, 11}, tokenEOF, ""},
+ })
+}
+
+func TestKeyNewline(t *testing.T) {
+ testFlow(t, "a\n= 4", []token{
+ {Position{1, 1}, tokenError, "keys cannot contain new lines"},
+ })
+}
+
+func TestInvalidFloat(t *testing.T) {
+ testFlow(t, "a=7e1_", []token{
+ {Position{1, 1}, tokenKey, "a"},
+ {Position{1, 2}, tokenEqual, "="},
+ {Position{1, 3}, tokenFloat, "7e1_"},
+ {Position{1, 7}, tokenEOF, ""},
+ })
+}
+
+func TestLexUnknownRvalue(t *testing.T) {
+ testFlow(t, `a = !b`, []token{
+ {Position{1, 1}, tokenKey, "a"},
+ {Position{1, 3}, tokenEqual, "="},
+ {Position{1, 5}, tokenError, "no value can start with !"},
+ })
+
+ testFlow(t, `a = \b`, []token{
+ {Position{1, 1}, tokenKey, "a"},
+ {Position{1, 3}, tokenEqual, "="},
+ {Position{1, 5}, tokenError, `no value can start with \`},
+ })
+}
diff --git a/vendor/github.com/pelletier/go-toml/match.go b/vendor/github.com/pelletier/go-toml/match.go
new file mode 100644
index 000000000..48b0f2a15
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/match.go
@@ -0,0 +1,234 @@
+package toml
+
+import (
+ "fmt"
+)
+
+// support function to set positions for tomlValues
+// NOTE: this is done to allow ctx.lastPosition to indicate the start of any
+// values returned by the query engines
+func tomlValueCheck(node interface{}, ctx *queryContext) interface{} {
+ switch castNode := node.(type) {
+ case *tomlValue:
+ ctx.lastPosition = castNode.position
+ return castNode.value
+ case []*TomlTree:
+ if len(castNode) > 0 {
+ ctx.lastPosition = castNode[0].position
+ }
+ return node
+ default:
+ return node
+ }
+}
+
+// base match
+type matchBase struct {
+ next pathFn
+}
+
+func (f *matchBase) setNext(next pathFn) {
+ f.next = next
+}
+
+// terminating functor - gathers results
+type terminatingFn struct {
+ // empty
+}
+
+func newTerminatingFn() *terminatingFn {
+ return &terminatingFn{}
+}
+
+func (f *terminatingFn) setNext(next pathFn) {
+ // do nothing
+}
+
+func (f *terminatingFn) call(node interface{}, ctx *queryContext) {
+ switch castNode := node.(type) {
+ case *TomlTree:
+ ctx.result.appendResult(node, castNode.position)
+ case *tomlValue:
+ ctx.result.appendResult(node, castNode.position)
+ default:
+ // use last position for scalars
+ ctx.result.appendResult(node, ctx.lastPosition)
+ }
+}
+
+// match single key
+type matchKeyFn struct {
+ matchBase
+ Name string
+}
+
+func newMatchKeyFn(name string) *matchKeyFn {
+ return &matchKeyFn{Name: name}
+}
+
+func (f *matchKeyFn) call(node interface{}, ctx *queryContext) {
+ if array, ok := node.([]*TomlTree); ok {
+ for _, tree := range array {
+ item := tree.values[f.Name]
+ if item != nil {
+ f.next.call(item, ctx)
+ }
+ }
+ } else if tree, ok := node.(*TomlTree); ok {
+ item := tree.values[f.Name]
+ if item != nil {
+ f.next.call(item, ctx)
+ }
+ }
+}
+
+// match single index
+type matchIndexFn struct {
+ matchBase
+ Idx int
+}
+
+func newMatchIndexFn(idx int) *matchIndexFn {
+ return &matchIndexFn{Idx: idx}
+}
+
+func (f *matchIndexFn) call(node interface{}, ctx *queryContext) {
+ if arr, ok := tomlValueCheck(node, ctx).([]interface{}); ok {
+ if f.Idx < len(arr) && f.Idx >= 0 {
+ f.next.call(arr[f.Idx], ctx)
+ }
+ }
+}
+
+// filter by slicing
+type matchSliceFn struct {
+ matchBase
+ Start, End, Step int
+}
+
+func newMatchSliceFn(start, end, step int) *matchSliceFn {
+ return &matchSliceFn{Start: start, End: end, Step: step}
+}
+
+func (f *matchSliceFn) call(node interface{}, ctx *queryContext) {
+ if arr, ok := tomlValueCheck(node, ctx).([]interface{}); ok {
+ // adjust indexes for negative values, reverse ordering
+ realStart, realEnd := f.Start, f.End
+ if realStart < 0 {
+ realStart = len(arr) + realStart
+ }
+ if realEnd < 0 {
+ realEnd = len(arr) + realEnd
+ }
+ if realEnd < realStart {
+ realEnd, realStart = realStart, realEnd // swap
+ }
+ // loop and gather
+ for idx := realStart; idx < realEnd; idx += f.Step {
+ f.next.call(arr[idx], ctx)
+ }
+ }
+}
+
+// match anything
+type matchAnyFn struct {
+ matchBase
+}
+
+func newMatchAnyFn() *matchAnyFn {
+ return &matchAnyFn{}
+}
+
+func (f *matchAnyFn) call(node interface{}, ctx *queryContext) {
+ if tree, ok := node.(*TomlTree); ok {
+ for _, v := range tree.values {
+ f.next.call(v, ctx)
+ }
+ }
+}
+
+// filter through union
+type matchUnionFn struct {
+ Union []pathFn
+}
+
+func (f *matchUnionFn) setNext(next pathFn) {
+ for _, fn := range f.Union {
+ fn.setNext(next)
+ }
+}
+
+func (f *matchUnionFn) call(node interface{}, ctx *queryContext) {
+ for _, fn := range f.Union {
+ fn.call(node, ctx)
+ }
+}
+
+// match every single last node in the tree
+type matchRecursiveFn struct {
+ matchBase
+}
+
+func newMatchRecursiveFn() *matchRecursiveFn {
+ return &matchRecursiveFn{}
+}
+
+func (f *matchRecursiveFn) call(node interface{}, ctx *queryContext) {
+ if tree, ok := node.(*TomlTree); ok {
+ var visit func(tree *TomlTree)
+ visit = func(tree *TomlTree) {
+ for _, v := range tree.values {
+ f.next.call(v, ctx)
+ switch node := v.(type) {
+ case *TomlTree:
+ visit(node)
+ case []*TomlTree:
+ for _, subtree := range node {
+ visit(subtree)
+ }
+ }
+ }
+ }
+ f.next.call(tree, ctx)
+ visit(tree)
+ }
+}
+
+// match based on an externally provided functional filter
+type matchFilterFn struct {
+ matchBase
+ Pos Position
+ Name string
+}
+
+func newMatchFilterFn(name string, pos Position) *matchFilterFn {
+ return &matchFilterFn{Name: name, Pos: pos}
+}
+
+func (f *matchFilterFn) call(node interface{}, ctx *queryContext) {
+ fn, ok := (*ctx.filters)[f.Name]
+ if !ok {
+ panic(fmt.Sprintf("%s: query context does not have filter '%s'",
+ f.Pos.String(), f.Name))
+ }
+ switch castNode := tomlValueCheck(node, ctx).(type) {
+ case *TomlTree:
+ for _, v := range castNode.values {
+ if tv, ok := v.(*tomlValue); ok {
+ if fn(tv.value) {
+ f.next.call(v, ctx)
+ }
+ } else {
+ if fn(v) {
+ f.next.call(v, ctx)
+ }
+ }
+ }
+ case []interface{}:
+ for _, v := range castNode {
+ if fn(v) {
+ f.next.call(v, ctx)
+ }
+ }
+ }
+}
diff --git a/vendor/github.com/pelletier/go-toml/match_test.go b/vendor/github.com/pelletier/go-toml/match_test.go
new file mode 100644
index 000000000..b63654ad2
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/match_test.go
@@ -0,0 +1,201 @@
+package toml
+
+import (
+ "fmt"
+ "testing"
+)
+
+// dump path tree to a string
+func pathString(root pathFn) string {
+ result := fmt.Sprintf("%T:", root)
+ switch fn := root.(type) {
+ case *terminatingFn:
+ result += "{}"
+ case *matchKeyFn:
+ result += fmt.Sprintf("{%s}", fn.Name)
+ result += pathString(fn.next)
+ case *matchIndexFn:
+ result += fmt.Sprintf("{%d}", fn.Idx)
+ result += pathString(fn.next)
+ case *matchSliceFn:
+ result += fmt.Sprintf("{%d:%d:%d}",
+ fn.Start, fn.End, fn.Step)
+ result += pathString(fn.next)
+ case *matchAnyFn:
+ result += "{}"
+ result += pathString(fn.next)
+ case *matchUnionFn:
+ result += "{["
+ for _, v := range fn.Union {
+ result += pathString(v) + ", "
+ }
+ result += "]}"
+ case *matchRecursiveFn:
+ result += "{}"
+ result += pathString(fn.next)
+ case *matchFilterFn:
+ result += fmt.Sprintf("{%s}", fn.Name)
+ result += pathString(fn.next)
+ }
+ return result
+}
+
+func assertPathMatch(t *testing.T, path, ref *Query) bool {
+ pathStr := pathString(path.root)
+ refStr := pathString(ref.root)
+ if pathStr != refStr {
+ t.Errorf("paths do not match")
+ t.Log("test:", pathStr)
+ t.Log("ref: ", refStr)
+ return false
+ }
+ return true
+}
+
+func assertPath(t *testing.T, query string, ref *Query) {
+ path, _ := parseQuery(lexQuery(query))
+ assertPathMatch(t, path, ref)
+}
+
+func buildPath(parts ...pathFn) *Query {
+ query := newQuery()
+ for _, v := range parts {
+ query.appendPath(v)
+ }
+ return query
+}
+
+func TestPathRoot(t *testing.T) {
+ assertPath(t,
+ "$",
+ buildPath(
+ // empty
+ ))
+}
+
+func TestPathKey(t *testing.T) {
+ assertPath(t,
+ "$.foo",
+ buildPath(
+ newMatchKeyFn("foo"),
+ ))
+}
+
+func TestPathBracketKey(t *testing.T) {
+ assertPath(t,
+ "$[foo]",
+ buildPath(
+ newMatchKeyFn("foo"),
+ ))
+}
+
+func TestPathBracketStringKey(t *testing.T) {
+ assertPath(t,
+ "$['foo']",
+ buildPath(
+ newMatchKeyFn("foo"),
+ ))
+}
+
+func TestPathIndex(t *testing.T) {
+ assertPath(t,
+ "$[123]",
+ buildPath(
+ newMatchIndexFn(123),
+ ))
+}
+
+func TestPathSliceStart(t *testing.T) {
+ assertPath(t,
+ "$[123:]",
+ buildPath(
+ newMatchSliceFn(123, maxInt, 1),
+ ))
+}
+
+func TestPathSliceStartEnd(t *testing.T) {
+ assertPath(t,
+ "$[123:456]",
+ buildPath(
+ newMatchSliceFn(123, 456, 1),
+ ))
+}
+
+func TestPathSliceStartEndColon(t *testing.T) {
+ assertPath(t,
+ "$[123:456:]",
+ buildPath(
+ newMatchSliceFn(123, 456, 1),
+ ))
+}
+
+func TestPathSliceStartStep(t *testing.T) {
+ assertPath(t,
+ "$[123::7]",
+ buildPath(
+ newMatchSliceFn(123, maxInt, 7),
+ ))
+}
+
+func TestPathSliceEndStep(t *testing.T) {
+ assertPath(t,
+ "$[:456:7]",
+ buildPath(
+ newMatchSliceFn(0, 456, 7),
+ ))
+}
+
+func TestPathSliceStep(t *testing.T) {
+ assertPath(t,
+ "$[::7]",
+ buildPath(
+ newMatchSliceFn(0, maxInt, 7),
+ ))
+}
+
+func TestPathSliceAll(t *testing.T) {
+ assertPath(t,
+ "$[123:456:7]",
+ buildPath(
+ newMatchSliceFn(123, 456, 7),
+ ))
+}
+
+func TestPathAny(t *testing.T) {
+ assertPath(t,
+ "$.*",
+ buildPath(
+ newMatchAnyFn(),
+ ))
+}
+
+func TestPathUnion(t *testing.T) {
+ assertPath(t,
+ "$[foo, bar, baz]",
+ buildPath(
+ &matchUnionFn{[]pathFn{
+ newMatchKeyFn("foo"),
+ newMatchKeyFn("bar"),
+ newMatchKeyFn("baz"),
+ }},
+ ))
+}
+
+func TestPathRecurse(t *testing.T) {
+ assertPath(t,
+ "$..*",
+ buildPath(
+ newMatchRecursiveFn(),
+ ))
+}
+
+func TestPathFilterExpr(t *testing.T) {
+ assertPath(t,
+ "$[?('foo'),?(bar)]",
+ buildPath(
+ &matchUnionFn{[]pathFn{
+ newMatchFilterFn("foo", Position{}),
+ newMatchFilterFn("bar", Position{}),
+ }},
+ ))
+}
diff --git a/vendor/github.com/pelletier/go-toml/parser.go b/vendor/github.com/pelletier/go-toml/parser.go
new file mode 100644
index 000000000..20e90a3ed
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/parser.go
@@ -0,0 +1,393 @@
+// TOML Parser.
+
+package toml
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+)
+
+type tomlParser struct {
+ flow chan token
+ tree *TomlTree
+ tokensBuffer []token
+ currentTable []string
+ seenTableKeys []string
+}
+
+type tomlParserStateFn func() tomlParserStateFn
+
+// Formats and panics an error message based on a token
+func (p *tomlParser) raiseError(tok *token, msg string, args ...interface{}) {
+ panic(tok.Position.String() + ": " + fmt.Sprintf(msg, args...))
+}
+
+func (p *tomlParser) run() {
+ for state := p.parseStart; state != nil; {
+ state = state()
+ }
+}
+
+func (p *tomlParser) peek() *token {
+ if len(p.tokensBuffer) != 0 {
+ return &(p.tokensBuffer[0])
+ }
+
+ tok, ok := <-p.flow
+ if !ok {
+ return nil
+ }
+ p.tokensBuffer = append(p.tokensBuffer, tok)
+ return &tok
+}
+
+func (p *tomlParser) assume(typ tokenType) {
+ tok := p.getToken()
+ if tok == nil {
+ p.raiseError(tok, "was expecting token %s, but token stream is empty", tok)
+ }
+ if tok.typ != typ {
+ p.raiseError(tok, "was expecting token %s, but got %s instead", typ, tok)
+ }
+}
+
+func (p *tomlParser) getToken() *token {
+ if len(p.tokensBuffer) != 0 {
+ tok := p.tokensBuffer[0]
+ p.tokensBuffer = p.tokensBuffer[1:]
+ return &tok
+ }
+ tok, ok := <-p.flow
+ if !ok {
+ return nil
+ }
+ return &tok
+}
+
+func (p *tomlParser) parseStart() tomlParserStateFn {
+ tok := p.peek()
+
+ // end of stream, parsing is finished
+ if tok == nil {
+ return nil
+ }
+
+ switch tok.typ {
+ case tokenDoubleLeftBracket:
+ return p.parseGroupArray
+ case tokenLeftBracket:
+ return p.parseGroup
+ case tokenKey:
+ return p.parseAssign
+ case tokenEOF:
+ return nil
+ default:
+ p.raiseError(tok, "unexpected token")
+ }
+ return nil
+}
+
+func (p *tomlParser) parseGroupArray() tomlParserStateFn {
+ startToken := p.getToken() // discard the [[
+ key := p.getToken()
+ if key.typ != tokenKeyGroupArray {
+ p.raiseError(key, "unexpected token %s, was expecting a table array key", key)
+ }
+
+ // get or create table array element at the indicated part in the path
+ keys, err := parseKey(key.val)
+ if err != nil {
+ p.raiseError(key, "invalid table array key: %s", err)
+ }
+ p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries
+ destTree := p.tree.GetPath(keys)
+ var array []*TomlTree
+ if destTree == nil {
+ array = make([]*TomlTree, 0)
+ } else if target, ok := destTree.([]*TomlTree); ok && target != nil {
+ array = destTree.([]*TomlTree)
+ } else {
+ p.raiseError(key, "key %s is already assigned and not of type table array", key)
+ }
+ p.currentTable = keys
+
+ // add a new tree to the end of the table array
+ newTree := newTomlTree()
+ newTree.position = startToken.Position
+ array = append(array, newTree)
+ p.tree.SetPath(p.currentTable, array)
+
+ // remove all keys that were children of this table array
+ prefix := key.val + "."
+ found := false
+ for ii := 0; ii < len(p.seenTableKeys); {
+ tableKey := p.seenTableKeys[ii]
+ if strings.HasPrefix(tableKey, prefix) {
+ p.seenTableKeys = append(p.seenTableKeys[:ii], p.seenTableKeys[ii+1:]...)
+ } else {
+ found = (tableKey == key.val)
+ ii++
+ }
+ }
+
+ // keep this key name from use by other kinds of assignments
+ if !found {
+ p.seenTableKeys = append(p.seenTableKeys, key.val)
+ }
+
+ // move to next parser state
+ p.assume(tokenDoubleRightBracket)
+ return p.parseStart
+}
+
+func (p *tomlParser) parseGroup() tomlParserStateFn {
+ startToken := p.getToken() // discard the [
+ key := p.getToken()
+ if key.typ != tokenKeyGroup {
+ p.raiseError(key, "unexpected token %s, was expecting a table key", key)
+ }
+ for _, item := range p.seenTableKeys {
+ if item == key.val {
+ p.raiseError(key, "duplicated tables")
+ }
+ }
+
+ p.seenTableKeys = append(p.seenTableKeys, key.val)
+ keys, err := parseKey(key.val)
+ if err != nil {
+ p.raiseError(key, "invalid table array key: %s", err)
+ }
+ if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
+ p.raiseError(key, "%s", err)
+ }
+ p.assume(tokenRightBracket)
+ p.currentTable = keys
+ return p.parseStart
+}
+
+func (p *tomlParser) parseAssign() tomlParserStateFn {
+ key := p.getToken()
+ p.assume(tokenEqual)
+
+ value := p.parseRvalue()
+ var tableKey []string
+ if len(p.currentTable) > 0 {
+ tableKey = p.currentTable
+ } else {
+ tableKey = []string{}
+ }
+
+ // find the table to assign, looking out for arrays of tables
+ var targetNode *TomlTree
+ switch node := p.tree.GetPath(tableKey).(type) {
+ case []*TomlTree:
+ targetNode = node[len(node)-1]
+ case *TomlTree:
+ targetNode = node
+ default:
+ p.raiseError(key, "Unknown table type for path: %s",
+ strings.Join(tableKey, "."))
+ }
+
+ // assign value to the found table
+ keyVals, err := parseKey(key.val)
+ if err != nil {
+ p.raiseError(key, "%s", err)
+ }
+ if len(keyVals) != 1 {
+ p.raiseError(key, "Invalid key")
+ }
+ keyVal := keyVals[0]
+ localKey := []string{keyVal}
+ finalKey := append(tableKey, keyVal)
+ if targetNode.GetPath(localKey) != nil {
+ p.raiseError(key, "The following key was defined twice: %s",
+ strings.Join(finalKey, "."))
+ }
+ var toInsert interface{}
+
+ switch value.(type) {
+ case *TomlTree, []*TomlTree:
+ toInsert = value
+ default:
+ toInsert = &tomlValue{value, key.Position}
+ }
+ targetNode.values[keyVal] = toInsert
+ return p.parseStart
+}
+
+var numberUnderscoreInvalidRegexp *regexp.Regexp
+
+func cleanupNumberToken(value string) (string, error) {
+ if numberUnderscoreInvalidRegexp.MatchString(value) {
+ return "", errors.New("invalid use of _ in number")
+ }
+ cleanedVal := strings.Replace(value, "_", "", -1)
+ return cleanedVal, nil
+}
+
+func (p *tomlParser) parseRvalue() interface{} {
+ tok := p.getToken()
+ if tok == nil || tok.typ == tokenEOF {
+ p.raiseError(tok, "expecting a value")
+ }
+
+ switch tok.typ {
+ case tokenString:
+ return tok.val
+ case tokenTrue:
+ return true
+ case tokenFalse:
+ return false
+ case tokenInteger:
+ cleanedVal, err := cleanupNumberToken(tok.val)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ val, err := strconv.ParseInt(cleanedVal, 10, 64)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ return val
+ case tokenFloat:
+ cleanedVal, err := cleanupNumberToken(tok.val)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ val, err := strconv.ParseFloat(cleanedVal, 64)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ return val
+ case tokenDate:
+ val, err := time.ParseInLocation(time.RFC3339Nano, tok.val, time.UTC)
+ if err != nil {
+ p.raiseError(tok, "%s", err)
+ }
+ return val
+ case tokenLeftBracket:
+ return p.parseArray()
+ case tokenLeftCurlyBrace:
+ return p.parseInlineTable()
+ case tokenEqual:
+ p.raiseError(tok, "cannot have multiple equals for the same key")
+ case tokenError:
+ p.raiseError(tok, "%s", tok)
+ }
+
+ p.raiseError(tok, "never reached")
+
+ return nil
+}
+
+func tokenIsComma(t *token) bool {
+ return t != nil && t.typ == tokenComma
+}
+
+func (p *tomlParser) parseInlineTable() *TomlTree {
+ tree := newTomlTree()
+ var previous *token
+Loop:
+ for {
+ follow := p.peek()
+ if follow == nil || follow.typ == tokenEOF {
+ p.raiseError(follow, "unterminated inline table")
+ }
+ switch follow.typ {
+ case tokenRightCurlyBrace:
+ p.getToken()
+ break Loop
+ case tokenKey:
+ if !tokenIsComma(previous) && previous != nil {
+ p.raiseError(follow, "comma expected between fields in inline table")
+ }
+ key := p.getToken()
+ p.assume(tokenEqual)
+ value := p.parseRvalue()
+ tree.Set(key.val, value)
+ case tokenComma:
+ if previous == nil {
+ p.raiseError(follow, "inline table cannot start with a comma")
+ }
+ if tokenIsComma(previous) {
+ p.raiseError(follow, "need field between two commas in inline table")
+ }
+ p.getToken()
+ default:
+ p.raiseError(follow, "unexpected token type in inline table: %s", follow.typ.String())
+ }
+ previous = follow
+ }
+ if tokenIsComma(previous) {
+ p.raiseError(previous, "trailing comma at the end of inline table")
+ }
+ return tree
+}
+
+func (p *tomlParser) parseArray() interface{} {
+ var array []interface{}
+ arrayType := reflect.TypeOf(nil)
+ for {
+ follow := p.peek()
+ if follow == nil || follow.typ == tokenEOF {
+ p.raiseError(follow, "unterminated array")
+ }
+ if follow.typ == tokenRightBracket {
+ p.getToken()
+ break
+ }
+ val := p.parseRvalue()
+ if arrayType == nil {
+ arrayType = reflect.TypeOf(val)
+ }
+ if reflect.TypeOf(val) != arrayType {
+ p.raiseError(follow, "mixed types in array")
+ }
+ array = append(array, val)
+ follow = p.peek()
+ if follow == nil || follow.typ == tokenEOF {
+ p.raiseError(follow, "unterminated array")
+ }
+ if follow.typ != tokenRightBracket && follow.typ != tokenComma {
+ p.raiseError(follow, "missing comma")
+ }
+ if follow.typ == tokenComma {
+ p.getToken()
+ }
+ }
+ // An array of TomlTrees is actually an array of inline
+ // tables, which is a shorthand for a table array. If the
+ // array was not converted from []interface{} to []*TomlTree,
+ // the two notations would not be equivalent.
+ if arrayType == reflect.TypeOf(newTomlTree()) {
+ tomlArray := make([]*TomlTree, len(array))
+ for i, v := range array {
+ tomlArray[i] = v.(*TomlTree)
+ }
+ return tomlArray
+ }
+ return array
+}
+
+func parseToml(flow chan token) *TomlTree {
+ result := newTomlTree()
+ result.position = Position{1, 1}
+ parser := &tomlParser{
+ flow: flow,
+ tree: result,
+ tokensBuffer: make([]token, 0),
+ currentTable: make([]string, 0),
+ seenTableKeys: make([]string, 0),
+ }
+ parser.run()
+ return result
+}
+
+func init() {
+ numberUnderscoreInvalidRegexp = regexp.MustCompile(`([^\d]_|_[^\d]|_$|^_)`)
+}
diff --git a/vendor/github.com/pelletier/go-toml/parser_test.go b/vendor/github.com/pelletier/go-toml/parser_test.go
new file mode 100644
index 000000000..58aae203a
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/parser_test.go
@@ -0,0 +1,785 @@
+package toml
+
+import (
+ "fmt"
+ "reflect"
+ "testing"
+ "time"
+
+ "github.com/davecgh/go-spew/spew"
+)
+
+func assertSubTree(t *testing.T, path []string, tree *TomlTree, err error, ref map[string]interface{}) {
+ if err != nil {
+ t.Error("Non-nil error:", err.Error())
+ return
+ }
+ for k, v := range ref {
+ nextPath := append(path, k)
+ t.Log("asserting path", nextPath)
+ // NOTE: directly access key instead of resolve by path
+ // NOTE: see TestSpecialKV
+ switch node := tree.GetPath([]string{k}).(type) {
+ case []*TomlTree:
+ t.Log("\tcomparing key", nextPath, "by array iteration")
+ for idx, item := range node {
+ assertSubTree(t, nextPath, item, err, v.([]map[string]interface{})[idx])
+ }
+ case *TomlTree:
+ t.Log("\tcomparing key", nextPath, "by subtree assestion")
+ assertSubTree(t, nextPath, node, err, v.(map[string]interface{}))
+ default:
+ t.Log("\tcomparing key", nextPath, "by string representation because it's of type", reflect.TypeOf(node))
+ if fmt.Sprintf("%v", node) != fmt.Sprintf("%v", v) {
+ t.Errorf("was expecting %v at %v but got %v", v, k, node)
+ }
+ }
+ }
+}
+
+func assertTree(t *testing.T, tree *TomlTree, err error, ref map[string]interface{}) {
+ t.Log("Asserting tree:\n", spew.Sdump(tree))
+ assertSubTree(t, []string{}, tree, err, ref)
+ t.Log("Finished tree assertion.")
+}
+
+func TestCreateSubTree(t *testing.T) {
+ tree := newTomlTree()
+ tree.createSubTree([]string{"a", "b", "c"}, Position{})
+ tree.Set("a.b.c", 42)
+ if tree.Get("a.b.c") != 42 {
+ t.Fail()
+ }
+}
+
+func TestSimpleKV(t *testing.T) {
+ tree, err := Load("a = 42")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": int64(42),
+ })
+
+ tree, _ = Load("a = 42\nb = 21")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": int64(42),
+ "b": int64(21),
+ })
+}
+
+func TestNumberInKey(t *testing.T) {
+ tree, err := Load("hello2 = 42")
+ assertTree(t, tree, err, map[string]interface{}{
+ "hello2": int64(42),
+ })
+}
+
+func TestSimpleNumbers(t *testing.T) {
+ tree, err := Load("a = +42\nb = -21\nc = +4.2\nd = -2.1")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": int64(42),
+ "b": int64(-21),
+ "c": float64(4.2),
+ "d": float64(-2.1),
+ })
+}
+
+func TestNumbersWithUnderscores(t *testing.T) {
+ tree, err := Load("a = 1_000")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": int64(1000),
+ })
+
+ tree, err = Load("a = 5_349_221")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": int64(5349221),
+ })
+
+ tree, err = Load("a = 1_2_3_4_5")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": int64(12345),
+ })
+
+ tree, err = Load("flt8 = 9_224_617.445_991_228_313")
+ assertTree(t, tree, err, map[string]interface{}{
+ "flt8": float64(9224617.445991228313),
+ })
+
+ tree, err = Load("flt9 = 1e1_00")
+ assertTree(t, tree, err, map[string]interface{}{
+ "flt9": float64(1e100),
+ })
+}
+
+func TestFloatsWithExponents(t *testing.T) {
+ tree, err := Load("a = 5e+22\nb = 5E+22\nc = -5e+22\nd = -5e-22\ne = 6.626e-34")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": float64(5e+22),
+ "b": float64(5E+22),
+ "c": float64(-5e+22),
+ "d": float64(-5e-22),
+ "e": float64(6.626e-34),
+ })
+}
+
+func TestSimpleDate(t *testing.T) {
+ tree, err := Load("a = 1979-05-27T07:32:00Z")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
+ })
+}
+
+func TestDateOffset(t *testing.T) {
+ tree, err := Load("a = 1979-05-27T00:32:00-07:00")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": time.Date(1979, time.May, 27, 0, 32, 0, 0, time.FixedZone("", -7*60*60)),
+ })
+}
+
+func TestDateNano(t *testing.T) {
+ tree, err := Load("a = 1979-05-27T00:32:00.999999999-07:00")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": time.Date(1979, time.May, 27, 0, 32, 0, 999999999, time.FixedZone("", -7*60*60)),
+ })
+}
+
+func TestSimpleString(t *testing.T) {
+ tree, err := Load("a = \"hello world\"")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": "hello world",
+ })
+}
+
+func TestSpaceKey(t *testing.T) {
+ tree, err := Load("\"a b\" = \"hello world\"")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a b": "hello world",
+ })
+}
+
+func TestStringEscapables(t *testing.T) {
+ tree, err := Load("a = \"a \\n b\"")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": "a \n b",
+ })
+
+ tree, err = Load("a = \"a \\t b\"")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": "a \t b",
+ })
+
+ tree, err = Load("a = \"a \\r b\"")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": "a \r b",
+ })
+
+ tree, err = Load("a = \"a \\\\ b\"")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": "a \\ b",
+ })
+}
+
+func TestEmptyQuotedString(t *testing.T) {
+ tree, err := Load(`[""]
+"" = 1`)
+ assertTree(t, tree, err, map[string]interface{}{
+ "": map[string]interface{}{
+ "": int64(1),
+ },
+ })
+}
+
+func TestBools(t *testing.T) {
+ tree, err := Load("a = true\nb = false")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": true,
+ "b": false,
+ })
+}
+
+func TestNestedKeys(t *testing.T) {
+ tree, err := Load("[a.b.c]\nd = 42")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": map[string]interface{}{
+ "b": map[string]interface{}{
+ "c": map[string]interface{}{
+ "d": int64(42),
+ },
+ },
+ },
+ })
+}
+
+func TestNestedQuotedUnicodeKeys(t *testing.T) {
+ tree, err := Load("[ j . \"ʞ\" . l ]\nd = 42")
+ assertTree(t, tree, err, map[string]interface{}{
+ "j": map[string]interface{}{
+ "ʞ": map[string]interface{}{
+ "l": map[string]interface{}{
+ "d": int64(42),
+ },
+ },
+ },
+ })
+
+ tree, err = Load("[ g . h . i ]\nd = 42")
+ assertTree(t, tree, err, map[string]interface{}{
+ "g": map[string]interface{}{
+ "h": map[string]interface{}{
+ "i": map[string]interface{}{
+ "d": int64(42),
+ },
+ },
+ },
+ })
+
+ tree, err = Load("[ d.e.f ]\nk = 42")
+ assertTree(t, tree, err, map[string]interface{}{
+ "d": map[string]interface{}{
+ "e": map[string]interface{}{
+ "f": map[string]interface{}{
+ "k": int64(42),
+ },
+ },
+ },
+ })
+}
+
+func TestArrayOne(t *testing.T) {
+ tree, err := Load("a = [1]")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": []int64{int64(1)},
+ })
+}
+
+func TestArrayZero(t *testing.T) {
+ tree, err := Load("a = []")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": []interface{}{},
+ })
+}
+
+func TestArraySimple(t *testing.T) {
+ tree, err := Load("a = [42, 21, 10]")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": []int64{int64(42), int64(21), int64(10)},
+ })
+
+ tree, _ = Load("a = [42, 21, 10,]")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": []int64{int64(42), int64(21), int64(10)},
+ })
+}
+
+func TestArrayMultiline(t *testing.T) {
+ tree, err := Load("a = [42,\n21, 10,]")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": []int64{int64(42), int64(21), int64(10)},
+ })
+}
+
+func TestArrayNested(t *testing.T) {
+ tree, err := Load("a = [[42, 21], [10]]")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": [][]int64{{int64(42), int64(21)}, {int64(10)}},
+ })
+}
+
+func TestNestedArrayComment(t *testing.T) {
+ tree, err := Load(`
+someArray = [
+# does not work
+["entry1"]
+]`)
+ assertTree(t, tree, err, map[string]interface{}{
+ "someArray": [][]string{{"entry1"}},
+ })
+}
+
+func TestNestedEmptyArrays(t *testing.T) {
+ tree, err := Load("a = [[[]]]")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": [][][]interface{}{{{}}},
+ })
+}
+
+func TestArrayMixedTypes(t *testing.T) {
+ _, err := Load("a = [42, 16.0]")
+ if err.Error() != "(1, 10): mixed types in array" {
+ t.Error("Bad error message:", err.Error())
+ }
+
+ _, err = Load("a = [42, \"hello\"]")
+ if err.Error() != "(1, 11): mixed types in array" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestArrayNestedStrings(t *testing.T) {
+ tree, err := Load("data = [ [\"gamma\", \"delta\"], [\"Foo\"] ]")
+ assertTree(t, tree, err, map[string]interface{}{
+ "data": [][]string{{"gamma", "delta"}, {"Foo"}},
+ })
+}
+
+func TestParseUnknownRvalue(t *testing.T) {
+ _, err := Load("a = !bssss")
+ if err == nil {
+ t.Error("Expecting a parse error")
+ }
+
+ _, err = Load("a = /b")
+ if err == nil {
+ t.Error("Expecting a parse error")
+ }
+}
+
+func TestMissingValue(t *testing.T) {
+ _, err := Load("a = ")
+ if err.Error() != "(1, 5): expecting a value" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestUnterminatedArray(t *testing.T) {
+ _, err := Load("a = [1,")
+ if err.Error() != "(1, 8): unterminated array" {
+ t.Error("Bad error message:", err.Error())
+ }
+
+ _, err = Load("a = [1")
+ if err.Error() != "(1, 7): unterminated array" {
+ t.Error("Bad error message:", err.Error())
+ }
+
+ _, err = Load("a = [1 2")
+ if err.Error() != "(1, 8): missing comma" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestNewlinesInArrays(t *testing.T) {
+ tree, err := Load("a = [1,\n2,\n3]")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": []int64{int64(1), int64(2), int64(3)},
+ })
+}
+
+func TestArrayWithExtraComma(t *testing.T) {
+ tree, err := Load("a = [1,\n2,\n3,\n]")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": []int64{int64(1), int64(2), int64(3)},
+ })
+}
+
+func TestArrayWithExtraCommaComment(t *testing.T) {
+ tree, err := Load("a = [1, # wow\n2, # such items\n3, # so array\n]")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": []int64{int64(1), int64(2), int64(3)},
+ })
+}
+
+func TestSimpleInlineGroup(t *testing.T) {
+ tree, err := Load("key = {a = 42}")
+ assertTree(t, tree, err, map[string]interface{}{
+ "key": map[string]interface{}{
+ "a": int64(42),
+ },
+ })
+}
+
+func TestDoubleInlineGroup(t *testing.T) {
+ tree, err := Load("key = {a = 42, b = \"foo\"}")
+ assertTree(t, tree, err, map[string]interface{}{
+ "key": map[string]interface{}{
+ "a": int64(42),
+ "b": "foo",
+ },
+ })
+}
+
+func TestExampleInlineGroup(t *testing.T) {
+ tree, err := Load(`name = { first = "Tom", last = "Preston-Werner" }
+point = { x = 1, y = 2 }`)
+ assertTree(t, tree, err, map[string]interface{}{
+ "name": map[string]interface{}{
+ "first": "Tom",
+ "last": "Preston-Werner",
+ },
+ "point": map[string]interface{}{
+ "x": int64(1),
+ "y": int64(2),
+ },
+ })
+}
+
+func TestExampleInlineGroupInArray(t *testing.T) {
+ tree, err := Load(`points = [{ x = 1, y = 2 }]`)
+ assertTree(t, tree, err, map[string]interface{}{
+ "points": []map[string]interface{}{
+ {
+ "x": int64(1),
+ "y": int64(2),
+ },
+ },
+ })
+}
+
+func TestInlineTableUnterminated(t *testing.T) {
+ _, err := Load("foo = {")
+ if err.Error() != "(1, 8): unterminated inline table" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestInlineTableCommaExpected(t *testing.T) {
+ _, err := Load("foo = {hello = 53 test = foo}")
+ if err.Error() != "(1, 19): comma expected between fields in inline table" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestInlineTableCommaStart(t *testing.T) {
+ _, err := Load("foo = {, hello = 53}")
+ if err.Error() != "(1, 8): inline table cannot start with a comma" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestInlineTableDoubleComma(t *testing.T) {
+ _, err := Load("foo = {hello = 53,, foo = 17}")
+ if err.Error() != "(1, 19): need field between two commas in inline table" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestDuplicateGroups(t *testing.T) {
+ _, err := Load("[foo]\na=2\n[foo]b=3")
+ if err.Error() != "(3, 2): duplicated tables" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestDuplicateKeys(t *testing.T) {
+ _, err := Load("foo = 2\nfoo = 3")
+ if err.Error() != "(2, 1): The following key was defined twice: foo" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestEmptyIntermediateTable(t *testing.T) {
+ _, err := Load("[foo..bar]")
+ if err.Error() != "(1, 2): invalid table array key: empty table key" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestImplicitDeclarationBefore(t *testing.T) {
+ tree, err := Load("[a.b.c]\nanswer = 42\n[a]\nbetter = 43")
+ assertTree(t, tree, err, map[string]interface{}{
+ "a": map[string]interface{}{
+ "b": map[string]interface{}{
+ "c": map[string]interface{}{
+ "answer": int64(42),
+ },
+ },
+ "better": int64(43),
+ },
+ })
+}
+
+func TestFloatsWithoutLeadingZeros(t *testing.T) {
+ _, err := Load("a = .42")
+ if err.Error() != "(1, 5): cannot start float with a dot" {
+ t.Error("Bad error message:", err.Error())
+ }
+
+ _, err = Load("a = -.42")
+ if err.Error() != "(1, 5): cannot start float with a dot" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestMissingFile(t *testing.T) {
+ _, err := LoadFile("foo.toml")
+ if err.Error() != "open foo.toml: no such file or directory" &&
+ err.Error() != "open foo.toml: The system cannot find the file specified." {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestParseFile(t *testing.T) {
+ tree, err := LoadFile("example.toml")
+
+ assertTree(t, tree, err, map[string]interface{}{
+ "title": "TOML Example",
+ "owner": map[string]interface{}{
+ "name": "Tom Preston-Werner",
+ "organization": "GitHub",
+ "bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
+ "dob": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
+ },
+ "database": map[string]interface{}{
+ "server": "192.168.1.1",
+ "ports": []int64{8001, 8001, 8002},
+ "connection_max": 5000,
+ "enabled": true,
+ },
+ "servers": map[string]interface{}{
+ "alpha": map[string]interface{}{
+ "ip": "10.0.0.1",
+ "dc": "eqdc10",
+ },
+ "beta": map[string]interface{}{
+ "ip": "10.0.0.2",
+ "dc": "eqdc10",
+ },
+ },
+ "clients": map[string]interface{}{
+ "data": []interface{}{
+ []string{"gamma", "delta"},
+ []int64{1, 2},
+ },
+ },
+ })
+}
+
+func TestParseFileCRLF(t *testing.T) {
+ tree, err := LoadFile("example-crlf.toml")
+
+ assertTree(t, tree, err, map[string]interface{}{
+ "title": "TOML Example",
+ "owner": map[string]interface{}{
+ "name": "Tom Preston-Werner",
+ "organization": "GitHub",
+ "bio": "GitHub Cofounder & CEO\nLikes tater tots and beer.",
+ "dob": time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
+ },
+ "database": map[string]interface{}{
+ "server": "192.168.1.1",
+ "ports": []int64{8001, 8001, 8002},
+ "connection_max": 5000,
+ "enabled": true,
+ },
+ "servers": map[string]interface{}{
+ "alpha": map[string]interface{}{
+ "ip": "10.0.0.1",
+ "dc": "eqdc10",
+ },
+ "beta": map[string]interface{}{
+ "ip": "10.0.0.2",
+ "dc": "eqdc10",
+ },
+ },
+ "clients": map[string]interface{}{
+ "data": []interface{}{
+ []string{"gamma", "delta"},
+ []int64{1, 2},
+ },
+ },
+ })
+}
+
+func TestParseKeyGroupArray(t *testing.T) {
+ tree, err := Load("[[foo.bar]] a = 42\n[[foo.bar]] a = 69")
+ assertTree(t, tree, err, map[string]interface{}{
+ "foo": map[string]interface{}{
+ "bar": []map[string]interface{}{
+ {"a": int64(42)},
+ {"a": int64(69)},
+ },
+ },
+ })
+}
+
+func TestParseKeyGroupArrayUnfinished(t *testing.T) {
+ _, err := Load("[[foo.bar]\na = 42")
+ if err.Error() != "(1, 10): was expecting token [[, but got unclosed table array key instead" {
+ t.Error("Bad error message:", err.Error())
+ }
+
+ _, err = Load("[[foo.[bar]\na = 42")
+ if err.Error() != "(1, 3): unexpected token table array key cannot contain ']', was expecting a table array key" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestParseKeyGroupArrayQueryExample(t *testing.T) {
+ tree, err := Load(`
+ [[book]]
+ title = "The Stand"
+ author = "Stephen King"
+ [[book]]
+ title = "For Whom the Bell Tolls"
+ author = "Ernest Hemmingway"
+ [[book]]
+ title = "Neuromancer"
+ author = "William Gibson"
+ `)
+
+ assertTree(t, tree, err, map[string]interface{}{
+ "book": []map[string]interface{}{
+ {"title": "The Stand", "author": "Stephen King"},
+ {"title": "For Whom the Bell Tolls", "author": "Ernest Hemmingway"},
+ {"title": "Neuromancer", "author": "William Gibson"},
+ },
+ })
+}
+
+func TestParseKeyGroupArraySpec(t *testing.T) {
+ tree, err := Load("[[fruit]]\n name=\"apple\"\n [fruit.physical]\n color=\"red\"\n shape=\"round\"\n [[fruit]]\n name=\"banana\"")
+ assertTree(t, tree, err, map[string]interface{}{
+ "fruit": []map[string]interface{}{
+ {"name": "apple", "physical": map[string]interface{}{"color": "red", "shape": "round"}},
+ {"name": "banana"},
+ },
+ })
+}
+
+func TestTomlValueStringRepresentation(t *testing.T) {
+ for idx, item := range []struct {
+ Value interface{}
+ Expect string
+ }{
+ {int64(12345), "12345"},
+ {uint64(50), "50"},
+ {float64(123.45), "123.45"},
+ {bool(true), "true"},
+ {"hello world", "\"hello world\""},
+ {"\b\t\n\f\r\"\\", "\"\\b\\t\\n\\f\\r\\\"\\\\\""},
+ {"\x05", "\"\\u0005\""},
+ {time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
+ "1979-05-27T07:32:00Z"},
+ {[]interface{}{"gamma", "delta"},
+ "[\"gamma\",\"delta\"]"},
+ {nil, ""},
+ } {
+ result, err := tomlValueStringRepresentation(item.Value)
+ if err != nil {
+ t.Errorf("Test %d - unexpected error: %s", idx, err)
+ }
+ if result != item.Expect {
+ t.Errorf("Test %d - got '%s', expected '%s'", idx, result, item.Expect)
+ }
+ }
+}
+
+func TestToStringMapStringString(t *testing.T) {
+ tree, err := TreeFromMap(map[string]interface{}{"m": map[string]interface{}{"v": "abc"}})
+ if err != nil {
+ t.Fatalf("unexpected error: %s", err)
+ }
+ want := "\n[m]\n v = \"abc\"\n"
+ got := tree.String()
+
+ if got != want {
+ t.Errorf("want:\n%q\ngot:\n%q", want, got)
+ }
+}
+
+func assertPosition(t *testing.T, text string, ref map[string]Position) {
+ tree, err := Load(text)
+ if err != nil {
+ t.Errorf("Error loading document text: `%v`", text)
+ t.Errorf("Error: %v", err)
+ }
+ for path, pos := range ref {
+ testPos := tree.GetPosition(path)
+ if testPos.Invalid() {
+ t.Errorf("Failed to query tree path or path has invalid position: %s", path)
+ } else if pos != testPos {
+ t.Errorf("Expected position %v, got %v instead", pos, testPos)
+ }
+ }
+}
+
+func TestDocumentPositions(t *testing.T) {
+ assertPosition(t,
+ "[foo]\nbar=42\nbaz=69",
+ map[string]Position{
+ "": {1, 1},
+ "foo": {1, 1},
+ "foo.bar": {2, 1},
+ "foo.baz": {3, 1},
+ })
+}
+
+func TestDocumentPositionsWithSpaces(t *testing.T) {
+ assertPosition(t,
+ " [foo]\n bar=42\n baz=69",
+ map[string]Position{
+ "": {1, 1},
+ "foo": {1, 3},
+ "foo.bar": {2, 3},
+ "foo.baz": {3, 3},
+ })
+}
+
+func TestDocumentPositionsWithGroupArray(t *testing.T) {
+ assertPosition(t,
+ "[[foo]]\nbar=42\nbaz=69",
+ map[string]Position{
+ "": {1, 1},
+ "foo": {1, 1},
+ "foo.bar": {2, 1},
+ "foo.baz": {3, 1},
+ })
+}
+
+func TestNestedTreePosition(t *testing.T) {
+ assertPosition(t,
+ "[foo.bar]\na=42\nb=69",
+ map[string]Position{
+ "": {1, 1},
+ "foo": {1, 1},
+ "foo.bar": {1, 1},
+ "foo.bar.a": {2, 1},
+ "foo.bar.b": {3, 1},
+ })
+}
+
+func TestInvalidGroupArray(t *testing.T) {
+ _, err := Load("[table#key]\nanswer = 42")
+ if err == nil {
+ t.Error("Should error")
+ }
+
+ _, err = Load("[foo.[bar]\na = 42")
+ if err.Error() != "(1, 2): unexpected token table key cannot contain ']', was expecting a table key" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestDoubleEqual(t *testing.T) {
+ _, err := Load("foo= = 2")
+ if err.Error() != "(1, 6): cannot have multiple equals for the same key" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestGroupArrayReassign(t *testing.T) {
+ _, err := Load("[hello]\n[[hello]]")
+ if err.Error() != "(2, 3): key \"hello\" is already assigned and not of type table array" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
+
+func TestInvalidFloatParsing(t *testing.T) {
+ _, err := Load("a=1e_2")
+ if err.Error() != "(1, 3): invalid use of _ in number" {
+ t.Error("Bad error message:", err.Error())
+ }
+
+ _, err = Load("a=1e2_")
+ if err.Error() != "(1, 3): invalid use of _ in number" {
+ t.Error("Bad error message:", err.Error())
+ }
+
+ _, err = Load("a=1__2")
+ if err.Error() != "(1, 3): invalid use of _ in number" {
+ t.Error("Bad error message:", err.Error())
+ }
+
+ _, err = Load("a=_1_2")
+ if err.Error() != "(1, 3): cannot start number with underscore" {
+ t.Error("Bad error message:", err.Error())
+ }
+}
diff --git a/vendor/github.com/pelletier/go-toml/position.go b/vendor/github.com/pelletier/go-toml/position.go
new file mode 100644
index 000000000..c17bff87b
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/position.go
@@ -0,0 +1,29 @@
+// Position support for go-toml
+
+package toml
+
+import (
+ "fmt"
+)
+
+// Position of a document element within a TOML document.
+//
+// Line and Col are both 1-indexed positions for the element's line number and
+// column number, respectively. Values of zero or less will cause Invalid(),
+// to return true.
+type Position struct {
+ Line int // line within the document
+ Col int // column within the line
+}
+
+// String representation of the position.
+// Displays 1-indexed line and column numbers.
+func (p Position) String() string {
+ return fmt.Sprintf("(%d, %d)", p.Line, p.Col)
+}
+
+// Invalid returns whether or not the position is valid (i.e. with negative or
+// null values)
+func (p Position) Invalid() bool {
+ return p.Line <= 0 || p.Col <= 0
+}
diff --git a/vendor/github.com/pelletier/go-toml/position_test.go b/vendor/github.com/pelletier/go-toml/position_test.go
new file mode 100644
index 000000000..63ad1afc8
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/position_test.go
@@ -0,0 +1,29 @@
+// Testing support for go-toml
+
+package toml
+
+import (
+ "testing"
+)
+
+func TestPositionString(t *testing.T) {
+ p := Position{123, 456}
+ expected := "(123, 456)"
+ value := p.String()
+
+ if value != expected {
+ t.Errorf("Expected %v, got %v instead", expected, value)
+ }
+}
+
+func TestInvalid(t *testing.T) {
+ for i, v := range []Position{
+ {0, 1234},
+ {1234, 0},
+ {0, 0},
+ } {
+ if !v.Invalid() {
+ t.Errorf("Position at %v is valid: %v", i, v)
+ }
+ }
+}
diff --git a/vendor/github.com/pelletier/go-toml/query.go b/vendor/github.com/pelletier/go-toml/query.go
new file mode 100644
index 000000000..307a1ecab
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/query.go
@@ -0,0 +1,153 @@
+package toml
+
+import (
+ "time"
+)
+
+// NodeFilterFn represents a user-defined filter function, for use with
+// Query.SetFilter().
+//
+// The return value of the function must indicate if 'node' is to be included
+// at this stage of the TOML path. Returning true will include the node, and
+// returning false will exclude it.
+//
+// NOTE: Care should be taken to write script callbacks such that they are safe
+// to use from multiple goroutines.
+type NodeFilterFn func(node interface{}) bool
+
+// QueryResult is the result of Executing a Query.
+type QueryResult struct {
+ items []interface{}
+ positions []Position
+}
+
+// appends a value/position pair to the result set.
+func (r *QueryResult) appendResult(node interface{}, pos Position) {
+ r.items = append(r.items, node)
+ r.positions = append(r.positions, pos)
+}
+
+// Values is a set of values within a QueryResult. The order of values is not
+// guaranteed to be in document order, and may be different each time a query is
+// executed.
+func (r QueryResult) Values() []interface{} {
+ values := make([]interface{}, len(r.items))
+ for i, v := range r.items {
+ o, ok := v.(*tomlValue)
+ if ok {
+ values[i] = o.value
+ } else {
+ values[i] = v
+ }
+ }
+ return values
+}
+
+// Positions is a set of positions for values within a QueryResult. Each index
+// in Positions() corresponds to the entry in Value() of the same index.
+func (r QueryResult) Positions() []Position {
+ return r.positions
+}
+
+// runtime context for executing query paths
+type queryContext struct {
+ result *QueryResult
+ filters *map[string]NodeFilterFn
+ lastPosition Position
+}
+
+// generic path functor interface
+type pathFn interface {
+ setNext(next pathFn)
+ call(node interface{}, ctx *queryContext)
+}
+
+// A Query is the representation of a compiled TOML path. A Query is safe
+// for concurrent use by multiple goroutines.
+type Query struct {
+ root pathFn
+ tail pathFn
+ filters *map[string]NodeFilterFn
+}
+
+func newQuery() *Query {
+ return &Query{
+ root: nil,
+ tail: nil,
+ filters: &defaultFilterFunctions,
+ }
+}
+
+func (q *Query) appendPath(next pathFn) {
+ if q.root == nil {
+ q.root = next
+ } else {
+ q.tail.setNext(next)
+ }
+ q.tail = next
+ next.setNext(newTerminatingFn()) // init the next functor
+}
+
+// CompileQuery compiles a TOML path expression. The returned Query can be used
+// to match elements within a TomlTree and its descendants.
+func CompileQuery(path string) (*Query, error) {
+ return parseQuery(lexQuery(path))
+}
+
+// Execute executes a query against a TomlTree, and returns the result of the query.
+func (q *Query) Execute(tree *TomlTree) *QueryResult {
+ result := &QueryResult{
+ items: []interface{}{},
+ positions: []Position{},
+ }
+ if q.root == nil {
+ result.appendResult(tree, tree.GetPosition(""))
+ } else {
+ ctx := &queryContext{
+ result: result,
+ filters: q.filters,
+ }
+ q.root.call(tree, ctx)
+ }
+ return result
+}
+
+// SetFilter sets a user-defined filter function. These may be used inside
+// "?(..)" query expressions to filter TOML document elements within a query.
+func (q *Query) SetFilter(name string, fn NodeFilterFn) {
+ if q.filters == &defaultFilterFunctions {
+ // clone the static table
+ q.filters = &map[string]NodeFilterFn{}
+ for k, v := range defaultFilterFunctions {
+ (*q.filters)[k] = v
+ }
+ }
+ (*q.filters)[name] = fn
+}
+
+var defaultFilterFunctions = map[string]NodeFilterFn{
+ "tree": func(node interface{}) bool {
+ _, ok := node.(*TomlTree)
+ return ok
+ },
+ "int": func(node interface{}) bool {
+ _, ok := node.(int64)
+ return ok
+ },
+ "float": func(node interface{}) bool {
+ _, ok := node.(float64)
+ return ok
+ },
+ "string": func(node interface{}) bool {
+ _, ok := node.(string)
+ return ok
+ },
+ "time": func(node interface{}) bool {
+ _, ok := node.(time.Time)
+ return ok
+ },
+ "bool": func(node interface{}) bool {
+ _, ok := node.(bool)
+ return ok
+ },
+}
diff --git a/vendor/github.com/pelletier/go-toml/query_test.go b/vendor/github.com/pelletier/go-toml/query_test.go
new file mode 100644
index 000000000..0d9f3832b
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/query_test.go
@@ -0,0 +1,70 @@
+package toml
+
+import (
+ "testing"
+)
+
+func assertArrayContainsInAnyOrder(t *testing.T, array []interface{}, objects ...interface{}) {
+ if len(array) != len(objects) {
+ t.Fatalf("array contains %d objects but %d are expected", len(array), len(objects))
+ }
+
+ for _, o := range objects {
+ found := false
+ for _, a := range array {
+ if a == o {
+ found = true
+ break
+ }
+ }
+ if !found {
+ t.Fatal(o, "not found in array", array)
+ }
+ }
+}
+
+func TestQueryExample(t *testing.T) {
+ config, _ := Load(`
+ [[book]]
+ title = "The Stand"
+ author = "Stephen King"
+ [[book]]
+ title = "For Whom the Bell Tolls"
+ author = "Ernest Hemmingway"
+ [[book]]
+ title = "Neuromancer"
+ author = "William Gibson"
+ `)
+
+ authors, _ := config.Query("$.book.author")
+ names := authors.Values()
+ if len(names) != 3 {
+ t.Fatalf("query should return 3 names but returned %d", len(names))
+ }
+ assertArrayContainsInAnyOrder(t, names, "Stephen King", "Ernest Hemmingway", "William Gibson")
+}
+
+func TestQueryReadmeExample(t *testing.T) {
+ config, _ := Load(`
+[postgres]
+user = "pelletier"
+password = "mypassword"
+`)
+ results, _ := config.Query("$..[user,password]")
+ values := results.Values()
+ if len(values) != 2 {
+ t.Fatalf("query should return 2 values but returned %d", len(values))
+ }
+ assertArrayContainsInAnyOrder(t, values, "pelletier", "mypassword")
+}
+
+func TestQueryPathNotPresent(t *testing.T) {
+ config, _ := Load(`a = "hello"`)
+ results, err := config.Query("$.foo.bar")
+ if err != nil {
+ t.Fatalf("err should be nil. got %s instead", err)
+ }
+ if len(results.items) != 0 {
+ t.Fatalf("no items should be matched. %d matched instead", len(results.items))
+ }
+}
diff --git a/vendor/github.com/pelletier/go-toml/querylexer.go b/vendor/github.com/pelletier/go-toml/querylexer.go
new file mode 100644
index 000000000..960681d0f
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/querylexer.go
@@ -0,0 +1,356 @@
+// TOML JSONPath lexer.
+//
+// Written using the principles developed by Rob Pike in
+// http://www.youtube.com/watch?v=HxaD_trXwRE
+
+package toml
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+)
+
+// Lexer state function
+type queryLexStateFn func() queryLexStateFn
+
+// Lexer definition
+type queryLexer struct {
+ input string
+ start int
+ pos int
+ width int
+ tokens chan token
+ depth int
+ line int
+ col int
+ stringTerm string
+}
+
+func (l *queryLexer) run() {
+ for state := l.lexVoid; state != nil; {
+ state = state()
+ }
+ close(l.tokens)
+}
+
+func (l *queryLexer) nextStart() {
+ // iterate by runes (utf8 characters)
+ // search for newlines and advance line/col counts
+ for i := l.start; i < l.pos; {
+ r, width := utf8.DecodeRuneInString(l.input[i:])
+ if r == '\n' {
+ l.line++
+ l.col = 1
+ } else {
+ l.col++
+ }
+ i += width
+ }
+ // advance start position to next token
+ l.start = l.pos
+}
+
+func (l *queryLexer) emit(t tokenType) {
+ l.tokens <- token{
+ Position: Position{l.line, l.col},
+ typ: t,
+ val: l.input[l.start:l.pos],
+ }
+ l.nextStart()
+}
+
+func (l *queryLexer) emitWithValue(t tokenType, value string) {
+ l.tokens <- token{
+ Position: Position{l.line, l.col},
+ typ: t,
+ val: value,
+ }
+ l.nextStart()
+}
+
+func (l *queryLexer) next() rune {
+ if l.pos >= len(l.input) {
+ l.width = 0
+ return eof
+ }
+ var r rune
+ r, l.width = utf8.DecodeRuneInString(l.input[l.pos:])
+ l.pos += l.width
+ return r
+}
+
+func (l *queryLexer) ignore() {
+ l.nextStart()
+}
+
+func (l *queryLexer) backup() {
+ l.pos -= l.width
+}
+
+func (l *queryLexer) errorf(format string, args ...interface{}) queryLexStateFn {
+ l.tokens <- token{
+ Position: Position{l.line, l.col},
+ typ: tokenError,
+ val: fmt.Sprintf(format, args...),
+ }
+ return nil
+}
+
+func (l *queryLexer) peek() rune {
+ r := l.next()
+ l.backup()
+ return r
+}
+
+func (l *queryLexer) accept(valid string) bool {
+ if strings.ContainsRune(valid, l.next()) {
+ return true
+ }
+ l.backup()
+ return false
+}
+
+func (l *queryLexer) follow(next string) bool {
+ return strings.HasPrefix(l.input[l.pos:], next)
+}
+
+func (l *queryLexer) lexVoid() queryLexStateFn {
+ for {
+ next := l.peek()
+ switch next {
+ case '$':
+ l.pos++
+ l.emit(tokenDollar)
+ continue
+ case '.':
+ if l.follow("..") {
+ l.pos += 2
+ l.emit(tokenDotDot)
+ } else {
+ l.pos++
+ l.emit(tokenDot)
+ }
+ continue
+ case '[':
+ l.pos++
+ l.emit(tokenLeftBracket)
+ continue
+ case ']':
+ l.pos++
+ l.emit(tokenRightBracket)
+ continue
+ case ',':
+ l.pos++
+ l.emit(tokenComma)
+ continue
+ case '*':
+ l.pos++
+ l.emit(tokenStar)
+ continue
+ case '(':
+ l.pos++
+ l.emit(tokenLeftParen)
+ continue
+ case ')':
+ l.pos++
+ l.emit(tokenRightParen)
+ continue
+ case '?':
+ l.pos++
+ l.emit(tokenQuestion)
+ continue
+ case ':':
+ l.pos++
+ l.emit(tokenColon)
+ continue
+ case '\'':
+ l.ignore()
+ l.stringTerm = string(next)
+ return l.lexString
+ case '"':
+ l.ignore()
+ l.stringTerm = string(next)
+ return l.lexString
+ }
+
+ if isSpace(next) {
+ l.next()
+ l.ignore()
+ continue
+ }
+
+ if isAlphanumeric(next) {
+ return l.lexKey
+ }
+
+ if next == '+' || next == '-' || isDigit(next) {
+ return l.lexNumber
+ }
+
+ if l.next() == eof {
+ break
+ }
+
+ return l.errorf("unexpected char: '%v'", next)
+ }
+ l.emit(tokenEOF)
+ return nil
+}
+
+func (l *queryLexer) lexKey() queryLexStateFn {
+ for {
+ next := l.peek()
+ if !isAlphanumeric(next) {
+ l.emit(tokenKey)
+ return l.lexVoid
+ }
+
+ if l.next() == eof {
+ break
+ }
+ }
+ l.emit(tokenEOF)
+ return nil
+}
+
+func (l *queryLexer) lexString() queryLexStateFn {
+ l.pos++
+ l.ignore()
+ growingString := ""
+
+ for {
+ if l.follow(l.stringTerm) {
+ l.emitWithValue(tokenString, growingString)
+ l.pos++
+ l.ignore()
+ return l.lexVoid
+ }
+
+ if l.follow("\\\"") {
+ l.pos++
+ growingString += "\""
+ } else if l.follow("\\'") {
+ l.pos++
+ growingString += "'"
+ } else if l.follow("\\n") {
+ l.pos++
+ growingString += "\n"
+ } else if l.follow("\\b") {
+ l.pos++
+ growingString += "\b"
+ } else if l.follow("\\f") {
+ l.pos++
+ growingString += "\f"
+ } else if l.follow("\\/") {
+ l.pos++
+ growingString += "/"
+ } else if l.follow("\\t") {
+ l.pos++
+ growingString += "\t"
+ } else if l.follow("\\r") {
+ l.pos++
+ growingString += "\r"
+ } else if l.follow("\\\\") {
+ l.pos++
+ growingString += "\\"
+ } else if l.follow("\\u") {
+ l.pos += 2
+ code := ""
+ for i := 0; i < 4; i++ {
+ c := l.peek()
+ l.pos++
+ if !isHexDigit(c) {
+ return l.errorf("unfinished unicode escape")
+ }
+ code = code + string(c)
+ }
+ l.pos--
+ intcode, err := strconv.ParseInt(code, 16, 32)
+ if err != nil {
+ return l.errorf("invalid unicode escape: \\u" + code)
+ }
+ growingString += string(rune(intcode))
+ } else if l.follow("\\U") {
+ l.pos += 2
+ code := ""
+ for i := 0; i < 8; i++ {
+ c := l.peek()
+ l.pos++
+ if !isHexDigit(c) {
+ return l.errorf("unfinished unicode escape")
+ }
+ code = code + string(c)
+ }
+ l.pos--
+ intcode, err := strconv.ParseInt(code, 16, 32)
+ if err != nil {
+ return l.errorf("invalid unicode escape: \\u" + code)
+ }
+ growingString += string(rune(intcode))
+ } else if l.follow("\\") {
+ l.pos++
+ return l.errorf("invalid escape sequence: \\" + string(l.peek()))
+ } else {
+ growingString += string(l.peek())
+ }
+
+ if l.next() == eof {
+ break
+ }
+ }
+
+ return l.errorf("unclosed string")
+}
+
+func (l *queryLexer) lexNumber() queryLexStateFn {
+ l.ignore()
+ if !l.accept("+") {
+ l.accept("-")
+ }
+ pointSeen := false
+ digitSeen := false
+ for {
+ next := l.next()
+ if next == '.' {
+ if pointSeen {
+ return l.errorf("cannot have two dots in one float")
+ }
+ if !isDigit(l.peek()) {
+ return l.errorf("float cannot end with a dot")
+ }
+ pointSeen = true
+ } else if isDigit(next) {
+ digitSeen = true
+ } else {
+ l.backup()
+ break
+ }
+ if pointSeen && !digitSeen {
+ return l.errorf("cannot start float with a dot")
+ }
+ }
+
+ if !digitSeen {
+ return l.errorf("no digit in that number")
+ }
+ if pointSeen {
+ l.emit(tokenFloat)
+ } else {
+ l.emit(tokenInteger)
+ }
+ return l.lexVoid
+}
+
+// Entry point
+func lexQuery(input string) chan token {
+ l := &queryLexer{
+ input: input,
+ tokens: make(chan token),
+ line: 1,
+ col: 1,
+ }
+ go l.run()
+ return l.tokens
+}
diff --git a/vendor/github.com/pelletier/go-toml/querylexer_test.go b/vendor/github.com/pelletier/go-toml/querylexer_test.go
new file mode 100644
index 000000000..2d0803ff7
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/querylexer_test.go
@@ -0,0 +1,178 @@
+package toml
+
+import (
+ "testing"
+)
+
+func testQLFlow(t *testing.T, input string, expectedFlow []token) {
+ ch := lexQuery(input)
+ for idx, expected := range expectedFlow {
+ token := <-ch
+ if token != expected {
+ t.Log("While testing #", idx, ":", input)
+ t.Log("compared (got)", token, "to (expected)", expected)
+ t.Log("\tvalue:", token.val, "<->", expected.val)
+ t.Log("\tvalue as bytes:", []byte(token.val), "<->", []byte(expected.val))
+ t.Log("\ttype:", token.typ.String(), "<->", expected.typ.String())
+ t.Log("\tline:", token.Line, "<->", expected.Line)
+ t.Log("\tcolumn:", token.Col, "<->", expected.Col)
+ t.Log("compared", token, "to", expected)
+ t.FailNow()
+ }
+ }
+
+ tok, ok := <-ch
+ if ok {
+ t.Log("channel is not closed!")
+ t.Log(len(ch)+1, "tokens remaining:")
+
+ t.Log("token ->", tok)
+ for token := range ch {
+ t.Log("token ->", token)
+ }
+ t.FailNow()
+ }
+}
+
+func TestLexSpecialChars(t *testing.T) {
+ testQLFlow(t, " .$[]..()?*", []token{
+ {Position{1, 2}, tokenDot, "."},
+ {Position{1, 3}, tokenDollar, "$"},
+ {Position{1, 4}, tokenLeftBracket, "["},
+ {Position{1, 5}, tokenRightBracket, "]"},
+ {Position{1, 6}, tokenDotDot, ".."},
+ {Position{1, 8}, tokenLeftParen, "("},
+ {Position{1, 9}, tokenRightParen, ")"},
+ {Position{1, 10}, tokenQuestion, "?"},
+ {Position{1, 11}, tokenStar, "*"},
+ {Position{1, 12}, tokenEOF, ""},
+ })
+}
+
+func TestLexString(t *testing.T) {
+ testQLFlow(t, "'foo\n'", []token{
+ {Position{1, 2}, tokenString, "foo\n"},
+ {Position{2, 2}, tokenEOF, ""},
+ })
+}
+
+func TestLexDoubleString(t *testing.T) {
+ testQLFlow(t, `"bar"`, []token{
+ {Position{1, 2}, tokenString, "bar"},
+ {Position{1, 6}, tokenEOF, ""},
+ })
+}
+
+func TestLexStringEscapes(t *testing.T) {
+ testQLFlow(t, `"foo \" \' \b \f \/ \t \r \\ \u03A9 \U00012345 \n bar"`, []token{
+ {Position{1, 2}, tokenString, "foo \" ' \b \f / \t \r \\ \u03A9 \U00012345 \n bar"},
+ {Position{1, 55}, tokenEOF, ""},
+ })
+}
+
+func TestLexStringUnfinishedUnicode4(t *testing.T) {
+ testQLFlow(t, `"\u000"`, []token{
+ {Position{1, 2}, tokenError, "unfinished unicode escape"},
+ })
+}
+
+func TestLexStringUnfinishedUnicode8(t *testing.T) {
+ testQLFlow(t, `"\U0000"`, []token{
+ {Position{1, 2}, tokenError, "unfinished unicode escape"},
+ })
+}
+
+func TestLexStringInvalidEscape(t *testing.T) {
+ testQLFlow(t, `"\x"`, []token{
+ {Position{1, 2}, tokenError, "invalid escape sequence: \\x"},
+ })
+}
+
+func TestLexStringUnfinished(t *testing.T) {
+ testQLFlow(t, `"bar`, []token{
+ {Position{1, 2}, tokenError, "unclosed string"},
+ })
+}
+
+func TestLexKey(t *testing.T) {
+ testQLFlow(t, "foo", []token{
+ {Position{1, 1}, tokenKey, "foo"},
+ {Position{1, 4}, tokenEOF, ""},
+ })
+}
+
+func TestLexRecurse(t *testing.T) {
+ testQLFlow(t, "$..*", []token{
+ {Position{1, 1}, tokenDollar, "$"},
+ {Position{1, 2}, tokenDotDot, ".."},
+ {Position{1, 4}, tokenStar, "*"},
+ {Position{1, 5}, tokenEOF, ""},
+ })
+}
+
+func TestLexBracketKey(t *testing.T) {
+ testQLFlow(t, "$[foo]", []token{
+ {Position{1, 1}, tokenDollar, "$"},
+ {Position{1, 2}, tokenLeftBracket, "["},
+ {Position{1, 3}, tokenKey, "foo"},
+ {Position{1, 6}, tokenRightBracket, "]"},
+ {Position{1, 7}, tokenEOF, ""},
+ })
+}
+
+func TestLexSpace(t *testing.T) {
+ testQLFlow(t, "foo bar baz", []token{
+ {Position{1, 1}, tokenKey, "foo"},
+ {Position{1, 5}, tokenKey, "bar"},
+ {Position{1, 9}, tokenKey, "baz"},
+ {Position{1, 12}, tokenEOF, ""},
+ })
+}
+
+func TestLexInteger(t *testing.T) {
+ testQLFlow(t, "100 +200 -300", []token{
+ {Position{1, 1}, tokenInteger, "100"},
+ {Position{1, 5}, tokenInteger, "+200"},
+ {Position{1, 10}, tokenInteger, "-300"},
+ {Position{1, 14}, tokenEOF, ""},
+ })
+}
+
+func TestLexFloat(t *testing.T) {
+ testQLFlow(t, "100.0 +200.0 -300.0", []token{
+ {Position{1, 1}, tokenFloat, "100.0"},
+ {Position{1, 7}, tokenFloat, "+200.0"},
+ {Position{1, 14}, tokenFloat, "-300.0"},
+ {Position{1, 20}, tokenEOF, ""},
+ })
+}
+
+func TestLexFloatWithMultipleDots(t *testing.T) {
+ testQLFlow(t, "4.2.", []token{
+ {Position{1, 1}, tokenError, "cannot have two dots in one float"},
+ })
+}
+
+func TestLexFloatLeadingDot(t *testing.T) {
+ testQLFlow(t, "+.1", []token{
+ {Position{1, 1}, tokenError, "cannot start float with a dot"},
+ })
+}
+
+func TestLexFloatWithTrailingDot(t *testing.T) {
+ testQLFlow(t, "42.", []token{
+ {Position{1, 1}, tokenError, "float cannot end with a dot"},
+ })
+}
+
+func TestLexNumberWithoutDigit(t *testing.T) {
+ testQLFlow(t, "+", []token{
+ {Position{1, 1}, tokenError, "no digit in that number"},
+ })
+}
+
+func TestLexUnknown(t *testing.T) {
+ testQLFlow(t, "^", []token{
+ {Position{1, 1}, tokenError, "unexpected char: '94'"},
+ })
+}
diff --git a/vendor/github.com/pelletier/go-toml/queryparser.go b/vendor/github.com/pelletier/go-toml/queryparser.go
new file mode 100644
index 000000000..1cbfc83b2
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/queryparser.go
@@ -0,0 +1,275 @@
+/*
+ Based on the "jsonpath" spec/concept.
+
+ http://goessner.net/articles/JsonPath/
+ https://code.google.com/p/json-path/
+*/
+
+package toml
+
+import (
+ "fmt"
+)
+
// maxInt is the largest value of the platform int; used as the default
// open upper bound of a slice expression.
const maxInt = int(^uint(0) >> 1)

// queryParser consumes tokens from the query lexer and builds up a
// *Query, one path function per matched expression.
type queryParser struct {
	flow         chan token // token stream from the lexer
	tokensBuffer []token    // read-back buffer for peek/lookahead
	query        *Query     // result under construction
	union        []pathFn   // sub-expressions of the union currently being parsed
	err          error      // first parse error, ends the state machine
}

// queryParserStateFn is one state of the parser; it returns the next
// state, or nil to stop.
type queryParserStateFn func() queryParserStateFn
+
// parseError records a formatted error (prefixed with the offending
// token's position) and returns nil to terminate the parse. Despite the
// original comment, it does not panic.
func (p *queryParser) parseError(tok *token, msg string, args ...interface{}) queryParserStateFn {
	p.err = fmt.Errorf(tok.Position.String()+": "+msg, args...)
	return nil // trigger parse to end
}
+
+func (p *queryParser) run() {
+ for state := p.parseStart; state != nil; {
+ state = state()
+ }
+}
+
// backup pushes tok onto the read-back buffer so a subsequent getToken
// returns it again.
func (p *queryParser) backup(tok *token) {
	p.tokensBuffer = append(p.tokensBuffer, *tok)
}
+
+func (p *queryParser) peek() *token {
+ if len(p.tokensBuffer) != 0 {
+ return &(p.tokensBuffer[0])
+ }
+
+ tok, ok := <-p.flow
+ if !ok {
+ return nil
+ }
+ p.backup(&tok)
+ return &tok
+}
+
// lookahead reports whether the next tokens match the given type
// sequence exactly. All tokens read during the check are pushed back
// onto the buffer, so the stream is left unchanged either way.
func (p *queryParser) lookahead(types ...tokenType) bool {
	result := true
	buffer := []token{}

	for _, typ := range types {
		tok := p.getToken()
		if tok == nil {
			// stream ended before the sequence completed
			result = false
			break
		}
		buffer = append(buffer, *tok)
		if tok.typ != typ {
			result = false
			break
		}
	}
	// add the tokens back to the buffer, and return
	p.tokensBuffer = append(p.tokensBuffer, buffer...)
	return result
}
+
+func (p *queryParser) getToken() *token {
+ if len(p.tokensBuffer) != 0 {
+ tok := p.tokensBuffer[0]
+ p.tokensBuffer = p.tokensBuffer[1:]
+ return &tok
+ }
+ tok, ok := <-p.flow
+ if !ok {
+ return nil
+ }
+ return &tok
+}
+
// parseStart consumes the mandatory leading '$'. An empty query (EOF or
// closed stream) is accepted and yields an empty path.
func (p *queryParser) parseStart() queryParserStateFn {
	tok := p.getToken()

	if tok == nil || tok.typ == tokenEOF {
		return nil
	}

	if tok.typ != tokenDollar {
		return p.parseError(tok, "Expected '$' at start of expression")
	}

	return p.parseMatchExpr
}
+
+// handle '.' prefix, '[]', and '..'
+func (p *queryParser) parseMatchExpr() queryParserStateFn {
+ tok := p.getToken()
+ switch tok.typ {
+ case tokenDotDot:
+ p.query.appendPath(&matchRecursiveFn{})
+ // nested parse for '..'
+ tok := p.getToken()
+ switch tok.typ {
+ case tokenKey:
+ p.query.appendPath(newMatchKeyFn(tok.val))
+ return p.parseMatchExpr
+ case tokenLeftBracket:
+ return p.parseBracketExpr
+ case tokenStar:
+ // do nothing - the recursive predicate is enough
+ return p.parseMatchExpr
+ }
+
+ case tokenDot:
+ // nested parse for '.'
+ tok := p.getToken()
+ switch tok.typ {
+ case tokenKey:
+ p.query.appendPath(newMatchKeyFn(tok.val))
+ return p.parseMatchExpr
+ case tokenStar:
+ p.query.appendPath(&matchAnyFn{})
+ return p.parseMatchExpr
+ }
+
+ case tokenLeftBracket:
+ return p.parseBracketExpr
+
+ case tokenEOF:
+ return nil // allow EOF at this stage
+ }
+ return p.parseError(tok, "expected match expression")
+}
+
+func (p *queryParser) parseBracketExpr() queryParserStateFn {
+ if p.lookahead(tokenInteger, tokenColon) {
+ return p.parseSliceExpr
+ }
+ if p.peek().typ == tokenColon {
+ return p.parseSliceExpr
+ }
+ return p.parseUnionExpr
+}
+
// parseUnionExpr parses the comma-separated contents of a bracket
// expression: integers (index match), keys/strings (key match), or a
// '?' filter (handled by parseFilterExpr, which re-enters this state).
// A single sub-expression is appended directly; several become one
// matchUnionFn.
func (p *queryParser) parseUnionExpr() queryParserStateFn {
	var tok *token

	// this state can be traversed after some sub-expressions
	// so be careful when setting up state in the parser
	if p.union == nil {
		p.union = []pathFn{}
	}

loop: // labeled loop for easy breaking
	for {
		if len(p.union) > 0 {
			// parse delimiter or terminator
			tok = p.getToken()
			switch tok.typ {
			case tokenComma:
				// do nothing
			case tokenRightBracket:
				break loop
			default:
				return p.parseError(tok, "expected ',' or ']', not '%s'", tok.val)
			}
		}

		// parse sub expression
		tok = p.getToken()
		switch tok.typ {
		case tokenInteger:
			p.union = append(p.union, newMatchIndexFn(tok.Int()))
		case tokenKey:
			p.union = append(p.union, newMatchKeyFn(tok.val))
		case tokenString:
			p.union = append(p.union, newMatchKeyFn(tok.val))
		case tokenQuestion:
			// filter appends to p.union itself and then returns here
			return p.parseFilterExpr
		default:
			return p.parseError(tok, "expected union sub expression, not '%s', %d", tok.val, len(p.union))
		}
	}

	// if there is only one sub-expression, use that instead
	if len(p.union) == 1 {
		p.query.appendPath(p.union[0])
	} else {
		p.query.appendPath(&matchUnionFn{p.union})
	}

	p.union = nil // clear out state
	return p.parseMatchExpr
}
+
+func (p *queryParser) parseSliceExpr() queryParserStateFn {
+ // init slice to grab all elements
+ start, end, step := 0, maxInt, 1
+
+ // parse optional start
+ tok := p.getToken()
+ if tok.typ == tokenInteger {
+ start = tok.Int()
+ tok = p.getToken()
+ }
+ if tok.typ != tokenColon {
+ return p.parseError(tok, "expected ':'")
+ }
+
+ // parse optional end
+ tok = p.getToken()
+ if tok.typ == tokenInteger {
+ end = tok.Int()
+ tok = p.getToken()
+ }
+ if tok.typ == tokenRightBracket {
+ p.query.appendPath(newMatchSliceFn(start, end, step))
+ return p.parseMatchExpr
+ }
+ if tok.typ != tokenColon {
+ return p.parseError(tok, "expected ']' or ':'")
+ }
+
+ // parse optional step
+ tok = p.getToken()
+ if tok.typ == tokenInteger {
+ step = tok.Int()
+ if step < 0 {
+ return p.parseError(tok, "step must be a positive value")
+ }
+ tok = p.getToken()
+ }
+ if tok.typ != tokenRightBracket {
+ return p.parseError(tok, "expected ']'")
+ }
+
+ p.query.appendPath(newMatchSliceFn(start, end, step))
+ return p.parseMatchExpr
+}
+
+func (p *queryParser) parseFilterExpr() queryParserStateFn {
+ tok := p.getToken()
+ if tok.typ != tokenLeftParen {
+ return p.parseError(tok, "expected left-parenthesis for filter expression")
+ }
+ tok = p.getToken()
+ if tok.typ != tokenKey && tok.typ != tokenString {
+ return p.parseError(tok, "expected key or string for filter funciton name")
+ }
+ name := tok.val
+ tok = p.getToken()
+ if tok.typ != tokenRightParen {
+ return p.parseError(tok, "expected right-parenthesis for filter expression")
+ }
+ p.union = append(p.union, newMatchFilterFn(name, tok.Position))
+ return p.parseUnionExpr
+}
+
// parseQuery runs the parser over a lexer token stream and returns the
// resulting Query together with the first parse error, if any.
func parseQuery(flow chan token) (*Query, error) {
	parser := &queryParser{
		flow:         flow,
		tokensBuffer: []token{},
		query:        newQuery(),
	}
	parser.run()
	return parser.query, parser.err
}
diff --git a/vendor/github.com/pelletier/go-toml/queryparser_test.go b/vendor/github.com/pelletier/go-toml/queryparser_test.go
new file mode 100644
index 000000000..b2b85cefd
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/queryparser_test.go
@@ -0,0 +1,483 @@
+package toml
+
+import (
+ "fmt"
+ "io/ioutil"
+ "sort"
+ "strings"
+ "testing"
+ "time"
+)
+
// queryTestNode pairs an expected query result value with the source
// position it should have been reported at.
type queryTestNode struct {
	value    interface{}
	position Position
}
+
// valueString renders any query result (or expected fixture) into a
// canonical string so results can be compared independently of map key
// and result ordering: collections are sorted element-wise, trees print
// as "{k:v, ...}", and result entries as "position:value".
func valueString(root interface{}) string {
	result := "" //fmt.Sprintf("%T:", root)
	switch node := root.(type) {
	case *tomlValue:
		return valueString(node.value)
	case *QueryResult:
		items := []string{}
		for i, v := range node.Values() {
			items = append(items, fmt.Sprintf("%s:%s",
				node.Positions()[i].String(), valueString(v)))
		}
		sort.Strings(items)
		result = "[" + strings.Join(items, ", ") + "]"
	case queryTestNode:
		result = fmt.Sprintf("%s:%s",
			node.position.String(), valueString(node.value))
	case []interface{}:
		items := []string{}
		for _, v := range node {
			items = append(items, valueString(v))
		}
		sort.Strings(items)
		result = "[" + strings.Join(items, ", ") + "]"
	case *TomlTree:
		// workaround for unreliable map key ordering
		items := []string{}
		for _, k := range node.Keys() {
			v := node.GetPath([]string{k})
			items = append(items, k+":"+valueString(v))
		}
		sort.Strings(items)
		result = "{" + strings.Join(items, ", ") + "}"
	case map[string]interface{}:
		// workaround for unreliable map key ordering
		items := []string{}
		for k, v := range node {
			items = append(items, k+":"+valueString(v))
		}
		sort.Strings(items)
		result = "{" + strings.Join(items, ", ") + "}"
	case int64:
		result += fmt.Sprintf("%d", node)
	case string:
		result += "'" + node + "'"
	case float64:
		result += fmt.Sprintf("%f", node)
	case bool:
		result += fmt.Sprintf("%t", node)
	case time.Time:
		result += fmt.Sprintf("'%v'", node)
	}
	return result
}
+
+func assertValue(t *testing.T, result, ref interface{}) {
+ pathStr := valueString(result)
+ refStr := valueString(ref)
+ if pathStr != refStr {
+ t.Errorf("values do not match")
+ t.Log("test:", pathStr)
+ t.Log("ref: ", refStr)
+ }
+}
+
// assertQueryPositions loads a TOML document, compiles and executes the
// given query against it, and asserts the results (values plus
// positions) equal ref.
func assertQueryPositions(t *testing.T, toml, query string, ref []interface{}) {
	tree, err := Load(toml)
	if err != nil {
		t.Errorf("Non-nil toml parse error: %v", err)
		return
	}
	q, err := CompileQuery(query)
	if err != nil {
		t.Error(err)
		return
	}
	results := q.Execute(tree)
	assertValue(t, results, ref)
}
+
// TestQueryRoot checks that "$" selects the whole document at the root
// position.
func TestQueryRoot(t *testing.T) {
	assertQueryPositions(t,
		"a = 42",
		"$",
		[]interface{}{
			queryTestNode{
				map[string]interface{}{
					"a": int64(42),
				}, Position{1, 1},
			},
		})
}
+
// TestQueryKey checks dotted key navigation ("$.foo.a").
func TestQueryKey(t *testing.T) {
	assertQueryPositions(t,
		"[foo]\na = 42",
		"$.foo.a",
		[]interface{}{
			queryTestNode{
				int64(42), Position{2, 1},
			},
		})
}
+
// TestQueryKeyString checks bracketed quoted-string key access
// ("$.foo['a']").
func TestQueryKeyString(t *testing.T) {
	assertQueryPositions(t,
		"[foo]\na = 42",
		"$.foo['a']",
		[]interface{}{
			queryTestNode{
				int64(42), Position{2, 1},
			},
		})
}
+
// TestQueryIndex checks zero-based array indexing ("[5]" selects the
// sixth element).
func TestQueryIndex(t *testing.T) {
	assertQueryPositions(t,
		"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
		"$.foo.a[5]",
		[]interface{}{
			queryTestNode{
				int64(6), Position{2, 1},
			},
		})
}
+
// TestQuerySliceRange checks "[0:5]" selects elements 0..4 (exclusive
// upper bound).
func TestQuerySliceRange(t *testing.T) {
	assertQueryPositions(t,
		"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
		"$.foo.a[0:5]",
		[]interface{}{
			queryTestNode{
				int64(1), Position{2, 1},
			},
			queryTestNode{
				int64(2), Position{2, 1},
			},
			queryTestNode{
				int64(3), Position{2, 1},
			},
			queryTestNode{
				int64(4), Position{2, 1},
			},
			queryTestNode{
				int64(5), Position{2, 1},
			},
		})
}
+
// TestQuerySliceStep checks "[0:5:2]" selects every other element of
// the range.
func TestQuerySliceStep(t *testing.T) {
	assertQueryPositions(t,
		"[foo]\na = [1,2,3,4,5,6,7,8,9,0]",
		"$.foo.a[0:5:2]",
		[]interface{}{
			queryTestNode{
				int64(1), Position{2, 1},
			},
			queryTestNode{
				int64(3), Position{2, 1},
			},
			queryTestNode{
				int64(5), Position{2, 1},
			},
		})
}
+
// TestQueryAny checks the '*' wildcard selecting every child table.
func TestQueryAny(t *testing.T) {
	assertQueryPositions(t,
		"[foo.bar]\na=1\nb=2\n[foo.baz]\na=3\nb=4",
		"$.foo.*",
		[]interface{}{
			queryTestNode{
				map[string]interface{}{
					"a": int64(1),
					"b": int64(2),
				}, Position{1, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"a": int64(3),
					"b": int64(4),
				}, Position{4, 1},
			},
		})
}
// TestQueryUnionSimple checks a bracketed union of bare keys under a
// wildcard ("$.*[bar,foo]").
func TestQueryUnionSimple(t *testing.T) {
	assertQueryPositions(t,
		"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
		"$.*[bar,foo]",
		[]interface{}{
			queryTestNode{
				map[string]interface{}{
					"a": int64(1),
					"b": int64(2),
				}, Position{1, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"a": int64(3),
					"b": int64(4),
				}, Position{4, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"a": int64(5),
					"b": int64(6),
				}, Position{7, 1},
			},
		})
}
+
// TestQueryRecursionAll checks "$..*": recursive descent visiting the
// root table and every nested table and leaf value, with positions.
func TestQueryRecursionAll(t *testing.T) {
	assertQueryPositions(t,
		"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
		"$..*",
		[]interface{}{
			queryTestNode{
				map[string]interface{}{
					"foo": map[string]interface{}{
						"bar": map[string]interface{}{
							"a": int64(1),
							"b": int64(2),
						},
					},
					"baz": map[string]interface{}{
						"foo": map[string]interface{}{
							"a": int64(3),
							"b": int64(4),
						},
					},
					"gorf": map[string]interface{}{
						"foo": map[string]interface{}{
							"a": int64(5),
							"b": int64(6),
						},
					},
				}, Position{1, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"bar": map[string]interface{}{
						"a": int64(1),
						"b": int64(2),
					},
				}, Position{1, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"a": int64(1),
					"b": int64(2),
				}, Position{1, 1},
			},
			queryTestNode{
				int64(1), Position{2, 1},
			},
			queryTestNode{
				int64(2), Position{3, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"foo": map[string]interface{}{
						"a": int64(3),
						"b": int64(4),
					},
				}, Position{4, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"a": int64(3),
					"b": int64(4),
				}, Position{4, 1},
			},
			queryTestNode{
				int64(3), Position{5, 1},
			},
			queryTestNode{
				int64(4), Position{6, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"foo": map[string]interface{}{
						"a": int64(5),
						"b": int64(6),
					},
				}, Position{7, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"a": int64(5),
					"b": int64(6),
				}, Position{7, 1},
			},
			queryTestNode{
				int64(5), Position{8, 1},
			},
			queryTestNode{
				int64(6), Position{9, 1},
			},
		})
}
+
// TestQueryRecursionUnionSimple checks recursive descent combined with
// a quoted-key union ("$..['foo','bar']").
func TestQueryRecursionUnionSimple(t *testing.T) {
	assertQueryPositions(t,
		"[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6",
		"$..['foo','bar']",
		[]interface{}{
			queryTestNode{
				map[string]interface{}{
					"bar": map[string]interface{}{
						"a": int64(1),
						"b": int64(2),
					},
				}, Position{1, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"a": int64(3),
					"b": int64(4),
				}, Position{4, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"a": int64(1),
					"b": int64(2),
				}, Position{1, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"a": int64(5),
					"b": int64(6),
				}, Position{7, 1},
			},
		})
}
+
// TestQueryFilterFn exercises the built-in "?(name)" filter functions
// (int, string, float, tree, time, bool) against the example.toml
// fixture, checking both the selected values and their positions.
func TestQueryFilterFn(t *testing.T) {
	buff, err := ioutil.ReadFile("example.toml")
	if err != nil {
		t.Error(err)
		return
	}

	assertQueryPositions(t, string(buff),
		"$..[?(int)]",
		[]interface{}{
			queryTestNode{
				int64(8001), Position{13, 1},
			},
			queryTestNode{
				int64(8001), Position{13, 1},
			},
			queryTestNode{
				int64(8002), Position{13, 1},
			},
			queryTestNode{
				int64(5000), Position{14, 1},
			},
		})

	assertQueryPositions(t, string(buff),
		"$..[?(string)]",
		[]interface{}{
			queryTestNode{
				"TOML Example", Position{3, 1},
			},
			queryTestNode{
				"Tom Preston-Werner", Position{6, 1},
			},
			queryTestNode{
				"GitHub", Position{7, 1},
			},
			queryTestNode{
				"GitHub Cofounder & CEO\nLikes tater tots and beer.",
				Position{8, 1},
			},
			queryTestNode{
				"192.168.1.1", Position{12, 1},
			},
			queryTestNode{
				"10.0.0.1", Position{21, 3},
			},
			queryTestNode{
				"eqdc10", Position{22, 3},
			},
			queryTestNode{
				"10.0.0.2", Position{25, 3},
			},
			queryTestNode{
				"eqdc10", Position{26, 3},
			},
		})

	assertQueryPositions(t, string(buff),
		"$..[?(float)]",
		[]interface{}{
			// no float values in document
		})

	tv, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z")
	assertQueryPositions(t, string(buff),
		"$..[?(tree)]",
		[]interface{}{
			queryTestNode{
				map[string]interface{}{
					"name":         "Tom Preston-Werner",
					"organization": "GitHub",
					"bio":          "GitHub Cofounder & CEO\nLikes tater tots and beer.",
					"dob":          tv,
				}, Position{5, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"server":         "192.168.1.1",
					"ports":          []interface{}{int64(8001), int64(8001), int64(8002)},
					"connection_max": int64(5000),
					"enabled":        true,
				}, Position{11, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"alpha": map[string]interface{}{
						"ip": "10.0.0.1",
						"dc": "eqdc10",
					},
					"beta": map[string]interface{}{
						"ip": "10.0.0.2",
						"dc": "eqdc10",
					},
				}, Position{17, 1},
			},
			queryTestNode{
				map[string]interface{}{
					"ip": "10.0.0.1",
					"dc": "eqdc10",
				}, Position{20, 3},
			},
			queryTestNode{
				map[string]interface{}{
					"ip": "10.0.0.2",
					"dc": "eqdc10",
				}, Position{24, 3},
			},
			queryTestNode{
				map[string]interface{}{
					"data": []interface{}{
						[]interface{}{"gamma", "delta"},
						[]interface{}{int64(1), int64(2)},
					},
				}, Position{28, 1},
			},
		})

	assertQueryPositions(t, string(buff),
		"$..[?(time)]",
		[]interface{}{
			queryTestNode{
				tv, Position{9, 1},
			},
		})

	assertQueryPositions(t, string(buff),
		"$..[?(bool)]",
		[]interface{}{
			queryTestNode{
				true, Position{15, 1},
			},
		})
}
diff --git a/vendor/github.com/pelletier/go-toml/test.sh b/vendor/github.com/pelletier/go-toml/test.sh
new file mode 100755
index 000000000..436d2fb6c
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/test.sh
@@ -0,0 +1,82 @@
+#!/bin/bash
+# fail out of the script if anything here fails
+set -e
+
+# set the path to the present working directory
+export GOPATH=`pwd`
+
# git_clone <repo-path> <branch> <version>
# Manage a dependency checkout under src/: clone on first use, then pin
# the working tree to the requested branch and commit.
function git_clone() {
  path=$1
  branch=$2
  version=$3
  if [ ! -d "src/$path" ]; then
    mkdir -p src/$path
    git clone https://$path.git src/$path
  fi
  pushd src/$path
  git checkout "$branch"
  git reset --hard "$version"
  popd
}
+
# Run go vet
go vet ./...

# fetch the library's runtime/test dependencies into the local GOPATH
go get github.com/pelletier/go-buffruneio
go get github.com/davecgh/go-spew/spew

# get code for BurntSushi TOML validation
# pinning all to 'HEAD' for version 0.3.x work (TODO: pin to commit hash when tests stabilize)
git_clone github.com/BurntSushi/toml master HEAD
git_clone github.com/BurntSushi/toml-test master HEAD #was: 0.2.0 HEAD

# build the BurntSushi test application
go build -o toml-test github.com/BurntSushi/toml-test

# vendorize the current lib for testing
# NOTE: this basically mocks an install without having to go back out to github for code
mkdir -p src/github.com/pelletier/go-toml/cmd
cp *.go *.toml src/github.com/pelletier/go-toml
cp -R cmd/* src/github.com/pelletier/go-toml/cmd
go build -o test_program_bin src/github.com/pelletier/go-toml/cmd/test_program.go

# Run basic unit tests
go test github.com/pelletier/go-toml -v -covermode=count -coverprofile=coverage.out
go test github.com/pelletier/go-toml/cmd/tomljson

# run the entire BurntSushi test suite
if [[ $# -eq 0 ]] ; then
  echo "Running all BurntSushi tests"
  ./toml-test ./test_program_bin | tee test_out
else
  # run a specific test, showing the fixture files alongside our output
  test=$1
  test_path='src/github.com/BurntSushi/toml-test/tests'
  valid_test="$test_path/valid/$test"
  invalid_test="$test_path/invalid/$test"

  if [ -e "$valid_test.toml" ]; then
    echo "Valid Test TOML for $test:"
    echo "===="
    cat "$valid_test.toml"

    echo "Valid Test JSON for $test:"
    echo "===="
    cat "$valid_test.json"

    echo "Go-TOML Output for $test:"
    echo "===="
    cat "$valid_test.toml" | ./test_program_bin
  fi

  if [ -e "$invalid_test.toml" ]; then
    echo "Invalid Test TOML for $test:"
    echo "===="
    cat "$invalid_test.toml"

    echo "Go-TOML Output for $test:"
    echo "===="
    echo "go-toml Output:"
    cat "$invalid_test.toml" | ./test_program_bin
  fi
fi
diff --git a/vendor/github.com/pelletier/go-toml/token.go b/vendor/github.com/pelletier/go-toml/token.go
new file mode 100644
index 000000000..5581fe0bc
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/token.go
@@ -0,0 +1,140 @@
+package toml
+
+import (
+ "fmt"
+ "strconv"
+ "unicode"
+)
+
// Define tokens
type tokenType int

const (
	// eof is the sentinel rune value returned when the input is exhausted.
	eof = -(iota + 1)
)

// Token types emitted by the TOML and query lexers. The ordinal values
// index into tokenTypeNames below, so the two declarations must stay in
// sync.
const (
	tokenError tokenType = iota
	tokenEOF
	tokenComment
	tokenKey
	tokenString
	tokenInteger
	tokenTrue
	tokenFalse
	tokenFloat
	tokenEqual
	tokenLeftBracket
	tokenRightBracket
	tokenLeftCurlyBrace
	tokenRightCurlyBrace
	tokenLeftParen
	tokenRightParen
	tokenDoubleLeftBracket
	tokenDoubleRightBracket
	tokenDate
	tokenKeyGroup
	tokenKeyGroupArray
	tokenComma
	tokenColon
	tokenDollar
	tokenStar
	tokenQuestion
	tokenDot
	tokenDotDot
	tokenEOL
)
+
// tokenTypeNames maps each tokenType (by ordinal) to a printable name;
// it must stay in sync with the tokenType const block above.
// NOTE(review): the entries for tokenDoubleLeftBracket and
// tokenDoubleRightBracket read "]]" and "[[" respectively — apparently
// swapped relative to their names. token_test.go asserts this exact
// mapping, so confirm upstream intent before changing either side.
var tokenTypeNames = []string{
	"Error",
	"EOF",
	"Comment",
	"Key",
	"String",
	"Integer",
	"True",
	"False",
	"Float",
	"=",
	"[",
	"]",
	"{",
	"}",
	"(",
	")",
	"]]",
	"[[",
	"Date",
	"KeyGroup",
	"KeyGroupArray",
	",",
	":",
	"$",
	"*",
	"?",
	".",
	"..",
	"EOL",
}
+
// token is a single lexeme: its source position, its type, and the raw
// text it covers.
type token struct {
	Position
	typ tokenType
	val string
}
+
+func (tt tokenType) String() string {
+ idx := int(tt)
+ if idx < len(tokenTypeNames) {
+ return tokenTypeNames[idx]
+ }
+ return "Unknown"
+}
+
+func (t token) Int() int {
+ if result, err := strconv.Atoi(t.val); err != nil {
+ panic(err)
+ } else {
+ return result
+ }
+}
+
+func (t token) String() string {
+ switch t.typ {
+ case tokenEOF:
+ return "EOF"
+ case tokenError:
+ return t.val
+ }
+
+ return fmt.Sprintf("%q", t.val)
+}
+
// isSpace reports whether r is intra-line whitespace (space or tab);
// line breaks are handled separately by the lexer.
func isSpace(r rune) bool {
	switch r {
	case ' ', '\t':
		return true
	}
	return false
}
+
// isAlphanumeric reports whether r may appear in a bare identifier.
// NOTE(review): despite the name, digits are NOT accepted here — only
// Unicode letters and underscore; confirm against the lexer's key
// handling before renaming or widening.
func isAlphanumeric(r rune) bool {
	return r == '_' || unicode.IsLetter(r)
}
+
+func isKeyChar(r rune) bool {
+ // Keys start with the first character that isn't whitespace or [ and end
+ // with the last non-whitespace character before the equals sign. Keys
+ // cannot contain a # character."
+ return !(r == '\r' || r == '\n' || r == eof || r == '=')
+}
+
+func isKeyStartChar(r rune) bool {
+ return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '[')
+}
+
// isDigit reports whether r is a numeric rune. Note that
// unicode.IsNumber matches the full Unicode number category (N), not
// just ASCII '0'-'9'.
func isDigit(r rune) bool {
	return unicode.IsNumber(r)
}
+
+func isHexDigit(r rune) bool {
+ return isDigit(r) ||
+ (r >= 'a' && r <= 'f') ||
+ (r >= 'A' && r <= 'F')
+}
diff --git a/vendor/github.com/pelletier/go-toml/token_test.go b/vendor/github.com/pelletier/go-toml/token_test.go
new file mode 100644
index 000000000..20b560d51
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/token_test.go
@@ -0,0 +1,67 @@
+package toml
+
+import "testing"
+
// TestTokenStringer pins the printable name of every token type,
// including the "Unknown" fallback for out-of-range values.
// NOTE(review): the "]]"/"[[" expectations mirror the (apparently
// swapped) entries in tokenTypeNames — keep the two in lockstep.
func TestTokenStringer(t *testing.T) {
	var tests = []struct {
		tt     tokenType
		expect string
	}{
		{tokenError, "Error"},
		{tokenEOF, "EOF"},
		{tokenComment, "Comment"},
		{tokenKey, "Key"},
		{tokenString, "String"},
		{tokenInteger, "Integer"},
		{tokenTrue, "True"},
		{tokenFalse, "False"},
		{tokenFloat, "Float"},
		{tokenEqual, "="},
		{tokenLeftBracket, "["},
		{tokenRightBracket, "]"},
		{tokenLeftCurlyBrace, "{"},
		{tokenRightCurlyBrace, "}"},
		{tokenLeftParen, "("},
		{tokenRightParen, ")"},
		{tokenDoubleLeftBracket, "]]"},
		{tokenDoubleRightBracket, "[["},
		{tokenDate, "Date"},
		{tokenKeyGroup, "KeyGroup"},
		{tokenKeyGroupArray, "KeyGroupArray"},
		{tokenComma, ","},
		{tokenColon, ":"},
		{tokenDollar, "$"},
		{tokenStar, "*"},
		{tokenQuestion, "?"},
		{tokenDot, "."},
		{tokenDotDot, ".."},
		{tokenEOL, "EOL"},
		{tokenEOL + 1, "Unknown"},
	}

	for i, test := range tests {
		got := test.tt.String()
		if got != test.expect {
			t.Errorf("[%d] invalid string of token type; got %q, expected %q", i, got, test.expect)
		}
	}
}
+
+func TestTokenString(t *testing.T) {
+ var tests = []struct {
+ tok token
+ expect string
+ }{
+ {token{Position{1, 1}, tokenEOF, ""}, "EOF"},
+ {token{Position{1, 1}, tokenError, "Δt"}, "Δt"},
+ {token{Position{1, 1}, tokenString, "bar"}, `"bar"`},
+ {token{Position{1, 1}, tokenString, "123456789012345"}, `"123456789012345"`},
+ }
+
+ for i, test := range tests {
+ got := test.tok.String()
+ if got != test.expect {
+ t.Errorf("[%d] invalid of string token; got %q, expected %q", i, got, test.expect)
+ }
+ }
+}
diff --git a/vendor/github.com/pelletier/go-toml/toml.go b/vendor/github.com/pelletier/go-toml/toml.go
new file mode 100644
index 000000000..1ba56a1c1
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/toml.go
@@ -0,0 +1,284 @@
+package toml
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "runtime"
+ "strings"
+)
+
// tomlValue wraps a leaf value together with its source position.
type tomlValue struct {
	value    interface{} // string, int64, uint64, float64, bool, time.Time, [] of any of this list
	position Position
}

// TomlTree is the result of the parsing of a TOML file.
type TomlTree struct {
	values   map[string]interface{} // string -> *tomlValue, *TomlTree, []*TomlTree
	position Position
}
+
+func newTomlTree() *TomlTree {
+ return &TomlTree{
+ values: make(map[string]interface{}),
+ position: Position{},
+ }
+}
+
// TreeFromMap initializes a new TomlTree object using the given map.
// The conversion is delegated to toTree, which recursively coerces map
// values; its error is returned unchanged.
func TreeFromMap(m map[string]interface{}) (*TomlTree, error) {
	result, err := toTree(m)
	if err != nil {
		return nil, err
	}
	return result.(*TomlTree), nil
}
+
+// Has returns a boolean indicating if the given key exists.
+func (t *TomlTree) Has(key string) bool {
+ if key == "" {
+ return false
+ }
+ return t.HasPath(strings.Split(key, "."))
+}
+
// HasPath returns true if the given path of keys exists, false otherwise.
// Note: a key explicitly holding a nil value is indistinguishable from
// an absent key here.
func (t *TomlTree) HasPath(keys []string) bool {
	return t.GetPath(keys) != nil
}
+
+// Keys returns the keys of the toplevel tree.
+// Warning: this is a costly operation.
+func (t *TomlTree) Keys() []string {
+ var keys []string
+ for k := range t.values {
+ keys = append(keys, k)
+ }
+ return keys
+}
+
// Get the value at key in the TomlTree.
// Key is a dot-separated path (e.g. a.b.c) and is parsed with parseKey,
// so quoting is honoured — unlike Has/GetPosition, which split naively
// on dots.
// Returns nil if the path does not exist in the tree, or if the key is
// malformed.
// If keys is of length zero, the current tree is returned.
func (t *TomlTree) Get(key string) interface{} {
	if key == "" {
		return t
	}
	comps, err := parseKey(key)
	if err != nil {
		// malformed key: treated the same as an absent path
		return nil
	}
	return t.GetPath(comps)
}
+
// GetPath returns the element in the tree indicated by 'keys'.
// If keys is of length zero, the current tree is returned.
// Intermediate keys must resolve to subtrees; for arrays of tables the
// most recently appended table is followed. Leaf *tomlValue wrappers
// are unwrapped to their raw value.
func (t *TomlTree) GetPath(keys []string) interface{} {
	if len(keys) == 0 {
		return t
	}
	subtree := t
	for _, intermediateKey := range keys[:len(keys)-1] {
		value, exists := subtree.values[intermediateKey]
		if !exists {
			return nil
		}
		switch node := value.(type) {
		case *TomlTree:
			subtree = node
		case []*TomlTree:
			// go to most recent element
			if len(node) == 0 {
				return nil
			}
			subtree = node[len(node)-1]
		default:
			return nil // cannot navigate through other node types
		}
	}
	// branch based on final node type
	switch node := subtree.values[keys[len(keys)-1]].(type) {
	case *tomlValue:
		return node.value
	default:
		return node
	}
}
+
// GetPosition returns the position of the given key, splitting the key
// on dots (quoting is not honoured here, unlike Get — presumably
// intentional; verify against callers).
func (t *TomlTree) GetPosition(key string) Position {
	if key == "" {
		return t.position
	}
	return t.GetPositionPath(strings.Split(key, "."))
}
+
// GetPositionPath returns the source position of the element indicated
// by 'keys' (the original comment said "the element", but a Position is
// returned). The zero Position {0, 0} signals an absent path.
// If keys is of length zero, the tree's own position is returned.
func (t *TomlTree) GetPositionPath(keys []string) Position {
	if len(keys) == 0 {
		return t.position
	}
	subtree := t
	for _, intermediateKey := range keys[:len(keys)-1] {
		value, exists := subtree.values[intermediateKey]
		if !exists {
			return Position{0, 0}
		}
		switch node := value.(type) {
		case *TomlTree:
			subtree = node
		case []*TomlTree:
			// go to most recent element
			if len(node) == 0 {
				return Position{0, 0}
			}
			subtree = node[len(node)-1]
		default:
			return Position{0, 0}
		}
	}
	// branch based on final node type
	switch node := subtree.values[keys[len(keys)-1]].(type) {
	case *tomlValue:
		return node.position
	case *TomlTree:
		return node.position
	case []*TomlTree:
		// go to most recent element
		if len(node) == 0 {
			return Position{0, 0}
		}
		return node[len(node)-1].position
	default:
		return Position{0, 0}
	}
}
+
+// GetDefault works like Get but with a default value
+func (t *TomlTree) GetDefault(key string, def interface{}) interface{} {
+ val := t.Get(key)
+ if val == nil {
+ return def
+ }
+ return val
+}
+
// Set an element in the tree.
// Key is a dot-separated path (e.g. a.b.c), split naively on dots.
// Creates all necessary intermediates trees, if needed.
func (t *TomlTree) Set(key string, value interface{}) {
	t.SetPath(strings.Split(key, "."), value)
}
+
+// SetPath sets an element in the tree.
+// Keys is an array of path elements (e.g. {"a","b","c"}).
+// Creates all necessary intermediates trees, if needed.
+func (t *TomlTree) SetPath(keys []string, value interface{}) {
+ subtree := t
+ for _, intermediateKey := range keys[:len(keys)-1] {
+ nextTree, exists := subtree.values[intermediateKey]
+ if !exists {
+ nextTree = newTomlTree()
+ subtree.values[intermediateKey] = nextTree // add new element here
+ }
+ switch node := nextTree.(type) {
+ case *TomlTree:
+ subtree = node
+ case []*TomlTree:
+ // go to most recent element
+ if len(node) == 0 {
+ // create element if it does not exist
+ subtree.values[intermediateKey] = append(node, newTomlTree())
+ }
+ subtree = node[len(node)-1]
+ }
+ }
+
+ var toInsert interface{}
+
+ switch value.(type) {
+ case *TomlTree:
+ toInsert = value
+ case []*TomlTree:
+ toInsert = value
+ case *tomlValue:
+ toInsert = value
+ default:
+ toInsert = &tomlValue{value: value}
+ }
+
+ subtree.values[keys[len(keys)-1]] = toInsert
+}
+
// createSubTree takes a tree and a key and create the necessary intermediate
// subtrees to create a subtree at that point. In-place.
//
// e.g. passing a.b.c will create (assuming tree is empty) tree[a], tree[a][b]
// and tree[a][b][c]
//
// Newly created subtrees record pos as their position; positions of
// pre-existing subtrees are left untouched.
//
// Returns nil on success, error object on failure
func (t *TomlTree) createSubTree(keys []string, pos Position) error {
	subtree := t
	for _, intermediateKey := range keys {
		nextTree, exists := subtree.values[intermediateKey]
		if !exists {
			tree := newTomlTree()
			tree.position = pos
			subtree.values[intermediateKey] = tree
			nextTree = tree
		}

		switch node := nextTree.(type) {
		case []*TomlTree:
			subtree = node[len(node)-1]
		case *TomlTree:
			subtree = node
		default:
			// intermediate key already holds a scalar value
			return fmt.Errorf("unknown type for path %s (%s): %T (%#v)",
				strings.Join(keys, "."), intermediateKey, nextTree, nextTree)
		}
	}
	return nil
}
+
// Query compiles and executes a query on a tree and returns the query result.
// Compilation errors are returned; execution itself does not fail.
func (t *TomlTree) Query(query string) (*QueryResult, error) {
	q, err := CompileQuery(query)
	if err != nil {
		return nil, err
	}
	return q.Execute(t), nil
}
+
+// LoadReader creates a TomlTree from any io.Reader.
+func LoadReader(reader io.Reader) (tree *TomlTree, err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ if _, ok := r.(runtime.Error); ok {
+ panic(r)
+ }
+ err = errors.New(r.(string))
+ }
+ }()
+ tree = parseToml(lexToml(reader))
+ return
+}
+
// Load creates a TomlTree from a string, delegating to LoadReader.
func Load(content string) (tree *TomlTree, err error) {
	return LoadReader(strings.NewReader(content))
}
+
// LoadFile creates a TomlTree from a file. The file is closed before
// returning.
func LoadFile(path string) (tree *TomlTree, err error) {
	file, err := os.Open(path)
	if err != nil {
		return nil, err
	}
	defer file.Close()
	return LoadReader(file)
}
diff --git a/vendor/github.com/pelletier/go-toml/toml_test.go b/vendor/github.com/pelletier/go-toml/toml_test.go
new file mode 100644
index 000000000..7c7f9eff9
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/toml_test.go
@@ -0,0 +1,131 @@
+// Testing support for go-toml
+
+package toml
+
+import (
+ "testing"
+)
+
// TestTomlHas checks key existence lookups, including the empty-key
// special case.
func TestTomlHas(t *testing.T) {
	tree, _ := Load(`
		[test]
		key = "value"
	`)

	if !tree.Has("test.key") {
		t.Errorf("Has - expected test.key to exists")
	}

	if tree.Has("") {
		t.Errorf("Should return false if the key is not provided")
	}
}
+
// TestTomlGet checks Get: empty key returns the tree itself, a valid
// path returns the leaf value, and a malformed key returns nil.
func TestTomlGet(t *testing.T) {
	tree, _ := Load(`
		[test]
		key = "value"
	`)

	if tree.Get("") != tree {
		t.Errorf("Get should return the tree itself when given an empty path")
	}

	if tree.Get("test.key") != "value" {
		t.Errorf("Get should return the value")
	}
	if tree.Get(`\`) != nil {
		t.Errorf("should return nil when the key is malformed")
	}
}
+
// TestTomlGetDefault checks GetDefault for present, empty, and absent
// keys.
func TestTomlGetDefault(t *testing.T) {
	tree, _ := Load(`
		[test]
		key = "value"
	`)

	if tree.GetDefault("", "hello") != tree {
		t.Error("GetDefault should return the tree itself when given an empty path")
	}

	if tree.GetDefault("test.key", "hello") != "value" {
		t.Error("Get should return the value")
	}

	if tree.GetDefault("whatever", "hello") != "hello" {
		t.Error("GetDefault should return the default value if the key does not exist")
	}
}
+
// TestTomlHasPath checks the path-slice variant of Has.
func TestTomlHasPath(t *testing.T) {
	tree, _ := Load(`
		[test]
		key = "value"
	`)

	if !tree.HasPath([]string{"test", "key"}) {
		t.Errorf("HasPath - expected test.key to exists")
	}
}
+
// TestTomlGetPath checks GetPath: an empty path yields the node itself,
// an unknown key yields nil.
func TestTomlGetPath(t *testing.T) {
	node := newTomlTree()
	//TODO: set other node data

	for idx, item := range []struct {
		Path     []string
		Expected *TomlTree
	}{
		{ // empty path test
			[]string{},
			node,
		},
	} {
		result := node.GetPath(item.Path)
		if result != item.Expected {
			t.Errorf("GetPath[%d] %v - expected %v, got %v instead.", idx, item.Path, item.Expected, result)
		}
	}

	tree, _ := Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
	if tree.GetPath([]string{"whatever"}) != nil {
		t.Error("GetPath should return nil when the key does not exist")
	}
}
+
// TestTomlQuery checks the tree-level Query convenience wrapper end to
// end: one result of type *TomlTree with the expected leaf values.
func TestTomlQuery(t *testing.T) {
	tree, err := Load("[foo.bar]\na=1\nb=2\n[baz.foo]\na=3\nb=4\n[gorf.foo]\na=5\nb=6")
	if err != nil {
		t.Error(err)
		return
	}
	result, err := tree.Query("$.foo.bar")
	if err != nil {
		t.Error(err)
		return
	}
	values := result.Values()
	if len(values) != 1 {
		t.Errorf("Expected resultset of 1, got %d instead: %v", len(values), values)
	}

	if tt, ok := values[0].(*TomlTree); !ok {
		t.Errorf("Expected type of TomlTree: %T", values[0])
	} else if tt.Get("a") != int64(1) {
		t.Errorf("Expected 'a' with a value 1: %v", tt.Get("a"))
	} else if tt.Get("b") != int64(2) {
		t.Errorf("Expected 'b' with a value 2: %v", tt.Get("b"))
	}
}
+
// TestTomlFromMap checks TreeFromMap, including the int -> int64
// coercion applied to plain Go ints.
func TestTomlFromMap(t *testing.T) {
	simpleMap := map[string]interface{}{"hello": 42}
	tree, err := TreeFromMap(simpleMap)
	if err != nil {
		t.Fatal("unexpected error:", err)
	}
	if tree.Get("hello") != int64(42) {
		t.Fatal("hello should be 42, not", tree.Get("hello"))
	}
}
diff --git a/vendor/github.com/pelletier/go-toml/tomltree_create.go b/vendor/github.com/pelletier/go-toml/tomltree_create.go
new file mode 100644
index 000000000..c6054f358
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/tomltree_create.go
@@ -0,0 +1,135 @@
+package toml
+
+import (
+ "fmt"
+ "reflect"
+ "time"
+)
+
// supported values:
// string, bool, int64, uint64, float64, time.Time, int, int8, int16, int32, uint, uint8, uint16, uint32, float32

// kindToTypeMapping maps the reflect.Kind of a slice element to the
// canonical element type used when building a typed TOML array: signed
// integers widen to int64, unsigned integers to uint64, floats to float64.
var kindToTypeMapping = map[reflect.Kind]reflect.Type{
	reflect.Bool:    reflect.TypeOf(true),
	reflect.String:  reflect.TypeOf(""),
	reflect.Float32: reflect.TypeOf(float64(1)),
	reflect.Float64: reflect.TypeOf(float64(1)),
	reflect.Int:     reflect.TypeOf(int64(1)),
	reflect.Int8:    reflect.TypeOf(int64(1)),
	reflect.Int16:   reflect.TypeOf(int64(1)),
	reflect.Int32:   reflect.TypeOf(int64(1)),
	reflect.Int64:   reflect.TypeOf(int64(1)),
	reflect.Uint:    reflect.TypeOf(uint64(1)),
	reflect.Uint8:   reflect.TypeOf(uint64(1)),
	reflect.Uint16:  reflect.TypeOf(uint64(1)),
	reflect.Uint32:  reflect.TypeOf(uint64(1)),
	reflect.Uint64:  reflect.TypeOf(uint64(1)),
}
+
// simpleValueCoercion normalizes a leaf value to one of the scalar types
// a TomlTree stores natively (string, bool, int64, uint64, float64,
// time.Time). Narrower numeric types are widened, fmt.Stringer values are
// stored as their string form, and anything else is rejected with an error.
func simpleValueCoercion(object interface{}) (interface{}, error) {
	switch v := object.(type) {
	case string, bool, int64, uint64, float64, time.Time:
		// already in canonical form
		return v, nil
	case int:
		return int64(v), nil
	case int8:
		return int64(v), nil
	case int16:
		return int64(v), nil
	case int32:
		return int64(v), nil
	case uint:
		return uint64(v), nil
	case uint8:
		return uint64(v), nil
	case uint16:
		return uint64(v), nil
	case uint32:
		return uint64(v), nil
	case float32:
		return float64(v), nil
	case fmt.Stringer:
		// checked last so concrete types above (notably time.Time,
		// which also implements Stringer) take precedence
		return v.String(), nil
	}
	return nil, fmt.Errorf("cannot convert type %T to TomlTree", object)
}
+
// sliceToTree converts a Go array or slice into its internal TOML
// representation. A slice of maps becomes []*TomlTree (an array of
// tables); anything else becomes a single *tomlValue holding a typed
// slice of coerced scalar values.
func sliceToTree(object interface{}) (interface{}, error) {
	// arrays are a bit tricky, since they can represent either a
	// collection of simple values, which is represented by one
	// *tomlValue, or an array of tables, which is represented by an
	// array of *TomlTree.

	// holding the assumption that this function is called from toTree only when value.Kind() is Array or Slice
	value := reflect.ValueOf(object)
	insideType := value.Type().Elem()
	length := value.Len()
	if length > 0 {
		// for non-empty slices, prefer the dynamic type of the first
		// element over the static element type (handles []interface{})
		insideType = reflect.ValueOf(value.Index(0).Interface()).Type()
	}
	if insideType.Kind() == reflect.Map {
		// this is considered as an array of tables
		tablesArray := make([]*TomlTree, 0, length)
		for i := 0; i < length; i++ {
			table := value.Index(i)
			tree, err := toTree(table.Interface())
			if err != nil {
				return nil, err
			}
			tablesArray = append(tablesArray, tree.(*TomlTree))
		}
		return tablesArray, nil
	}

	// map the element kind to its canonical widened type (int -> int64,
	// float32 -> float64, ...); fall back to the element type itself
	sliceType := kindToTypeMapping[insideType.Kind()]
	if sliceType == nil {
		sliceType = insideType
	}

	arrayValue := reflect.MakeSlice(reflect.SliceOf(sliceType), 0, length)

	for i := 0; i < length; i++ {
		val := value.Index(i).Interface()
		simpleValue, err := simpleValueCoercion(val)
		if err != nil {
			return nil, err
		}
		arrayValue = reflect.Append(arrayValue, reflect.ValueOf(simpleValue))
	}
	// empty Position: this value was built from Go data, not parsed text
	return &tomlValue{arrayValue.Interface(), Position{}}, nil
}
+
+func toTree(object interface{}) (interface{}, error) {
+ value := reflect.ValueOf(object)
+
+ if value.Kind() == reflect.Map {
+ values := map[string]interface{}{}
+ keys := value.MapKeys()
+ for _, key := range keys {
+ if key.Kind() != reflect.String {
+ if _, ok := key.Interface().(string); !ok {
+ return nil, fmt.Errorf("map key needs to be a string, not %T (%v)", key.Interface(), key.Kind())
+ }
+ }
+
+ v := value.MapIndex(key)
+ newValue, err := toTree(v.Interface())
+ if err != nil {
+ return nil, err
+ }
+ values[key.String()] = newValue
+ }
+ return &TomlTree{values, Position{}}, nil
+ }
+
+ if value.Kind() == reflect.Array || value.Kind() == reflect.Slice {
+ return sliceToTree(object)
+ }
+
+ simpleValue, err := simpleValueCoercion(object)
+ if err != nil {
+ return nil, err
+ }
+ return &tomlValue{simpleValue, Position{}}, nil
+}
diff --git a/vendor/github.com/pelletier/go-toml/tomltree_create_test.go b/vendor/github.com/pelletier/go-toml/tomltree_create_test.go
new file mode 100644
index 000000000..6c1496835
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/tomltree_create_test.go
@@ -0,0 +1,126 @@
+package toml
+
+import (
+ "testing"
+ "time"
+ "strconv"
+)
+
// customString is a named string type used to verify that string-kinded
// map keys (not just plain string) are accepted by TreeFromMap.
type customString string

// stringer implements fmt.Stringer and is used to verify that Stringer
// values are coerced to their string form.
type stringer struct{}

// String implements fmt.Stringer.
func (s stringer) String() string {
	return "stringer"
}
+
+func validate(t *testing.T, path string, object interface{}) {
+ switch o := object.(type) {
+ case *TomlTree:
+ for key, tree := range o.values {
+ validate(t, path+"."+key, tree)
+ }
+ case []*TomlTree:
+ for index, tree := range o {
+ validate(t, path+"."+strconv.Itoa(index), tree)
+ }
+ case *tomlValue:
+ switch o.value.(type) {
+ case int64, uint64, bool, string, float64, time.Time,
+ []int64, []uint64, []bool, []string, []float64, []time.Time:
+ default:
+ t.Fatalf("tomlValue at key %s containing incorrect type %T", path, o.value)
+ }
+ default:
+ t.Fatalf("value at key %s is of incorrect type %T", path, object)
+ }
+ t.Logf("validation ok %s as %T", path, object)
+}
+
// validateTree validates an entire tree starting from an empty key path.
func validateTree(t *testing.T, tree *TomlTree) {
	validate(t, "", tree)
}
+
// TestTomlTreeCreateToTree covers TreeFromMap over every supported leaf
// type, nested maps, arrays of scalars, arrays of tables, and a
// string-kinded custom map key type.
func TestTomlTreeCreateToTree(t *testing.T) {
	data := map[string]interface{}{
		"a_string": "bar",
		"an_int":   42,
		"time":     time.Now(),
		"int8":     int8(2),
		"int16":    int16(2),
		"int32":    int32(2),
		"uint8":    uint8(2),
		"uint16":   uint16(2),
		"uint32":   uint32(2),
		"float32":  float32(2),
		"a_bool":   false,
		"stringer": stringer{},
		"nested": map[string]interface{}{
			"foo": "bar",
		},
		"array":                 []string{"a", "b", "c"},
		"array_uint":            []uint{uint(1), uint(2)},
		"array_table":           []map[string]interface{}{map[string]interface{}{"sub_map": 52}},
		"array_times":           []time.Time{time.Now(), time.Now()},
		"map_times":             map[string]time.Time{"now": time.Now()},
		"custom_string_map_key": map[customString]interface{}{customString("custom"): "custom"},
	}
	tree, err := TreeFromMap(data)
	if err != nil {
		t.Fatal("unexpected error:", err)
	}
	validateTree(t, tree)
}
+
+func TestTomlTreeCreateToTreeInvalidLeafType(t *testing.T) {
+ _, err := TreeFromMap(map[string]interface{}{"foo": t})
+ expected := "cannot convert type *testing.T to TomlTree"
+ if err.Error() != expected {
+ t.Fatalf("expected error %s, got %s", expected, err.Error())
+ }
+}
+
+func TestTomlTreeCreateToTreeInvalidMapKeyType(t *testing.T) {
+ _, err := TreeFromMap(map[string]interface{}{"foo": map[int]interface{}{2: 1}})
+ expected := "map key needs to be a string, not int (int)"
+ if err.Error() != expected {
+ t.Fatalf("expected error %s, got %s", expected, err.Error())
+ }
+}
+
+func TestTomlTreeCreateToTreeInvalidArrayMemberType(t *testing.T) {
+ _, err := TreeFromMap(map[string]interface{}{"foo": []*testing.T{t}})
+ expected := "cannot convert type *testing.T to TomlTree"
+ if err.Error() != expected {
+ t.Fatalf("expected error %s, got %s", expected, err.Error())
+ }
+}
+
+func TestTomlTreeCreateToTreeInvalidTableGroupType(t *testing.T) {
+ _, err := TreeFromMap(map[string]interface{}{"foo": []map[string]interface{}{map[string]interface{}{"hello": t}}})
+ expected := "cannot convert type *testing.T to TomlTree"
+ if err.Error() != expected {
+ t.Fatalf("expected error %s, got %s", expected, err.Error())
+ }
+}
+
// TestRoundTripArrayOfTables checks that Load -> ToMap -> TreeFromMap ->
// String reproduces the original document byte for byte, including the
// writer's indentation.
func TestRoundTripArrayOfTables(t *testing.T) {
	orig := "\n[[stuff]]\n  name = \"foo\"\n  things = [\"a\",\"b\"]\n"
	tree, err := Load(orig)
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}

	m := tree.ToMap()

	tree, err = TreeFromMap(m)
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	want := orig
	got := tree.String()

	if got != want {
		t.Errorf("want:\n%s\ngot:\n%s", want, got)
	}
}
diff --git a/vendor/github.com/pelletier/go-toml/tomltree_write.go b/vendor/github.com/pelletier/go-toml/tomltree_write.go
new file mode 100644
index 000000000..89c3c4229
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/tomltree_write.go
@@ -0,0 +1,214 @@
+package toml
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+ "reflect"
+)
+
// encodeTomlString encodes a string to a TOML-compliant basic string
// value: backslash, double quote and the common control characters get
// their shorthand escapes, any other control character (U+0000..U+001F)
// is emitted as a \uXXXX escape, and everything else is copied through.
func encodeTomlString(value string) string {
	// strings.Builder avoids the quadratic cost of repeated string
	// concatenation on long inputs
	var b strings.Builder
	for _, rr := range value {
		switch rr {
		case '\b':
			b.WriteString(`\b`)
		case '\t':
			b.WriteString(`\t`)
		case '\n':
			b.WriteString(`\n`)
		case '\f':
			b.WriteString(`\f`)
		case '\r':
			b.WriteString(`\r`)
		case '"':
			b.WriteString(`\"`)
		case '\\':
			b.WriteString(`\\`)
		default:
			if rr < 0x20 {
				// TOML requires every remaining control character to be
				// escaped. Comparing the rune directly (instead of
				// truncating through uint16, which mangled runes outside
				// the BMP) and using < 0x20 (so U+001F is included) fixes
				// two escaping bugs in the previous version.
				b.WriteString(fmt.Sprintf(`\u%04X`, rr))
			} else {
				b.WriteRune(rr)
			}
		}
	}
	return b.String()
}
+
+func tomlValueStringRepresentation(v interface{}) (string, error) {
+ switch value := v.(type) {
+ case uint64:
+ return strconv.FormatUint(value, 10), nil
+ case int64:
+ return strconv.FormatInt(value, 10), nil
+ case float64:
+ return strconv.FormatFloat(value, 'f', -1, 32), nil
+ case string:
+ return "\"" + encodeTomlString(value) + "\"", nil
+ case bool:
+ if value {
+ return "true", nil
+ }
+ return "false", nil
+ case time.Time:
+ return value.Format(time.RFC3339), nil
+ case nil:
+ return "", nil
+ }
+
+ rv := reflect.ValueOf(v)
+
+ if rv.Kind() == reflect.Slice {
+ values := []string{}
+ for i := 0; i < rv.Len(); i++ {
+ item := rv.Index(i).Interface()
+ itemRepr, err := tomlValueStringRepresentation(item)
+ if err != nil {
+ return "", err
+ }
+ values = append(values, itemRepr)
+ }
+ return "[" + strings.Join(values, ",") + "]", nil
+ }
+ return "", fmt.Errorf("unsupported value type %T: %v", v, v)
+}
+
// writeTo renders the tree rooted at t as TOML into w.
//
// Simple key/value pairs are written first, then sub-tables and arrays of
// tables; both groups are sorted by key so output is deterministic.
// keyspace is the dotted path of t from the root tree, indent grows by two
// spaces per nesting level, and bytesCount is the running total of bytes
// written so far. The updated total is returned along with the first
// error encountered, if any.
//
// NOTE(review): keys are emitted bare, without quoting — a key that is
// not bare-key safe would yield invalid TOML. Confirm callers cannot
// supply such keys.
func (t *TomlTree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64) (int64, error) {
	// split keys into scalar values and sub-trees so the two groups can
	// be written in distinct, sorted passes
	simpleValuesKeys := make([]string, 0)
	complexValuesKeys := make([]string, 0)

	for k := range t.values {
		v := t.values[k]
		switch v.(type) {
		case *TomlTree, []*TomlTree:
			complexValuesKeys = append(complexValuesKeys, k)
		default:
			simpleValuesKeys = append(simpleValuesKeys, k)
		}
	}

	sort.Strings(simpleValuesKeys)
	sort.Strings(complexValuesKeys)

	for _, k := range simpleValuesKeys {
		v, ok := t.values[k].(*tomlValue)
		if !ok {
			return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
		}

		repr, err := tomlValueStringRepresentation(v.value)
		if err != nil {
			return bytesCount, err
		}

		kvRepr := fmt.Sprintf("%s%s = %s\n", indent, k, repr)
		writtenBytesCount, err := w.Write([]byte(kvRepr))
		bytesCount += int64(writtenBytesCount)
		if err != nil {
			return bytesCount, err
		}
	}

	for _, k := range complexValuesKeys {
		v := t.values[k]

		// table headers always carry the full dotted path from the root
		combinedKey := k
		if keyspace != "" {
			combinedKey = keyspace + "." + combinedKey
		}

		switch node := v.(type) {
		// node has to be of those two types given how keys are sorted above
		case *TomlTree:
			tableName := fmt.Sprintf("\n%s[%s]\n", indent, combinedKey)
			writtenBytesCount, err := w.Write([]byte(tableName))
			bytesCount += int64(writtenBytesCount)
			if err != nil {
				return bytesCount, err
			}
			bytesCount, err = node.writeTo(w, indent+"  ", combinedKey, bytesCount)
			if err != nil {
				return bytesCount, err
			}
		case []*TomlTree:
			for _, subTree := range node {
				// empty tables inside an array are skipped entirely
				if len(subTree.values) > 0 {
					tableArrayName := fmt.Sprintf("\n%s[[%s]]\n", indent, combinedKey)
					writtenBytesCount, err := w.Write([]byte(tableArrayName))
					bytesCount += int64(writtenBytesCount)
					if err != nil {
						return bytesCount, err
					}

					bytesCount, err = subTree.writeTo(w, indent+"  ", combinedKey, bytesCount)
					if err != nil {
						return bytesCount, err
					}
				}
			}
		}
	}

	return bytesCount, nil
}
+
// WriteTo encodes the TomlTree as TOML and writes it to the writer w.
// It returns the number of bytes written, and an error if anything went
// wrong while writing.
func (t *TomlTree) WriteTo(w io.Writer) (int64, error) {
	return t.writeTo(w, "", "", 0)
}
+
+// ToTomlString generates a human-readable representation of the current tree.
+// Output spans multiple lines, and is suitable for ingest by a TOML parser.
+// If the conversion cannot be performed, ToString returns a non-nil error.
+func (t *TomlTree) ToTomlString() (string, error) {
+ var buf bytes.Buffer
+ _, err := t.WriteTo(&buf)
+ if err != nil {
+ return "", err
+ }
+ return buf.String(), nil
+}
+
// String generates a human-readable representation of the current tree.
// Alias of ToTomlString (any rendering error is silently discarded and
// yields an empty string). Present to implement the fmt.Stringer interface.
func (t *TomlTree) String() string {
	result, _ := t.ToTomlString()
	return result
}
+
+// ToMap recursively generates a representation of the tree using Go built-in structures.
+// The following types are used:
+// * uint64
+// * int64
+// * bool
+// * string
+// * time.Time
+// * map[string]interface{} (where interface{} is any of this list)
+// * []interface{} (where interface{} is any of this list)
+func (t *TomlTree) ToMap() map[string]interface{} {
+ result := map[string]interface{}{}
+
+ for k, v := range t.values {
+ switch node := v.(type) {
+ case []*TomlTree:
+ var array []interface{}
+ for _, item := range node {
+ array = append(array, item.ToMap())
+ }
+ result[k] = array
+ case *TomlTree:
+ result[k] = node.ToMap()
+ case *tomlValue:
+ result[k] = node.value
+ }
+ }
+ return result
+}
diff --git a/vendor/github.com/pelletier/go-toml/tomltree_write_test.go b/vendor/github.com/pelletier/go-toml/tomltree_write_test.go
new file mode 100644
index 000000000..b5ad8db7c
--- /dev/null
+++ b/vendor/github.com/pelletier/go-toml/tomltree_write_test.go
@@ -0,0 +1,271 @@
+package toml
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+ "testing"
+ "time"
+)
+
// failingWriter is an io.Writer test double that succeeds until failAt
// total bytes have been accepted and then reports an error; the bytes
// that made it through are captured in buffer.
type failingWriter struct {
	failAt  int
	written int
	buffer  bytes.Buffer
}
+
// Write implements io.Writer, failing once the write would push the total
// past failAt and truncating that final write.
//
// NOTE(review): the value receiver means mutations of f.written are lost
// between calls — every Write sees the written value from the struct
// literal. The byte counts asserted by the tests below encode this
// behavior; switching to a pointer receiver would change when the
// failure triggers. Confirm before "fixing".
// NOTE(review): "writting" in the error message is a typo, but the tests
// match it verbatim, so message and tests must be updated together.
func (f failingWriter) Write(p []byte) (n int, err error) {
	count := len(p)
	toWrite := f.failAt - count + f.written
	if toWrite < 0 {
		toWrite = 0
	}
	if toWrite > count {
		f.written += count
		f.buffer.WriteString(string(p))
		return count, nil
	}

	// partial write up to the failure point, then report the error
	f.buffer.WriteString(string(p[:toWrite]))
	f.written = f.failAt
	return f.written, fmt.Errorf("failingWriter failed after writting %d bytes", f.written)
}
+
+func assertErrorString(t *testing.T, expected string, err error) {
+ expectedErr := errors.New(expected)
+ if err.Error() != expectedErr.Error() {
+ t.Errorf("expecting error %s, but got %s instead", expected, err)
+ }
+}
+
// TestTomlTreeWriteToTomlString round-trips inline tables through
// ToTomlString and Load, then checks the reparsed values.
func TestTomlTreeWriteToTomlString(t *testing.T) {
	toml, err := Load(`name = { first = "Tom", last = "Preston-Werner" }
points = { x = 1, y = 2 }`)

	if err != nil {
		t.Fatal("Unexpected error:", err)
	}

	tomlString, _ := toml.ToTomlString()
	reparsedTree, err := Load(tomlString)

	assertTree(t, reparsedTree, err, map[string]interface{}{
		"name": map[string]interface{}{
			"first": "Tom",
			"last":  "Preston-Werner",
		},
		"points": map[string]interface{}{
			"x": int64(1),
			"y": int64(2),
		},
	})
}
+
// TestTomlTreeWriteToTomlStringSimple checks the exact rendering of an
// array of tables, including the writer's two-space indentation.
func TestTomlTreeWriteToTomlStringSimple(t *testing.T) {
	tree, err := Load("[foo]\n\n[[foo.bar]]\na = 42\n\n[[foo.bar]]\na = 69\n")
	if err != nil {
		t.Errorf("Test failed to parse: %v", err)
		return
	}
	result, err := tree.ToTomlString()
	if err != nil {
		t.Errorf("Unexpected error: %s", err)
	}
	expected := "\n[foo]\n\n  [[foo.bar]]\n    a = 42\n\n  [[foo.bar]]\n    a = 69\n"
	if result != expected {
		t.Errorf("Expected got '%s', expected '%s'", result, expected)
	}
}
+
// TestTomlTreeWriteToTomlStringKeysOrders re-serializes and re-parses the
// same document many times; since Go map iteration order is random, the
// 100 iterations exercise the writer's key sorting for determinism.
func TestTomlTreeWriteToTomlStringKeysOrders(t *testing.T) {
	for i := 0; i < 100; i++ {
		tree, _ := Load(`
		foobar = true
		bar = "baz"
		foo = 1
		[qux]
		  foo = 1
		  bar = "baz2"`)

		stringRepr, _ := tree.ToTomlString()

		t.Log("Intermediate string representation:")
		t.Log(stringRepr)

		r := strings.NewReader(stringRepr)
		toml, err := LoadReader(r)

		if err != nil {
			t.Fatal("Unexpected error:", err)
		}

		assertTree(t, toml, err, map[string]interface{}{
			"foobar": true,
			"bar":    "baz",
			"foo":    1,
			"qux": map[string]interface{}{
				"foo": 1,
				"bar": "baz2",
			},
		})
	}
}
+
// testMaps fails the test when the two maps are not deeply equal.
func testMaps(t *testing.T, actual, expected map[string]interface{}) {
	if reflect.DeepEqual(actual, expected) {
		return
	}
	t.Fatal("trees aren't equal.\n", "Expected:\n", expected, "\nActual:\n", actual)
}
+
+func TestTomlTreeWriteToMapSimple(t *testing.T) {
+ tree, _ := Load("a = 42\nb = 17")
+
+ expected := map[string]interface{}{
+ "a": int64(42),
+ "b": int64(17),
+ }
+
+ testMaps(t, tree.ToMap(), expected)
+}
+
+func TestTomlTreeWriteToInvalidTreeSimpleValue(t *testing.T) {
+ tree := TomlTree{values: map[string]interface{}{"foo": int8(1)}}
+ _, err := tree.ToTomlString()
+ assertErrorString(t, "invalid value type at foo: int8", err)
+}
+
+func TestTomlTreeWriteToInvalidTreeTomlValue(t *testing.T) {
+ tree := TomlTree{values: map[string]interface{}{"foo": &tomlValue{int8(1), Position{}}}}
+ _, err := tree.ToTomlString()
+ assertErrorString(t, "unsupported value type int8: 1", err)
+}
+
+func TestTomlTreeWriteToInvalidTreeTomlValueArray(t *testing.T) {
+ tree := TomlTree{values: map[string]interface{}{"foo": &tomlValue{[]interface{}{int8(1)}, Position{}}}}
+ _, err := tree.ToTomlString()
+ assertErrorString(t, "unsupported value type int8: 1", err)
+}
+
+func TestTomlTreeWriteToFailingWriterInSimpleValue(t *testing.T) {
+ toml, _ := Load(`a = 2`)
+ writer := failingWriter{failAt: 0, written: 0}
+ _, err := toml.WriteTo(writer)
+ assertErrorString(t, "failingWriter failed after writting 0 bytes", err)
+}
+
+func TestTomlTreeWriteToFailingWriterInTable(t *testing.T) {
+ toml, _ := Load(`
+[b]
+a = 2`)
+ writer := failingWriter{failAt: 2, written: 0}
+ _, err := toml.WriteTo(writer)
+ assertErrorString(t, "failingWriter failed after writting 2 bytes", err)
+
+ writer = failingWriter{failAt: 13, written: 0}
+ _, err = toml.WriteTo(writer)
+ assertErrorString(t, "failingWriter failed after writting 13 bytes", err)
+}
+
+func TestTomlTreeWriteToFailingWriterInArray(t *testing.T) {
+ toml, _ := Load(`
+[[b]]
+a = 2`)
+ writer := failingWriter{failAt: 2, written: 0}
+ _, err := toml.WriteTo(writer)
+ assertErrorString(t, "failingWriter failed after writting 2 bytes", err)
+
+ writer = failingWriter{failAt: 15, written: 0}
+ _, err = toml.WriteTo(writer)
+ assertErrorString(t, "failingWriter failed after writting 15 bytes", err)
+}
+
// TestTomlTreeWriteToMapExampleFile converts the bundled example.toml
// fixture to a map and compares it against the full expected structure.
func TestTomlTreeWriteToMapExampleFile(t *testing.T) {
	tree, _ := LoadFile("example.toml")
	expected := map[string]interface{}{
		"title": "TOML Example",
		"owner": map[string]interface{}{
			"name":         "Tom Preston-Werner",
			"organization": "GitHub",
			"bio":          "GitHub Cofounder & CEO\nLikes tater tots and beer.",
			"dob":          time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC),
		},
		"database": map[string]interface{}{
			"server":         "192.168.1.1",
			"ports":          []interface{}{int64(8001), int64(8001), int64(8002)},
			"connection_max": int64(5000),
			"enabled":        true,
		},
		"servers": map[string]interface{}{
			"alpha": map[string]interface{}{
				"ip": "10.0.0.1",
				"dc": "eqdc10",
			},
			"beta": map[string]interface{}{
				"ip": "10.0.0.2",
				"dc": "eqdc10",
			},
		},
		"clients": map[string]interface{}{
			"data": []interface{}{
				[]interface{}{"gamma", "delta"},
				[]interface{}{int64(1), int64(2)},
			},
		},
	}
	testMaps(t, tree.ToMap(), expected)
}
+
// TestTomlTreeWriteToMapWithTablesInMultipleChunks verifies that repeated
// [[menu.main]] entries are merged into one []interface{} in ToMap output.
func TestTomlTreeWriteToMapWithTablesInMultipleChunks(t *testing.T) {
	tree, _ := Load(`
	[[menu.main]]
        a = "menu 1"
        b = "menu 2"
        [[menu.main]]
        c = "menu 3"
        d = "menu 4"`)
	expected := map[string]interface{}{
		"menu": map[string]interface{}{
			"main": []interface{}{
				map[string]interface{}{"a": "menu 1", "b": "menu 2"},
				map[string]interface{}{"c": "menu 3", "d": "menu 4"},
			},
		},
	}
	treeMap := tree.ToMap()

	testMaps(t, treeMap, expected)
}
+
// TestTomlTreeWriteToMapWithArrayOfInlineTables verifies that an array of
// inline tables converts to a []interface{} of maps in ToMap output.
func TestTomlTreeWriteToMapWithArrayOfInlineTables(t *testing.T) {
	tree, _ := Load(`
	[params]
	language_tabs = [
		{ key = "shell", name = "Shell" },
		{ key = "ruby", name = "Ruby" },
		{ key = "python", name = "Python" }
	]`)

	expected := map[string]interface{}{
		"params": map[string]interface{}{
			"language_tabs": []interface{}{
				map[string]interface{}{
					"key":  "shell",
					"name": "Shell",
				},
				map[string]interface{}{
					"key":  "ruby",
					"name": "Ruby",
				},
				map[string]interface{}{
					"key":  "python",
					"name": "Python",
				},
			},
		},
	}

	treeMap := tree.ToMap()
	testMaps(t, treeMap, expected)
}