summaryrefslogtreecommitdiffstats
path: root/vendor/github.com/hashicorp/hcl/json
diff options
context:
space:
mode:
authorChristopher Speller <crspeller@gmail.com>2018-04-16 05:37:14 -0700
committerJoram Wilander <jwawilander@gmail.com>2018-04-16 08:37:14 -0400
commit6e2cb00008cbf09e556b00f87603797fcaa47e09 (patch)
tree3c0eb55ff4226a3f024aad373140d1fb860a6404 /vendor/github.com/hashicorp/hcl/json
parentbf24f51c4e1cc6286885460672f7f449e8c6f5ef (diff)
downloadchat-6e2cb00008cbf09e556b00f87603797fcaa47e09.tar.gz
chat-6e2cb00008cbf09e556b00f87603797fcaa47e09.tar.bz2
chat-6e2cb00008cbf09e556b00f87603797fcaa47e09.zip
Dependency upgrades and moving to dep. (#8630)
Diffstat (limited to 'vendor/github.com/hashicorp/hcl/json')
-rw-r--r--vendor/github.com/hashicorp/hcl/json/parser/parser_test.go384
-rw-r--r--vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/array.json4
-rw-r--r--vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_128.json1
-rw-r--r--vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_tf_8110.json7
-rw-r--r--vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/basic.json3
-rw-r--r--vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/good_input_tf_8110.json7
-rw-r--r--vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/object.json5
-rw-r--r--vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/types.json10
-rw-r--r--vendor/github.com/hashicorp/hcl/json/scanner/scanner_test.go362
-rw-r--r--vendor/github.com/hashicorp/hcl/json/test-fixtures/array.json4
-rw-r--r--vendor/github.com/hashicorp/hcl/json/test-fixtures/basic.json3
-rw-r--r--vendor/github.com/hashicorp/hcl/json/test-fixtures/object.json5
-rw-r--r--vendor/github.com/hashicorp/hcl/json/test-fixtures/types.json10
-rw-r--r--vendor/github.com/hashicorp/hcl/json/token/token_test.go34
14 files changed, 0 insertions, 839 deletions
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/parser_test.go b/vendor/github.com/hashicorp/hcl/json/parser/parser_test.go
deleted file mode 100644
index e0cebf50a..000000000
--- a/vendor/github.com/hashicorp/hcl/json/parser/parser_test.go
+++ /dev/null
@@ -1,384 +0,0 @@
-package parser
-
-import (
- "fmt"
- "io/ioutil"
- "path/filepath"
- "reflect"
- "runtime"
- "testing"
-
- "github.com/hashicorp/hcl/hcl/ast"
- "github.com/hashicorp/hcl/hcl/token"
-)
-
-func TestType(t *testing.T) {
- var literals = []struct {
- typ token.Type
- src string
- }{
- {token.STRING, `"foo": "bar"`},
- {token.NUMBER, `"foo": 123`},
- {token.FLOAT, `"foo": 123.12`},
- {token.FLOAT, `"foo": -123.12`},
- {token.BOOL, `"foo": true`},
- {token.STRING, `"foo": null`},
- }
-
- for _, l := range literals {
- t.Logf("Testing: %s", l.src)
-
- p := newParser([]byte(l.src))
- item, err := p.objectItem()
- if err != nil {
- t.Error(err)
- }
-
- lit, ok := item.Val.(*ast.LiteralType)
- if !ok {
- t.Errorf("node should be of type LiteralType, got: %T", item.Val)
- }
-
- if lit.Token.Type != l.typ {
- t.Errorf("want: %s, got: %s", l.typ, lit.Token.Type)
- }
- }
-}
-
-func TestListType(t *testing.T) {
- var literals = []struct {
- src string
- tokens []token.Type
- }{
- {
- `"foo": ["123", 123]`,
- []token.Type{token.STRING, token.NUMBER},
- },
- {
- `"foo": [123, "123",]`,
- []token.Type{token.NUMBER, token.STRING},
- },
- {
- `"foo": []`,
- []token.Type{},
- },
- {
- `"foo": ["123", 123]`,
- []token.Type{token.STRING, token.NUMBER},
- },
- {
- `"foo": ["123", {}]`,
- []token.Type{token.STRING, token.LBRACE},
- },
- }
-
- for _, l := range literals {
- t.Logf("Testing: %s", l.src)
-
- p := newParser([]byte(l.src))
- item, err := p.objectItem()
- if err != nil {
- t.Error(err)
- }
-
- list, ok := item.Val.(*ast.ListType)
- if !ok {
- t.Errorf("node should be of type LiteralType, got: %T", item.Val)
- }
-
- tokens := []token.Type{}
- for _, li := range list.List {
- switch v := li.(type) {
- case *ast.LiteralType:
- tokens = append(tokens, v.Token.Type)
- case *ast.ObjectType:
- tokens = append(tokens, token.LBRACE)
- }
- }
-
- equals(t, l.tokens, tokens)
- }
-}
-
-func TestObjectType(t *testing.T) {
- var literals = []struct {
- src string
- nodeType []ast.Node
- itemLen int
- }{
- {
- `"foo": {}`,
- nil,
- 0,
- },
- {
- `"foo": {
- "bar": "fatih"
- }`,
- []ast.Node{&ast.LiteralType{}},
- 1,
- },
- {
- `"foo": {
- "bar": "fatih",
- "baz": ["arslan"]
- }`,
- []ast.Node{
- &ast.LiteralType{},
- &ast.ListType{},
- },
- 2,
- },
- {
- `"foo": {
- "bar": {}
- }`,
- []ast.Node{
- &ast.ObjectType{},
- },
- 1,
- },
- {
- `"foo": {
- "bar": {},
- "foo": true
- }`,
- []ast.Node{
- &ast.ObjectType{},
- &ast.LiteralType{},
- },
- 2,
- },
- }
-
- for _, l := range literals {
- t.Logf("Testing:\n%s\n", l.src)
-
- p := newParser([]byte(l.src))
- // p.enableTrace = true
- item, err := p.objectItem()
- if err != nil {
- t.Error(err)
- }
-
- // we know that the ObjectKey name is foo for all cases, what matters
- // is the object
- obj, ok := item.Val.(*ast.ObjectType)
- if !ok {
- t.Errorf("node should be of type LiteralType, got: %T", item.Val)
- }
-
- // check if the total length of items are correct
- equals(t, l.itemLen, len(obj.List.Items))
-
- // check if the types are correct
- for i, item := range obj.List.Items {
- equals(t, reflect.TypeOf(l.nodeType[i]), reflect.TypeOf(item.Val))
- }
- }
-}
-
-func TestFlattenObjects(t *testing.T) {
- var literals = []struct {
- src string
- nodeType []ast.Node
- itemLen int
- }{
- {
- `{
- "foo": [
- {
- "foo": "svh",
- "bar": "fatih"
- }
- ]
- }`,
- []ast.Node{
- &ast.ObjectType{},
- &ast.LiteralType{},
- &ast.LiteralType{},
- },
- 3,
- },
- {
- `{
- "variable": {
- "foo": {}
- }
- }`,
- []ast.Node{
- &ast.ObjectType{},
- },
- 1,
- },
- {
- `{
- "empty": []
- }`,
- []ast.Node{
- &ast.ListType{},
- },
- 1,
- },
- {
- `{
- "basic": [1, 2, 3]
- }`,
- []ast.Node{
- &ast.ListType{},
- },
- 1,
- },
- }
-
- for _, l := range literals {
- t.Logf("Testing:\n%s\n", l.src)
-
- f, err := Parse([]byte(l.src))
- if err != nil {
- t.Error(err)
- }
-
- // the first object is always an ObjectList so just assert that one
- // so we can use it as such
- obj, ok := f.Node.(*ast.ObjectList)
- if !ok {
- t.Errorf("node should be *ast.ObjectList, got: %T", f.Node)
- }
-
- // check if the types are correct
- var i int
- for _, item := range obj.Items {
- equals(t, reflect.TypeOf(l.nodeType[i]), reflect.TypeOf(item.Val))
- i++
-
- if obj, ok := item.Val.(*ast.ObjectType); ok {
- for _, item := range obj.List.Items {
- equals(t, reflect.TypeOf(l.nodeType[i]), reflect.TypeOf(item.Val))
- i++
- }
- }
- }
-
- // check if the number of items is correct
- equals(t, l.itemLen, i)
-
- }
-}
-
-func TestObjectKey(t *testing.T) {
- keys := []struct {
- exp []token.Type
- src string
- }{
- {[]token.Type{token.STRING}, `"foo": {}`},
- }
-
- for _, k := range keys {
- p := newParser([]byte(k.src))
- keys, err := p.objectKey()
- if err != nil {
- t.Fatal(err)
- }
-
- tokens := []token.Type{}
- for _, o := range keys {
- tokens = append(tokens, o.Token.Type)
- }
-
- equals(t, k.exp, tokens)
- }
-
- errKeys := []struct {
- src string
- }{
- {`foo 12 {}`},
- {`foo bar = {}`},
- {`foo []`},
- {`12 {}`},
- }
-
- for _, k := range errKeys {
- p := newParser([]byte(k.src))
- _, err := p.objectKey()
- if err == nil {
- t.Errorf("case '%s' should give an error", k.src)
- }
- }
-}
-
-// Official HCL tests
-func TestParse(t *testing.T) {
- cases := []struct {
- Name string
- Err bool
- }{
- {
- "array.json",
- false,
- },
- {
- "basic.json",
- false,
- },
- {
- "object.json",
- false,
- },
- {
- "types.json",
- false,
- },
- {
- "bad_input_128.json",
- true,
- },
- {
- "bad_input_tf_8110.json",
- true,
- },
- {
- "good_input_tf_8110.json",
- false,
- },
- }
-
- const fixtureDir = "./test-fixtures"
-
- for _, tc := range cases {
- d, err := ioutil.ReadFile(filepath.Join(fixtureDir, tc.Name))
- if err != nil {
- t.Fatalf("err: %s", err)
- }
-
- _, err = Parse(d)
- if (err != nil) != tc.Err {
- t.Fatalf("Input: %s\n\nError: %s", tc.Name, err)
- }
- }
-}
-
-func TestParse_inline(t *testing.T) {
- cases := []struct {
- Value string
- Err bool
- }{
- {"{:{", true},
- }
-
- for _, tc := range cases {
- _, err := Parse([]byte(tc.Value))
- if (err != nil) != tc.Err {
- t.Fatalf("Input: %q\n\nError: %s", tc.Value, err)
- }
- }
-}
-
-// equals fails the test if exp is not equal to act.
-func equals(tb testing.TB, exp, act interface{}) {
- if !reflect.DeepEqual(exp, act) {
- _, file, line, _ := runtime.Caller(1)
- fmt.Printf("\033[31m%s:%d:\n\n\texp: %s\n\n\tgot: %s\033[39m\n\n", filepath.Base(file), line, exp, act)
- tb.FailNow()
- }
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/array.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/array.json
deleted file mode 100644
index e320f17ab..000000000
--- a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/array.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "foo": [1, 2, "bar"],
- "bar": "baz"
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_128.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_128.json
deleted file mode 100644
index b5f850c96..000000000
--- a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_128.json
+++ /dev/null
@@ -1 +0,0 @@
-{:{
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_tf_8110.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_tf_8110.json
deleted file mode 100644
index a04385833..000000000
--- a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/bad_input_tf_8110.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "variable": {
- "poc": {
- "default": "${replace("europe-west", "-", " ")}"
- }
- }
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/basic.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/basic.json
deleted file mode 100644
index b54bde96c..000000000
--- a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/basic.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "foo": "bar"
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/good_input_tf_8110.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/good_input_tf_8110.json
deleted file mode 100644
index f21aa090d..000000000
--- a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/good_input_tf_8110.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "variable": {
- "poc": {
- "default": "${replace(\"europe-west\", \"-\", \" \")}"
- }
- }
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/object.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/object.json
deleted file mode 100644
index 72168a3cc..000000000
--- a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/object.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "foo": {
- "bar": [1,2]
- }
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/types.json b/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/types.json
deleted file mode 100644
index 9a142a6ca..000000000
--- a/vendor/github.com/hashicorp/hcl/json/parser/test-fixtures/types.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "foo": "bar",
- "bar": 7,
- "baz": [1,2,3],
- "foo": -12,
- "bar": 3.14159,
- "foo": true,
- "bar": false,
- "foo": null
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/scanner/scanner_test.go b/vendor/github.com/hashicorp/hcl/json/scanner/scanner_test.go
deleted file mode 100644
index 3033a5797..000000000
--- a/vendor/github.com/hashicorp/hcl/json/scanner/scanner_test.go
+++ /dev/null
@@ -1,362 +0,0 @@
-package scanner
-
-import (
- "bytes"
- "fmt"
- "testing"
-
- "github.com/hashicorp/hcl/json/token"
-)
-
-var f100 = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
-
-type tokenPair struct {
- tok token.Type
- text string
-}
-
-var tokenLists = map[string][]tokenPair{
- "operator": []tokenPair{
- {token.LBRACK, "["},
- {token.LBRACE, "{"},
- {token.COMMA, ","},
- {token.PERIOD, "."},
- {token.RBRACK, "]"},
- {token.RBRACE, "}"},
- },
- "bool": []tokenPair{
- {token.BOOL, "true"},
- {token.BOOL, "false"},
- },
- "string": []tokenPair{
- {token.STRING, `" "`},
- {token.STRING, `"a"`},
- {token.STRING, `"本"`},
- {token.STRING, `"${file(\"foo\")}"`},
- {token.STRING, `"\a"`},
- {token.STRING, `"\b"`},
- {token.STRING, `"\f"`},
- {token.STRING, `"\n"`},
- {token.STRING, `"\r"`},
- {token.STRING, `"\t"`},
- {token.STRING, `"\v"`},
- {token.STRING, `"\""`},
- {token.STRING, `"\000"`},
- {token.STRING, `"\777"`},
- {token.STRING, `"\x00"`},
- {token.STRING, `"\xff"`},
- {token.STRING, `"\u0000"`},
- {token.STRING, `"\ufA16"`},
- {token.STRING, `"\U00000000"`},
- {token.STRING, `"\U0000ffAB"`},
- {token.STRING, `"` + f100 + `"`},
- },
- "number": []tokenPair{
- {token.NUMBER, "0"},
- {token.NUMBER, "1"},
- {token.NUMBER, "9"},
- {token.NUMBER, "42"},
- {token.NUMBER, "1234567890"},
- {token.NUMBER, "-0"},
- {token.NUMBER, "-1"},
- {token.NUMBER, "-9"},
- {token.NUMBER, "-42"},
- {token.NUMBER, "-1234567890"},
- },
- "float": []tokenPair{
- {token.FLOAT, "0."},
- {token.FLOAT, "1."},
- {token.FLOAT, "42."},
- {token.FLOAT, "01234567890."},
- {token.FLOAT, ".0"},
- {token.FLOAT, ".1"},
- {token.FLOAT, ".42"},
- {token.FLOAT, ".0123456789"},
- {token.FLOAT, "0.0"},
- {token.FLOAT, "1.0"},
- {token.FLOAT, "42.0"},
- {token.FLOAT, "01234567890.0"},
- {token.FLOAT, "0e0"},
- {token.FLOAT, "1e0"},
- {token.FLOAT, "42e0"},
- {token.FLOAT, "01234567890e0"},
- {token.FLOAT, "0E0"},
- {token.FLOAT, "1E0"},
- {token.FLOAT, "42E0"},
- {token.FLOAT, "01234567890E0"},
- {token.FLOAT, "0e+10"},
- {token.FLOAT, "1e-10"},
- {token.FLOAT, "42e+10"},
- {token.FLOAT, "01234567890e-10"},
- {token.FLOAT, "0E+10"},
- {token.FLOAT, "1E-10"},
- {token.FLOAT, "42E+10"},
- {token.FLOAT, "01234567890E-10"},
- {token.FLOAT, "01.8e0"},
- {token.FLOAT, "1.4e0"},
- {token.FLOAT, "42.2e0"},
- {token.FLOAT, "01234567890.12e0"},
- {token.FLOAT, "0.E0"},
- {token.FLOAT, "1.12E0"},
- {token.FLOAT, "42.123E0"},
- {token.FLOAT, "01234567890.213E0"},
- {token.FLOAT, "0.2e+10"},
- {token.FLOAT, "1.2e-10"},
- {token.FLOAT, "42.54e+10"},
- {token.FLOAT, "01234567890.98e-10"},
- {token.FLOAT, "0.1E+10"},
- {token.FLOAT, "1.1E-10"},
- {token.FLOAT, "42.1E+10"},
- {token.FLOAT, "01234567890.1E-10"},
- {token.FLOAT, "-0.0"},
- {token.FLOAT, "-1.0"},
- {token.FLOAT, "-42.0"},
- {token.FLOAT, "-01234567890.0"},
- {token.FLOAT, "-0e0"},
- {token.FLOAT, "-1e0"},
- {token.FLOAT, "-42e0"},
- {token.FLOAT, "-01234567890e0"},
- {token.FLOAT, "-0E0"},
- {token.FLOAT, "-1E0"},
- {token.FLOAT, "-42E0"},
- {token.FLOAT, "-01234567890E0"},
- {token.FLOAT, "-0e+10"},
- {token.FLOAT, "-1e-10"},
- {token.FLOAT, "-42e+10"},
- {token.FLOAT, "-01234567890e-10"},
- {token.FLOAT, "-0E+10"},
- {token.FLOAT, "-1E-10"},
- {token.FLOAT, "-42E+10"},
- {token.FLOAT, "-01234567890E-10"},
- {token.FLOAT, "-01.8e0"},
- {token.FLOAT, "-1.4e0"},
- {token.FLOAT, "-42.2e0"},
- {token.FLOAT, "-01234567890.12e0"},
- {token.FLOAT, "-0.E0"},
- {token.FLOAT, "-1.12E0"},
- {token.FLOAT, "-42.123E0"},
- {token.FLOAT, "-01234567890.213E0"},
- {token.FLOAT, "-0.2e+10"},
- {token.FLOAT, "-1.2e-10"},
- {token.FLOAT, "-42.54e+10"},
- {token.FLOAT, "-01234567890.98e-10"},
- {token.FLOAT, "-0.1E+10"},
- {token.FLOAT, "-1.1E-10"},
- {token.FLOAT, "-42.1E+10"},
- {token.FLOAT, "-01234567890.1E-10"},
- },
-}
-
-var orderedTokenLists = []string{
- "comment",
- "operator",
- "bool",
- "string",
- "number",
- "float",
-}
-
-func TestPosition(t *testing.T) {
- // create artifical source code
- buf := new(bytes.Buffer)
-
- for _, listName := range orderedTokenLists {
- for _, ident := range tokenLists[listName] {
- fmt.Fprintf(buf, "\t\t\t\t%s\n", ident.text)
- }
- }
-
- s := New(buf.Bytes())
-
- pos := token.Pos{"", 4, 1, 5}
- s.Scan()
- for _, listName := range orderedTokenLists {
-
- for _, k := range tokenLists[listName] {
- curPos := s.tokPos
- // fmt.Printf("[%q] s = %+v:%+v\n", k.text, curPos.Offset, curPos.Column)
-
- if curPos.Offset != pos.Offset {
- t.Fatalf("offset = %d, want %d for %q", curPos.Offset, pos.Offset, k.text)
- }
- if curPos.Line != pos.Line {
- t.Fatalf("line = %d, want %d for %q", curPos.Line, pos.Line, k.text)
- }
- if curPos.Column != pos.Column {
- t.Fatalf("column = %d, want %d for %q", curPos.Column, pos.Column, k.text)
- }
- pos.Offset += 4 + len(k.text) + 1 // 4 tabs + token bytes + newline
- pos.Line += countNewlines(k.text) + 1 // each token is on a new line
-
- s.Error = func(pos token.Pos, msg string) {
- t.Errorf("error %q for %q", msg, k.text)
- }
-
- s.Scan()
- }
- }
- // make sure there were no token-internal errors reported by scanner
- if s.ErrorCount != 0 {
- t.Errorf("%d errors", s.ErrorCount)
- }
-}
-
-func TestComment(t *testing.T) {
- testTokenList(t, tokenLists["comment"])
-}
-
-func TestOperator(t *testing.T) {
- testTokenList(t, tokenLists["operator"])
-}
-
-func TestBool(t *testing.T) {
- testTokenList(t, tokenLists["bool"])
-}
-
-func TestIdent(t *testing.T) {
- testTokenList(t, tokenLists["ident"])
-}
-
-func TestString(t *testing.T) {
- testTokenList(t, tokenLists["string"])
-}
-
-func TestNumber(t *testing.T) {
- testTokenList(t, tokenLists["number"])
-}
-
-func TestFloat(t *testing.T) {
- testTokenList(t, tokenLists["float"])
-}
-
-func TestRealExample(t *testing.T) {
- complexReal := `
-{
- "variable": {
- "foo": {
- "default": "bar",
- "description": "bar",
- "depends_on": ["something"]
- }
- }
-}`
-
- literals := []struct {
- tokenType token.Type
- literal string
- }{
- {token.LBRACE, `{`},
- {token.STRING, `"variable"`},
- {token.COLON, `:`},
- {token.LBRACE, `{`},
- {token.STRING, `"foo"`},
- {token.COLON, `:`},
- {token.LBRACE, `{`},
- {token.STRING, `"default"`},
- {token.COLON, `:`},
- {token.STRING, `"bar"`},
- {token.COMMA, `,`},
- {token.STRING, `"description"`},
- {token.COLON, `:`},
- {token.STRING, `"bar"`},
- {token.COMMA, `,`},
- {token.STRING, `"depends_on"`},
- {token.COLON, `:`},
- {token.LBRACK, `[`},
- {token.STRING, `"something"`},
- {token.RBRACK, `]`},
- {token.RBRACE, `}`},
- {token.RBRACE, `}`},
- {token.RBRACE, `}`},
- {token.EOF, ``},
- }
-
- s := New([]byte(complexReal))
- for _, l := range literals {
- tok := s.Scan()
- if l.tokenType != tok.Type {
- t.Errorf("got: %s want %s for %s\n", tok, l.tokenType, tok.String())
- }
-
- if l.literal != tok.Text {
- t.Errorf("got: %s want %s\n", tok, l.literal)
- }
- }
-
-}
-
-func TestError(t *testing.T) {
- testError(t, "\x80", "1:1", "illegal UTF-8 encoding", token.ILLEGAL)
- testError(t, "\xff", "1:1", "illegal UTF-8 encoding", token.ILLEGAL)
-
- testError(t, `"ab`+"\x80", "1:4", "illegal UTF-8 encoding", token.STRING)
- testError(t, `"abc`+"\xff", "1:5", "illegal UTF-8 encoding", token.STRING)
-
- testError(t, `01238`, "1:7", "numbers cannot start with 0", token.NUMBER)
- testError(t, `01238123`, "1:10", "numbers cannot start with 0", token.NUMBER)
- testError(t, `'aa'`, "1:1", "illegal char: '", token.ILLEGAL)
-
- testError(t, `"`, "1:2", "literal not terminated", token.STRING)
- testError(t, `"abc`, "1:5", "literal not terminated", token.STRING)
- testError(t, `"abc`+"\n", "1:5", "literal not terminated", token.STRING)
-}
-
-func testError(t *testing.T, src, pos, msg string, tok token.Type) {
- s := New([]byte(src))
-
- errorCalled := false
- s.Error = func(p token.Pos, m string) {
- if !errorCalled {
- if pos != p.String() {
- t.Errorf("pos = %q, want %q for %q", p, pos, src)
- }
-
- if m != msg {
- t.Errorf("msg = %q, want %q for %q", m, msg, src)
- }
- errorCalled = true
- }
- }
-
- tk := s.Scan()
- if tk.Type != tok {
- t.Errorf("tok = %s, want %s for %q", tk, tok, src)
- }
- if !errorCalled {
- t.Errorf("error handler not called for %q", src)
- }
- if s.ErrorCount == 0 {
- t.Errorf("count = %d, want > 0 for %q", s.ErrorCount, src)
- }
-}
-
-func testTokenList(t *testing.T, tokenList []tokenPair) {
- // create artifical source code
- buf := new(bytes.Buffer)
- for _, ident := range tokenList {
- fmt.Fprintf(buf, "%s\n", ident.text)
- }
-
- s := New(buf.Bytes())
- for _, ident := range tokenList {
- tok := s.Scan()
- if tok.Type != ident.tok {
- t.Errorf("tok = %q want %q for %q\n", tok, ident.tok, ident.text)
- }
-
- if tok.Text != ident.text {
- t.Errorf("text = %q want %q", tok.String(), ident.text)
- }
-
- }
-}
-
-func countNewlines(s string) int {
- n := 0
- for _, ch := range s {
- if ch == '\n' {
- n++
- }
- }
- return n
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/test-fixtures/array.json b/vendor/github.com/hashicorp/hcl/json/test-fixtures/array.json
deleted file mode 100644
index e320f17ab..000000000
--- a/vendor/github.com/hashicorp/hcl/json/test-fixtures/array.json
+++ /dev/null
@@ -1,4 +0,0 @@
-{
- "foo": [1, 2, "bar"],
- "bar": "baz"
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/test-fixtures/basic.json b/vendor/github.com/hashicorp/hcl/json/test-fixtures/basic.json
deleted file mode 100644
index b54bde96c..000000000
--- a/vendor/github.com/hashicorp/hcl/json/test-fixtures/basic.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "foo": "bar"
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/test-fixtures/object.json b/vendor/github.com/hashicorp/hcl/json/test-fixtures/object.json
deleted file mode 100644
index 72168a3cc..000000000
--- a/vendor/github.com/hashicorp/hcl/json/test-fixtures/object.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "foo": {
- "bar": [1,2]
- }
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/test-fixtures/types.json b/vendor/github.com/hashicorp/hcl/json/test-fixtures/types.json
deleted file mode 100644
index 9a142a6ca..000000000
--- a/vendor/github.com/hashicorp/hcl/json/test-fixtures/types.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "foo": "bar",
- "bar": 7,
- "baz": [1,2,3],
- "foo": -12,
- "bar": 3.14159,
- "foo": true,
- "bar": false,
- "foo": null
-}
diff --git a/vendor/github.com/hashicorp/hcl/json/token/token_test.go b/vendor/github.com/hashicorp/hcl/json/token/token_test.go
deleted file mode 100644
index a83fdd55b..000000000
--- a/vendor/github.com/hashicorp/hcl/json/token/token_test.go
+++ /dev/null
@@ -1,34 +0,0 @@
-package token
-
-import (
- "testing"
-)
-
-func TestTypeString(t *testing.T) {
- var tokens = []struct {
- tt Type
- str string
- }{
- {ILLEGAL, "ILLEGAL"},
- {EOF, "EOF"},
- {NUMBER, "NUMBER"},
- {FLOAT, "FLOAT"},
- {BOOL, "BOOL"},
- {STRING, "STRING"},
- {NULL, "NULL"},
- {LBRACK, "LBRACK"},
- {LBRACE, "LBRACE"},
- {COMMA, "COMMA"},
- {PERIOD, "PERIOD"},
- {RBRACK, "RBRACK"},
- {RBRACE, "RBRACE"},
- }
-
- for _, token := range tokens {
- if token.tt.String() != token.str {
- t.Errorf("want: %q got:%q\n", token.str, token.tt)
-
- }
- }
-
-}